   , fDetailedMonitoring(kFALSE)
   , fBaggedSampleFraction(0)
   , fBoostedMethodTitle(methodTitle)
   , fBoostedMethodOptions(theOption)
   , fMonitorBoostedMethod(kFALSE)
   , fOverlap_integral(0.0)

   , fDetailedMonitoring(kFALSE)
   , fBaggedSampleFraction(0)
   , fBoostedMethodTitle("")
   , fBoostedMethodOptions("")
   , fMonitorBoostedMethod(kFALSE)
   , fOverlap_integral(0.0)

   fMethodWeight.clear();

   fTrainSigMVAHist.clear();
   fTrainBgdMVAHist.clear();
   fBTrainSigMVAHist.clear();
   fBTrainBgdMVAHist.clear();
   fTestSigMVAHist.clear();
   fTestBgdMVAHist.clear();
   DeclareOptionRef( fBoostNum = 1, "Boost_Num",
                     "Number of times the classifier is boosted" );
   DeclareOptionRef( fMonitorBoostedMethod = kTRUE, "Boost_MonitorMethod",
                     "Write monitoring histograms for each boosted classifier" );
   DeclareOptionRef( fDetailedMonitoring = kFALSE, "Boost_DetailedMonitoring",
                     "Produce histograms for detailed boost monitoring" );
   DeclareOptionRef( fBoostType = "AdaBoost", "Boost_Type", "Boosting type for the classifiers" );
   AddPreDefVal(TString("RealAdaBoost"));
   AddPreDefVal(TString("AdaBoost"));
   AddPreDefVal(TString("Bagging"));

   DeclareOptionRef( fBaggedSampleFraction = .6, "Boost_BaggedSampleFraction",
                     "Relative size of bagged event sample to original size of the data sample (used whenever bagging is used)" );

   DeclareOptionRef( fAdaBoostBeta = 1.0, "Boost_AdaBoostBeta",
                     "The AdaBoost parameter that sets the effect of every boost step on the events' weights" );
   DeclareOptionRef( fTransformString = "step", "Boost_Transform",
                     "Type of transform applied to every boosted method: linear, log, step" );
   AddPreDefVal(TString("linear"));
   AddPreDefVal(TString("gauss"));
   DeclareOptionRef( fRandomSeed = 0, "Boost_RandomSeed",
                     "Seed for random number generator used for bagging" );
   DeclareOptionRef( fHistoricOption = "ByError", "Boost_MethodWeightType",
                     "How to set the final weight of the boosted classifiers" );
   AddPreDefVal(TString("ByError"));
   AddPreDefVal(TString("Average"));
   AddPreDefVal(TString("ByROC"));
   AddPreDefVal(TString("ByOverlap"));
   AddPreDefVal(TString("LastMethod"));

   DeclareOptionRef( fHistoricOption = "step", "Boost_Transform",
                     "Type of transform applied to every boosted method: linear, log, step" );
   AddPreDefVal(TString("linear"));
   AddPreDefVal(TString("gauss"));
   AddPreDefVal(TString("HighEdgeGauss"));
   AddPreDefVal(TString("HighEdgeCoPara"));

   DeclareOptionRef( fHistoricBoolOption, "Boost_RecalculateMVACut",
                     "Recalculate the classifier MVA signal-like cut at every boost iteration" );
   fBoostedMethodTitle   = methodTitle;
   fBoostedMethodOptions = theOption;

   results->Store(new TH1F("MethodWeight","Normalized Classifier Weight",fBoostNum,0,fBoostNum),"ClassifierWeight");
   results->Store(new TH1F("BoostWeight","Boost Weight",fBoostNum,0,fBoostNum),"BoostWeight");
   results->Store(new TH1F("ErrFraction","Error Fraction (by boosted event weights)",fBoostNum,0,fBoostNum),"ErrorFraction");
   if (fDetailedMonitoring){
      results->Store(new TH1F("ROCIntegral_test","ROC integral of single classifier (testing sample)",fBoostNum,0,fBoostNum),"ROCIntegral_test");
      results->Store(new TH1F("ROCIntegralBoosted_test","ROC integral of boosted method (testing sample)",fBoostNum,0,fBoostNum),"ROCIntegralBoosted_test");
      results->Store(new TH1F("ROCIntegral_train","ROC integral of single classifier (training sample)",fBoostNum,0,fBoostNum),"ROCIntegral_train");
      results->Store(new TH1F("ROCIntegralBoosted_train","ROC integral of boosted method (training sample)",fBoostNum,0,fBoostNum),"ROCIntegralBoosted_train");
      results->Store(new TH1F("OverlapIntegral_train","Overlap integral (training sample)",fBoostNum,0,fBoostNum),"Overlap");
   if (fDetailedMonitoring){
      results->Store(new TH1F("SoverBtotal","S/B in reweighted training sample",fBoostNum,0,fBoostNum),"SoverBtotal");
      results->Store(new TH1F("SeparationGain","SeparationGain",fBoostNum,0,fBoostNum),"SeparationGain");

   fMonitorTree = new TTree("MonitorBoost","Boost variables");
   fMonitorTree->Branch("iMethod",&fCurrentMethodIdx,"iMethod/I");
   fMonitorTree->Branch("boostWeight",&fBoostWeight,"boostWeight/D");
   fMonitorTree->Branch("errorFraction",&fMethodError,"errorFraction/D");
   fMonitorBoostedMethod = kTRUE;
   Log() << kDEBUG << "CheckSetup: fBoostType=" << fBoostType << Endl;
   Log() << kDEBUG << "CheckSetup: fAdaBoostBeta=" << fAdaBoostBeta << Endl;
   Log() << kDEBUG << "CheckSetup: fBoostWeight=" << fBoostWeight << Endl;
   Log() << kDEBUG << "CheckSetup: fMethodError=" << fMethodError << Endl;
   Log() << kDEBUG << "CheckSetup: fBoostNum=" << fBoostNum << Endl;
   Log() << kDEBUG << "CheckSetup: fRandomSeed=" << fRandomSeed << Endl;
   Log() << kDEBUG << "CheckSetup: fTrainSigMVAHist.size()=" << fTrainSigMVAHist.size() << Endl;
   Log() << kDEBUG << "CheckSetup: fTestSigMVAHist.size()=" << fTestSigMVAHist.size() << Endl;
   Log() << kDEBUG << "CheckSetup: fMonitorBoostedMethod=" << (fMonitorBoostedMethod ? "true" : "false") << Endl;
   Log() << kDEBUG << "CheckSetup: MName=" << fBoostedMethodName << " Title=" << fBoostedMethodTitle << Endl;
   Log() << kDEBUG << "CheckSetup: MOptions=" << fBoostedMethodOptions << Endl;
   Log() << kDEBUG << "CheckSetup: fMonitorTree=" << fMonitorTree << Endl;
   Log() << kDEBUG << "CheckSetup: fCurrentMethodIdx=" << fCurrentMethodIdx << Endl;
   if (fMethods.size() > 0) Log() << kDEBUG << "CheckSetup: fMethods[0]" << fMethods[0] << Endl;
   Log() << kDEBUG << "CheckSetup: fMethodWeight.size()" << fMethodWeight.size() << Endl;
   if (fMethodWeight.size() > 0) Log() << kDEBUG << "CheckSetup: fMethodWeight[0]=" << fMethodWeight[0] << Endl;
   Log() << kDEBUG << "CheckSetup: trying to repair things" << Endl;
   if (Data()->GetNTrainingEvents()==0) Log() << kFATAL << "<Train> Data() has zero events" << Endl;

   if (fMethods.size() > 0) fMethods.clear();
   fMVAvalues->resize(Data()->GetNTrainingEvents(), 0.0);

   Log() << kINFO << "Training " << fBoostNum << " " << fBoostedMethodName
         << " with title " << fBoostedMethodTitle << " Classifiers ... patience please" << Endl;
   Timer timer( fBoostNum, GetName() );

   Ssiz_t varTrafoStart = fBoostedMethodOptions.Index("~VarTransform=");
   if (varTrafoStart > 0) {
      Ssiz_t varTrafoEnd = fBoostedMethodOptions.Index(":",varTrafoStart);
      if (varTrafoEnd < varTrafoStart)
         varTrafoEnd = fBoostedMethodOptions.Length();
      fBoostedMethodOptions.Remove(varTrafoStart,varTrafoEnd-varTrafoStart);
   for (fCurrentMethodIdx=0; fCurrentMethodIdx<fBoostNum; fCurrentMethodIdx++) {

         fBoostedMethodName.Data(), GetJobName(), Form("%s_B%04i", fBoostedMethodTitle.Data(), fCurrentMethodIdx),
         DataInfo(), fBoostedMethodOptions);

      fCurrentMethod = (dynamic_cast<MethodBase*>(method));
      if (fCurrentMethod==0) {
         Log() << kFATAL << "Oops... the booking of the " << fCurrentMethodIdx
               << "-th classifier seems to have failed" << Endl;

         Log() << kFATAL << "Method with type kCategory cannot be cast to MethodCategory. /MethodBoost" << Endl;
      fCurrentMethod->SetMsgType(kWARNING);
      fCurrentMethod->SetupMethod();
      fCurrentMethod->ParseOptions();
      fCurrentMethod->SetAnalysisType( GetAnalysisType() );
      fCurrentMethod->ProcessSetup();
      fCurrentMethod->CheckSetup();

      fCurrentMethod->RerouteTransformationHandler( &(this->GetTransformationHandler()) );

      if (fMonitorBoostedMethod) {
         methodDir = GetFile()->GetDirectory(dirName = Form("%s_B%04i",fBoostedMethodName.Data(),fCurrentMethodIdx));
         methodDir = BaseDir()->mkdir(dirName, dirTitle = Form("Directory Boosted %s #%04i", fBoostedMethodName.Data(), fCurrentMethodIdx));
         fCurrentMethod->SetMethodDir(methodDir);
         fCurrentMethod->BaseDir()->cd();
      if (fBoostType=="Bagging") Bagging();
      if (!IsSilentFile()) fCurrentMethod->WriteMonitoringHistosToFile();

      if (!IsSilentFile())
         if (fCurrentMethodIdx==0 && fMonitorBoostedMethod) CreateMVAHistorgrams();

      SingleBoost(fCurrentMethod);

      if (fDetailedMonitoring) {

      fMonitorTree->Fill();

      Log() << kDEBUG << "AdaBoost (methodErr) err = " << fMethodError << Endl;
      if (fMethodError > 0.49999) StopCounter++;
      if (StopCounter > 0 && fBoostType != "Bagging") {
         fBoostNum = fCurrentMethodIdx+1;
         Log() << kINFO << "Error rate has reached 0.5 (" << fMethodError
               << "), boosting process stopped at #" << fBoostNum << " classifier" << Endl;
         Log() << kINFO << "The classifier might be too strong to boost with Beta = "
               << fAdaBoostBeta << ", try reducing it." << Endl;
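         // Why 0.5 is the stopping point: the classifier weight computed in
         // AdaBoost() below is alpha = fAdaBoostBeta * ln((1-err)/err). At
         // err = 0.5 this vanishes and beyond it turns negative, so a member
         // performing no better than random guessing cannot contribute
         // constructively and the boost loop is cut short.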
   Timer* timer1 = new Timer( fBoostNum, GetName() );

   for (fCurrentMethodIdx=0; fCurrentMethodIdx<fBoostNum; fCurrentMethodIdx++) {
      if (fCurrentMethodIdx==fBoostNum) {

      TH1F* tmp = dynamic_cast<TH1F*>( results->GetHist("ClassifierWeight") );
      if (tmp) tmp->SetBinContent(fCurrentMethodIdx+1, fMethodWeight[fCurrentMethodIdx]);

   if (fMethods.size()==1) fMethodWeight[0] = 1.0;

   fBoostedMethodOptions = GetOptions();

   if (fBoostNum <= 0) Log() << kFATAL << "CreateHistograms called before fBoostNum is initialized" << Endl;

   Int_t signalClass = 0;
   if (DataInfo().GetClassInfo("Signal") != 0) {
      signalClass = DataInfo().GetClassInfo("Signal")->GetNumber();

                  meanS, meanB, rmsS, rmsB, xmin, xmax, signalClass );
   for (UInt_t imtd=0; imtd<fBoostNum; imtd++) {
      fTrainSigMVAHist .push_back( new TH1F( Form("MVA_Train_S_%04i",imtd), "MVA_Train_S",        fNbins, xmin, xmax ) );
      fTrainBgdMVAHist .push_back( new TH1F( Form("MVA_Train_B%04i", imtd), "MVA_Train_B",        fNbins, xmin, xmax ) );
      fBTrainSigMVAHist.push_back( new TH1F( Form("MVA_BTrain_S%04i",imtd), "MVA_BoostedTrain_S", fNbins, xmin, xmax ) );
      fBTrainBgdMVAHist.push_back( new TH1F( Form("MVA_BTrain_B%04i",imtd), "MVA_BoostedTrain_B", fNbins, xmin, xmax ) );
      fTestSigMVAHist  .push_back( new TH1F( Form("MVA_Test_S%04i", imtd),  "MVA_Test_S",         fNbins, xmin, xmax ) );
      fTestBgdMVAHist  .push_back( new TH1F( Form("MVA_Test_B%04i", imtd),  "MVA_Test_B",         fNbins, xmin, xmax ) );
   for (Long64_t ievt=0; ievt<GetNEvents(); ievt++) {
      const Event *ev = Data()->GetEvent(ievt);

   if (fMonitorBoostedMethod) {
      for (UInt_t imtd=0; imtd<fBoostNum; imtd++) {
         fTrainSigMVAHist[imtd]->SetDirectory(dir);
         fTrainSigMVAHist[imtd]->Write();
         fTrainBgdMVAHist[imtd]->SetDirectory(dir);
         fTrainBgdMVAHist[imtd]->Write();
         fBTrainSigMVAHist[imtd]->SetDirectory(dir);
         fBTrainSigMVAHist[imtd]->Write();
         fBTrainBgdMVAHist[imtd]->SetDirectory(dir);
         fBTrainBgdMVAHist[imtd]->Write();

   fMonitorTree->Write();
   if (fMonitorBoostedMethod) {
      UInt_t nloop = fTestSigMVAHist.size();
      if (fMethods.size() < nloop) nloop = fMethods.size();

      for (Long64_t ievt=0; ievt<GetNEvents(); ievt++) {
         const Event* ev = GetEvent(ievt);
         if (DataInfo().IsSignal(ev)) {
            for (UInt_t imtd=0; imtd<nloop; imtd++) {
               fTestSigMVAHist[imtd]->Fill(fMethods[imtd]->GetMvaValue(),w);
            for (UInt_t imtd=0; imtd<nloop; imtd++) {
               fTestBgdMVAHist[imtd]->Fill(fMethods[imtd]->GetMvaValue(),w);

   UInt_t nloop = fTestSigMVAHist.size();
   if (fMethods.size() < nloop) nloop = fMethods.size();
   if (fMonitorBoostedMethod) {
      for (UInt_t imtd=0; imtd<nloop; imtd++) {
         if (dir==0) continue;
         fTestSigMVAHist[imtd]->SetDirectory(dir);
         fTestSigMVAHist[imtd]->Write();
         fTestBgdMVAHist[imtd]->SetDirectory(dir);
         fTestBgdMVAHist[imtd]->Write();

   if (IsModelPersistence()) {
      TString _fFileDir = DataInfo().GetName();
   const Int_t nBins = 10001;
   for (Long64_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
      if (val > maxMVA) maxMVA = val;
      if (val < minMVA) minMVA = val;
   maxMVA = maxMVA + (maxMVA-minMVA)/nBins;

   TH1D *mvaS  = new TH1D(Form("MVAS_%d", fCurrentMethodIdx), "", nBins, minMVA, maxMVA);
   TH1D *mvaB  = new TH1D(Form("MVAB_%d", fCurrentMethodIdx), "", nBins, minMVA, maxMVA);
   TH1D *mvaSC = new TH1D(Form("MVASC_%d",fCurrentMethodIdx), "", nBins, minMVA, maxMVA);
   TH1D *mvaBC = new TH1D(Form("MVABC_%d",fCurrentMethodIdx), "", nBins, minMVA, maxMVA);
   if (fDetailedMonitoring){
      results->Store(mvaS,  Form("MVAS_%d", fCurrentMethodIdx));
      results->Store(mvaB,  Form("MVAB_%d", fCurrentMethodIdx));
      results->Store(mvaSC, Form("MVASC_%d",fCurrentMethodIdx));
      results->Store(mvaBC, Form("MVABC_%d",fCurrentMethodIdx));
   for (Long64_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
      Double_t weight = GetEvent(ievt)->GetWeight();
      if (DataInfo().IsSignal(GetEvent(ievt))){
         mvaS->Fill(mvaVal,weight);
      } else {
         mvaB->Fill(mvaVal,weight);
   for (Int_t ibin=1; ibin<=nBins; ibin++){
      if (separationGain < sepGain->GetSeparationGain(sSel,bSel,sTot,bTot)

   if (sSel*(bTot-bSel) > (sTot-sSel)*bSel) mvaCutOrientation = -1;
   else                                     mvaCutOrientation =  1;
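   // Sketch of the cut search above: each of the nBins bin edges is tried as a
   // cut on the MVA output, and the one maximizing the separation gain
   //    gain = I(parent) - [ (sSel+bSel)*I(left) + (sTot-sSel+bTot-bSel)*I(right) ] / (sTot+bTot)
   // (I being the configured separation index, e.g. the Gini index) is kept.
   // The orientation is -1 when the S/B ratio below the cut exceeds the one
   // above it, i.e. when the signal-like region lies below the chosen cut.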
            << " s2=" << (sTot-sSelCut)
            << " b2=" << (bTot-bSelCut)
            << " s/b(1)=" << sSelCut/bSelCut
            << " s/b(2)=" << (sTot-sSelCut)/(bTot-bSelCut)
            << " index before cut=" << parentIndex
            << " after: left=" << leftIndex
            << " after: right=" << rightIndex
            << " sepGain=" << parentIndex - ( (sSelCut+bSelCut)*leftIndex + (sTot-sSelCut+bTot-bSelCut)*rightIndex )/(sTot+bTot)
            << " sepGain=" << separationGain
            << " idx=" << fCurrentMethodIdx
            << " cutOrientation=" << mvaCutOrientation
   if      (fBoostType=="AdaBoost")     returnVal = this->AdaBoost(method,1);
   else if (fBoostType=="RealAdaBoost") returnVal = this->AdaBoost(method,0);
   else if (fBoostType=="Bagging")      returnVal = this->Bagging();

      Log() << kFATAL << "<Boost> unknown boost option " << fBoostType << " called" << Endl;

   fMethodWeight.push_back(returnVal);

   Log() << kWARNING << " AdaBoost called without classifier reference - needed for calculating AdaBoost " << Endl;
   if (discreteAdaBoost) {

   for (Long64_t evt=0; evt<GetNEvents(); evt++) {
      const Event* ev = Data()->GetEvent(evt);

   for (Long64_t ievt=0; ievt<GetNEvents(); ievt++) WrongDetection[ievt] = kTRUE;

   for (Long64_t ievt=0; ievt<GetNEvents(); ievt++) {
      const Event* ev = GetEvent(ievt);
      sig = DataInfo().IsSignal(ev);
      v   = fMVAvalues->at(ievt);

      if (fMonitorBoostedMethod) {
            fBTrainSigMVAHist[fCurrentMethodIdx]->Fill(v,w);
            fBTrainBgdMVAHist[fCurrentMethodIdx]->Fill(v,w);

      if (discreteAdaBoost){
            WrongDetection[ievt] = kFALSE;
            WrongDetection[ievt] = kTRUE;

         mvaProb = 2*(mvaProb-0.5);
         if (DataInfo().IsSignal(ev)) trueType = 1;
         sumWrong += w*trueType*mvaProb;

   fMethodError = sumWrong/sumAll;
   if (fMethodError == 0) {
      Log() << kWARNING << "Your classifier worked perfectly on the training sample --> serious overtraining expected and no boosting done " << Endl;
   if (discreteAdaBoost)
      boostWeight = TMath::Log((1.-fMethodError)/fMethodError)*fAdaBoostBeta;
   else
      boostWeight = TMath::Log((1.+fMethodError)/(1-fMethodError))*fAdaBoostBeta;
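   // The two boost-weight formulas above, with err = fMethodError and
   // beta = fAdaBoostBeta:
   //    discrete AdaBoost: alpha = beta * ln((1-err)/err)
   //    real AdaBoost:     alpha = beta * ln((1+err)/(1-err))
   // Worked example (discrete, beta = 1): err = 0.25 gives alpha = ln(3) ~ 1.1,
   // so a misclassified event's weight is subsequently scaled up by a factor
   // exp(alpha) = 3 in the reweighting loop below.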
   for (Long64_t ievt=0; ievt<GetNEvents(); ievt++) {
      const Event* ev = Data()->GetEvent(ievt);
      if (discreteAdaBoost){
         if (WrongDetection[ievt] && boostWeight != 0) {

         mvaProb = 2*(mvaProb-0.5);
         if (DataInfo().IsSignal(ev)) trueType = 1;
         boostfactor = TMath::Exp(-1*boostWeight*trueType*mvaProb);

   Double_t normWeight = oldSum/newSum;
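   // Renormalization: multiplying every event weight by normWeight = oldSum/newSum
   // restores the original total sum of training weights, so boosting only shifts
   // the relative emphasis towards previously misclassified events without
   // changing the overall normalization of the sample.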
   for (Long64_t ievt=0; ievt<GetNEvents(); ievt++) {
      const Event* ev = Data()->GetEvent(ievt);

   for (Long64_t ievt=0; ievt<GetNEvents(); ievt++) {
      const Event* ev = Data()->GetEvent(ievt);

   delete[] WrongDetection;
   if (MVAProb) delete MVAProb;

   fBoostWeight = boostWeight;

   for (Long64_t ievt=0; ievt<GetNEvents(); ievt++) {
      const Event* ev = Data()->GetEvent(ievt);
   Log() << "This method combines several classifiers of one species in a " << Endl;
   Log() << "single multivariate quantity via the boost algorithm." << Endl;
   Log() << "The output is a weighted sum over all individual classifiers." << Endl;
   Log() << "By default, the AdaBoost method is employed, which gives " << Endl;
   Log() << "events that were misclassified in the previous tree a larger " << Endl;
   Log() << "weight in the training of the following classifier." << Endl;
   Log() << "Optionally, Bagged boosting can also be applied." << Endl;

   Log() << "The most important parameters in the configuration are the " << Endl;
   Log() << "number of boosts applied (Boost_Num) and the choice of boosting" << Endl;
   Log() << "(Boost_Type), which can be set to either AdaBoost or Bagging." << Endl;
   Log() << "AdaBoosting: The most important parameter in this configuration" << Endl;
   Log() << "is the beta parameter (Boost_AdaBoostBeta)." << Endl;
   Log() << "When boosting a linear classifier, it is sometimes advantageous" << Endl;
   Log() << "to transform the MVA output non-linearly. The following options" << Endl;
   Log() << "are available: step, log, and minmax; the default is no transform." << Endl;

   Log() << "Some classifiers are hard to boost and do not improve much by" << Endl;
   Log() << "boosting; some even slightly deteriorate due to the boosting." << Endl;
   Log() << "The booking of the boost method is special since it requires" << Endl;
   Log() << "the booking of the method to be boosted and of the boost itself." << Endl;
   Log() << "This is solved by booking the method to be boosted and adding" << Endl;
   Log() << "all boost parameters, which all begin with \"Boost_\", to the" << Endl;
   Log() << "options string. The factory separates the options and initiates" << Endl;
   Log() << "the boost process. The TMVA macro directory contains the example" << Endl;
   Log() << "macro \"Boost.C\"." << Endl;
   for (UInt_t i=0; i<fMethods.size(); i++){
      Double_t sigcut = m->GetSignalReferenceCut();

      if (fTransformString == "linear"){

      else if (fTransformString == "log"){
         if (val < sigcut) val = sigcut;

      else if (fTransformString == "step" ){
         if (m->IsSignalLike(val)) val = 1.;

      else if (fTransformString == "gauss"){

         Log() << kFATAL << "error unknown transformation " << fTransformString << Endl;

      mvaValue += val*fMethodWeight[i];
      norm     += fMethodWeight[i];
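   // The response assembled in this loop is the weighted sum
   //    F(x) = ( sum_i w_i * T(f_i(x)) ) / ( sum_i w_i ),
   // where f_i is the i-th boosted classifier, w_i its method weight and T the
   // configured Boost_Transform; the "step" transform, for instance, turns each
   // classifier output into a +-1 vote around its signal-reference cut.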
   NoErrorCalc(err, errUpper);

   Data()->SetCurrentType(eTT);
   if (singleMethod && !method) {
      Log() << kFATAL << "Your method: "
            << fMethods.back()->GetName()
            << " does not seem to be a proper TMVA method"
   std::vector<Double_t> OldMethodWeight(fMethodWeight);
   if (!singleMethod) {
      for (UInt_t i=0; i<=fCurrentMethodIdx; i++)
         AllMethodsWeight += fMethodWeight.at(i);
      if (AllMethodsWeight != 0.0) {
         for (UInt_t i=0; i<=fCurrentMethodIdx; i++)
            fMethodWeight[i] /= AllMethodsWeight;

   std::vector<Float_t>* mvaRes;
      mvaRes = fMVAvalues;
      mvaRes = new std::vector<Float_t>(GetNEvents());
      for (Long64_t ievt=0; ievt<GetNEvents(); ievt++) {
         (*mvaRes)[ievt] = singleMethod ? method->GetMvaValue(&err) : GetMvaValue(&err);

   fMethodWeight = OldMethodWeight;

   Int_t signalClass = 0;
   if (DataInfo().GetClassInfo("Signal") != 0) {
      signalClass = DataInfo().GetClassInfo("Signal")->GetNumber();

                  meanS, meanB, rmsS, rmsB, xmin, xmax, signalClass );

   TH1 *mva_s_overlap=0, *mva_b_overlap=0;
   if (CalcOverlapIntergral) {
      mva_s_overlap = new TH1F( "MVA_S_OVERLAP", "MVA_S_OVERLAP", fNbins, xmin, xmax );
      mva_b_overlap = new TH1F( "MVA_B_OVERLAP", "MVA_B_OVERLAP", fNbins, xmin, xmax );
   for (Long64_t ievt=0; ievt<GetNEvents(); ievt++) {
      const Event* ev = GetEvent(ievt);
      if (DataInfo().IsSignal(ev)) mva_s->Fill( (*mvaRes)[ievt], w );
      else                         mva_b->Fill( (*mvaRes)[ievt], w );

      if (CalcOverlapIntergral) {
         if (DataInfo().IsSignal(ev))
            mva_s_overlap->Fill( (*mvaRes)[ievt], w_ov );
         else
            mva_b_overlap->Fill( (*mvaRes)[ievt], w_ov );
   if (CalcOverlapIntergral) {
      fOverlap_integral = 0.0;
         Double_t bc_b = mva_b_overlap->GetBinContent(bin);
         if (bc_s > 0.0 && bc_b > 0.0)

      delete mva_s_overlap;
      delete mva_b_overlap;
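   // The quantity accumulated above follows the usual overlap-integral definition
   //    O = sum_bins min(s_bin, b_bin)
   // on the signal and background response histograms (normalized to unit area):
   // O = 0 corresponds to perfectly separated samples, O = 1 to identical shapes.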
   Log() << kFATAL << "dynamic cast to MethodBase* failed" << Endl;

   for (Long64_t ievt=0; ievt<GetNEvents(); ievt++) {

   results->Store(new TH1I("NodesBeforePruning","nodes before pruning",this->GetBoostNum(),0,this->GetBoostNum()),"NodesBeforePruning");
   results->Store(new TH1I("NodesAfterPruning","nodes after pruning",this->GetBoostNum(),0,this->GetBoostNum()),"NodesAfterPruning");

   Log() << kINFO << "<Train> average number of nodes before/after pruning : "

   if (methodIndex < 3){
      Log() << kDEBUG << "No detailed boost monitoring for "
            << GetCurrentMethod(methodIndex)->GetMethodName()
            << " yet available " << Endl;

   if (fDetailedMonitoring){
      if (DataInfo().GetNVariables() == 2) {
         results->Store(new TH2F(Form("EventDistSig_%d",methodIndex),Form("EventDistSig_%d",methodIndex),100,0,7,100,0,7));
         results->Store(new TH2F(Form("EventDistBkg_%d",methodIndex),Form("EventDistBkg_%d",methodIndex),100,0,7,100,0,7));

      for (Long64_t ievt=0; ievt<GetNEvents(); ievt++) {
         const Event* ev = GetEvent(ievt);

         if (DataInfo().IsSignal(ev))
            h = results->GetHist2D(Form("EventDistSig_%d",methodIndex));