   , fDetailedMonitoring(kFALSE)
   , fBaggedSampleFraction(0)
   , fBoostedMethodTitle(methodTitle)
   , fMonitorBoostedMethod(kFALSE)
   , fOverlap_integral(0.0)

   , fDetailedMonitoring(kFALSE)
   , fBaggedSampleFraction(0)
   , fBoostedMethodTitle("")
   , fBoostedMethodOptions("")
   , fMonitorBoostedMethod(kFALSE)
   , fOverlap_integral(0.0)
 
   fMethodWeight.clear();
   fTrainSigMVAHist.clear();
   fTrainBgdMVAHist.clear();
   fBTrainSigMVAHist.clear();
   fBTrainBgdMVAHist.clear();
   fTestSigMVAHist.clear();
   fTestBgdMVAHist.clear();
 
   DeclareOptionRef( fBoostNum = 1, "Boost_Num",
                     "Number of times the classifier is boosted" );
   DeclareOptionRef( fMonitorBoostedMethod = kTRUE, "Boost_MonitorMethod",
                     "Write monitoring histograms for each boosted classifier" );
   DeclareOptionRef( fDetailedMonitoring = kFALSE, "Boost_DetailedMonitoring",
                     "Produce histograms for detailed boost monitoring" );
   DeclareOptionRef( fBoostType = "AdaBoost", "Boost_Type", "Boosting type for the classifiers" );
   AddPreDefVal(TString("RealAdaBoost"));
   AddPreDefVal(TString("AdaBoost"));
   AddPreDefVal(TString("Bagging"));
   DeclareOptionRef( fBaggedSampleFraction = 0.6, "Boost_BaggedSampleFraction",
                     "Relative size of the bagged event sample with respect to the original data sample (used whenever bagging is applied)" );
   DeclareOptionRef( fAdaBoostBeta = 1.0, "Boost_AdaBoostBeta",
                     "The AdaBoost parameter that sets the effect of every boost step on the events' weights" );
   DeclareOptionRef( fTransformString = "step", "Boost_Transform",
                     "Type of transform applied to every boosted method: linear, log, step" );
   AddPreDefVal(TString("linear"));
   AddPreDefVal(TString("gauss"));
   DeclareOptionRef( fRandomSeed = 0, "Boost_RandomSeed",
                     "Seed for the random number generator used for bagging" );
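// An illustrative (hypothetical) option string exercising the options declared
// above. Every "Boost_*" token is parsed by MethodBoost itself; the remaining
// tokens are passed on to the method being boosted:
//    "!H:!V:Boost_Num=100:Boost_Type=AdaBoost:Boost_AdaBoostBeta=0.5:Boost_Transform=step"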
 
 
   DeclareOptionRef( fHistoricOption = "ByError", "Boost_MethodWeightType",
                     "How to set the final weight of the boosted classifiers" );
   AddPreDefVal(TString("ByError"));
   AddPreDefVal(TString("Average"));
   AddPreDefVal(TString("ByROC"));
   AddPreDefVal(TString("ByOverlap"));
   AddPreDefVal(TString("LastMethod"));

   DeclareOptionRef( fHistoricOption = "step", "Boost_Transform",
                     "Type of transform applied to every boosted method: linear, log, step" );
   AddPreDefVal(TString("linear"));
   AddPreDefVal(TString("gauss"));
   AddPreDefVal(TString("HighEdgeGauss"));
   AddPreDefVal(TString("HighEdgeCoPara"));

   DeclareOptionRef( fHistoricBoolOption, "Boost_RecalculateMVACut",
                     "Recalculate the classifier's signal-like MVA cut at every boost iteration" );
 
 
   fBoostedMethodTitle = methodTitle;
 
 
   results->Store(new TH1F("MethodWeight","Normalized Classifier Weight",fBoostNum,0,fBoostNum),"ClassifierWeight");
   results->Store(new TH1F("BoostWeight","Boost Weight",fBoostNum,0,fBoostNum),"BoostWeight");
   results->Store(new TH1F("ErrFraction","Error Fraction (by boosted event weights)",fBoostNum,0,fBoostNum),"ErrorFraction");
   if (fDetailedMonitoring){
      results->Store(new TH1F("ROCIntegral_test","ROC integral of single classifier (testing sample)",fBoostNum,0,fBoostNum),"ROCIntegral_test");
      results->Store(new TH1F("ROCIntegralBoosted_test","ROC integral of boosted method (testing sample)",fBoostNum,0,fBoostNum),"ROCIntegralBoosted_test");
      results->Store(new TH1F("ROCIntegral_train","ROC integral of single classifier (training sample)",fBoostNum,0,fBoostNum),"ROCIntegral_train");
      results->Store(new TH1F("ROCIntegralBoosted_train","ROC integral of boosted method (training sample)",fBoostNum,0,fBoostNum),"ROCIntegralBoosted_train");
      results->Store(new TH1F("OverlapIntegral_train","Overlap integral (training sample)",fBoostNum,0,fBoostNum),"Overlap");
   }

   results->GetHist("ClassifierWeight")->GetXaxis()->SetTitle("Index of boosted classifier");
   results->GetHist("ClassifierWeight")->GetYaxis()->SetTitle("Classifier Weight");
   results->GetHist("BoostWeight")->GetXaxis()->SetTitle("Index of boosted classifier");
   results->GetHist("BoostWeight")->GetYaxis()->SetTitle("Boost Weight");
   results->GetHist("ErrorFraction")->GetXaxis()->SetTitle("Index of boosted classifier");
   results->GetHist("ErrorFraction")->GetYaxis()->SetTitle("Error Fraction");
   if (fDetailedMonitoring){
      results->GetHist("ROCIntegral_test")->GetXaxis()->SetTitle("Index of boosted classifier");
      results->GetHist("ROCIntegral_test")->GetYaxis()->SetTitle("ROC integral of single classifier");
      results->GetHist("ROCIntegralBoosted_test")->GetXaxis()->SetTitle("Number of boosts");
      results->GetHist("ROCIntegralBoosted_test")->GetYaxis()->SetTitle("ROC integral boosted");
      results->GetHist("ROCIntegral_train")->GetXaxis()->SetTitle("Index of boosted classifier");
      results->GetHist("ROCIntegral_train")->GetYaxis()->SetTitle("ROC integral of single classifier");
      results->GetHist("ROCIntegralBoosted_train")->GetXaxis()->SetTitle("Number of boosts");
      results->GetHist("ROCIntegralBoosted_train")->GetYaxis()->SetTitle("ROC integral boosted");
      results->GetHist("Overlap")->GetXaxis()->SetTitle("Index of boosted classifier");
      results->GetHist("Overlap")->GetYaxis()->SetTitle("Overlap integral");
   }

   results->Store(new TH1F("SoverBtotal","S/B in reweighted training sample",fBoostNum,0,fBoostNum),"SoverBtotal");
   results->GetHist("SoverBtotal")->GetYaxis()->SetTitle("S/B (boosted sample)");
   results->GetHist("SoverBtotal")->GetXaxis()->SetTitle("Index of boosted classifier");

   results->Store(new TH1F("SeparationGain","SeparationGain",fBoostNum,0,fBoostNum),"SeparationGain");
   results->GetHist("SeparationGain")->GetYaxis()->SetTitle("SeparationGain");
   results->GetHist("SeparationGain")->GetXaxis()->SetTitle("Index of boosted classifier");

   fMonitorTree = new TTree("MonitorBoost","Boost variables");
   fMonitorTree->Branch("iMethod",&fCurrentMethodIdx,"iMethod/I");
   fMonitorTree->Branch("boostWeight",&fBoostWeight,"boostWeight/D");
   fMonitorTree->Branch("errorFraction",&fMethodError,"errorFraction/D");
   fMonitorBoostedMethod = kTRUE;
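// Minimal sketch of inspecting the monitor tree after training. The file name
// "TMVA.root", the method name "BoostedFisher" and the in-file directory path
// are assumptions that depend on the Factory/DataLoader configuration:
//    TFile* f = TFile::Open("TMVA.root");
//    TTree* monitor = nullptr;
//    if (f) f->GetObject("dataset/Method_Boost/BoostedFisher/MonitorBoost", monitor);
//    if (monitor) monitor->Draw("errorFraction:iMethod");  // error fraction vs. boost index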
 
 
   Log() << kDEBUG << "CheckSetup: fBoostType=" << fBoostType << Endl;
   Log() << kDEBUG << "CheckSetup: fAdaBoostBeta=" << fAdaBoostBeta << Endl;
   Log() << kDEBUG << "CheckSetup: fBoostWeight=" << fBoostWeight << Endl;
   Log() << kDEBUG << "CheckSetup: fMethodError=" << fMethodError << Endl;
   Log() << kDEBUG << "CheckSetup: fBoostNum=" << fBoostNum << Endl;
   Log() << kDEBUG << "CheckSetup: fRandomSeed=" << fRandomSeed << Endl;
   Log() << kDEBUG << "CheckSetup: fTrainSigMVAHist.size()=" << fTrainSigMVAHist.size() << Endl;
   Log() << kDEBUG << "CheckSetup: fTestSigMVAHist.size()=" << fTestSigMVAHist.size() << Endl;
   Log() << kDEBUG << "CheckSetup: fMonitorBoostedMethod=" << (fMonitorBoostedMethod ? "true" : "false") << Endl;
   Log() << kDEBUG << "CheckSetup: MName=" << fBoostedMethodName << " Title=" << fBoostedMethodTitle << Endl;
   Log() << kDEBUG << "CheckSetup: MOptions=" << fBoostedMethodOptions << Endl;
   Log() << kDEBUG << "CheckSetup: fMonitorTree=" << fMonitorTree << Endl;
   Log() << kDEBUG << "CheckSetup: fCurrentMethodIdx=" << fCurrentMethodIdx << Endl;
   if (fMethods.size()>0) Log() << kDEBUG << "CheckSetup: fMethods[0]=" << fMethods[0] << Endl;
   Log() << kDEBUG << "CheckSetup: fMethodWeight.size()=" << fMethodWeight.size() << Endl;
   if (fMethodWeight.size()>0) Log() << kDEBUG << "CheckSetup: fMethodWeight[0]=" << fMethodWeight[0] << Endl;
   Log() << kDEBUG << "CheckSetup: trying to repair things" << Endl;
 
 
   if (Data()->GetNTrainingEvents()==0) Log() << kFATAL << "<Train> Data() has zero events" << Endl;

   if (fMethods.size() > 0) fMethods.clear();
   fMVAvalues->resize(Data()->GetNTrainingEvents(), 0.0);

   Log() << kINFO << "Training " << fBoostNum << " " << fBoostedMethodName
         << " with title " << fBoostedMethodTitle << " classifiers ... patience please" << Endl;

   for (fCurrentMethodIdx=0; fCurrentMethodIdx<fBoostNum; fCurrentMethodIdx++) {

         fBoostedMethodName.Data(), GetJobName(),
         TString::Format("%s_B%04i", fBoostedMethodTitle.Data(), fCurrentMethodIdx),
         DataInfo(), fBoostedMethodOptions);

      if (fCurrentMethod==0) {
         Log() << kFATAL << "Booking of the " << fCurrentMethodIdx
               << "-th boosted classifier failed" << Endl;

            Log() << kFATAL << "Method with type kCategory cannot be cast to MethodCategory. /MethodBoost" << Endl;

      fCurrentMethod->SetMsgType(kWARNING);
      fCurrentMethod->SetupMethod();
      fCurrentMethod->ParseOptions();

      fCurrentMethod->SetAnalysisType( GetAnalysisType() );
      fCurrentMethod->ProcessSetup();
      fCurrentMethod->CheckSetup();

      fCurrentMethod->RerouteTransformationHandler( &(this->GetTransformationHandler()) );

      if (fMonitorBoostedMethod) {

            fCurrentMethod->BaseDir()->cd();

      timer.DrawProgressBar( fCurrentMethodIdx );

      if (fBoostType=="Bagging") Bagging();

      if (!IsSilentFile()) fCurrentMethod->WriteMonitoringHistosToFile();

      if (!IsSilentFile())
         if (fCurrentMethodIdx==0 && fMonitorBoostedMethod) CreateMVAHistorgrams();

      SingleBoost(fCurrentMethod);

      results->GetHist("BoostWeight")->SetBinContent(fCurrentMethodIdx+1, fBoostWeight);
      results->GetHist("ErrorFraction")->SetBinContent(fCurrentMethodIdx+1, fMethodError);

      if (fDetailedMonitoring) {
         results->GetHist("ROCIntegral_train")->SetBinContent(fCurrentMethodIdx+1, fROC_training);
         results->GetHist("Overlap")->SetBinContent(fCurrentMethodIdx+1, fOverlap_integral);
      }

      fMonitorTree->Fill();

      Log() << kDEBUG << "AdaBoost (methodErr) err = " << fMethodError << Endl;

         timer.DrawProgressBar( fBoostNum );
         fBoostNum = fCurrentMethodIdx+1;
         Log() << kINFO << "Error rate has reached 0.5 (" << fMethodError
               << "), boosting process stopped at #" << fBoostNum << " classifier" << Endl;

            Log() << kINFO << "The classifier might be too strong to boost with Beta = "
                  << fAdaBoostBeta << ", try reducing it." << Endl;
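// Why 0.5 is the stopping point: the AdaBoost method weight behaves like
// alpha = ln((1 - err)/err) (modulated by Boost_AdaBoostBeta), so err = 0.5
// corresponds to random guessing (alpha = 0) and err > 0.5 would assign the
// classifier a negative weight; further boosting cannot improve the ensemble.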
 
   for (fCurrentMethodIdx=0; fCurrentMethodIdx<fBoostNum; fCurrentMethodIdx++) {

      timer1->DrawProgressBar( fCurrentMethodIdx );

      if (fCurrentMethodIdx==fBoostNum) {
         Log() << kINFO << "Elapsed time: " << timer1->GetElapsedTime()

      TH1F* tmp = dynamic_cast<TH1F*>( results->GetHist("ClassifierWeight") );
      if (tmp) tmp->SetBinContent(fCurrentMethodIdx+1, fMethodWeight[fCurrentMethodIdx]);

   if (fMethods.size()==1) fMethodWeight[0] = 1.0;
 
 
   fBoostedMethodOptions = GetOptions();
 
 
   if (fBoostNum <= 0) Log() << kFATAL << "CreateHistograms called before fBoostNum is initialized" << Endl;

   if (DataInfo().GetClassInfo("Signal") != 0) {
      signalClass = DataInfo().GetClassInfo("Signal")->GetNumber();

      ev->SetBoostWeight( 1.0 );
 
 
   if (fMonitorBoostedMethod) {

         fTrainSigMVAHist[imtd]->SetDirectory(dir);
         fTrainSigMVAHist[imtd]->Write();
         fTrainBgdMVAHist[imtd]->SetDirectory(dir);
         fTrainBgdMVAHist[imtd]->Write();
         fBTrainSigMVAHist[imtd]->SetDirectory(dir);
         fBTrainSigMVAHist[imtd]->Write();
         fBTrainBgdMVAHist[imtd]->SetDirectory(dir);
         fBTrainBgdMVAHist[imtd]->Write();

   fMonitorTree->Write();
 
 
   if (fMonitorBoostedMethod) {

      if (fMethods.size() < nloop) nloop = fMethods.size();

         if (DataInfo().IsSignal(ev)) {
               fTestSigMVAHist[imtd]->Fill(fMethods[imtd]->GetMvaValue(), w);

               fTestBgdMVAHist[imtd]->Fill(fMethods[imtd]->GetMvaValue(), w);
 
 
   if (fMethods.size() < nloop) nloop = fMethods.size();
   if (fMonitorBoostedMethod) {

         dir = mva->BaseDir();
         if (dir==0) continue;

         fTestSigMVAHist[imtd]->SetDirectory(dir);
         fTestSigMVAHist[imtd]->Write();
         fTestBgdMVAHist[imtd]->SetDirectory(dir);
         fTestBgdMVAHist[imtd]->Write();
 
 
   meth->SetSilentFile(IsSilentFile());

   if (IsModelPersistence()){

   meth->SetModelPersistence(IsModelPersistence());
 
 
   const Int_t nBins = 10001;

   if (fDetailedMonitoring){

      if (DataInfo().IsSignal(GetEvent(ievt))){

   mvaSC->SetBinContent(1, mvaS->GetBinContent(1));
   mvaBC->SetBinContent(1, mvaB->GetBinContent(1));

         << " idx=" << fCurrentMethodIdx

   Log() << kDEBUG << "(old step) Setting method cut to " << method->GetSignalReferenceCut() << Endl;
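// Sketch of what FindMVACut does with the nBins sampling points above: it
// scans candidate cut values on the classifier response, places the
// signal-like cut where the signal and background response histograms are
// best separated, and stores the result as the method's signal reference cut
// (used by IsSignalLike() during the boost-weight update).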
 
 
   else if (fBoostType=="RealAdaBoost") returnVal = this->AdaBoost( method, 0 );
   else if (fBoostType=="Bagging")      returnVal = this->Bagging();

      Log() << kFATAL << "<Boost> unknown boost option " << fBoostType << " called" << Endl;
 
 
      Log() << kWARNING << "AdaBoost called without classifier reference - needed for calculating AdaBoost" << Endl;

      Log() << kDEBUG << "individual MVA cut value = " << method->GetSignalReferenceCut() << Endl;

         MVAProb->AddEvent(fMVAvalues->at(evt), ev->GetWeight(), ev->GetClass());

      sig = DataInfo().IsSignal(ev);
      v   = fMVAvalues->at(ievt);

      if (fMonitorBoostedMethod) {
               fBTrainSigMVAHist[fCurrentMethodIdx]->Fill(v, w);
               fTrainSigMVAHist[fCurrentMethodIdx]->Fill(v, ev->GetOriginalWeight());

               fBTrainBgdMVAHist[fCurrentMethodIdx]->Fill(v, w);
               fTrainBgdMVAHist[fCurrentMethodIdx]->Fill(v, ev->GetOriginalWeight());

      if (sig == method->IsSignalLike(fMVAvalues->at(ievt))){

   if (fMethodError == 0) {
      Log() << kWARNING << "Your classifier worked perfectly on the training sample --> serious overtraining expected and no boosting done" << Endl;
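// Sketch of the discrete AdaBoost update applied in this loop: with error
// fraction err, the boost weight is (1 - err)/err; each misclassified event
// gets w_i -> w_i * ((1 - err)/err)^beta (beta = Boost_AdaBoostBeta), and all
// event weights are then renormalized so the effective sample size is kept.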
 
 
      ev->SetBoostWeight( trandom->PoissonD(fBaggedSampleFraction) );
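// Why PoissonD: drawing a bootstrap sample whose size is, on average, a
// fraction f = Boost_BaggedSampleFraction of the original data is equivalent
// to giving every event a random multiplicity weight from a Poisson
// distribution with mean f, which is exactly what the line above does.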
 
 
   Log() << "This method combines several classifiers of one species in a " << Endl;
   Log() << "single multivariate quantity via the boost algorithm." << Endl;
   Log() << "The output is a weighted sum over all individual classifiers." << Endl;
   Log() << "By default, the AdaBoost method is employed, which gives " << Endl;
   Log() << "events that were misclassified by the previous classifier a larger " << Endl;
   Log() << "weight in the training of the following classifier." << Endl;
   Log() << "Optionally, Bagged boosting can also be applied." << Endl;

   Log() << "The most important parameters in the configuration are the " << Endl;
   Log() << "number of boosts applied (Boost_Num) and the choice of boosting " << Endl;
   Log() << "(Boost_Type), which can be set to either AdaBoost or Bagging." << Endl;
   Log() << "AdaBoosting: the most important parameter in this configuration " << Endl;
   Log() << "is the beta parameter (Boost_AdaBoostBeta)." << Endl;
   Log() << "When boosting a linear classifier, it is sometimes advantageous " << Endl;
   Log() << "to transform the MVA output non-linearly. The following options " << Endl;
   Log() << "are available: step, linear, log, and gauss; the default is step." << Endl;

   Log() << "Some classifiers are hard to boost and do not improve much by " << Endl;
   Log() << "boosting; some even deteriorate slightly due to the boosting." << Endl;
   Log() << "The booking of the boost method is special since it requires " << Endl;
   Log() << "the booking of the method to be boosted and of the boost itself." << Endl;
   Log() << "This is solved by booking the method to be boosted and adding " << Endl;
   Log() << "all boost parameters, which all begin with \"Boost_\", to the " << Endl;
   Log() << "options string. The factory separates the options and initiates " << Endl;
   Log() << "the boost process. The TMVA macro directory contains the example " << Endl;
   Log() << "macro \"Boost.C\"." << Endl;
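// A minimal booking sketch in the spirit of the "Boost.C" macro mentioned
// above. The Fisher classifier and all option values are illustrative only,
// and "factory"/"dataloader" are assumed to be set up by the calling macro:
//    factory->BookMethod( dataloader, TMVA::Types::kFisher, "BoostedFisher",
//                         "H:!V:Fisher:Boost_Num=20:Boost_Type=AdaBoost:"
//                         "Boost_Transform=step:Boost_AdaBoostBeta=0.2" );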
 
 
   for (UInt_t i=0; i<fMethods.size(); i++){

      Double_t val = fTmpEvent ? m->GetMvaValue(fTmpEvent) : m->GetMvaValue();

      if (fTransformString == "linear"){

      else if (fTransformString == "log"){

      else if (fTransformString == "step"){
         if (m->IsSignalLike(val)) val = 1.;

      else if (fTransformString == "gauss"){

         Log() << kFATAL << "error: unknown transformation " << fTransformString << Endl;

      norm += fMethodWeight[i];
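// What this loop assembles: the boosted response is the weighted average
//    mvaValue = ( sum_i fMethodWeight[i] * T(val_i) ) / norm,
// where val_i is the output of the i-th classifier, T is the transform
// selected by Boost_Transform (e.g. "step" maps signal-like values to +1 and
// the rest to -1), and norm = sum_i fMethodWeight[i].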
 
 
   Data()->SetCurrentType(eTT);

      Log() << kFATAL << "Your method " << fMethods.back()->GetName()
            << " does not seem to be a proper TMVA method"

      for (UInt_t i=0; i<=fCurrentMethodIdx; i++)

         for (UInt_t i=0; i<=fCurrentMethodIdx; i++)

   std::vector<Float_t>* mvaRes;

      mvaRes = new std::vector<Float_t>(GetNEvents());

   if (DataInfo().GetClassInfo("Signal") != 0) {
      signalClass = DataInfo().GetClassInfo("Signal")->GetNumber();

         if (DataInfo().IsSignal(ev))

      fOverlap_integral = 0.0;
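// The overlap integral computed alongside the ROC integral is, schematically,
//    integral of min(p_S(y), p_B(y)) dy
// over the normalized signal/background response distributions p_S and p_B:
// 0 means perfect separation, 1 means the two distributions coincide.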
 
 
      Log() << kFATAL << "dynamic cast to MethodBase* failed" << Endl;

      fMVAvalues->at(ievt) = method->GetMvaValue();
 
 
            results->Store(new TH1I("NodesBeforePruning","nodes before pruning",this->GetBoostNum(),0,this->GetBoostNum()),"NodesBeforePruning");
            results->Store(new TH1I("NodesAfterPruning","nodes after pruning",this->GetBoostNum(),0,this->GetBoostNum()),"NodesAfterPruning");

            Log() << kINFO << "<Train> average number of nodes before/after pruning : "
                  << results->GetHist("NodesBeforePruning")->GetMean() << " / "
                  << results->GetHist("NodesAfterPruning")->GetMean()

         Log() << kDEBUG << "No detailed boost monitoring for "
               << " yet available " << Endl;

      if (fDetailedMonitoring){

         if (DataInfo().GetNVariables() == 2) {
 
 