137#pragma warning ( disable : 4355 ) 
  158      fMultiGraph = 
nullptr;
 
 
  172    std::cerr << kERROR << 
"IPythonInteractive::Init: already initialized..." << std::endl;
 
  177    fGraphs.push_back( 
new TGraph() );
 
  178    fGraphs.back()->SetTitle(title);
 
  179    fGraphs.back()->SetName(title);
 
  180    fGraphs.back()->SetFillColor(color);
 
  181    fGraphs.back()->SetLineColor(color);
 
  182    fGraphs.back()->SetMarkerColor(color);
 
  183    fMultiGraph->Add(fGraphs.back());
 
 
  195   for(
Int_t i=0; i<fNumGraphs; i++){
 
 
  209   fGraphs[0]->Set(fIndex+1);
 
  210   fGraphs[1]->Set(fIndex+1);
 
  211   fGraphs[0]->SetPoint(fIndex, 
x, 
y1);
 
  212   fGraphs[1]->SetPoint(fIndex, 
x, 
y2);
 
 
  225  for(
Int_t i=0; i<fNumGraphs;i++){
 
  226    fGraphs[i]->Set(fIndex+1);
 
  227    fGraphs[i]->SetPoint(fIndex, 
dat[0], 
dat[i+1]);
 
 
  247   fAnalysisType              ( 
Types::kNoAnalysisType ),
 
  248   fRegressionReturnVal       ( 0 ),
 
  249   fMulticlassReturnVal       ( 0 ),
 
  250   fDataSetInfo               ( 
dsi ),
 
  251   fSignalReferenceCut        ( 0.5 ),
 
  252   fSignalReferenceCutOrientation( 1. ),
 
  253   fVariableTransformType     ( 
Types::kSignal ),
 
  255   fMethodName                ( methodTitle ),
 
  260   fConstructedFromWeightFile ( 
kFALSE ),
 
  262   fMethodBaseDir             ( 0 ),
 
  265   fModelPersistence          (
kTRUE),
 
  276   fSplTrainEffBvsS           ( 0 ),
 
  277   fVarTransformString        ( 
"None" ),
 
  278   fTransformationPointer     ( 0 ),
 
  279   fTransformation            ( 
dsi, methodTitle ),
 
  281   fVerbosityLevelString      ( 
"Default" ),
 
  284   fIgnoreNegWeightsInTraining( 
kFALSE ),
 
  286   fBackgroundClass           ( 0 ),
 
 
  311   fAnalysisType              ( 
Types::kNoAnalysisType ),
 
  312   fRegressionReturnVal       ( 0 ),
 
  313   fMulticlassReturnVal       ( 0 ),
 
  314   fDataSetInfo               ( 
dsi ),
 
  315   fSignalReferenceCut        ( 0.5 ),
 
  316   fVariableTransformType     ( 
Types::kSignal ),
 
  318   fMethodName                ( 
"MethodBase"  ),
 
  321   fTMVATrainingVersion       ( 0 ),
 
  322   fROOTTrainingVersion       ( 0 ),
 
  323   fConstructedFromWeightFile ( 
kTRUE ),
 
  325   fMethodBaseDir             ( 0 ),
 
  328   fModelPersistence          (
kTRUE),
 
  339   fSplTrainEffBvsS           ( 0 ),
 
  340   fVarTransformString        ( 
"None" ),
 
  341   fTransformationPointer     ( 0 ),
 
  342   fTransformation            ( 
dsi, 
"" ),
 
  344   fVerbosityLevelString      ( 
"Default" ),
 
  347   fIgnoreNegWeightsInTraining( 
kFALSE ),
 
  349   fBackgroundClass           ( 0 ),
 
 
  367   if (!fSetupCompleted) Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Calling destructor of method which got never setup" << 
Endl;
 
  370   if (fInputVars != 0)  { fInputVars->clear(); 
delete fInputVars; }
 
  371   if (fRanking   != 0)  
delete fRanking;
 
  374   if (fDefaultPDF!= 0)  { 
delete fDefaultPDF; fDefaultPDF = 0; }
 
  375   if (fMVAPdfS   != 0)  { 
delete fMVAPdfS; fMVAPdfS = 0; }
 
  376   if (fMVAPdfB   != 0)  { 
delete fMVAPdfB; fMVAPdfB = 0; }
 
  379   if (fSplS)            { 
delete fSplS; fSplS = 0; }
 
  380   if (fSplB)            { 
delete fSplB; fSplB = 0; }
 
  381   if (fSpleffBvsS)      { 
delete fSpleffBvsS; fSpleffBvsS = 0; }
 
  382   if (fSplRefS)         { 
delete fSplRefS; fSplRefS = 0; }
 
  383   if (fSplRefB)         { 
delete fSplRefB; fSplRefB = 0; }
 
  384   if (fSplTrainRefS)    { 
delete fSplTrainRefS; fSplTrainRefS = 0; }
 
  385   if (fSplTrainRefB)    { 
delete fSplTrainRefB; fSplTrainRefB = 0; }
 
  386   if (fSplTrainEffBvsS) { 
delete fSplTrainEffBvsS; fSplTrainEffBvsS = 0; }
 
  388   for (
size_t i = 0; i < fEventCollections.size(); i++ ) {
 
  389      if (fEventCollections.at(i)) {
 
  390         for (std::vector<Event*>::const_iterator it = fEventCollections.at(i)->begin();
 
  391              it != fEventCollections.at(i)->end(); ++it) {
 
  394         delete fEventCollections.at(i);
 
  395         fEventCollections.at(i) = 
nullptr;
 
  399   if (fRegressionReturnVal) 
delete fRegressionReturnVal;
 
  400   if (fMulticlassReturnVal) 
delete fMulticlassReturnVal;
 
 
  410   if (fSetupCompleted) Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Calling SetupMethod for the second time" << 
Endl;
 
  412   DeclareBaseOptions();
 
  415   fSetupCompleted = 
kTRUE;
 
 
  425   ProcessBaseOptions();
 
 
  435   CheckForUnusedOptions();
 
 
  443   SetConfigDescription( 
"Configuration options for classifier architecture and tuning" );
 
  451   fSplTrainEffBvsS    = 0;
 
  458   fTxtWeightsOnly     = 
kTRUE;
 
  468   fInputVars = 
new std::vector<TString>;
 
  470      fInputVars->push_back(DataInfo().GetVariableInfo(
ivar).GetLabel());
 
  472   fRegressionReturnVal = 0;
 
  473   fMulticlassReturnVal = 0;
 
  475   fEventCollections.resize( 2 );
 
  476   fEventCollections.at(0) = 0;
 
  477   fEventCollections.at(1) = 0;
 
  480   if (DataInfo().GetClassInfo(
"Signal") != 0) {
 
  481      fSignalClass = DataInfo().GetClassInfo(
"Signal")->GetNumber();
 
  483   if (DataInfo().GetClassInfo(
"Background") != 0) {
 
  484      fBackgroundClass = DataInfo().GetClassInfo(
"Background")->GetNumber();
 
  487   SetConfigDescription( 
"Configuration options for MVA method" );
 
  488   SetConfigName( 
TString(
"Method") + GetMethodTypeName() );
 
 
  511   DeclareOptionRef( fVerbose, 
"V", 
"Verbose output (short form of \"VerbosityLevel\" below - overrides the latter one)" );
 
  513   DeclareOptionRef( fVerbosityLevelString=
"Default", 
"VerbosityLevel", 
"Verbosity level" );
 
  514   AddPreDefVal( 
TString(
"Default") ); 
 
  515   AddPreDefVal( 
TString(
"Debug")   );
 
  516   AddPreDefVal( 
TString(
"Verbose") );
 
  517   AddPreDefVal( 
TString(
"Info")    );
 
  518   AddPreDefVal( 
TString(
"Warning") );
 
  519   AddPreDefVal( 
TString(
"Error")   );
 
  520   AddPreDefVal( 
TString(
"Fatal")   );
 
  524   fTxtWeightsOnly = 
kTRUE;  
 
  527   DeclareOptionRef( fVarTransformString, 
"VarTransform", 
"List of variable transformations performed before training, e.g., \"D_Background,P_Signal,G,N_AllClasses\" for: \"Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)\"" );
 
  529   DeclareOptionRef( fHelp, 
"H", 
"Print method-specific help message" );
 
  531   DeclareOptionRef( fHasMVAPdfs, 
"CreateMVAPdfs", 
"Create PDFs for classifier outputs (signal and background)" );
 
  533   DeclareOptionRef( fIgnoreNegWeightsInTraining, 
"IgnoreNegWeightsInTraining",
 
  534                     "Events with negative weights are ignored in the training (but are included for testing and performance evaluation)" );
 
 
  546      fDefaultPDF = 
new PDF( 
TString(GetName())+
"_PDF", GetOptions(), 
"MVAPdf" );
 
  547      fDefaultPDF->DeclareOptions();
 
  548      fDefaultPDF->ParseOptions();
 
  549      fDefaultPDF->ProcessOptions();
 
  550      fMVAPdfB = 
new PDF( 
TString(GetName())+
"_PDFBkg", fDefaultPDF->GetOptions(), 
"MVAPdfBkg", fDefaultPDF );
 
  551      fMVAPdfB->DeclareOptions();
 
  552      fMVAPdfB->ParseOptions();
 
  553      fMVAPdfB->ProcessOptions();
 
  554      fMVAPdfS = 
new PDF( 
TString(GetName())+
"_PDFSig", fMVAPdfB->GetOptions(),    
"MVAPdfSig", fDefaultPDF );
 
  555      fMVAPdfS->DeclareOptions();
 
  556      fMVAPdfS->ParseOptions();
 
  557      fMVAPdfS->ProcessOptions();
 
  560      SetOptions( fMVAPdfS->GetOptions() );
 
  565                                               GetTransformationHandler(),
 
  569      if (fDefaultPDF!= 0) { 
delete fDefaultPDF; fDefaultPDF = 0; }
 
  570      if (fMVAPdfS   != 0) { 
delete fMVAPdfS; fMVAPdfS = 0; }
 
  571      if (fMVAPdfB   != 0) { 
delete fMVAPdfB; fMVAPdfB = 0; }
 
  575      fVerbosityLevelString = 
TString(
"Verbose");
 
  576      Log().SetMinType( kVERBOSE );
 
  578   else if (fVerbosityLevelString == 
"Debug"   ) Log().SetMinType( kDEBUG );
 
  579   else if (fVerbosityLevelString == 
"Verbose" ) Log().SetMinType( kVERBOSE );
 
  580   else if (fVerbosityLevelString == 
"Info"    ) Log().SetMinType( kINFO );
 
  581   else if (fVerbosityLevelString == 
"Warning" ) Log().SetMinType( kWARNING );
 
  582   else if (fVerbosityLevelString == 
"Error"   ) Log().SetMinType( kERROR );
 
  583   else if (fVerbosityLevelString == 
"Fatal"   ) Log().SetMinType( kFATAL );
 
  584   else if (fVerbosityLevelString != 
"Default" ) {
 
  585      Log() << kFATAL << 
"<ProcessOptions> Verbosity level type '" 
  586            << fVerbosityLevelString << 
"' unknown." << 
Endl;
 
 
  598   DeclareOptionRef( fNormalise=
kFALSE, 
"Normalise", 
"Normalise input variables" ); 
 
  599   DeclareOptionRef( fUseDecorr=
kFALSE, 
"D", 
"Use-decorrelated-variables flag" );
 
  600   DeclareOptionRef( fVariableTransformTypeString=
"Signal", 
"VarTransformType",
 
  601                     "Use signal or background events to derive for variable transformation (the transformation is applied on both types of, course)" );
 
  602   AddPreDefVal( 
TString(
"Signal") );
 
  603   AddPreDefVal( 
TString(
"Background") );
 
  604   DeclareOptionRef( fTxtWeightsOnly=
kTRUE, 
"TxtWeightFilesOnly", 
"If True: write all training results (weights) as text files (False: some are written in ROOT format)" );
 
  614   DeclareOptionRef( fNbinsMVAPdf   = 60, 
"NbinsMVAPdf",   
"Number of bins used for the PDFs of classifier outputs" );
 
  615   DeclareOptionRef( fNsmoothMVAPdf = 2,  
"NsmoothMVAPdf", 
"Number of smoothing iterations for classifier PDFs" );
 
 
  629   Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Parameter optimization is not yet implemented for method " 
  630         << GetName() << 
Endl;
 
  631   Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Currently we need to set hardcoded which parameter is tuned in which ranges"<<
Endl;
 
 
  656   if (Help()) PrintHelpMessage();
 
  659   if(!IsSilentFile()) BaseDir()->cd();
 
  663   GetTransformationHandler().CalcTransformations(Data()->GetEventCollection());
 
  667    << 
"Begin training" << 
Endl;
 
  668   Long64_t nEvents = Data()->GetNEvents();
 
  672    << 
"\tEnd of training                                              " << 
Endl;
 
  675    << 
"Elapsed time for training with " << nEvents <<  
" events: " 
  679    << 
"\tCreate MVA output for ";
 
  682   if (DoMulticlass()) {
 
  683      Log() <<
Form(
"[%s] : ",DataInfo().GetName())<< 
"Multiclass classification on training sample" << 
Endl;
 
  686   else if (!DoRegression()) {
 
  688      Log() <<
Form(
"[%s] : ",DataInfo().GetName())<< 
"classification on training sample" << 
Endl;
 
  697      Log() <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"regression on training sample" << 
Endl;
 
  701         Log() <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Create PDFs" << 
Endl;
 
  708   if (fModelPersistence ) WriteStateToFile();
 
  711   if ((!DoRegression()) && (fModelPersistence)) MakeClass();
 
  718       WriteMonitoringHistosToFile();
 
 
  726   if (!DoRegression()) Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Trying to use GetRegressionDeviation() with a classification job" << 
Endl;
 
  727   Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Create results for " << (
type==
Types::kTraining?
"training":
"testing") << 
Endl;
 
 
  746   Data()->SetCurrentType(
type);
 
  748   Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Create results for " << (
type==
Types::kTraining?
"training":
"testing") << 
Endl;
 
  752   Long64_t nEvents = Data()->GetNEvents();
 
  756   Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName()) << 
"Evaluation of " << GetMethodName() << 
" on " 
  759   regRes->Resize( nEvents );
 
  770      Data()->SetCurrentEvent(
ievt);
 
  771      std::vector< Float_t > vals = GetRegressionValues();
 
  778   Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())
 
  779    << 
"Elapsed time for evaluation of " << nEvents <<  
" events: " 
  780         << 
timer.GetElapsedTime() << 
"       " << 
Endl;
 
  784      SetTestTime(
timer.ElapsedSeconds());
 
 
  796   Data()->SetCurrentType(
type);
 
  798   Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Create results for " << (
type==
Types::kTraining?
"training":
"testing") << 
Endl;
 
  801   if (!
resMulticlass) Log() << kFATAL<<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"unable to create pointer in AddMulticlassOutput, exiting."<<
Endl;
 
  803   Long64_t nEvents = Data()->GetNEvents();
 
  808   Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Multiclass evaluation of " << GetMethodName() << 
" on " 
  814      Data()->SetCurrentEvent(
ievt);
 
  815      std::vector< Float_t > vals = GetMulticlassValues();
 
  820   Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())
 
  821    << 
"Elapsed time for evaluation of " << nEvents <<  
" events: " 
  822         << 
timer.GetElapsedTime() << 
"       " << 
Endl;
 
  826      SetTestTime(
timer.ElapsedSeconds());
 
 
 
  856   return GetMvaValue()*GetSignalReferenceCutOrientation() > GetSignalReferenceCut()*GetSignalReferenceCutOrientation() ? 
kTRUE : 
kFALSE;
 
 
  863   return mvaVal*GetSignalReferenceCutOrientation() > GetSignalReferenceCut()*GetSignalReferenceCutOrientation() ? 
kTRUE : 
kFALSE;
 
 
  871   Data()->SetCurrentType(
type);
 
  876   Long64_t nEvents =  Data()->GetNEvents();
 
  877   clRes->Resize( nEvents );
 
  881   std::vector<Double_t> mvaValues = GetMvaValues(0, nEvents, 
true);
 
  885      SetTestTime(
timer.ElapsedSeconds());
 
  891      auto ev = Data()->GetEvent(
ievt);
 
 
  901   Long64_t nEvents = Data()->GetNEvents();
 
  906   nEvents = values.size();
 
  912      Log() << kHEADER << 
Form(
"[%s] : ",DataInfo().GetName())
 
  913            << 
"Evaluation of " << GetMethodName() << 
" on " 
  915            << 
" sample (" << nEvents << 
" events)" << 
Endl;
 
  918      Data()->SetCurrentEvent(
ievt);
 
  919      values[
ievt] = GetMvaValue();
 
  930        << 
"Elapsed time for evaluation of " << nEvents <<  
" events: " 
  931         << 
timer.GetElapsedTime() << 
"       " << 
Endl;
 
 
  953   Data()->SetCurrentType(
type);
 
  958   Long64_t nEvents = Data()->GetNEvents();
 
  963   Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName()) << 
"Evaluation of " << GetMethodName() << 
" on " 
  971      Data()->SetCurrentEvent(
ievt);
 
  973      if (
proba < 0) 
break;
 
  980   Log() << kDEBUG <<
Form(
"Dataset[%s] : ",DataInfo().GetName())
 
  981    << 
"Elapsed time for evaluation of " << nEvents <<  
" events: " 
  982         << 
timer.GetElapsedTime() << 
"       " << 
Endl;
 
 
 1000   Data()->SetCurrentType(
type);
 
 1005   const Int_t nevt = GetNEvents();
 
 1010   Log() << kINFO << 
"Calculate regression for all events" << 
Endl;
 
 1018      Float_t r = GetRegressionValues()[0];
 
 1037      m1  += t*
w; 
s1 += t*t*
w;
 
 1044   timer.DrawProgressBar(nevt - 1);
 
 1045   Log() << kINFO << 
"Elapsed time for evaluation of " << nevt <<  
" events: " 
 1046         << 
timer.GetElapsedTime() << 
"       " << 
Endl;
 
 
 1103   if (!
resMulticlass) Log() << kFATAL<<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"unable to create pointer in TestMulticlass, exiting."<<
Endl;
 
 
 1135   if (0==
mvaRes && !(GetMethodTypeName().Contains(
"Cuts"))) {
 
 1136      Log()<<
Form(
"Dataset[%s] : ",DataInfo().GetName()) << 
"mvaRes " << 
mvaRes << 
" GetMethodTypeName " << GetMethodTypeName()
 
 1137           << 
" contains " << !(GetMethodTypeName().Contains(
"Cuts")) << 
Endl;
 
 1138      Log() << kFATAL<<
Form(
"Dataset[%s] : ",DataInfo().GetName()) << 
"<TestInit> Test variable " << GetTestvarName()
 
 1139            << 
" not found in tree" << 
Endl;
 
 1144                         fMeanS, fMeanB, fRmsS, fRmsB, fXmin, fXmax, fSignalClass );
 
 1152   fCutOrientation = (fMeanS > fMeanB) ? kPositive : kNegative;
 
 1162   if(IsSilentFile()) {
 
 1209   Log() << kHEADER <<
Form(
"[%s] : ",DataInfo().GetName())<< 
"Loop over test events and fill histograms with classifier response..." << 
Endl << 
Endl;
 
 1210   if (
mvaProb) Log() << kINFO << 
"Also filling probability and rarity histograms (on request)..." << 
Endl;
 
 1214   if ( 
mvaRes->GetSize() != GetNEvents() ) {
 
 1215      Log() << kFATAL << 
TString::Format(
"Inconsistent result size  %lld with number of events %u ",    
mvaRes->GetSize() ,  GetNEvents() ) << 
Endl;
 
 1225      if (DataInfo().IsSignal(
ev)) {
 
 1257   if (fSplS) { 
delete fSplS; fSplS = 0; }
 
 1258   if (fSplB) { 
delete fSplB; fSplB = 0; }
 
 
 1272   tf << prefix << 
"#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
 
 1273   tf << prefix << 
"Method         : " << GetMethodTypeName() << 
"::" << GetMethodName() << std::endl;
 
 1274   tf.setf(std::ios::left);
 
 1275   tf << prefix << 
"TMVA Release   : " << std::setw(10) << GetTrainingTMVAVersionString() << 
"    [" 
 1276      << GetTrainingTMVAVersionCode() << 
"]" << std::endl;
 
 1277   tf << prefix << 
"ROOT Release   : " << std::setw(10) << GetTrainingROOTVersionString() << 
"    [" 
 1278      << GetTrainingROOTVersionCode() << 
"]" << std::endl;
 
 1279   tf << prefix << 
"Creator        : " << 
userInfo->fUser << std::endl;
 
 1283   tf << prefix << 
"Training events: " << Data()->GetNTrainingEvents() << std::endl;
 
 1287   tf << prefix << 
"Analysis type  : " << 
"[" << ((GetAnalysisType()==
Types::kRegression) ? 
"Regression" : 
"Classification") << 
"]" << std::endl;
 
 1288   tf << prefix << std::endl;
 
 1293   tf << prefix << std::endl << prefix << 
"#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
 
 1294   WriteOptionsToStream( 
tf, prefix );
 
 1295   tf << prefix << std::endl;
 
 1298   tf << prefix << std::endl << prefix << 
"#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
 
 1299   WriteVarsToStream( 
tf, prefix );
 
 1300   tf << prefix << std::endl;
 
 
 1317      AddRegressionOutput( 
type );
 
 1319      AddMulticlassOutput( 
type );
 
 1321      AddClassifierOutput( 
type );
 
 1323         AddClassifierOutputProb( 
type );
 
 
 1333   if (!parent) 
return;
 
 1338   AddInfoItem( 
gi, 
"TMVA Release", GetTrainingTMVAVersionString() + 
" [" + 
gTools().StringFromInt(GetTrainingTMVAVersionCode()) + 
"]" );
 
 1339   AddInfoItem( 
gi, 
"ROOT Release", GetTrainingROOTVersionString() + 
" [" + 
gTools().StringFromInt(GetTrainingROOTVersionCode()) + 
"]");
 
 1340   AddInfoItem( 
gi, 
"Creator", 
userInfo->fUser);
 
 1344   AddInfoItem( 
gi, 
"Training events", 
gTools().StringFromInt(Data()->GetNTrainingEvents()));
 
 1350   AddInfoItem( 
gi, 
"AnalysisType", analysisType );
 
 1354   AddOptionsXMLTo( parent );
 
 1357   AddVarsXMLTo( parent );
 
 1360   if (fModelPersistence)
 
 1361      AddSpectatorsXMLTo( parent );
 
 1364   AddClassesXMLTo(parent);
 
 1367   if (DoRegression()) AddTargetsXMLTo(parent);
 
 1370   GetTransformationHandler(
false).AddXMLTo( parent );
 
 1374   if (fMVAPdfS) fMVAPdfS->AddXMLTo(
pdfs);
 
 1375   if (fMVAPdfB) fMVAPdfB->AddXMLTo(
pdfs);
 
 1378   AddWeightsXMLTo( parent );
 
 
 1389   fMVAPdfS = (
TMVA::PDF*)
rf.Get( 
"MVA_PDF_Signal" );
 
 1390   fMVAPdfB = (
TMVA::PDF*)
rf.Get( 
"MVA_PDF_Background" );
 
 1394   ReadWeightsFromStream( 
rf );
 
 
 1412    << 
"Creating xml weight file: " 
 1417   gTools().
AddAttr(rootnode,
"Method", GetMethodTypeName() + 
"::" + GetMethodName());
 
 1418   WriteStateToXML(rootnode);
 
 
 1433    << 
"Reading weight file: " 
 1436   if (
tfname.EndsWith(
".xml") ) {
 
 1439         Log() << kFATAL << 
"Error parsing XML file " << 
tfname << 
Endl;
 
 1442      ReadStateFromXML(rootnode);
 
 1447      fb.open(
tfname.Data(),std::ios::in);
 
 1448      if (!fb.is_open()) { 
 
 1449         Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<ReadStateFromFile> " 
 1450               << 
"Unable to open input weight file: " << 
tfname << 
Endl;
 
 1452      std::istream 
fin(&fb);
 
 1453      ReadStateFromStream(
fin);
 
 1456   if (!fTxtWeightsOnly) {
 
 1459      Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Reading root weight file: " 
 1462      ReadStateFromStream( *
rfile );
 
 
 1472   ReadStateFromXML(rootnode);
 
 
 1489   Log().SetSource( GetName() );
 
 1491    << 
"Read method \"" << GetMethodName() << 
"\" of type \"" << GetMethodTypeName() << 
"\"" << 
Endl;
 
 1509            if (
name == 
"TrainingTime")
 
 1512            if (
name == 
"AnalysisType") {
 
 1518               else Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Analysis type " << val << 
" is not known." << 
Endl;
 
 1521            if (
name == 
"TMVA Release" || 
name == 
"TMVA") {
 
 1525               Log() << kDEBUG <<
Form(
"[%s] : ",DataInfo().GetName()) << 
"MVA method was trained with TMVA Version: " << GetTrainingTMVAVersionString() << 
Endl;
 
 1528            if (
name == 
"ROOT Release" || 
name == 
"ROOT") {
 
 1533           << 
"MVA method was trained with ROOT Version: " << GetTrainingROOTVersionString() << 
Endl;
 
 1539         ReadOptionsFromXML(ch);
 
 1544         ReadVariablesFromXML(ch);
 
 1547         ReadSpectatorsFromXML(ch);
 
 1550         if (DataInfo().GetNClasses()==0) ReadClassesFromXML(ch);
 
 1553         if (DataInfo().GetNTargets()==0 && DoRegression()) ReadTargetsFromXML(ch);
 
 1555      else if (
nodeName==
"Transformations") {
 
 1556         GetTransformationHandler().ReadFromXML(ch);
 
 1560         if (fMVAPdfS) { 
delete fMVAPdfS; fMVAPdfS=0; }
 
 1561         if (fMVAPdfB) { 
delete fMVAPdfB; fMVAPdfB=0; }
 
 1574         ReadWeightsFromXML(ch);
 
 1577         Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Unparsed XML node: '" << 
nodeName << 
"'" << 
Endl;
 
 1584   if (GetTransformationHandler().GetCallerName() == 
"") GetTransformationHandler().SetCallerName( GetName() );
 
 
 1600   while (!
TString(buf).BeginsWith(
"Method")) GetLine(
fin,buf);
 
 1609   if (methodName == 
"") methodName = 
methodType;
 
 1610   fMethodName  = methodName;
 
 1612   Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Read method \"" << GetMethodName() << 
"\" of type \"" << GetMethodTypeName() << 
"\"" << 
Endl;
 
 1615   Log().SetSource( GetName() );
 
 1629   while (!
TString(buf).BeginsWith(
"#OPT")) GetLine(
fin,buf);
 
 1630   ReadOptionsFromStream(
fin);
 
 1634   fin.getline(buf,512);
 
 1635   while (!
TString(buf).BeginsWith(
"#VAR")) 
fin.getline(buf,512);
 
 1636   ReadVarsFromStream(
fin);
 
 1641   if (IsNormalised()) {
 
 1647   if ( fVarTransformString == 
"None") {
 
 1650   } 
else if ( fVarTransformString == 
"Decorrelate" ) {
 
 1652   } 
else if ( fVarTransformString == 
"PCA"  ) {
 
 1654   } 
else if ( fVarTransformString == 
"Uniform" ) {
 
 1656   } 
else if ( fVarTransformString == 
"Gauss" ) {
 
 1658   } 
else if ( fVarTransformString == 
"GaussDecorr" ) {
 
 1662      Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<ProcessOptions> Variable transform '" 
 1663            << fVarTransformString << 
"' unknown." << 
Endl;
 
 1666   if (GetTransformationHandler().GetTransformationList().GetSize() > 0) {
 
 1667      fin.getline(buf,512);
 
 1668      while (!
TString(buf).BeginsWith(
"#MAT")) 
fin.getline(buf,512);
 
 1682      fin.getline(buf,512);
 
 1683      while (!
TString(buf).BeginsWith(
"#MVAPDFS")) 
fin.getline(buf,512);
 
 1684      if (fMVAPdfS != 0) { 
delete fMVAPdfS; fMVAPdfS = 0; }
 
 1685      if (fMVAPdfB != 0) { 
delete fMVAPdfB; fMVAPdfB = 0; }
 
 1686      fMVAPdfS = 
new PDF(
TString(GetName()) + 
" MVA PDF Sig");
 
 1687      fMVAPdfB = 
new PDF(
TString(GetName()) + 
" MVA PDF Bkg");
 
 1688      fMVAPdfS->SetReadingVersion( GetTrainingTMVAVersionCode() );
 
 1689      fMVAPdfB->SetReadingVersion( GetTrainingTMVAVersionCode() );
 
 1696   fin.getline(buf,512);
 
 1697   while (!
TString(buf).BeginsWith(
"#WGT")) 
fin.getline(buf,512);
 
 1698   fin.getline(buf,512);
 
 1699   ReadWeightsFromStream( 
fin );;
 
 1702   if (GetTransformationHandler().GetCallerName() == 
"") GetTransformationHandler().SetCallerName( GetName() );
 
 
 1712   o << prefix << 
"NVar " << DataInfo().GetNVariables() << std::endl;
 
 1713   std::vector<VariableInfo>::const_iterator 
varIt = DataInfo().GetVariableInfos().
begin();
 
 1714   for (; 
varIt!=DataInfo().GetVariableInfos().
end(); ++
varIt) { o << prefix; 
varIt->WriteToStream(o); }
 
 1715   o << prefix << 
"NSpec " << DataInfo().GetNSpectators() << std::endl;
 
 1716   varIt = DataInfo().GetSpectatorInfos().
begin();
 
 1717   for (; 
varIt!=DataInfo().GetSpectatorInfos().
end(); ++
varIt) { o << prefix; 
varIt->WriteToStream(o); }
 
 
 1731   if (
readNVar!=DataInfo().GetNVariables()) {
 
 1732      Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"You declared "<< DataInfo().GetNVariables() << 
" variables in the Reader" 
 1733            << 
" while there are " << 
readNVar << 
" variables declared in the file" 
 1739   std::vector<VariableInfo>::iterator 
varIt = DataInfo().GetVariableInfos().
begin();
 
 1743      if (
varIt->GetExpression() == 
varInfo.GetExpression()) {
 
 1744         varInfo.SetExternalLink((*varIt).GetExternalLink());
 
 1748         Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"ERROR in <ReadVarsFromStream>" << 
Endl;
 
 1749         Log() << kINFO << 
"The definition (or the order) of the variables found in the input file is"  << 
Endl;
 
 1750         Log() << kINFO << 
"is not the same as the one declared in the Reader (which is necessary for" << 
Endl;
 
 1751         Log() << kINFO << 
"the correct working of the method):" << 
Endl;
 
 1752         Log() << kINFO << 
"   var #" << 
varIdx <<
" declared in Reader: " << 
varIt->GetExpression() << 
Endl;
 
 1753         Log() << kINFO << 
"   var #" << 
varIdx <<
" declared in file  : " << 
varInfo.GetExpression() << 
Endl;
 
 1754         Log() << kFATAL << 
"The expression declared to the Reader needs to be checked (name or order are wrong)" << 
Endl;
 
 
 1767   for (
UInt_t idx=0; idx<DataInfo().GetVariableInfos().size(); idx++) {
 
 
 1783   for (
UInt_t idx=0; idx<DataInfo().GetSpectatorInfos().size(); idx++) {
 
 1789      if (
vi.GetVarType()==
'C') 
continue;
 
 
 1826   for (
UInt_t idx=0; idx<DataInfo().GetTargetInfos().size(); idx++) {
 
 
 1842   if (
readNVar!=DataInfo().GetNVariables()) {
 
 1843      Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"You declared "<< DataInfo().GetNVariables() << 
" variables in the Reader" 
 1844            << 
" while there are " << 
readNVar << 
" variables declared in the file" 
 1862         Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"ERROR in <ReadVariablesFromXML>" << 
Endl;
 
 1863         Log() << kINFO << 
"The definition (or the order) of the variables found in the input file is"  << 
Endl;
 
 1864         Log() << kINFO << 
"not the same as the one declared in the Reader (which is necessary for the" << 
Endl;
 
 1865         Log() << kINFO << 
"correct working of the method):" << 
Endl;
 
 1867         Log() << kINFO << 
"   var #" << 
varIdx <<
" declared in file  : " << 
readVarInfo.GetExpression() << 
Endl;
 
 1868         Log() << kFATAL << 
"The expression declared to the Reader needs to be checked (name or order are wrong)" << 
Endl;
 
 
 1883      Log() << kFATAL<<
Form(
"Dataset[%s] : ",DataInfo().GetName()) << 
"You declared "<< DataInfo().GetNSpectators(
kFALSE) << 
" spectators in the Reader" 
 1884            << 
" while there are " << 
readNSpec << 
" spectators declared in the file" 
 1902         Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"ERROR in <ReadSpectatorsFromXML>" << 
Endl;
 
 1903         Log() << kINFO << 
"The definition (or the order) of the spectators found in the input file is"  << 
Endl;
 
 1904         Log() << kINFO << 
"not the same as the one declared in the Reader (which is necessary for the" << 
Endl;
 
 1905         Log() << kINFO << 
"correct working of the method):" << 
Endl;
 
 1908         Log() << kFATAL << 
"The expression declared to the Reader needs to be checked (name or order are wrong)" << 
Endl;
 
 
 1929         DataInfo().AddClass(classname);
 
 1937         DataInfo().AddClass(className);
 
 1944   if (DataInfo().GetClassInfo(
"Signal") != 0) {
 
 1945      fSignalClass = DataInfo().GetClassInfo(
"Signal")->GetNumber();
 
 1949   if (DataInfo().GetClassInfo(
"Background") != 0) {
 
 1950      fBackgroundClass = DataInfo().GetClassInfo(
"Background")->GetNumber();
 
 
 1970      DataInfo().AddTarget(expression,
"",
"",0,0);
 
 
 1982   if (fBaseDir != 0) 
return fBaseDir;
 
 1983   Log()<<kDEBUG<<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
" Base Directory for " << GetMethodName() << 
" not set yet --> check if already there.." <<
Endl;
 
 1985   if (IsSilentFile()) {
 
 1986      Log() << kFATAL << 
Form(
"Dataset[%s] : ", DataInfo().GetName())
 
 1987            << 
"MethodBase::BaseDir() - No directory exists when running a Method without output file. Enable the " 
 1988               "output when creating the factory" 
 1994      Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"MethodBase::BaseDir() - MethodBaseDir() return a NULL pointer!" << 
Endl;
 
 2000         Log()<<kDEBUG<<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
" Base Directory for " << GetMethodTypeName() << 
" does not exist yet--> created it" <<
Endl;
 
 2004         if (fModelPersistence) {
 
 2012   Log()<<kDEBUG<<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
" Base Directory for " << GetMethodTypeName() << 
" existed, return it.." <<
Endl;
 
 
 2022   if (fMethodBaseDir != 0) {
 
 2023      return fMethodBaseDir;
 
 2028   Log() << kDEBUG << 
Form(
"Dataset[%s] : ", 
datasetName) << 
" Base Directory for " << GetMethodTypeName()
 
 2029         << 
" not set yet --> check if already there.." << 
Endl;
 
 2034   if (!fMethodBaseDir) {
 
 2036      if (!fMethodBaseDir) {
 
 2037         Log() << kFATAL << 
"Can not create dir " << 
datasetName;
 
 2041   fMethodBaseDir = fMethodBaseDir->GetDirectory(
methodTypeDir.Data());
 
 2043   if (!fMethodBaseDir) {
 
 2047      Log() << kDEBUG << 
Form(
"Dataset[%s] : ", 
datasetName) << 
" Base Directory for " << GetMethodName()
 
 2048            << 
" does not exist yet--> created it" << 
Endl;
 
 2052         << 
"Return from MethodBaseDir() after creating base directory " << 
Endl;
 
 2053   return fMethodBaseDir;
 
 
 2078   if (fWeightFile!=
"") 
return fWeightFile;
 
 
 2100   if (0 != fMVAPdfS) {
 
 2101      fMVAPdfS->GetOriginalHist()->Write();
 
 2102      fMVAPdfS->GetSmoothedHist()->Write();
 
 2103      fMVAPdfS->GetPDFHist()->Write();
 
 2105   if (0 != fMVAPdfB) {
 
 2106      fMVAPdfB->GetOriginalHist()->Write();
 
 2107      fMVAPdfB->GetSmoothedHist()->Write();
 
 2108      fMVAPdfB->GetPDFHist()->Write();
 
 2114      Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<WriteEvaluationHistosToFile> Unknown result: " 
 2116            << 
"/kMaxAnalysisType" << 
Endl;
 
 2117   results->GetStorage()->Write();
 
 2121         GetTransformationHandler().PlotVariables (GetEventCollection( 
Types::kTesting ), BaseDir() );
 
 2123         Log() << kINFO << 
TString::Format(
"Dataset[%s] : ",DataInfo().GetName())
 
 2124               << 
" variable plots are not produces ! The number of variables is " << DataInfo().GetNVariables()
 
 
 2144   fin.getline(buf,512);
 
 2146   if (
line.BeginsWith(
"TMVA Release")) {
 
 2150      std::stringstream s(code.
Data());
 
 2151      s >> fTMVATrainingVersion;
 
 2152      Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"MVA method was trained with TMVA Version: " << GetTrainingTMVAVersionString() << 
Endl;
 
 2154   if (
line.BeginsWith(
"ROOT Release")) {
 
 2158      std::stringstream s(code.
Data());
 
 2159      s >> fROOTTrainingVersion;
 
 2160      Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"MVA method was trained with ROOT Version: " << GetTrainingROOTVersionString() << 
Endl;
 
 2162   if (
line.BeginsWith(
"Analysis type")) {
 
 2166      std::stringstream s(code.
Data());
 
 2167      std::string analysisType;
 
 2169      if      (analysisType == 
"regression"     || analysisType == 
"Regression")     SetAnalysisType( 
Types::kRegression );
 
 2170      else if (analysisType == 
"classification" || analysisType == 
"Classification") SetAnalysisType( 
Types::kClassification );
 
 2171      else if (analysisType == 
"multiclass"     || analysisType == 
"Multiclass")     SetAnalysisType( 
Types::kMulticlass );
 
 2172      else Log() << kFATAL << 
"Analysis type " << analysisType << 
" from weight-file not known!" << std::endl;
 
 2174      Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Method was trained for " 
 
 2195      Log() << kERROR<<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<CreateMVAPdfs> No result of classifier testing available" << 
Endl;
 
 2202   TH1* 
histMVAPdfS = 
new TH1D( GetMethodTypeName() + 
"_tr_S", GetMethodTypeName() + 
"_tr_S",
 
 2203                                fMVAPdfS->GetHistNBins( 
mvaRes->GetSize() ), minVal, maxVal );
 
 2204   TH1* 
histMVAPdfB = 
new TH1D( GetMethodTypeName() + 
"_tr_B", GetMethodTypeName() + 
"_tr_B",
 
 2205                                fMVAPdfB->GetHistNBins( 
mvaRes->GetSize() ), minVal, maxVal );
 
 2236   if (DataInfo().GetNClasses() == 2) { 
 
 2237      Log() << kINFO<<
Form(
"Dataset[%s] : ",DataInfo().GetName())
 
 2238            << 
TString::Format( 
"<CreateMVAPdfs> Separation from histogram (PDF): %1.3f (%1.3f)",
 
 
 2251   if (!fMVAPdfS || !fMVAPdfB) {
 
 2252      Log() << kINFO<<
Form(
"Dataset[%s] : ",DataInfo().GetName()) << 
"<GetProba> MVA PDFs for Signal and Background don't exist yet, we'll create them on demand" << 
Endl;
 
 2255   Double_t sigFraction = DataInfo().GetTrainingSumSignalWeights() / (DataInfo().GetTrainingSumSignalWeights() + DataInfo().GetTrainingSumBackgrWeights() );
 
 
 2266   if (!fMVAPdfS || !fMVAPdfB) {
 
 2267      Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<GetProba> MVA PDFs for Signal and Background don't exist" << 
Endl;
 
 
 2288      Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<GetRarity> Required MVA PDF for Signal or Background does not exist: " 
 2289            << 
"select option \"CreateMVAPdfs\"" << 
Endl;
 
 
 2304   Data()->SetCurrentType(
type);
 
 2314   else if (list->GetSize() > 2) {
 
 2315      Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<GetEfficiency> Wrong number of arguments" 
 2317            << 
" | required format, e.g., Efficiency:0.05, or empty string" << 
Endl;
 
 2323   if ( 
results->GetHist(
"MVA_S")->GetNbinsX() != 
results->GetHist(
"MVA_B")->GetNbinsX() ||
 
 2324        results->GetHist(
"MVA_HIGHBIN_S")->GetNbinsX() != 
results->GetHist(
"MVA_HIGHBIN_B")->GetNbinsX() ) {
 
 2325      Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<GetEfficiency> Binning mismatch between signal and background histos" << 
Endl;
 
 2340   if (
results->DoesExist(
"MVA_EFF_S")==0) {
 
 2343      TH1* 
eff_s = 
new TH1D( GetTestvarName() + 
"_effS", GetTestvarName() + 
" (signal)",     fNbinsH, 
xmin, 
xmax );
 
 2344      TH1* 
eff_b = 
new TH1D( GetTestvarName() + 
"_effB", GetTestvarName() + 
" (background)", fNbinsH, 
xmin, 
xmax );
 
 2349      Int_t sign = (fCutOrientation == kPositive) ? +1 : -1;
 
 2378            Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<GetEfficiency> Mismatch in sign" << 
Endl;
 
 2389      TH1* 
eff_BvsS = 
new TH1D( GetTestvarName() + 
"_effBvsS", GetTestvarName() + 
"", fNbins, 0, 1 );
 
 2391      eff_BvsS->SetXTitle( 
"Signal eff" );
 
 2392      eff_BvsS->SetYTitle( 
"Backgr eff" );
 
 2395      TH1* 
rej_BvsS = 
new TH1D( GetTestvarName() + 
"_rejBvsS", GetTestvarName() + 
"", fNbins, 0, 1 );
 
 2397      rej_BvsS->SetXTitle( 
"Signal eff" );
 
 2398      rej_BvsS->SetYTitle( 
"Backgr rejection (1-eff)" );
 
 2402                                   GetTestvarName(), fNbins, 0, 1 );
 
 2405      inveff_BvsS->SetYTitle( 
"Inverse backgr. eff (1/eff)" );
 
 2439         if (
effB>std::numeric_limits<double>::epsilon())
 
 2454         rejB = 1.0 - fSpleffBvsS->Eval( 
effS );
 
 2464      SetSignalReferenceCut( cut );
 
 2469   if (0 == fSpleffBvsS) {
 
 2487         integral += (1.0 - 
effB);
 
 
 2541   if (list->GetSize() != 2) {
 
 2542      Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<GetTrainingEfficiency> Wrong number of arguments" 
 2544            << 
" | required format, e.g., Efficiency:0.05" << 
Endl;
 
 2555   if (
results->GetHist(
"MVA_S")->GetNbinsX() != 
results->GetHist(
"MVA_B")->GetNbinsX() ||
 
 2556       results->GetHist(
"MVA_HIGHBIN_S")->GetNbinsX() != 
results->GetHist(
"MVA_HIGHBIN_B")->GetNbinsX() ) {
 
 2557      Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<GetTrainingEfficiency> Binning mismatch between signal and background histos" 
 2570   if (
results->DoesExist(
"MVA_TRAIN_S")==0) {
 
 2576      TH1* 
mva_s_tr = 
new TH1D( GetTestvarName() + 
"_Train_S",GetTestvarName() + 
"_Train_S", fNbinsMVAoutput, fXmin, 
sxmax );
 
 2577      TH1* 
mva_b_tr = 
new TH1D( GetTestvarName() + 
"_Train_B",GetTestvarName() + 
"_Train_B", fNbinsMVAoutput, fXmin, 
sxmax );
 
 2584      TH1* 
mva_eff_tr_s = 
new TH1D( GetTestvarName() + 
"_trainingEffS", GetTestvarName() + 
" (signal)",
 
 2586      TH1* 
mva_eff_tr_b = 
new TH1D( GetTestvarName() + 
"_trainingEffB", GetTestvarName() + 
" (background)",
 
 2592      Int_t sign = (fCutOrientation == kPositive) ? +1 : -1;
 
 2594      std::vector<Double_t> mvaValues = GetMvaValues(0,Data()->GetNEvents());
 
 2600         Data()->SetCurrentEvent(
ievt);
 
 2632      TH1* 
eff_bvss = 
new TH1D( GetTestvarName() + 
"_trainingEffBvsS", GetTestvarName() + 
"", fNbins, 0, 1 );
 
 2634      TH1* 
rej_bvss = 
new TH1D( GetTestvarName() + 
"_trainingRejBvsS", GetTestvarName() + 
"", fNbins, 0, 1 );
 
 2642         if (fSplTrainRefS) 
delete fSplTrainRefS;
 
 2643         if (fSplTrainRefB) 
delete fSplTrainRefB;
 
 2658      fEffS = 
results->GetHist(
"MVA_TRAINEFF_S");
 
 2681   if (0 == fSplTrainEffBvsS) 
return 0.0;
 
 2690      effB = fSplTrainEffBvsS->Eval( 
effS );
 
 
 2707   if (!
resMulticlass) Log() << kFATAL<<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"unable to create pointer in GetMulticlassEfficiency, exiting."<<
Endl;
 
 
 2719   if (!
resMulticlass) Log() << kFATAL<< 
"unable to create pointer in GetMulticlassTrainingEfficiency, exiting."<<
Endl;
 
 2721   Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Determine optimal multiclass cuts for training data..." << 
Endl;
 
 
 2753      Log() << kFATAL << 
"Cannot get confusion matrix for non-multiclass analysis." << std::endl;
 
 2757   Data()->SetCurrentType(
type);
 
 2762      Log() << kFATAL << 
Form(
"Dataset[%s] : ", DataInfo().GetName())
 
 2763            << 
"unable to create pointer in GetMulticlassEfficiency, exiting." << 
Endl;
 
 
 2805      Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<GetSeparation> Mismatch in pdfs" << 
Endl;
 
 2809   if (!fSplS || !fSplB) {
 
 2810      Log()<<kDEBUG<<
Form(
"[%s] : ",DataInfo().GetName())<< 
"could not calculate the separation, distributions" 
 2811           << 
" fSplS or fSplB are not yet filled" << 
Endl;
 
 
 2827      Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<GetROCIntegral(TH1D*, TH1D*)> Mismatch in hists" << 
Endl;
 
 2843      integral += (1-
pdfB->GetIntegral(cut,
xmax)) * 
pdfS->GetVal(cut);
 
 2848   return integral*step;
 
 
 2861      Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<GetSeparation> Mismatch in pdfs" << 
Endl;
 
 2875      integral += (1-
pdfB->GetIntegral(cut,
xmax)) * 
pdfS->GetVal(cut);
 
 2878   return integral*step;
 
 
 2897      Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<GetMaximumSignificance> " 
 2898            << 
"Number of signal or background events is <= 0 ==> abort" 
 2902   Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Using ratio SignalEvents/BackgroundEvents = " 
 2909      Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"Efficiency histograms empty !" << 
Endl;
 
 2910      Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"no maximum cut found, return 0" << 
Endl;
 
 2914   for (
Int_t bin=1; bin<=fNbinsH; bin++) {
 
 
 2950   Long64_t entries = Data()->GetNEvents();
 
 2954      Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<< 
"<CalculateEstimator> Wrong tree type: " << 
treeType << 
Endl;
 
 2978      if (DataInfo().IsSignal(
ev)) {
 
 
 3008      classFileName = GetWeightFileDir() + 
"/" + GetJobName() + 
"_" + GetMethodName() + 
".class.C";
 
 3016    << 
"Creating standalone class: " 
 3021      Log() << kFATAL << 
"<MakeClass> Unable to open file: " << 
classFileName << 
Endl;
 
 3026   fout << 
"// Class: " << className << std::endl;
 
 3027   fout << 
"// Automatically generated by MethodBase::MakeClass" << std::endl << 
"//" << std::endl;
 
 3031   fout << 
"/* configuration options =====================================================" << std::endl << std::endl;
 
 3032   WriteStateToStream( 
fout );
 
 3034   fout << 
"============================================================================ */" << std::endl;
 
 3037   fout << 
"" << std::endl;
 
 3038   fout << 
"#include <array>" << std::endl;
 
 3039   fout << 
"#include <vector>" << std::endl;
 
 3040   fout << 
"#include <cmath>" << std::endl;
 
 3041   fout << 
"#include <string>" << std::endl;
 
 3042   fout << 
"#include <iostream>" << std::endl;
 
 3043   fout << 
"" << std::endl;
 
 3046   this->MakeClassSpecificHeader( 
fout, className );
 
 3048   fout << 
"#ifndef IClassifierReader__def" << std::endl;
 
 3049   fout << 
"#define IClassifierReader__def" << std::endl;
 
 3051   fout << 
"class IClassifierReader {" << std::endl;
 
 3053   fout << 
" public:" << std::endl;
 
 3055   fout << 
"   // constructor" << std::endl;
 
 3056   fout << 
"   IClassifierReader() : fStatusIsClean( true ) {}" << std::endl;
 
 3057   fout << 
"   virtual ~IClassifierReader() {}" << std::endl;
 
 3059   fout << 
"   // return classifier response" << std::endl;
 
 3061      fout << 
"   virtual std::vector<double> GetMulticlassValues( const std::vector<double>& inputValues ) const = 0;" << std::endl;
 
 3063      fout << 
"   virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;" << std::endl;
 
 3066   fout << 
"   // returns classifier status" << std::endl;
 
 3067   fout << 
"   bool IsStatusClean() const { return fStatusIsClean; }" << std::endl;
 
 3069   fout << 
" protected:" << std::endl;
 
 3071   fout << 
"   bool fStatusIsClean;" << std::endl;
 
 3072   fout << 
"};" << std::endl;
 
 3074   fout << 
"#endif" << std::endl;
 
 3076   fout << 
"class " << className << 
" : public IClassifierReader {" << std::endl;
 
 3078   fout << 
" public:" << std::endl;
 
 3080   fout << 
"   // constructor" << std::endl;
 
 3081   fout << 
"   " << className << 
"( std::vector<std::string>& theInputVars )" << std::endl;
 
 3082   fout << 
"      : IClassifierReader()," << std::endl;
 
 3083   fout << 
"        fClassName( \"" << className << 
"\" )," << std::endl;
 
 3084   fout << 
"        fNvars( " << GetNvar() << 
" )" << std::endl;
 
 3085   fout << 
"   {" << std::endl;
 
 3086   fout << 
"      // the training input variables" << std::endl;
 
 3087   fout << 
"      const char* inputVars[] = { ";
 
 3089      fout << 
"\"" << GetOriginalVarName(
ivar) << 
"\"";
 
 3090      if (
ivar<GetNvar()-1) 
fout << 
", ";
 
 3092   fout << 
" };" << std::endl;
 
 3094   fout << 
"      // sanity checks" << std::endl;
 
 3095   fout << 
"      if (theInputVars.size() <= 0) {" << std::endl;
 
 3096   fout << 
"         std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": empty input vector\" << std::endl;" << std::endl;
 
 3097   fout << 
"         fStatusIsClean = false;" << std::endl;
 
 3098   fout << 
"      }" << std::endl;
 
 3100   fout << 
"      if (theInputVars.size() != fNvars) {" << std::endl;
 
 3101   fout << 
"         std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": mismatch in number of input values: \"" << std::endl;
 
 3102   fout << 
"                   << theInputVars.size() << \" != \" << fNvars << std::endl;" << std::endl;
 
 3103   fout << 
"         fStatusIsClean = false;" << std::endl;
 
 3104   fout << 
"      }" << std::endl;
 
 3106   fout << 
"      // validate input variables" << std::endl;
 
 3107   fout << 
"      for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {" << std::endl;
 
 3108   fout << 
"         if (theInputVars[ivar] != inputVars[ivar]) {" << std::endl;
 
 3109   fout << 
"            std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": mismatch in input variable names\" << std::endl" << std::endl;
 
 3110   fout << 
"                      << \" for variable [\" << ivar << \"]: \" << theInputVars[ivar].c_str() << \" != \" << inputVars[ivar] << std::endl;" << std::endl;
 
 3111   fout << 
"            fStatusIsClean = false;" << std::endl;
 
 3112   fout << 
"         }" << std::endl;
 
 3113   fout << 
"      }" << std::endl;
 
 3115   fout << 
"      // initialize min and max vectors (for normalisation)" << std::endl;
 
 3117      fout << 
"      fVmin[" << 
ivar << 
"] = " << std::setprecision(15) << GetXmin( 
ivar ) << 
";" << std::endl;
 
 3118      fout << 
"      fVmax[" << 
ivar << 
"] = " << std::setprecision(15) << GetXmax( 
ivar ) << 
";" << std::endl;
 
 3121   fout << 
"      // initialize input variable types" << std::endl;
 
 3123      fout << 
"      fType[" << 
ivar << 
"] = \'" << DataInfo().GetVariableInfo(
ivar).GetVarType() << 
"\';" << std::endl;
 
 3126   fout << 
"      // initialize constants" << std::endl;
 
 3127   fout << 
"      Initialize();" << std::endl;
 
 3129   if (GetTransformationHandler().GetTransformationList().GetSize() != 0) {
 
 3130      fout << 
"      // initialize transformation" << std::endl;
 
 3131      fout << 
"      InitTransform();" << std::endl;
 
 3133   fout << 
"   }" << std::endl;
 
 3135   fout << 
"   // destructor" << std::endl;
 
 3136   fout << 
"   virtual ~" << className << 
"() {" << std::endl;
 
 3137   fout << 
"      Clear(); // method-specific" << std::endl;
 
 3138   fout << 
"   }" << std::endl;
 
 3140   fout << 
"   // the classifier response" << std::endl;
 
 3141   fout << 
"   // \"inputValues\" is a vector of input values in the same order as the" << std::endl;
 
 3142   fout << 
"   // variables given to the constructor" << std::endl;
 
 3144      fout << 
"   std::vector<double> GetMulticlassValues( const std::vector<double>& inputValues ) const override;" << std::endl;
 
 3146      fout << 
"   double GetMvaValue( const std::vector<double>& inputValues ) const override;" << std::endl;
 
 3149   fout << 
" private:" << std::endl;
 
 3151   fout << 
"   // method-specific destructor" << std::endl;
 
 3152   fout << 
"   void Clear();" << std::endl;
 
 3154   if (GetTransformationHandler().GetTransformationList().GetSize()!=0) {
 
 3155      fout << 
"   // input variable transformation" << std::endl;
 
 3156      GetTransformationHandler().MakeFunction(
fout, className,1);
 
 3157      fout << 
"   void InitTransform();" << std::endl;
 
 3158      fout << 
"   void Transform( std::vector<double> & iv, int sigOrBgd ) const;" << std::endl;
 
 3161   fout << 
"   // common member variables" << std::endl;
 
 3162   fout << 
"   const char* fClassName;" << std::endl;
 
 3164   fout << 
"   const size_t fNvars;" << std::endl;
 
 3165   fout << 
"   size_t GetNvar()           const { return fNvars; }" << std::endl;
 
 3166   fout << 
"   char   GetType( int ivar ) const { return fType[ivar]; }" << std::endl;
 
 3168   fout << 
"   // normalisation of input variables" << std::endl;
 
 3169   fout << 
"   double fVmin[" << GetNvar() << 
"];" << std::endl;
 
 3170   fout << 
"   double fVmax[" << GetNvar() << 
"];" << std::endl;
 
 3171   fout << 
"   double NormVariable( double x, double xmin, double xmax ) const {" << std::endl;
 
 3172   fout << 
"      // normalise to output range: [-1, 1]" << std::endl;
 
 3173   fout << 
"      return 2*(x - xmin)/(xmax - xmin) - 1.0;" << std::endl;
 
 3174   fout << 
"   }" << std::endl;
 
 3176   fout << 
"   // type of input variable: 'F' or 'I'" << std::endl;
 
 3177   fout << 
"   char   fType[" << GetNvar() << 
"];" << std::endl;
 
 3179   fout << 
"   // initialize internal variables" << std::endl;
 
 3180   fout << 
"   void Initialize();" << std::endl;
 
 3182      fout << 
"   std::vector<double> GetMulticlassValues__( const std::vector<double>& inputValues ) const;" << std::endl;
 
 3184      fout << 
"   double GetMvaValue__( const std::vector<double>& inputValues ) const;" << std::endl;
 
 3186   fout << 
"" << std::endl;
 
 3187   fout << 
"   // private members (method specific)" << std::endl;
 
 3190   MakeClassSpecific( 
fout, className );
 
 3193      fout << 
"inline std::vector<double> " << className <<  
"::GetMulticlassValues( const std::vector<double>& inputValues ) const" << std::endl;
 
 3195      fout << 
"inline double " << className << 
"::GetMvaValue( const std::vector<double>& inputValues ) const" << std::endl;
 
 3197   fout << 
"{" << std::endl;
 
 3198   fout << 
"   // classifier response value" << std::endl;
 
 3200      fout << 
"   std::vector<double> retval;" << std::endl;
 
 3202      fout << 
"   double retval = 0;" << std::endl;
 
 3205   fout << 
"   // classifier response, sanity check first" << std::endl;
 
 3206   fout << 
"   if (!IsStatusClean()) {" << std::endl;
 
 3207   fout << 
"      std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": cannot return classifier response\"" << std::endl;
 
 3208   fout << 
"                << \" because status is dirty\" << std::endl;" << std::endl;
 
 3209   fout << 
"   }" << std::endl;
 
 3210   fout << 
"   else {" << std::endl;
 
 3211   if (IsNormalised()) {
 
 3212      fout << 
"         // normalise variables" << std::endl;
 
 3213      fout << 
"         std::vector<double> iV;" << std::endl;
 
 3214      fout << 
"         iV.reserve(inputValues.size());" << std::endl;
 
 3215      fout << 
"         int ivar = 0;" << std::endl;
 
 3216      fout << 
"         for (std::vector<double>::const_iterator varIt = inputValues.begin();" << std::endl;
 
 3217      fout << 
"              varIt != inputValues.end(); varIt++, ivar++) {" << std::endl;
 
 3218      fout << 
"            iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));" << std::endl;
 
 3219      fout << 
"         }" << std::endl;
 
 3220      if (GetTransformationHandler().GetTransformationList().GetSize() != 0 && GetMethodType() != 
Types::kLikelihood &&
 
 3222         fout << 
"         Transform( iV, -1 );" << std::endl;
 
 3226         fout << 
"         retval = GetMulticlassValues__( iV );" << std::endl;
 
 3228         fout << 
"         retval = GetMvaValue__( iV );" << std::endl;
 
 3231      if (GetTransformationHandler().GetTransformationList().GetSize() != 0 && GetMethodType() != 
Types::kLikelihood &&
 
 3233         fout << 
"         std::vector<double> iV(inputValues);" << std::endl;
 
 3234         fout << 
"         Transform( iV, -1 );" << std::endl;
 
 3236            fout << 
"         retval = GetMulticlassValues__( iV );" << std::endl;
 
 3238            fout << 
"         retval = GetMvaValue__( iV );" << std::endl;
 
 3242            fout << 
"         retval = GetMulticlassValues__( inputValues );" << std::endl;
 
 3244            fout << 
"         retval = GetMvaValue__( inputValues );" << std::endl;
 
 3248   fout << 
"   }" << std::endl;
 
 3250   fout << 
"   return retval;" << std::endl;
 
 3251   fout << 
"}" << std::endl;
 
 3254   if (GetTransformationHandler().GetTransformationList().GetSize()!=0)
 
 3255      GetTransformationHandler().MakeFunction(
fout, className,2);
 
 
 3267   std::streambuf* 
cout_sbuf = std::cout.rdbuf(); 
 
 3268   std::ofstream* o = 0;
 
 3269   if (
gConfig().WriteOptionsReference()) {
 
 3270      Log() << kINFO << 
"Print Help message for class " << GetName() << 
" into file: " << GetReferenceFile() << 
Endl;
 
 3271      o = 
new std::ofstream( GetReferenceFile(), std::ios::app );
 
 3273         Log() << kFATAL << 
"<PrintHelpMessage> Unable to append to output file: " << GetReferenceFile() << 
Endl;
 
 3275      std::cout.rdbuf( o->rdbuf() ); 
 
 3280      Log() << kINFO << 
Endl;
 
 3282            << 
"================================================================" 
 3286            << 
"H e l p   f o r   M V A   m e t h o d   [ " << GetName() << 
" ] :" 
 3291      Log() << 
"Help for MVA method [ " << GetName() << 
" ] :" << 
Endl;
 
 3299      Log() << 
"<Suppress this message by specifying \"!H\" in the booking option>" << 
Endl;
 
 3301            << 
"================================================================" 
 3308      Log() << 
"# End of Message___" << 
Endl;
 
 
 3328   else retval = fEffS->GetBinContent( fEffS->FindBin( 
theCut ) );
 
 3337   if      (
theCut-fXmin < eps) 
retval = (GetCutOrientation() == kPositive) ? 1.0 : 0.0;
 
 3338   else if (fXmax-
theCut < eps) 
retval = (GetCutOrientation() == kPositive) ? 0.0 : 1.0;
 
 
 3351   if (GetTransformationHandler().GetTransformationList().GetEntries() <= 0) {
 
 3352      return (Data()->GetEventCollection(
type));
 
 3359   if (fEventCollections.at(idx) == 0) {
 
 3360      fEventCollections.at(idx) = &(Data()->GetEventCollection(
type));
 
 3361      fEventCollections.at(idx) = GetTransformationHandler().CalcTransformations(*(fEventCollections.at(idx)),
kTRUE);
 
 3363   return *(fEventCollections.at(idx));
 
 
 3371   UInt_t a = GetTrainingTMVAVersionCode() & 0xff0000; 
a>>=16;
 
 3372   UInt_t b = GetTrainingTMVAVersionCode() & 0x00ff00; 
b>>=8;
 
 3373   UInt_t c = GetTrainingTMVAVersionCode() & 0x0000ff;
 
 
 3383   UInt_t a = GetTrainingROOTVersionCode() & 0xff0000; 
a>>=16;
 
 3384   UInt_t b = GetTrainingROOTVersionCode() & 0x00ff00; 
b>>=8;
 
 3385   UInt_t c = GetTrainingROOTVersionCode() & 0x0000ff;
 
 
const Bool_t Use_Splines_for_Eff_
 
const Int_t NBIN_HIST_HIGH
 
#define ROOT_VERSION_CODE
 
ROOT::Detail::TRangeCast< T, true > TRangeDynCast
TRangeDynCast is an adapter class that allows the typed iteration through a TCollection.
 
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void data
 
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t r
 
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t result
 
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t Int_t Int_t Window_t TString Int_t GCValues_t GetPrimarySelectionOwner GetDisplay GetScreen GetColormap GetNativeEvent const char const char dpyName wid window const char font_name cursor keysym reg const char only_if_exist regb h Point_t winding char text const char depth char const char Int_t count const char ColorStruct_t color const char Pixmap_t Pixmap_t PictureAttributes_t attr const char char ret_data h unsigned char height h length
 
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void value
 
Option_t Option_t TPoint TPoint const char y2
 
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t Int_t Int_t Window_t TString Int_t GCValues_t GetPrimarySelectionOwner GetDisplay GetScreen GetColormap GetNativeEvent const char const char dpyName wid window const char font_name cursor keysym reg const char only_if_exist regb h Point_t winding char text const char depth char const char Int_t count const char ColorStruct_t color const char Pixmap_t Pixmap_t PictureAttributes_t attr const char char ret_data h unsigned char height h Atom_t Int_t ULong_t ULong_t unsigned char prop_list Atom_t Atom_t Atom_t Time_t type
 
Option_t Option_t TPoint TPoint const char y1
 
TMatrixT< Double_t > TMatrixD
 
char * Form(const char *fmt,...)
Formats a string in a circular formatting buffer.
 
R__EXTERN TSystem * gSystem
 
#define TMVA_VERSION_CODE
 
const_iterator begin() const
 
const_iterator end() const
 
Class to manage histogram axis.
 
This class stores the date and time with a precision of one second in an unsigned 32 bit word (950130...
 
Describe directory structure in memory.
 
A ROOT file is an on-disk file, usually with extension .root, that stores objects in a file-system-li...
 
static TFile * Open(const char *name, Option_t *option="", const char *ftitle="", Int_t compress=ROOT::RCompressionSetting::EDefaults::kUseCompiledDefault, Int_t netopt=0)
Create / open a file.
 
A TGraph is an object made of two arrays X and Y with npoints each.
 
1-D histogram with a double per channel (see TH1 documentation)
 
1-D histogram with a float per channel (see TH1 documentation)
 
TH1 is the base class of all histogram classes in ROOT.
 
virtual Double_t GetMean(Int_t axis=1) const
For axis = 1,2 or 3 returns the mean value of the histogram along X,Y or Z axis.
 
static void AddDirectory(Bool_t add=kTRUE)
Sets the flag controlling the automatic add of histograms in memory.
 
virtual Int_t GetQuantiles(Int_t nprobSum, Double_t *q, const Double_t *probSum=nullptr)
Compute Quantiles for this histogram Quantile x_q of a probability distribution Function F is defined...
 
static Bool_t AddDirectoryStatus()
Static function: cannot be inlined on Windows/NT.
 
2-D histogram with a float per channel (see TH1 documentation)
 
Int_t Fill(Double_t) override
Invalid Fill method.
 
Class that contains all the information of a class.
 
TString fWeightFileExtension
 
Int_t fMaxNumOfAllowedVariables
 
VariablePlotting & GetVariablePlotting()
 
class TMVA::Config::VariablePlotting fVariablePlotting
 
MsgLogger * fLogger
! message logger
 
Class that contains all the data information.
 
Class that contains all the data information.
 
static void SetIsTraining(Bool_t)
when this static function is called, it sets the flag whether events with negative event weight shoul...
 
static void SetIgnoreNegWeightsInTraining(Bool_t)
when this static function is called, it sets the flag whether events with negative event weight shoul...
 
Interface for all concrete MVA method implementations.
 
void Init(std::vector< TString > &graphTitles)
This function gets some title and it creates a TGraph for every title.
 
IPythonInteractive()
standard constructor
 
~IPythonInteractive()
standard destructor
 
void ClearGraphs()
This function sets the point number to 0 for all graphs.
 
void AddPoint(Double_t x, Double_t y1, Double_t y2)
This function is used only in 2 TGraph case, and it will add new data points to graphs.
 
Virtual base Class for all MVA method.
 
TDirectory * MethodBaseDir() const
returns the ROOT directory where all instances of the corresponding MVA method are stored
 
virtual Double_t GetKSTrainingVsTest(Char_t SorB, TString opt="X")
 
MethodBase(const TString &jobName, Types::EMVA methodType, const TString &methodTitle, DataSetInfo &dsi, const TString &theOption="")
standard constructor
 
virtual Double_t GetMvaValue(Double_t *errLower=nullptr, Double_t *errUpper=nullptr)=0
 
virtual Double_t GetSeparation(TH1 *, TH1 *) const
compute "separation" defined as
 
void ReadClassesFromXML(void *clsnode)
read number of classes from XML
 
void SetWeightFileDir(TString fileDir)
set directory of weight file
 
void WriteStateToXML(void *parent) const
general method used in writing the header of the weight files where the used variables,...
 
void DeclareBaseOptions()
define the options (their key words) that can be set in the option string here the options valid for ...
 
virtual void TestRegression(Double_t &bias, Double_t &biasT, Double_t &dev, Double_t &devT, Double_t &rms, Double_t &rmsT, Double_t &mInf, Double_t &mInfT, Double_t &corr, Types::ETreeType type)
calculate <sum-of-deviation-squared> of regression output versus "true" value from test sample
 
virtual void DeclareCompatibilityOptions()
options that are used ONLY for the READER to ensure backward compatibility they are hence without any...
 
virtual Double_t GetSignificance() const
compute significance of mean difference
 
virtual Double_t GetProba(const Event *ev)
 
const char * GetName() const
 
virtual TMatrixD GetMulticlassConfusionMatrix(Double_t effB, Types::ETreeType type)
Construct a confusion matrix for a multiclass classifier.
 
void PrintHelpMessage() const
prints out method-specific help method
 
virtual void WriteEvaluationHistosToFile(Types::ETreeType treetype)
writes all MVA evaluation histograms to file
 
virtual void TestMulticlass()
test multiclass classification
 
const std::vector< TMVA::Event * > & GetEventCollection(Types::ETreeType type)
returns the event collection (i.e.
 
virtual std::vector< Double_t > GetDataMvaValues(DataSet *data=nullptr, Long64_t firstEvt=0, Long64_t lastEvt=-1, Bool_t logProgress=false)
get all the MVA values for the events of the given Data type
 
void SetupMethod()
setup of methods
 
TDirectory * BaseDir() const
returns the ROOT directory where info/histograms etc of the corresponding MVA method instance are sto...
 
virtual std::vector< Float_t > GetMulticlassEfficiency(std::vector< std::vector< Float_t > > &purity)
 
void AddInfoItem(void *gi, const TString &name, const TString &value) const
xml writing
 
virtual void AddClassifierOutputProb(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
 
virtual Double_t GetEfficiency(const TString &, Types::ETreeType, Double_t &err)
fill background efficiency (resp.
 
TString GetTrainingTMVAVersionString() const
calculates the TMVA version string from the training version code on the fly
 
void Statistics(Types::ETreeType treeType, const TString &theVarName, Double_t &, Double_t &, Double_t &, Double_t &, Double_t &, Double_t &)
calculates rms,mean, xmin, xmax of the event variable this can be either done for the variables as th...
 
Bool_t GetLine(std::istream &fin, char *buf)
reads one line from the input stream checks for certain keywords and interprets the line if keywords ...
 
void ProcessSetup()
process all options; the "CheckForUnusedOptions" is done in an independent call, since it may be overridden by derived classes
 
virtual std::vector< Double_t > GetMvaValues(Long64_t firstEvt=0, Long64_t lastEvt=-1, Bool_t logProgress=false)
get all the MVA values for the events of the current Data type
 
virtual Bool_t IsSignalLike()
uses a pre-set cut on the MVA output (SetSignalReferenceCut and SetSignalReferenceCutOrientation) for the determination of the "signal-like" behaviour of an event
 
virtual ~MethodBase()
destructor
 
virtual Double_t GetMaximumSignificance(Double_t SignalEvents, Double_t BackgroundEvents, Double_t &optimal_significance_value) const
plot significance, S/sqrt(S+B), curve for given number of signal and background events; returns cut for maximum significance
 
virtual Double_t GetTrainingEfficiency(const TString &)
 
void SetWeightFileName(TString)
set the weight file name (deprecated)
 
virtual void MakeClass(const TString &classFileName=TString("")) const
create reader class for method (classification only at present)
 
TString GetWeightFileName() const
retrieve weight file name
 
virtual void TestClassification()
initialization
 
void AddOutput(Types::ETreeType type, Types::EAnalysisType analysisType)
 
virtual void WriteMonitoringHistosToFile() const
write special monitoring histograms to file; dummy implementation here
 
virtual void AddRegressionOutput(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
 
void InitBase()
default initialization called by all constructors
 
virtual void GetRegressionDeviation(UInt_t tgtNum, Types::ETreeType type, Double_t &stddev, Double_t &stddev90Percent) const
 
void ReadStateFromXMLString(const char *xmlstr)
for reading from memory
 
void CreateMVAPdfs()
Create PDFs of the MVA output variables.
 
TString GetTrainingROOTVersionString() const
calculates the ROOT version string from the training version code on the fly
 
virtual Double_t GetValueForRoot(Double_t)
returns efficiency as function of cut
 
void ReadStateFromFile()
Function to read options and weights from file.
 
void WriteVarsToStream(std::ostream &tf, const TString &prefix="") const
write the list of variables (name, min, max) for a given data transformation method to the stream
 
void ReadVarsFromStream(std::istream &istr)
Read the variables (name, min, max) for a given data transformation method from the stream.
 
void ReadSpectatorsFromXML(void *specnode)
read spectator info from XML
 
void SetTestvarName(const TString &v="")
 
void ReadVariablesFromXML(void *varnode)
read variable info from XML
 
virtual std::map< TString, Double_t > OptimizeTuningParameters(TString fomType="ROCIntegral", TString fitType="FitGA")
call the Optimizer with the set of parameters and ranges that are meant to be tuned.
 
virtual std::vector< Float_t > GetMulticlassTrainingEfficiency(std::vector< std::vector< Float_t > > &purity)
 
void WriteStateToStream(std::ostream &tf) const
general method used in writing the header of the weight files, where the used variables, variable transformation type etc. are specified
 
virtual Double_t GetRarity(Double_t mvaVal, Types::ESBType reftype=Types::kBackground) const
compute rarity:
 
virtual void SetTuneParameters(std::map< TString, Double_t > tuneParameters)
set the tuning parameters according to the argument. This is just a dummy implementation; overwrite it in derived methods that support parameter tuning.
 
void ReadStateFromStream(std::istream &tf)
read the header from the weight files of the different MVA methods
 
void AddVarsXMLTo(void *parent) const
write variable info to XML
 
void AddTargetsXMLTo(void *parent) const
write target info to XML
 
void ReadTargetsFromXML(void *tarnode)
read target info from XML
 
void ProcessBaseOptions()
the option string is decoded, for available options see "DeclareOptions"
 
void ReadStateFromXML(void *parent)
 
void NoErrorCalc(Double_t *const err, Double_t *const errUpper)
 
void WriteStateToFile() const
write options and weights to file; note that for each method one text file (for the main configuration information) and one ROOT file (for ROOT objects) are created
 
void AddClassesXMLTo(void *parent) const
write class info to XML
 
virtual void AddClassifierOutput(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
 
void AddSpectatorsXMLTo(void *parent) const
write spectator info to XML
 
virtual Double_t GetROCIntegral(TH1D *histS, TH1D *histB) const
calculate the area (integral) under the ROC curve as an overall quality measure of the classification
 
virtual void AddMulticlassOutput(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
 
virtual void CheckSetup()
check may be overridden by derived class (sometimes, eg, fitters are used which can only be implemented during the training phase)
 
void SetSource(const std::string &source)
 
PDF wrapper for histograms; uses user-defined spline interpolation.
 
Class that is the base-class for a vector of result.
 
Class which takes the results of a multiclass classification.
 
Class that is the base-class for a vector of result.
 
Class that is the base-class for a vector of result.
 
Root finding using Brent's algorithm (translated from CERNLIB function RZERO)
 
Linear interpolation of TGraph.
 
Timing information for training and evaluation of MVA methods.
 
Singleton class for Global types used by TMVA.
 
@ kSignal
Never change this number - it is elsewhere assumed to be zero !
 
Class for type info of MVA input variable.
 
A TMultiGraph is a collection of TGraph (or derived) objects.
 
Collectable string class.
 
void ToLower()
Change string to lower-case.
 
Int_t Atoi() const
Return integer value of string.
 
TSubString Strip(EStripType s=kTrailing, char c=' ') const
Return a substring of self stripped at beginning and/or end.
 
const char * Data() const
 
static TString Format(const char *fmt,...)
Static method which formats a string using a printf style format descriptor and return a TString.
 
Ssiz_t Index(const char *pat, Ssiz_t i=0, ECaseCompare cmp=kExact) const
 
virtual const char * GetBuildNode() const
Return the build node name.
 
virtual int mkdir(const char *name, Bool_t recursive=kFALSE)
Make a file system directory.
 
virtual const char * WorkingDirectory()
Return working directory.
 
virtual UserGroup_t * GetUserInfo(Int_t uid)
Returns all user info in the UserGroup_t structure.
 
void SaveDoc(XMLDocPointer_t xmldoc, const char *filename, Int_t layout=1)
store document content to file; if layout<=0, no spaces or newlines will be placed between xml nodes
 
void FreeDoc(XMLDocPointer_t xmldoc)
frees allocated document data and deletes document itself
 
XMLNodePointer_t DocGetRootElement(XMLDocPointer_t xmldoc)
returns root node of document
 
XMLDocPointer_t NewDoc(const char *version="1.0")
creates new xml document with provided version
 
XMLDocPointer_t ParseFile(const char *filename, Int_t maxbuf=100000)
Parses content of file and tries to produce xml structures.
 
XMLDocPointer_t ParseString(const char *xmlstring)
parses content of string and tries to produce xml structures
 
void DocSetRootElement(XMLDocPointer_t xmldoc, XMLNodePointer_t xmlnode)
set main (root) node for document
 
void CreateVariableTransforms(const TString &trafoDefinition, TMVA::DataSetInfo &dataInfo, TMVA::TransformationHandler &transformationHandler, TMVA::MsgLogger &log)
 
MsgLogger & Endl(MsgLogger &ml)
 
Short_t Max(Short_t a, Short_t b)
Returns the largest of a and b.
 
Double_t Sqrt(Double_t x)
Returns the square root of x.
 
Short_t Min(Short_t a, Short_t b)
Returns the smallest of a and b.
 
Short_t Abs(Short_t d)
Returns the absolute value of parameter Short_t d.