136#pragma warning ( disable : 4355 )
157 fMultiGraph =
nullptr;
171 std::cerr << kERROR <<
"IPythonInteractive::Init: already initialized..." << std::endl;
176 fGraphs.push_back(
new TGraph() );
177 fGraphs.back()->SetTitle(title);
178 fGraphs.back()->SetName(title);
179 fGraphs.back()->SetFillColor(color);
180 fGraphs.back()->SetLineColor(color);
181 fGraphs.back()->SetMarkerColor(color);
182 fMultiGraph->Add(fGraphs.back());
194 for(
Int_t i=0; i<fNumGraphs; i++){
208 fGraphs[0]->Set(fIndex+1);
209 fGraphs[1]->Set(fIndex+1);
210 fGraphs[0]->SetPoint(fIndex,
x,
y1);
211 fGraphs[1]->SetPoint(fIndex,
x,
y2);
224 for(
Int_t i=0; i<fNumGraphs;i++){
225 fGraphs[i]->Set(fIndex+1);
226 fGraphs[i]->SetPoint(fIndex,
dat[0],
dat[i+1]);
246 fAnalysisType (
Types::kNoAnalysisType ),
247 fRegressionReturnVal ( 0 ),
248 fMulticlassReturnVal ( 0 ),
249 fDataSetInfo (
dsi ),
250 fSignalReferenceCut ( 0.5 ),
251 fSignalReferenceCutOrientation( 1. ),
252 fVariableTransformType (
Types::kSignal ),
254 fMethodName ( methodTitle ),
259 fConstructedFromWeightFile (
kFALSE ),
261 fMethodBaseDir ( 0 ),
264 fModelPersistence (
kTRUE),
275 fSplTrainEffBvsS ( 0 ),
276 fVarTransformString (
"None" ),
277 fTransformationPointer ( 0 ),
278 fTransformation (
dsi, methodTitle ),
280 fVerbosityLevelString (
"Default" ),
283 fIgnoreNegWeightsInTraining(
kFALSE ),
285 fBackgroundClass ( 0 ),
310 fAnalysisType (
Types::kNoAnalysisType ),
311 fRegressionReturnVal ( 0 ),
312 fMulticlassReturnVal ( 0 ),
313 fDataSetInfo (
dsi ),
314 fSignalReferenceCut ( 0.5 ),
315 fVariableTransformType (
Types::kSignal ),
317 fMethodName (
"MethodBase" ),
320 fTMVATrainingVersion ( 0 ),
321 fROOTTrainingVersion ( 0 ),
322 fConstructedFromWeightFile (
kTRUE ),
324 fMethodBaseDir ( 0 ),
327 fModelPersistence (
kTRUE),
338 fSplTrainEffBvsS ( 0 ),
339 fVarTransformString (
"None" ),
340 fTransformationPointer ( 0 ),
341 fTransformation (
dsi,
"" ),
343 fVerbosityLevelString (
"Default" ),
346 fIgnoreNegWeightsInTraining(
kFALSE ),
348 fBackgroundClass ( 0 ),
366 if (!fSetupCompleted) Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Calling destructor of method which got never setup" <<
Endl;
369 if (fInputVars != 0) { fInputVars->clear();
delete fInputVars; }
370 if (fRanking != 0)
delete fRanking;
373 if (fDefaultPDF!= 0) {
delete fDefaultPDF; fDefaultPDF = 0; }
374 if (fMVAPdfS != 0) {
delete fMVAPdfS; fMVAPdfS = 0; }
375 if (fMVAPdfB != 0) {
delete fMVAPdfB; fMVAPdfB = 0; }
378 if (fSplS) {
delete fSplS; fSplS = 0; }
379 if (fSplB) {
delete fSplB; fSplB = 0; }
380 if (fSpleffBvsS) {
delete fSpleffBvsS; fSpleffBvsS = 0; }
381 if (fSplRefS) {
delete fSplRefS; fSplRefS = 0; }
382 if (fSplRefB) {
delete fSplRefB; fSplRefB = 0; }
383 if (fSplTrainRefS) {
delete fSplTrainRefS; fSplTrainRefS = 0; }
384 if (fSplTrainRefB) {
delete fSplTrainRefB; fSplTrainRefB = 0; }
385 if (fSplTrainEffBvsS) {
delete fSplTrainEffBvsS; fSplTrainEffBvsS = 0; }
387 for (
size_t i = 0; i < fEventCollections.size(); i++ ) {
388 if (fEventCollections.at(i)) {
389 for (std::vector<Event*>::const_iterator it = fEventCollections.at(i)->begin();
390 it != fEventCollections.at(i)->end(); ++it) {
393 delete fEventCollections.at(i);
394 fEventCollections.at(i) =
nullptr;
398 if (fRegressionReturnVal)
delete fRegressionReturnVal;
399 if (fMulticlassReturnVal)
delete fMulticlassReturnVal;
409 if (fSetupCompleted) Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Calling SetupMethod for the second time" <<
Endl;
411 DeclareBaseOptions();
414 fSetupCompleted =
kTRUE;
424 ProcessBaseOptions();
434 CheckForUnusedOptions();
442 SetConfigDescription(
"Configuration options for classifier architecture and tuning" );
450 fSplTrainEffBvsS = 0;
457 fTxtWeightsOnly =
kTRUE;
467 fInputVars =
new std::vector<TString>;
469 fInputVars->push_back(DataInfo().GetVariableInfo(
ivar).GetLabel());
471 fRegressionReturnVal = 0;
472 fMulticlassReturnVal = 0;
474 fEventCollections.resize( 2 );
475 fEventCollections.at(0) = 0;
476 fEventCollections.at(1) = 0;
479 if (DataInfo().GetClassInfo(
"Signal") != 0) {
480 fSignalClass = DataInfo().GetClassInfo(
"Signal")->GetNumber();
482 if (DataInfo().GetClassInfo(
"Background") != 0) {
483 fBackgroundClass = DataInfo().GetClassInfo(
"Background")->GetNumber();
486 SetConfigDescription(
"Configuration options for MVA method" );
487 SetConfigName(
TString(
"Method") + GetMethodTypeName() );
510 DeclareOptionRef( fVerbose,
"V",
"Verbose output (short form of \"VerbosityLevel\" below - overrides the latter one)" );
512 DeclareOptionRef( fVerbosityLevelString=
"Default",
"VerbosityLevel",
"Verbosity level" );
513 AddPreDefVal(
TString(
"Default") );
514 AddPreDefVal(
TString(
"Debug") );
515 AddPreDefVal(
TString(
"Verbose") );
516 AddPreDefVal(
TString(
"Info") );
517 AddPreDefVal(
TString(
"Warning") );
518 AddPreDefVal(
TString(
"Error") );
519 AddPreDefVal(
TString(
"Fatal") );
523 fTxtWeightsOnly =
kTRUE;
526 DeclareOptionRef( fVarTransformString,
"VarTransform",
"List of variable transformations performed before training, e.g., \"D_Background,P_Signal,G,N_AllClasses\" for: \"Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)\"" );
528 DeclareOptionRef( fHelp,
"H",
"Print method-specific help message" );
530 DeclareOptionRef( fHasMVAPdfs,
"CreateMVAPdfs",
"Create PDFs for classifier outputs (signal and background)" );
532 DeclareOptionRef( fIgnoreNegWeightsInTraining,
"IgnoreNegWeightsInTraining",
533 "Events with negative weights are ignored in the training (but are included for testing and performance evaluation)" );
545 fDefaultPDF =
new PDF(
TString(GetName())+
"_PDF", GetOptions(),
"MVAPdf" );
546 fDefaultPDF->DeclareOptions();
547 fDefaultPDF->ParseOptions();
548 fDefaultPDF->ProcessOptions();
549 fMVAPdfB =
new PDF(
TString(GetName())+
"_PDFBkg", fDefaultPDF->GetOptions(),
"MVAPdfBkg", fDefaultPDF );
550 fMVAPdfB->DeclareOptions();
551 fMVAPdfB->ParseOptions();
552 fMVAPdfB->ProcessOptions();
553 fMVAPdfS =
new PDF(
TString(GetName())+
"_PDFSig", fMVAPdfB->GetOptions(),
"MVAPdfSig", fDefaultPDF );
554 fMVAPdfS->DeclareOptions();
555 fMVAPdfS->ParseOptions();
556 fMVAPdfS->ProcessOptions();
559 SetOptions( fMVAPdfS->GetOptions() );
564 GetTransformationHandler(),
568 if (fDefaultPDF!= 0) {
delete fDefaultPDF; fDefaultPDF = 0; }
569 if (fMVAPdfS != 0) {
delete fMVAPdfS; fMVAPdfS = 0; }
570 if (fMVAPdfB != 0) {
delete fMVAPdfB; fMVAPdfB = 0; }
574 fVerbosityLevelString =
TString(
"Verbose");
575 Log().SetMinType( kVERBOSE );
577 else if (fVerbosityLevelString ==
"Debug" ) Log().SetMinType( kDEBUG );
578 else if (fVerbosityLevelString ==
"Verbose" ) Log().SetMinType( kVERBOSE );
579 else if (fVerbosityLevelString ==
"Info" ) Log().SetMinType( kINFO );
580 else if (fVerbosityLevelString ==
"Warning" ) Log().SetMinType( kWARNING );
581 else if (fVerbosityLevelString ==
"Error" ) Log().SetMinType( kERROR );
582 else if (fVerbosityLevelString ==
"Fatal" ) Log().SetMinType( kFATAL );
583 else if (fVerbosityLevelString !=
"Default" ) {
584 Log() << kFATAL <<
"<ProcessOptions> Verbosity level type '"
585 << fVerbosityLevelString <<
"' unknown." <<
Endl;
597 DeclareOptionRef( fNormalise=
kFALSE,
"Normalise",
"Normalise input variables" );
598 DeclareOptionRef( fUseDecorr=
kFALSE,
"D",
"Use-decorrelated-variables flag" );
599 DeclareOptionRef( fVariableTransformTypeString=
"Signal",
"VarTransformType",
600 "Use signal or background events to derive for variable transformation (the transformation is applied on both types of, course)" );
601 AddPreDefVal(
TString(
"Signal") );
602 AddPreDefVal(
TString(
"Background") );
603 DeclareOptionRef( fTxtWeightsOnly=
kTRUE,
"TxtWeightFilesOnly",
"If True: write all training results (weights) as text files (False: some are written in ROOT format)" );
613 DeclareOptionRef( fNbinsMVAPdf = 60,
"NbinsMVAPdf",
"Number of bins used for the PDFs of classifier outputs" );
614 DeclareOptionRef( fNsmoothMVAPdf = 2,
"NsmoothMVAPdf",
"Number of smoothing iterations for classifier PDFs" );
628 Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Parameter optimization is not yet implemented for method "
629 << GetName() <<
Endl;
630 Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Currently we need to set hardcoded which parameter is tuned in which ranges"<<
Endl;
632 return std::map<TString,Double_t>();
652 if (Help()) PrintHelpMessage();
655 if(!IsSilentFile()) BaseDir()->cd();
659 GetTransformationHandler().CalcTransformations(Data()->GetEventCollection());
663 <<
"Begin training" <<
Endl;
664 Long64_t nEvents = Data()->GetNEvents();
668 <<
"\tEnd of training " <<
Endl;
671 <<
"Elapsed time for training with " << nEvents <<
" events: "
675 <<
"\tCreate MVA output for ";
678 if (DoMulticlass()) {
679 Log() <<
Form(
"[%s] : ",DataInfo().GetName())<<
"Multiclass classification on training sample" <<
Endl;
682 else if (!DoRegression()) {
684 Log() <<
Form(
"[%s] : ",DataInfo().GetName())<<
"classification on training sample" <<
Endl;
693 Log() <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"regression on training sample" <<
Endl;
697 Log() <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Create PDFs" <<
Endl;
704 if (fModelPersistence ) WriteStateToFile();
707 if ((!DoRegression()) && (fModelPersistence)) MakeClass();
714 WriteMonitoringHistosToFile();
722 if (!DoRegression()) Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Trying to use GetRegressionDeviation() with a classification job" <<
Endl;
723 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Create results for " << (
type==
Types::kTraining?
"training":
"testing") <<
Endl;
741 Long64_t nEvents = Data()->GetNEvents();
752 size_t ntargets = Data()->GetEvent(0)->GetNTargets();
757 Data()->SetCurrentEvent(
ievt);
758 std::vector< Float_t > vals = GetRegressionValues();
760 Log() << kFATAL <<
"Output regression vector with size " << vals.size() <<
" is not consistent with target size of "
763 std::copy(vals.begin(), vals.end(),
itr);
778 Data()->SetCurrentType(
type);
780 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Create results for " << (
type==
Types::kTraining?
"training":
"testing") <<
Endl;
784 Long64_t nEvents = Data()->GetNEvents();
789 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName()) <<
"Evaluation of " << GetMethodName() <<
" on "
792 regRes->Resize( nEvents );
794 std::vector<float>
output = GetAllRegressionValues();
796 Data()->SetCurrentEvent(0);
797 size_t nTargets = GetEvent()->GetNTargets();
802 Log() << kFATAL <<
"Output regression vector with size " <<
output.size() <<
" is not consistent with target size of "
803 <<
nTargets <<
" and number of events " << nEvents << std::endl;
815 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())
816 <<
"Elapsed time for evaluation of " << nEvents <<
" events: "
817 <<
timer.GetElapsedTime() <<
" " <<
Endl;
821 SetTestTime(
timer.ElapsedSeconds());
833 Long64_t nEvents = Data()->GetNEvents();
838 Data()->SetCurrentEvent(0);
839 std::vector< Float_t > vals = GetMulticlassValues();
840 std::vector<float>
output(nEvents * vals.size());
842 std::copy(vals.begin(), vals.end(),
itr);
845 Data()->SetCurrentEvent(
ievt);
846 vals = GetMulticlassValues();
848 std::copy(vals.begin(), vals.end(),
itr);
859 Data()->SetCurrentType(
type);
861 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Create results for " << (
type==
Types::kTraining?
"training":
"testing") <<
Endl;
864 if (!
resMulticlass) Log() << kFATAL<<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"unable to create pointer in AddMulticlassOutput, exiting."<<
Endl;
866 Long64_t nEvents = Data()->GetNEvents();
871 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Multiclass evaluation of " << GetMethodName() <<
" on "
875 std::vector<Float_t>
output = GetAllMulticlassValues();
883 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())
884 <<
"Elapsed time for evaluation of " << nEvents <<
" events: "
885 <<
timer.GetElapsedTime() <<
" " <<
Endl;
889 SetTestTime(
timer.ElapsedSeconds());
919 return GetMvaValue()*GetSignalReferenceCutOrientation() > GetSignalReferenceCut()*GetSignalReferenceCutOrientation() ?
kTRUE :
kFALSE;
926 return mvaVal*GetSignalReferenceCutOrientation() > GetSignalReferenceCut()*GetSignalReferenceCutOrientation() ?
kTRUE :
kFALSE;
934 Data()->SetCurrentType(
type);
939 Long64_t nEvents = Data()->GetNEvents();
940 clRes->Resize( nEvents );
945 Log() << kHEADER <<
Form(
"[%s] : ",DataInfo().GetName())
946 <<
"Evaluation of " << GetMethodName() <<
" on "
948 <<
" sample (" << nEvents <<
" events)" <<
Endl;
950 std::vector<Double_t> mvaValues = GetMvaValues(0, nEvents,
true);
953 <<
"Elapsed time for evaluation of " << nEvents <<
" events: "
954 <<
timer.GetElapsedTime() <<
" " <<
Endl;
958 SetTestTime(
timer.ElapsedSeconds());
964 auto ev = Data()->GetEvent(
ievt);
974 Long64_t nEvents = Data()->GetNEvents();
979 nEvents = values.size();
985 Log() << kHEADER <<
Form(
"[%s] : ",DataInfo().GetName())
986 <<
"Evaluation of " << GetMethodName() <<
" on "
988 <<
" sample (" << nEvents <<
" events)" <<
Endl;
991 Data()->SetCurrentEvent(
ievt);
992 values[
ievt] = GetMvaValue();
1003 <<
"Elapsed time for evaluation of " << nEvents <<
" events: "
1004 <<
timer.GetElapsedTime() <<
" " <<
Endl;
1026 Data()->SetCurrentType(
type);
1031 Long64_t nEvents = Data()->GetNEvents();
1036 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName()) <<
"Evaluation of " << GetMethodName() <<
" on "
1044 Data()->SetCurrentEvent(
ievt);
1046 if (
proba < 0)
break;
1047 mvaProb->SetValue(
proba,
ievt, DataInfo().IsSignal( Data()->GetEvent()) );
1053 Log() << kDEBUG <<
Form(
"Dataset[%s] : ",DataInfo().GetName())
1054 <<
"Elapsed time for evaluation of " << nEvents <<
" events: "
1055 <<
timer.GetElapsedTime() <<
" " <<
Endl;
1073 Data()->SetCurrentType(
type);
1078 const Int_t nevt = GetNEvents();
1083 Log() << kINFO <<
"Calculate regression for all events" <<
Endl;
1086 auto output = GetAllRegressionValues();
1087 int ntargets = Data()->GetEvent(0)->GetNTargets();
1111 m1 += t*
w;
s1 += t*t*
w;
1118 timer.DrawProgressBar(nevt - 1);
1119 Log() << kINFO <<
"Elapsed time for evaluation of " << nevt <<
" events: "
1120 <<
timer.GetElapsedTime() <<
" " <<
Endl;
1177 if (!
resMulticlass) Log() << kFATAL<<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"unable to create pointer in TestMulticlass, exiting."<<
Endl;
1209 if (0==
mvaRes && !(GetMethodTypeName().Contains(
"Cuts"))) {
1210 Log()<<
Form(
"Dataset[%s] : ",DataInfo().GetName()) <<
"mvaRes " <<
mvaRes <<
" GetMethodTypeName " << GetMethodTypeName()
1211 <<
" contains " << !(GetMethodTypeName().Contains(
"Cuts")) <<
Endl;
1212 Log() << kFATAL<<
Form(
"Dataset[%s] : ",DataInfo().GetName()) <<
"<TestInit> Test variable " << GetTestvarName()
1213 <<
" not found in tree" <<
Endl;
1218 fMeanS, fMeanB, fRmsS, fRmsB, fXmin, fXmax, fSignalClass );
1226 fCutOrientation = (fMeanS > fMeanB) ? kPositive : kNegative;
1236 if(IsSilentFile()) {
1283 Log() << kHEADER <<
Form(
"[%s] : ",DataInfo().GetName())<<
"Loop over test events and fill histograms with classifier response..." <<
Endl <<
Endl;
1284 if (
mvaProb) Log() << kINFO <<
"Also filling probability and rarity histograms (on request)..." <<
Endl;
1288 if (
mvaRes->GetSize() != GetNEvents() ) {
1289 Log() << kFATAL <<
TString::Format(
"Inconsistent result size %lld with number of events %u ",
mvaRes->GetSize() , GetNEvents() ) <<
Endl;
1299 if (DataInfo().IsSignal(
ev)) {
1331 if (fSplS) {
delete fSplS; fSplS = 0; }
1332 if (fSplB) {
delete fSplB; fSplB = 0; }
1346 tf << prefix <<
"#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
1347 tf << prefix <<
"Method : " << GetMethodTypeName() <<
"::" << GetMethodName() << std::endl;
1348 tf.setf(std::ios::left);
1349 tf << prefix <<
"TMVA Release : " << std::setw(10) << GetTrainingTMVAVersionString() <<
" ["
1350 << GetTrainingTMVAVersionCode() <<
"]" << std::endl;
1351 tf << prefix <<
"ROOT Release : " << std::setw(10) << GetTrainingROOTVersionString() <<
" ["
1352 << GetTrainingROOTVersionCode() <<
"]" << std::endl;
1353 tf << prefix <<
"Creator : " <<
userInfo->fUser << std::endl;
1357 tf << prefix <<
"Training events: " << Data()->GetNTrainingEvents() << std::endl;
1361 tf << prefix <<
"Analysis type : " <<
"[" << ((GetAnalysisType()==
Types::kRegression) ?
"Regression" :
"Classification") <<
"]" << std::endl;
1362 tf << prefix << std::endl;
1367 tf << prefix << std::endl << prefix <<
"#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
1368 WriteOptionsToStream(
tf, prefix );
1369 tf << prefix << std::endl;
1372 tf << prefix << std::endl << prefix <<
"#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
1373 WriteVarsToStream(
tf, prefix );
1374 tf << prefix << std::endl;
1391 AddRegressionOutput(
type );
1393 AddMulticlassOutput(
type );
1395 AddClassifierOutput(
type );
1397 AddClassifierOutputProb(
type );
1407 if (!parent)
return;
1412 AddInfoItem(
gi,
"TMVA Release", GetTrainingTMVAVersionString() +
" [" +
gTools().StringFromInt(GetTrainingTMVAVersionCode()) +
"]" );
1413 AddInfoItem(
gi,
"ROOT Release", GetTrainingROOTVersionString() +
" [" +
gTools().StringFromInt(GetTrainingROOTVersionCode()) +
"]");
1414 AddInfoItem(
gi,
"Creator",
userInfo->fUser);
1418 AddInfoItem(
gi,
"Training events",
gTools().StringFromInt(Data()->GetNTrainingEvents()));
1424 AddInfoItem(
gi,
"AnalysisType", analysisType );
1428 AddOptionsXMLTo( parent );
1431 AddVarsXMLTo( parent );
1434 if (fModelPersistence)
1435 AddSpectatorsXMLTo( parent );
1438 AddClassesXMLTo(parent);
1441 if (DoRegression()) AddTargetsXMLTo(parent);
1444 GetTransformationHandler(
false).AddXMLTo( parent );
1448 if (fMVAPdfS) fMVAPdfS->AddXMLTo(
pdfs);
1449 if (fMVAPdfB) fMVAPdfB->AddXMLTo(
pdfs);
1452 AddWeightsXMLTo( parent );
1462 fMVAPdfS = (
TMVA::PDF*)
rf.Get(
"MVA_PDF_Signal" );
1463 fMVAPdfB = (
TMVA::PDF*)
rf.Get(
"MVA_PDF_Background" );
1465 ReadWeightsFromStream(
rf );
1483 <<
"Creating xml weight file: "
1488 gTools().
AddAttr(rootnode,
"Method", GetMethodTypeName() +
"::" + GetMethodName());
1489 WriteStateToXML(rootnode);
1504 <<
"Reading weight file: "
1507 if (
tfname.EndsWith(
".xml") ) {
1510 Log() << kFATAL <<
"Error parsing XML file " <<
tfname <<
Endl;
1513 ReadStateFromXML(rootnode);
1518 fb.open(
tfname.Data(),std::ios::in);
1519 if (!fb.is_open()) {
1520 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<ReadStateFromFile> "
1521 <<
"Unable to open input weight file: " <<
tfname <<
Endl;
1523 std::istream
fin(&fb);
1524 ReadStateFromStream(
fin);
1527 if (!fTxtWeightsOnly) {
1530 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Reading root weight file: "
1533 ReadStateFromStream( *
rfile );
1543 ReadStateFromXML(rootnode);
1560 Log().SetSource( GetName() );
1562 <<
"Read method \"" << GetMethodName() <<
"\" of type \"" << GetMethodTypeName() <<
"\"" <<
Endl;
1580 if (
name ==
"TrainingTime")
1583 if (
name ==
"AnalysisType") {
1589 else Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Analysis type " << val <<
" is not known." <<
Endl;
1592 if (
name ==
"TMVA Release" ||
name ==
"TMVA") {
1596 Log() << kDEBUG <<
Form(
"[%s] : ",DataInfo().GetName()) <<
"MVA method was trained with TMVA Version: " << GetTrainingTMVAVersionString() <<
Endl;
1599 if (
name ==
"ROOT Release" ||
name ==
"ROOT") {
1604 <<
"MVA method was trained with ROOT Version: " << GetTrainingROOTVersionString() <<
Endl;
1610 ReadOptionsFromXML(ch);
1615 ReadVariablesFromXML(ch);
1618 ReadSpectatorsFromXML(ch);
1621 if (DataInfo().GetNClasses()==0) ReadClassesFromXML(ch);
1624 if (DataInfo().GetNTargets()==0 && DoRegression()) ReadTargetsFromXML(ch);
1626 else if (
nodeName==
"Transformations") {
1627 GetTransformationHandler().ReadFromXML(ch);
1631 if (fMVAPdfS) {
delete fMVAPdfS; fMVAPdfS=0; }
1632 if (fMVAPdfB) {
delete fMVAPdfB; fMVAPdfB=0; }
1645 ReadWeightsFromXML(ch);
1648 Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Unparsed XML node: '" <<
nodeName <<
"'" <<
Endl;
1655 if (GetTransformationHandler().GetCallerName() ==
"") GetTransformationHandler().SetCallerName( GetName() );
1671 while (!
TString(buf).BeginsWith(
"Method")) GetLine(
fin,buf);
1680 if (methodName ==
"") methodName =
methodType;
1681 fMethodName = methodName;
1683 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Read method \"" << GetMethodName() <<
"\" of type \"" << GetMethodTypeName() <<
"\"" <<
Endl;
1686 Log().SetSource( GetName() );
1700 while (!
TString(buf).BeginsWith(
"#OPT")) GetLine(
fin,buf);
1701 ReadOptionsFromStream(
fin);
1705 fin.getline(buf,512);
1706 while (!
TString(buf).BeginsWith(
"#VAR"))
fin.getline(buf,512);
1707 ReadVarsFromStream(
fin);
1712 if (IsNormalised()) {
1715 norm->BuildTransformationFromVarInfo( DataInfo().GetVariableInfos() );
1718 if ( fVarTransformString ==
"None") {
1721 }
else if ( fVarTransformString ==
"Decorrelate" ) {
1723 }
else if ( fVarTransformString ==
"PCA" ) {
1725 }
else if ( fVarTransformString ==
"Uniform" ) {
1727 }
else if ( fVarTransformString ==
"Gauss" ) {
1729 }
else if ( fVarTransformString ==
"GaussDecorr" ) {
1733 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<ProcessOptions> Variable transform '"
1734 << fVarTransformString <<
"' unknown." <<
Endl;
1737 if (GetTransformationHandler().GetTransformationList().GetSize() > 0) {
1738 fin.getline(buf,512);
1739 while (!
TString(buf).BeginsWith(
"#MAT"))
fin.getline(buf,512);
1753 fin.getline(buf,512);
1754 while (!
TString(buf).BeginsWith(
"#MVAPDFS"))
fin.getline(buf,512);
1755 if (fMVAPdfS != 0) {
delete fMVAPdfS; fMVAPdfS = 0; }
1756 if (fMVAPdfB != 0) {
delete fMVAPdfB; fMVAPdfB = 0; }
1757 fMVAPdfS =
new PDF(
TString(GetName()) +
" MVA PDF Sig");
1758 fMVAPdfB =
new PDF(
TString(GetName()) +
" MVA PDF Bkg");
1759 fMVAPdfS->SetReadingVersion( GetTrainingTMVAVersionCode() );
1760 fMVAPdfB->SetReadingVersion( GetTrainingTMVAVersionCode() );
1767 fin.getline(buf,512);
1768 while (!
TString(buf).BeginsWith(
"#WGT"))
fin.getline(buf,512);
1769 fin.getline(buf,512);
1770 ReadWeightsFromStream(
fin );
1773 if (GetTransformationHandler().GetCallerName() ==
"") GetTransformationHandler().SetCallerName( GetName() );
1783 o << prefix <<
"NVar " << DataInfo().GetNVariables() << std::endl;
1784 std::vector<VariableInfo>::const_iterator
varIt = DataInfo().GetVariableInfos().
begin();
1785 for (;
varIt!=DataInfo().GetVariableInfos().
end(); ++
varIt) { o << prefix;
varIt->WriteToStream(o); }
1786 o << prefix <<
"NSpec " << DataInfo().GetNSpectators() << std::endl;
1787 varIt = DataInfo().GetSpectatorInfos().
begin();
1788 for (;
varIt!=DataInfo().GetSpectatorInfos().
end(); ++
varIt) { o << prefix;
varIt->WriteToStream(o); }
1802 if (
readNVar!=DataInfo().GetNVariables()) {
1803 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"You declared "<< DataInfo().GetNVariables() <<
" variables in the Reader"
1804 <<
" while there are " <<
readNVar <<
" variables declared in the file"
1810 std::vector<VariableInfo>::iterator
varIt = DataInfo().GetVariableInfos().
begin();
1814 if (
varIt->GetExpression() ==
varInfo.GetExpression()) {
1815 varInfo.SetExternalLink((*varIt).GetExternalLink());
1819 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"ERROR in <ReadVarsFromStream>" <<
Endl;
1820 Log() << kINFO <<
"The definition (or the order) of the variables found in the input file is" <<
Endl;
1821 Log() << kINFO <<
"is not the same as the one declared in the Reader (which is necessary for" <<
Endl;
1822 Log() << kINFO <<
"the correct working of the method):" <<
Endl;
1823 Log() << kINFO <<
" var #" <<
varIdx <<
" declared in Reader: " <<
varIt->GetExpression() <<
Endl;
1824 Log() << kINFO <<
" var #" <<
varIdx <<
" declared in file : " <<
varInfo.GetExpression() <<
Endl;
1825 Log() << kFATAL <<
"The expression declared to the Reader needs to be checked (name or order are wrong)" <<
Endl;
1838 for (
UInt_t idx=0; idx<DataInfo().GetVariableInfos().size(); idx++) {
1854 for (
UInt_t idx=0; idx<DataInfo().GetSpectatorInfos().size(); idx++) {
1860 if (
vi.GetVarType()==
'C')
continue;
1897 for (
UInt_t idx=0; idx<DataInfo().GetTargetInfos().size(); idx++) {
1913 if (
readNVar!=DataInfo().GetNVariables()) {
1914 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"You declared "<< DataInfo().GetNVariables() <<
" variables in the Reader"
1915 <<
" while there are " <<
readNVar <<
" variables declared in the file"
1933 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"ERROR in <ReadVariablesFromXML>" <<
Endl;
1934 Log() << kINFO <<
"The definition (or the order) of the variables found in the input file is" <<
Endl;
1935 Log() << kINFO <<
"not the same as the one declared in the Reader (which is necessary for the" <<
Endl;
1936 Log() << kINFO <<
"correct working of the method):" <<
Endl;
1938 Log() << kINFO <<
" var #" <<
varIdx <<
" declared in file : " <<
readVarInfo.GetExpression() <<
Endl;
1939 Log() << kFATAL <<
"The expression declared to the Reader needs to be checked (name or order are wrong)" <<
Endl;
1954 Log() << kFATAL<<
Form(
"Dataset[%s] : ",DataInfo().GetName()) <<
"You declared "<< DataInfo().GetNSpectators(
kFALSE) <<
" spectators in the Reader"
1955 <<
" while there are " <<
readNSpec <<
" spectators declared in the file"
1973 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"ERROR in <ReadSpectatorsFromXML>" <<
Endl;
1974 Log() << kINFO <<
"The definition (or the order) of the spectators found in the input file is" <<
Endl;
1975 Log() << kINFO <<
"not the same as the one declared in the Reader (which is necessary for the" <<
Endl;
1976 Log() << kINFO <<
"correct working of the method):" <<
Endl;
1979 Log() << kFATAL <<
"The expression declared to the Reader needs to be checked (name or order are wrong)" <<
Endl;
2000 DataInfo().AddClass(classname);
2008 DataInfo().AddClass(className);
2015 if (DataInfo().GetClassInfo(
"Signal") != 0) {
2016 fSignalClass = DataInfo().GetClassInfo(
"Signal")->GetNumber();
2020 if (DataInfo().GetClassInfo(
"Background") != 0) {
2021 fBackgroundClass = DataInfo().GetClassInfo(
"Background")->GetNumber();
2041 DataInfo().AddTarget(expression,
"",
"",0,0);
2053 if (fBaseDir != 0)
return fBaseDir;
2054 Log()<<kDEBUG<<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
" Base Directory for " << GetMethodName() <<
" not set yet --> check if already there.." <<
Endl;
2056 if (IsSilentFile()) {
2057 Log() << kFATAL <<
Form(
"Dataset[%s] : ", DataInfo().GetName())
2058 <<
"MethodBase::BaseDir() - No directory exists when running a Method without output file. Enable the "
2059 "output when creating the factory"
2065 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"MethodBase::BaseDir() - MethodBaseDir() return a NULL pointer!" <<
Endl;
2071 Log()<<kDEBUG<<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
" Base Directory for " << GetMethodTypeName() <<
" does not exist yet--> created it" <<
Endl;
2075 if (fModelPersistence) {
2083 Log()<<kDEBUG<<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
" Base Directory for " << GetMethodTypeName() <<
" existed, return it.." <<
Endl;
2093 if (fMethodBaseDir != 0) {
2094 return fMethodBaseDir;
2099 Log() << kDEBUG <<
Form(
"Dataset[%s] : ",
datasetName) <<
" Base Directory for " << GetMethodTypeName()
2100 <<
" not set yet --> check if already there.." <<
Endl;
2105 if (!fMethodBaseDir) {
2107 if (!fMethodBaseDir) {
2108 Log() << kFATAL <<
"Can not create dir " <<
datasetName;
2112 fMethodBaseDir = fMethodBaseDir->GetDirectory(
methodTypeDir.Data());
2114 if (!fMethodBaseDir) {
2118 Log() << kDEBUG <<
Form(
"Dataset[%s] : ",
datasetName) <<
" Base Directory for " << GetMethodName()
2119 <<
" does not exist yet--> created it" <<
Endl;
2123 <<
"Return from MethodBaseDir() after creating base directory " <<
Endl;
2124 return fMethodBaseDir;
2149 if (fWeightFile!=
"")
return fWeightFile;
2171 if (0 != fMVAPdfS) {
2172 fMVAPdfS->GetOriginalHist()->Write();
2173 fMVAPdfS->GetSmoothedHist()->Write();
2174 fMVAPdfS->GetPDFHist()->Write();
2176 if (0 != fMVAPdfB) {
2177 fMVAPdfB->GetOriginalHist()->Write();
2178 fMVAPdfB->GetSmoothedHist()->Write();
2179 fMVAPdfB->GetPDFHist()->Write();
2185 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<WriteEvaluationHistosToFile> Unknown result: "
2187 <<
"/kMaxAnalysisType" <<
Endl;
2188 results->GetStorage()->Write();
2192 GetTransformationHandler().PlotVariables (GetEventCollection(
Types::kTesting ), BaseDir() );
2194 Log() << kINFO <<
TString::Format(
"Dataset[%s] : ",DataInfo().GetName())
2195 <<
" variable plots are not produces ! The number of variables is " << DataInfo().GetNVariables()
2215 fin.getline(buf,512);
2217 if (
line.BeginsWith(
"TMVA Release")) {
2221 std::stringstream s(code.
Data());
2222 s >> fTMVATrainingVersion;
2223 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"MVA method was trained with TMVA Version: " << GetTrainingTMVAVersionString() <<
Endl;
2225 if (
line.BeginsWith(
"ROOT Release")) {
2229 std::stringstream s(code.
Data());
2230 s >> fROOTTrainingVersion;
2231 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"MVA method was trained with ROOT Version: " << GetTrainingROOTVersionString() <<
Endl;
2233 if (
line.BeginsWith(
"Analysis type")) {
2237 std::stringstream s(code.
Data());
2238 std::string analysisType;
2240 if (analysisType ==
"regression" || analysisType ==
"Regression") SetAnalysisType(
Types::kRegression );
2241 else if (analysisType ==
"classification" || analysisType ==
"Classification") SetAnalysisType(
Types::kClassification );
2242 else if (analysisType ==
"multiclass" || analysisType ==
"Multiclass") SetAnalysisType(
Types::kMulticlass );
2243 else Log() << kFATAL <<
"Analysis type " << analysisType <<
" from weight-file not known!" << std::endl;
2245 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Method was trained for "
2266 Log() << kERROR<<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<CreateMVAPdfs> No result of classifier testing available" <<
Endl;
2273 TH1*
histMVAPdfS =
new TH1D( GetMethodTypeName() +
"_tr_S", GetMethodTypeName() +
"_tr_S",
2274 fMVAPdfS->GetHistNBins(
mvaRes->GetSize() ), minVal, maxVal );
2275 TH1*
histMVAPdfB =
new TH1D( GetMethodTypeName() +
"_tr_B", GetMethodTypeName() +
"_tr_B",
2276 fMVAPdfB->GetHistNBins(
mvaRes->GetSize() ), minVal, maxVal );
2307 if (DataInfo().GetNClasses() == 2) {
2308 Log() << kINFO<<
Form(
"Dataset[%s] : ",DataInfo().GetName())
2309 <<
TString::Format(
"<CreateMVAPdfs> Separation from histogram (PDF): %1.3f (%1.3f)",
2322 if (!fMVAPdfS || !fMVAPdfB) {
2323 Log() << kINFO<<
Form(
"Dataset[%s] : ",DataInfo().GetName()) <<
"<GetProba> MVA PDFs for Signal and Background don't exist yet, we'll create them on demand" <<
Endl;
2326 Double_t sigFraction = DataInfo().GetTrainingSumSignalWeights() / (DataInfo().GetTrainingSumSignalWeights() + DataInfo().GetTrainingSumBackgrWeights() );
2337 if (!fMVAPdfS || !fMVAPdfB) {
2338 Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<GetProba> MVA PDFs for Signal and Background don't exist" <<
Endl;
2359 Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<GetRarity> Required MVA PDF for Signal or Background does not exist: "
2360 <<
"select option \"CreateMVAPdfs\"" <<
Endl;
2375 Data()->SetCurrentType(
type);
2385 else if (list->GetSize() > 2) {
2386 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<GetEfficiency> Wrong number of arguments"
2388 <<
" | required format, e.g., Efficiency:0.05, or empty string" <<
Endl;
2394 if (
results->GetHist(
"MVA_S")->GetNbinsX() !=
results->GetHist(
"MVA_B")->GetNbinsX() ||
2395 results->GetHist(
"MVA_HIGHBIN_S")->GetNbinsX() !=
results->GetHist(
"MVA_HIGHBIN_B")->GetNbinsX() ) {
2396 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<GetEfficiency> Binning mismatch between signal and background histos" <<
Endl;
2411 if (
results->DoesExist(
"MVA_EFF_S")==0) {
2414 TH1*
eff_s =
new TH1D( GetTestvarName() +
"_effS", GetTestvarName() +
" (signal)", fNbinsH,
xmin,
xmax );
2415 TH1*
eff_b =
new TH1D( GetTestvarName() +
"_effB", GetTestvarName() +
" (background)", fNbinsH,
xmin,
xmax );
2420 Int_t sign = (fCutOrientation == kPositive) ? +1 : -1;
2449 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<GetEfficiency> Mismatch in sign" <<
Endl;
2460 TH1*
eff_BvsS =
new TH1D( GetTestvarName() +
"_effBvsS", GetTestvarName() +
"", fNbins, 0, 1 );
2462 eff_BvsS->SetXTitle(
"Signal eff" );
2463 eff_BvsS->SetYTitle(
"Backgr eff" );
2466 TH1*
rej_BvsS =
new TH1D( GetTestvarName() +
"_rejBvsS", GetTestvarName() +
"", fNbins, 0, 1 );
2468 rej_BvsS->SetXTitle(
"Signal eff" );
2469 rej_BvsS->SetYTitle(
"Backgr rejection (1-eff)" );
2473 GetTestvarName(), fNbins, 0, 1 );
2476 inveff_BvsS->SetYTitle(
"Inverse backgr. eff (1/eff)" );
2510 if (
effB>std::numeric_limits<double>::epsilon())
2525 rejB = 1.0 - fSpleffBvsS->Eval(
effS );
2535 SetSignalReferenceCut( cut );
2540 if (0 == fSpleffBvsS) {
2558 integral += (1.0 -
effB);
2612 if (list->GetSize() != 2) {
2613 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<GetTrainingEfficiency> Wrong number of arguments"
2615 <<
" | required format, e.g., Efficiency:0.05" <<
Endl;
2626 if (
results->GetHist(
"MVA_S")->GetNbinsX() !=
results->GetHist(
"MVA_B")->GetNbinsX() ||
2627 results->GetHist(
"MVA_HIGHBIN_S")->GetNbinsX() !=
results->GetHist(
"MVA_HIGHBIN_B")->GetNbinsX() ) {
2628 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<GetTrainingEfficiency> Binning mismatch between signal and background histos"
2641 if (
results->DoesExist(
"MVA_TRAIN_S")==0) {
2647 TH1*
mva_s_tr =
new TH1D( GetTestvarName() +
"_Train_S",GetTestvarName() +
"_Train_S", fNbinsMVAoutput, fXmin,
sxmax );
2648 TH1*
mva_b_tr =
new TH1D( GetTestvarName() +
"_Train_B",GetTestvarName() +
"_Train_B", fNbinsMVAoutput, fXmin,
sxmax );
2655 TH1*
mva_eff_tr_s =
new TH1D( GetTestvarName() +
"_trainingEffS", GetTestvarName() +
" (signal)",
2657 TH1*
mva_eff_tr_b =
new TH1D( GetTestvarName() +
"_trainingEffB", GetTestvarName() +
" (background)",
2663 Int_t sign = (fCutOrientation == kPositive) ? +1 : -1;
2665 std::vector<Double_t> mvaValues = GetMvaValues(0,Data()->GetNEvents());
2671 Data()->SetCurrentEvent(
ievt);
2703 TH1*
eff_bvss =
new TH1D( GetTestvarName() +
"_trainingEffBvsS", GetTestvarName() +
"", fNbins, 0, 1 );
2705 TH1*
rej_bvss =
new TH1D( GetTestvarName() +
"_trainingRejBvsS", GetTestvarName() +
"", fNbins, 0, 1 );
2713 if (fSplTrainRefS)
delete fSplTrainRefS;
2714 if (fSplTrainRefB)
delete fSplTrainRefB;
2729 fEffS =
results->GetHist(
"MVA_TRAINEFF_S");
2752 if (0 == fSplTrainEffBvsS)
return 0.0;
2761 effB = fSplTrainEffBvsS->Eval(
effS );
2778 if (!
resMulticlass) Log() << kFATAL<<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"unable to create pointer in GetMulticlassEfficiency, exiting."<<
Endl;
2790 if (!
resMulticlass) Log() << kFATAL<<
"unable to create pointer in GetMulticlassTrainingEfficiency, exiting."<<
Endl;
2792 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Determine optimal multiclass cuts for training data..." <<
Endl;
2824 Log() << kFATAL <<
"Cannot get confusion matrix for non-multiclass analysis." << std::endl;
2828 Data()->SetCurrentType(
type);
2833 Log() << kFATAL <<
Form(
"Dataset[%s] : ", DataInfo().GetName())
2834 <<
"unable to create pointer in GetMulticlassEfficiency, exiting." <<
Endl;
2876 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<GetSeparation> Mismatch in pdfs" <<
Endl;
2880 if (!fSplS || !fSplB) {
2881 Log()<<kDEBUG<<
Form(
"[%s] : ",DataInfo().GetName())<<
"could not calculate the separation, distributions"
2882 <<
" fSplS or fSplB are not yet filled" <<
Endl;
2898 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<GetROCIntegral(TH1D*, TH1D*)> Mismatch in hists" <<
Endl;
2914 integral += (1-
pdfB->GetIntegral(cut,
xmax)) *
pdfS->GetVal(cut);
2919 return integral*step;
2932 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<GetSeparation> Mismatch in pdfs" <<
Endl;
2946 integral += (1-
pdfB->GetIntegral(cut,
xmax)) *
pdfS->GetVal(cut);
2949 return integral*step;
2968 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<GetMaximumSignificance> "
2969 <<
"Number of signal or background events is <= 0 ==> abort"
2973 Log() << kINFO <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Using ratio SignalEvents/BackgroundEvents = "
2980 Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"Efficiency histograms empty !" <<
Endl;
2981 Log() << kWARNING <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"no maximum cut found, return 0" <<
Endl;
2985 for (
Int_t bin=1; bin<=fNbinsH; bin++) {
3021 Long64_t entries = Data()->GetNEvents();
3025 Log() << kFATAL <<
Form(
"Dataset[%s] : ",DataInfo().GetName())<<
"<CalculateEstimator> Wrong tree type: " <<
treeType <<
Endl;
3049 if (DataInfo().IsSignal(
ev)) {
3079 classFileName = GetWeightFileDir() +
"/" + GetJobName() +
"_" + GetMethodName() +
".class.C";
3087 <<
"Creating standalone class: "
3092 Log() << kFATAL <<
"<MakeClass> Unable to open file: " <<
classFileName <<
Endl;
3097 fout <<
"// Class: " << className << std::endl;
3098 fout <<
"// Automatically generated by MethodBase::MakeClass" << std::endl <<
"//" << std::endl;
3102 fout <<
"/* configuration options =====================================================" << std::endl << std::endl;
3103 WriteStateToStream(
fout );
3105 fout <<
"============================================================================ */" << std::endl;
3108 fout <<
"" << std::endl;
3109 fout <<
"#include <array>" << std::endl;
3110 fout <<
"#include <vector>" << std::endl;
3111 fout <<
"#include <cmath>" << std::endl;
3112 fout <<
"#include <string>" << std::endl;
3113 fout <<
"#include <iostream>" << std::endl;
3114 fout <<
"" << std::endl;
3117 this->MakeClassSpecificHeader(
fout, className );
3119 fout <<
"#ifndef IClassifierReader__def" << std::endl;
3120 fout <<
"#define IClassifierReader__def" << std::endl;
3122 fout <<
"class IClassifierReader {" << std::endl;
3124 fout <<
" public:" << std::endl;
3126 fout <<
" // constructor" << std::endl;
3127 fout <<
" IClassifierReader() : fStatusIsClean( true ) {}" << std::endl;
3128 fout <<
" virtual ~IClassifierReader() {}" << std::endl;
3130 fout <<
" // return classifier response" << std::endl;
3132 fout <<
" virtual std::vector<double> GetMulticlassValues( const std::vector<double>& inputValues ) const = 0;" << std::endl;
3134 fout <<
" virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;" << std::endl;
3137 fout <<
" // returns classifier status" << std::endl;
3138 fout <<
" bool IsStatusClean() const { return fStatusIsClean; }" << std::endl;
3140 fout <<
" protected:" << std::endl;
3142 fout <<
" bool fStatusIsClean;" << std::endl;
3143 fout <<
"};" << std::endl;
3145 fout <<
"#endif" << std::endl;
3147 fout <<
"class " << className <<
" : public IClassifierReader {" << std::endl;
3149 fout <<
" public:" << std::endl;
3151 fout <<
" // constructor" << std::endl;
3152 fout <<
" " << className <<
"( std::vector<std::string>& theInputVars )" << std::endl;
3153 fout <<
" : IClassifierReader()," << std::endl;
3154 fout <<
" fClassName( \"" << className <<
"\" )," << std::endl;
3155 fout <<
" fNvars( " << GetNvar() <<
" )" << std::endl;
3156 fout <<
" {" << std::endl;
3157 fout <<
" // the training input variables" << std::endl;
3158 fout <<
" const char* inputVars[] = { ";
3160 fout <<
"\"" << GetOriginalVarName(
ivar) <<
"\"";
3161 if (
ivar<GetNvar()-1)
fout <<
", ";
3163 fout <<
" };" << std::endl;
3165 fout <<
" // sanity checks" << std::endl;
3166 fout <<
" if (theInputVars.size() <= 0) {" << std::endl;
3167 fout <<
" std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": empty input vector\" << std::endl;" << std::endl;
3168 fout <<
" fStatusIsClean = false;" << std::endl;
3169 fout <<
" }" << std::endl;
3171 fout <<
" if (theInputVars.size() != fNvars) {" << std::endl;
3172 fout <<
" std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": mismatch in number of input values: \"" << std::endl;
3173 fout <<
" << theInputVars.size() << \" != \" << fNvars << std::endl;" << std::endl;
3174 fout <<
" fStatusIsClean = false;" << std::endl;
3175 fout <<
" }" << std::endl;
3177 fout <<
" // validate input variables" << std::endl;
3178 fout <<
" for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {" << std::endl;
3179 fout <<
" if (theInputVars[ivar] != inputVars[ivar]) {" << std::endl;
3180 fout <<
" std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": mismatch in input variable names\" << std::endl" << std::endl;
3181 fout <<
" << \" for variable [\" << ivar << \"]: \" << theInputVars[ivar].c_str() << \" != \" << inputVars[ivar] << std::endl;" << std::endl;
3182 fout <<
" fStatusIsClean = false;" << std::endl;
3183 fout <<
" }" << std::endl;
3184 fout <<
" }" << std::endl;
3186 fout <<
" // initialize min and max vectors (for normalisation)" << std::endl;
3188 fout <<
" fVmin[" <<
ivar <<
"] = " << std::setprecision(15) << GetXmin(
ivar ) <<
";" << std::endl;
3189 fout <<
" fVmax[" <<
ivar <<
"] = " << std::setprecision(15) << GetXmax(
ivar ) <<
";" << std::endl;
3192 fout <<
" // initialize input variable types" << std::endl;
3194 fout <<
" fType[" <<
ivar <<
"] = \'" << DataInfo().GetVariableInfo(
ivar).GetVarType() <<
"\';" << std::endl;
3197 fout <<
" // initialize constants" << std::endl;
3198 fout <<
" Initialize();" << std::endl;
3200 if (GetTransformationHandler().GetTransformationList().GetSize() != 0) {
3201 fout <<
" // initialize transformation" << std::endl;
3202 fout <<
" InitTransform();" << std::endl;
3204 fout <<
" }" << std::endl;
3206 fout <<
" // destructor" << std::endl;
3207 fout <<
" virtual ~" << className <<
"() {" << std::endl;
3208 fout <<
" Clear(); // method-specific" << std::endl;
3209 fout <<
" }" << std::endl;
3211 fout <<
" // the classifier response" << std::endl;
3212 fout <<
" // \"inputValues\" is a vector of input values in the same order as the" << std::endl;
3213 fout <<
" // variables given to the constructor" << std::endl;
3215 fout <<
" std::vector<double> GetMulticlassValues( const std::vector<double>& inputValues ) const override;" << std::endl;
3217 fout <<
" double GetMvaValue( const std::vector<double>& inputValues ) const override;" << std::endl;
3220 fout <<
" private:" << std::endl;
3222 fout <<
" // method-specific destructor" << std::endl;
3223 fout <<
" void Clear();" << std::endl;
3225 if (GetTransformationHandler().GetTransformationList().GetSize()!=0) {
3226 fout <<
" // input variable transformation" << std::endl;
3227 GetTransformationHandler().MakeFunction(
fout, className,1);
3228 fout <<
" void InitTransform();" << std::endl;
3229 fout <<
" void Transform( std::vector<double> & iv, int sigOrBgd ) const;" << std::endl;
3232 fout <<
" // common member variables" << std::endl;
3233 fout <<
" const char* fClassName;" << std::endl;
3235 fout <<
" const size_t fNvars;" << std::endl;
3236 fout <<
" size_t GetNvar() const { return fNvars; }" << std::endl;
3237 fout <<
" char GetType( int ivar ) const { return fType[ivar]; }" << std::endl;
3239 fout <<
" // normalisation of input variables" << std::endl;
3240 fout <<
" double fVmin[" << GetNvar() <<
"];" << std::endl;
3241 fout <<
" double fVmax[" << GetNvar() <<
"];" << std::endl;
3242 fout <<
" double NormVariable( double x, double xmin, double xmax ) const {" << std::endl;
3243 fout <<
" // normalise to output range: [-1, 1]" << std::endl;
3244 fout <<
" return 2*(x - xmin)/(xmax - xmin) - 1.0;" << std::endl;
3245 fout <<
" }" << std::endl;
3247 fout <<
" // type of input variable: 'F' or 'I'" << std::endl;
3248 fout <<
" char fType[" << GetNvar() <<
"];" << std::endl;
3250 fout <<
" // initialize internal variables" << std::endl;
3251 fout <<
" void Initialize();" << std::endl;
3253 fout <<
" std::vector<double> GetMulticlassValues__( const std::vector<double>& inputValues ) const;" << std::endl;
3255 fout <<
" double GetMvaValue__( const std::vector<double>& inputValues ) const;" << std::endl;
3257 fout <<
"" << std::endl;
3258 fout <<
" // private members (method specific)" << std::endl;
3261 MakeClassSpecific(
fout, className );
3264 fout <<
"inline std::vector<double> " << className <<
"::GetMulticlassValues( const std::vector<double>& inputValues ) const" << std::endl;
3266 fout <<
"inline double " << className <<
"::GetMvaValue( const std::vector<double>& inputValues ) const" << std::endl;
3268 fout <<
"{" << std::endl;
3269 fout <<
" // classifier response value" << std::endl;
3271 fout <<
" std::vector<double> retval;" << std::endl;
3273 fout <<
" double retval = 0;" << std::endl;
3276 fout <<
" // classifier response, sanity check first" << std::endl;
3277 fout <<
" if (!IsStatusClean()) {" << std::endl;
3278 fout <<
" std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": cannot return classifier response\"" << std::endl;
3279 fout <<
" << \" because status is dirty\" << std::endl;" << std::endl;
3280 fout <<
" }" << std::endl;
3281 fout <<
" else {" << std::endl;
3282 if (IsNormalised()) {
3283 fout <<
" // normalise variables" << std::endl;
3284 fout <<
" std::vector<double> iV;" << std::endl;
3285 fout <<
" iV.reserve(inputValues.size());" << std::endl;
3286 fout <<
" int ivar = 0;" << std::endl;
3287 fout <<
" for (std::vector<double>::const_iterator varIt = inputValues.begin();" << std::endl;
3288 fout <<
" varIt != inputValues.end(); varIt++, ivar++) {" << std::endl;
3289 fout <<
" iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));" << std::endl;
3290 fout <<
" }" << std::endl;
3291 if (GetTransformationHandler().GetTransformationList().GetSize() != 0 && GetMethodType() !=
Types::kLikelihood &&
3293 fout <<
" Transform( iV, -1 );" << std::endl;
3297 fout <<
" retval = GetMulticlassValues__( iV );" << std::endl;
3299 fout <<
" retval = GetMvaValue__( iV );" << std::endl;
3302 if (GetTransformationHandler().GetTransformationList().GetSize() != 0 && GetMethodType() !=
Types::kLikelihood &&
3304 fout <<
" std::vector<double> iV(inputValues);" << std::endl;
3305 fout <<
" Transform( iV, -1 );" << std::endl;
3307 fout <<
" retval = GetMulticlassValues__( iV );" << std::endl;
3309 fout <<
" retval = GetMvaValue__( iV );" << std::endl;
3313 fout <<
" retval = GetMulticlassValues__( inputValues );" << std::endl;
3315 fout <<
" retval = GetMvaValue__( inputValues );" << std::endl;
3319 fout <<
" }" << std::endl;
3321 fout <<
" return retval;" << std::endl;
3322 fout <<
"}" << std::endl;
3325 if (GetTransformationHandler().GetTransformationList().GetSize()!=0)
3326 GetTransformationHandler().MakeFunction(
fout, className,2);
3338 std::streambuf*
cout_sbuf = std::cout.rdbuf();
3339 std::ofstream* o = 0;
3340 if (
gConfig().WriteOptionsReference()) {
3341 Log() << kINFO <<
"Print Help message for class " << GetName() <<
" into file: " << GetReferenceFile() <<
Endl;
3342 o =
new std::ofstream( GetReferenceFile(), std::ios::app );
3344 Log() << kFATAL <<
"<PrintHelpMessage> Unable to append to output file: " << GetReferenceFile() <<
Endl;
3346 std::cout.rdbuf( o->rdbuf() );
3351 Log() << kINFO <<
Endl;
3353 <<
"================================================================"
3357 <<
"H e l p f o r M V A m e t h o d [ " << GetName() <<
" ] :"
3362 Log() <<
"Help for MVA method [ " << GetName() <<
" ] :" <<
Endl;
3370 Log() <<
"<Suppress this message by specifying \"!H\" in the booking option>" <<
Endl;
3372 <<
"================================================================"
3379 Log() <<
"# End of Message___" <<
Endl;
3399 else retval = fEffS->GetBinContent( fEffS->FindBin(
theCut ) );
3408 if (
theCut-fXmin < eps)
retval = (GetCutOrientation() == kPositive) ? 1.0 : 0.0;
3409 else if (fXmax-
theCut < eps)
retval = (GetCutOrientation() == kPositive) ? 0.0 : 1.0;
3422 if (GetTransformationHandler().GetTransformationList().GetEntries() <= 0) {
3423 return (Data()->GetEventCollection(
type));
3430 if (fEventCollections.at(idx) == 0) {
3431 fEventCollections.at(idx) = &(Data()->GetEventCollection(
type));
3432 fEventCollections.at(idx) = GetTransformationHandler().CalcTransformations(*(fEventCollections.at(idx)),
kTRUE);
3434 return *(fEventCollections.at(idx));
3442 UInt_t a = GetTrainingTMVAVersionCode() & 0xff0000;
a>>=16;
3443 UInt_t b = GetTrainingTMVAVersionCode() & 0x00ff00;
b>>=8;
3444 UInt_t c = GetTrainingTMVAVersionCode() & 0x0000ff;
3454 UInt_t a = GetTrainingROOTVersionCode() & 0xff0000;
a>>=16;
3455 UInt_t b = GetTrainingROOTVersionCode() & 0x00ff00;
b>>=8;
3456 UInt_t c = GetTrainingROOTVersionCode() & 0x0000ff;
const Bool_t Use_Splines_for_Eff_
const Int_t NBIN_HIST_HIGH
#define ROOT_VERSION_CODE
bool Bool_t
Boolean (0=false, 1=true) (bool)
int Int_t
Signed integer 4 bytes (int)
char Char_t
Character 1 byte (char)
float Float_t
Float 4 bytes (float)
double Double_t
Double 8 bytes.
long long Long64_t
Portable signed long integer 8 bytes.
ROOT::Detail::TRangeCast< T, true > TRangeDynCast
TRangeDynCast is an adapter class that allows the typed iteration through a TCollection.
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void data
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t r
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t result
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t Int_t Int_t Window_t TString Int_t GCValues_t GetPrimarySelectionOwner GetDisplay GetScreen GetColormap GetNativeEvent const char const char dpyName wid window const char font_name cursor keysym reg const char only_if_exist regb h Point_t winding char text const char depth char const char Int_t count const char ColorStruct_t color const char Pixmap_t Pixmap_t PictureAttributes_t attr const char char ret_data h unsigned char height h length
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void value
Option_t Option_t TPoint TPoint const char y2
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t Int_t Int_t Window_t TString Int_t GCValues_t GetPrimarySelectionOwner GetDisplay GetScreen GetColormap GetNativeEvent const char const char dpyName wid window const char font_name cursor keysym reg const char only_if_exist regb h Point_t winding char text const char depth char const char Int_t count const char ColorStruct_t color const char Pixmap_t Pixmap_t PictureAttributes_t attr const char char ret_data h unsigned char height h Atom_t Int_t ULong_t ULong_t unsigned char prop_list Atom_t Atom_t Atom_t Time_t type
Option_t Option_t TPoint TPoint const char y1
TMatrixT< Double_t > TMatrixD
char * Form(const char *fmt,...)
Formats a string in a circular formatting buffer.
R__EXTERN TSystem * gSystem
#define TMVA_VERSION_CODE
const_iterator begin() const
const_iterator end() const
Class to manage histogram axis.
This class stores the date and time with a precision of one second in an unsigned 32 bit word (950130...
TDirectory::TContext keeps track and restore the current directory.
Describe directory structure in memory.
A ROOT file is an on-disk file, usually with extension .root, that stores objects in a file-system-li...
static TFile * Open(const char *name, Option_t *option="", const char *ftitle="", Int_t compress=ROOT::RCompressionSetting::EDefaults::kUseCompiledDefault, Int_t netopt=0)
Create / open a file.
A TGraph is an object made of two arrays X and Y with npoints each.
1-D histogram with a double per channel (see TH1 documentation)
1-D histogram with a float per channel (see TH1 documentation)
TH1 is the base class of all histogram classes in ROOT.
virtual Double_t GetMean(Int_t axis=1) const
For axis = 1,2 or 3 returns the mean value of the histogram along X,Y or Z axis.
virtual Int_t GetQuantiles(Int_t n, Double_t *xp, const Double_t *p=nullptr)
Compute Quantiles for this histogram.
2-D histogram with a float per channel (see TH1 documentation)
Int_t Fill(Double_t) override
Invalid Fill method.
Class that contains all the information of a class.
TString fWeightFileExtension
Int_t fMaxNumOfAllowedVariables
VariablePlotting & GetVariablePlotting()
class TMVA::Config::VariablePlotting fVariablePlotting
MsgLogger * fLogger
! message logger
Class that contains all the data information.
Class that contains all the data information.
static void SetIsTraining(Bool_t)
when this static function is called, it sets the flag whether events with negative event weight shoul...
static void SetIgnoreNegWeightsInTraining(Bool_t)
when this static function is called, it sets the flag whether events with negative event weight shoul...
Interface for all concrete MVA method implementations.
void Init(std::vector< TString > &graphTitles)
This function gets some title and it creates a TGraph for every title.
IPythonInteractive()
standard constructor
~IPythonInteractive()
standard destructor
void ClearGraphs()
This function sets the point number to 0 for all graphs.
void AddPoint(Double_t x, Double_t y1, Double_t y2)
This function is used only in 2 TGraph case, and it will add new data points to graphs.
Virtual base Class for all MVA method.
TDirectory * MethodBaseDir() const
returns the ROOT directory where all instances of the corresponding MVA method are stored
virtual Double_t GetKSTrainingVsTest(Char_t SorB, TString opt="X")
MethodBase(const TString &jobName, Types::EMVA methodType, const TString &methodTitle, DataSetInfo &dsi, const TString &theOption="")
standard constructor
void PrintHelpMessage() const override
prints out method-specific help method
virtual std::vector< Float_t > GetAllMulticlassValues()
Get all multi-class values.
virtual Double_t GetSeparation(TH1 *, TH1 *) const
compute "separation" defined as
const char * GetName() const override
void ReadClassesFromXML(void *clsnode)
read number of classes from XML
void SetWeightFileDir(TString fileDir)
set directory of weight file
void WriteStateToXML(void *parent) const
general method used in writing the header of the weight files where the used variables,...
void DeclareBaseOptions()
define the options (their key words) that can be set in the option string here the options valid for ...
virtual void TestRegression(Double_t &bias, Double_t &biasT, Double_t &dev, Double_t &devT, Double_t &rms, Double_t &rmsT, Double_t &mInf, Double_t &mInfT, Double_t &corr, Types::ETreeType type)
calculate <sum-of-deviation-squared> of regression output versus "true" value from test sample
virtual void DeclareCompatibilityOptions()
options that are used ONLY for the READER to ensure backward compatibility they are hence without any...
virtual Double_t GetSignificance() const
compute significance of mean difference
virtual Double_t GetProba(const Event *ev)
virtual TMatrixD GetMulticlassConfusionMatrix(Double_t effB, Types::ETreeType type)
Construct a confusion matrix for a multiclass classifier.
virtual void WriteEvaluationHistosToFile(Types::ETreeType treetype)
writes all MVA evaluation histograms to file
virtual void TestMulticlass()
test multiclass classification
const std::vector< TMVA::Event * > & GetEventCollection(Types::ETreeType type)
returns the event collection (i.e.
virtual std::vector< Double_t > GetDataMvaValues(DataSet *data=nullptr, Long64_t firstEvt=0, Long64_t lastEvt=-1, Bool_t logProgress=false)
get all the MVA values for the events of the given Data type
void SetupMethod()
setup of methods
TDirectory * BaseDir() const
returns the ROOT directory where info/histograms etc of the corresponding MVA method instance are sto...
virtual std::vector< Float_t > GetMulticlassEfficiency(std::vector< std::vector< Float_t > > &purity)
void AddInfoItem(void *gi, const TString &name, const TString &value) const
xml writing
virtual void AddClassifierOutputProb(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
virtual Double_t GetEfficiency(const TString &, Types::ETreeType, Double_t &err)
fill background efficiency (resp.
TString GetTrainingTMVAVersionString() const
calculates the TMVA version string from the training version code on the fly
void Statistics(Types::ETreeType treeType, const TString &theVarName, Double_t &, Double_t &, Double_t &, Double_t &, Double_t &, Double_t &)
calculates rms, mean, xmin, xmax of the event variable; this can be either done for the variables as th...
Bool_t GetLine(std::istream &fin, char *buf)
reads one line from the input stream checks for certain keywords and interprets the line if keywords ...
void ProcessSetup()
process all options the "CheckForUnusedOptions" is done in an independent call, since it may be overr...
virtual std::vector< Double_t > GetMvaValues(Long64_t firstEvt=0, Long64_t lastEvt=-1, Bool_t logProgress=false)
get all the MVA values for the events of the current Data type
virtual Bool_t IsSignalLike()
uses a pre-set cut on the MVA output (SetSignalReferenceCut and SetSignalReferenceCutOrientation) for...
virtual ~MethodBase()
destructor
void WriteMonitoringHistosToFile() const override
write special monitoring histograms to file — dummy implementation here
virtual Double_t GetMaximumSignificance(Double_t SignalEvents, Double_t BackgroundEvents, Double_t &optimal_significance_value) const
plot significance, , curve for given number of signal and background events; returns cut for maximum ...
virtual Double_t GetTrainingEfficiency(const TString &)
void SetWeightFileName(TString)
set the weight file name (depreciated)
TString GetWeightFileName() const
retrieve weight file name
virtual void TestClassification()
initialization
void AddOutput(Types::ETreeType type, Types::EAnalysisType analysisType)
virtual void AddRegressionOutput(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
void InitBase()
default initialization called by all constructors
virtual void GetRegressionDeviation(UInt_t tgtNum, Types::ETreeType type, Double_t &stddev, Double_t &stddev90Percent) const
void ReadStateFromXMLString(const char *xmlstr)
for reading from memory
void MakeClass(const TString &classFileName=TString("")) const override
create reader class for method (classification only at present)
void CreateMVAPdfs()
Create PDFs of the MVA output variables.
TString GetTrainingROOTVersionString() const
calculates the ROOT version string from the training version code on the fly
virtual Double_t GetValueForRoot(Double_t)
returns efficiency as function of cut
void ReadStateFromFile()
Function to write options and weights to file.
void WriteVarsToStream(std::ostream &tf, const TString &prefix="") const
write the list of variables (name, min, max) for a given data transformation method to the stream
void ReadVarsFromStream(std::istream &istr)
Read the variables (name, min, max) for a given data transformation method from the stream.
void ReadSpectatorsFromXML(void *specnode)
read spectator info from XML
void SetTestvarName(const TString &v="")
void ReadVariablesFromXML(void *varnode)
read variable info from XML
virtual std::map< TString, Double_t > OptimizeTuningParameters(TString fomType="ROCIntegral", TString fitType="FitGA")
call the Optimizer with the set of parameters and ranges that are meant to be tuned.
virtual std::vector< Float_t > GetMulticlassTrainingEfficiency(std::vector< std::vector< Float_t > > &purity)
void WriteStateToStream(std::ostream &tf) const
general method used in writing the header of the weight files where the used variables,...
virtual Double_t GetRarity(Double_t mvaVal, Types::ESBType reftype=Types::kBackground) const
compute rarity:
virtual void SetTuneParameters(std::map< TString, Double_t > tuneParameters)
set the tuning parameters according to the argument This is just a dummy .
void ReadStateFromStream(std::istream &tf)
read the header from the weight files of the different MVA methods
void AddVarsXMLTo(void *parent) const
write variable info to XML
Double_t GetMvaValue(Double_t *errLower=nullptr, Double_t *errUpper=nullptr) override=0
void AddTargetsXMLTo(void *parent) const
write target info to XML
void ReadTargetsFromXML(void *tarnode)
read target info from XML
void ProcessBaseOptions()
the option string is decoded, for available options see "DeclareOptions"
void ReadStateFromXML(void *parent)
virtual std::vector< Float_t > GetAllRegressionValues()
Get all regression values in one call.
void NoErrorCalc(Double_t *const err, Double_t *const errUpper)
void WriteStateToFile() const
write options and weights to file note that each one text file for the main configuration information...
void AddClassesXMLTo(void *parent) const
write class info to XML
virtual void AddClassifierOutput(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
void AddSpectatorsXMLTo(void *parent) const
write spectator info to XML
virtual Double_t GetROCIntegral(TH1D *histS, TH1D *histB) const
calculate the area (integral) under the ROC curve as a overall quality measure of the classification
virtual void AddMulticlassOutput(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
virtual void CheckSetup()
check may be overridden by derived class (sometimes, eg, fitters are used which can only be implement...
void SetSource(const std::string &source)
PDF wrapper for histograms; uses user-defined spline interpolation.
Class that is the base-class for a vector of result.
Class which takes the results of a multiclass classification.
Class that is the base-class for a vector of result.
Class that is the base-class for a vector of result.
Root finding using Brents algorithm (translated from CERNLIB function RZERO)
Linear interpolation of TGraph.
Timing information for training and evaluation of MVA methods.
Singleton class for Global types used by TMVA.
@ kSignal
Never change this number - it is elsewhere assumed to be zero !
Class for type info of MVA input variable.
A TMultiGraph is a collection of TGraph (or derived) objects.
Collectable string class.
void ToLower()
Change string to lower-case.
Int_t Atoi() const
Return integer value of string.
TSubString Strip(EStripType s=kTrailing, char c=' ') const
Return a substring of self stripped at beginning and/or end.
const char * Data() const
static TString Format(const char *fmt,...)
Static method which formats a string using a printf style format descriptor and return a TString.
Ssiz_t Index(const char *pat, Ssiz_t i=0, ECaseCompare cmp=kExact) const
virtual const char * GetBuildNode() const
Return the build node name.
virtual int mkdir(const char *name, Bool_t recursive=kFALSE)
Make a file system directory.
virtual const char * WorkingDirectory()
Return working directory.
virtual UserGroup_t * GetUserInfo(Int_t uid)
Returns all user info in the UserGroup_t structure.
void SaveDoc(XMLDocPointer_t xmldoc, const char *filename, Int_t layout=1)
store document content to file if layout<=0, no any spaces or newlines will be placed between xmlnode...
void FreeDoc(XMLDocPointer_t xmldoc)
frees allocated document data and deletes document itself
XMLNodePointer_t DocGetRootElement(XMLDocPointer_t xmldoc)
returns root node of document
XMLDocPointer_t NewDoc(const char *version="1.0")
creates new xml document with provided version
XMLDocPointer_t ParseFile(const char *filename, Int_t maxbuf=100000)
Parses content of file and tries to produce xml structures.
XMLDocPointer_t ParseString(const char *xmlstring)
parses content of string and tries to produce xml structures
void DocSetRootElement(XMLDocPointer_t xmldoc, XMLNodePointer_t xmlnode)
set main (root) node for document
void CreateVariableTransforms(const TString &trafoDefinition, TMVA::DataSetInfo &dataInfo, TMVA::TransformationHandler &transformationHandler, TMVA::MsgLogger &log)
MsgLogger & Endl(MsgLogger &ml)
Short_t Max(Short_t a, Short_t b)
Returns the largest of a and b.
Double_t Sqrt(Double_t x)
Returns the square root of x.
Short_t Min(Short_t a, Short_t b)
Returns the smallest of a and b.
Short_t Abs(Short_t d)
Returns the absolute value of parameter Short_t d.