#define MinNoTrainingEvents 10

auto roc = GetROC(iClass, type);
auto inte = roc->GetROCIntegral();

fROCCurve = new ROCCurve(fMvaTest[iClass]);
fROCCurve = new ROCCurve(fMvaTrain[iClass]);

TString hLine = "--------------------------------------------------- :";
fLogger << kINFO << hLine << Endl;
fLogger << kINFO << "DataSet MVA :" << Endl;
fLogger << kINFO << "Name: Method/Title: ROC-integ :" << Endl;
fLogger << kINFO << hLine << Endl;
fLogger << kINFO << Form("%-20s %-15s %#1.3f :", fDataLoaderName.Data(),
fLogger << kINFO << hLine << Endl;

TGraph *roc = GetROC(iClass, type)->GetROCCurve();
roc->SetName(Form("%s/%s", GetMethodName().Data(), GetMethodTitle().Data()));
roc->SetTitle(Form("%s/%s", GetMethodName().Data(), GetMethodTitle().Data()));

return fMethod.GetValue<TString>("MethodName") == methodname &&
       fMethod.GetValue<TString>("MethodTitle") == methodtitle
: TMVA::Envelope("Classification", dataloader, file, options), fAnalysisType(Types::kClassification),

: TMVA::Envelope("Classification", dataloader, NULL, options), fAnalysisType(Types::kClassification),

for (auto m : fIMethods) {

for (auto &meth : fMethods) {
   if (meth.GetValue<TString>("MethodName") == methodname && meth.GetValue<TString>("MethodTitle") == methodtitle)
      return meth.GetValue<TString>("MethodOptions");

for (auto &meth : fMethods) {
   GetMethod(meth.GetValue<TString>("MethodName"), meth.GetValue<TString>("MethodTitle"));
fWorkers.SetNWorkers(fJobs);

auto methodname = fMethods[workerID].GetValue<TString>("MethodName");
auto methodtitle = fMethods[workerID].GetValue<TString>("MethodTitle");
auto meth = GetMethod(methodname, methodtitle);
if (!IsSilentFile()) {
   auto fname = Form(".%s%s%s.root", fDataLoader->GetName(), methodname.Data(), methodtitle.Data());
   auto f = new TFile(fname, "RECREATE");
   f->mkdir(fDataLoader->GetName());
TrainMethod(methodname, methodtitle);
TestMethod(methodname, methodtitle);
if (!IsSilentFile()) {
return GetResults(methodname, methodtitle);

fResults = fWorkers.Map(executor, ROOT::TSeqI(fMethods.size()));

TString hLine = "--------------------------------------------------- :";
Log() << kINFO << hLine << Endl;
Log() << kINFO << "DataSet MVA :" << Endl;
Log() << kINFO << "Name: Method/Title: ROC-integ :" << Endl;
Log() << kINFO << hLine << Endl;
for (auto &r : fResults) {
   Log() << kINFO << Form("%-20s %-15s %#1.3f :", r.GetDataLoaderName().Data(),
                          Form("%s/%s", r.GetMethodName().Data(), r.GetMethodTitle().Data()), r.GetROCIntegral())
Log() << kINFO << hLine << Endl;

Log() << kINFO << "-----------------------------------------------------" << Endl;
Log() << kHEADER << "Evaluation done." << Endl << Endl;
Log() << kINFO << Form("Jobs = %d Real Time = %lf ", fJobs, fTimer.RealTime()) << Endl;
Log() << kINFO << "-----------------------------------------------------" << Endl;
Log() << kINFO << "Evaluation done." << Endl;
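The preceding fragments train and test every booked method through a worker executor and then print a per-method ROC summary. A minimal driver sketch, not part of the listing; the DataLoader configuration is assumed to exist already, and the "Jobs" option name for multi-process evaluation is an assumption inferred from the fJobs/fWorkers usage above:

#include "TFile.h"
#include "TMVA/DataLoader.h"
#include "TMVA/Types.h"
#include "TMVA/Classification.h"

// Sketch only: book one method and run the full chain on an already configured DataLoader.
void RunClassification(TMVA::DataLoader *dataloader)
{
   TFile *out = TFile::Open("ClassificationOutput.root", "RECREATE");
   // "Jobs=2" (multi-process evaluation) is an assumed option name, inferred from fJobs above.
   TMVA::Experimental::Classification cl(dataloader, out, "Jobs=2");
   cl.BookMethod(TMVA::Types::kBDT, "BDT", "NTrees=100"); // booking is inherited from TMVA::Envelope
   cl.Evaluate();                                         // trains, tests and prints the ROC summary
}

When the output file is not silent, each worker writes a temporary per-method ROOT file, as the Form(".%s%s%s.root", ...) lines above show.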
for (auto &meth : fMethods) {
   TrainMethod(meth.GetValue<TString>("MethodName"), meth.GetValue<TString>("MethodTitle"));
auto method = GetMethod(methodname, methodtitle);
   << Form("Trying to train method %s %s that may not be booked.", methodname.Data(), methodtitle.Data())
Log() << kHEADER << gTools().Color("bold") << Form("Training method %s %s", methodname.Data(), methodtitle.Data())

method->DataInfo().GetNClasses() < 2)
   Log() << kFATAL << "You want to do classification training, but specified less than two classes." << Endl;

Log() << kWARNING << "Method " << method->GetMethodName() << " not trained (training tree has fewer entries ["

Log() << kHEADER << "Train method: " << method->GetMethodName() << " for Classification" << Endl << Endl;
method->TrainMethod();
Log() << kHEADER << "Training finished" << Endl << Endl;
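The same two steps can also be driven individually for a single booked method. A short sketch, not from the listing, assuming the method was booked as type "BDT" with title "BDT":

#include "TMVA/Classification.h"

// Sketch only: drive training and testing of one booked method by (methodname, methodtitle).
void TrainAndTestOne(TMVA::Experimental::Classification &cl)
{
   cl.TrainMethod("BDT", "BDT"); // warns and skips if the training tree has too few entries (cf. MinNoTrainingEvents)
   cl.TestMethod("BDT", "BDT");
}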
if (!HasMethod(methodname, methodtitle)) {
   std::cout << methodname << " " << methodtitle << std::endl;
   Log() << kERROR << "Trying to get method not booked." << Endl;

if (HasMethodObject(methodname, methodtitle, index)) {
   return dynamic_cast<MethodBase *>(fIMethods[index]);

if (GetDataLoaderDataInput().GetEntries() <=
   Log() << kFATAL << "No input data for the training provided!" << Endl;

Log() << kHEADER << "Loading booked method: " << gTools().Color("bold") << methodname << " " << methodtitle

TString moptions = GetMethodOptions(methodname, methodtitle);

conf->DeclareOptionRef(boostNum = 0, "Boost_num", "Number of times the classifier will be boosted");
conf->ParseOptions();

if (fModelPersistence) {
   fFileDir = fDataLoader->GetName();

GetDataLoaderDataSetInfo(), moptions);

Log() << kDEBUG << "Boost Number is " << boostNum << " > 0: train boosted classifier" << Endl;
Log() << kFATAL << "Method with type kBoost cannot be cast to MethodBoost. /Classification" << Endl;

if (fModelPersistence)
methBoost->SetFile(fFile.get());

Log() << kFATAL << "Method with type kCategory cannot be cast to MethodCategory. /Classification" << Endl;
if (fModelPersistence)

if (!method->HasAnalysisType(fAnalysisType, GetDataLoaderDataSetInfo().GetNClasses(),
                             GetDataLoaderDataSetInfo().GetNTargets())) {
   Log() << kWARNING << "Method " << method->GetMethodTypeName() << " is not capable of handling ";
   Log() << "classification with " << GetDataLoaderDataSetInfo().GetNClasses() << " classes." << Endl;

if (fModelPersistence)

fIMethods.push_back(method);

if (fIMethods.empty())
for (UInt_t i = 0; i < fIMethods.size(); i++) {
   auto methbase = dynamic_cast<MethodBase *>(fIMethods[i]);
   if (methbase->GetMethodTypeName() == methodname && methbase->GetMethodName() == methodtitle) {

for (auto &meth : fMethods) {
   TestMethod(meth.GetValue<TString>("MethodName"), meth.GetValue<TString>("MethodTitle"));
auto method = GetMethod(methodname, methodtitle);
   << Form("Trying to test method %s %s that may not be booked.", methodname.Data(), methodtitle.Data())
Log() << kHEADER << "Test method: " << method->GetMethodName() << " for Classification"

Int_t nmeth_used[2] = {0, 0};

std::vector<std::vector<TString>> mname(2);
std::vector<std::vector<Double_t>> sig(2), sep(2), roc(2);
std::vector<std::vector<Double_t>> eff01(2), eff10(2), eff30(2), effArea(2);
std::vector<std::vector<Double_t>> eff01err(2), eff10err(2), eff30err(2);
std::vector<std::vector<Double_t>> trainEff01(2), trainEff10(2), trainEff30(2);

method->SetFile(fFile.get());
method->SetSilentFile(IsSilentFile());

if (!IsCutsMethod(method))
   methodNoCuts = method;

Log() << kHEADER << "Evaluate classifier: " << method->GetMethodName() << Endl << Endl;
isel = (method->GetMethodTypeName().Contains("Variable")) ? 1 : 0;

method->TestClassification();

mname[isel].push_back(method->GetMethodName());
sig[isel].push_back(method->GetSignificance());
sep[isel].push_back(method->GetSeparation());
roc[isel].push_back(method->GetROCIntegral());

eff01[isel].push_back(method->GetEfficiency("Efficiency:0.01", Types::kTesting, err));
eff01err[isel].push_back(err);
eff10[isel].push_back(method->GetEfficiency("Efficiency:0.10", Types::kTesting, err));
eff10err[isel].push_back(err);
eff30[isel].push_back(method->GetEfficiency("Efficiency:0.30", Types::kTesting, err));
eff30err[isel].push_back(err);
effArea[isel].push_back(method->GetEfficiency("", Types::kTesting, err));

trainEff01[isel].push_back(method->GetTrainingEfficiency("Efficiency:0.01"));
trainEff10[isel].push_back(method->GetTrainingEfficiency("Efficiency:0.10"));
trainEff30[isel].push_back(method->GetTrainingEfficiency("Efficiency:0.30"));

if (!IsSilentFile()) {
   Log() << kDEBUG << "\tWrite evaluation histograms to file" << Endl;

for (Int_t k = 0; k < 2; k++) {
   std::vector<std::vector<Double_t>> vtemp;
   vtemp.push_back(effArea[k]);
   vtemp.push_back(eff10[k]);
   vtemp.push_back(eff01[k]);
   vtemp.push_back(eff30[k]);
   vtemp.push_back(eff10err[k]);
   vtemp.push_back(eff01err[k]);
   vtemp.push_back(eff30err[k]);
   vtemp.push_back(trainEff10[k]);
   vtemp.push_back(trainEff01[k]);
   vtemp.push_back(trainEff30[k]);
   vtemp.push_back(sig[k]);
   vtemp.push_back(sep[k]);
   vtemp.push_back(roc[k]);
   std::vector<TString> vtemps = mname[k];

   effArea[k] = vtemp[0];
   eff10err[k] = vtemp[4];
   eff01err[k] = vtemp[5];
   eff30err[k] = vtemp[6];
   trainEff10[k] = vtemp[7];
   trainEff01[k] = vtemp[8];
   trainEff30[k] = vtemp[9];

const Int_t nmeth = methodNoCuts == NULL ? 0 : 1;
const Int_t nvar = method->fDataSetInfo.GetNVariables();

std::vector<Double_t> rvec;
std::vector<TString> *theVars = new std::vector<TString>;
std::vector<ResultsClassification *> mvaRes;
theVars->back().ReplaceAll("MVA_", "");

DataSet *defDs = method->fDataSetInfo.GetDataSet();

for (Int_t im = 0; im < nmeth; im++) {
   Log() << kWARNING << "Found NaN return value in event: " << ievt << " for method \""

for (Int_t iv = 0; iv < nvar; iv++)
if (method->fDataSetInfo.IsSignal(ev)) {

for (Int_t im = 0; im < nmeth; im++) {
   for (Int_t jm = im; jm < nmeth; jm++) {
      if ((dvec[im] - rvec[im]) * (dvec[jm] - rvec[jm]) > 0) {

if (corrMatS != 0 && corrMatB != 0) {
   for (Int_t im = 0; im < nmeth; im++) {
      for (Int_t jm = 0; jm < nmeth; jm++) {
         mvaMatS(im, jm) = (*corrMatS)(im, jm);
         mvaMatB(im, jm) = (*corrMatB)(im, jm);

std::vector<TString> theInputVars;
for (Int_t iv = 0; iv < nvar; iv++) {
   theInputVars.push_back(method->fDataSetInfo.GetVariableInfo(iv).GetLabel());
   for (Int_t jm = 0; jm < nmeth; jm++) {
      varmvaMatS(iv, jm) = (*corrMatS)(nmeth + iv, jm);
      varmvaMatB(iv, jm) = (*corrMatB)(nmeth + iv, jm);

Log() << kINFO << Endl;
Log() << kINFO << Form("Dataset[%s] : ", method->fDataSetInfo.GetName())
      << "Inter-MVA correlation matrix (signal):" << Endl;
Log() << kINFO << Endl;
Log() << kINFO << Form("Dataset[%s] : ", method->fDataSetInfo.GetName())
      << "Inter-MVA correlation matrix (background):" << Endl;
Log() << kINFO << Endl;
Log() << kINFO << Form("Dataset[%s] : ", method->fDataSetInfo.GetName())
      << "Correlations between input variables and MVA response (signal):" << Endl;
Log() << kINFO << Endl;
Log() << kINFO << Form("Dataset[%s] : ", method->fDataSetInfo.GetName())
      << "Correlations between input variables and MVA response (background):" << Endl;
Log() << kINFO << Endl;
Log() << kWARNING << Form("Dataset[%s] : ", method->fDataSetInfo.GetName())
      << "<TestAllMethods> cannot compute correlation matrices" << Endl;
Log() << kINFO << Form("Dataset[%s] : ", method->fDataSetInfo.GetName())
      << "The following \"overlap\" matrices contain the fraction of events for which " << Endl;
Log() << kINFO << Form("Dataset[%s] : ", method->fDataSetInfo.GetName())
      << "the MVAs 'i' and 'j' have returned consistent answers about \"signal-likeness\"" << Endl;
Log() << kINFO << Form("Dataset[%s] : ", method->fDataSetInfo.GetName())
      << "An event is signal-like if its MVA output exceeds the following value:" << Endl;
Log() << kINFO << Form("Dataset[%s] : ", method->fDataSetInfo.GetName())
      << "which corresponds to the working point: eff(signal) = 1 - eff(background)" << Endl;
Log() << kINFO << Form("Dataset[%s] : ", method->fDataSetInfo.GetName())
      << "Note: no correlations and overlap with cut method are provided at present" << Endl;

Log() << kINFO << Endl;
Log() << kINFO << Form("Dataset[%s] : ", method->fDataSetInfo.GetName())
      << "Inter-MVA overlap matrix (signal):" << Endl;
Log() << kINFO << Endl;
Log() << kINFO << Form("Dataset[%s] : ", method->fDataSetInfo.GetName())
      << "Inter-MVA overlap matrix (background):" << Endl;
auto &fResult = GetResults(methodname, methodtitle);

Log().EnableOutput();

TString hLine = "------------------------------------------------------------------------------------------"
                "-------------------------";
Log() << kINFO << "Evaluation results ranked by best signal efficiency and purity (area)" << Endl;
Log() << kINFO << hLine << Endl;
Log() << kINFO << "DataSet MVA " << Endl;
Log() << kINFO << "Name: Method: ROC-integ" << Endl;
Log() << kDEBUG << hLine << Endl;
for (Int_t k = 0; k < 2; k++) {
   if (k == 1 && nmeth_used[k] > 0) {
      Log() << kINFO << hLine << Endl;
      Log() << kINFO << "Input Variables: " << Endl << hLine << Endl;
   for (Int_t i = 0; i < nmeth_used[k]; i++) {
      TString datasetName = fDataLoader->GetName();
      TString methodName = mname[k][i];

      std::vector<Bool_t> *mvaResType = dynamic_cast<ResultsClassification *>(results)->GetValueVectorTypes();
      if (mvaResType->size() != 0) {
         rocIntegral = GetROCIntegral(methodname, methodtitle);
      if (sep[k][i] < 0 || sig[k][i] < 0) {
         fResult.fROCIntegral = effArea[k][i];
            << Form("%-13s %-15s: %#1.3f", fDataLoader->GetName(), methodName.Data(), fResult.fROCIntegral)
         fResult.fROCIntegral = rocIntegral;
         Log() << kINFO << Form("%-13s %-15s: %#1.3f", datasetName.Data(), methodName.Data(), rocIntegral)

Log() << kINFO << hLine << Endl;
Log() << kINFO << Endl;
Log() << kINFO << "Testing efficiency compared to training efficiency (overtraining check)" << Endl;
Log() << kINFO << hLine << Endl;
      << "DataSet MVA Signal efficiency: from test sample (from training sample) "
Log() << kINFO << "Name: Method: @B=0.01 @B=0.10 @B=0.30 "
Log() << kINFO << hLine << Endl;
for (Int_t k = 0; k < 2; k++) {
   if (k == 1 && nmeth_used[k] > 0) {
      Log() << kINFO << hLine << Endl;
      Log() << kINFO << "Input Variables: " << Endl << hLine << Endl;
   for (Int_t i = 0; i < nmeth_used[k]; i++) {
      mname[k][i].ReplaceAll("Variable_", "");
      Log() << kINFO << Form("%-20s %-15s: %#1.3f (%#1.3f) %#1.3f (%#1.3f) %#1.3f (%#1.3f)",
                             method->fDataSetInfo.GetName(), (const char *)mname[k][i], eff01[k][i],
                             trainEff01[k][i], eff10[k][i], trainEff10[k][i], eff30[k][i], trainEff30[k][i])

Log() << kINFO << hLine << Endl;
Log() << kINFO << Endl;

if (gTools().CheckForSilentOption(GetOptions()))
   Log().InhibitOutput();
} else if (IsCutsMethod(method)) {
   for (Int_t k = 0; k < 2; k++) {
      for (Int_t i = 0; i < nmeth_used[k]; i++) {
         if (sep[k][i] < 0 || sig[k][i] < 0) {
            fResult.fROCIntegral = effArea[k][i];

if (IsCutsMethod(method)) {
   fResult.fIsCuts = kTRUE;

auto rocCurveTest = GetROC(methodname, methodtitle, 0, Types::kTesting);
fResult.fMvaTest[0] = rocCurveTest->GetMvas();
fResult.fROCIntegral = GetROCIntegral(methodname, methodtitle);

TString className = method->DataInfo().GetClassInfo(0)->GetName();
fResult.fClassNames.push_back(className);

if (!IsSilentFile()) {
   RootBaseDir()->cd(method->fDataSetInfo.GetName());
if (fResults.size() == 0)
   Log() << kFATAL << "No Classification results available" << Endl;

for (auto &result : fResults) {
   if (result.IsMethod(methodname, methodtitle))

result.fMethod["MethodName"] = methodname;
result.fMethod["MethodTitle"] = methodtitle;
fResults.push_back(result);
return fResults.back();

Log() << kERROR << Form("Given class number (iClass = %i) does not exist. There are %i classes in dataset.",

std::vector<Bool_t> *mvaResTypes = dynamic_cast<ResultsClassification *>(results)->GetValueVectorTypes();
std::vector<Float_t> mvaResWeights;
mvaResWeights.reserve(eventCollection.size());
for (auto ev : eventCollection) {
   mvaResWeights.push_back(ev->GetWeight());
rocCurve = new TMVA::ROCCurve(*mvaRes, *mvaResTypes, mvaResWeights);

std::vector<Float_t> mvaRes;
std::vector<Bool_t> mvaResTypes;
std::vector<Float_t> mvaResWeights;
std::vector<std::vector<Float_t>> *rawMvaRes = dynamic_cast<ResultsMulticlass *>(results)->GetValueVector();
mvaRes.reserve(rawMvaRes->size());
for (auto item : *rawMvaRes) {
   mvaRes.push_back(item[iClass]);
mvaResTypes.reserve(eventCollection.size());
mvaResWeights.reserve(eventCollection.size());
for (auto ev : eventCollection) {
   mvaResTypes.push_back(ev->GetClass() == iClass);
   mvaResWeights.push_back(ev->GetWeight());
rocCurve = new TMVA::ROCCurve(mvaRes, mvaResTypes, mvaResWeights);
return GetROC(GetMethod(methodname, methodtitle), iClass, type);

TMVA::ROCCurve *rocCurve = GetROC(methodname, methodtitle, iClass);
   << Form("ROCCurve object was not created for MethodName = %s, MethodTitle = %s with Dataset = %s",
           methodname.Data(), methodtitle.Data(), fDataLoader->GetName())
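For reference, a short sketch (not from the listing) of the corresponding user-side ROC queries; the method is assumed to have been booked and tested as "BDT"/"BDT":

#include "TMVA/Types.h"
#include "TMVA/ROCCurve.h"
#include "TMVA/Classification.h"

// Sketch only: ROC access for a single tested method, assumed booked as "BDT"/"BDT".
Double_t QueryROC(TMVA::Experimental::Classification &cl)
{
   TMVA::ROCCurve *curve = cl.GetROC("BDT", "BDT", /*iClass=*/0, TMVA::Types::kTesting);
   if (!curve) return -1.;                  // GetROCIntegral logs an error in this case
   return cl.GetROCIntegral("BDT", "BDT");  // integral of the test-sample ROC curve
}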
while ((key = (TKey *)nextkey())) {
CopyFrom(subdir, file);
TTree *newT = T->CloneTree(-1, "fast");

auto dsdir = fFile->mkdir(fDataLoader->GetName());
TTree *TrainTree = 0;
TTree *TestTree = 0;

for (UInt_t i = 0; i < fMethods.size(); i++) {
   auto methodname = fMethods[i].GetValue<TString>("MethodName");
   auto methodtitle = fMethods[i].GetValue<TString>("MethodTitle");
   auto fname = Form(".%s%s%s.root", fDataLoader->GetName(), methodname.Data(), methodtitle.Data());

   ifile = new TFile(fname);
   ofile = new TFile(fname);

   auto tmptrain = (TTree *)ds->Get("TrainTree");
   auto tmptest = (TTree *)ds->Get("TestTree");
   fFile->cd(fDataLoader->GetName());

   auto methdirname = Form("Method_%s", methodtitle.Data());
   auto methdir = dsdir->mkdir(methdirname, methdirname);
   auto methdirbase = methdir->mkdir(methodtitle.Data(), methodtitle.Data());
   CopyFrom(mfdirbase, (TFile *)methdirbase);

   TrainTree = tmptrain->CopyTree("");
   auto trainbranch = TrainTree->Branch(methodtitle.Data(), &mva);
   tmptrain->SetBranchAddress(methodtitle.Data(), &mva);
   for (UInt_t ev = 0; ev < entries; ev++) {
      tmptrain->GetEntry(ev);
      trainbranch->Fill();

   auto testbranch = TestTree->Branch(methodtitle.Data(), &mva);
   tmptest->SetBranchAddress(methodtitle.Data(), &mva);
   for (UInt_t ev = 0; ev < entries; ev++) {
      tmptest->GetEntry(ev);

for (UInt_t i = 0; i < fMethods.size(); i++) {
   auto methodname = fMethods[i].GetValue<TString>("MethodName");
   auto methodtitle = fMethods[i].GetValue<TString>("MethodTitle");
   auto fname = Form(".%s%s%s.root", fDataLoader->GetName(), methodname.Data(), methodtitle.Data());