#pragma warning ( disable : 4355 )
// ...
std::cerr << kERROR << "IPythonInteractive::Init: already initialized..." << std::endl;
// ...
for (auto& title : graphTitles) {
   // ...
   fGraphs.back()->SetTitle(title);
   fGraphs.back()->SetName(title);
   fGraphs.back()->SetFillColor(color);
   fGraphs.back()->SetLineColor(color);
   fGraphs.back()->SetMarkerColor(color);
// MethodBase standard constructor: member initialisation list (excerpt)
fAnalysisType                  ( Types::kNoAnalysisType ),
fRegressionReturnVal           ( 0 ),
fMulticlassReturnVal           ( 0 ),
fDataSetInfo                   ( dsi ),
fSignalReferenceCut            ( 0.5 ),
fSignalReferenceCutOrientation ( 1. ),
fVariableTransformType         ( Types::kSignal ),
fJobName                       ( jobName ),
fMethodName                    ( methodTitle ),
fMethodType                    ( methodType ),
// ...
fConstructedFromWeightFile     ( kFALSE ),
// ...
fMethodBaseDir                 ( 0 ),
// ...
fModelPersistence              ( kTRUE ),
// ...
fSplTrainEffBvsS               ( 0 ),
fVarTransformString            ( "None" ),
fTransformationPointer         ( 0 ),
fTransformation                ( dsi, methodTitle ),
// ...
fVerbosityLevelString          ( "Default" ),
// ...
fIgnoreNegWeightsInTraining    ( kFALSE ),
// ...
fBackgroundClass               ( 0 ),
// ...
for (Int_t i = 0; i < 2; i++) {
// ...
if (DataInfo().GetClassInfo("Signal") != 0) {
// ...
if (DataInfo().GetClassInfo("Background") != 0) {
// MethodBase::DeclareBaseOptions
DeclareOptionRef( fVerbose, "V", "Verbose output (short form of \"VerbosityLevel\" below - overrides the latter one)" );
// ...
DeclareOptionRef( fVarTransformString, "VarTransform", "List of variable transformations performed before training, e.g., \"D_Background,P_Signal,G,N_AllClasses\" for: \"Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)\"" );
// ...
   "Events with negative weights are ignored in the training (but are included for testing and performance evaluation)" );
Log() << kFATAL << "<ProcessOptions> Verbosity level type '"
// ...
   "Use signal or background events to derive for variable transformation (the transformation is applied on both types, of course)" );
// MethodBase::OptimizeTuningParameters (base-class default: returns an empty map)
std::map<TString,Double_t> tunedParameters;
tunedParameters.size();   // no-op (kept as in the original source)
return tunedParameters;
// ...
// MethodBase::TrainMethod
<< "Begin training" << Endl;
// ...
<< "\tEnd of training " << Endl;
// ...
<< "Elapsed time for training with " << nEvents << " events: "
// ...
<< "\tCreate MVA output for ";
// MethodBase::GetRegressionDeviation
bool truncate = false;
TH1F* h1 = regRes->QuadraticDeviation( tgtNum, truncate, 1. );
// ...
TH1F* h2 = regRes->QuadraticDeviation( tgtNum, truncate, yq[0] );
// ...
// MethodBase::AddRegressionOutput
regRes->Resize( nEvents );
// ...
regRes->SetValue( vals, ievt );
// ...
<< "Elapsed time for evaluation of " << nEvents << " events: "
// ...
regRes->CreateDeviationHistograms( histNamePrefix );
// ...
// MethodBase::AddMulticlassOutput
resMulticlass->Resize( nEvents );
// ...
resMulticlass->SetValue( vals, ievt );
// ...
<< "Elapsed time for evaluation of " << nEvents << " events: "
// ...
if (errUpper) *errUpper = -1;
// ...
// MethodBase::AddClassifierOutput
std::vector<Double_t> mvaValues = GetMvaValues(0, nEvents, true);
// ...
clRes->SetValue( mvaValues[ievt], ievt );
// ...
// MethodBase::GetMvaValues: clamp the requested event range to the data set
if (firstEvt > lastEvt || lastEvt > nEvents) lastEvt = nEvents;
if (firstEvt < 0) firstEvt = 0;
std::vector<Double_t> values(lastEvt-firstEvt);
// ...
nEvents = values.size();
for (Int_t ievt=firstEvt; ievt<lastEvt; ievt++) {
// ...
if (modulo <= 0) modulo = 1;
// ...
<< "Elapsed time for evaluation of " << nEvents << " events: "
// ...
// MethodBase::AddClassifierOutputProb
mvaProb->Resize( nEvents );
// ...
if (proba < 0) break;
// ...
if (modulo <= 0) modulo = 1;
// ...
<< "Elapsed time for evaluation of " << nEvents << " events: "
// ...
// MethodBase::TestRegression: accumulate weighted sums for bias, deviation and
// the correlation between regression output and true target
bias = 0; biasT = 0; dev = 0; devT = 0; rms = 0; rmsT = 0;
// ...
Double_t m1 = 0, m2 = 0, s1 = 0, s2 = 0, s12 = 0;
// ...
for (Long64_t ievt=0; ievt<nevt; ievt++) {
// ...
   m1 += t*w; s1 += t*t*w;
   m2 += r*w; s2 += r*r*w;
// ...
corr  = s12/sumw - m1*m2;
corr /= TMath::Sqrt( (s1/sumw - m1*m1) * (s2/sumw - m2*m2) );
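// ------------------------------------------------------------------------------
// Illustrative sketch, not part of MethodBase.cxx: the quantity computed above is
// the weighted Pearson correlation between the true targets t and the regression
// responses r with event weights w. Standalone helper (name and signature are
// assumptions for illustration only).
#include <cmath>
#include <vector>

double WeightedCorrelation(const std::vector<double>& t,
                           const std::vector<double>& r,
                           const std::vector<double>& w)
{
   double sumw = 0, m1 = 0, m2 = 0, s1 = 0, s2 = 0, s12 = 0;
   for (size_t i = 0; i < t.size(); ++i) {
      sumw += w[i];
      m1  += t[i]*w[i];   s1 += t[i]*t[i]*w[i];
      m2  += r[i]*w[i];   s2 += r[i]*r[i]*w[i];
      s12 += t[i]*r[i]*w[i];
   }
   m1 /= sumw;   // weighted mean of the targets
   m2 /= sumw;   // weighted mean of the responses
   double corr = s12/sumw - m1*m2;   // weighted covariance
   return corr / std::sqrt( (s1/sumw - m1*m1) * (s2/sumw - m2*m2) );
}
// ------------------------------------------------------------------------------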
TH2F* hist  = new TH2F( "hist",  "hist",  150, xmin, xmax, 100, xmin, xmax );
TH2F* histT = new TH2F( "histT", "histT", 150, xmin, xmax, 100, xmin, xmax );
// ...
for (Long64_t ievt=0; ievt<nevt; ievt++) {
   Float_t d = (rV[ievt] - tV[ievt]);
   hist->Fill( rV[ievt], tV[ievt], wV[ievt] );
   if (d >= devMin && d <= devMax) {
      // ...
      biasT += wV[ievt] * d;
      // ...
      rmsT  += wV[ievt] * d * d;
      histT->Fill( rV[ievt], tV[ievt], wV[ievt] );
// ...
<< " not found in tree" << Endl;
// MethodBase::TestClassification: book and fill the MVA output, probability,
// rarity and high-binned efficiency histograms for signal and background
mvaRes->Store(mva_s, "MVA_S");
mvaRes->Store(mva_b, "MVA_B");
// ...
proba_s = new TH1D( TestvarName + "_Proba_S", TestvarName + "_Proba_S", fNbinsMVAoutput, 0.0, 1.0 );
proba_b = new TH1D( TestvarName + "_Proba_B", TestvarName + "_Proba_B", fNbinsMVAoutput, 0.0, 1.0 );
mvaRes->Store(proba_s, "Prob_S");
mvaRes->Store(proba_b, "Prob_B");
// ...
rarity_s = new TH1D( TestvarName + "_Rarity_S", TestvarName + "_Rarity_S", fNbinsMVAoutput, 0.0, 1.0 );
rarity_b = new TH1D( TestvarName + "_Rarity_B", TestvarName + "_Rarity_B", fNbinsMVAoutput, 0.0, 1.0 );
mvaRes->Store(rarity_s, "Rar_S");
mvaRes->Store(rarity_b, "Rar_B");
// ...
TH1* mva_eff_s = new TH1D( TestvarName + "_S_high", TestvarName + "_S_high", fNbinsH, fXmin, sxmax );
TH1* mva_eff_b = new TH1D( TestvarName + "_B_high", TestvarName + "_B_high", fNbinsH, fXmin, sxmax );
mvaRes->Store(mva_eff_s, "MVA_HIGHBIN_S");
mvaRes->Store(mva_eff_b, "MVA_HIGHBIN_B");
// ...
if (mvaProb) Log() << kINFO << "Also filling probability and rarity histograms (on request)..." << Endl;
// ...
// signal events
mvaResTypes->push_back(kTRUE);
mva_s ->Fill( v, w );
// ...
proba_s->Fill( (*mvaProb)[ievt][0], w );
// ...
mva_eff_s ->Fill( v, w );
// ...
// background events
mvaResTypes->push_back(kFALSE);
mva_b ->Fill( v, w );
// ...
proba_b->Fill( (*mvaProb)[ievt][0], w );
// ...
mva_eff_b ->Fill( v, w );
// MethodBase::WriteStateToStream: plain-text header of the weight file
tf << prefix << "#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
// ...
tf.setf(std::ios::left);
// ...
tf << prefix << "Creator : " << userInfo->fUser << std::endl;
// ...
tf << prefix << std::endl;
// ...
tf << prefix << std::endl << prefix << "#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
// ...
tf << prefix << std::endl;
// ...
tf << prefix << std::endl << prefix << "#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
// ...
tf << prefix << std::endl;
// ...
// MethodBase::WriteStateToXML
if (!parent) return;
// MethodBase::WriteStateToFile / ReadStateFromFile
<< "Creating xml weight file: "
// ...
<< "Reading weight file: "
// ...
#if ROOT_VERSION_CODE >= ROOT_VERSION(5,29,0)
// ...
fb.open(tfname.Data(), std::ios::in);
if (!fb.is_open()) {
   // ...
   << "Unable to open input weight file: " << tfname << Endl;
// ...
std::istream fin(&fb);
// ...
// MethodBase::ReadStateFromXMLString
#if ROOT_VERSION_CODE >= ROOT_VERSION(5,26,00)
// ...
<< xmlstr << " ) is not available for ROOT versions prior to 5.26/00." << Endl;
// MethodBase::ReadStateFromXML: dispatch on the XML node name
if (nodeName=="GeneralInfo") {
   // ...
   while (antypeNode) {
      // ...
      if (name == "TrainingTime")
      // ...
      if (name == "AnalysisType") {
      // ...
      if (name == "TMVA Release" || name == "TMVA") {
      // ...
      if (name == "ROOT Release" || name == "ROOT") {
// ...
else if (nodeName=="Options") {
// ...
else if (nodeName=="Variables") {
// ...
else if (nodeName=="Spectators") {
// ...
else if (nodeName=="Classes") {
// ...
else if (nodeName=="Targets") {
// ...
else if (nodeName=="Transformations") {
// ...
else if (nodeName=="MVAPdfs") {
// ...
else if (nodeName=="Weights") {
// ...
// keep only the part of the method-type string after the last blank
methodType = methodType(methodType.Last(' '), methodType.Length());
// ...
if (methodName == "") methodName = methodType;
// MethodBase::ReadStateFromStream (plain-text weight files)
fin.getline(buf,512);
// ...
fin.getline(buf,512);
// ...
varTrafo->ReadTransformationFromStream(fin, trafo);
// ...
fin.getline(buf,512);
// ...
fin.getline(buf,512);
// ...
fin.getline(buf,512);
// ...
// MethodBase::ReadVarsFromStream: cross-check the number and order of variables
istr >> dummy >> readNVar;
// ...
<< " while there are " << readNVar << " variables declared in the file"
// ...
Log() << kINFO << "The definition (or the order) of the variables found in the input file" << Endl;
Log() << kINFO << "is not the same as the one declared in the Reader (which is necessary for" << Endl;
Log() << kINFO << "the correct working of the method):" << Endl;
Log() << kINFO << "   var #" << varIdx << " declared in Reader: " << varIt->GetExpression() << Endl;
// ...
Log() << kFATAL << "The expression declared to the Reader needs to be checked (name or order are wrong)" << Endl;
// ...
for (UInt_t iCls=0; iCls<nClasses; ++iCls) {
// ...
// MethodBase::ReadVariablesFromXML
<< " while there are " << readNVar << " variables declared in the file"
// ...
existingVarInfo = readVarInfo;
// ...
Log() << kINFO << "The definition (or the order) of the variables found in the input file is" << Endl;
Log() << kINFO << "not the same as the one declared in the Reader (which is necessary for the" << Endl;
Log() << kINFO << "correct working of the method):" << Endl;
// ...
Log() << kFATAL << "The expression declared to the Reader needs to be checked (name or order are wrong)" << Endl;
// MethodBase::ReadSpectatorsFromXML
<< " while there are " << readNSpec << " spectators declared in the file"
// ...
existingSpecInfo = readSpecInfo;
// ...
Log() << kINFO << "The definition (or the order) of the spectators found in the input file is" << Endl;
Log() << kINFO << "not the same as the one declared in the Reader (which is necessary for the" << Endl;
Log() << kINFO << "correct working of the method):" << Endl;
// ...
Log() << kFATAL << "The expression declared to the Reader needs to be checked (name or order are wrong)" << Endl;
// ...
// MethodBase::ReadClassesFromXML
for (UInt_t icls = 0; icls<readNCls; ++icls) {
// ...
if (DataInfo().GetClassInfo("Signal") != 0) {
// ...
if (DataInfo().GetClassInfo("Background") != 0) {
// ...
sdir = methodDir->mkdir(defaultDir);
// ...
wfilePath.Write( "TrainingPath" );
wfileName.Write( "WeightFileName" );
// ...
// MethodBase::GetWeightFileName: append a '/' to the directory only if needed
return ( wFileDir + (wFileDir[wFileDir.Length()-1]=='/' ? "" : "/")
// ...
<< "/kMaxAnalysisType" << Endl;
// reading the plain-text weight-file header: TMVA/ROOT version codes and analysis type
fin.getline(buf,512);
// ...
std::stringstream s(code.Data());
// ...
std::stringstream s(code.Data());
// ...
std::stringstream s(code.Data());
std::string analysisType;
// ...
else Log() << kFATAL << "Analysis type " << analysisType << " from weight-file not known!" << std::endl;
// MethodBase::CreateMVAPdfs: build the signal and background PDFs of the MVA output
if (mvaRes==0 || mvaRes->GetSize()==0) {
// ...
histMVAPdfS->Sumw2();
histMVAPdfB->Sumw2();
// ...
else histMVAPdfB->Fill( theVal, theWeight );
// ...
histMVAPdfS->Write();
histMVAPdfB->Write();
if (DataInfo().GetNClasses() == 2) {
   // ...
   << Form("<CreateMVAPdfs> Separation from histogram (PDF): %1.3f (%1.3f)",
// ...
// MethodBase::GetProba
Log() << kINFO << Form("Dataset[%s] : ", DataInfo().GetName())
      << "<GetProba> MVA PDFs for Signal and Background don't exist yet, we'll create them on demand" << Endl;
// ...
return GetProba(mvaVal, sigFraction);
// ...
Double_t denom = p_s*ap_sig + p_b*(1 - ap_sig);
// ...
return (denom > 0) ? (p_s*ap_sig) / denom : -1;
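// ------------------------------------------------------------------------------
// Illustrative sketch, not part of MethodBase.cxx: the value returned above is the
// Bayesian signal probability built from the MVA-output PDFs, P(S|x) =
// p_s*f / (p_s*f + p_b*(1-f)), with p_s = PDF_S(x), p_b = PDF_B(x) and an assumed
// signal fraction f (ap_sig). Helper name and signature are for illustration only.
double SignalPosterior(double p_s, double p_b, double ap_sig)
{
   const double denom = p_s*ap_sig + p_b*(1.0 - ap_sig);
   return (denom > 0) ? (p_s*ap_sig)/denom : -1.0;   // -1 flags an undefined probability
}
// ------------------------------------------------------------------------------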
// ...
<< "select option \"CreateMVAPdfs\"" << Endl;
// MethodBase::GetEfficiency
else if (list->GetSize() > 2) {
   // ...
   << " in string: " << theString
   << " | required format, e.g., Efficiency:0.05, or empty string" << Endl;
// ...
TH1* effhist = results->GetHist("MVA_HIGHBIN_S");
// ...
if (results->DoesExist("MVA_EFF_S")==0) {
   // ...
   results->Store(eff_s, "MVA_EFF_S");
   results->Store(eff_b, "MVA_EFF_B");
// ...
Float_t theVal = (*mvaRes)[ievt];
// ...
TH1* theHist = isSignal ? eff_s : eff_b;
// ...
if (isSignal) nevtS += theWeight;
// ...
if (sign > 0 && maxbin > fNbinsH) continue;
if (sign < 0 && maxbin < 1     ) continue;
if (sign > 0 && maxbin < 1     ) maxbin = 1;
// ...
results->Store(eff_BvsS, "MVA_EFF_BvsS");
// ...
results->Store(rej_BvsS);
// ...
rej_BvsS->SetYTitle( "Backgr rejection (1-eff)" );
// ...
results->Store(inveff_BvsS);
// ...
inveff_BvsS->SetYTitle( "Inverse backgr. eff (1/eff)" );
// scan the efficiency curves in fine steps of the signal efficiency
Double_t effS = 0., rejB, effS_ = 0., rejB_ = 0.;
Int_t    nbins_ = 5000;
for (Int_t bini=1; bini<=nbins_; bini++) {
   // ...
   effS = (bini - 0.5)/Float_t(nbins_);
   // ...
   if ((effS - rejB)*(effS_ - rejB_) < 0) break;
// ...
Double_t effS = 0, effB = 0, effS_ = 0, effB_ = 0;
Int_t    nbins_ = 1000;
// ...
for (Int_t bini=1; bini<=nbins_; bini++) {
   // ...
   effS = (bini - 0.5)/Float_t(nbins_);
   // ...
   integral += (1.0 - effB);
// ...
for (Int_t bini=1; bini<=nbins_; bini++) {
   // ...
   effS = (bini - 0.5)/Float_t(nbins_);
   // ...
   if ((effB - effBref)*(effB_ - effBref) <= 0) break;
// ...
effS = 0.5*(effS + effS_);
// ...
if (nevtS > 0) effSerr = TMath::Sqrt( effS*(1.0 - effS)/nevtS );   // binomial error
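// ------------------------------------------------------------------------------
// Illustrative sketch, not part of MethodBase.cxx: the scan pattern above brackets
// the signal efficiency at which the background efficiency crosses a reference
// value effBref and returns the midpoint of the bracketing step. Standalone helper;
// the callable effBofS stands in for the spline evaluation used in TMVA, and a
// small guard skips the artificial comparison on the very first step.
#include <functional>

double EffSAtEffBref(const std::function<double(double)>& effBofS,
                     double effBref, int nbins_ = 1000)
{
   double effS = 0., effB = 0., effS_ = 0., effB_ = 0.;
   for (int bini = 1; bini <= nbins_; bini++) {
      effS_ = effS;                            // previous scan point
      effB_ = effB;
      effS  = (bini - 0.5)/double(nbins_);     // next signal efficiency
      effB  = effBofS(effS);                   // corresponding background efficiency
      if ((effB - effBref)*(effB_ - effBref) <= 0 && bini > 1) break;   // sign change
   }
   return 0.5*(effS + effS_);                  // midpoint of the bracketing pair
}
// ------------------------------------------------------------------------------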
// MethodBase::GetTrainingEfficiency
<< " in string: " << theString
<< " | required format, e.g., Efficiency:0.05" << Endl;
// ...
Log() << kFATAL << Form("Dataset[%s] : ", DataInfo().GetName())
      << "<GetTrainingEfficiency> Binning mismatch between signal and background histos"
// ...
TH1* effhist = results->GetHist("MVA_HIGHBIN_S");
// ...
if (results->DoesExist("MVA_TRAIN_S")==0) {
   // ...
   results->Store(mva_s_tr, "MVA_TRAIN_S");
   results->Store(mva_b_tr, "MVA_TRAIN_B");
   // ...
   results->Store(mva_eff_tr_s, "MVA_TRAINEFF_S");
   results->Store(mva_eff_tr_b, "MVA_TRAINEFF_B");
// ...
theClsHist->Fill( theVal, theWeight );
// ...
if (sign > 0 && maxbin > fNbinsH) continue;
if (sign < 0 && maxbin < 1     ) continue;
if (sign > 0 && maxbin < 1     ) maxbin = 1;
// ...
if (sign > 0) for (Int_t ibin=1; ibin<=maxbin; ibin++) theEffHist->AddBinContent( ibin, theWeight );
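// ------------------------------------------------------------------------------
// Illustrative sketch, not part of MethodBase.cxx: the AddBinContent loop above
// builds a cumulative efficiency-vs-cut curve. Each event adds its weight to every
// bin below its own MVA bin, so bin i ends up holding the summed weight of events
// that would pass a cut placed at bin i; dividing by the total weight gives the
// efficiency as a function of the cut. Standalone helper with plain vectors
// instead of TH1 (names are assumptions for illustration only).
#include <vector>

std::vector<double> EfficiencyVsCut(const std::vector<int>& evtBin,
                                    const std::vector<double>& weight, int nbins)
{
   std::vector<double> eff(nbins + 1, 0.0);    // index 1..nbins, as for TH1 bins
   double sumw = 0.0;
   for (size_t ievt = 0; ievt < evtBin.size(); ++ievt) {
      sumw += weight[ievt];
      for (int ibin = 1; ibin <= evtBin[ievt] && ibin <= nbins; ++ibin)
         eff[ibin] += weight[ievt];            // event passes every cut below its bin
   }
   if (sumw > 0)
      for (int ibin = 1; ibin <= nbins; ++ibin) eff[ibin] /= sumw;
   return eff;                                 // eff[i] = efficiency for a cut at bin i
}
// ------------------------------------------------------------------------------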
results->Store(eff_bvss, "EFF_BVSS_TR");
results->Store(rej_bvss, "REJ_BVSS_TR");
// ...
Double_t effS = 0., effB, effS_ = 0., effB_ = 0.;
Int_t    nbins_ = 1000;
for (Int_t bini=1; bini<=nbins_; bini++) {
   // ...
   effS = (bini - 0.5)/Float_t(nbins_);
   // ...
   if ((effB - effBref)*(effB_ - effBref) <= 0) break;
// ...
return 0.5*(effS + effS_);
if (!resMulticlass) Log() << kFATAL << "unable to create pointer in GetMulticlassTrainingEfficiency, exiting." << Endl;
// ...
// separation / ROC-integral helpers (PDF- and histogram-based variants)
if ((!pdfS && pdfB) || (pdfS && !pdfB))
// ...
if (!pdfS) pdfS = fSplS;
if (!pdfB) pdfB = fSplB;
// ...
<< " fSplS or fSplB are not yet filled" << Endl;
// ...
if ((!histS && histB) || (histS && !histB))
// ...
if (histS==0 || histB==0) return 0.;
// ...
for (UInt_t i=0; i<nsteps; i++) {
// ...
return integral*step;
// ...
if ((!pdfS && pdfB) || (pdfS && !pdfB))
// ...
if (!pdfS) pdfS = fSplS;
if (!pdfB) pdfB = fSplB;
// ...
if (pdfS==0 || pdfB==0) return 0.;
// ...
for (UInt_t i=0; i<nsteps; i++) {
// ...
return integral*step;
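// ------------------------------------------------------------------------------
// Illustrative sketch, not part of MethodBase.cxx: the "integral*step" returned
// above is a rectangle-rule area under the background-rejection vs. signal-
// efficiency curve. Standalone helper; effBofS stands in for the PDF/spline-based
// background efficiency at a given signal efficiency (names are illustrative).
#include <functional>

double RocIntegral(const std::function<double(double)>& effBofS, unsigned nsteps = 1000)
{
   const double step = 1.0/nsteps;
   double integral = 0.;
   for (unsigned i = 0; i < nsteps; i++) {
      const double effS = (i + 0.5)*step;      // midpoint of the i-th efficiency slice
      integral += (1.0 - effBofS(effS));       // background rejection at this effS
   }
   return integral*step;                       // area under the rejection curve
}
// ------------------------------------------------------------------------------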
// MethodBase::GetMaximumSignificance
                               Double_t& max_significance_value ) const
// ...
Double_t effS(0), effB(0), significance(0);
// ...
if (SignalEvents <= 0 || BackgroundEvents <= 0) {
   // ...
   << "Number of signal or background events is <= 0 ==> abort"
// ...
<< SignalEvents/BackgroundEvents << Endl;
// ...
if ( (eff_s==0) || (eff_b==0) ) {
// ...
significance = sqrt(SignalEvents)*( effS )/sqrt( effS + ( BackgroundEvents / SignalEvents) * effB );
// ...
delete temp_histogram;
// ...
return max_significance;
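// ------------------------------------------------------------------------------
// Illustrative sketch, not part of MethodBase.cxx: the figure of merit maximised
// above. For expected yields S and B and cut efficiencies effS, effB, the
// expression sqrt(S)*effS / sqrt(effS + (B/S)*effB) is algebraically equal to
// S*effS / sqrt(S*effS + B*effB). Standalone helper (name is illustrative).
#include <cmath>

double CutSignificance(double S, double B, double effS, double effB)
{
   return std::sqrt(S)*effS / std::sqrt( effS + (B/S)*effB );
}
// ------------------------------------------------------------------------------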
// MethodBase::Statistics: weighted mean and RMS of a variable, separately for
// signal and background events
for (Int_t ievt = 0; ievt < entries; ievt++) {
   // ...
   meanS += weight*theVar;
   rmsS  += weight*theVar*theVar;
   // ...
   meanB += weight*theVar;
   rmsB  += weight*theVar*theVar;
// ...
meanS = meanS/sumwS;
meanB = meanB/sumwB;
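// ------------------------------------------------------------------------------
// Illustrative sketch, not part of MethodBase.cxx: the bookkeeping above in a
// standalone form. Accumulate w*x and w*x*x, then mean = sum(w*x)/sum(w) and
// rms = sqrt( sum(w*x*x)/sum(w) - mean^2 ). Helper name/signature are illustrative.
#include <cmath>
#include <vector>

void WeightedMeanRms(const std::vector<double>& x, const std::vector<double>& w,
                     double& mean, double& rms)
{
   double sumw = 0, s1 = 0, s2 = 0;
   for (size_t i = 0; i < x.size(); ++i) {
      sumw += w[i];
      s1   += w[i]*x[i];
      s2   += w[i]*x[i]*x[i];
   }
   mean = s1/sumw;
   rms  = std::sqrt( s2/sumw - mean*mean );    // weighted standard deviation
}
// ------------------------------------------------------------------------------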
// MethodBase::MakeClass: write a standalone C++ reader class for the trained method
if (theClassFileName == "")
// ...
classFileName = theClassFileName;
// ...
TString tfname( classFileName );
// ...
<< "Creating standalone class: "
// ...
std::ofstream fout( classFileName );
// ...
Log() << kFATAL << "<MakeClass> Unable to open file: " << classFileName << Endl;
// generated code written by MakeClass (excerpt; intermediate lines omitted)
fout << "// Class: " << className << std::endl;
fout << "// Automatically generated by MethodBase::MakeClass" << std::endl << "//" << std::endl;
fout << "/* configuration options =====================================================" << std::endl << std::endl;
fout << "============================================================================ */" << std::endl;
fout << "" << std::endl;
fout << "#include <vector>" << std::endl;
fout << "#include <cmath>" << std::endl;
fout << "#include <string>" << std::endl;
fout << "#include <iostream>" << std::endl;
fout << "" << std::endl;
fout << "#ifndef IClassifierReader__def" << std::endl;
fout << "#define IClassifierReader__def" << std::endl;
fout << "class IClassifierReader {" << std::endl;
fout << " public:" << std::endl;
fout << " // constructor" << std::endl;
fout << " IClassifierReader() : fStatusIsClean( true ) {}" << std::endl;
fout << " virtual ~IClassifierReader() {}" << std::endl;
fout << " // return classifier response" << std::endl;
fout << " virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;" << std::endl;
fout << " // returns classifier status" << std::endl;
fout << " bool IsStatusClean() const { return fStatusIsClean; }" << std::endl;
fout << " protected:" << std::endl;
fout << " bool fStatusIsClean;" << std::endl;
fout << "};" << std::endl;
fout << "#endif" << std::endl;
fout << "class " << className << " : public IClassifierReader {" << std::endl;
fout << " public:" << std::endl;
fout << " // constructor" << std::endl;
fout << " " << className << "( std::vector<std::string>& theInputVars ) " << std::endl;
fout << " : IClassifierReader()," << std::endl;
fout << " fClassName( \"" << className << "\" )," << std::endl;
fout << " fNvars( " << GetNvar() << " )," << std::endl;
fout << " fIsNormalised( " << (IsNormalised() ? "true" : "false") << " )" << std::endl;
fout << " { " << std::endl;
fout << " // the training input variables" << std::endl;
fout << " const char* inputVars[] = { ";
// ...
if (ivar<GetNvar()-1) fout << ", ";
// ...
fout << " };" << std::endl;
fout << " // sanity checks" << std::endl;
fout << " if (theInputVars.size() <= 0) {" << std::endl;
fout << " std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": empty input vector\" << std::endl;" << std::endl;
fout << " fStatusIsClean = false;" << std::endl;
fout << " }" << std::endl;
fout << " if (theInputVars.size() != fNvars) {" << std::endl;
fout << " std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": mismatch in number of input values: \"" << std::endl;
fout << " << theInputVars.size() << \" != \" << fNvars << std::endl;" << std::endl;
fout << " fStatusIsClean = false;" << std::endl;
fout << " }" << std::endl;
fout << " // validate input variables" << std::endl;
fout << " for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {" << std::endl;
fout << " if (theInputVars[ivar] != inputVars[ivar]) {" << std::endl;
fout << " std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": mismatch in input variable names\" << std::endl" << std::endl;
fout << " << \" for variable [\" << ivar << \"]: \" << theInputVars[ivar].c_str() << \" != \" << inputVars[ivar] << std::endl;" << std::endl;
fout << " fStatusIsClean = false;" << std::endl;
fout << " }" << std::endl;
fout << " }" << std::endl;
fout << " // initialize min and max vectors (for normalisation)" << std::endl;
// ...
fout << " fVmin[" << ivar << "] = " << std::setprecision(15) << GetXmin( ivar ) << ";" << std::endl;
fout << " fVmax[" << ivar << "] = " << std::setprecision(15) << GetXmax( ivar ) << ";" << std::endl;
// ...
fout << " // initialize input variable types" << std::endl;
// ...
fout << " // initialize constants" << std::endl;
fout << " Initialize();" << std::endl;
// ...
fout << " // initialize transformation" << std::endl;
fout << " InitTransform();" << std::endl;
fout << " }" << std::endl;
fout << " // destructor" << std::endl;
fout << " virtual ~" << className << "() {" << std::endl;
fout << " Clear(); // method-specific" << std::endl;
fout << " }" << std::endl;
fout << " // the classifier response" << std::endl;
fout << " // \"inputValues\" is a vector of input values in the same order as the " << std::endl;
fout << " // variables given to the constructor" << std::endl;
fout << " double GetMvaValue( const std::vector<double>& inputValues ) const;" << std::endl;
fout << " private:" << std::endl;
fout << " // method-specific destructor" << std::endl;
fout << " void Clear();" << std::endl;
fout << " // input variable transformation" << std::endl;
fout << " void InitTransform();" << std::endl;
fout << " void Transform( std::vector<double> & iv, int sigOrBgd ) const;" << std::endl;
fout << " // common member variables" << std::endl;
fout << " const char* fClassName;" << std::endl;
fout << " const size_t fNvars;" << std::endl;
fout << " size_t GetNvar() const { return fNvars; }" << std::endl;
fout << " char GetType( int ivar ) const { return fType[ivar]; }" << std::endl;
fout << " // normalisation of input variables" << std::endl;
fout << " const bool fIsNormalised;" << std::endl;
fout << " bool IsNormalised() const { return fIsNormalised; }" << std::endl;
fout << " double fVmin[" << GetNvar() << "];" << std::endl;
fout << " double fVmax[" << GetNvar() << "];" << std::endl;
fout << " double NormVariable( double x, double xmin, double xmax ) const {" << std::endl;
fout << " // normalise to output range: [-1, 1]" << std::endl;
fout << " return 2*(x - xmin)/(xmax - xmin) - 1.0;" << std::endl;
fout << " }" << std::endl;
fout << " // type of input variable: 'F' or 'I'" << std::endl;
fout << " char fType[" << GetNvar() << "];" << std::endl;
fout << " // initialize internal variables" << std::endl;
fout << " void Initialize();" << std::endl;
fout << " double GetMvaValue__( const std::vector<double>& inputValues ) const;" << std::endl;
fout << "" << std::endl;
fout << " // private members (method specific)" << std::endl;
// ...
fout << " inline double " << className << "::GetMvaValue( const std::vector<double>& inputValues ) const" << std::endl;
fout << " {" << std::endl;
fout << " // classifier response value" << std::endl;
fout << " double retval = 0;" << std::endl;
fout << " // classifier response, sanity check first" << std::endl;
fout << " if (!IsStatusClean()) {" << std::endl;
fout << " std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": cannot return classifier response\"" << std::endl;
fout << " << \" because status is dirty\" << std::endl;" << std::endl;
fout << " retval = 0;" << std::endl;
fout << " }" << std::endl;
fout << " else {" << std::endl;
fout << " if (IsNormalised()) {" << std::endl;
fout << " // normalise variables" << std::endl;
fout << " std::vector<double> iV;" << std::endl;
fout << " iV.reserve(inputValues.size());" << std::endl;
fout << " int ivar = 0;" << std::endl;
fout << " for (std::vector<double>::const_iterator varIt = inputValues.begin();" << std::endl;
fout << " varIt != inputValues.end(); varIt++, ivar++) {" << std::endl;
fout << " iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));" << std::endl;
fout << " }" << std::endl;
// ...
fout << " Transform( iV, -1 );" << std::endl;
fout << " retval = GetMvaValue__( iV );" << std::endl;
fout << " }" << std::endl;
fout << " else {" << std::endl;
// ...
fout << " std::vector<double> iV;" << std::endl;
fout << " int ivar = 0;" << std::endl;
fout << " for (std::vector<double>::const_iterator varIt = inputValues.begin();" << std::endl;
fout << " varIt != inputValues.end(); varIt++, ivar++) {" << std::endl;
fout << " iV.push_back(*varIt);" << std::endl;
fout << " }" << std::endl;
fout << " Transform( iV, -1 );" << std::endl;
fout << " retval = GetMvaValue__( iV );" << std::endl;
// ...
fout << " retval = GetMvaValue__( inputValues );" << std::endl;
// ...
fout << " }" << std::endl;
fout << " }" << std::endl;
fout << " return retval;" << std::endl;
fout << " }" << std::endl;
// MethodBase::PrintHelpMessage: optionally redirect std::cout to the reference file
std::streambuf* cout_sbuf = std::cout.rdbuf();   // save the original stream buffer
std::ofstream*  o = 0;
if (gConfig().WriteOptionsReference()) {
   // ...
   std::cout.rdbuf( o->rdbuf() );                // redirect std::cout
// ...
<< "================================================================"
// ...
<< "H e l p   f o r   M V A   m e t h o d   [ " << GetName() << " ] :"
// ...
Log() << "Help for MVA method [ " << GetName() << " ] :" << Endl;
// ...
Log() << "<Suppress this message by specifying \"!H\" in the booking option>" << Endl;
// ...
<< "================================================================"
// ...
Log() << "# End of Message___" << Endl;
// ...
std::cout.rdbuf( cout_sbuf );                    // restore the original stream buffer
// MethodBase::GetKSTrainingVsTest
if (mvaRes != NULL) {
   // ...
   TH1D* mva_s_tr = dynamic_cast<TH1D*>(mvaRes->GetHist("MVA_TRAIN_S"));
   TH1D* mva_b_tr = dynamic_cast<TH1D*>(mvaRes->GetHist("MVA_TRAIN_B"));
   // ...
   if (!mva_s || !mva_b || !mva_s_tr || !mva_b_tr) return -1;
   // ...
   if (SorB == 's' || SorB == 'S')
      // ...
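// ------------------------------------------------------------------------------
// Illustrative sketch, not part of MethodBase.cxx (assumption: the branch above
// compares the training and testing MVA distributions of the selected class with
// TH1::KolmogorovTest). Standalone helper mirroring the -1 "not available" return:
#include "TH1D.h"

double KSTrainVsTest(TH1D* train, TH1D* test, const char* opt = "X")
{
   if (!train || !test) return -1;
   return train->KolmogorovTest( test, opt );
}
// ------------------------------------------------------------------------------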