141 #pragma warning ( disable : 4355 ) 174 std::cerr <<
kERROR <<
"IPythonInteractive::Init: already initialized..." << std::endl;
178 for(
auto& title : graphTitles){
180 fGraphs.back()->SetTitle(title);
181 fGraphs.back()->SetName(title);
182 fGraphs.back()->SetFillColor(color);
183 fGraphs.back()->SetLineColor(color);
184 fGraphs.back()->SetMarkerColor(color);
244 fAnalysisType (
Types::kNoAnalysisType ),
245 fRegressionReturnVal ( 0 ),
246 fMulticlassReturnVal ( 0 ),
247 fDataSetInfo ( dsi ),
248 fSignalReferenceCut ( 0.5 ),
249 fSignalReferenceCutOrientation( 1. ),
250 fVariableTransformType (
Types::kSignal ),
251 fJobName ( jobName ),
252 fMethodName ( methodTitle ),
253 fMethodType ( methodType ),
257 fConstructedFromWeightFile (
kFALSE ),
259 fMethodBaseDir ( 0 ),
262 fModelPersistence (kTRUE),
273 fSplTrainEffBvsS ( 0 ),
274 fVarTransformString (
"None" ),
275 fTransformationPointer ( 0 ),
276 fTransformation ( dsi, methodTitle ),
278 fVerbosityLevelString (
"Default" ),
281 fIgnoreNegWeightsInTraining(
kFALSE ),
283 fBackgroundClass ( 0 ),
385 for (
Int_t i = 0; i < 2; i++ ) {
477 if (
DataInfo().GetClassInfo(
"Signal") != 0) {
480 if (
DataInfo().GetClassInfo(
"Background") != 0) {
505 DeclareOptionRef(
fVerbose,
"V",
"Verbose output (short form of \"VerbosityLevel\" below - overrides the latter one)" );
521 DeclareOptionRef(
fVarTransformString,
"VarTransform",
"List of variable transformations performed before training, e.g., \"D_Background,P_Signal,G,N_AllClasses\" for: \"Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)\"" );
528 "Events with negative weights are ignored in the training (but are included for testing and performance evaluation)" );
579 Log() <<
kFATAL <<
"<ProcessOptions> Verbosity level type '" 595 "Use signal or background events to derive for variable transformation (the transformation is applied on both types of, course)" );
598 DeclareOptionRef(
fTxtWeightsOnly=kTRUE,
"TxtWeightFilesOnly",
"If True: write all training results (weights) as text files (False: some are written in ROOT format)" );
627 std::map<TString,Double_t> tunedParameters;
628 tunedParameters.size();
629 return tunedParameters;
661 <<
"Begin training" <<
Endl;
666 <<
"\tEnd of training " <<
Endl;
669 <<
"Elapsed time for training with " << nEvents <<
" events: " 673 <<
"\tCreate MVA output for ";
723 bool truncate =
false;
724 TH1F*
h1 = regRes->QuadraticDeviation( tgtNum , truncate, 1.);
729 TH1F* h2 = regRes->QuadraticDeviation( tgtNum , truncate, yq[0]);
753 regRes->Resize( nEvents );
757 regRes->SetValue( vals, ievt );
762 <<
"Elapsed time for evaluation of " << nEvents <<
" events: " 771 regRes->CreateDeviationHistograms( histNamePrefix );
794 resMulticlass->
Resize( nEvents );
798 resMulticlass->SetValue( vals, ievt );
803 <<
"Elapsed time for evaluation of " << nEvents <<
" events: " 821 if (errUpper) *errUpper=-1;
860 std::vector<Double_t> mvaValues =
GetMvaValues(0, nEvents,
true);
869 clRes->
SetValue( mvaValues[ievt], ievt );
884 if (firstEvt > lastEvt || lastEvt > nEvents) lastEvt =
nEvents;
885 if (firstEvt < 0) firstEvt = 0;
886 std::vector<Double_t> values(lastEvt-firstEvt);
888 nEvents = values.size();
898 for (
Int_t ievt=firstEvt; ievt<lastEvt; ievt++) {
905 if (modulo <= 0 ) modulo = 1;
911 <<
"Elapsed time for evaluation of " << nEvents <<
" events: " 936 mvaProb->
Resize( nEvents );
941 if (proba < 0)
break;
946 if (modulo <= 0 ) modulo = 1;
951 <<
"Elapsed time for evaluation of " << nEvents <<
" events: " 973 bias = 0; biasT = 0; dev = 0; devT = 0; rms = 0; rmsT = 0;
975 Double_t m1 = 0, m2 = 0, s1 = 0, s2 = 0, s12 = 0;
981 for (
Long64_t ievt=0; ievt<nevt; ievt++) {
1005 m1 += t*w; s1 += t*t*w;
1006 m2 += r*w; s2 += r*r*w;
1019 corr = s12/sumw - m1*m2;
1020 corr /=
TMath::Sqrt( (s1/sumw - m1*m1) * (s2/sumw - m2*m2) );
1023 TH2F* hist =
new TH2F(
"hist",
"hist", 150, xmin, xmax, 100, xmin, xmax );
1024 TH2F* histT =
new TH2F(
"histT",
"histT", 150, xmin, xmax, 100, xmin, xmax );
1031 for (
Long64_t ievt=0; ievt<nevt; ievt++) {
1032 Float_t d = (rV[ievt] - tV[ievt]);
1033 hist->
Fill( rV[ievt], tV[ievt], wV[ievt] );
1034 if (d >= devMin && d <= devMax) {
1036 biasT += wV[ievt] * d;
1038 rmsT += wV[ievt] * d * d;
1039 histT->
Fill( rV[ievt], tV[ievt], wV[ievt] );
1090 <<
" not found in tree" <<
Endl;
1122 mvaRes->
Store(mva_s,
"MVA_S");
1123 mvaRes->
Store(mva_b,
"MVA_B");
1133 proba_s =
new TH1D( TestvarName +
"_Proba_S", TestvarName +
"_Proba_S",
fNbinsMVAoutput, 0.0, 1.0 );
1134 proba_b =
new TH1D( TestvarName +
"_Proba_B", TestvarName +
"_Proba_B",
fNbinsMVAoutput, 0.0, 1.0 );
1135 mvaRes->
Store(proba_s,
"Prob_S");
1136 mvaRes->
Store(proba_b,
"Prob_B");
1141 rarity_s =
new TH1D( TestvarName +
"_Rarity_S", TestvarName +
"_Rarity_S",
fNbinsMVAoutput, 0.0, 1.0 );
1142 rarity_b =
new TH1D( TestvarName +
"_Rarity_B", TestvarName +
"_Rarity_B",
fNbinsMVAoutput, 0.0, 1.0 );
1143 mvaRes->
Store(rarity_s,
"Rar_S");
1144 mvaRes->
Store(rarity_b,
"Rar_B");
1150 TH1* mva_eff_s =
new TH1D( TestvarName +
"_S_high", TestvarName +
"_S_high",
fNbinsH, fXmin, sxmax );
1151 TH1* mva_eff_b =
new TH1D( TestvarName +
"_B_high", TestvarName +
"_B_high",
fNbinsH, fXmin, sxmax );
1152 mvaRes->
Store(mva_eff_s,
"MVA_HIGHBIN_S");
1153 mvaRes->
Store(mva_eff_b,
"MVA_HIGHBIN_B");
1163 if (mvaProb)
Log() <<
kINFO <<
"Also filling probability and rarity histograms (on request)..." <<
Endl;
1179 mvaResTypes->push_back(kTRUE);
1180 mva_s ->
Fill( v, w );
1182 proba_s->
Fill( (*mvaProb)[ievt][0], w );
1186 mva_eff_s ->
Fill( v, w );
1189 mvaResTypes->push_back(
kFALSE);
1190 mva_b ->
Fill( v, w );
1192 proba_b->
Fill( (*mvaProb)[ievt][0], w );
1195 mva_eff_b ->
Fill( v, w );
1225 tf << prefix <<
"#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
1227 tf.setf(std::ios::left);
1232 tf << prefix <<
"Creator : " << userInfo->
fUser << std::endl;
1241 tf << prefix << std::endl;
1246 tf << prefix << std::endl << prefix <<
"#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
1248 tf << prefix << std::endl;
1251 tf << prefix << std::endl << prefix <<
"#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
1253 tf << prefix << std::endl;
1286 if (!parent)
return;
1365 <<
"Creating xml weight file: " 1386 <<
"Reading weight file: " 1390 #if ROOT_VERSION_CODE >= ROOT_VERSION(5,29,0) 1401 fb.open(tfname.
Data(),std::ios::in);
1402 if (!fb.is_open()) {
1404 <<
"Unable to open input weight file: " << tfname <<
Endl;
1406 std::istream fin(&fb);
1424 #if ROOT_VERSION_CODE >= ROOT_VERSION(5,26,00) 1431 << xmlstr <<
" ) is not available for ROOT versions prior to 5.26/00." <<
Endl;
1458 if (nodeName==
"GeneralInfo") {
1463 while (antypeNode) {
1466 if (
name ==
"TrainingTime")
1469 if (
name ==
"AnalysisType") {
1478 if (
name ==
"TMVA Release" ||
name ==
"TMVA") {
1485 if (
name ==
"ROOT Release" ||
name ==
"ROOT") {
1495 else if (nodeName==
"Options") {
1500 else if (nodeName==
"Variables") {
1503 else if (nodeName==
"Spectators") {
1506 else if (nodeName==
"Classes") {
1509 else if (nodeName==
"Targets") {
1512 else if (nodeName==
"Transformations") {
1515 else if (nodeName==
"MVAPdfs") {
1530 else if (nodeName==
"Weights") {
1561 methodType = methodType(methodType.Last(
' '),methodType.Length());
1566 if (methodName ==
"") methodName = methodType;
1591 fin.getline(buf,512);
1624 fin.getline(buf,512);
1628 varTrafo->ReadTransformationFromStream(fin, trafo );
1639 fin.getline(buf,512);
1653 fin.getline(buf,512);
1655 fin.getline(buf,512);
1686 istr >> dummy >> readNVar;
1690 <<
" while there are " << readNVar <<
" variables declared in the file" 1706 Log() <<
kINFO <<
"The definition (or the order) of the variables found in the input file is" <<
Endl;
1707 Log() <<
kINFO <<
"is not the same as the one declared in the Reader (which is necessary for" <<
Endl;
1708 Log() <<
kINFO <<
"the correct working of the method):" <<
Endl;
1709 Log() <<
kINFO <<
" var #" << varIdx <<
" declared in Reader: " << varIt->GetExpression() <<
Endl;
1711 Log() <<
kFATAL <<
"The expression declared to the Reader needs to be checked (name or order are wrong)" <<
Endl;
1765 for (
UInt_t iCls=0; iCls<nClasses; ++iCls) {
1801 <<
" while there are " << readNVar <<
" variables declared in the file" 1816 existingVarInfo = readVarInfo;
1820 Log() <<
kINFO <<
"The definition (or the order) of the variables found in the input file is" <<
Endl;
1821 Log() <<
kINFO <<
"not the same as the one declared in the Reader (which is necessary for the" <<
Endl;
1822 Log() <<
kINFO <<
"correct working of the method):" <<
Endl;
1825 Log() <<
kFATAL <<
"The expression declared to the Reader needs to be checked (name or order are wrong)" <<
Endl;
1841 <<
" while there are " << readNSpec <<
" spectators declared in the file" 1856 existingSpecInfo = readSpecInfo;
1860 Log() <<
kINFO <<
"The definition (or the order) of the spectators found in the input file is" <<
Endl;
1861 Log() <<
kINFO <<
"not the same as the one declared in the Reader (which is necessary for the" <<
Endl;
1862 Log() <<
kINFO <<
"correct working of the method):" <<
Endl;
1865 Log() <<
kFATAL <<
"The expression declared to the Reader needs to be checked (name or order are wrong)" <<
Endl;
1884 for (
UInt_t icls = 0; icls<readNCls;++icls) {
1901 if (
DataInfo().GetClassInfo(
"Signal") != 0) {
1906 if (
DataInfo().GetClassInfo(
"Background") != 0) {
1951 sdir = methodDir->
mkdir(defaultDir);
1956 wfilePath.Write(
"TrainingPath" );
1957 wfileName.Write(
"WeightFileName" );
2023 return ( wFileDir + (wFileDir[wFileDir.
Length()-1]==
'/' ?
"" :
"/")
2053 <<
"/kMaxAnalysisType" <<
Endl;
2075 fin.getline(buf,512);
2081 std::stringstream s(code.
Data());
2089 std::stringstream s(code.
Data());
2097 std::stringstream s(code.
Data());
2098 std::string analysisType;
2103 else Log() <<
kFATAL <<
"Analysis type " << analysisType <<
" from weight-file not known!" << std::endl;
2125 if (mvaRes==0 || mvaRes->
GetSize()==0) {
2140 histMVAPdfS->Sumw2();
2141 histMVAPdfB->
Sumw2();
2149 else histMVAPdfB->
Fill( theVal, theWeight );
2158 histMVAPdfS->Write();
2159 histMVAPdfB->
Write();
2167 if (
DataInfo().GetNClasses() == 2) {
2169 <<
Form(
"<CreateMVAPdfs> Separation from histogram (PDF): %1.3f (%1.3f)",
2183 Log() <<
kINFO<<
Form(
"Dataset[%s] : ",
DataInfo().
GetName()) <<
"<GetProba> MVA PDFs for Signal and Background don't exist yet, we'll create them on demand" <<
Endl;
2189 return GetProba(mvaVal,sigFraction);
2204 Double_t denom = p_s*ap_sig + p_b*(1 - ap_sig);
2206 return (denom > 0) ? (p_s*ap_sig) / denom : -1;
2218 <<
"select option \"CreateMVAPdfs\"" <<
Endl;
2242 if (!list || list->
GetSize() < 2) computeArea = kTRUE;
2243 else if (list->
GetSize() > 2) {
2245 <<
" in string: " << theString
2246 <<
" | required format, e.g., Efficiency:0.05, or empty string" <<
Endl;
2262 TH1 * effhist = results->
GetHist(
"MVA_HIGHBIN_S");
2269 if (results->
DoesExist(
"MVA_EFF_S")==0) {
2274 results->
Store(eff_s,
"MVA_EFF_S");
2275 results->
Store(eff_b,
"MVA_EFF_B");
2287 Float_t theVal = (*mvaRes)[ievt];
2290 TH1* theHist = isSignal ? eff_s : eff_b;
2293 if (isSignal) nevtS+=theWeight;
2297 if (sign > 0 && maxbin >
fNbinsH)
continue;
2298 if (sign < 0 && maxbin < 1 )
continue;
2299 if (sign > 0 && maxbin < 1 ) maxbin = 1;
2319 results->
Store(eff_BvsS,
"MVA_EFF_BvsS");
2325 results->
Store(rej_BvsS);
2327 rej_BvsS->
SetYTitle(
"Backgr rejection (1-eff)" );
2332 results->
Store(inveff_BvsS);
2334 inveff_BvsS->
SetYTitle(
"Inverse backgr. eff (1/eff)" );
2377 Double_t effS = 0., rejB, effS_ = 0., rejB_ = 0.;
2378 Int_t nbins_ = 5000;
2379 for (
Int_t bini=1; bini<=nbins_; bini++) {
2382 effS = (bini - 0.5)/
Float_t(nbins_);
2386 if ((effS - rejB)*(effS_ - rejB_) < 0)
break;
2404 Double_t effS = 0, effB = 0, effS_ = 0, effB_ = 0;
2405 Int_t nbins_ = 1000;
2411 for (
Int_t bini=1; bini<=nbins_; bini++) {
2414 effS = (bini - 0.5)/
Float_t(nbins_);
2416 integral += (1.0 - effB);
2430 for (
Int_t bini=1; bini<=nbins_; bini++) {
2433 effS = (bini - 0.5)/
Float_t(nbins_);
2437 if ((effB - effBref)*(effB_ - effBref) <= 0)
break;
2443 effS = 0.5*(effS + effS_);
2446 if (nevtS > 0) effSerr =
TMath::Sqrt( effS*(1.0 - effS)/nevtS );
2472 <<
" in string: " << theString
2473 <<
" | required format, e.g., Efficiency:0.05" <<
Endl;
2486 Log() <<
kFATAL <<
Form(
"Dataset[%s] : ",
DataInfo().
GetName())<<
"<GetTrainingEfficiency> Binning mismatch between signal and background histos" 2494 TH1 * effhist = results->
GetHist(
"MVA_HIGHBIN_S");
2499 if (results->
DoesExist(
"MVA_TRAIN_S")==0) {
2507 results->
Store(mva_s_tr,
"MVA_TRAIN_S");
2508 results->
Store(mva_b_tr,
"MVA_TRAIN_B");
2517 results->
Store(mva_eff_tr_s,
"MVA_TRAINEFF_S");
2518 results->
Store(mva_eff_tr_b,
"MVA_TRAINEFF_B");
2538 theClsHist->
Fill( theVal, theWeight );
2542 if (sign > 0 && maxbin >
fNbinsH)
continue;
2543 if (sign < 0 && maxbin < 1 )
continue;
2544 if (sign > 0 && maxbin < 1 ) maxbin = 1;
2547 if (sign > 0)
for (
Int_t ibin=1; ibin<=maxbin; ibin++) theEffHist->
AddBinContent( ibin , theWeight );
2564 results->
Store(eff_bvss,
"EFF_BVSS_TR");
2565 results->
Store(rej_bvss,
"REJ_BVSS_TR");
2613 Double_t effS = 0., effB, effS_ = 0., effB_ = 0.;
2614 Int_t nbins_ = 1000;
2615 for (
Int_t bini=1; bini<=nbins_; bini++) {
2618 effS = (bini - 0.5)/
Float_t(nbins_);
2622 if ((effB - effBref)*(effB_ - effBref) <= 0)
break;
2627 return 0.5*(effS + effS_);
2649 if (!resMulticlass)
Log() <<
kFATAL<<
"unable to create pointer in GetMulticlassTrainingEfficiency, exiting."<<
Endl;
2689 if ((!pdfS && pdfB) || (pdfS && !pdfB))
2691 if (!pdfS) pdfS =
fSplS;
2692 if (!pdfB) pdfB =
fSplB;
2696 <<
" fSplS or fSplB are not yet filled" <<
Endl;
2711 if ((!histS && histB) || (histS && !histB))
2714 if (histS==0 || histB==0)
return 0.;
2727 for (
UInt_t i=0; i<nsteps; i++) {
2731 return integral*step;
2743 if ((!pdfS && pdfB) || (pdfS && !pdfB))
2745 if (!pdfS) pdfS =
fSplS;
2746 if (!pdfB) pdfB =
fSplB;
2748 if (pdfS==0 || pdfB==0)
return 0.;
2757 for (
UInt_t i=0; i<nsteps; i++) {
2761 return integral*step;
2771 Double_t& max_significance_value )
const 2776 Double_t effS(0),effB(0),significance(0);
2779 if (SignalEvents <= 0 || BackgroundEvents <= 0) {
2781 <<
"Number of signal or background events is <= 0 ==> abort" 2786 << SignalEvents/BackgroundEvents <<
Endl;
2791 if ( (eff_s==0) || (eff_b==0) ) {
2802 significance =
sqrt(SignalEvents)*( effS )/
sqrt( effS + ( BackgroundEvents / SignalEvents) * effB );
2812 delete temp_histogram;
2817 return max_significance;
2856 for (
Int_t ievt = 0; ievt < entries; ievt++) {
2865 meanS += weight*theVar;
2866 rmsS += weight*theVar*theVar;
2870 meanB += weight*theVar;
2871 rmsB += weight*theVar*theVar;
2879 meanS = meanS/sumwS;
2880 meanB = meanB/sumwB;
2894 if (theClassFileName ==
"")
2897 classFileName = theClassFileName;
2901 TString tfname( classFileName );
2903 <<
"Creating standalone class: " 2906 std::ofstream fout( classFileName );
2908 Log() <<
kFATAL <<
"<MakeClass> Unable to open file: " << classFileName <<
Endl;
2913 fout <<
"// Class: " << className << std::endl;
2914 fout <<
"// Automatically generated by MethodBase::MakeClass" << std::endl <<
"//" << std::endl;
2918 fout <<
"/* configuration options =====================================================" << std::endl << std::endl;
2921 fout <<
"============================================================================ */" << std::endl;
2924 fout <<
"" << std::endl;
2925 fout <<
"#include <vector>" << std::endl;
2926 fout <<
"#include <cmath>" << std::endl;
2927 fout <<
"#include <string>" << std::endl;
2928 fout <<
"#include <iostream>" << std::endl;
2929 fout <<
"" << std::endl;
2934 fout <<
"#ifndef IClassifierReader__def" << std::endl;
2935 fout <<
"#define IClassifierReader__def" << std::endl;
2937 fout <<
"class IClassifierReader {" << std::endl;
2939 fout <<
" public:" << std::endl;
2941 fout <<
" // constructor" << std::endl;
2942 fout <<
" IClassifierReader() : fStatusIsClean( true ) {}" << std::endl;
2943 fout <<
" virtual ~IClassifierReader() {}" << std::endl;
2945 fout <<
" // return classifier response" << std::endl;
2946 fout <<
" virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;" << std::endl;
2948 fout <<
" // returns classifier status" << std::endl;
2949 fout <<
" bool IsStatusClean() const { return fStatusIsClean; }" << std::endl;
2951 fout <<
" protected:" << std::endl;
2953 fout <<
" bool fStatusIsClean;" << std::endl;
2954 fout <<
"};" << std::endl;
2956 fout <<
"#endif" << std::endl;
2958 fout <<
"class " << className <<
" : public IClassifierReader {" << std::endl;
2960 fout <<
" public:" << std::endl;
2962 fout <<
" // constructor" << std::endl;
2963 fout <<
" " << className <<
"( std::vector<std::string>& theInputVars ) " << std::endl;
2964 fout <<
" : IClassifierReader()," << std::endl;
2965 fout <<
" fClassName( \"" << className <<
"\" )," << std::endl;
2966 fout <<
" fNvars( " <<
GetNvar() <<
" )," << std::endl;
2967 fout <<
" fIsNormalised( " << (
IsNormalised() ?
"true" :
"false") <<
" )" << std::endl;
2968 fout <<
" { " << std::endl;
2969 fout <<
" // the training input variables" << std::endl;
2970 fout <<
" const char* inputVars[] = { ";
2973 if (ivar<
GetNvar()-1) fout <<
", ";
2975 fout <<
" };" << std::endl;
2977 fout <<
" // sanity checks" << std::endl;
2978 fout <<
" if (theInputVars.size() <= 0) {" << std::endl;
2979 fout <<
" std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": empty input vector\" << std::endl;" << std::endl;
2980 fout <<
" fStatusIsClean = false;" << std::endl;
2981 fout <<
" }" << std::endl;
2983 fout <<
" if (theInputVars.size() != fNvars) {" << std::endl;
2984 fout <<
" std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": mismatch in number of input values: \"" << std::endl;
2985 fout <<
" << theInputVars.size() << \" != \" << fNvars << std::endl;" << std::endl;
2986 fout <<
" fStatusIsClean = false;" << std::endl;
2987 fout <<
" }" << std::endl;
2989 fout <<
" // validate input variables" << std::endl;
2990 fout <<
" for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {" << std::endl;
2991 fout <<
" if (theInputVars[ivar] != inputVars[ivar]) {" << std::endl;
2992 fout <<
" std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": mismatch in input variable names\" << std::endl" << std::endl;
2993 fout <<
" << \" for variable [\" << ivar << \"]: \" << theInputVars[ivar].c_str() << \" != \" << inputVars[ivar] << std::endl;" << std::endl;
2994 fout <<
" fStatusIsClean = false;" << std::endl;
2995 fout <<
" }" << std::endl;
2996 fout <<
" }" << std::endl;
2998 fout <<
" // initialize min and max vectors (for normalisation)" << std::endl;
3000 fout <<
" fVmin[" << ivar <<
"] = " << std::setprecision(15) <<
GetXmin( ivar ) <<
";" << std::endl;
3001 fout <<
" fVmax[" << ivar <<
"] = " << std::setprecision(15) <<
GetXmax( ivar ) <<
";" << std::endl;
3004 fout <<
" // initialize input variable types" << std::endl;
3009 fout <<
" // initialize constants" << std::endl;
3010 fout <<
" Initialize();" << std::endl;
3013 fout <<
" // initialize transformation" << std::endl;
3014 fout <<
" InitTransform();" << std::endl;
3016 fout <<
" }" << std::endl;
3018 fout <<
" // destructor" << std::endl;
3019 fout <<
" virtual ~" << className <<
"() {" << std::endl;
3020 fout <<
" Clear(); // method-specific" << std::endl;
3021 fout <<
" }" << std::endl;
3023 fout <<
" // the classifier response" << std::endl;
3024 fout <<
" // \"inputValues\" is a vector of input values in the same order as the " << std::endl;
3025 fout <<
" // variables given to the constructor" << std::endl;
3026 fout <<
" double GetMvaValue( const std::vector<double>& inputValues ) const;" << std::endl;
3028 fout <<
" private:" << std::endl;
3030 fout <<
" // method-specific destructor" << std::endl;
3031 fout <<
" void Clear();" << std::endl;
3034 fout <<
" // input variable transformation" << std::endl;
3036 fout <<
" void InitTransform();" << std::endl;
3037 fout <<
" void Transform( std::vector<double> & iv, int sigOrBgd ) const;" << std::endl;
3040 fout <<
" // common member variables" << std::endl;
3041 fout <<
" const char* fClassName;" << std::endl;
3043 fout <<
" const size_t fNvars;" << std::endl;
3044 fout <<
" size_t GetNvar() const { return fNvars; }" << std::endl;
3045 fout <<
" char GetType( int ivar ) const { return fType[ivar]; }" << std::endl;
3047 fout <<
" // normalisation of input variables" << std::endl;
3048 fout <<
" const bool fIsNormalised;" << std::endl;
3049 fout <<
" bool IsNormalised() const { return fIsNormalised; }" << std::endl;
3050 fout <<
" double fVmin[" <<
GetNvar() <<
"];" << std::endl;
3051 fout <<
" double fVmax[" <<
GetNvar() <<
"];" << std::endl;
3052 fout <<
" double NormVariable( double x, double xmin, double xmax ) const {" << std::endl;
3053 fout <<
" // normalise to output range: [-1, 1]" << std::endl;
3054 fout <<
" return 2*(x - xmin)/(xmax - xmin) - 1.0;" << std::endl;
3055 fout <<
" }" << std::endl;
3057 fout <<
" // type of input variable: 'F' or 'I'" << std::endl;
3058 fout <<
" char fType[" <<
GetNvar() <<
"];" << std::endl;
3060 fout <<
" // initialize internal variables" << std::endl;
3061 fout <<
" void Initialize();" << std::endl;
3062 fout <<
" double GetMvaValue__( const std::vector<double>& inputValues ) const;" << std::endl;
3063 fout <<
"" << std::endl;
3064 fout <<
" // private members (method specific)" << std::endl;
3069 fout <<
" inline double " << className <<
"::GetMvaValue( const std::vector<double>& inputValues ) const" << std::endl;
3070 fout <<
" {" << std::endl;
3071 fout <<
" // classifier response value" << std::endl;
3072 fout <<
" double retval = 0;" << std::endl;
3074 fout <<
" // classifier response, sanity check first" << std::endl;
3075 fout <<
" if (!IsStatusClean()) {" << std::endl;
3076 fout <<
" std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": cannot return classifier response\"" << std::endl;
3077 fout <<
" << \" because status is dirty\" << std::endl;" << std::endl;
3078 fout <<
" retval = 0;" << std::endl;
3079 fout <<
" }" << std::endl;
3080 fout <<
" else {" << std::endl;
3081 fout <<
" if (IsNormalised()) {" << std::endl;
3082 fout <<
" // normalise variables" << std::endl;
3083 fout <<
" std::vector<double> iV;" << std::endl;
3084 fout <<
" iV.reserve(inputValues.size());" << std::endl;
3085 fout <<
" int ivar = 0;" << std::endl;
3086 fout <<
" for (std::vector<double>::const_iterator varIt = inputValues.begin();" << std::endl;
3087 fout <<
" varIt != inputValues.end(); varIt++, ivar++) {" << std::endl;
3088 fout <<
" iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));" << std::endl;
3089 fout <<
" }" << std::endl;
3093 fout <<
" Transform( iV, -1 );" << std::endl;
3095 fout <<
" retval = GetMvaValue__( iV );" << std::endl;
3096 fout <<
" }" << std::endl;
3097 fout <<
" else {" << std::endl;
3101 fout <<
" std::vector<double> iV;" << std::endl;
3102 fout <<
" int ivar = 0;" << std::endl;
3103 fout <<
" for (std::vector<double>::const_iterator varIt = inputValues.begin();" << std::endl;
3104 fout <<
" varIt != inputValues.end(); varIt++, ivar++) {" << std::endl;
3105 fout <<
" iV.push_back(*varIt);" << std::endl;
3106 fout <<
" }" << std::endl;
3107 fout <<
" Transform( iV, -1 );" << std::endl;
3108 fout <<
" retval = GetMvaValue__( iV );" << std::endl;
3111 fout <<
" retval = GetMvaValue__( inputValues );" << std::endl;
3113 fout <<
" }" << std::endl;
3114 fout <<
" }" << std::endl;
3116 fout <<
" return retval;" << std::endl;
3117 fout <<
" }" << std::endl;
3133 std::streambuf* cout_sbuf = std::cout.rdbuf();
3134 std::ofstream* o = 0;
3135 if (
gConfig().WriteOptionsReference()) {
3141 std::cout.rdbuf( o->rdbuf() );
3148 <<
"================================================================" 3152 <<
"H e l p f o r M V A m e t h o d [ " <<
GetName() <<
" ] :" 3157 Log() <<
"Help for MVA method [ " <<
GetName() <<
" ] :" <<
Endl;
3165 Log() <<
"<Suppress this message by specifying \"!H\" in the booking option>" <<
Endl;
3167 <<
"================================================================" 3174 Log() <<
"# End of Message___" <<
Endl;
3177 std::cout.rdbuf( cout_sbuf );
3262 if (mvaRes !=
NULL) {
3265 TH1D *mva_s_tr =
dynamic_cast<TH1D*
> (mvaRes->
GetHist(
"MVA_TRAIN_S"));
3266 TH1D *mva_b_tr =
dynamic_cast<TH1D*
> (mvaRes->
GetHist(
"MVA_TRAIN_B"));
3268 if ( !mva_s || !mva_b || !mva_s_tr || !mva_b_tr)
return -1;
3270 if (SorB ==
's' || SorB ==
'S')
virtual void DeclareOptions()=0
Types::EAnalysisType fAnalysisType
virtual Int_t Write(const char *name=0, Int_t option=0, Int_t bufsize=0)
Write this object to the current directory.
virtual void AddClassifierOutputProb(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
virtual Int_t FindBin(Double_t x, Double_t y=0, Double_t z=0)
Return Global bin number corresponding to x,y,z.
virtual void Scale(Double_t c1=1, Option_t *option="")
Multiply this histogram by a constant c1.
void AddOptionsXMLTo(void *parent) const
write options to XML file
virtual Int_t Fill(Double_t x)
Increment bin with abscissa X by 1.
virtual void SetTuneParameters(std::map< TString, Double_t > tuneParameters)
set the tuning parameters accoding to the argument This is just a dummy .
const TString & GetWeightFileDir() const
virtual const std::vector< Float_t > & GetMulticlassValues()
void AddInfoItem(void *gi, const TString &name, const TString &value) const
xml writing
#define TMVA_VERSION_CODE
MsgLogger & Endl(MsgLogger &ml)
TH1 * GetHist(const TString &alias) const
Bool_t GetLine(std::istream &fin, char *buf)
reads one line from the input stream checks for certain keywords and interprets the line if keywords ...
void ReadOptionsFromXML(void *node)
void ReadXML(void *pdfnode)
XML file reading.
virtual Double_t GetBinContent(Int_t bin) const
Return content of bin number bin.
TString GetTrainingROOTVersionString() const
calculates the ROOT version string from the training version code on the fly
void AddOutput(Types::ETreeType type, Types::EAnalysisType analysisType)
VariableInfo & AddTarget(const TString &expression, const TString &title, const TString &unit, Double_t min, Double_t max, Bool_t normalized=kTRUE, void *external=0)
add a variable (can be a complex expression) to the set of variables used in the MV analysis ...
void AddPoint(Double_t x, Double_t y1, Double_t y2)
This function is used only in 2 TGraph case, and it will add new data points to graphs.
Bool_t fIgnoreNegWeightsInTraining
virtual const char * WorkingDirectory()
Return working directory.
void ReadStateFromXML(void *parent)
std::vector< VariableInfo > & GetSpectatorInfos()
virtual Double_t GetMvaValue(Double_t *errLower=0, Double_t *errUpper=0)=0
Collectable string class.
virtual Int_t GetMaximumBin() const
Return location of bin with maximum value in the range.
virtual Double_t GetValueForRoot(Double_t)
returns efficiency as function of cut
std::vector< TGraph * > fGraphs
const TString & GetExpression() const
void ReadOptionsFromStream(std::istream &istr)
read option back from the weight file
TString & ReplaceAll(const TString &s1, const TString &s2)
const char * GetName() const
void CheckForUnusedOptions() const
checks for unused options in option string
void BuildPDF(const TH1 *theHist)
virtual void WriteEvaluationHistosToFile(Types::ETreeType treetype)
writes all MVA evaluation histograms to file
TString GetTrainingTMVAVersionString() const
calculates the TMVA version string from the training version code on the fly
UInt_t GetNClasses() const
virtual std::map< TString, Double_t > OptimizeTuningParameters(TString fomType="ROCIntegral", TString fitType="FitGA")
call the Optimzier with the set of paremeters and ranges that are meant to be tuned.
TString fVariableTransformTypeString
XMLDocPointer_t NewDoc(const char *version="1.0")
creates new xml document with provided version
OptionBase * DeclareOptionRef(T &ref, const TString &name, const TString &desc="")
TransformationHandler * fTransformationPointer
Types::ESBType fVariableTransformType
A ROOT file is a suite of consecutive data records (TKey instances) with a well defined format...
A TMultiGraph is a collection of TGraph (or derived) objects.
UInt_t TreeIndex(Types::ETreeType type) const
virtual Int_t GetQuantiles(Int_t nprobSum, Double_t *q, const Double_t *probSum=0)
Compute Quantiles for this histogram Quantile x_q of a probability distribution Function F is defined...
virtual int MakeDirectory(const char *name)
Make a directory.
const TString & GetOriginalVarName(Int_t ivar) const
virtual TObject * Get(const char *namecycle)
Return pointer to object identified by namecycle.
void DrawProgressBar(Int_t, const TString &comment="")
draws progress bar in color or B&W caution:
virtual Double_t GetMaximumSignificance(Double_t SignalEvents, Double_t BackgroundEvents, Double_t &optimal_significance_value) const
plot significance, S/Sqrt(S^2 + B^2), curve for given number of signal and background events; returns...
virtual const std::vector< Float_t > & GetRegressionValues()
Bool_t IsNormalised() const
static Bool_t AddDirectoryStatus()
Static function: cannot be inlined on Windows/NT.
tomato 1-D histogram with a float per channel (see TH1 documentation)}
void SetTrainTime(Double_t trainTime)
TMultiGraph * fMultiGraph
TransformationHandler & GetTransformationHandler(Bool_t takeReroutedIfAvailable=true)
Short_t Min(Short_t a, Short_t b)
void ToLower()
Change string to lower-case.
virtual Double_t GetKSTrainingVsTest(Char_t SorB, TString opt="X")
virtual void SetYTitle(const char *title)
virtual TDirectory * mkdir(const char *name, const char *title="")
Create a sub-directory and return a pointer to the created directory.
virtual void TestMulticlass()
test multiclass classification
ECutOrientation GetCutOrientation() const
virtual Int_t GetNbinsX() const
std::vector< TString > * fInputVars
#define ROOT_VERSION_CODE
void ReadTargetsFromXML(void *tarnode)
read target info from XML
Double_t GetWeight() const
return the event weight - depending on whether the flag IgnoreNegWeightsInTraining is or not...
TString GetElapsedTime(Bool_t Scientific=kTRUE)
UInt_t GetNVariables() const
Bool_t BeginsWith(const char *s, ECaseCompare cmp=kExact) const
Double_t GetTrainingSumSignalWeights()
void WriteOptionsToStream(std::ostream &o, const TString &prefix) const
write options to output stream (e.g. in writing the MVA weight files
void FreeDoc(XMLDocPointer_t xmldoc)
frees allocated document data and deletes document itself
virtual TObject * At(Int_t idx) const
Returns the object at position idx. Returns 0 if idx is out of range.
const TString & GetReferenceFile() const
virtual Bool_t IsSignalLike()
uses a pre-set cut on the MVA output (SetSignalReferenceCut and SetSignalReferenceCutOrientation) for...
const TString & GetMethodName() const
void CreateMVAPdfs()
Create PDFs of the MVA output variables.
static void AddDirectory(Bool_t add=kTRUE)
Sets the flag controlling the automatic add of histograms in memory.
TString GetWeightFileName() const
retrieve weight file name
Float_t GetValue(UInt_t ivar) const
return value of i'th variable
void ReadVariablesFromXML(void *varnode)
read variable info from XML
static TFile * Open(const char *name, Option_t *option="", const char *ftitle="", Int_t compress=1, Int_t netopt=0)
Create / open a file.
virtual const char * GetName() const
Returns name of object.
const char * Data() const
Types::EAnalysisType GetAnalysisType() const
static void SetIsTraining(Bool_t)
when this static function is called, it sets the flag whether events with negative event weight shoul...
DataSetInfo & fDataSetInfo
static TString Format(const char *fmt,...)
Static method which formats a string using a printf style format descriptor and return a TString...
ECutOrientation fCutOrientation
void WriteStateToStream(std::ostream &tf) const
general method used in writing the header of the weight files where the used variables, variable transformation type etc.
virtual ~MethodBase()
destructor
Bool_t IsSignal(const Event *ev) const
void DocSetRootElement(XMLDocPointer_t xmldoc, XMLNodePointer_t xmlnode)
set main (root) node for document
virtual std::vector< Double_t > GetMvaValues(Long64_t firstEvt=0, Long64_t lastEvt=-1, Bool_t logProgress=false)
get all the MVA values for the events of the current Data type
UInt_t GetNTargets() const
void WriteVarsToStream(std::ostream &tf, const TString &prefix="") const
write the list of variables (name, min, max) for a given data transformation method to the stream ...
MethodBase(const TString &jobName, Types::EMVA methodType, const TString &methodTitle, DataSetInfo &dsi, const TString &theOption="")
standard constructor
void ClearGraphs()
This function sets the point number to 0 for all graphs.
void ReadStateFromFile()
Function to read options and weights from file.
virtual void MakeClass(const TString &classFileName=TString("")) const
create reader class for method (classification only at present)
std::vector< Float_t > * GetValueVector()
~IPythonInteractive()
standard destructor
virtual void AddClassifierOutput(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
Bool_t DoMulticlass() const
void ReadClassesFromXML(void *clsnode)
read number of classes from XML
virtual void ParseOptions()
options parser
void SetupMethod()
setup of methods
TH1 * GetSmoothedHist() const
UInt_t GetNEvents() const
temporary event when testing on a different DataSet than the own one
void Init(std::vector< TString > &graphTitles)
This function gets some title and it creates a TGraph for every title.
void SetOptions(const TString &s)
virtual UserGroup_t * GetUserInfo(Int_t uid)
Returns all user info in the UserGroup_t structure.
XMLDocPointer_t ParseString(const char *xmlstring)
parses content of string and tries to produce xml structures
Int_t Atoi() const
Return integer value of string.
void SetMinType(EMsgType minType)
Types::EMVA GetMethodType() const
void SetCurrentEvent(Long64_t ievt) const
virtual void AddWeightsXMLTo(void *parent) const =0
virtual void ProcessOptions()=0
virtual Double_t GetProba(const Event *ev)
virtual void AddBinContent(Int_t bin)
Increment bin content by 1.
void ValidatePDF(TH1 *original=0) const
comparison of original histogram with reference PDF
std::vector< VariableInfo > & GetTargetInfos()
virtual Double_t GetEfficiency(const TString &, Types::ETreeType, Double_t &err)
fill background efficiency (resp.
void CreateVariableTransforms(const TString &trafoDefinition, TMVA::DataSetInfo &dataInfo, TMVA::TransformationHandler &transformationHandler, TMVA::MsgLogger &log)
Bool_t HasMVAPdfs() const
virtual std::vector< Float_t > GetMulticlassEfficiency(std::vector< std::vector< Float_t > > &purity)
Double_t Root(Double_t refValue)
Root finding using Brents algorithm; taken from CERNLIB function RZERO.
UInt_t fTMVATrainingVersion
TransformationHandler fTransformation
void ReadStateFromXMLString(const char *xmlstr)
for reading from memory
UInt_t GetNVariables() const
void AddClassesXMLTo(void *parent) const
write class info to XML
virtual void ReadWeightsFromXML(void *wghtnode)=0
Int_t GetHistNBins(Int_t evtNum=0)
void SaveDoc(XMLDocPointer_t xmldoc, const char *filename, Int_t layout=1)
store document content to file if layout<=0, no any spaces or newlines will be placed between xmlnode...
TString fWeightFileExtension
virtual Double_t GetBinCenter(Int_t bin) const
Return bin center for 1D histogram.
virtual Double_t GetMean(Int_t axis=1) const
For axis = 1,2 or 3 returns the mean value of the histogram along X,Y or Z axis.
Results * GetResults(const TString &, Types::ETreeType type, Types::EAnalysisType analysistype)
TString info(resultsName+"/"); switch(type) { case Types::kTraining: info += "kTraining/"; break; cas...
Bool_t EndsWith(const char *pat, ECaseCompare cmp=kExact) const
Return true if string ends with the specified string.
Class to manage histogram axis.
R__EXTERN TSystem * gSystem
virtual const char * GetBuildNode() const
Return the build node name.
TDirectory * fMethodBaseDir
UInt_t fROOTTrainingVersion
Bool_t DoesExist(const TString &alias) const
void ReadVarsFromStream(std::istream &istr)
Read the variables (name, min, max) for a given data transformation method from the stream...
TH1 * GetOriginalHist() const
virtual void WriteMonitoringHistosToFile() const
write special monitoring histograms to file dummy implementation here --------------— ...
const Int_t NBIN_HIST_HIGH
tomato 2-D histogram with a float per channel (see TH1 documentation)}
class TMVA::Config::VariablePlotting fVariablePlotting
ClassInfo * GetClassInfo(Int_t clNum) const
void Statistics(Types::ETreeType treeType, const TString &theVarName, Double_t &, Double_t &, Double_t &, Double_t &, Double_t &, Double_t &)
calculates rms,mean, xmin, xmax of the event variable this can be either done for the variables as th...
virtual void SetBinContent(Int_t bin, Double_t content)
Set bin content see convention for numbering bins in TH1::GetBin In case the bin number is greater th...
Float_t GetAchievablePur(UInt_t cls)
void SetReadingVersion(UInt_t rv)
void SetValue(Float_t value, Int_t ievt)
set MVA response
virtual Double_t KolmogorovTest(const TH1 *h2, Option_t *option="") const
Statistical test of compatibility in shape between this histogram and h2, using Kolmogorov test...
const Event * GetEvent() const
char * Form(const char *fmt,...)
void ReadFromXML(void *varnode)
read VariableInfo from stream
virtual const char * GetName() const
Returns name of object.
TSubString Strip(EStripType s=kTrailing, char c= ' ') const
Return a substring of self stripped at beginning and/or end.
virtual Double_t GetSignificance() const
compute significance of mean difference: significance = |&lt;S&gt; - &lt;B&gt;|/Sqrt(RMS_S2 + RMS_B2) ...
TSpline * fSplTrainEffBvsS
Types::ETreeType GetCurrentType() const
void DeclareBaseOptions()
define the options (their key words) that can be set in the option string here the options valid for ...
UInt_t GetTrainingROOTVersionCode() const
tomato 1-D histogram with a double per channel (see TH1 documentation)}
Double_t GetTrainTime() const
void ProcessBaseOptions()
the option string is decoded, for available options see "DeclareOptions"
Double_t ElapsedSeconds(void)
computes elapsed time in seconds
std::vector< const std::vector< TMVA::Event * > * > fEventCollections
TString fVerbosityLevelString
void WriteStateToFile() const
write options and weights to file note that each one text file for the main configuration information...
const Event * GetEvent() const
void AddTargetsXMLTo(void *parent) const
write target info to XML
void DeclareOptions()
define the options (their key words) that can be set in the option string know options: PDFInterpol[i...
void AddVarsXMLTo(void *parent) const
write variable info to XML
void SetCurrentType(Types::ETreeType type) const
TList * GetStorage() const
static void SetIgnoreNegWeightsInTraining(Bool_t)
when this static function is called, it sets the flag whether events with negative event weight shoul...
XMLDocPointer_t ParseFile(const char *filename, Int_t maxbuf=100000)
Parses content of file and tries to produce xml structures.
virtual void MakeClassSpecific(std::ostream &, const TString &="") const
const std::vector< TMVA::Event * > & GetEventCollection(Types::ETreeType type)
returns the event collection (i.e.
virtual Int_t GetSize() const
virtual void CheckSetup()
check may be overridden by derived class (sometimes, eg, fitters are used which can only be implement...
void SetTestTime(Double_t testTime)
virtual void AddRegressionOutput(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
virtual void GetHelpMessage() const =0
Double_t GetVal(Double_t x) const
returns value PDF(x)
std::vector< Double_t > GetBestMultiClassCuts(UInt_t targetClass)
void SetWeightFileName(TString)
set the weight file name (deprecated)
Describe directory structure in memory.
virtual Double_t GetROCIntegral(TH1D *histS, TH1D *histB) const
calculate the area (integral) under the ROC curve as a overall quality measure of the classification ...
std::vector< Float_t > * fMulticlassReturnVal
Double_t GetTrainingSumBackgrWeights()
TDirectory * BaseDir() const
returns the ROOT directory where info/histograms etc of the corresponding MVA method instance are sto...
static RooMathCoreReg dummy
Long64_t GetNEvents(Types::ETreeType type=Types::kMaxTreeType) const
const Bool_t Use_Splines_for_Eff_
DataSetInfo & DataInfo() const
VariableInfo & GetVariableInfo(Int_t i)
void AddPreDefVal(const T &)
IPythonInteractive()
standard constructor
virtual void GetRegressionDeviation(UInt_t tgtNum, Types::ETreeType type, Double_t &stddev, Double_t &stddev90Percent) const
ClassInfo * AddClass(const TString &className)
void AddXMLTo(void *parent)
XML file writing.
Double_t GetSignalReferenceCutOrientation() const
virtual Double_t Eval(Double_t x) const
returns linearly interpolated TGraph entry around x
Bool_t fConstructedFromWeightFile
void ProcessSetup()
process all options the "CheckForUnusedOptions" is done in an independent call, since it may be overr...
TString fVarTransformString
virtual void AddMulticlassOutput(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
void SetConfigName(const char *n)
Float_t GetTarget(UInt_t itgt) const
void * GetExternalLink() const
Float_t GetAchievableEff(UInt_t cls)
void SetSource(const std::string &source)
virtual Double_t Eval(Double_t x) const =0
Bool_t DoRegression() const
virtual Double_t GetSeparation(TH1 *, TH1 *) const
compute "separation" defined as <s2> = (1/2) Int_-oo..+oo { (S(x) - B(x))^2/(S(x) + B(x)) dx } ...
virtual void MakeClassSpecificHeader(std::ostream &, const TString &="") const
virtual std::vector< Float_t > GetMulticlassTrainingEfficiency(std::vector< std::vector< Float_t > > &purity)
virtual void SetXTitle(const char *title)
virtual void TestRegression(Double_t &bias, Double_t &biasT, Double_t &dev, Double_t &devT, Double_t &rms, Double_t &rmsT, Double_t &mInf, Double_t &mInfT, Double_t &corr, Types::ETreeType type)
calculate <sum-of-deviation-squared> of regression output versus "true" value from test sample ...
virtual Bool_t cd(const char *path=0)
Change current directory to "this" directory.
const TString & GetJobName() const
void ReadFromStream(std::istream &istr)
Double_t GetXmax(Int_t ivar) const
void PrintHelpMessage() const
prints out method-specific help method
UInt_t GetTrainingTMVAVersionCode() const
virtual void DeclareCompatibilityOptions()
options that are used ONLY for the READER to ensure backward compatibility they are hence without any...
Short_t Max(Short_t a, Short_t b)
const TString & GetOptions() const
void AddToXML(void *varnode)
write class to XML
Bool_t Contains(const char *pat, ECaseCompare cmp=kExact) const
Double_t fSignalReferenceCut
the data set information (sometimes needed)
TDirectory * MethodBaseDir() const
returns the ROOT directory where all instances of the corresponding MVA method are stored ...
void SetWeightFileDir(TString fileDir)
set directory of weight file
XMLNodePointer_t DocGetRootElement(XMLDocPointer_t xmldoc)
returns root node of document
void AddSpectatorsXMLTo(void *parent) const
write spectator info to XML
UInt_t GetNSpectators(bool all=kTRUE) const
virtual void Sumw2(Bool_t flag=kTRUE)
Create structure to store sum of squares of weights.
A Graph is a graphics object made of two arrays X and Y with npoints each.
virtual TDirectory * GetDirectory(const char *namecycle, Bool_t printError=false, const char *funcname="GetDirectory")
Find a directory using apath.
Int_t FindVarIndex(const TString &) const
find variable by name
you should not use this method at all Int_t Int_t Double_t Double_t Double_t Int_t Double_t Double_t Double_t Double_t b
THist< 1, double, THistStatContent, THistStatUncertainty > TH1D
virtual Double_t GetTrainingEfficiency(const TString &)
Double_t GetIntegral(Double_t xmin, Double_t xmax)
computes PDF integral within given ranges
const std::vector< Event * > & GetEventCollection(Types::ETreeType type=Types::kMaxTreeType) const
void ReadSpectatorsFromXML(void *specnode)
read spectator info from XML
std::vector< Bool_t > * GetValueVectorTypes()
void InitBase()
default initialization called by all constructors
std::vector< Float_t > * fRegressionReturnVal
Long64_t GetNTrainingEvents() const
Double_t GetSignalReferenceCut() const
void Store(TObject *obj, const char *alias=0)
virtual Double_t GetRarity(Double_t mvaVal, Types::ESBType reftype=Types::kBackground) const
compute rarity: R(x) = Integrate_[-oo..x] { PDF(x') dx' } where PDF(x) is the PDF of the classifier's...
virtual void Add(TGraph *graph, Option_t *chopt="")
Add a new graph to the list of graphs.
Double_t Sqrt(Double_t x)
TString GetMethodTypeName() const
Ssiz_t Index(const char *pat, Ssiz_t i=0, ECaseCompare cmp=kExact) const
virtual void ReadWeightsFromStream(std::istream &)=0
const char * AsString() const
Return the date & time as a string (ctime() format).
virtual Double_t GetMaximum(Double_t maxval=FLT_MAX) const
Return maximum value smaller than maxval of bins in the range, unless the value has been overridden b...
Int_t Fill(Double_t)
Invalid Fill method.
THist< 2, float, THistStatContent, THistStatUncertainty > TH2F
void SetTestvarName(const TString &v="")
double norm(double *x, double *p)
void WriteStateToXML(void *parent) const
general method used in writing the header of the weight files where the used variables, variable transformation type etc.
virtual void TestClassification()
initialization
void ReadStateFromStream(std::istream &tf)
read the header from the weight files of the different MVA methods
virtual Int_t Write(const char *name=0, Int_t option=0, Int_t bufsize=0)
Write all objects in this collection.
std::vector< VariableInfo > & GetVariableInfos()
void SetExternalLink(void *p)
virtual void SetAnalysisType(Types::EAnalysisType type)
void Resize(Int_t entries)
Ssiz_t First(char c) const
Find first occurrence of a character c.
void NoErrorCalc(Double_t *const err, Double_t *const errUpper)
void SetSignalReferenceCut(Double_t cut)
void Resize(Ssiz_t n)
Resize the string. Truncate or add blanks as necessary.
This class stores the date and time with a precision of one second in an unsigned 32 bit word (950130...
void SetConfigDescription(const char *d)
Double_t GetXmin(Int_t ivar) const
virtual void Close(Option_t *option="")
Close a file.
const TString & GetTestvarName() const