// DataSetFactory constructor: initialise members
fScaleWithPreselEff(0),
fLogger( new MsgLogger("DataSetFactory", kINFO) )
std::vector<TTreeFormula*>::const_iterator formIt;

for (formIt = fInputFormulas.begin()    ; formIt != fInputFormulas.end()    ; ++formIt) if (*formIt) delete *formIt;
for (formIt = fTargetFormulas.begin()   ; formIt != fTargetFormulas.end()   ; ++formIt) if (*formIt) delete *formIt;
for (formIt = fCutFormulas.begin()      ; formIt != fCutFormulas.end()      ; ++formIt) if (*formIt) delete *formIt;
for (formIt = fWeightFormula.begin()    ; formIt != fWeightFormula.end()    ; ++formIt) if (*formIt) delete *formIt;
for (formIt = fSpectatorFormulas.begin(); formIt != fSpectatorFormulas.end(); ++formIt) if (*formIt) delete *formIt;
DataSet* ds = BuildInitialDataSet( dsi, dataInput );

if (ds->GetNEvents() > 1 && fComputeCorrelations) {
// BuildDynamicDataSet(): create a DataSet with one Event which uses dynamic
// variables (pointers to the variables of the user's event class)
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName())
      << "Build DataSet consisting of one Event with dynamically changing variables" << Endl;

std::vector<Float_t*>* evdyn = new std::vector<Float_t*>(0);

if (varinfos.empty())
   Log() << kFATAL << Form("Dataset[%s] : ",dsi.GetName())
         << "Dynamic data set cannot be built, since no variable informations are present. Apparently no variables have been set. This should not happen, please contact the TMVA authors." << Endl;

std::vector<VariableInfo>::iterator it = varinfos.begin(), itEnd = varinfos.end();
for (; it != itEnd; ++it) {
   Float_t* external = (Float_t*)(*it).GetExternalLink();
   if (external == 0)
      Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName())
            << "The link to the external variable is NULL while I am trying to build a dynamic data set. In this case fTmpEvent from MethodBase HAS TO BE USED in the method to get useful values in variables." << Endl;
   else evdyn->push_back( external );
}

// the spectators are handled the same way
it = spectatorinfos.begin();
for (; it != spectatorinfos.end(); ++it) evdyn->push_back( (Float_t*)(*it).GetExternalLink() );

TMVA::Event* ev = new Event( (const std::vector<Float_t*>*&)evdyn, varinfos.size() );
std::vector<Event*>* newEventVector = new std::vector<Event*>;
newEventVector->push_back( ev );
// ... (the collection is registered with the DataSet via SetEventCollection
//      before the temporary vector is deleted)
delete newEventVector;
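// Note: the Event built above stores *pointers* (the external links) to the
// user's variables rather than copies, so this single dynamic event always
// reflects whatever values the caller writes into those variables.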
// BuildInitialDataSet(): if there are no input entries, create a DataSet
// with one Event which uses dynamic variables
if (dataInput.GetEntries() == 0) return BuildDynamicDataSet( dsi );

// register the classes in the DataSetInfo
std::vector<TString>* classList = dataInput.GetClassList();
for (std::vector<TString>::iterator it = classList->begin(); it != classList->end(); ++it) {
   dsi.AddClass( *it );
}

InitOptions( dsi, eventCounts, normMode, splitSeed, splitMode, mixMode );
BuildEventVector( dsi, dataInput, tmpEventVector, eventCounts );

DataSet* ds = MixEvents( dsi, tmpEventVector, eventCounts,
                         splitMode, mixMode, normMode, splitSeed );

if (showCollectedOutput) {
// CheckTTreeFormula(): checks a TTreeFormula for problems
Log() << kFATAL << "Expression " << expression.Data()
      << " could not be resolved to a valid formula. " << Endl;
// ...
Log() << kWARNING << "Expression: " << expression.Data()
      << " does not provide data for this event. "
      << "This event is not taken into account. --> please check if you use as a variable "
      << "an entry of an array which is not filled for some events "
      << "(e.g. arr[4] when arr has only 3 elements)." << Endl;
Log() << kWARNING << "If you want to take the event into account you can do something like: "
      << "\"Alt$(arr[4],0)\" where in cases where arr doesn't have a 4th element, "
      << " 0 is taken as an alternative." << Endl;
// ...
// loop over the leaves used by the formula, e.g. to detect non-terminal branches
for (int i = 0, iEnd = ttf->GetNcodes(); i < iEnd; ++i) {
   // ...
}
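// Illustrative sketch (not part of this file): applying the Alt$ guard
// suggested above when booking an array-type variable with a DataLoader.
// The 'dataloader' object and the branch name 'arr' are hypothetical.
dataloader->AddVariable( "Alt$(arr[4],0)", 'F' );  // yields 0 whenever arr has fewer than 5 elements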
// ChangeToNewTree(): rebuild all TTreeFormula objects for the current tree
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "transform input variables" << Endl;
std::vector<TTreeFormula*>::const_iterator formIt, formItEnd;
for (formIt = fInputFormulas.begin(), formItEnd = fInputFormulas.end(); formIt != formItEnd; ++formIt)
   if (*formIt) delete *formIt;
fInputFormulas.clear();
// ... (a TTreeFormula 'ttf' is created for each input-variable expression, then)
fInputFormulas.push_back( ttf );

Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "transform regression targets" << Endl;
for (formIt = fTargetFormulas.begin(), formItEnd = fTargetFormulas.end(); formIt != formItEnd; ++formIt)
   if (*formIt) delete *formIt;
fTargetFormulas.clear();
// ...
fTargetFormulas.push_back( ttf );

Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "transform spectator variables" << Endl;
for (formIt = fSpectatorFormulas.begin(), formItEnd = fSpectatorFormulas.end(); formIt != formItEnd; ++formIt)
   if (*formIt) delete *formIt;
fSpectatorFormulas.clear();
// ...
fSpectatorFormulas.push_back( ttf );

// the cuts
for (formIt = fCutFormulas.begin(), formItEnd = fCutFormulas.end(); formIt != formItEnd; ++formIt)
   if (*formIt) delete *formIt;
fCutFormulas.clear();
// ...
Bool_t worked = CheckTTreeFormula( ttf, tmpCutExp, hasDollar );
// ...
fCutFormulas.push_back( ttf );

Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "transform weights" << Endl;
for (formIt = fWeightFormula.begin(), formItEnd = fWeightFormula.end(); formIt != formItEnd; ++formIt)
   if (*formIt) delete *formIt;
fWeightFormula.clear();
// ... (classes without a weight expression get a null placeholder)
fWeightFormula.push_back( 0 );
// ... (otherwise the weight formula is built and checked)
ttf = new TTreeFormula( "FormulaWeight", tmpWeight, tr );
Bool_t worked = CheckTTreeFormula( ttf, tmpWeight, hasDollar );
// ...
fWeightFormula.push_back( ttf );

// now enable only the branches that are actually used
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "enable branches: input variables" << Endl;
for (formIt = fInputFormulas.begin(); formIt != fInputFormulas.end(); ++formIt) {
   // ... (SetBranchStatus(..., 1) for every branch the formula uses)
}
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "enable branches: targets" << Endl;
for (formIt = fTargetFormulas.begin(); formIt != fTargetFormulas.end(); ++formIt) {
   // ...
}
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "enable branches: spectators" << Endl;
for (formIt = fSpectatorFormulas.begin(); formIt != fSpectatorFormulas.end(); ++formIt) {
   // ...
}
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "enable branches: cuts" << Endl;
for (formIt = fCutFormulas.begin(); formIt != fCutFormulas.end(); ++formIt) {
   // ...
}
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "enable branches: weights" << Endl;
for (formIt = fWeightFormula.begin(); formIt != fWeightFormula.end(); ++formIt) {
   // ...
}
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "tree initialized" << Endl;
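// Minimal sketch (assumed standalone context, not from this file) of the
// TTreeFormula pattern that ChangeToNewTree() sets up and BuildEventVector()
// consumes; the 'tree' pointer and the expression "pt*2" are hypothetical.
TTreeFormula form( "form", "pt*2", tree );
tree->LoadTree( entry );                 // position the underlying tree on one entry
Int_t ndata = form.GetNdata();           // number of instances (>1 for array expressions)
Double_t value = form.EvalInstance(0);   // value of the first instance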
// CalcMinMax(): determine min and max of variables, targets and spectators
for (UInt_t ivar=0; ivar<nvar ; ivar++) { min[ivar]   = FLT_MAX; max[ivar]   = -FLT_MAX; }
for (UInt_t ivar=0; ivar<ntgts; ivar++) { tgmin[ivar] = FLT_MAX; tgmax[ivar] = -FLT_MAX; }
for (UInt_t ivar=0; ivar<nvis ; ivar++) { vmin[ivar]  = FLT_MAX; vmax[ivar]  = -FLT_MAX; }

// perform event loop
for (UInt_t ivar=0; ivar<nvar; ivar++) {
   Double_t v = ev->GetValue(ivar);
   if (v < min[ivar]) min[ivar] = v;
   if (v > max[ivar]) max[ivar] = v;
}
for (UInt_t itgt=0; itgt<ntgts; itgt++) {
   Double_t v = ev->GetTarget(itgt);
   if (v < tgmin[itgt]) tgmin[itgt] = v;
   if (v > tgmax[itgt]) tgmax[itgt] = v;
}
for (UInt_t ivis=0; ivis<nvis; ivis++) {
   Double_t v = ev->GetSpectator(ivis);
   if (v < vmin[ivis]) vmin[ivis] = v;
   if (v > vmax[ivis]) vmax[ivis] = v;
}

// store the results in the DataSetInfo and warn about constant quantities
for (UInt_t ivar=0; ivar<nvar; ivar++) {
   // ... (store min/max of the variable)
   if ( TMath::Abs(max[ivar]-min[ivar]) <= FLT_MIN ) {
      // ... (warn: the variable is constant)
   }
}
for (UInt_t ivar=0; ivar<ntgts; ivar++) {
   // ... (store min/max of the target)
   if ( TMath::Abs(tgmax[ivar]-tgmin[ivar]) <= FLT_MIN ) {
      // ... (warn: the target is constant)
   }
}
for (UInt_t ivar=0; ivar<nvis; ivar++) {
   // ... (store min/max of the spectator)
}
// CalcCorrelationMatrix(): normalise the covariance matrix to a correlation matrix
TMatrixD* mat = CalcCovarianceMatrix( ds, classNumber );

for (ivar=0; ivar<nvar; ivar++) {
   for (jvar=0; jvar<nvar; jvar++) {
      if (ivar != jvar) {
         Double_t d = (*mat)(ivar, ivar)*(*mat)(jvar, jvar);
         if (d > 0) (*mat)(ivar, jvar) /= sqrt(d);
         else {
            Log() << kWARNING << Form("Dataset[%s] : ",DataSetInfo().GetName())
                  << "<GetCorrelationMatrix> Zero variances for variables "
                  << "(" << ivar << ", " << jvar << ") = " << d << Endl;
            (*mat)(ivar, jvar) = 0;
         }
      }
   }
}

for (ivar=0; ivar<nvar; ivar++) (*mat)(ivar, ivar) = 1.0;
// CalcCovarianceMatrix(): compute the covariance matrix for one class
UInt_t ivar = 0, jvar = 0;

// zero the accumulators
for (ivar=0; ivar<nvar; ivar++) {
   vec(ivar) = 0;
   for (jvar=0; jvar<nvar; jvar++) mat2(ivar, jvar) = 0;
}

// event loop: only events of the requested class enter the sums
// ...
if (ev->GetClass() != classNumber) continue;

Double_t weight = ev->GetWeight();
ic += weight; // count used events by their weight

for (ivar=0; ivar<nvar; ivar++) {
   Double_t xi = ev->GetValue(ivar);
   vec(ivar)        += xi*weight;
   mat2(ivar, ivar) += (xi*xi*weight);

   for (jvar=ivar+1; jvar<nvar; jvar++) {
      Double_t xj = ev->GetValue(jvar);
      mat2(ivar, jvar) += (xi*xj*weight);
   }
}

// fill the symmetric part of the accumulation matrix
for (ivar=0; ivar<nvar; ivar++)
   for (jvar=ivar+1; jvar<nvar; jvar++)
      mat2(jvar, ivar) = mat2(ivar, jvar);

// variance-covariance
for (ivar=0; ivar<nvar; ivar++) {
   for (jvar=0; jvar<nvar; jvar++) {
      (*mat)(ivar, jvar) = mat2(ivar, jvar)/ic - vec(ivar)*vec(jvar)/(ic*ic);
   }
}
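// For reference, the weighted estimators implemented above, with W = sum of
// event weights (accumulated in 'ic'):
//   Cov(x_i,x_j) = (1/W)*Sum[w*x_i*x_j] - (1/W^2)*Sum[w*x_i]*Sum[w*x_j]
// CalcCorrelationMatrix() then divides by the diagonal terms:
//   rho_ij = Cov(x_i,x_j) / sqrt( Cov(x_i,x_i)*Cov(x_j,x_j) )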
// InitOptions(): declare and parse the options controlling the dataset splitting
splitSpecs.SetConfigDescription( "Configuration options given in the \"PrepareForTrainingAndTesting\" call; these options define the creation of the data sets used for training and expert validation by TMVA" );

splitMode = "Random";    // the splitting mode
splitSpecs.DeclareOptionRef( splitMode, "SplitMode",
                             "Method of picking training and testing events (default: random)" );

mixMode = "SameAsSplitMode";    // the mixing mode
splitSpecs.DeclareOptionRef( mixMode, "MixMode",
                             "Method of mixing events of different classes into one dataset (default: SameAsSplitMode)" );

splitSpecs.DeclareOptionRef( splitSeed, "SplitSeed",
                             "Seed for random event shuffling" );

normMode = "EqualNumEvents";    // the weight normalisation mode
splitSpecs.DeclareOptionRef( normMode, "NormMode",
                             "Overall renormalisation of event-by-event weights used in the training (NumEvents: average weight of 1 per event, independently for signal and background; EqualNumEvents: average weight of 1 per event for signal, and sum of weights for background equal to sum of weights for signal)" );

splitSpecs.DeclareOptionRef( fScaleWithPreselEff=kFALSE, "ScaleWithPreselEff",
                             "Scale the number of requested events by the eff. of the preselection cuts (or not)" );

// per-class options: requested numbers of training/testing events
splitSpecs.DeclareOptionRef( nEventRequests.at(cl).nTrainingEventsRequested, TString("nTrain_")+clName, titleTrain );
splitSpecs.DeclareOptionRef( nEventRequests.at(cl).nTestingEventsRequested , TString("nTest_")+clName , titleTest );
splitSpecs.DeclareOptionRef( nEventRequests.at(cl).TrainTestSplitRequested , TString("TrainTestSplit_")+clName , titleTest );

splitSpecs.DeclareOptionRef( fVerboseLevel=TString("Info"), "VerboseLevel", "VerboseLevel (Debug/Verbose/Info)" );

fCorrelations = kTRUE;
splitSpecs.DeclareOptionRef( fCorrelations, "Correlations", "Boolean to show correlation output (Default: true)" );
fComputeCorrelations = kTRUE;
splitSpecs.DeclareOptionRef( fComputeCorrelations, "CalcCorrelations", "Compute correlations and also some variable statistics, e.g. min/max (Default: true )" );

// set the logger verbosity according to the parsed options
if (Verbose()) fLogger->SetMinType( kVERBOSE );
if (fVerboseLevel.CompareTo("Debug")   == 0) fLogger->SetMinType( kDEBUG );
if (fVerboseLevel.CompareTo("Verbose") == 0) fLogger->SetMinType( kVERBOSE );
if (fVerboseLevel.CompareTo("Info")    == 0) fLogger->SetMinType( kINFO );

Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
      << "\tSplitmode is: \"" << splitMode << "\" the mixmode is: \"" << mixMode << "\"" << Endl;
if (mixMode == "SAMEASSPLITMODE") mixMode = splitMode;  // the modes were upper-cased during parsing
else if (mixMode != splitMode)
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << "DataSet splitmode=" << splitMode
         << " differs from mixmode=" << mixMode << Endl;
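// Illustrative sketch (not part of this file): these options arrive here from
// user code through the option string of PrepareTrainingAndTestTree(). The
// 'dataloader' object and the cuts 'mycuts'/'mycutb' are hypothetical.
dataloader->PrepareTrainingAndTestTree( mycuts, mycutb,
   "nTrain_Signal=1000:nTrain_Background=1000:SplitMode=Random:SplitSeed=100:"
   "NormMode=NumEvents:!V" );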
// BuildEventVector(): fill the event vectors for all classes
for (size_t i=0; i<nclasses; i++) {
   eventCounts[i].varAvLength = new Float_t[nvars];
   for (UInt_t ivar=0; ivar<nvars; ivar++)
      eventCounts[i].varAvLength[ivar] = 0;
}
// NaN/inf diagnostics are counted per message (i.e. per offending formula)
std::map<TString, int> nanInfWarnings;
std::map<TString, int> nanInfErrors;

// loop over the input trees of all classes
for (UInt_t cl=0; cl<nclasses; cl++) {
   // ...
   EventStats& classEventCounts = eventCounts[cl];
   // ...
   std::vector<Float_t> vars(nvars);
   std::vector<Float_t> tgts(ntgts);
   std::vector<Float_t> vis(nvis);
   // ...
   ChangeToNewTree( currentInfo, dsi );

   // loop over events in the ntuple
   for (Long64_t evtIdx = 0; evtIdx < nEvts; evtIdx++) {
      // ... (when a TChain switches to a new tree, the formulas are re-initialised)
      ChangeToNewTree( currentInfo, dsi );
      // size of array (when variable-length arrays are used)
      Int_t sizeOfArrays = 1;
      Int_t prevArrExpr = 0;

      // check that all array-type expressions have a consistent length
      for (UInt_t ivar=0; ivar<nvars; ivar++) {
         Int_t ndata = fInputFormulas[ivar]->GetNdata();
         // ...
         if (ndata == 1) continue;
         // ...
         varIsArray[ivar] = kTRUE;
         if (sizeOfArrays == 1) {
            sizeOfArrays = ndata;
            prevArrExpr = ivar;
         }
         else if (sizeOfArrays != ndata) {
            Log() << kERROR << Form("Dataset[%s] : ",dsi.GetName())
                  << "ERROR while preparing training and testing trees:" << Endl;
            Log() << Form("Dataset[%s] : ",dsi.GetName())
                  << " multiple array-type expressions of different length were encountered" << Endl;
            Log() << Form("Dataset[%s] : ",dsi.GetName())
                  << " location of error: event " << evtIdx
                  << " in tree " << currentInfo.GetTree()->GetName()
                  << " of file " << currentInfo.GetTree()->GetCurrentFile()->GetName() << Endl;
            Log() << Form("Dataset[%s] : ",dsi.GetName())
                  << " expression " << fInputFormulas[ivar]->GetTitle() << " has "
                  << Form("Dataset[%s] : ",dsi.GetName()) << ndata << " entries, while" << Endl;
            Log() << Form("Dataset[%s] : ",dsi.GetName())
                  << " expression " << fInputFormulas[prevArrExpr]->GetTitle() << " has "
                  << Form("Dataset[%s] : ",dsi.GetName()) << fInputFormulas[prevArrExpr]->GetNdata() << " entries" << Endl;
            // ...
         }
      }
      // loop over the instances of array-type expressions ("flattening")
      for (Int_t idata = 0; idata < sizeOfArrays; idata++) {
         Bool_t contains_NaN_or_inf = kFALSE;

         // helper: flag NaN/inf values and collect a counted message per formula
         auto checkNanInf = [&](std::map<TString, int> &msgMap, Float_t value, const char *what, const char *formulaTitle) {
            if (TMath::IsNaN(value)) {
               contains_NaN_or_inf = kTRUE;
               ++msgMap[TString::Format("Dataset[%s] : %s expression resolves to indeterminate value (NaN): %s", dsi.GetName(), what, formulaTitle)];
            }
            else if (!TMath::Finite(value)) {
               contains_NaN_or_inf = kTRUE;
               ++msgMap[TString::Format("Dataset[%s] : %s expression resolves to infinite value (+inf or -inf): %s", dsi.GetName(), what, formulaTitle)];
            }
         };
         // the cut expression
         Double_t cutVal = 1.;
         formula = fCutFormulas[cl];
         if (formula) {
            Int_t ndata = formula->GetNdata();
            cutVal = (ndata == 1 ? formula->EvalInstance(0) : formula->EvalInstance(idata));
            checkNanInf( nanInfErrors, cutVal, "Cut", formula->GetTitle() );
         }

         // if the event is cut out anyway, report its NaN/inf only as a warning
         auto &nanMessages = cutVal < 0.5 ? nanInfWarnings : nanInfErrors;

         // the input variables
         for (UInt_t ivar=0; ivar<nvars; ivar++) {
            formula = fInputFormulas[ivar];
            Int_t ndata = formula->GetNdata();
            vars[ivar] = (ndata == 1 ? formula->EvalInstance(0) : formula->EvalInstance(idata));
            checkNanInf( nanMessages, vars[ivar], "Input", formula->GetTitle() );
         }

         // the targets
         for (UInt_t itrgt=0; itrgt<ntgts; itrgt++) {
            formula = fTargetFormulas[itrgt];
            Int_t ndata = formula->GetNdata();
            tgts[itrgt] = (ndata == 1 ? formula->EvalInstance(0) : formula->EvalInstance(idata));
            checkNanInf( nanMessages, tgts[itrgt], "Target", formula->GetTitle() );
         }

         // the spectators
         for (UInt_t itVis=0; itVis<nvis; itVis++) {
            formula = fSpectatorFormulas[itVis];
            Int_t ndata = formula->GetNdata();
            vis[itVis] = (ndata == 1 ? formula->EvalInstance(0) : formula->EvalInstance(idata));
            checkNanInf( nanMessages, vis[itVis], "Spectator", formula->GetTitle() );
         }

         // the weight (multiplied onto the tree/class weight already in 'weight')
         formula = fWeightFormula[cl];
         if (formula != 0) {
            Int_t ndata = formula->GetNdata();
            weight *= (ndata == 1 ? formula->EvalInstance() : formula->EvalInstance(idata));
            checkNanInf( nanMessages, weight, "Weight", formula->GetTitle() );
         }

         // ... (count the event, then reject it if it fails the cut)
         if (cutVal < 0.5) continue;
         // skip events containing NaN or +-inf
         if (contains_NaN_or_inf) {
            Log() << kWARNING << Form("Dataset[%s] : ",dsi.GetName())
                  << "NaN or +-inf in Event " << evtIdx << Endl;
            if (sizeOfArrays > 1)
               Log() << kWARNING << Form("Dataset[%s] : ",dsi.GetName())
                     << " rejected" << Endl;
            continue;
         }

         // ...
         event_v.push_back( new Event(vars, tgts, vis, cl, weight) );
   // summarise the collected NaN/inf diagnostics
   if (!nanInfWarnings.empty()) {
      Log() << kWARNING << "Found events with NaN and/or +-inf values" << Endl;
      for (const auto &warning : nanInfWarnings) {
         auto &log = Log() << kWARNING << warning.first;
         if (warning.second > 1) log << " (" << warning.second << " times)";
         log << Endl;
      }
      Log() << kWARNING << "These NaN and/or +-infs were all removed by the specified cut, continuing." << Endl;
   }

   if (!nanInfErrors.empty()) {
      Log() << kWARNING << "Found events with NaN and/or +-inf values (not removed by cut)" << Endl;
      for (const auto &error : nanInfErrors) {
         auto &log = Log() << kWARNING << error.first;
         if (error.second > 1) log << " (" << error.second << " times)";
         log << Endl;
      }
      Log() << kFATAL << "How am I supposed to train a NaN or +-inf?!" << Endl;
   }
   // statistics of the input events, before and after the preselection cuts
   Int_t maxL = dsi.GetClassNameMaxLength();

   Log() << kHEADER << Form("[%s] : ",dsi.GetName()) << "Number of events in input trees" << Endl;
   Log() << kDEBUG << "(after possible flattening of arrays):" << Endl;

   for (UInt_t cl = 0; cl < dsi.GetNClasses(); cl++) {
      Log() << kDEBUG << "    " << setiosflags(ios::left) << std::setw(maxL)
            << dsi.GetClassInfo(cl)->GetName()
            << " -- number of events : "
            << std::setw(5) << eventCounts[cl].nEvBeforeCut
            << " / sum of weights: " << std::setw(5) << eventCounts[cl].nWeEvBeforeCut << Endl;
   }

   for (UInt_t cl = 0; cl < dsi.GetNClasses(); cl++) {
      Log() << kDEBUG << "    " << dsi.GetClassInfo(cl)->GetName()
            << " tree -- total number of entries: "
            << std::setw(5) << dataInput.GetEntries( dsi.GetClassInfo(cl)->GetName() ) << Endl;
   }

   if (fScaleWithPreselEff)
      Log() << kDEBUG
            << "\tPreselection: (will affect number of requested training and testing events)" << Endl;
   else
      Log() << kDEBUG
            << "\tPreselection: (will NOT affect number of requested training and testing events)" << Endl;

   if (dsi.HasCuts()) {
      for (UInt_t cl = 0; cl < dsi.GetNClasses(); cl++) {
         Log() << kINFO << "    " << setiosflags(ios::left) << std::setw(maxL)
               << dsi.GetClassInfo(cl)->GetName()
               << " -- number of events passed: "
               << std::setw(5) << eventCounts[cl].nEvAfterCut
               << " / sum of weights: " << std::setw(5) << eventCounts[cl].nWeEvAfterCut << Endl;
         Log() << kINFO << "    " << setiosflags(ios::left) << std::setw(maxL)
               << dsi.GetClassInfo(cl)->GetName()
               << " -- efficiency : "
               << std::setw(6) << eventCounts[cl].nWeEvAfterCut/eventCounts[cl].nWeEvBeforeCut << Endl;
      }
   }
   else
      Log() << kDEBUG
            << " No preselection cuts applied on event classes" << Endl;
   if (splitMode.Contains( "RANDOM" )) {
      if ( !unspecifiedEvents.empty() ) {
         Log() << kDEBUG << "randomly shuffling "
               << unspecifiedEvents.size()
               << " events of class " << cls
               << " which are not yet associated to testing or training" << Endl;
         std::shuffle( unspecifiedEvents.begin(), unspecifiedEvents.end(), rndm );
      }
   }

   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "SPLITTING ========" << Endl;
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "---- class " << cls << Endl;
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "check number of training/testing events, requested and available number of events and for class " << cls << Endl;
   Int_t availableTraining  = eventVectorTraining.size();
   Int_t availableTesting   = eventVectorTesting.size();
   Int_t availableUndefined = eventVectorUndefined.size();

   Float_t presel_scale = 1.;
   if (fScaleWithPreselEff) {
      presel_scale = eventCounts[cls].cutScaling();
      if (presel_scale < 1)
         Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
               << " you have opted for scaling the number of requested training/testing events\n to be scaled by the preselection efficiency" << Endl;
   }
   else {
      if (eventCounts[cls].cutScaling() < 1)
         Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
               << " you have opted for interpreting the requested number of training/testing events\n to be the number of events AFTER your preselection cuts" << Endl;
   }

   // if TrainTestSplit_<class> is set, the requested number of training events
   // becomes split*(all events); the requested number of testing events is set
   // to zero, so testing takes all events that remain
   if (eventCounts[cls].TrainTestSplitRequested < 1.0 && eventCounts[cls].TrainTestSplitRequested > 0.0) {
      eventCounts[cls].nTrainingEventsRequested =
         Int_t(eventCounts[cls].TrainTestSplitRequested*(availableTraining+availableTesting+availableUndefined));
      eventCounts[cls].nTestingEventsRequested = Int_t(0);
   }
   else if (eventCounts[cls].TrainTestSplitRequested != 0.0)
      Log() << kFATAL << Form("The option TrainTestSplit_<class> has to be in range (0, 1] but is set to %f.",eventCounts[cls].TrainTestSplitRequested) << Endl;

   Int_t requestedTraining = Int_t(eventCounts[cls].nTrainingEventsRequested * presel_scale);
   Int_t requestedTesting  = Int_t(eventCounts[cls].nTestingEventsRequested  * presel_scale);

   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "events in training trees : " << availableTraining << Endl;
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "events in testing trees : " << availableTesting << Endl;
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "events in unspecified trees : " << availableUndefined << Endl;
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "requested for training : " << requestedTraining << Endl;
   if (presel_scale < 1)
      Log() << " ( " << eventCounts[cls].nTrainingEventsRequested
            << " * " << presel_scale << " preselection efficiency)" << Endl;
   Log() << kDEBUG << "requested for testing : " << requestedTesting;
   if (presel_scale < 1)
      Log() << " ( " << eventCounts[cls].nTestingEventsRequested
            << " * " << presel_scale << " preselection efficiency)" << Endl;
   // decide how many events to use for training and testing
   Int_t useForTesting(0), useForTraining(0);
   Int_t allAvailable(availableUndefined + availableTraining + availableTesting);

   if ((requestedTraining == 0) && (requestedTesting == 0)) {
      // no event numbers requested: split the sample as evenly as possible
      if (availableUndefined >= TMath::Abs(availableTraining - availableTesting)) {
         // enough unspecified events to balance training and testing exactly
         useForTraining = useForTesting = allAvailable/2;
      }
      else {
         // assign all unspecified events to the smaller sample
         useForTraining = availableTraining;
         useForTesting  = availableTesting;
         if (availableTraining < availableTesting)
            useForTraining += availableUndefined;
         else
            useForTesting += availableUndefined;
      }
      requestedTraining = useForTraining;
      requestedTesting  = useForTesting;
   }
   else if (requestedTesting == 0) {
      // only training events requested: testing gets the rest
      useForTraining = TMath::Max(requestedTraining, availableTraining);
      if (allAvailable < useForTraining) {
         Log() << kFATAL << Form("Dataset[%s] : ",dsi.GetName())
               << "More events requested for training ("
               << requestedTraining << ") than available ("
               << allAvailable << ")!" << Endl;
      }
      useForTesting    = allAvailable - useForTraining;
      requestedTesting = useForTesting;
   }
   else if (requestedTraining == 0) {
      // only testing events requested: training gets the rest
      useForTesting = TMath::Max(requestedTesting, availableTesting);
      if (allAvailable < useForTesting) {
         Log() << kFATAL << Form("Dataset[%s] : ",dsi.GetName())
               << "More events requested for testing ("
               << requestedTesting << ") than available ("
               << allAvailable << ")!" << Endl;
      }
      useForTraining    = allAvailable - useForTesting;
      requestedTraining = useForTraining;
   }
   else {
      // both requested: fill each sample up to its request, then split the
      // remaining free events equally
      Int_t stillNeedForTraining = TMath::Max(requestedTraining - availableTraining, 0);
      Int_t stillNeedForTesting  = TMath::Max(requestedTesting  - availableTesting , 0);

      int NFree = availableUndefined - stillNeedForTraining - stillNeedForTesting;
      if (NFree < 0) NFree = 0;
      useForTraining = TMath::Max(requestedTraining, availableTraining) + NFree/2;
      useForTesting  = allAvailable - useForTraining;
   }

   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "determined event sample size to select training sample from=" << useForTraining << Endl;
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "determined event sample size to select test sample from=" << useForTesting << Endl;
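   // Summary of the four cases handled above (R = requested training events,
   // S = requested testing events):
   //   R = 0, S = 0 : split all available events as evenly as possible;
   //   R > 0, S = 0 : training gets max(R, available training), testing the rest;
   //   R = 0, S > 0 : the mirror image of the previous case;
   //   R > 0, S > 0 : fill each sample up to its request, then split the surplus
   //                  of unassigned events equally (NFree/2 each).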
   // distribute the unassigned events onto training and testing
   if (splitMode == "ALTERNATE") {
      Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "split 'ALTERNATE'" << Endl;
      Int_t nTraining = availableTraining;
      Int_t nTesting  = availableTesting;
      for (EventVector::iterator it = eventVectorUndefined.begin(), itEnd = eventVectorUndefined.end(); it != itEnd; ) {
         ++nTraining;
         if (nTraining <= requestedTraining) {
            eventVectorTraining.insert( eventVectorTraining.end(), (*it) );
            ++it;
         }
         if (it != itEnd) {
            ++nTesting;
            eventVectorTesting.insert( eventVectorTesting.end(), (*it) );
            ++it;
         }
      }
   }
   else {
      Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "split '" << splitMode << "'" << Endl;

      // test whether enough events are available
      Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "availableundefined : " << availableUndefined << Endl;
      Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "useForTraining : " << useForTraining << Endl;
      Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "useForTesting : " << useForTesting << Endl;
      Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "availableTraining : " << availableTraining << Endl;
      Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "availableTesting : " << availableTesting << Endl;

      if (availableUndefined < (useForTraining - availableTraining) ||
          availableUndefined < (useForTesting  - availableTesting ) ||
          availableUndefined < (useForTraining + useForTesting - availableTraining - availableTesting)) {
         Log() << kFATAL << Form("Dataset[%s] : ",dsi.GetName())
               << "More events requested than available!" << Endl;
      }

      // move the needed number of unassigned events into each sample
      if (useForTraining > availableTraining) {
         eventVectorTraining.insert( eventVectorTraining.end(), eventVectorUndefined.begin(), eventVectorUndefined.begin() + useForTraining - availableTraining );
         eventVectorUndefined.erase( eventVectorUndefined.begin(), eventVectorUndefined.begin() + useForTraining - availableTraining );
      }
      if (useForTesting > availableTesting) {
         eventVectorTesting.insert( eventVectorTesting.end(), eventVectorUndefined.begin(), eventVectorUndefined.begin() + useForTesting - availableTesting );
      }
   }
   eventVectorUndefined.clear();
   // trim the samples down to the requested sizes
   if (splitMode.Contains( "RANDOM" )) {
      UInt_t sizeTraining = eventVectorTraining.size();
      if (sizeTraining > UInt_t(requestedTraining)) {
         std::vector<UInt_t> indicesTraining( sizeTraining );
         // ... (fill with 0..sizeTraining-1)
         std::shuffle( indicesTraining.begin(), indicesTraining.end(), rndm );
         // keep the first (sizeTraining - requestedTraining) indices as deletion candidates
         indicesTraining.erase( indicesTraining.begin() + sizeTraining - UInt_t(requestedTraining), indicesTraining.end() );
         // delete the events at the selected indices
         for (std::vector<UInt_t>::iterator it = indicesTraining.begin(), itEnd = indicesTraining.end(); it != itEnd; ++it) {
            delete eventVectorTraining.at( (*it) );
            eventVectorTraining.at( (*it) ) = NULL;
         }
         // remove the NULL slots
         eventVectorTraining.erase( std::remove( eventVectorTraining.begin(), eventVectorTraining.end(), (void*)NULL ), eventVectorTraining.end() );
      }

      UInt_t sizeTesting = eventVectorTesting.size();
      if (sizeTesting > UInt_t(requestedTesting)) {
         std::vector<UInt_t> indicesTesting( sizeTesting );
         // ... (fill with 0..sizeTesting-1)
         std::shuffle( indicesTesting.begin(), indicesTesting.end(), rndm );
         indicesTesting.erase( indicesTesting.begin() + sizeTesting - UInt_t(requestedTesting), indicesTesting.end() );
         for (std::vector<UInt_t>::iterator it = indicesTesting.begin(), itEnd = indicesTesting.end(); it != itEnd; ++it) {
            delete eventVectorTesting.at( (*it) );
            eventVectorTesting.at( (*it) ) = NULL;
         }
         eventVectorTesting.erase( std::remove( eventVectorTesting.begin(), eventVectorTesting.end(), (void*)NULL ), eventVectorTesting.end() );
      }
   }
   else {
      if (eventVectorTraining.size() < UInt_t(requestedTraining))
         Log() << kWARNING << Form("Dataset[%s] : ",dsi.GetName())
               << "DataSetFactory/requested number of training samples larger than size of eventVectorTraining.\n"
               << "There is probably an issue. Please contact the TMVA developers." << Endl;
      std::for_each( eventVectorTraining.begin()+requestedTraining, eventVectorTraining.end(), DeleteFunctor<Event>() );
      eventVectorTraining.erase( eventVectorTraining.begin()+requestedTraining, eventVectorTraining.end() );

      if (eventVectorTesting.size() < UInt_t(requestedTesting))
         Log() << kWARNING << Form("Dataset[%s] : ",dsi.GetName())
               << "DataSetFactory/requested number of testing samples larger than size of eventVectorTesting.\n"
               << "There is probably an issue. Please contact the TMVA developers." << Endl;
      std::for_each( eventVectorTesting.begin()+requestedTesting, eventVectorTesting.end(), DeleteFunctor<Event>() );
      eventVectorTesting.erase( eventVectorTesting.begin()+requestedTesting, eventVectorTesting.end() );
   }
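   // The RANDOM branch above removes a *random* surplus: shuffle an index list,
   // keep the first (size - requested) indices as deletion candidates, null the
   // corresponding slots, and compact with the erase(std::remove(...)) idiom.
   // The non-RANDOM branch simply truncates the vectors at the end.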
   // merge the per-class vectors into the final training and testing vectors
   Int_t trainingSize = 0;
   Int_t testingSize  = 0;
   // ... (sum up the per-class sizes)
   trainingEventVector->reserve( trainingSize );
   testingEventVector->reserve( testingSize );

   Log() << kDEBUG << " MIXING ============= " << Endl;
   if (mixMode == "ALTERNATE") {
      // warn when the classes have different numbers of events: alternation
      // then stops at the last event of the smaller class
      // ...
      Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
            << "Training sample: You are trying to mix events in alternate mode although the classes have different event numbers. This works but the alternation stops at the last event of the smaller class." << Endl;
      // ...
      Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
            << "Testing sample: You are trying to mix events in alternate mode although the classes have different event numbers. This works but the alternation stops at the last event of the smaller class." << Endl;

      typedef EventVector::iterator EvtVecIt;
      EvtVecIt itEvent, itEventEnd;

      // insert the first class
      Log() << kDEBUG << "insert class 0 into training and test vector" << Endl;
      trainingEventVector->insert( trainingEventVector->end(), tmpEventVector[Types::kTraining].at(0).begin(), tmpEventVector[Types::kTraining].at(0).end() );
      testingEventVector->insert( testingEventVector->end(), tmpEventVector[Types::kTesting].at(0).begin(), tmpEventVector[Types::kTesting].at(0).end() );

      // insert the other classes
      EvtVecIt itTarget;
      for (UInt_t cls = 1; cls < dsi.GetNClasses(); ++cls) {
         Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "insert class " << cls << Endl;

         // insert into the training vector
         itTarget = trainingEventVector->begin() - 1; // start one before begin
         for (itEvent = tmpEventVector[Types::kTraining].at(cls).begin(), itEventEnd = tmpEventVector[Types::kTraining].at(cls).end(); itEvent != itEventEnd; ++itEvent) {
            if ((trainingEventVector->end() - itTarget) < Int_t(cls+1)) {
               // too close to the end: append the remainder without mixing
               itTarget = trainingEventVector->end();
               trainingEventVector->insert( itTarget, itEvent, itEventEnd );
               break;
            }
            else {
               itTarget += cls+1;
               trainingEventVector->insert( itTarget, (*itEvent) );
            }
         }

         // insert into the testing vector
         itTarget = testingEventVector->begin() - 1;
         for (itEvent = tmpEventVector[Types::kTesting].at(cls).begin(), itEventEnd = tmpEventVector[Types::kTesting].at(cls).end(); itEvent != itEventEnd; ++itEvent) {
            if ((testingEventVector->end() - itTarget) < Int_t(cls+1)) {
               itTarget = testingEventVector->end();
               testingEventVector->insert( itTarget, itEvent, itEventEnd );
               break;
            }
            else {
               itTarget += cls+1;
               testingEventVector->insert( itTarget, (*itEvent) );
            }
         }
      }
   }
   else {
      for (UInt_t cls = 0; cls < dsi.GetNClasses(); ++cls) {
         trainingEventVector->insert( trainingEventVector->end(), tmpEventVector[Types::kTraining].at(cls).begin(), tmpEventVector[Types::kTraining].at(cls).end() );
         testingEventVector->insert ( testingEventVector->end(),  tmpEventVector[Types::kTesting].at(cls).begin(),  tmpEventVector[Types::kTesting].at(cls).end() );
      }
   }
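   // In the ALTERNATE branch above, class 0 seeds the vector and each further
   // class cls is spliced in at every (cls+1)-th position, so the merged sample
   // cycles through the classes until the shorter source runs out; the
   // remainder is appended unmixed.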
   if (mixMode == "RANDOM") {
      // shuffle the merged training and testing samples
      std::shuffle( trainingEventVector->begin(), trainingEventVector->end(), rndm );
      std::shuffle( testingEventVector->begin(),  testingEventVector->end(),  rndm );
   }

   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "trainingEventVector " << trainingEventVector->size() << Endl;
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "testingEventVector " << testingEventVector->size() << Endl;

   // sanity checks on the resulting DataSet
   if (ds->GetNTrainingEvents() < 1)
      Log() << kFATAL << "Dataset " << std::string(dsi.GetName())
            << " does not have any training events, I better stop here and let you fix that one first " << Endl;

   if (ds->GetNTestEvents() < 1)
      Log() << kERROR << "Dataset " << std::string(dsi.GetName())
            << " does not have any testing events, guess that will cause problems later..but for now, I continue " << Endl;

   // ... (the event vectors were handed to the DataSet via SetEventCollection)
   delete trainingEventVector;
   delete testingEventVector;
   // RenormEvents(): renormalisation of the TRAINING event weights
   Int_t trainingSize = 0;
   Int_t testingSize  = 0;
   // ...
   Double_t trainingSumSignalWeights = 0;
   Double_t trainingSumBackgrWeights = 0;
   Double_t testingSumSignalWeights  = 0;
   Double_t testingSumBackgrWeights  = 0;

   // per-class event counts and sums of weights
   for (UInt_t cls = 0, clsEnd = dsi.GetNClasses(); cls < clsEnd; ++cls) {
      trainingSizePerClass.at(cls) = tmpEventVector[Types::kTraining].at(cls).size();
      testingSizePerClass.at(cls)  = tmpEventVector[Types::kTesting].at(cls).size();

      trainingSize += trainingSizePerClass.at(cls);
      testingSize  += testingSizePerClass.at(cls);

      trainingSumWeightsPerClass.at(cls) =
         std::accumulate( tmpEventVector[Types::kTraining].at(cls).begin(),
                          tmpEventVector[Types::kTraining].at(cls).end(),
                          Double_t(0), [](Double_t w, const TMVA::Event* E) { return w + E->GetOriginalWeight(); } );

      testingSumWeightsPerClass.at(cls) =
         std::accumulate( tmpEventVector[Types::kTesting].at(cls).begin(),
                          tmpEventVector[Types::kTesting].at(cls).end(),
                          Double_t(0), [](Double_t w, const TMVA::Event* E) { return w + E->GetOriginalWeight(); } );

      if (cls == dsi.GetSignalClassIndex()) {
         trainingSumSignalWeights += trainingSumWeightsPerClass.at(cls);
         testingSumSignalWeights  += testingSumWeightsPerClass.at(cls);
      }
      else {
         trainingSumBackgrWeights += trainingSumWeightsPerClass.at(cls);
         testingSumBackgrWeights  += testingSumWeightsPerClass.at(cls);
      }
   }
   // compute renormalisation factors according to the chosen NormMode
   // (normMode was upper-cased during option parsing)
   if (normMode == "NONE") {
      Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
            << "No weight renormalisation applied: use original global and event weights" << Endl;
   }
   else if (normMode == "NUMEVENTS") {
      Log() << kDEBUG
            << "\tWeight renormalisation mode: \"NumEvents\": renormalises all event classes " << Endl;
      Log() << kDEBUG
            << " such that the effective (weighted) number of events in each class equals the respective " << Endl;
      Log() << kDEBUG
            << " number of events (entries) that you demanded in PrepareTrainingAndTestTree(\"\",\"nTrain_Signal=.. )" << Endl;
      Log() << kDEBUG
            << " ... i.e. such that Sum[i=1..N_j]{w_i} = N_j, j=0,1,2..." << Endl;
      Log() << kDEBUG
            << " ... (note that N_j is the sum of TRAINING events (nTrain_j...with j=Signal,Background.." << Endl;
      Log() << kDEBUG
            << " ..... Testing events are not renormalised nor included in the renormalisation factor! )" << Endl;

      for (UInt_t cls = 0, clsEnd = dsi.GetNClasses(); cls < clsEnd; ++cls) {
         renormFactor.at(cls) = ((Float_t)trainingSizePerClass.at(cls) )/
            (trainingSumWeightsPerClass.at(cls)) ;
      }
   }
   else if (normMode == "EQUALNUMEVENTS") {
      // same as NumEvents, but with class 0 as the common reference
      Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName()) << "Weight renormalisation mode: \"EqualNumEvents\": renormalises all event classes ..." << Endl;
      Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName()) << " such that the effective (weighted) number of events in each class is the same " << Endl;
      Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName()) << " (and equals the number of events (entries) given for class=0 )" << Endl;
      Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName()) << "... i.e. such that Sum[i=1..N_j]{w_i} = N_classA, j=classA, classB, ..." << Endl;
      Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName()) << "... (note that N_j is the sum of TRAINING events" << Endl;
      Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName()) << " ..... Testing events are not renormalised nor included in the renormalisation factor!)" << Endl;

      UInt_t referenceClass = 0;
      for (UInt_t cls = 0, clsEnd = dsi.GetNClasses(); cls < clsEnd; ++cls) {
         renormFactor.at(cls) = Float_t(trainingSizePerClass.at(referenceClass))/
            (trainingSumWeightsPerClass.at(cls));
      }
   }
   else {
      Log() << kFATAL << Form("Dataset[%s] : ",dsi.GetName())
            << "<PrepareForTrainingAndTesting> Unknown NormMode: " << normMode << Endl;
   }
   // apply the renormalisation factor to the TRAINING weights of each class
   Log() << kDEBUG << "--> Rescale " << setiosflags(ios::left) << std::setw(maxL)
         << dsi.GetClassInfo(cls)->GetName() << Endl;
   for (EventVector::iterator it = tmpEventVector[Types::kTraining].at(cls).begin(),
           itEnd = tmpEventVector[Types::kTraining].at(cls).end(); it != itEnd; ++it) {
      (*it)->SetWeight( (*it)->GetWeight() * renormFactor.at(cls) );
   }
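   // Net effect of the rescaling loop: the sum of TRAINING weights in class j
   // becomes Sum[i=1..N_j]{w_i} * renormFactor_j = N_j for "NumEvents" (or
   // N_referenceClass for "EqualNumEvents"); testing weights are left untouched.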
   // print out the result of the renormalisation
   Log() << kINFO << "Number of training and testing events" << Endl;
   Log() << kDEBUG << "\tafter rescaling:" << Endl;
   Log() << kINFO << "---------------------------------------------------------------------------" << Endl;

   trainingSumSignalWeights = 0;
   trainingSumBackgrWeights = 0;
   testingSumSignalWeights  = 0;
   testingSumBackgrWeights  = 0;

   for (UInt_t cls = 0, clsEnd = dsi.GetNClasses(); cls < clsEnd; ++cls) {
      // recompute the per-class sums of weights after rescaling
      trainingSumWeightsPerClass.at(cls) =
         std::accumulate( tmpEventVector[Types::kTraining].at(cls).begin(),
                          tmpEventVector[Types::kTraining].at(cls).end(),
                          Double_t(0), [](Double_t w, const TMVA::Event* E) { return w + E->GetWeight(); } );

      testingSumWeightsPerClass.at(cls) =
         std::accumulate( tmpEventVector[Types::kTesting].at(cls).begin(),
                          tmpEventVector[Types::kTesting].at(cls).end(),
                          Double_t(0), [](Double_t w, const TMVA::Event* E) { return w + E->GetWeight(); } );

      if (cls == dsi.GetSignalClassIndex()) {
         trainingSumSignalWeights += trainingSumWeightsPerClass.at(cls);
         testingSumSignalWeights  += testingSumWeightsPerClass.at(cls);
      }
      else {
         trainingSumBackgrWeights += trainingSumWeightsPerClass.at(cls);
         testingSumBackgrWeights  += testingSumWeightsPerClass.at(cls);
      }

      // output statistics
      Log() << kINFO << setiosflags(ios::left) << std::setw(maxL)
            << dsi.GetClassInfo(cls)->GetName()
            << "training events : " << trainingSizePerClass.at(cls) << Endl;
      Log() << kDEBUG << "\t(sum of weights: " << trainingSumWeightsPerClass.at(cls) << ")"
            << " - requested were " << eventCounts[cls].nTrainingEventsRequested << " events" << Endl;
      Log() << kINFO << setiosflags(ios::left) << std::setw(maxL)
            << dsi.GetClassInfo(cls)->GetName()
            << "testing events : " << testingSizePerClass.at(cls) << Endl;
      Log() << kDEBUG << "\t(sum of weights: " << testingSumWeightsPerClass.at(cls) << ")"
            << " - requested were " << eventCounts[cls].nTestingEventsRequested << " events" << Endl;
      Log() << kINFO << setiosflags(ios::left) << std::setw(maxL)
            << dsi.GetClassInfo(cls)->GetName()
            << "training and testing events: "
            << (trainingSizePerClass.at(cls)+testingSizePerClass.at(cls)) << Endl;
      Log() << kDEBUG << "\t(sum of weights: "
            << (trainingSumWeightsPerClass.at(cls)+testingSumWeightsPerClass.at(cls)) << ")" << Endl;

      if (eventCounts[cls].nEvAfterCut < eventCounts[cls].nEvBeforeCut) {
         Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName()) << setiosflags(ios::left) << std::setw(maxL)
               << dsi.GetClassInfo(cls)->GetName()
               << "due to the preselection a scaling factor has been applied to the numbers of requested events: "
               << eventCounts[cls].cutScaling() << Endl;
      }
   }