// Switch the individual TMVA methods on (1) or off (0) by name
std::map<std::string,int> Use;
 
Use["Likelihood"]     = 1;
Use["LikelihoodD"]    = 0;
Use["LikelihoodPCA"]  = 1;
Use["LikelihoodKDE"]  = 0;
Use["LikelihoodMIX"]  = 0;
// ...
Use["PDEFoamBoost"]   = 0;
// ...
Use["BoostedFisher"]  = 0;
std::cout << std::endl;
std::cout << "==> Start TMVAClassification" << std::endl;
 
// A method list was given: first switch off all default selections
for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;
 
std::cout << "Method \"" << regMethod << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << " ";
std::cout << std::endl;
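// Usage sketch (not part of the macro body): the macro takes a comma-separated list of the
// method names defined in the Use map above, e.g. from the shell
//    root -l ./TMVAClassification.C\(\"Likelihood,LikelihoodPCA\"\)
// Names outside the map trigger the error message printed above.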
 
   throw std::runtime_error("ERROR: could not open data file");
 
std::cout << "--- TMVAClassification       : Using input file: " << input->GetName() << std::endl;
 
   throw std::runtime_error("ERROR: could not open output file");
 
// Create the factory object; the first argument is the job name, and all TMVA output is written to outputFile
auto factory = std::make_unique<TMVA::Factory>( "TMVAClassification", outputFile,
   "!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D:AnalysisType=Classification" );
 
// Define the input variables used for the MVA training; expressions of the form
// "<name> := <expression>" are evaluated on the input trees
dataloader->AddVariable( "myvar1 := var1+var2", 'F' );
dataloader->AddVariable( "myvar2 := var1-var2", "Expression 2", "", 'F' );
dataloader->AddVariable( "var3",                "Variable 3", "units", 'F' );
dataloader->AddVariable( "var4",                "Variable 4", "units", 'F' );

// Spectator variables are copied to the output tree but not used in the training
dataloader->AddSpectator( "spec1 := var1*2", "Spectator 1", "units", 'F' );
dataloader->AddSpectator( "spec2 := var1*3", "Spectator 2", "units", 'F' );
 
// Apply an individual event weight to the background events
dataloader->SetBackgroundWeightExpression( "weight" );
 
// Training/test splitting; mycuts and mycutb are the signal/background preselection TCuts defined earlier in the macro
dataloader->PrepareTrainingAndTestTree( mycuts, mycutb, "nTrain_Signal=1000:nTrain_Background=1000:SplitMode=Random:NormMode=NumEvents:!V" );
 
// Cut optimisation with different fitters and input-variable transformations
if (Use["Cuts"])
   factory->BookMethod( dataloader, TMVA::Types::kCuts, "Cuts", "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart" );
if (Use["CutsD"])
   factory->BookMethod( dataloader, TMVA::Types::kCuts, "CutsD", "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=Decorrelate" );
if (Use["CutsPCA"])
   factory->BookMethod( dataloader, TMVA::Types::kCuts, "CutsPCA", "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=PCA" );
if (Use["CutsGA"])
   factory->BookMethod( dataloader, TMVA::Types::kCuts, "CutsGA", "H:!V:FitMethod=GA:CutRangeMin[0]=-10:CutRangeMax[0]=10:VarProp[1]=FMax:EffSel:Steps=30:Cycles=3:PopSize=400:SC_steps=10:SC_rate=5:SC_factor=0.95" );
if (Use["CutsSA"])
   factory->BookMethod( dataloader, TMVA::Types::kCuts, "CutsSA", "!H:!V:FitMethod=SA:EffSel:MaxCalls=150000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale" );
 
// Likelihood ("naive Bayes") estimator with various PDF parameterisations and transformations
if (Use["Likelihood"])
   factory->BookMethod( dataloader, TMVA::Types::kLikelihood, "Likelihood",
                        "H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50" );

if (Use["LikelihoodD"])
   factory->BookMethod( dataloader, TMVA::Types::kLikelihood, "LikelihoodD",
                        "!H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=Decorrelate" );

if (Use["LikelihoodPCA"])
   factory->BookMethod( dataloader, TMVA::Types::kLikelihood, "LikelihoodPCA",
                        "!H:!V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=PCA" );

if (Use["LikelihoodKDE"])
   factory->BookMethod( dataloader, TMVA::Types::kLikelihood, "LikelihoodKDE",
                        "!H:!V:!TransformOutput:PDFInterpol=KDE:KDEtype=Gauss:KDEiter=Adaptive:KDEFineFactor=0.3:KDEborder=None:NAvEvtPerBin=50" );

if (Use["LikelihoodMIX"])
   factory->BookMethod( dataloader, TMVA::Types::kLikelihood, "LikelihoodMIX",
                        "!H:!V:!TransformOutput:PDFInterpolSig[0]=KDE:PDFInterpolBkg[0]=KDE:PDFInterpolSig[1]=KDE:PDFInterpolBkg[1]=KDE:PDFInterpolSig[2]=Spline2:PDFInterpolBkg[2]=Spline2:PDFInterpolSig[3]=Spline2:PDFInterpolBkg[3]=Spline2:KDEtype=Gauss:KDEiter=Nonadaptive:KDEborder=None:NAvEvtPerBin=50" );
 
// PDE-RS: multidimensional likelihood estimator based on range searching
if (Use["PDERS"])
   factory->BookMethod( dataloader, TMVA::Types::kPDERS, "PDERS", "!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600" );
if (Use["PDERSD"])
   factory->BookMethod( dataloader, TMVA::Types::kPDERS, "PDERSD", "!H:!V:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600:VarTransform=Decorrelate" );
if (Use["PDERSPCA"])
   factory->BookMethod( dataloader, TMVA::Types::kPDERS, "PDERSPCA", "!H:!V:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600:VarTransform=PCA" );

// PDE-Foam: likelihood estimator with self-adapting phase-space binning
if (Use["PDEFoam"])
   factory->BookMethod( dataloader, TMVA::Types::kPDEFoam, "PDEFoam", "!H:!V:SigBgSeparate=F:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Nmin=100:Kernel=None:Compress=T" );
 
if (Use["PDEFoamBoost"])
   factory->BookMethod( dataloader, TMVA::Types::kPDEFoam, "PDEFoamBoost",
                        "!H:!V:Boost_Num=30:Boost_Transform=linear:SigBgSeparate=F:MaxDepth=4:UseYesNoCell=T:DTLogic=MisClassificationError:FillFoamWithOrigWeights=F:TailCut=0:nActiveCells=500:nBin=20:Nmin=400:Kernel=None:Compress=T" );
 
if (Use["KNN"])
   factory->BookMethod( dataloader, TMVA::Types::kKNN, "KNN", "H:nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );
 
// Linear discriminant
if (Use["LD"])
   factory->BookMethod( dataloader, TMVA::Types::kLD, "LD",
                        "H:!V:VarTransform=None:CreateMVAPdfs:PDFInterpolMVAPdf=Spline2:NbinsMVAPdf=50:NsmoothMVAPdf=10" );

// Fisher discriminant
if (Use["Fisher"])
   factory->BookMethod( dataloader, TMVA::Types::kFisher, "Fisher",
                        "H:!V:Fisher:VarTransform=None:CreateMVAPdfs:PDFInterpolMVAPdf=Spline2:NbinsMVAPdf=50:NsmoothMVAPdf=10" );
 
// Boosted Fisher discriminant (ensemble of Fisher classifiers, AdaBoost)
if (Use["BoostedFisher"])
   factory->BookMethod( dataloader, TMVA::Types::kFisher, "BoostedFisher",
                        "H:!V:Boost_Num=20:Boost_Transform=log:Boost_Type=AdaBoost:Boost_AdaBoostBeta=0.2:!Boost_DetailedMonitoring" );
 
// Function discriminant analysis (FDA) with different minimisers for the fit parameters
if (Use["FDA_MC"])
   factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_MC", "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:SampleSize=100000:Sigma=0.1" );
if (Use["FDA_GA"])
   factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_GA", "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:PopSize=100:Cycles=2:Steps=5:Trim=True:SaveBestGen=1" );
if (Use["FDA_SA"])
   factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_SA", "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=SA:MaxCalls=15000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale" );
if (Use["FDA_MT"])
   factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_MT", "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
if (Use["FDA_GAMT"])
   factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_GAMT", "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
if (Use["FDA_MCMT"])
   factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_MCMT", "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:SampleSize=20" );
 
// TMVA multilayer perceptrons: plain BP, BFGS training, and BFGS with Bayesian regulators
if (Use["MLP"])
   factory->BookMethod( dataloader, TMVA::Types::kMLP, "MLP",
                        "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:!UseRegulator" );

if (Use["MLPBFGS"])
   factory->BookMethod( dataloader, TMVA::Types::kMLP, "MLPBFGS",
                        "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:!UseRegulator" );

if (Use["MLPBNN"])
   factory->BookMethod( dataloader, TMVA::Types::kMLP, "MLPBNN",
                        "H:!V:NeuronType=tanh:VarTransform=N:NCycles=60:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:UseRegulator" );
 
if (Use["DNN_CPU"] or Use["DNN_GPU"]) {
   // Training strategy for the deep network (continuation lines of the option string)
   // ...
                                        "ConvergenceSteps=20,BatchSize=100,TestRepetitions=1,"
                                        "WeightDecay=1e-4,Regularization=None,"
                                        "DropConfig=0.0+0.5+0.5+0.5");

   // General DNN options (continuation line of the option string)
   // ...
                       "WeightInitialization=XAVIERUNIFORM");

   // CUDA (GPU) implementation
   if (Use["DNN_GPU"]) {
      // ...

   // Multi-core CPU implementation
   if (Use["DNN_CPU"]) {
      // ...
 
// ROOT's own neural network (TMultiLayerPerceptron)
if (Use["TMlpANN"])
   factory->BookMethod( dataloader, TMVA::Types::kTMlpANN, "TMlpANN",
                        "!H:!V:NCycles=200:HiddenLayers=N+1,N:LearningMethod=BFGS:ValidationFraction=0.3" );
 
// Boosted decision trees
if (Use["BDTG"]) // gradient boost
   factory->BookMethod( dataloader, TMVA::Types::kBDT, "BDTG", "!H:!V:NTrees=1000:MinNodeSize=2.5%:BoostType=Grad:Shrinkage=0.10:UseBaggedBoost:BaggedSampleFraction=0.5:nCuts=20:MaxDepth=2" );
if (Use["BDT"]) // adaptive boost
   factory->BookMethod( dataloader, TMVA::Types::kBDT, "BDT", "!H:!V:NTrees=850:MinNodeSize=2.5%:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:UseBaggedBoost:BaggedSampleFraction=0.5:SeparationType=GiniIndex:nCuts=20" );
if (Use["BDTB"]) // bagging
   factory->BookMethod( dataloader, TMVA::Types::kBDT, "BDTB", "!H:!V:NTrees=400:BoostType=Bagging:SeparationType=GiniIndex:nCuts=20" );
if (Use["BDTD"]) // decorrelation + adaptive boost
   factory->BookMethod( dataloader, TMVA::Types::kBDT, "BDTD", "!H:!V:NTrees=400:MinNodeSize=5%:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:VarTransform=Decorrelate" );
if (Use["BDTF"]) // allow Fisher discriminants in node splitting
   factory->BookMethod( dataloader, TMVA::Types::kBDT, "BDTF", "!H:!V:NTrees=50:MinNodeSize=2.5%:UseFisherCuts:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=20" );

// RuleFit: rule-based classifier following Friedman's method
if (Use["RuleFit"])
   factory->BookMethod( dataloader, TMVA::Types::kRuleFit, "RuleFit", "H:!V:RuleFitModule=RFTMVA:Model=ModRuleLinear:MinImp=0.001:RuleMinDist=0.001:NTrees=20:fEventsMin=0.01:fEventsMax=0.5:GDTau=-1.0:GDTauPrec=0.01:GDStep=0.01:GDNSteps=10000:GDErrScale=1.02" );
 
// Train MVAs using the set of training events
factory->TrainAllMethods();

// Evaluate all MVAs using the set of test events
factory->TestAllMethods();

// Evaluate and compare the performance of all configured MVAs
factory->EvaluateAllMethods();
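// Optional sketch (not part of the lines above): after EvaluateAllMethods() the factory can be
// queried for summary figures such as the ROC integral of a booked method, e.g.
if (Use["Fisher"])
   std::cout << "ROC integral (Fisher): " << factory->GetROCIntegral(dataloader, "Fisher") << std::endl;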
 
std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
std::cout << "==> TMVAClassification is done!" << std::endl;
 
for (int i = 1; i < argc; i++) {
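   // Sketch of the loop body (an assumption based on the standard tutorial; "methodList" would
   // be a string accumulator declared before the loop): each command-line argument is appended
   // to a comma-separated method list that is finally passed to TMVAClassification(methodList).
   //    TString regMethod(argv[i]);
   //    if (regMethod == "-b" || regMethod == "--batch") continue;
   //    if (!methodList.IsNull()) methodList += ",";
   //    methodList += regMethod;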
 