68   std::map<std::string,int> Use;
 
  107   std::cout << std::endl;
 
  108   std::cout << 
"==> Start TMVARegression" << std::endl;
 
  112      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;
 
  119            std::cout << 
"Method \"" << 
regMethod << 
"\" not known in TMVA under this name. Choose among the following:" << std::endl;
 
  120            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << 
" ";
 
  121            std::cout << std::endl;
 
  147                                               "!V:!Silent:Color:DrawProgressBar:AnalysisType=Regression" );
 
  160   dataloader->AddVariable( 
"var1", 
"Variable 1", 
"units", 
'F' );
 
  161   dataloader->AddVariable( 
"var2", 
"Variable 2", 
"units", 
'F' );
 
  166   dataloader->AddSpectator( 
"spec1:=var1*2",  
"Spectator 1", 
"units", 
'F' );
 
  167   dataloader->AddSpectator( 
"spec2:=var1*3",  
"Spectator 2", 
"units", 
'F' );
 
  179   TString fname =  
gROOT->GetTutorialDir() + 
"/machine_learning/data/tmva_reg_example.root";
 
  184      std::cout << 
"ERROR: could not open data file" << std::endl;
 
  187   std::cout << 
"--- TMVARegression           : Using input file: " << 
input->GetName() << std::endl;
 
  201   dataloader->SetWeightExpression( 
"var1", 
"Regression" );
 
  208                                         "nTrain_Regression=1000:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents:!V" );
 
  228                           "!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=40:NEventsMax=60:VarTransform=None" );
 
  236             "!H:!V:MultiTargetRegression=F:TargetSelection=Mpv:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Compress=T:Kernel=None:Nmin=10:VarTransform=None" );
 
  241                           "nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );
 
  246                           "!H:!V:VarTransform=None" );
 
  251                          "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=MC:SampleSize=100000:Sigma=0.1:VarTransform=D" );
 
  255                           "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:PopSize=100:Cycles=3:Steps=30:Trim=True:SaveBestGen=1:VarTransform=Norm" );
 
  259                           "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
 
  263                           "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
 
  267      factory->
BookMethod( 
dataloader,  
TMVA::Types::kMLP, 
"MLP", 
"!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N+20:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
 
  269   if (Use[
"DNN_CPU"] || Use[
"DNN_GPU"]) {
 
  278      trainingStrategyString +=
"LearningRate=1e-3,Momentum=0.3,ConvergenceSteps=20,BatchSize=50,TestRepetitions=1,WeightDecay=0.0,Regularization=None,Optimizer=Adam";
 
  280      TString nnOptions(
"!H:V:ErrorStrategy=SUMOFSQUARES:VarTransform=G:WeightInitialization=XAVIERUNIFORM:Architecture=");
 
  301                           "!H:!V:NTrees=100:MinNodeSize=1.0%:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30" );
 
  305                           "!H:!V:NTrees=2000::BoostType=Grad:Shrinkage=0.1:UseBaggedBoost:BaggedSampleFraction=0.5:nCuts=20:MaxDepth=3:MaxDepth=4" );
 
  324   std::cout << 
"==> Wrote root file: " << 
outputFile->GetName() << std::endl;
 
  325   std::cout << 
"==> TMVARegression is done!" << std::endl;
 
  338   for (
int i=1; i<
argc; i++) {
 
ROOT::Detail::TRangeCast< T, true > TRangeDynCast
TRangeDynCast is an adapter class that allows the typed iteration through a TCollection.
 
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void input
 
R__EXTERN TSystem * gSystem
 
A specialized string object used for TTree selections.
 
A ROOT file is an on-disk file, usually with extension .root, that stores objects in a file-system-like logical structure.
 
static TFile * Open(const char *name, Option_t *option="", const char *ftitle="", Int_t compress=ROOT::RCompressionSetting::EDefaults::kUseCompiledDefault, Int_t netopt=0)
Create / open a file.
 
This is the main MVA steering class.
 
void TrainAllMethods()
Iterates through all booked methods and calls training.
 
MethodBase * BookMethod(DataLoader *loader, TString theMethodName, TString methodTitle, TString theOption="")
Book a classifier or regression method.
 
void TestAllMethods()
Evaluates all booked methods on the testing data and adds the output to the Results in the corresponding DataSetInfo.
 
void EvaluateAllMethods(void)
Iterates over all MVAs that have been booked, and calls their evaluation methods.
 
virtual Bool_t AccessPathName(const char *path, EAccessMode mode=kFileExists)
Returns FALSE if one can access a file using the specified access mode.
 
A TTree represents a columnar dataset.
 
create variable transformations
 
void TMVARegGui(const char *fName="TMVAReg.root", TString dataset="")