void TMVARegression( TString myMethodList = "" )
 
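   // Flag map used to switch the individual regression methods on or off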
   std::map<std::string,int> Use;
 
   std::cout << std::endl;
 
   std::cout << "==> Start TMVARegression" << std::endl;
 
   if (myMethodList != "") {
 
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;
 
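      // Loop over the user-supplied method names (mlist is the tokenized form of myMethodList)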
      for (UInt_t i=0; i<mlist.size(); i++) {
 
         std::string regMethod(mlist[i].Data());
 
         if (Use.find(regMethod) == Use.end()) {
 
            std::cout << "Method \"" << regMethod << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
 
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << " ";
 
            std::cout << std::endl;
 
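   // Name of the ROOT output file in which TMVA will store its evaluation histograms and trees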
   TString outfileName( "TMVAReg.root" );
 
   TMVA::Factory *factory = new TMVA::Factory( "TMVARegression", outputFile,
                                                "!V:!Silent:Color:DrawProgressBar:AnalysisType=Regression" );
 
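   // outputFile is the TFile opened for writing outfileName; dataloader is the TMVA::DataLoader
   // that manages the input variables, spectators and regression trees used below.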
   dataloader->AddVariable( "var1", "Variable 1", "units", 'F' );
 
   dataloader->AddVariable( "var2", "Variable 2", "units", 'F' );
 
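   // Spectator variables are not used in the training but are copied to the output tree.
   // AddSpectator takes (expression, title, unit[, min, max]) and has no type-character argument.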
   dataloader->AddSpectator( "spec1:=var1*2", "Spectator 1", "units" );
 
   dataloader->AddSpectator( "spec2:=var1*3", "Spectator 2", "units" );
 
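   // Read the example regression tree; if the file is not available locally it is fetched from the web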
   TString fname = "./tmva_reg_example.root";
 
      input = TFile::Open("http://root.cern.ch/files/tmva_reg_example.root", "CACHEREAD");
 
      std::cout << "ERROR: could not open data file" << std::endl;
 
   std::cout << "--- TMVARegression           : Using input file: " << input->GetName() << std::endl;
 
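   // Split the regression tree into training and test samples (no event selection cut is applied)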
   dataloader->PrepareTrainingAndTestTree( TCut(""),
                                           "nTrain_Regression=1000:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents:!V" );
 
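   // --- Book the MVA regression methods (each call registers one method with its option string)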
   factory->BookMethod( dataloader, TMVA::Types::kPDERS, "PDERS",
                        "!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=40:NEventsMax=60:VarTransform=None" );
 
   factory->BookMethod( dataloader, TMVA::Types::kPDEFoam, "PDEFoam",
                        "!H:!V:MultiTargetRegression=F:TargetSelection=Mpv:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Compress=T:Kernel=None:Nmin=10:VarTransform=None" );
 
   factory->BookMethod( dataloader, TMVA::Types::kKNN, "KNN",
                        "nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );
 
   factory->BookMethod( dataloader, TMVA::Types::kLD, "LD",
                        "!H:!V:VarTransform=None" );
 
   factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_MC",
                        "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=MC:SampleSize=100000:Sigma=0.1:VarTransform=D" );
 
   factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_GA",
                        "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:PopSize=100:Cycles=3:Steps=30:Trim=True:SaveBestGen=1:VarTransform=Norm" );
 
   factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_MT",
                        "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
 
   factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_GAMT",
                        "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
 
   factory->BookMethod( dataloader, TMVA::Types::kMLP, "MLP",
                        "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N+20:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
 
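   // Deep neural network: assemble the booking option string from architecture, layout and training strategy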
   if (Use["DNN_CPU"] || Use["DNN_GPU"]) {
 
      TString archOption = Use["DNN_GPU"] ? "GPU" : "CPU";
 
      TString layoutString("Layout=TANH|50,TANH|50,TANH|50,LINEAR");
 
      TString trainingStrategyString("TrainingStrategy=");
 
      trainingStrategyString += "LearningRate=1e-3,Momentum=0.3,ConvergenceSteps=20,BatchSize=50,TestRepetitions=1,WeightDecay=0.0,Regularization=None,Optimizer=Adam";
 
      TString nnOptions("!H:V:ErrorStrategy=SUMOFSQUARES:VarTransform=G:WeightInitialization=XAVIERUNIFORM:Architecture=");
 
      nnOptions.Append(archOption);
 
      nnOptions.Append(":");
 
      nnOptions.Append(layoutString);
 
      nnOptions.Append(":");
 
      nnOptions.Append(trainingStrategyString);
 
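   // Boosted decision trees for regression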
   factory->BookMethod( dataloader, TMVA::Types::kBDT, "BDT",
                        "!H:!V:NTrees=100:MinNodeSize=1.0%:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30" );
 
   factory->BookMethod( dataloader, TMVA::Types::kBDT, "BDTG",
                        "!H:!V:NTrees=2000:BoostType=Grad:Shrinkage=0.1:UseBaggedBoost:BaggedSampleFraction=0.5:nCuts=20:MaxDepth=4" );
 
   std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
 
   std::cout << "==> TMVARegression is done!" << std::endl;
 
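// Entry point when the macro is compiled as a standalone executable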
int main( int argc, char** argv )
 
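   // Collect the method names given on the command line into methodList, skipping ROOT batch-mode flags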
   for (int i=1; i<argc; i++) {
 
      if (regMethod=="-b" || regMethod=="--batch") continue;
 
      methodList += regMethod;
 
   TMVARegression(methodList);
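
// Usage sketch (assumes the standard ROOT/TMVA tutorial setup):
//   root -l -b -q 'TMVARegression.C("BDT,MLP")'   // train and test only the listed methods
//   root -l
//   root [0] TMVA::TMVARegGui("TMVAReg.root")     // browse the regression results afterwards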
 
 