#include <cstdlib>
#include <iostream>
#include <map>
#include <string>
#include <vector>

#include "TCut.h"
#include "TFile.h"
#include "TROOT.h"
#include "TString.h"
#include "TSystem.h"
#include "TTree.h"

#include "TMVA/DataLoader.h"
#include "TMVA/Factory.h"
#include "TMVA/TMVARegGui.h"
#include "TMVA/Tools.h"
#include "TMVA/Types.h"

void TMVARegression( TString myMethodList = "" )
{
   // This loads the TMVA library
   TMVA::Tools::Instance();
   // Default MVA methods to be trained + tested (flags follow the ROOT
   // tutorial defaults; switch any entry to 1/0 to enable/disable it)
   std::map<std::string,int> Use;
   // Multidimensional likelihood and nearest-neighbour methods
   Use["PDERS"]   = 0;
   Use["PDEFoam"] = 1;
   Use["KNN"]     = 1;
   // Linear discriminant analysis
   Use["LD"]      = 1;
   // Function discriminant analysis
   Use["FDA_MC"]   = 0;
   Use["FDA_GA"]   = 0;
   Use["FDA_MT"]   = 0;
   Use["FDA_GAMT"] = 0;
   // Neural networks
   Use["MLP"]     = 0;
   Use["DNN_CPU"] = 0;
   // Boosted decision trees
   Use["BDT"]  = 0;
   Use["BDTG"] = 1;
   std::cout << std::endl;
   std::cout << "==> Start TMVARegression" << std::endl;
   // Select only the methods requested on the command line
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = TMVA::gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i].Data());

         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod
                      << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++)
               std::cout << it->first << " ";
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }
   // Create a new ROOT output file
   TString outfileName( "TMVAReg.root" );
   TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
140 "!V:!Silent:Color:DrawProgressBar:AnalysisType=Regression" );
   // Define the input variables used for the MVA training; variable expressions
   // parseable by TTree::Draw are also allowed, e.g. "3*var1/var2*abs(var3)"
   dataloader->AddVariable( "var1", "Variable 1", "units", 'F' );
   dataloader->AddVariable( "var2", "Variable 2", "units", 'F' );
   // "Spectator" variables are not used in the training but are copied into
   // the final TestTree
   dataloader->AddSpectator( "spec1:=var1*2", "Spectator 1", "units", 'F' );
   dataloader->AddSpectator( "spec2:=var1*3", "Spectator 2", "units", 'F' );

   // Add the variable carrying the regression target
   dataloader->AddTarget( "fvalue" );
   // Read training data: use the local copy if present, otherwise download
   // the example file from the ROOT server
   TFile *input(0);
   TString fname = "./tmva_reg_example.root";
   if (!gSystem->AccessPathName( fname )) {
      input = TFile::Open( fname );
   }
   else {
      TFile::SetCacheFileDir(".");
      input = TFile::Open( "http://root.cern.ch/files/tmva_reg_example.root", "CACHEREAD" );
   }
   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVARegression : Using input file: " << input->GetName() << std::endl;
   // Register the regression tree
   TTree *regTree = (TTree*)input->Get("TreeR");
   // Global event weight per tree (event-wise weights are set below)
   Double_t regWeight = 1.0;
   dataloader->AddRegressionTree( regTree, regWeight );
   // Set individual event weights (the expression must evaluate on the input tree)
   dataloader->SetWeightExpression( "var1", "Regression" );
   // Optional cut on the input events, e.g. TCut mycut = "abs(var1)<0.5";
   TCut mycut = "";
   dataloader->PrepareTrainingAndTestTree( mycut,
      "nTrain_Regression=1000:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents:!V" );
225 "!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=40:NEventsMax=60:VarTransform=None" );
233 "!H:!V:MultiTargetRegression=F:TargetSelection=Mpv:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Compress=T:Kernel=None:Nmin=10:VarTransform=None" );
238 "nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );
243 "!H:!V:VarTransform=None" );
248 "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=MC:SampleSize=100000:Sigma=0.1:VarTransform=D" );
252 "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:PopSize=100:Cycles=3:Steps=30:Trim=True:SaveBestGen=1:VarTransform=Norm" );
256 "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
260 "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
   // Multilayer perceptron (neural network)
   if (Use["MLP"])
      factory->BookMethod( dataloader, TMVA::Types::kMLP, "MLP",
         "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N+20:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
   // Deep neural network (CPU implementation)
   if (Use["DNN_CPU"]) {

      // Three hidden tanh layers of 50 neurons each, plus a linear output layer
      TString layoutString("Layout=TANH|50,Layout=TANH|50,Layout=TANH|50,LINEAR");
      // Three consecutive training phases with decreasing learning rate
      TString training0("LearningRate=1e-2,Momentum=0.5,Repetitions=1,ConvergenceSteps=20,BatchSize=50,"
                        "TestRepetitions=10,WeightDecay=0.01,Regularization=NONE,DropConfig=0.2+0.2+0.2+0.,"
                        "DropRepetitions=2");
      TString training1("LearningRate=1e-3,Momentum=0.9,Repetitions=1,ConvergenceSteps=20,BatchSize=50,"
                        "TestRepetitions=5,WeightDecay=0.01,Regularization=L2,DropConfig=0.1+0.1+0.1,"
                        "DropRepetitions=1");
      TString training2("LearningRate=1e-4,Momentum=0.3,Repetitions=1,ConvergenceSteps=10,BatchSize=50,"
                        "TestRepetitions=5,WeightDecay=0.01,Regularization=NONE");
      TString trainingStrategyString("TrainingStrategy=");
      trainingStrategyString += training0 + "|" + training1 + "|" + training2;
294 "!H:V:ErrorStrategy=SUMOFSQUARES:VarTransform=G:WeightInitialization=XAVIERUNIFORM:Architecture=CPU");
296 nnOptions.Append(
":");
297 nnOptions.Append(layoutString);
298 nnOptions.Append(
":");
299 nnOptions.Append(trainingStrategyString);
313 "!H:!V:NTrees=100:MinNodeSize=1.0%:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30" );
317 "!H:!V:NTrees=2000::BoostType=Grad:Shrinkage=0.1:UseBaggedBoost:BaggedSampleFraction=0.5:nCuts=20:MaxDepth=3:MaxDepth=4" );
   // Train, test, and evaluate all booked methods
   factory->TrainAllMethods();
   factory->TestAllMethods();
   factory->EvaluateAllMethods();

   // Save the output
   outputFile->Close();

   std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
   std::cout << "==> TMVARegression is done!" << std::endl;

   delete factory;
   delete dataloader;

   // Launch the GUI for the regression result macros
   if (!gROOT->IsBatch()) TMVA::TMVARegGui( outfileName );
}
int main( int argc, char** argv )
{
   // Collect the method names passed on the command line
   TString methodList;
   for (int i=1; i<argc; i++) {
      TString regMethod(argv[i]);
      if (regMethod == "-b" || regMethod == "--batch") continue;
      if (!methodList.IsNull()) methodList += TString(",");
      methodList += regMethod;
   }
   TMVARegression(methodList);
   return 0;
}
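// A minimal application sketch, not part of the original macro: after
// training, the weight files written under "dataset/weights/" can be read
// back with TMVA::Reader. The variable and spectator declarations must
// match the training setup above; the booked method, weight-file path, and
// input values here are illustrative assumptions.

#include "TMVA/Reader.h"

void TMVARegressionApplySketch()
{
   TMVA::Reader reader("!Color:!Silent");

   // Local variables wired to the reader; same names/expressions as in training
   Float_t var1, var2, spec1, spec2;
   reader.AddVariable ("var1", &var1);
   reader.AddVariable ("var2", &var2);
   reader.AddSpectator("spec1:=var1*2", &spec1);
   reader.AddSpectator("spec2:=var1*3", &spec2);

   // Book one of the trained methods from its weight file (assumes the LD
   // method was trained by the macro above)
   reader.BookMVA("LD method", "dataset/weights/TMVARegression_LD.weights.xml");

   // Evaluate the regression target for one example event (values are made up)
   var1 = 0.5;
   var2 = 1.5;
   Float_t target = reader.EvaluateRegression("LD method")[0];
   std::cout << "Estimated target: " << target << std::endl;
}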