#include <cstdlib>
#include <iostream>
#include <map>
#include <string>

#include "TFile.h"
#include "TTree.h"
#include "TString.h"
#include "TSystem.h"
#include "TROOT.h"
#include "TCut.h"

#include "TMVA/Tools.h"
#include "TMVA/Factory.h"
#include "TMVA/DataLoader.h"
#include "TMVA/TMVARegGui.h"

void TMVARegression( TString myMethodList = "" )
{
   // This loads the TMVA library
   TMVA::Tools::Instance();
   // Default MVA methods to be trained and tested
   std::map<std::string,int> Use;
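   // A plausible set of defaults for the methods booked below, mirroring the
   // ROOT tutorial (assumed -- adjust to taste):
   Use["PDERS"]    = 0;
   Use["PDEFoam"]  = 1;
   Use["KNN"]      = 1;
   Use["LD"]       = 1;
   Use["FDA_MC"]   = 0;
   Use["FDA_GA"]   = 0;
   Use["FDA_MT"]   = 0;
   Use["FDA_GAMT"] = 0;
   Use["MLP"]      = 0;
   Use["DNN_CPU"]  = 0;
   Use["BDT"]      = 0;
   Use["BDTG"]     = 1;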
   std::cout << std::endl;
   std::cout << "==> Start TMVARegression" << std::endl;

   // Select the requested methods (an empty list books the defaults above)
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = TMVA::gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i]);

         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << " ";
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }
   // Create a new ROOT output file
   TString outfileName( "TMVAReg.root" );
   TFile* outputFile = TFile::Open( outfileName, "RECREATE" );

   // Create the factory object; AnalysisType=Regression selects regression mode
   TMVA::Factory *factory = new TMVA::Factory( "TMVARegression", outputFile,
                                               "!V:!Silent:Color:DrawProgressBar:AnalysisType=Regression" );

   TMVA::DataLoader *dataloader = new TMVA::DataLoader("dataset");
   // Define the input variables used for the MVA training; expressions that
   // TTree::Draw can parse are also allowed
   dataloader->AddVariable( "var1", "Variable 1", "units", 'F' );
   dataloader->AddVariable( "var2", "Variable 2", "units", 'F' );

   // "Spectator" variables are not used in the training, but are written to the
   // final TestTree together with the input variables and the MVA responses
   dataloader->AddSpectator( "spec1:=var1*2", "Spectator 1", "units", 'F' );
   dataloader->AddSpectator( "spec2:=var1*3", "Spectator 2", "units", 'F' );
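   // A regression target must be declared as well; the ROOT tutorial uses the
   // tree branch "fvalue" here (assumed -- rename to match your input tree)
   dataloader->AddTarget( "fvalue" );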
   // Read training and test data: open the example file locally if present,
   // otherwise fetch it from the ROOT server
   TFile *input(0);
   TString fname = "./tmva_reg_example.root";
   if (!gSystem->AccessPathName( fname )) {
      input = TFile::Open( fname ); // file exists in the local directory
   }
   else {
      TFile::SetCacheFileDir(".");
      input = TFile::Open( "http://root.cern.ch/files/tmva_reg_example.root", "CACHEREAD" );
   }
   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVARegression : Using input file: " << input->GetName() << std::endl;

   // Register the regression tree
   TTree *regTree = (TTree*)input->Get("TreeR");
205 "nTrain_Regression=1000:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents:!V" );
225 "!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=40:NEventsMax=60:VarTransform=None" );
233 "!H:!V:MultiTargetRegression=F:TargetSelection=Mpv:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Compress=T:Kernel=None:Nmin=10:VarTransform=None" );
238 "nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );
243 "!H:!V:VarTransform=None" );
248 "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=MC:SampleSize=100000:Sigma=0.1:VarTransform=D" );
252 "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:PopSize=100:Cycles=3:Steps=30:Trim=True:SaveBestGen=1:VarTransform=Norm" );
256 "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
260 "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
   // Multi-layer perceptron (neural network) trained with BFGS
   if (Use["MLP"])
      factory->BookMethod( dataloader, TMVA::Types::kMLP, "MLP",
           "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N+20:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
   // Deep neural network on CPU: three training phases with decreasing
   // learning rate, chained into a single TrainingStrategy string
   if (Use["DNN_CPU"]) {

      TString layoutString("Layout=TANH|50,Layout=TANH|50,Layout=TANH|50,LINEAR");

      TString training0("LearningRate=1e-2,Momentum=0.5,Repetitions=1,ConvergenceSteps=20,BatchSize=50,"
                        "TestRepetitions=10,WeightDecay=0.01,Regularization=NONE,DropConfig=0.2+0.2+0.2+0.,"
                        "DropRepetitions=2");
      TString training1("LearningRate=1e-3,Momentum=0.9,Repetitions=1,ConvergenceSteps=20,BatchSize=50,"
                        "TestRepetitions=5,WeightDecay=0.01,Regularization=L2,DropConfig=0.1+0.1+0.1,"
                        "DropRepetitions=1");
      TString training2("LearningRate=1e-4,Momentum=0.3,Repetitions=1,ConvergenceSteps=10,BatchSize=50,"
                        "TestRepetitions=5,WeightDecay=0.01,Regularization=NONE");

      TString trainingStrategyString("TrainingStrategy=");
      trainingStrategyString += training0 + "|" + training1 + "|" + training2;

      TString nnOptions("!H:V:ErrorStrategy=SUMOFSQUARES:VarTransform=G:WeightInitialization=XAVIERUNIFORM:Architecture=CPU");
      nnOptions.Append(":");
      nnOptions.Append(layoutString);
      nnOptions.Append(":");
      nnOptions.Append(trainingStrategyString);
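      // Book the DNN with the assembled option string; kDNN and the call
      // pattern follow the other bookings in this listing
      factory->BookMethod( dataloader, TMVA::Types::kDNN, "DNN_CPU", nnOptions );
   }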
313 "!H:!V:NTrees=100:MinNodeSize=1.0%:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30" );
317 "!H:!V:NTrees=2000::BoostType=Grad:Shrinkage=0.1:UseBaggedBoost:BaggedSampleFraction=0.5:nCuts=20:MaxDepth=3:MaxDepth=4" );
   // Train, test, and evaluate all booked methods
   factory->TrainAllMethods();
   factory->TestAllMethods();
   factory->EvaluateAllMethods();

   // Save the output
   outputFile->Close();

   std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
   std::cout << "==> TMVARegression is done!" << std::endl;

   delete factory;
   delete dataloader;

   // Launch the GUI for the regression macros
   if (!gROOT->IsBatch()) TMVA::TMVARegGui( outfileName );
}
int main( int argc, char** argv )
{
   // Collect the method list from the command line, skipping batch flags
   TString methodList;
   for (int i=1; i<argc; i++) {
      TString regMethod(argv[i]);
      if (regMethod == "-b" || regMethod == "--batch") continue;
      if (!methodList.IsNull()) methodList += TString(",");
      methodList += regMethod;
   }
   TMVARegression(methodList);
   return 0;
}
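// To run as a ROOT macro with a specific method selection (assuming a standard
// ROOT installation with TMVA), e.g.:
//
//    root -l TMVARegression.C\(\"LD,MLP\"\)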