// ============================================================================
// TMVARegression tutorial macro (ROOT/TMVA).
// NOTE(review): this text is a garbled extraction of the original file — the
// stray integers at the start of lines (50, 68, 96, ...) are original source
// line numbers fused into the code by the extractor, statements are split
// across lines, and whole fragments (factory/dataloader construction, the
// BookMethod calls owning the option strings below, closing braces) are
// missing.  Code is left byte-identical; comments describe intent only.
// ============================================================================
// Entry point: myMethodList is an optional comma-separated list of MVA
// method names to book; empty means "use the defaults enabled in Use".
50 void TMVARegression( TString myMethodList =
"" )
// Map "method name" -> enabled flag (1 = book this regressor, 0 = skip).
68 std::map<std::string,int> Use;
96 std::cout << std::endl;
97 std::cout <<
"==> Start TMVARegression" << std::endl;
// If the caller passed an explicit list, disable all defaults first, then
// re-enable only the requested methods.
100 if (myMethodList !=
"") {
101 for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;
// 'mlist' is presumably myMethodList tokenized on ',' (the splitting line is
// missing from this extraction) — TODO confirm.
104 for (
UInt_t i=0; i<mlist.size(); i++) {
105 std::string regMethod(mlist[i]);
// Unknown method name: print the valid keys and (in the original) bail out.
107 if (Use.find(regMethod) == Use.end()) {
108 std::cout <<
"Method \"" << regMethod <<
"\" not known in TMVA under this name. Choose among the following:" << std::endl;
109 for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first <<
" ";
110 std::cout << std::endl;
// Output ROOT file that will hold the training results / evaluation trees.
122 TString outfileName(
"TMVAReg.root" );
123 TFile* outputFile =
TFile::Open( outfileName,
"RECREATE" );
// Option string of the (missing) TMVA::Factory constructor call: selects
// regression analysis, colored output, progress bar.
136 "!V:!Silent:Color:DrawProgressBar:AnalysisType=Regression" );
// Register the two input (discriminating) variables with the dataloader.
149 dataloader->
AddVariable(
"var1",
"Variable 1",
"units",
'F' );
150 dataloader->
AddVariable(
"var2",
"Variable 2",
"units",
'F' );
// Spectators are carried through to the output trees but not used in the
// training; here they are simple derived functions of var1.
155 dataloader->
AddSpectator(
"spec1:=var1*2",
"Spectator 1",
"units",
'F' );
156 dataloader->
AddSpectator(
"spec2:=var1*3",
"Spectator 2",
"units",
'F' );
// Input data: local example file, fetched from root.cern.ch if absent
// (CACHEREAD stages the remote file into a local cache directory).
168 TString fname =
"./tmva_reg_example.root";
174 input =
TFile::Open(
"http://root.cern.ch/files/tmva_reg_example.root",
"CACHEREAD");
177 std::cout <<
"ERROR: could not open data file" << std::endl;
180 std::cout <<
"--- TMVARegression : Using input file: " << input->GetName() << std::endl;
// Regression tree holding the inputs and the regression target.
184 TTree *regTree = (TTree*)input->Get(
"TreeR");
// Options of the (missing) PrepareTrainingAndTestTree call: 1000 training
// events, remainder used for testing, random split.
201 "nTrain_Regression=1000:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents:!V" );
// Option strings of (missing) factory->BookMethod calls.  Judging by their
// content these configure: PDERS, PDEFoam, KNN, LD, FDA_MC, FDA_GA, FDA_MT,
// FDA_GAMT — presumed from the option keys; TODO confirm against original.
221 "!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=40:NEventsMax=60:VarTransform=None" );
229 "!H:!V:MultiTargetRegression=F:TargetSelection=Mpv:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Compress=T:Kernel=None:Nmin=10:VarTransform=None" );
234 "nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );
239 "!H:!V:VarTransform=None" );
244 "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=MC:SampleSize=100000:Sigma=0.1:VarTransform=D" );
248 "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:PopSize=100:Cycles=3:Steps=30:Trim=True:SaveBestGen=1:VarTransform=Norm" );
252 "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );
256 "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );
// Multi-layer perceptron: tanh neurons, BFGS training, event sampling.
260 factory->
BookMethod( dataloader,
TMVA::Types::kMLP,
"MLP",
"!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N+20:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );
// Deep neural network on CPU: layout string plus a four-phase training
// strategy with decreasing momentum/learning rate.
// NOTE(review): the stray numbers (275, 276, 278, 280, 281, 283) inside the
// literals below are fused source line numbers; training1's closing ");"
// and part of its option string were lost, so its declaration visibly runs
// into training2's — this span is NOT valid C++ as it stands.
262 if (Use[
"DNN_CPU"]) {
272 TString layoutString(
"Layout=TANH|100,LINEAR");
274 TString training0(
"LearningRate=1e-5,Momentum=0.5,Repetitions=1,ConvergenceSteps=500,BatchSize=50," 275 "TestRepetitions=7,WeightDecay=0.01,Regularization=NONE,DropConfig=0.5+0.5+0.5+0.5," 276 "DropRepetitions=2");
277 TString training1(
"LearningRate=1e-5,Momentum=0.9,Repetitions=1,ConvergenceSteps=170,BatchSize=30," 278 "TestRepetitions=7,WeightDecay=0.01,Regularization=L2,DropConfig=0.1+0.1+0.1,DropRepetitions=" 280 TString training2(
"LearningRate=1e-5,Momentum=0.3,Repetitions=1,ConvergenceSteps=150,BatchSize=40," 281 "TestRepetitions=7,WeightDecay=0.01,Regularization=NONE");
282 TString training3(
"LearningRate=1e-6,Momentum=0.1,Repetitions=1,ConvergenceSteps=500,BatchSize=100," 283 "TestRepetitions=7,WeightDecay=0.0001,Regularization=NONE");
// Concatenate the phases: TrainingStrategy=phase0|phase1|phase2|phase3.
285 TString trainingStrategyString(
"TrainingStrategy=");
286 trainingStrategyString += training0 +
"|" + training1 +
"|" + training2 +
"|" + training3;
// General DNN options (the "TString nnOptions(" declaration line is missing
// here); layout and training strategy get appended ':'-separated.
292 "!H:V:ErrorStrategy=SUMOFSQUARES:VarTransform=G:WeightInitialization=XAVIERUNIFORM:Architecture=CPU");
294 nnOptions.Append(
":");
295 nnOptions.Append(layoutString);
296 nnOptions.Append(
":");
297 nnOptions.Append(trainingStrategyString);
// Boosted decision trees: AdaBoost.R2 regression boost, then gradient boost.
// NOTE(review): in the BDTG string "NTrees=2000::" has a doubled ':' and
// MaxDepth appears twice (3 then 4) — likely transcription damage; verify
// against the original tutorial before reuse.
311 "!H:!V:NTrees=100:MinNodeSize=1.0%:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30" );
315 "!H:!V:NTrees=2000::BoostType=Grad:Shrinkage=0.1:UseBaggedBoost:BaggedSampleFraction=0.5:nCuts=20:MaxDepth=3:MaxDepth=4" );
// Final status messages (the TrainAllMethods/TestAllMethods/
// EvaluateAllMethods calls and outputFile->Close() are missing here).
334 std::cout <<
"==> Wrote root file: " << outputFile->GetName() << std::endl;
335 std::cout <<
"==> TMVARegression is done!" << std::endl;
344 int main(
int argc,
char** argv )
348 for (
int i=1; i<argc; i++) {
349 TString regMethod(argv[i]);
350 if(regMethod==
"-b" || regMethod==
"--batch")
continue;
351 if (!methodList.IsNull()) methodList += TString(
",");
352 methodList += regMethod;
354 TMVARegression(methodList);
virtual Bool_t AccessPathName(const char *path, EAccessMode mode=kFileExists)
Returns FALSE if one can access a file using the specified access mode (note the inverted convention: kFALSE means the path IS accessible, kTRUE means it is not).
MethodBase * BookMethod(DataLoader *loader, TString theMethodName, TString methodTitle, TString theOption="")
Book a classifier or regression method.
void TrainAllMethods()
Iterates through all booked methods and calls training.
void AddVariable(const TString &expression, const TString &title, const TString &unit, char type='F', Double_t min=0, Double_t max=0)
user inserts discriminating variable in data set info
static TFile * Open(const char *name, Option_t *option="", const char *ftitle="", Int_t compress=1, Int_t netopt=0)
Create / open a file.
R__EXTERN TSystem * gSystem
void EvaluateAllMethods(void)
Iterates over all MVAs that have been booked, and calls their evaluation methods. ...
void AddRegressionTree(TTree *tree, Double_t weight=1.0, Types::ETreeType treetype=Types::kMaxTreeType)
This is the main MVA steering class.
void PrepareTrainingAndTestTree(const TCut &cut, const TString &splitOpt)
prepare the training and test trees -> same cuts for signal and background
void AddTarget(const TString &expression, const TString &title="", const TString &unit="", Double_t min=0, Double_t max=0)
user inserts target in data set info
void SetWeightExpression(const TString &variable, const TString &className="")
Abstract ClassifierFactory template that handles arbitrary types.
static Bool_t SetCacheFileDir(const char *cacheDir, Bool_t operateDisconnected=kTRUE, Bool_t forceCacheread=kFALSE)
Sets the directory where to locally stage/cache remote files.
void TMVARegGui(const char *fName="TMVAReg.root", TString dataset="")
int main(int argc, char **argv)
void AddSpectator(const TString &expression, const TString &title="", const TString &unit="", Double_t min=0, Double_t max=0)
user inserts spectator in data set info