ROOT 6.10/09 Reference Guide
mlpRegression.C
/// \file
/// \ingroup tutorial_mlp
/// This macro shows the use of an ANN for regression analysis:
/// given a set {i} of input vectors i and a set {o} of output vectors o,
/// one looks for the unknown function f(i)=o.
/// The ANN can approximate this function; TMLPAnalyzer::DrawTruthDeviation
/// methods can be used to evaluate the quality of the approximation.
///
/// For simplicity, we use a known function to create test and training data.
/// In reality this function is usually not known, and the data comes e.g.
/// from measurements.
///
/// \macro_image
/// \macro_output
/// \macro_code
///
/// \author Axel Naumann, 2005-02-02

Double_t theUnknownFunction(Double_t x, Double_t y) {
   return sin((1.7+x)*(x-0.3)-2.3*(y+0.7));
}

void mlpRegression() {
   // create a tree with train and test data.
   // we have two input parameters x and y,
   // and one output value f(x,y)
   TNtuple* t=new TNtuple("tree","tree","x:y:f");
   TRandom r;
   for (Int_t i=0; i<1000; i++) {
      Float_t x=r.Rndm();
      Float_t y=r.Rndm();
      // fill it with x, y, and f(x,y) - usually this function
      // is not known, and the value of f given an x and a y comes
      // e.g. from measurements
      t->Fill(x,y,theUnknownFunction(x,y));
   }

   // create ANN
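   // layout "x,y:10:8:f": input neurons fed with x and y, two hidden
   // layers of 10 and 8 neurons, and one output neuron trained to
   // reproduce f; odd entries ("Entry$%2") form the training sample,
   // even entries ("(Entry$%2)==0") the test sample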
   TMultiLayerPerceptron* mlp=new TMultiLayerPerceptron("x,y:10:8:f",t,
                                                        "Entry$%2","(Entry$%2)==0");
   mlp->Train(150,"graph update=10");

   // analyze it
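   // GatherInformations() collects the results used by the drawing methods
   // below, CheckNetwork() prints a short summary of the network to the
   // terminal, and DrawDInputs() shows, on the test sample, how strongly
   // the output reacts to a small variation of each input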
   TMLPAnalyzer* mlpa=new TMLPAnalyzer(mlp);
   mlpa->GatherInformations();
   mlpa->CheckNetwork();
   mlpa->DrawDInputs();

   // draw statistics showing the quality of the ANN's approximation
   TCanvas* cIO=new TCanvas("TruthDeviation", "TruthDeviation");
   cIO->Divide(2,2);
   cIO->cd(1);
   // draw the difference between the ANN's output for (x,y) and
   // the true value f(x,y), vs. f(x,y), as TProfiles
   mlpa->DrawTruthDeviations();

   cIO->cd(2);
   // draw the difference between the ANN's output for (x,y) and
   // the true value f(x,y), vs. x, and vs. y, as TProfiles
   mlpa->DrawTruthDeviationInsOut();

   cIO->cd(3);
   // draw a box plot of the ANN's output for (x,y) vs f(x,y)
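   // the analyzer's IO tree stores the network output (Out.Out0) and the
   // corresponding true value (True.True0); ">>hDelta" books the 2-D
   // histogram retrieved below, and "goff" fills it without drawing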
   mlpa->GetIOTree()->Draw("Out.Out0-True.True0:True.True0>>hDelta","","goff");
   TH2F* hDelta=(TH2F*)gDirectory->Get("hDelta");
   hDelta->SetTitle("Difference between ANN output and truth vs. truth");
   hDelta->Draw("BOX");

   cIO->cd(4);
   // draw difference of ANN's output for (x,y) vs f(x,y) assuming
   // the ANN can extrapolate
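   // evaluate the network on a 15x15 grid with x and y running from -1.0
   // to 1.8 in steps of 0.2; the training data only covered [0,1), so most
   // of these points force the network to extrapolate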
   Double_t vx[225];
   Double_t vy[225];
   Double_t delta[225];
   Double_t v[2];
   for (Int_t ix=0; ix<15; ix++) {
      v[0]=ix/5.-1.;
      for (Int_t iy=0; iy<15; iy++) {
         v[1]=iy/5.-1.;
         Int_t idx=ix*15+iy;
         vx[idx]=v[0];
         vy[idx]=v[1];
         delta[idx]=mlp->Evaluate(0, v)-theUnknownFunction(v[0],v[1]);
      }
   }
   TGraph2D* g2Extrapolate=new TGraph2D("ANN extrapolation",
                                        "ANN extrapolation, ANN output - truth",
                                        225, vx, vy, delta);

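   // "TRI2" paints the Delaunay triangulation of the points, colored
   // according to the z value (the deviation from the truth)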
   g2Extrapolate->Draw("TRI2");
}
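
A possible follow-up, not part of the tutorial: if the trained network should be usable outside of ROOT, TMultiLayerPerceptron::Export can write it out as standalone code. A minimal sketch, assuming it is added inside mlpRegression() after the call to Train() (the file name "approx" is arbitrary):

   // export the trained network as non-ROOT-dependent C++ code
   mlp->Export("approx","C++");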