mlpRegression.C
/// \file
/// \ingroup tutorial_mlp
/// This macro shows the use of an ANN for regression analysis:
/// given a set of input vectors {i} and a set of output vectors {o},
/// one looks for the unknown function f such that f(i)=o.
/// The ANN can approximate this function; the TMLPAnalyzer::DrawTruthDeviation
/// methods can be used to evaluate the quality of the approximation.
///
/// For simplicity, we use a known function to create test and training data.
/// In reality this function is usually not known, and the data comes e.g.
/// from measurements.
///
/// \macro_image
/// \macro_output
/// \macro_code
///
/// \author Axel Naumann, 2005-02-02

Double_t theUnknownFunction(Double_t x, Double_t y) {
   return sin((1.7+x)*(x-0.3)-2.3*(y+0.7));
}

void mlpRegression() {
   // create a tree with training and test data:
   // two input parameters x and y, and one output value f(x,y)
   TNtuple* t = new TNtuple("tree", "tree", "x:y:f");
   TRandom r;
   for (Int_t i = 0; i < 1000; i++) {
      Float_t x = r.Rndm();
      Float_t y = r.Rndm();
      // fill it with x, y, and f(x,y) - usually this function
      // is not known, and the value of f for a given x and y comes
      // e.g. from measurements
      t->Fill(x, y, theUnknownFunction(x, y));
   }

   // create the ANN: two input neurons (x, y), two hidden layers with
   // 10 and 8 neurons, and one output neuron (f); odd entries (Entry$%2)
   // are used for training, even entries for testing
   TMultiLayerPerceptron* mlp = new TMultiLayerPerceptron("x,y:10:8:f", t,
                                                          "Entry$%2", "(Entry$%2)==0");
   // train for 150 epochs, updating the graphical display every 10 epochs
   mlp->Train(150, "graph update=10");

   // analyze it
   TMLPAnalyzer* mlpa = new TMLPAnalyzer(mlp);
   mlpa->GatherInformations();
   mlpa->CheckNetwork();
   mlpa->DrawDInputs();

   // drawing statistics shows the quality of the ANN's approximation
   TCanvas* cIO = new TCanvas("TruthDeviation", "TruthDeviation");
   cIO->Divide(2, 2);
   cIO->cd(1);
   // draw the difference between the ANN's output for (x,y) and
   // the true value f(x,y), vs. f(x,y), as TProfiles
   mlpa->DrawTruthDeviations();

   cIO->cd(2);
   // draw the difference between the ANN's output for (x,y) and
   // the true value f(x,y), vs. x and vs. y, as TProfiles
   mlpa->DrawTruthDeviationInsOut();

   cIO->cd(3);
   // draw a box plot of the difference between the ANN's output
   // for (x,y) and f(x,y), vs. f(x,y)
   mlpa->GetIOTree()->Draw("Out.Out0-True.True0:True.True0>>hDelta", "", "goff");
   TH2F* hDelta = (TH2F*)gDirectory->Get("hDelta");
   hDelta->SetTitle("Difference between ANN output and truth vs. truth");
   hDelta->Draw("BOX");

   cIO->cd(4);
   // draw the difference between the ANN's output and f(x,y) on a
   // 15x15 grid spanning [-1, 1.8] in x and y, i.e. beyond the [0, 1]
   // training range, assuming the ANN can extrapolate
   Double_t vx[225];
   Double_t vy[225];
   Double_t delta[225];
   Double_t v[2];
   for (Int_t ix = 0; ix < 15; ix++) {
      v[0] = ix/5. - 1.;
      for (Int_t iy = 0; iy < 15; iy++) {
         v[1] = iy/5. - 1.;
         Int_t idx = ix*15 + iy;
         vx[idx] = v[0];
         vy[idx] = v[1];
         delta[idx] = mlp->Evaluate(0, v) - theUnknownFunction(v[0], v[1]);
      }
   }
   TGraph2D* g2Extrapolate = new TGraph2D("ANN extrapolation",
                                          "ANN extrapolation, ANN output - truth",
                                          225, vx, vy, delta);

   g2Extrapolate->Draw("TRI2");
}
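Once the macro has been run, the trained network can be reused outside of it. The sketch below is not part of the original tutorial: it assumes a trained TMultiLayerPerceptron (for example the one built above, if the pointer is kept around) and shows two typical follow-up steps the class provides, evaluating the response at an arbitrary input point with Evaluate() and exporting the network as standalone code with Export(). The helper name useTrainedNetwork and the file name exportedNN are illustrative only.

#include "TMultiLayerPerceptron.h"
#include <iostream>

// Sketch only: reuse a trained network after running mlpRegression().
void useTrainedNetwork(TMultiLayerPerceptron *mlp)
{
   // Evaluate output neuron 0 at an arbitrary (x,y); the parameter array
   // must have one entry per input neuron, here x and y.
   Double_t v[2] = {0.4, 0.6};
   std::cout << "ANN(0.4, 0.6) = " << mlp->Evaluate(0, v) << std::endl;

   // Export the network as standalone C++ code (exportedNN.h / exportedNN.cxx),
   // so it can be evaluated without the ROOT MLP classes.
   mlp->Export("exportedNN", "C++");
}

The class also offers DumpWeights() and LoadWeights() to persist and restore just the trained weights.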