TMLPAnalyzer.cxx
// @(#)root/mlp:$Id$
// Author: Christophe.Delaere@cern.ch   25/04/04

/*************************************************************************
 * Copyright (C) 1995-2003, Rene Brun and Fons Rademakers.               *
 * All rights reserved.                                                  *
 *                                                                       *
 * For the licensing terms see $ROOTSYS/LICENSE.                         *
 * For the list of contributors see $ROOTSYS/README/CREDITS.             *
 *************************************************************************/

/** \class TMLPAnalyzer

This utility class contains a set of tests useful when developing
a neural network.
It allows you to check for unneeded variables, and to check
the network structure.

*/
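// A minimal usage sketch, as comments (names are illustrative, not part of
// this file: it assumes `mlp` points to a trained TMultiLayerPerceptron):
//
//    TMLPAnalyzer ana(mlp);
//    ana.GatherInformations();   // must be called first: fills the trees
//    ana.CheckNetwork();         // print per-input sensitivities
//    ana.DrawDInputs();          // stacked sensitivity distributions
//    ana.DrawNetwork(0, "type==1", "type==0");   // signal vs. background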

#include "TROOT.h"
#include "TSynapse.h"
#include "TNeuron.h"
#include "TMultiLayerPerceptron.h"
#include "TMLPAnalyzer.h"
#include "TTree.h"
#include "TTreeFormula.h"
#include "TEventList.h"
#include "TH1D.h"
#include "TProfile.h"
#include "THStack.h"
#include "TLegend.h"
#include "TVirtualPad.h"
#include "TRegexp.h"
#include "TMath.h"
#include "Riostream.h"
#include <stdlib.h>

ClassImp(TMLPAnalyzer);

////////////////////////////////////////////////////////////////////////////////
/// Destructor

TMLPAnalyzer::~TMLPAnalyzer()
{
   delete fAnalysisTree;
   delete fIOTree;
}

////////////////////////////////////////////////////////////////////////////////
/// Returns the number of layers.

Int_t TMLPAnalyzer::GetLayers()
{
   TString fStructure = fNetwork->GetStructure();
   return fStructure.CountChar(':')+1;
}

////////////////////////////////////////////////////////////////////////////////
/// Returns the number of neurons in given layer.

Int_t TMLPAnalyzer::GetNeurons(Int_t layer)
{
   if(layer==1) {
      TString fStructure = fNetwork->GetStructure();
      TString input = TString(fStructure(0, fStructure.First(':')));
      return input.CountChar(',')+1;
   }
   else if(layer==GetLayers()) {
      TString fStructure = fNetwork->GetStructure();
      TString output = TString(fStructure(fStructure.Last(':') + 1,
                               fStructure.Length() - fStructure.Last(':')));
      return output.CountChar(',')+1;
   }
   else {
      Int_t cnt=1;
      TString fStructure = fNetwork->GetStructure();
      TString hidden = TString(fStructure(fStructure.First(':') + 1,
                               fStructure.Last(':') - fStructure.First(':') - 1));
      Int_t beg = 0;
      Int_t end = hidden.Index(":", beg + 1);
      Int_t num = 0;
      while (end != -1) {
         num = atoi(TString(hidden(beg, end - beg)).Data());
         cnt++;
         beg = end + 1;
         end = hidden.Index(":", beg + 1);
         if(layer==cnt) return num;
      }
      num = atoi(TString(hidden(beg, hidden.Length() - beg)).Data());
      cnt++;
      if(layer==cnt) return num;
   }
   return -1;
}

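// Worked example (structure string is illustrative): for a network built
// as "x,y,z:10:5:out", GetLayers() returns 4, and GetNeurons(1..4) return
// 3, 10, 5 and 1 respectively.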
////////////////////////////////////////////////////////////////////////////////
/// Returns the formula used as input for neuron (idx) in
/// the first layer.

TString TMLPAnalyzer::GetNeuronFormula(Int_t idx)
{
   TString fStructure = fNetwork->GetStructure();
   TString input = TString(fStructure(0, fStructure.First(':')));
   Int_t beg = 0;
   Int_t end = input.Index(",", beg + 1);
   TString brName;
   Int_t cnt = 0;
   while (end != -1) {
      brName = TString(input(beg, end - beg));
      if (brName[0]=='@')
         brName = brName(1,brName.Length()-1);
      beg = end + 1;
      end = input.Index(",", beg + 1);
      if(cnt==idx) return brName;
      cnt++;
   }
   brName = TString(input(beg, input.Length() - beg));
   if (brName[0]=='@')
      brName = brName(1,brName.Length()-1);
   return brName;
}

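// Illustrative example: for the structure "@pt,eta:5:type", the '@' prefix
// (marking a normalized input) is stripped, so GetNeuronFormula(0) returns
// "pt" and GetNeuronFormula(1) returns "eta".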
////////////////////////////////////////////////////////////////////////////////
/// Returns the name of any neuron from the input layer

const char* TMLPAnalyzer::GetInputNeuronTitle(Int_t in)
{
   TNeuron* neuron=(TNeuron*)fNetwork->fFirstLayer[in];
   return neuron ? neuron->GetName() : "NO SUCH NEURON";
}

////////////////////////////////////////////////////////////////////////////////
/// Returns the name of any neuron from the output layer

const char* TMLPAnalyzer::GetOutputNeuronTitle(Int_t out)
{
   TNeuron* neuron=(TNeuron*)fNetwork->fLastLayer[out];
   return neuron ? neuron->GetName() : "NO SUCH NEURON";
}

////////////////////////////////////////////////////////////////////////////////
/// Gives some information about the network in the terminal.

void TMLPAnalyzer::CheckNetwork()
{
   TString fStructure = fNetwork->GetStructure();
   std::cout << "Network with structure: " << fStructure.Data() << std::endl;
   std::cout << "inputs with low values in the differences plot may not be needed" << std::endl;
   // Checks if some input variable is not needed
   char var[64], sel[64];
   for (Int_t i = 0; i < GetNeurons(1); i++) {
      snprintf(var,64,"diff>>tmp%d",i);
      snprintf(sel,64,"inNeuron==%d",i);
      fAnalysisTree->Draw(var, sel, "goff");
      TH1F* tmp = (TH1F*)gDirectory->Get(Form("tmp%d",i));
      if (!tmp) continue;
      std::cout << GetInputNeuronTitle(i)
                << " -> " << tmp->GetMean()
                << " +/- " << tmp->GetRMS() << std::endl;
   }
}

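// The output format follows directly from the code above: one line per
// input with the mean and RMS of its "diff" distribution (the numbers here
// are hypothetical):
//
//    Network with structure: pt,eta:10:type
//    inputs with low values in the differences plot may not be needed
//    pt -> 0.35 +/- 0.08
//    eta -> 0.004 +/- 0.002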
////////////////////////////////////////////////////////////////////////////////
/// Collect information about what is useful in the network.
/// This method has to be called first when analyzing a network.
/// Fills the two analysis trees.

void TMLPAnalyzer::GatherInformations()
{
   Double_t shift = 0.1;
   TTree* data = fNetwork->fData;
   TEventList* test = fNetwork->fTest;
   Int_t nEvents = test->GetN();
   Int_t nn = GetNeurons(1);
   Double_t* params = new Double_t[nn];
   Double_t* rms = new Double_t[nn];
   TTreeFormula** formulas = new TTreeFormula*[nn];
   Int_t* index = new Int_t[nn];
   TString formula;
   TRegexp re("{[0-9]+}$");
   Ssiz_t len = formula.Length();
   Ssiz_t pos = -1;
   Int_t i(0), j(0), k(0), l(0);
   for(i=0; i<nn; i++){
      formula = GetNeuronFormula(i);
      pos = re.Index(formula,&len);
      if(pos==-1 || len<3) {
         formulas[i] = new TTreeFormula(Form("NF%lu",(ULong_t)this),formula,data);
         index[i] = 0;
      }
      else {
         TString newformula(formula,pos);
         TString val = formula(pos+1,len-2);
         formulas[i] = new TTreeFormula(Form("NF%lu",(ULong_t)this),newformula,data);
         formula = newformula;
         index[i] = val.Atoi();
      }
      TH1D tmp("tmpb", "tmpb", 1, -FLT_MAX, FLT_MAX);
      data->Draw(Form("%s>>tmpb",formula.Data()),"","goff");
      rms[i] = tmp.GetRMS();
   }
   Int_t inNeuron = 0;
   Double_t diff = 0.;
   if(fAnalysisTree) delete fAnalysisTree;
   fAnalysisTree = new TTree("result","analysis");
   fAnalysisTree->SetDirectory(0);
   fAnalysisTree->Branch("inNeuron",&inNeuron,"inNeuron/I");
   fAnalysisTree->Branch("diff",&diff,"diff/D");
   Int_t numOutNodes=GetNeurons(GetLayers());
   Double_t *outVal=new Double_t[numOutNodes];
   Double_t *trueVal=new Double_t[numOutNodes];

   delete fIOTree;
   fIOTree=new TTree("MLP_iotree","MLP_iotree");
   fIOTree->SetDirectory(0);
   TString leaflist;
   for (i=0; i<nn; i++)
      leaflist+=Form("In%d/D:",i);
   leaflist.Remove(leaflist.Length()-1);
   fIOTree->Branch("In", params, leaflist);

   leaflist="";
   for (i=0; i<numOutNodes; i++)
      leaflist+=Form("Out%d/D:",i);
   leaflist.Remove(leaflist.Length()-1);
   fIOTree->Branch("Out", outVal, leaflist);

   leaflist="";
   for (i=0; i<numOutNodes; i++)
      leaflist+=Form("True%d/D:",i);
   leaflist.Remove(leaflist.Length()-1);
   fIOTree->Branch("True", trueVal, leaflist);
   Double_t v1 = 0.;
   Double_t v2 = 0.;
   // Loop on the events in the test sample
   for(j=0; j< nEvents; j++) {
      fNetwork->GetEntry(test->GetEntry(j));
      // Loop on the neurons to evaluate
      for(k=0; k<GetNeurons(1); k++) {
         params[k] = formulas[k]->EvalInstance(index[k]);
      }
      for(k=0; k<GetNeurons(GetLayers()); k++) {
         outVal[k] = fNetwork->Evaluate(k,params);
         trueVal[k] = ((TNeuron*)fNetwork->fLastLayer[k])->GetBranch();
      }
      fIOTree->Fill();

      // Loop on the input neurons
      for (i = 0; i < GetNeurons(1); i++) {
         inNeuron = i;
         diff = 0;
         // Loop on the neurons in the output layer
         for(l=0; l<GetNeurons(GetLayers()); l++){
            params[i] += shift*rms[i];
            v1 = fNetwork->Evaluate(l,params);
            params[i] -= 2*shift*rms[i];
            v2 = fNetwork->Evaluate(l,params);
            diff += (v1-v2)*(v1-v2);
            // reset to original value
            params[i] += shift*rms[i];
         }
         diff = TMath::Sqrt(diff);
         fAnalysisTree->Fill();
      }
   }
   delete[] params;
   delete[] rms;
   delete[] outVal;
   delete[] trueVal;
   delete[] index;
   for(i=0; i<GetNeurons(1); i++) delete formulas[i];
   delete [] formulas;
   fAnalysisTree->ResetBranchAddresses();
   fIOTree->ResetBranchAddresses();
}

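// For reference, the quantity stored in "diff" for input i on each event is
//
//    diff_i = sqrt( sum_l [ f_l(x + s*rms_i*e_i) - f_l(x - s*rms_i*e_i) ]^2 )
//
// with s = 0.1 (the "shift" above), rms_i the RMS of input i over the data,
// and e_i the unit vector along input i: a central-difference estimate of
// the network's sensitivity to that input, summed in quadrature over the
// output neurons l.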
////////////////////////////////////////////////////////////////////////////////
/// Draws the distribution (on the test sample) of the
/// impact on the network output of a small variation of
/// the ith input.

void TMLPAnalyzer::DrawDInput(Int_t i)
{
   char sel[64];
   snprintf(sel,64, "inNeuron==%d", i);
   fAnalysisTree->Draw("diff", sel);
}

////////////////////////////////////////////////////////////////////////////////
/// Draws the distribution (on the test sample) of the
/// impact on the network output of a small variation of
/// each input.
/// DrawDInputs() draws something that approximates the distribution of the
/// derivative of the NN w.r.t. each input. That quantity is one recognized
/// measure for identifying the key inputs of the network.
///
/// What is done is to vary one input around its nominal value and to see
/// how the NN changes. This is done for each entry in the sample and produces
/// a distribution.
///
/// What you can learn from that is:
/// - is a given variable really useful, or is the network insensitive to it?
/// - is there any risk of a large systematic effect? Is the network extremely
///   sensitive to small variations of any of the inputs?
///
/// As you might understand, this is to be considered with care and can serve
/// as input for an "educated guess" when optimizing the network.

void TMLPAnalyzer::DrawDInputs()
{
   THStack* stack = new THStack("differences","differences (impact of variables on ANN)");
   TLegend* legend = new TLegend(0.75,0.75,0.95,0.95);
   TH1F* tmp = 0;
   char var[64], sel[64];
   for(Int_t i = 0; i < GetNeurons(1); i++) {
      snprintf(var,64, "diff>>tmp%d", i);
      snprintf(sel,64, "inNeuron==%d", i);
      fAnalysisTree->Draw(var, sel, "goff");
      tmp = (TH1F*)gDirectory->Get(Form("tmp%d",i));
      tmp->SetDirectory(0);
      tmp->SetLineColor(i+1);
      stack->Add(tmp);
      legend->AddEntry(tmp,GetInputNeuronTitle(i),"l");
   }
   stack->Draw("nostack");
   legend->Draw();
   gPad->SetLogy();
}

////////////////////////////////////////////////////////////////////////////////
/// Draws the distribution of the neural network (using the ith neuron).
/// Two distributions are drawn, for events passing respectively the "signal"
/// and "background" cuts. Only the test sample is used.

void TMLPAnalyzer::DrawNetwork(Int_t neuron, const char* signal, const char* bg)
{
   TTree* data = fNetwork->fData;
   TEventList* test = fNetwork->fTest;
   TEventList* current = data->GetEventList();
   data->SetEventList(test);
   THStack* stack = new THStack("__NNout_TMLPA",Form("Neural net output (neuron %d)",neuron));
   TH1F *bgh  = new TH1F("__bgh_TMLPA", "NN output", 50, -0.5, 1.5);
   TH1F *sigh = new TH1F("__sigh_TMLPA", "NN output", 50, -0.5, 1.5);
   bgh->SetDirectory(0);
   sigh->SetDirectory(0);
   Int_t nEvents = 0;
   Int_t j=0;
   // build event lists for signal and background
   TEventList* signal_list = new TEventList("__tmpSig_MLPA");
   TEventList* bg_list     = new TEventList("__tmpBkg_MLPA");
   data->Draw(">>__tmpSig_MLPA",signal,"goff");
   data->Draw(">>__tmpBkg_MLPA",bg,"goff");

   // fill the background
   nEvents = bg_list->GetN();
   for(j=0; j< nEvents; j++) {
      bgh->Fill(fNetwork->Result(bg_list->GetEntry(j),neuron));
   }
   // fill the signal
   nEvents = signal_list->GetN();
   for(j=0; j< nEvents; j++) {
      sigh->Fill(fNetwork->Result(signal_list->GetEntry(j),neuron));
   }
   // draws the result
   bgh->SetLineColor(kBlue);
   bgh->SetFillStyle(3008);
   bgh->SetFillColor(kBlue);
   sigh->SetLineColor(kRed);
   sigh->SetFillStyle(3003);
   sigh->SetFillColor(kRed);
   bgh->SetStats(0);
   sigh->SetStats(0);
   stack->Add(bgh);
   stack->Add(sigh);
   TLegend *legend = new TLegend(.75, .80, .95, .95);
   legend->AddEntry(bgh, "Background");
   legend->AddEntry(sigh,"Signal");
   stack->Draw("nostack");
   legend->Draw();
   // restore the default event list
   data->SetEventList(current);
   delete signal_list;
   delete bg_list;
}

////////////////////////////////////////////////////////////////////////////////
/// Create a profile of the difference of the MLP output minus the
/// true value for a given output node outnode, vs the true value for
/// outnode, for all test data events. This method is mainly useful
/// when doing regression analysis with the MLP (i.e. not classification,
/// but continuous truth values).
/// The resulting TProfile histogram is returned.
/// It is not drawn if option "goff" is specified.
/// Options are passed to TProfile::Draw

TProfile* TMLPAnalyzer::DrawTruthDeviation(Int_t outnode /*=0*/,
                                           Option_t *option /*=""*/)
{
   if (!fIOTree) GatherInformations();
   TString pipehist=Form("MLP_truthdev_%d",outnode);
   TString drawline;
   drawline.Form("Out.Out%d-True.True%d:True.True%d>>",
                 outnode, outnode, outnode);
   fIOTree->Draw(drawline+pipehist+"(20)", "", "goff prof");
   TProfile* h=(TProfile*)gDirectory->Get(pipehist);
   h->SetDirectory(0);
   const char* title=GetOutputNeuronTitle(outnode);
   if (title) {
      h->SetTitle(Form("#Delta(output - truth) vs. truth for %s",
                       title));
      h->GetXaxis()->SetTitle(title);
      h->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s", title));
   }
   if (!strstr(option,"goff"))
      h->Draw();
   return h;
}

////////////////////////////////////////////////////////////////////////////////
/// Creates TProfiles of the difference of the MLP output minus the
/// true value vs the true value, one for each output, filled with the
/// test data events. This method is mainly useful when doing regression
/// analysis with the MLP (i.e. not classification, but continuous truth
/// values).
/// The returned THStack contains all the TProfiles. It is drawn unless
/// the option "goff" is specified.
/// Options are passed to TProfile::Draw.

THStack* TMLPAnalyzer::DrawTruthDeviations(Option_t *option /*=""*/)
{
   THStack *hs=new THStack("MLP_TruthDeviation",
                           "Deviation of MLP output from truth");

   // leg!=0 means we're drawing
   TLegend *leg=0;
   if (!option || !strstr(option,"goff"))
      leg=new TLegend(.4,.85,.95,.95,"#Delta(output - truth) vs. truth for:");

   const char* xAxisTitle=0;

   // create a profile for each output node,
   // adding them into the THStack and the TLegend
   for (Int_t outnode=0; outnode<GetNeurons(GetLayers()); outnode++) {
      TProfile* h=DrawTruthDeviation(outnode, "goff");
      h->SetLineColor(1+outnode);
      hs->Add(h, option);
      if (leg) leg->AddEntry(h,GetOutputNeuronTitle(outnode));
      if (!outnode)
         // Xaxis title is the same for all, extract it from the first one.
         xAxisTitle=h->GetXaxis()->GetTitle();
   }

   if (leg) {
      hs->Draw("nostack");
      leg->Draw();
      // gotta draw before accessing the axes
      hs->GetXaxis()->SetTitle(xAxisTitle);
      hs->GetYaxis()->SetTitle("#Delta(output - truth)");
   }

   return hs;
}

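// Regression usage sketch (assumes `mlp` was trained on a continuous truth
// value; all names are illustrative):
//
//    TMLPAnalyzer ana(mlp);
//    ana.GatherInformations();
//    ana.DrawTruthDeviations();         // one profile per output node
//    ana.DrawTruthDeviationInsOut(0);   // output 0 deviation vs. each input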
////////////////////////////////////////////////////////////////////////////////
/// Creates a profile of the difference of the MLP output outnode minus
/// the true value of outnode vs the input value innode, for all test
/// data events.
/// The resulting TProfile histogram is returned.
/// It is not drawn if option "goff" is specified.
/// Options are passed to TProfile::Draw

TProfile* TMLPAnalyzer::DrawTruthDeviationInOut(Int_t innode,
                                                Int_t outnode /*=0*/,
                                                Option_t *option /*=""*/)
{
   if (!fIOTree) GatherInformations();
   TString pipehist=Form("MLP_truthdev_i%d_o%d", innode, outnode);
   TString drawline;
   drawline.Form("Out.Out%d-True.True%d:In.In%d>>",
                 outnode, outnode, innode);
   fIOTree->Draw(drawline+pipehist+"(50)", "", "goff prof");
   TProfile* h=(TProfile*)gROOT->FindObject(pipehist);
   h->SetDirectory(0);
   const char* titleInNeuron=GetInputNeuronTitle(innode);
   const char* titleOutNeuron=GetOutputNeuronTitle(outnode);
   h->SetTitle(Form("#Delta(output - truth) of %s vs. input %s",
                    titleOutNeuron, titleInNeuron));
   h->GetXaxis()->SetTitle(Form("%s", titleInNeuron));
   h->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s",
                                titleOutNeuron));
   if (!strstr(option,"goff"))
      h->Draw(option);
   return h;
}

////////////////////////////////////////////////////////////////////////////////
/// Creates a profile of the difference of the MLP output outnode minus the
/// true value of outnode vs the input value, stacked for all inputs, for
/// all test data events.
/// The returned THStack contains all the TProfiles. It is drawn unless
/// the option "goff" is specified.
/// Options are passed to TProfile::Draw.

THStack* TMLPAnalyzer::DrawTruthDeviationInsOut(Int_t outnode /*=0*/,
                                                Option_t *option /*=""*/)
{
   TString sName;
   sName.Form("MLP_TruthDeviationIO_%d", outnode);
   const char* outputNodeTitle=GetOutputNeuronTitle(outnode);
   THStack *hs=new THStack(sName,
                           Form("Deviation of MLP output %s from truth",
                                outputNodeTitle));

   // leg!=0 means we're drawing.
   TLegend *leg=0;
   if (!option || !strstr(option,"goff"))
      leg=new TLegend(.4,.75,.95,.95,
                      Form("#Delta(output - truth) of %s vs. input for:",
                           outputNodeTitle));

   // create a profile for each input neuron,
   // adding them into the THStack and the TLegend
   Int_t numInNodes=GetNeurons(1);
   Int_t innode=0;
   for (innode=0; innode<numInNodes; innode++) {
      TProfile* h=DrawTruthDeviationInOut(innode, outnode, "goff");
      h->SetLineColor(1+innode);
      hs->Add(h, option);
      if (leg) leg->AddEntry(h,h->GetXaxis()->GetTitle());
   }

   if (leg) {
      hs->Draw("nostack");
      leg->Draw();
      // gotta draw before accessing the axes
      hs->GetXaxis()->SetTitle("Input value");
      hs->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s",
                                    outputNodeTitle));
   }

   return hs;
}
Definition: gifencode.c:226