Logo ROOT  
Reference Guide
 
Loading...
Searching...
No Matches
TMLPAnalyzer.cxx
Go to the documentation of this file.
1// @(#)root/mlp:$Id$
2// Author: Christophe.Delaere@cern.ch 25/04/04
3
4/*************************************************************************
5 * Copyright (C) 1995-2003, Rene Brun and Fons Rademakers. *
6 * All rights reserved. *
7 * *
8 * For the licensing terms see $ROOTSYS/LICENSE. *
9 * For the list of contributors see $ROOTSYS/README/CREDITS. *
10 *************************************************************************/
11
12/** \class TMLPAnalyzer
13
14This utility class contains a set of tests useful when developing
15a neural network.
16It allows you to check for unneeded variables, and to control
17the network structure.
18
19*/
20
#include "TROOT.h"
#include "TSynapse.h"
#include "TNeuron.h"
#include "TMultiLayerPerceptron.h"
#include "TMLPAnalyzer.h"
#include "TTree.h"
#include "TTreeFormula.h"
#include "TEventList.h"
#include "TH1D.h"
#include "TProfile.h"
#include "THStack.h"
#include "TLegend.h"
#include "TVirtualPad.h"
#include "TRegexp.h"
#include "TMath.h"
#include "snprintf.h"
#include <iostream>
#include <cstdlib>
39
40
41////////////////////////////////////////////////////////////////////////////////
42/// Destructor
43
45{
46 delete fAnalysisTree;
47 delete fIOTree;
48}
49
50////////////////////////////////////////////////////////////////////////////////
51/// Returns the number of layers.
52
54{
55 TString fStructure = fNetwork->GetStructure();
56 return fStructure.CountChar(':')+1;
57}
58
59////////////////////////////////////////////////////////////////////////////////
60/// Returns the number of neurons in given layer.
61
63{
64 if(layer==1) {
65 TString fStructure = fNetwork->GetStructure();
66 TString input = TString(fStructure(0, fStructure.First(':')));
67 return input.CountChar(',')+1;
68 }
69 else if(layer==GetLayers()) {
70 TString fStructure = fNetwork->GetStructure();
71 TString output = TString(fStructure(fStructure.Last(':') + 1,
72 fStructure.Length() - fStructure.Last(':')));
73 return output.CountChar(',')+1;
74 }
75 else {
76 Int_t cnt=1;
77 TString fStructure = fNetwork->GetStructure();
78 TString hidden = TString(fStructure(fStructure.First(':') + 1,
79 fStructure.Last(':') - fStructure.First(':') - 1));
80 Int_t beg = 0;
81 Int_t end = hidden.Index(":", beg + 1);
82 Int_t num = 0;
83 while (end != -1) {
84 num = atoi(TString(hidden(beg, end - beg)).Data());
85 cnt++;
86 beg = end + 1;
87 end = hidden.Index(":", beg + 1);
88 if(layer==cnt) return num;
89 }
90 num = atoi(TString(hidden(beg, hidden.Length() - beg)).Data());
91 cnt++;
92 if(layer==cnt) return num;
93 }
94 return -1;
95}
96
97////////////////////////////////////////////////////////////////////////////////
98/// Returns the formula used as input for neuron (idx) in
99/// the first layer.
100
102{
103 TString fStructure = fNetwork->GetStructure();
104 TString input = TString(fStructure(0, fStructure.First(':')));
105 Int_t beg = 0;
106 Int_t end = input.Index(",", beg + 1);
108 Int_t cnt = 0;
109 while (end != -1) {
110 brName = TString(input(beg, end - beg));
111 if (brName[0]=='@')
112 brName = brName(1,brName.Length()-1);
113 beg = end + 1;
114 end = input.Index(",", beg + 1);
115 if(cnt==idx) return brName;
116 cnt++;
117 }
118 brName = TString(input(beg, input.Length() - beg));
119 if (brName[0]=='@')
120 brName = brName(1,brName.Length()-1);
121 return brName;
122}
123
124////////////////////////////////////////////////////////////////////////////////
125/// Returns the name of any neuron from the input layer
126
128{
130 return neuron ? neuron->GetName() : "NO SUCH NEURON";
131}
132
133////////////////////////////////////////////////////////////////////////////////
134/// Returns the name of any neuron from the output layer
135
137{
138 TNeuron* neuron=(TNeuron*)fNetwork->fLastLayer[out];
139 return neuron ? neuron->GetName() : "NO SUCH NEURON";
140}
141
142////////////////////////////////////////////////////////////////////////////////
143/// Gives some information about the network in the terminal.
144
146{
147 TString fStructure = fNetwork->GetStructure();
148 std::cout << "Network with structure: " << fStructure.Data() << std::endl;
149 std::cout << "inputs with low values in the differences plot may not be needed" << std::endl;
150 // Checks if some input variable is not needed
151 char var[64], sel[64];
152 for (Int_t i = 0; i < GetNeurons(1); i++) {
153 snprintf(var,64,"diff>>tmp%d",i);
154 snprintf(sel,64,"inNeuron==%d",i);
155 fAnalysisTree->Draw(var, sel, "goff");
156 TH1F* tmp = (TH1F*)gDirectory->Get(Form("tmp%d",i));
157 if (!tmp) continue;
158 std::cout << GetInputNeuronTitle(i)
159 << " -> " << tmp->GetMean()
160 << " +/- " << tmp->GetRMS() << std::endl;
161 }
162}
163
164////////////////////////////////////////////////////////////////////////////////
165/// Collect information about what is useful in the network.
166/// This method has to be called first when analyzing a network.
167/// Fills the two analysis trees.
168
170{
171 Double_t shift = 0.1;
174 Int_t nEvents = test->GetN();
175 Int_t nn = GetNeurons(1);
176 Double_t* params = new Double_t[nn];
177 Double_t* rms = new Double_t[nn];
179 Int_t* index = new Int_t[nn];
180 TString formula;
181 TRegexp re("{[0-9]+}$");
182 Ssiz_t len = formula.Length();
183 Ssiz_t pos = -1;
184 Int_t i(0), j(0), k(0), l(0);
185 for(i=0; i<nn; i++){
186 formula = GetNeuronFormula(i);
187 pos = re.Index(formula,&len);
188 if(pos==-1 || len<3) {
189 formulas[i] = new TTreeFormula(Form("NF%zu",(size_t)this),formula,data);
190 index[i] = 0;
191 }
192 else {
193 TString newformula(formula,pos);
194 TString val = formula(pos+1,len-2);
195 formulas[i] = new TTreeFormula(Form("NF%zu",(size_t)this),newformula,data);
196 formula = newformula;
197 index[i] = val.Atoi();
198 }
199 TH1D tmp("tmpb", "tmpb", 1, -FLT_MAX, FLT_MAX);
200 data->Draw(Form("%s>>tmpb",formula.Data()),"","goff");
201 rms[i] = tmp.GetRMS();
202 }
203 Int_t inNeuron = 0;
204 Double_t diff = 0.;
205 if(fAnalysisTree) delete fAnalysisTree;
206 fAnalysisTree = new TTree("result","analysis");
207 fAnalysisTree->SetDirectory(nullptr);
208 fAnalysisTree->Branch("inNeuron",&inNeuron,"inNeuron/I");
209 fAnalysisTree->Branch("diff",&diff,"diff/D");
213
214 delete fIOTree;
215 fIOTree=new TTree("MLP_iotree","MLP_iotree");
216 fIOTree->SetDirectory(nullptr);
218 for (i=0; i<nn; i++)
219 leaflist+=Form("In%d/D:",i);
220 leaflist.Remove(leaflist.Length()-1);
221 fIOTree->Branch("In", params, leaflist);
222
223 leaflist="";
224 for (i=0; i<numOutNodes; i++)
225 leaflist+=Form("Out%d/D:",i);
226 leaflist.Remove(leaflist.Length()-1);
227 fIOTree->Branch("Out", outVal, leaflist);
228
229 leaflist="";
230 for (i=0; i<numOutNodes; i++)
231 leaflist+=Form("True%d/D:",i);
232 leaflist.Remove(leaflist.Length()-1);
233 fIOTree->Branch("True", trueVal, leaflist);
234 Double_t v1 = 0.;
235 Double_t v2 = 0.;
236 // Loop on the events in the test sample
237 for(j=0; j< nEvents; j++) {
238 fNetwork->GetEntry(test->GetEntry(j));
239 // Loop on the neurons to evaluate
240 for(k=0; k<GetNeurons(1); k++) {
241 params[k] = formulas[k]->EvalInstance(index[k]);
242 }
243 for(k=0; k<GetNeurons(GetLayers()); k++) {
244 outVal[k] = fNetwork->Evaluate(k,params);
245 trueVal[k] = ((TNeuron*)fNetwork->fLastLayer[k])->GetBranch();
246 }
247 fIOTree->Fill();
248
249 // Loop on the input neurons
250 for (i = 0; i < GetNeurons(1); i++) {
251 inNeuron = i;
252 diff = 0;
253 // Loop on the neurons in the output layer
254 for(l=0; l<GetNeurons(GetLayers()); l++){
255 params[i] += shift*rms[i];
256 v1 = fNetwork->Evaluate(l,params);
257 params[i] -= 2*shift*rms[i];
258 v2 = fNetwork->Evaluate(l,params);
259 diff += (v1-v2)*(v1-v2);
260 // reset to original value
261 params[i] += shift*rms[i];
262 }
265 }
266 }
267 delete[] params;
268 delete[] rms;
269 delete[] outVal;
270 delete[] trueVal;
271 delete[] index;
272 for(i=0; i<GetNeurons(1); i++) delete formulas[i];
273 delete [] formulas;
276}
277
278////////////////////////////////////////////////////////////////////////////////
279/// Draws the distribution (on the test sample) of the
280/// impact on the network output of a small variation of
281/// the ith input.
282
284{
285 char sel[64];
286 snprintf(sel,64, "inNeuron==%d", i);
287 fAnalysisTree->Draw("diff", sel);
288}
289
290////////////////////////////////////////////////////////////////////////////////
291/// Draws the distribution (on the test sample) of the
292/// impact on the network output of a small variation of
293/// each input.
294/// DrawDInputs() draws something that approximates the distribution of the
295/// derivative of the NN w.r.t. each input. That quantity is recognized as
296/// one of the measures to determine key quantities in the network.
297///
298/// What is done is to vary one input around its nominal value and to see
299/// how the NN changes. This is done for each entry in the sample and produces
300/// a distribution.
301///
302/// What you can learn from that is:
303/// - is variable a really useful, or is my network insensitive to it ?
304/// - is there any risk of big systematic ? Is the network extremely sensitive
305/// to small variations of any of my inputs ?
306///
307/// As you might understand, this is to be considered with care and can serve
308/// as input for an "educated guess" when optimizing the network.
309
311{
312 THStack* stack = new THStack("differences","differences (impact of variables on ANN)");
313 TLegend* legend = new TLegend(0.75,0.75,0.95,0.95);
314 TH1F* tmp = nullptr;
315 char var[64], sel[64];
316 for(Int_t i = 0; i < GetNeurons(1); i++) {
317 snprintf(var,64, "diff>>tmp%d", i);
318 snprintf(sel,64, "inNeuron==%d", i);
319 fAnalysisTree->Draw(var, sel, "goff");
320 tmp = (TH1F*)gDirectory->Get(Form("tmp%d",i));
321 tmp->SetDirectory(nullptr);
322 tmp->SetLineColor(i+1);
323 stack->Add(tmp);
324 legend->AddEntry(tmp,GetInputNeuronTitle(i),"l");
325 }
326 stack->Draw("nostack");
327 legend->Draw();
328 gPad->SetLogy();
329}
330
331////////////////////////////////////////////////////////////////////////////////
332/// Draws the distribution of the neural network (using ith neuron).
333/// Two distributions are drawn, for events passing respectively the "signal"
334/// and "background" cuts. Only the test sample is used.
335
336void TMLPAnalyzer::DrawNetwork(Int_t neuron, const char* signal, const char* bg)
337{
340 TEventList* current = data->GetEventList();
341 data->SetEventList(test);
342 THStack* stack = new THStack("__NNout_TMLPA",Form("Neural net output (neuron %d)",neuron));
343 TH1F *bgh = new TH1F("__bgh_TMLPA", "NN output", 50, -0.5, 1.5);
344 TH1F *sigh = new TH1F("__sigh_TMLPA", "NN output", 50, -0.5, 1.5);
345 bgh->SetDirectory(nullptr);
346 sigh->SetDirectory(nullptr);
347 Int_t nEvents = 0;
348 Int_t j=0;
349 // build event lists for signal and background
350 TEventList* signal_list = new TEventList("__tmpSig_MLPA");
351 TEventList* bg_list = new TEventList("__tmpBkg_MLPA");
352 data->Draw(">>__tmpSig_MLPA",signal,"goff");
353 data->Draw(">>__tmpBkg_MLPA",bg,"goff");
354
355 // fill the background
356 nEvents = bg_list->GetN();
357 for(j=0; j< nEvents; j++) {
358 bgh->Fill(fNetwork->Result(bg_list->GetEntry(j),neuron));
359 }
360 // fill the signal
361 nEvents = signal_list->GetN();
362 for(j=0; j< nEvents; j++) {
363 sigh->Fill(fNetwork->Result(signal_list->GetEntry(j),neuron));
364 }
365 // draws the result
366 bgh->SetLineColor(kBlue);
367 bgh->SetFillStyle(3008);
368 bgh->SetFillColor(kBlue);
369 sigh->SetLineColor(kRed);
370 sigh->SetFillStyle(3003);
371 sigh->SetFillColor(kRed);
372 bgh->SetStats(false);
373 sigh->SetStats(false);
374 stack->Add(bgh);
375 stack->Add(sigh);
376 TLegend *legend = new TLegend(.75, .80, .95, .95);
377 legend->AddEntry(bgh, "Background");
378 legend->AddEntry(sigh,"Signal");
379 stack->Draw("nostack");
380 legend->Draw();
381 // restore the default event list
382 data->SetEventList(current);
383 delete signal_list;
384 delete bg_list;
385}
386
387////////////////////////////////////////////////////////////////////////////////
388/// Create a profile of the difference of the MLP output minus the
389/// true value for a given output node outnode, vs the true value for
390/// outnode, for all test data events. This method is mainly useful
391/// when doing regression analysis with the MLP (i.e. not classification,
392/// but continuous truth values).
393/// The resulting TProfile histogram is returned.
394/// It is not drawn if option "goff" is specified.
395/// Options are passed to TProfile::Draw
396
398 Option_t *option /*=""*/)
399{
401 TString pipehist=Form("MLP_truthdev_%d",outnode);
403 drawline.Form("Out.Out%d-True.True%d:True.True%d>>",
405 fIOTree->Draw(drawline+pipehist+"(20)", "", "goff prof");
407 h->SetDirectory(nullptr);
408 const char* title=GetOutputNeuronTitle(outnode);
409 if (title) {
410 h->SetTitle(Form("#Delta(output - truth) vs. truth for %s",
411 title));
412 h->GetXaxis()->SetTitle(title);
413 h->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s", title));
414 }
415 if (!strstr(option,"goff"))
416 h->Draw();
417 return h;
418}
419
420////////////////////////////////////////////////////////////////////////////////
421/// Creates TProfiles of the difference of the MLP output minus the
422/// true value vs the true value, one for each output, filled with the
423/// test data events. This method is mainly useful when doing regression
424/// analysis with the MLP (i.e. not classification, but continuous truth
425/// values).
426/// The returned THStack contains all the TProfiles. It is drawn unless
427/// the option "goff" is specified.
428/// Options are passed to TProfile::Draw.
429
431{
432 THStack *hs=new THStack("MLP_TruthDeviation",
433 "Deviation of MLP output from truth");
434
435 // leg!=0 means we're drawing
436 TLegend *leg=nullptr;
437 if (!option || !strstr(option,"goff"))
438 leg=new TLegend(.4,.85,.95,.95,"#Delta(output - truth) vs. truth for:");
439
440 const char* xAxisTitle=nullptr;
441
442 // create profile for each input neuron,
443 // adding them into the THStack and the TLegend
446 h->SetLineColor(1+outnode);
447 hs->Add(h, option);
448 if (leg) leg->AddEntry(h,GetOutputNeuronTitle(outnode));
449 if (!outnode)
450 // Xaxis title is the same for all, extract it from the first one.
451 xAxisTitle=h->GetXaxis()->GetTitle();
452 }
453
454 if (leg) {
455 hs->Draw("nostack");
456 leg->Draw();
457 // gotta draw before accessing the axes
458 hs->GetXaxis()->SetTitle(xAxisTitle);
459 hs->GetYaxis()->SetTitle("#Delta(output - truth)");
460 }
461
462 return hs;
463}
464
465////////////////////////////////////////////////////////////////////////////////
466/// Creates a profile of the difference of the MLP output outnode minus
467/// the true value of outnode vs the input value innode, for all test
468/// data events.
469/// The resulting TProfile histogram is returned.
470/// It is not drawn if option "goff" is specified.
471/// Options are passed to TProfile::Draw
472
474 Int_t outnode /*=0*/,
475 Option_t *option /*=""*/)
476{
478 TString pipehist=Form("MLP_truthdev_i%d_o%d", innode, outnode);
480 drawline.Form("Out.Out%d-True.True%d:In.In%d>>",
482 fIOTree->Draw(drawline+pipehist+"(50)", "", "goff prof");
484 h->SetDirectory(nullptr);
487 h->SetTitle(Form("#Delta(output - truth) of %s vs. input %s",
489 h->GetXaxis()->SetTitle(Form("%s", titleInNeuron));
490 h->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s",
492 if (!strstr(option,"goff"))
493 h->Draw(option);
494 return h;
495}
496
497////////////////////////////////////////////////////////////////////////////////
498/// Creates a profile of the difference of the MLP output outnode minus the
499/// true value of outnode vs the input value, stacked for all inputs, for
500/// all test data events.
501/// The returned THStack contains all the TProfiles. It is drawn unless
502/// the option "goff" is specified.
503/// Options are passed to TProfile::Draw.
504
506 Option_t *option /*=""*/)
507{
509 sName.Form("MLP_TruthDeviationIO_%d", outnode);
511 THStack *hs=new THStack(sName,
512 Form("Deviation of MLP output %s from truth",
514
515 // leg!=0 means we're drawing.
516 TLegend *leg=nullptr;
517 if (!option || !strstr(option,"goff"))
518 leg=new TLegend(.4,.75,.95,.95,
519 Form("#Delta(output - truth) of %s vs. input for:",
521
522 // create profile for each input neuron,
523 // adding them into the THStack and the TLegend
525 Int_t innode=0;
526 for (innode=0; innode<numInNodes; innode++) {
528 h->SetLineColor(1+innode);
529 hs->Add(h, option);
530 if (leg) leg->AddEntry(h,h->GetXaxis()->GetTitle());
531 }
532
533 if (leg) {
534 hs->Draw("nostack");
535 leg->Draw();
536 // gotta draw before accessing the axes
537 hs->GetXaxis()->SetTitle("Input value");
538 hs->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s",
540 }
541
542 return hs;
543}
#define h(i)
Definition RSha256.hxx:106
const char Option_t
Option string (const char)
Definition RtypesCore.h:80
@ kRed
Definition Rtypes.h:67
@ kBlue
Definition Rtypes.h:67
ROOT::Detail::TRangeCast< T, true > TRangeDynCast
TRangeDynCast is an adapter class that allows the typed iteration through a TCollection.
#define gDirectory
Definition TDirectory.h:385
Option_t Option_t option
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void data
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void input
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t Int_t Int_t Window_t TString Int_t GCValues_t GetPrimarySelectionOwner GetDisplay GetScreen GetColormap GetNativeEvent const char const char dpyName wid window const char font_name cursor keysym reg const char only_if_exist regb h Point_t winding char text const char depth char const char Int_t count const char ColorStruct_t color const char Pixmap_t Pixmap_t PictureAttributes_t attr const char char ret_data h unsigned char height h Atom_t Int_t ULong_t ULong_t unsigned char prop_list Atom_t sel
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t index
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t Int_t Int_t Window_t TString Int_t GCValues_t GetPrimarySelectionOwner GetDisplay GetScreen GetColormap GetNativeEvent const char const char dpyName wid window const char font_name cursor keysym reg const char only_if_exist regb h Point_t winding char text const char depth char const char Int_t count const char ColorStruct_t color const char Pixmap_t Pixmap_t PictureAttributes_t attr const char char ret_data h unsigned char height h Atom_t Int_t ULong_t ULong_t unsigned char prop_list Atom_t Atom_t Atom_t Time_t UChar_t len
#define gROOT
Definition TROOT.h:411
char * Form(const char *fmt,...)
Formats a string in a circular formatting buffer.
Definition TString.cxx:2495
#define gPad
#define snprintf
Definition civetweb.c:1579
<div class="legacybox"><h2>Legacy Code</h2> TEventList is a legacy interface: there will be no bug fi...
Definition TEventList.h:31
1-D histogram with a double per channel (see TH1 documentation)
Definition TH1.h:927
1-D histogram with a float per channel (see TH1 documentation)
Definition TH1.h:879
virtual void SetDirectory(TDirectory *dir)
By default, when a histogram is created, it is added to the list of histogram objects in the current ...
Definition TH1.cxx:8978
TObject * FindObject(const char *name) const override
Search object named name in the list of functions.
Definition TH1.cxx:3845
The Histogram stack class.
Definition THStack.h:40
virtual void Add(TH1 *h, Option_t *option="")
Add a new histogram to the list.
Definition THStack.cxx:364
void Draw(Option_t *chopt="") override
Draw this stack with its current attributes.
Definition THStack.cxx:451
This class displays a legend box (TPaveText) containing several legend entries.
Definition TLegend.h:23
TTree * fAnalysisTree
Int_t GetNeurons(Int_t layer)
Returns the number of neurons in given layer.
Int_t GetLayers()
Returns the number of layers.
TProfile * DrawTruthDeviation(Int_t outnode=0, Option_t *option="")
Create a profile of the difference of the MLP output minus the true value for a given output node out...
void DrawDInput(Int_t i)
Draws the distribution (on the test sample) of the impact on the network output of a small variation ...
const char * GetOutputNeuronTitle(Int_t out)
Returns the name of any neuron from the output layer.
~TMLPAnalyzer() override
Destructor.
void DrawDInputs()
Draws the distribution (on the test sample) of the impact on the network output of a small variation ...
TTree * fIOTree
THStack * DrawTruthDeviationInsOut(Int_t outnode=0, Option_t *option="")
Creates a profile of the difference of the MLP output outnode minus the true value of outnode vs the ...
void CheckNetwork()
Gives some information about the network in the terminal.
void GatherInformations()
Collect information about what is useful in the network.
THStack * DrawTruthDeviations(Option_t *option="")
Creates TProfiles of the difference of the MLP output minus the true value vs the true value,...
TProfile * DrawTruthDeviationInOut(Int_t innode, Int_t outnode=0, Option_t *option="")
Creates a profile of the difference of the MLP output outnode minus the true value of outnode vs the ...
const char * GetInputNeuronTitle(Int_t in)
Returns the name of any neuron from the input layer.
TMultiLayerPerceptron * fNetwork
TString GetNeuronFormula(Int_t idx)
Returns the formula used as input for neuron (idx) in the first layer.
void DrawNetwork(Int_t neuron, const char *signal, const char *bg)
Draws the distribution of the neural network (using ith neuron).
Double_t Evaluate(Int_t index, Double_t *params) const
Returns the Neural Net for a given set of input parameters #parameters must equal #input neurons.
TEventList * fTest
! EventList defining the events in the test dataset
TTree * fData
! pointer to the tree used as datasource
Double_t Result(Int_t event, Int_t index=0) const
Computes the output for a given event.
TObjArray fLastLayer
Collection of the output neurons; subset of fNetwork.
TObjArray fFirstLayer
Collection of the input neurons; subset of fNetwork.
void GetEntry(Int_t) const
Load an entry into the network.
const char * GetName() const override
Returns name of object.
Definition TNamed.h:49
This class describes an elementary neuron, which is the basic element for a Neural Network.
Definition TNeuron.h:25
Profile Histogram.
Definition TProfile.h:32
Regular expression class.
Definition TRegexp.h:31
Basic string class.
Definition TString.h:138
Ssiz_t Length() const
Definition TString.h:425
Int_t Atoi() const
Return integer value of string.
Definition TString.cxx:1994
Ssiz_t First(char c) const
Find first occurrence of a character c.
Definition TString.cxx:545
const char * Data() const
Definition TString.h:384
Ssiz_t Last(char c) const
Find last occurrence of a character c.
Definition TString.cxx:938
Int_t CountChar(Int_t c) const
Return number of times character c occurs in the string.
Definition TString.cxx:522
Ssiz_t Index(const char *pat, Ssiz_t i=0, ECaseCompare cmp=kExact) const
Definition TString.h:659
Used to pass a selection expression to the Tree drawing routine.
A TTree represents a columnar dataset.
Definition TTree.h:89
virtual Int_t Fill()
Fill all branches.
Definition TTree.cxx:4653
void Draw(Option_t *opt) override
Default Draw method for all objects.
Definition TTree.h:470
virtual void SetDirectory(TDirectory *dir)
Change the tree's directory.
Definition TTree.cxx:9173
TBranch * Branch(const char *name, T *obj, Int_t bufsize=32000, Int_t splitlevel=99)
Add a new branch, and infer the data type from the type of obj being passed.
Definition TTree.h:389
virtual void ResetBranchAddresses()
Tell all of our branches to drop their current objects and allocate new ones.
Definition TTree.cxx:8230
leg
Definition legend1.C:34
Double_t Sqrt(Double_t x)
Returns the square root of x.
Definition TMath.h:673
TLine l
Definition textangle.C:4
static void output()