TMLPAnalyzer.cxx
// @(#)root/mlp:$Id$
// Author: Christophe.Delaere@cern.ch   25/04/04

/*************************************************************************
 * Copyright (C) 1995-2003, Rene Brun and Fons Rademakers.               *
 * All rights reserved.                                                  *
 *                                                                       *
 * For the licensing terms see $ROOTSYS/LICENSE.                         *
 * For the list of contributors see $ROOTSYS/README/CREDITS.             *
 *************************************************************************/

/** \class TMLPAnalyzer

This utility class contains a set of tests useful when developing
a neural network.
It allows you to check for unneeded variables and to control
the network structure.

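A typical session, as a minimal sketch (assuming a trained
TMultiLayerPerceptron called mlp; the object names are illustrative):

~~~ {.cpp}
TMLPAnalyzer ana(mlp);     // attach the analyzer to the trained network
ana.GatherInformations();  // fill the analysis trees from the test sample
ana.CheckNetwork();        // print the output sensitivity to each input
ana.DrawDInputs();         // draw the distributions of those sensitivities
~~~
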
*/

#include "TROOT.h"
#include "TSynapse.h"
#include "TNeuron.h"
#include "TMultiLayerPerceptron.h"
#include "TMLPAnalyzer.h"
#include "TTree.h"
#include "TTreeFormula.h"
#include "TEventList.h"
#include "TH1D.h"
#include "TProfile.h"
#include "THStack.h"
#include "TLegend.h"
#include "TVirtualPad.h"
#include "TRegexp.h"
#include "TMath.h"
#include "snprintf.h"
#include <iostream>
#include <cstdlib>
#include <cfloat> // for FLT_MAX, used in GatherInformations()

ClassImp(TMLPAnalyzer);

////////////////////////////////////////////////////////////////////////////////
/// Destructor

TMLPAnalyzer::~TMLPAnalyzer()
{
   delete fAnalysisTree;
   delete fIOTree;
}

////////////////////////////////////////////////////////////////////////////////
/// Returns the number of layers.

Int_t TMLPAnalyzer::GetLayers()
{
   TString fStructure = fNetwork->GetStructure();
   return fStructure.CountChar(':')+1;
}

////////////////////////////////////////////////////////////////////////////////
/// Returns the number of neurons in the given layer.
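/// As an illustrative example (the structure string is hypothetical, not
/// taken from this file): for a network built as "x,y,z:5:3:type",
/// GetLayers() returns 4, and GetNeurons() returns 3, 5, 3 and 1 for
/// layers 1 to 4 respectively.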

Int_t TMLPAnalyzer::GetNeurons(Int_t layer)
{
   if(layer==1) {
      TString fStructure = fNetwork->GetStructure();
      TString input = TString(fStructure(0, fStructure.First(':')));
      return input.CountChar(',')+1;
   }
   else if(layer==GetLayers()) {
      TString fStructure = fNetwork->GetStructure();
      TString output = TString(fStructure(fStructure.Last(':') + 1,
                               fStructure.Length() - fStructure.Last(':')));
      return output.CountChar(',')+1;
   }
   else {
      Int_t cnt=1;
      TString fStructure = fNetwork->GetStructure();
      TString hidden = TString(fStructure(fStructure.First(':') + 1,
                               fStructure.Last(':') - fStructure.First(':') - 1));
      Int_t beg = 0;
      Int_t end = hidden.Index(":", beg + 1);
      Int_t num = 0;
      while (end != -1) {
         num = atoi(TString(hidden(beg, end - beg)).Data());
         cnt++;
         beg = end + 1;
         end = hidden.Index(":", beg + 1);
         if(layer==cnt) return num;
      }
      num = atoi(TString(hidden(beg, hidden.Length() - beg)).Data());
      cnt++;
      if(layer==cnt) return num;
   }
   return -1;
}

////////////////////////////////////////////////////////////////////////////////
/// Returns the formula used as input for neuron (idx) in
/// the first layer.
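/// A leading '@' (used in the structure string to flag a normalized input)
/// is stripped from the returned name, so an input declared as "@pt"
/// yields "pt".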

TString TMLPAnalyzer::GetNeuronFormula(Int_t idx)
{
   TString fStructure = fNetwork->GetStructure();
   TString input = TString(fStructure(0, fStructure.First(':')));
   Int_t beg = 0;
   Int_t end = input.Index(",", beg + 1);
   TString brName;
   Int_t cnt = 0;
   while (end != -1) {
      brName = TString(input(beg, end - beg));
      if (brName[0]=='@')
         brName = brName(1,brName.Length()-1);
      beg = end + 1;
      end = input.Index(",", beg + 1);
      if(cnt==idx) return brName;
      cnt++;
   }
   brName = TString(input(beg, input.Length() - beg));
   if (brName[0]=='@')
      brName = brName(1,brName.Length()-1);
   return brName;
}

////////////////////////////////////////////////////////////////////////////////
/// Returns the name of any neuron from the input layer.

const char* TMLPAnalyzer::GetInputNeuronTitle(Int_t in)
{
   TNeuron* neuron=(TNeuron*)fNetwork->fFirstLayer[in];
   return neuron ? neuron->GetName() : "NO SUCH NEURON";
}

////////////////////////////////////////////////////////////////////////////////
/// Returns the name of any neuron from the output layer.

const char* TMLPAnalyzer::GetOutputNeuronTitle(Int_t out)
{
   TNeuron* neuron=(TNeuron*)fNetwork->fLastLayer[out];
   return neuron ? neuron->GetName() : "NO SUCH NEURON";
}

////////////////////////////////////////////////////////////////////////////////
/// Gives some information about the network in the terminal.

void TMLPAnalyzer::CheckNetwork()
{
   TString fStructure = fNetwork->GetStructure();
   std::cout << "Network with structure: " << fStructure.Data() << std::endl;
   std::cout << "inputs with low values in the differences plot may not be needed" << std::endl;
   // check whether some input variable is not needed
   char var[64], sel[64];
   for (Int_t i = 0; i < GetNeurons(1); i++) {
      snprintf(var,64,"diff>>tmp%d",i);
      snprintf(sel,64,"inNeuron==%d",i);
      fAnalysisTree->Draw(var, sel, "goff");
      TH1F* tmp = (TH1F*)gDirectory->Get(Form("tmp%d",i));
      if (!tmp) continue;
      std::cout << GetInputNeuronTitle(i)
                << " -> " << tmp->GetMean()
                << " +/- " << tmp->GetRMS() << std::endl;
   }
}

////////////////////////////////////////////////////////////////////////////////
/// Collect information about what is useful in the network.
/// This method has to be called first when analyzing a network.
/// Fills the two analysis trees.
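/// The "result" tree holds one entry per (test event, input neuron) pair,
/// with branches inNeuron/diff, where diff quantifies the output shift caused
/// by a small variation of that input; the "MLP_iotree" holds the network
/// inputs, outputs and truth values (branches In, Out and True) per event.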

void TMLPAnalyzer::GatherInformations()
{
   Double_t shift = 0.1;
   TTree* data = fNetwork->fData;
   TEventList* test = fNetwork->fTest;
   Int_t nEvents = test->GetN();
   Int_t nn = GetNeurons(1);
   Double_t* params = new Double_t[nn];
   Double_t* rms    = new Double_t[nn];
   TTreeFormula** formulas = new TTreeFormula*[nn];
   Int_t* index = new Int_t[nn];
   TString formula;
   TRegexp re("{[0-9]+}$");
   Ssiz_t len = formula.Length();
   Ssiz_t pos = -1;
   Int_t i(0), j(0), k(0), l(0);
   for(i=0; i<nn; i++){
      formula = GetNeuronFormula(i);
      pos = re.Index(formula,&len);
      if(pos==-1 || len<3) {
         formulas[i] = new TTreeFormula(Form("NF%zu",(size_t)this),formula,data);
         index[i] = 0;
      }
      else {
         TString newformula(formula,pos);
         TString val = formula(pos+1,len-2);
         formulas[i] = new TTreeFormula(Form("NF%zu",(size_t)this),newformula,data);
         formula = newformula;
         index[i] = val.Atoi();
      }
      TH1D tmp("tmpb", "tmpb", 1, -FLT_MAX, FLT_MAX);
      data->Draw(Form("%s>>tmpb",formula.Data()),"","goff");
      rms[i] = tmp.GetRMS();
   }
   Int_t inNeuron = 0;
   Double_t diff = 0.;
   if(fAnalysisTree) delete fAnalysisTree;
   fAnalysisTree = new TTree("result","analysis");
   fAnalysisTree->SetDirectory(0);
   fAnalysisTree->Branch("inNeuron",&inNeuron,"inNeuron/I");
   fAnalysisTree->Branch("diff",&diff,"diff/D");
   Int_t numOutNodes=GetNeurons(GetLayers());
   Double_t *outVal=new Double_t[numOutNodes];
   Double_t *trueVal=new Double_t[numOutNodes];

   delete fIOTree;
   fIOTree=new TTree("MLP_iotree","MLP_iotree");
   fIOTree->SetDirectory(0);
   TString leaflist;
   for (i=0; i<nn; i++)
      leaflist+=Form("In%d/D:",i);
   leaflist.Remove(leaflist.Length()-1);
   fIOTree->Branch("In", params, leaflist);

   leaflist="";
   for (i=0; i<numOutNodes; i++)
      leaflist+=Form("Out%d/D:",i);
   leaflist.Remove(leaflist.Length()-1);
   fIOTree->Branch("Out", outVal, leaflist);

   leaflist="";
   for (i=0; i<numOutNodes; i++)
      leaflist+=Form("True%d/D:",i);
   leaflist.Remove(leaflist.Length()-1);
   fIOTree->Branch("True", trueVal, leaflist);
   Double_t v1 = 0.;
   Double_t v2 = 0.;
   // Loop on the events in the test sample
   for(j=0; j< nEvents; j++) {
      fNetwork->GetEntry(test->GetEntry(j));
      // Loop on the input neurons to evaluate their formulas
      for(k=0; k<GetNeurons(1); k++) {
         params[k] = formulas[k]->EvalInstance(index[k]);
      }
      for(k=0; k<GetNeurons(GetLayers()); k++) {
         outVal[k]  = fNetwork->Evaluate(k,params);
         trueVal[k] = ((TNeuron*)fNetwork->fLastLayer[k])->GetBranch();
      }
      fIOTree->Fill();

      // Loop on the input neurons
      for (i = 0; i < GetNeurons(1); i++) {
         inNeuron = i;
         diff = 0;
         // Loop on the neurons in the output layer
         for(l=0; l<GetNeurons(GetLayers()); l++){
            params[i] += shift*rms[i];
            v1 = fNetwork->Evaluate(l,params);
            params[i] -= 2*shift*rms[i];
            v2 = fNetwork->Evaluate(l,params);
            diff += (v1-v2)*(v1-v2);
            // reset to the original value
            params[i] += shift*rms[i];
         }
         diff = TMath::Sqrt(diff);
         fAnalysisTree->Fill();
      }
   }
   delete[] params;
   delete[] rms;
   delete[] outVal;
   delete[] trueVal;
   delete[] index;
   for(i=0; i<GetNeurons(1); i++) delete formulas[i];
   delete[] formulas;
   fAnalysisTree->ResetBranchAddresses();
   fIOTree->ResetBranchAddresses();
}

////////////////////////////////////////////////////////////////////////////////
/// Draws the distribution (on the test sample) of the
/// impact on the network output of a small variation of
/// the ith input.

void TMLPAnalyzer::DrawDInput(Int_t i)
{
   char sel[64];
   snprintf(sel,64, "inNeuron==%d", i);
   fAnalysisTree->Draw("diff", sel);
}

////////////////////////////////////////////////////////////////////////////////
/// Draws the distribution (on the test sample) of the
/// impact on the network output of a small variation of
/// each input.
/// DrawDInputs() draws something that approximates the distribution of the
/// derivative of the NN w.r.t. each input. That quantity is recognized as
/// one of the measures to determine key quantities in the network.
///
/// What is done is to vary one input around its nominal value and to see
/// how the NN changes. This is done for each entry in the sample and produces
/// a distribution.
///
/// What you can learn from that is:
/// - is a given variable really useful, or is the network insensitive to it?
/// - is there any risk of a large systematic effect? Is the network extremely
///   sensitive to small variations of some input?
///
/// This is to be considered with care and can serve as input for an
/// "educated guess" when optimizing the network.
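/// Concretely, matching the computation in GatherInformations(), the quantity
/// filled for input \f$i\f$ on each test event is
/// \f$\sqrt{\sum_{o}\left(N_o(x_i+0.1\,\sigma_i)-N_o(x_i-0.1\,\sigma_i)\right)^2}\f$,
/// where \f$N_o\f$ is output neuron \f$o\f$ and \f$\sigma_i\f$ the RMS of input \f$i\f$.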

void TMLPAnalyzer::DrawDInputs()
{
   THStack* stack = new THStack("differences","differences (impact of variables on ANN)");
   TLegend* legend = new TLegend(0.75,0.75,0.95,0.95);
   TH1F* tmp = 0;
   char var[64], sel[64];
   for(Int_t i = 0; i < GetNeurons(1); i++) {
      snprintf(var,64, "diff>>tmp%d", i);
      snprintf(sel,64, "inNeuron==%d", i);
      fAnalysisTree->Draw(var, sel, "goff");
      tmp = (TH1F*)gDirectory->Get(Form("tmp%d",i));
      tmp->SetDirectory(0);
      tmp->SetLineColor(i+1);
      stack->Add(tmp);
      legend->AddEntry(tmp,GetInputNeuronTitle(i),"l");
   }
   stack->Draw("nostack");
   legend->Draw();
   gPad->SetLogy();
}

////////////////////////////////////////////////////////////////////////////////
/// Draws the distribution of the neural network output (using the ith neuron).
/// Two distributions are drawn, for events passing respectively the "signal"
/// and "background" cuts. Only the test sample is used.
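/// A minimal sketch (the cut strings are hypothetical, assuming a branch
/// `type` that tags signal events):
/// ~~~ {.cpp}
/// ana.DrawNetwork(0, "type==1", "type==0");
/// ~~~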

void TMLPAnalyzer::DrawNetwork(Int_t neuron, const char* signal, const char* bg)
{
   TTree* data = fNetwork->fData;
   TEventList* test = fNetwork->fTest;
   TEventList* current = data->GetEventList();
   data->SetEventList(test);
   THStack* stack = new THStack("__NNout_TMLPA",
                                Form("Neural net output (neuron %d)",neuron));
   TH1F *bgh  = new TH1F("__bgh_TMLPA", "NN output", 50, -0.5, 1.5);
   TH1F *sigh = new TH1F("__sigh_TMLPA", "NN output", 50, -0.5, 1.5);
   bgh->SetDirectory(0);
   sigh->SetDirectory(0);
   Int_t nEvents = 0;
   Int_t j=0;
   // build event lists for signal and background
   TEventList* signal_list = new TEventList("__tmpSig_MLPA");
   TEventList* bg_list     = new TEventList("__tmpBkg_MLPA");
   data->Draw(">>__tmpSig_MLPA",signal,"goff");
   data->Draw(">>__tmpBkg_MLPA",bg,"goff");

   // fill the background histogram
   nEvents = bg_list->GetN();
   for(j=0; j< nEvents; j++) {
      bgh->Fill(fNetwork->Result(bg_list->GetEntry(j),neuron));
   }
   // fill the signal histogram
   nEvents = signal_list->GetN();
   for(j=0; j< nEvents; j++) {
      sigh->Fill(fNetwork->Result(signal_list->GetEntry(j),neuron));
   }
   // draw the result
   bgh->SetLineColor(kBlue);
   bgh->SetFillStyle(3008);
   bgh->SetFillColor(kBlue);
   sigh->SetLineColor(kRed);
   sigh->SetFillStyle(3003);
   sigh->SetFillColor(kRed);
   bgh->SetStats(0);
   sigh->SetStats(0);
   stack->Add(bgh);
   stack->Add(sigh);
   TLegend *legend = new TLegend(.75, .80, .95, .95);
   legend->AddEntry(bgh, "Background");
   legend->AddEntry(sigh,"Signal");
   stack->Draw("nostack");
   legend->Draw();
   // restore the default event list
   data->SetEventList(current);
   delete signal_list;
   delete bg_list;
}

////////////////////////////////////////////////////////////////////////////////
/// Create a profile of the difference of the MLP output minus the
/// true value for a given output node outnode, vs the true value for
/// outnode, for all test data events. This method is mainly useful
/// when doing regression analysis with the MLP (i.e. not classification,
/// but continuous truth values).
/// The resulting TProfile histogram is returned.
/// It is not drawn if option "goff" is specified.
/// Options are passed to TProfile::Draw.
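/// A minimal usage sketch (assuming an analyzer `ana` as in the class
/// documentation):
/// ~~~ {.cpp}
/// TProfile* p = ana.DrawTruthDeviation(0);         // draw for output node 0
/// TProfile* q = ana.DrawTruthDeviation(1, "goff"); // fill only, do not draw
/// ~~~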

TProfile* TMLPAnalyzer::DrawTruthDeviation(Int_t outnode /*=0*/,
                                           Option_t *option /*=""*/)
{
   if (!fIOTree) GatherInformations();
   TString pipehist=Form("MLP_truthdev_%d",outnode);
   TString drawline;
   drawline.Form("Out.Out%d-True.True%d:True.True%d>>",
                 outnode, outnode, outnode);
   fIOTree->Draw(drawline+pipehist+"(20)", "", "goff prof");
   TProfile* h=(TProfile*)gDirectory->Get(pipehist);
   h->SetDirectory(0);
   const char* title=GetOutputNeuronTitle(outnode);
   if (title) {
      h->SetTitle(Form("#Delta(output - truth) vs. truth for %s", title));
      h->GetXaxis()->SetTitle(title);
      h->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s", title));
   }
   if (!strstr(option,"goff"))
      h->Draw();
   return h;
}

////////////////////////////////////////////////////////////////////////////////
/// Creates TProfiles of the difference of the MLP output minus the
/// true value vs the true value, one for each output, filled with the
/// test data events. This method is mainly useful when doing regression
/// analysis with the MLP (i.e. not classification, but continuous truth
/// values).
/// The returned THStack contains all the TProfiles. It is drawn unless
/// the option "goff" is specified.
/// Options are passed to TProfile::Draw.
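/// Internally this calls DrawTruthDeviation(outnode, "goff") for each output
/// node and stacks the resulting profiles, one line color per node.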

THStack* TMLPAnalyzer::DrawTruthDeviations(Option_t *option /*=""*/)
{
   THStack *hs=new THStack("MLP_TruthDeviation",
                           "Deviation of MLP output from truth");

   // leg!=0 means we're drawing
   TLegend *leg=0;
   if (!option || !strstr(option,"goff"))
      leg=new TLegend(.4,.85,.95,.95,"#Delta(output - truth) vs. truth for:");

   const char* xAxisTitle=0;

   // create a profile for each output neuron,
   // adding them into the THStack and the TLegend
   for (Int_t outnode=0; outnode<GetNeurons(GetLayers()); outnode++) {
      TProfile* h=DrawTruthDeviation(outnode, "goff");
      h->SetLineColor(1+outnode);
      hs->Add(h, option);
      if (leg) leg->AddEntry(h,GetOutputNeuronTitle(outnode));
      if (!outnode)
         // X axis title is the same for all, extract it from the first one.
         xAxisTitle=h->GetXaxis()->GetTitle();
   }

   if (leg) {
      hs->Draw("nostack");
      leg->Draw();
      // the stack must be drawn before its axes can be accessed
      hs->GetXaxis()->SetTitle(xAxisTitle);
      hs->GetYaxis()->SetTitle("#Delta(output - truth)");
   }

   return hs;
}

////////////////////////////////////////////////////////////////////////////////
/// Creates a profile of the difference of the MLP output outnode minus
/// the true value of outnode vs the input value innode, for all test
/// data events.
/// The resulting TProfile histogram is returned.
/// It is not drawn if option "goff" is specified.
/// Options are passed to TProfile::Draw.
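/// For instance (a sketch, assuming the analyzer `ana` from above), the
/// deviation of output node 0 as a function of input 2:
/// ~~~ {.cpp}
/// TProfile* p = ana.DrawTruthDeviationInOut(2, 0);
/// ~~~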

TProfile* TMLPAnalyzer::DrawTruthDeviationInOut(Int_t innode,
                                                Int_t outnode /*=0*/,
                                                Option_t *option /*=""*/)
{
   if (!fIOTree) GatherInformations();
   TString pipehist=Form("MLP_truthdev_i%d_o%d", innode, outnode);
   TString drawline;
   drawline.Form("Out.Out%d-True.True%d:In.In%d>>",
                 outnode, outnode, innode);
   fIOTree->Draw(drawline+pipehist+"(50)", "", "goff prof");
   TProfile* h=(TProfile*)gROOT->FindObject(pipehist);
   h->SetDirectory(0);
   const char* titleInNeuron=GetInputNeuronTitle(innode);
   const char* titleOutNeuron=GetOutputNeuronTitle(outnode);
   h->SetTitle(Form("#Delta(output - truth) of %s vs. input %s",
               titleOutNeuron, titleInNeuron));
   h->GetXaxis()->SetTitle(Form("%s", titleInNeuron));
   h->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s",
                           titleOutNeuron));
   if (!strstr(option,"goff"))
      h->Draw(option);
   return h;
}

////////////////////////////////////////////////////////////////////////////////
/// Creates a profile of the difference of the MLP output outnode minus the
/// true value of outnode vs the input value, stacked for all inputs, for
/// all test data events.
/// The returned THStack contains all the TProfiles. It is drawn unless
/// the option "goff" is specified.
/// Options are passed to TProfile::Draw.

THStack* TMLPAnalyzer::DrawTruthDeviationInsOut(Int_t outnode /*=0*/,
                                                Option_t *option /*=""*/)
{
   TString sName;
   sName.Form("MLP_TruthDeviationIO_%d", outnode);
   const char* outputNodeTitle=GetOutputNeuronTitle(outnode);
   THStack *hs=new THStack(sName,
                           Form("Deviation of MLP output %s from truth",
                                outputNodeTitle));

   // leg!=0 means we're drawing.
   TLegend *leg=0;
   if (!option || !strstr(option,"goff"))
      leg=new TLegend(.4,.75,.95,.95,
                      Form("#Delta(output - truth) of %s vs. input for:",
                           outputNodeTitle));

   // create a profile for each input neuron,
   // adding them into the THStack and the TLegend
   Int_t numInNodes=GetNeurons(1);
   Int_t innode=0;
   for (innode=0; innode<numInNodes; innode++) {
      TProfile* h=DrawTruthDeviationInOut(innode, outnode, "goff");
      h->SetLineColor(1+innode);
      hs->Add(h, option);
      if (leg) leg->AddEntry(h,h->GetXaxis()->GetTitle());
   }

   if (leg) {
      hs->Draw("nostack");
      leg->Draw();
      // the stack must be drawn before its axes can be accessed
      hs->GetXaxis()->SetTitle("Input value");
      hs->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s",
                               outputNodeTitle));
   }

   return hs;
}