MethodANNBase.h
// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Peter Speckmayer, Matt Jachowski, Jan Therhaag

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis       *
 * Package: TMVA                                                                   *
 * Class  : MethodANNBase                                                          *
 *                                                                                 *
 *                                                                                 *
 * Description:                                                                    *
 *      Artificial neural network base class for the discrimination of signal     *
 *      from background.                                                           *
 *                                                                                 *
 * Authors (alphabetical):                                                         *
 *      Andreas Hoecker  <Andreas.Hocker@cern.ch>    - CERN, Switzerland           *
 *      Matt Jachowski   <jachowski@stanford.edu>    - Stanford University, USA    *
 *      Peter Speckmayer <Peter.Speckmayer@cern.ch>  - CERN, Switzerland           *
 *      Joerg Stelzer    <Joerg.Stelzer@cern.ch>     - CERN, Switzerland           *
 *      Jan Therhaag     <Jan.Therhaag@cern.ch>      - U of Bonn, Germany          *
 *                                                                                 *
 * Small changes (regression):                                                     *
 *      Krzysztof Danielowski <danielow@cern.ch>     - IFJ PAN & AGH, Poland       *
 *      Kamil Kraszewski      <kalq@cern.ch>         - IFJ PAN & UJ, Poland        *
 *      Maciej Kruk           <mkruk@cern.ch>        - IFJ PAN & AGH, Poland       *
 *                                                                                 *
 * Copyright (c) 2005-2011:                                                        *
 *      CERN, Switzerland                                                          *
 *                                                                                 *
 * Redistribution and use in source and binary forms, with or without              *
 * modification, are permitted according to the terms listed in LICENSE            *
 * (see tmva/doc/LICENSE)                                                          *
 **********************************************************************************/

#ifndef ROOT_TMVA_MethodANNBase
#define ROOT_TMVA_MethodANNBase

//////////////////////////////////////////////////////////////////////////
//                                                                      //
// MethodANNBase                                                        //
//                                                                      //
// Base class for all TMVA methods using artificial neural networks    //
//                                                                      //
//////////////////////////////////////////////////////////////////////////

#include "TString.h"
#include <vector>
#include "TTree.h"
#include "TObjArray.h"
#include "TRandom3.h"
#include "TMatrix.h"

#include "TMVA/MethodBase.h"
#include "TMVA/TActivation.h"
#include "TMVA/TNeuron.h"
#include "TMVA/TNeuronInput.h"

class TH1;
class TH1F;

namespace TMVA {

   class MethodANNBase : public MethodBase {

   public:

      // constructors dictated by subclassing from MethodBase
      MethodANNBase( const TString& jobName,
                     Types::EMVA methodType,
                     const TString& methodTitle,
                     DataSetInfo& theData,
                     const TString& theOption );

      MethodANNBase( Types::EMVA methodType,
                     DataSetInfo& theData,
                     const TString& theWeightFile );

      virtual ~MethodANNBase();

      // this does the real initialization work
      void InitANNBase();

      // setters for subclasses
      void SetActivation(TActivation* activation) {
         if (fActivation != nullptr) delete fActivation;
         fActivation = activation;
      }
      void SetNeuronInputCalculator(TNeuronInput* inputCalculator) {
         if (fInputCalculator != nullptr) delete fInputCalculator;
         fInputCalculator = inputCalculator;
      }
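
      // Both setters take ownership: any previously installed object is deleted.
      // A sketch of typical subclass usage, assuming TMVA's TActivationChooser and
      // TNeuronInputChooser helper classes:
      //
      //    TActivationChooser aChooser;
      //    SetActivation( aChooser.CreateActivation( fNeuronType ) );   // e.g. "tanh"
      //    TNeuronInputChooser iChooser;
      //    SetNeuronInputCalculator( iChooser.CreateNeuronInput( fNeuronInputType ) );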

      // this will have to be overridden by every subclass
      virtual void Train() = 0;

      // print network, for debugging
      virtual void PrintNetwork() const;


      // call this function like this:
      //    ...
      //    MethodMLP* mlp = dynamic_cast<MethodMLP*>(method);
      //    std::vector<float> layerValues;
      //    mlp->GetLayerActivation (2, std::back_inserter(layerValues));
      //    ... now do something with layerValues
      //
      template <typename WriteIterator>
      void GetLayerActivation (size_t layer, WriteIterator writeIterator);

      // write weights to file
      void AddWeightsXMLTo( void* parent ) const;
      void ReadWeightsFromXML( void* wghtnode );

      // read weights from file
      virtual void ReadWeightsFromStream( std::istream& istr );

      // calculate the MVA value
      virtual Double_t GetMvaValue( Double_t* err = nullptr, Double_t* errUpper = nullptr );

      virtual const std::vector<Float_t>& GetRegressionValues();

      virtual const std::vector<Float_t>& GetMulticlassValues();

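      // Evaluation sketch (illustrative only; "method" stands in for a trained
      // instance, with the current event already loaded):
      //
      //    Double_t error = 0;
      //    Double_t mva = method->GetMvaValue( &error );                        // classification
      //    const std::vector<Float_t>& targets = method->GetRegressionValues(); // regression
      //
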
      // write method-specific histos to target file
      virtual void WriteMonitoringHistosToFile() const;

      // ranking of input variables
      const Ranking* CreateRanking();

      // the option handling methods
      virtual void DeclareOptions();
      virtual void ProcessOptions();

      Bool_t Debug() const;

      enum EEstimator { kMSE = 0, kCE };   // mean squared error or cross-entropy estimator

      TObjArray* fNetwork;   // TObjArray of TObjArrays representing network

   protected:

      virtual void MakeClassSpecific( std::ostream&, const TString& ) const;

      std::vector<Int_t>* ParseLayoutString( TString layerSpec );
      virtual void BuildNetwork( std::vector<Int_t>* layout, std::vector<Double_t>* weights = nullptr,
                                 Bool_t fromFile = kFALSE );
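
      // Sketch of how the network is typically constructed from the layout option
      // string (illustrative; the actual flow lives in ProcessOptions() and the
      // subclasses):
      //
      //    std::vector<Int_t>* layout = ParseLayoutString( fLayerSpec ); // e.g. "N,N-1"
      //    BuildNetwork( layout );  // neurons + synapses with random initial weights
      //    delete layout;
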
      void ForceNetworkInputs( const Event* ev, Int_t ignoreIndex = -1 );
      void ForceNetworkCalculations();

      // debugging utilities
      void PrintMessage( TString message, Bool_t force = kFALSE ) const;
      void WaitForKeyboard();

      // accessors
      TNeuron* GetInputNeuron ( Int_t index )     { return (TNeuron*)fInputLayer->At(index); }
      TNeuron* GetOutputNeuron( Int_t index = 0 ) { return fOutputNeurons.at(index); }

      // protected variables
      TObjArray*    fSynapses;        // array of pointers to synapses, no structural data
      TActivation*  fActivation;      // activation function to be used for hidden layers
      TActivation*  fOutput;          // activation function to be used for output layers, depending on estimator
      TActivation*  fIdentity;        // activation for input and output layers
      TRandom3*     frgen;            // random number generator for various uses
      TNeuronInput* fInputCalculator; // input calculator for all neurons

      std::vector<Int_t>    fRegulatorIdx; // index into the priors, one entry per synapse
      std::vector<Double_t> fRegulators;   // the priors used as regulators
      TMatrixD              fInvHessian;   ///< zjh

      // monitoring histograms
      TH1F* fEstimatorHistTrain;  // monitors convergence of training sample
      TH1F* fEstimatorHistTest;   // monitors convergence of independent test sample

      // monitoring histograms (not available for regression)
      void CreateWeightMonitoringHists( const TString& bulkname, std::vector<TH1*>* hv = nullptr ) const;
      std::vector<TH1*> fEpochMonHistS; // epoch monitoring histograms for signal
      std::vector<TH1*> fEpochMonHistB; // epoch monitoring histograms for background
      std::vector<TH1*> fEpochMonHistW; // epoch monitoring histograms for weights

      // general
      bool fUseRegulator;   ///< use regulator to avoid over-training (zjh)

   protected:
      Int_t fRandomSeed;        ///< random seed for initial synapse weights

      Int_t fNcycles;           ///< number of epochs to train

      TString fNeuronType;      ///< name of neuron activation function class
      TString fNeuronInputType; ///< name of neuron input calculator class

   private:

      // helper functions for building network
      void BuildLayers( std::vector<Int_t>* layout, Bool_t from_file = false );
      void BuildLayer( Int_t numNeurons, TObjArray* curLayer, TObjArray* prevLayer,
                       Int_t layerIndex, Int_t numLayers, Bool_t from_file = false );
      void AddPreLinks( TNeuron* neuron, TObjArray* prevLayer );

      // helper functions for weight initialization
      void InitWeights();
      void ForceWeights( std::vector<Double_t>* weights );

      // helper functions for deleting network
      void DeleteNetwork();
      void DeleteNetworkLayer( TObjArray*& layer );

      // debugging utilities
      void PrintLayer( TObjArray* layer ) const;
      void PrintNeuron( TNeuron* neuron ) const;

      // private variables
      TObjArray*            fInputLayer;    ///< cache this for fast access
      std::vector<TNeuron*> fOutputNeurons; ///< cache this for fast access
      TString               fLayerSpec;     ///< layout specification option

      // some static flags
      static const Bool_t fgDEBUG = kTRUE; ///< debug flag

      ClassDef(MethodANNBase,0); // Base class for TMVA ANNs
   };



   template <typename WriteIterator>
   inline void MethodANNBase::GetLayerActivation (size_t layerNumber, WriteIterator writeIterator)
   {
      // get the activation values of the nodes in layer "layerNumber" and
      // write them into the writeIterator; assumes that the network has
      // already been evaluated (e.g. by calling "GetRegressionValues")

      if (layerNumber >= (size_t)fNetwork->GetEntriesFast())
         return;

      TObjArray* layer = (TObjArray*)fNetwork->At(layerNumber);
      UInt_t nNodes = layer->GetEntriesFast();
      for (UInt_t iNode = 0; iNode < nNodes; iNode++)
      {
         (*writeIterator) = ((TNeuron*)layer->At(iNode))->GetActivationValue();
         ++writeIterator;
      }
   }
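
   // End-to-end usage sketch (illustrative; "method" stands in for a hypothetical
   // trained method pointer, and it is assumed that GetMvaValue(), like
   // GetRegressionValues(), evaluates the network for the current event):
   //
   //    MethodMLP* mlp = dynamic_cast<MethodMLP*>(method);
   //    if (mlp) {
   //       mlp->GetMvaValue();                 // propagate the current event
   //       std::vector<float> activations;
   //       mlp->GetLayerActivation(1, std::back_inserter(activations));
   //    }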

} // namespace TMVA

#endif