Logo ROOT   6.12/07
Reference Guide
MethodANNBase.h
Go to the documentation of this file.
1 // @(#)root/tmva $Id$
2 // Author: Andreas Hoecker, Peter Speckmayer, Matt Jachowski, Jan Therhaag
3 
4 /**********************************************************************************
5  * Project: TMVA - a Root-integrated toolkit for multivariate data analysis *
6  * Package: TMVA *
7  * Class : MethodANNBase *
8  * Web : http://tmva.sourceforge.net *
9  * *
10  * Description: *
11  * Artificial neural network base class for the discrimination of signal *
12  * from background. *
13  * *
14  * Authors (alphabetical): *
15  * Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland *
16  * Matt Jachowski <jachowski@stanford.edu> - Stanford University, USA *
17  * Peter Speckmayer <Peter.Speckmayer@cern.ch> - CERN, Switzerland *
18  * Joerg Stelzer <Joerg.Stelzer@cern.ch> - CERN, Switzerland *
19  * Jan Therhaag <Jan.Therhaag@cern.ch> - U of Bonn, Germany *
20  * *
21  * Small changes (regression): *
22  * Krzysztof Danielowski <danielow@cern.ch> - IFJ PAN & AGH, Poland *
23  * Kamil Kraszewski <kalq@cern.ch> - IFJ PAN & UJ , Poland *
24  * Maciej Kruk <mkruk@cern.ch> - IFJ PAN & AGH, Poland *
25  * *
26  * Copyright (c) 2005-2011: *
27  * CERN, Switzerland *
28  * *
29  * Redistribution and use in source and binary forms, with or without *
30  * modification, are permitted according to the terms listed in LICENSE *
31  * (http://tmva.sourceforge.net/LICENSE) *
32  **********************************************************************************/
33 
34 #ifndef ROOT_TMVA_MethodANNBase
35 #define ROOT_TMVA_MethodANNBase
36 
37 //////////////////////////////////////////////////////////////////////////
38 // //
39 // MethodANNBase //
40 // //
41 // Base class for all TMVA methods using artificial neural networks //
42 // //
43 //////////////////////////////////////////////////////////////////////////
44 
45 #include "TString.h"
46 #include <vector>
47 #include "TTree.h"
48 #include "TObjArray.h"
49 #include "TRandom3.h"
50 #include "TMatrix.h"
51 
52 #include "TMVA/MethodBase.h"
53 #include "TMVA/TActivation.h"
54 #include "TMVA/TNeuron.h"
55 #include "TMVA/TNeuronInput.h"
56 
57 class TH1;
58 class TH1F;
59 
60 namespace TMVA {
61 
62  class MethodANNBase : public MethodBase {
63 
64  public:
65 
66  // constructors dictated by subclassing off of MethodBase
67  MethodANNBase( const TString& jobName,
68  Types::EMVA methodType,
69  const TString& methodTitle,
70  DataSetInfo& theData,
71  const TString& theOption );
72 
73  MethodANNBase( Types::EMVA methodType,
74  DataSetInfo& theData,
75  const TString& theWeightFile);
76 
77  virtual ~MethodANNBase();
78 
79  // this does the real initialization work
80  void InitANNBase();
81 
82  // setters for subclasses
83  void SetActivation(TActivation* activation) {
84  if (fActivation != nullptr) delete fActivation;
85  fActivation = activation;
86  }
87  void SetNeuronInputCalculator(TNeuronInput* inputCalculator) {
88  if (fInputCalculator != nullptr) delete fInputCalculator;
89  fInputCalculator = inputCalculator;
90  }
91 
92  // this will have to be overridden by every subclass
93  virtual void Train() = 0;
94 
95  // print network, for debugging
96  virtual void PrintNetwork() const;
97 
98 
99  // call this function like that:
100  // ...
101  // MethodMLP* mlp = dynamic_cast<MethodMLP*>(method);
102  // std::vector<float> layerValues;
103  // mlp->GetLayerActivation (2, std::back_inserter(layerValues));
104  // ... do now something with the layerValues
105  //
106  template <typename WriteIterator>
107  void GetLayerActivation (size_t layer, WriteIterator writeIterator);
108 
110 
111  // write weights to file
112  void AddWeightsXMLTo( void* parent ) const;
113  void ReadWeightsFromXML( void* wghtnode );
114 
115  // read weights from file
116  virtual void ReadWeightsFromStream( std::istream& istr );
117 
118  // calculate the MVA value
119  virtual Double_t GetMvaValue( Double_t* err = 0, Double_t* errUpper = 0 );
120 
121  virtual const std::vector<Float_t> &GetRegressionValues();
122 
123  virtual const std::vector<Float_t> &GetMulticlassValues();
124 
125  // write method specific histos to target file
126  virtual void WriteMonitoringHistosToFile() const;
127 
128  // ranking of input variables
129  const Ranking* CreateRanking();
130 
131  // the option handling methods
132  virtual void DeclareOptions();
133  virtual void ProcessOptions();
134 
135  Bool_t Debug() const;
136 
137  enum EEstimator { kMSE=0,kCE};
138 
139  TObjArray* fNetwork; // TObjArray of TObjArrays representing network
140 
141  protected:
142 
143  virtual void MakeClassSpecific( std::ostream&, const TString& ) const;
144 
145  std::vector<Int_t>* ParseLayoutString( TString layerSpec );
146  virtual void BuildNetwork( std::vector<Int_t>* layout, std::vector<Double_t>* weights=NULL,
147  Bool_t fromFile = kFALSE );
148  void ForceNetworkInputs( const Event* ev, Int_t ignoreIndex = -1 );
150 
151  // debugging utilities
152  void PrintMessage( TString message, Bool_t force = kFALSE ) const;
154  void WaitForKeyboard();
155 
156  // accessors
157  Int_t NumCycles() { return fNcycles; }
158  TNeuron* GetInputNeuron (Int_t index) { return (TNeuron*)fInputLayer->At(index); }
159  TNeuron* GetOutputNeuron(Int_t index = 0) { return fOutputNeurons.at(index); }
160 
161  // protected variables
162  TObjArray* fSynapses; // array of pointers to synapses, no structural data
163  TActivation* fActivation; // activation function to be used for hidden layers
164  TActivation* fOutput; // activation function to be used for output layers, depending on estimator
165  TActivation* fIdentity; // activation for input and output layers
166  TRandom3* frgen; // random number generator for various uses
167  TNeuronInput* fInputCalculator; // input calculator for all neurons
168 
169  std::vector<Int_t> fRegulatorIdx; //index to different priors from every synapses
170  std::vector<Double_t> fRegulators; //the priors as regulator
173 
174  // monitoring histograms
175  TH1F* fEstimatorHistTrain; // monitors convergence of training sample
176  TH1F* fEstimatorHistTest; // monitors convergence of independent test sample
177 
178  // monitoring histograms (not available for regression)
179  void CreateWeightMonitoringHists( const TString& bulkname, std::vector<TH1*>* hv = 0 ) const;
180  std::vector<TH1*> fEpochMonHistS; // epoch monitoring histograms for signal
181  std::vector<TH1*> fEpochMonHistB; // epoch monitoring histograms for background
182  std::vector<TH1*> fEpochMonHistW; // epoch monitoring histograms for weights
183 
184 
185  // general
187  bool fUseRegulator; // zjh
188 
189  protected:
190  Int_t fRandomSeed; // random seed for initial synapse weights
191 
192  Int_t fNcycles; // number of epochs to train
193 
194  TString fNeuronType; // name of neuron activation function class
195  TString fNeuronInputType; // name of neuron input calculator class
196 
197 
198  private:
199 
200  // helper functions for building network
201  void BuildLayers(std::vector<Int_t>* layout, Bool_t from_file = false);
202  void BuildLayer(Int_t numNeurons, TObjArray* curLayer, TObjArray* prevLayer,
203  Int_t layerIndex, Int_t numLayers, Bool_t from_file = false);
204  void AddPreLinks(TNeuron* neuron, TObjArray* prevLayer);
205 
206  // helper functions for weight initialization
207  void InitWeights();
208  void ForceWeights(std::vector<Double_t>* weights);
209 
210  // helper functions for deleting network
211  void DeleteNetwork();
212  void DeleteNetworkLayer(TObjArray*& layer);
213 
214  // debugging utilities
215  void PrintLayer(TObjArray* layer) const;
216  void PrintNeuron(TNeuron* neuron) const;
217 
218  // private variables
219  TObjArray* fInputLayer; // cache this for fast access
220  std::vector<TNeuron*> fOutputNeurons; // cache this for fast access
221  TString fLayerSpec; // layout specification option
222 
223  // some static flags
224  static const Bool_t fgDEBUG = kTRUE; // debug flag
225 
226  ClassDef(MethodANNBase,0); // Base class for TMVA ANNs
227  };
228 
229 
230 
231  template <typename WriteIterator>
232  inline void MethodANNBase::GetLayerActivation (size_t layerNumber, WriteIterator writeIterator)
233  {
234  // get the activation values of the nodes in layer "layer"
235  // write the node activation values into the writeIterator
236  // assumes, that the network has been computed already (by calling
237  // "GetRegressionValues")
238 
239  if (layerNumber >= (size_t)fNetwork->GetEntriesFast())
240  return;
241 
242  TObjArray* layer = (TObjArray*)fNetwork->At(layerNumber);
243  UInt_t nNodes = layer->GetEntriesFast();
244  for (UInt_t iNode = 0; iNode < nNodes; iNode++)
245  {
246  (*writeIterator) = ((TNeuron*)layer->At(iNode))->GetActivationValue();
247  ++writeIterator;
248  }
249  }
250 
251 
252 } // namespace TMVA
253 
254 #endif
void WaitForKeyboard()
wait for keyboard input, for debugging
virtual void WriteMonitoringHistosToFile() const
write histograms to file
virtual Double_t GetMvaValue(Double_t *err=0, Double_t *errUpper=0)
get the mva value generated by the NN
void BuildLayer(Int_t numNeurons, TObjArray *curLayer, TObjArray *prevLayer, Int_t layerIndex, Int_t numLayers, Bool_t from_file=false)
build a single layer with neurons and synapses connecting this layer to the previous layer ...
An array of TObjects.
Definition: TObjArray.h:37
Random number generator class based on M. Matsumoto and T. Nishimura's Mersenne Twister generator.
Definition: TRandom3.h:27
void SetActivation(TActivation *activation)
Definition: MethodANNBase.h:83
void ForceNetworkCalculations()
calculate input values to each neuron
void CreateWeightMonitoringHists(const TString &bulkname, std::vector< TH1 *> *hv=0) const
void DeleteNetwork()
delete/clear network
MethodANNBase(const TString &jobName, Types::EMVA methodType, const TString &methodTitle, DataSetInfo &theData, const TString &theOption)
standard constructor Note: Right now it is an option to choose the neuron input function, but only the input function "sum" leads to weight convergence – otherwise the weights go to nan and lead to an ABORT.
TActivation * fActivation
void AddPreLinks(TNeuron *neuron, TObjArray *prevLayer)
add synapses connecting a neuron to its preceding layer
const Ranking * CreateRanking()
compute ranking of input variables by summing function of weights
virtual void ReadWeightsFromStream(std::istream &istr)
destroy/clear the network then read it back in from the weights file
Virtual base Class for all MVA method.
Definition: MethodBase.h:109
virtual void MakeClassSpecific(std::ostream &, const TString &) const
write specific classifier response
TNeuronInput * fInputCalculator
TObjArray * fInputLayer
Basic string class.
Definition: TString.h:125
1-D histogram with a float per channel (see TH1 documentation)
Definition: TH1.h:567
Ranking for variables in method (implementation)
Definition: Ranking.h:48
int Int_t
Definition: RtypesCore.h:41
bool Bool_t
Definition: RtypesCore.h:59
void GetLayerActivation(size_t layer, WriteIterator writeIterator)
Double_t GetActivationValue() const
Definition: TNeuron.h:105
void AddWeightsXMLTo(void *parent) const
create XML description of ANN classifier
virtual void DeclareOptions()
define the options (their key words) that can be set in the option string here the options valid for ...
TObject * At(Int_t idx) const
Definition: TObjArray.h:165
static const Bool_t fgDEBUG
void ForceWeights(std::vector< Double_t > *weights)
force the synapse weights
Bool_t Debug() const
who the hell makes such strange Debug flags that even use "global pointers"..
TActivation * fOutput
#define ClassDef(name, id)
Definition: Rtypes.h:320
std::vector< TH1 * > fEpochMonHistB
void PrintMessage(TString message, Bool_t force=kFALSE) const
print messages, turn off printing by setting verbose and debug flag appropriately ...
TObjArray * fNetwork
Neuron class used by TMVA artificial neural network methods.
Definition: TNeuron.h:49
virtual void ProcessOptions()
do nothing specific at this moment
Class that contains all the data information.
Definition: DataSetInfo.h:60
TActivation * fIdentity
virtual void BuildNetwork(std::vector< Int_t > *layout, std::vector< Double_t > *weights=NULL, Bool_t fromFile=kFALSE)
build network given a layout (number of neurons in each layer) and optional weights array ...
void SetNeuronInputCalculator(TNeuronInput *inputCalculator)
Definition: MethodANNBase.h:87
void ReadWeightsFromXML(void *wghtnode)
read MLP from xml weight file
TObjArray * fSynapses
virtual void PrintNetwork() const
print network representation, for debugging
unsigned int UInt_t
Definition: RtypesCore.h:42
Int_t GetEntriesFast() const
Definition: TObjArray.h:64
std::vector< TH1 * > fEpochMonHistW
std::vector< Double_t > fRegulators
TNeuron * GetInputNeuron(Int_t index)
std::vector< Int_t > fRegulatorIdx
void InitWeights()
initialize the synapse weights randomly
const Bool_t kFALSE
Definition: RtypesCore.h:88
std::vector< Int_t > * ParseLayoutString(TString layerSpec)
parse layout specification string and return a vector, each entry containing the number of neurons in the corresponding layer
double Double_t
Definition: RtypesCore.h:55
virtual const std::vector< Float_t > & GetMulticlassValues()
get the multiclass classification values generated by the NN
TNeuron * GetOutputNeuron(Int_t index=0)
void ForceNetworkInputs(const Event *ev, Int_t ignoreIndex=-1)
force the input values of the input neurons force the value for each input neuron ...
The TH1 histogram class.
Definition: TH1.h:56
std::vector< TNeuron * > fOutputNeurons
virtual ~MethodANNBase()
destructor
Double_t GetNetworkOutput()
Abstract ClassifierFactory template that handles arbitrary types.
virtual const std::vector< Float_t > & GetRegressionValues()
get the regression value generated by the NN
void PrintNeuron(TNeuron *neuron) const
print a neuron, for debugging
std::vector< TH1 * > fEpochMonHistS
Interface for TNeuron input calculation classes.
Definition: TNeuronInput.h:42
void DeleteNetworkLayer(TObjArray *&layer)
delete a network layer
Interface for TNeuron activation function classes.
Definition: TActivation.h:42
virtual void ReadWeightsFromStream(std::istream &)=0
void BuildLayers(std::vector< Int_t > *layout, Bool_t from_file=false)
build the network layers
const Bool_t kTRUE
Definition: RtypesCore.h:87
Base class for all TMVA methods using artificial neural networks.
Definition: MethodANNBase.h:62
virtual void Train()=0
void PrintLayer(TObjArray *layer) const
print a single layer, for debugging
void InitANNBase()
initialize ANNBase object