// @(#)root/mlp:$Id$
// Author: Christophe.Delaere@cern.ch   20/07/03

/*************************************************************************
 * Copyright (C) 1995-2003, Rene Brun and Fons Rademakers.               *
 * All rights reserved.                                                  *
 *                                                                       *
 * For the licensing terms see $ROOTSYS/LICENSE.                         *
 * For the list of contributors see $ROOTSYS/README/CREDITS.             *
 *************************************************************************/

#ifndef ROOT_TMultiLayerPerceptron
#define ROOT_TMultiLayerPerceptron

#ifndef ROOT_TObject
#include "TObject.h"
#endif
#ifndef ROOT_TString
#include "TString.h"
#endif
#ifndef ROOT_TObjArray
#include "TObjArray.h"
#endif
#ifndef ROOT_TMatrixD
#include "TMatrixD.h"
#endif
#ifndef ROOT_TNeuron
#include "TNeuron.h"
#endif

class TTree;
class TEventList;
class TTreeFormula;
class TTreeFormulaManager;

//____________________________________________________________________
//
// TMultiLayerPerceptron
//
// This class describes a neural network.
// There are facilities to train the network and use the output.
//
// The input layer is made of inactive neurons (returning the
// normalized input), hidden layers are made of sigmoids and output
// neurons are linear.
//
// The basic input is a TTree and two TEventLists (one for training,
// one for testing). For classification jobs, a branch (possibly in a
// friend tree) must contain the expected output.
// Six learning methods are available: kStochastic, kBatch,
// kSteepestDescent, kRibierePolak, kFletcherReeves and kBFGS.
//
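// A minimal usage sketch (the tree "simu" and its branches "x", "y"
// and "type" are hypothetical; the layout string lists the input
// branches, the hidden layer size and the output branch):
//
//   TMultiLayerPerceptron mlp("x,y:5:type", simu,
//                             "Entry$%2==0",   // training events
//                             "Entry$%2==1");  // test events
//   mlp.SetLearningMethod(TMultiLayerPerceptron::kBFGS);
//   mlp.Train(100, "text,graph");              // 100 epochs
//   Double_t params[2] = {1.2, 3.4};
//   Double_t out = mlp.Evaluate(0, params);    // first output neuron
//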
// This implementation is *inspired* by the mlpfit package from
// J. Schwindling et al.
//
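// Once trained, a network can be saved, restored, and exported as
// standalone code (file names below are illustrative):
//
//   mlp.DumpWeights("weights.txt");   // write weights to an ASCII file
//   mlp.LoadWeights("weights.txt");   // read them back
//   mlp.Export("NNfunction", "C++");  // generate a standalone C++ class
//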
//____________________________________________________________________

class TMultiLayerPerceptron : public TObject {
 friend class TMLPAnalyzer;

 public:
   enum ELearningMethod { kStochastic, kBatch, kSteepestDescent,
                          kRibierePolak, kFletcherReeves, kBFGS };
   enum EDataSet { kTraining, kTest };
   TMultiLayerPerceptron();
   TMultiLayerPerceptron(const char* layout, TTree* data = 0,
                         const char* training = "Entry$%2==0",
                         const char* test = "",
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD  = "");
   TMultiLayerPerceptron(const char* layout,
                         const char* weight, TTree* data = 0,
                         const char* training = "Entry$%2==0",
                         const char* test = "",
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD  = "");
   TMultiLayerPerceptron(const char* layout, TTree* data,
                         TEventList* training,
                         TEventList* test,
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD  = "");
   TMultiLayerPerceptron(const char* layout,
                         const char* weight, TTree* data,
                         TEventList* training,
                         TEventList* test,
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD  = "");
   virtual ~TMultiLayerPerceptron();
   void SetData(TTree*);
   void SetTrainingDataSet(TEventList* train);
   void SetTestDataSet(TEventList* test);
   void SetTrainingDataSet(const char* train);
   void SetTestDataSet(const char* test);
   void SetLearningMethod(TMultiLayerPerceptron::ELearningMethod method);
   void SetEventWeight(const char*);
   void Train(Int_t nEpoch, Option_t* option = "text", Double_t minE=0);
   Double_t Result(Int_t event, Int_t index = 0) const;
   Double_t GetError(Int_t event) const;
   Double_t GetError(TMultiLayerPerceptron::EDataSet set) const;
   void ComputeDEDw() const;
   void Randomize() const;
   void SetEta(Double_t eta);
   void SetEpsilon(Double_t eps);
   void SetDelta(Double_t delta);
   void SetEtaDecay(Double_t ed);
   void SetTau(Double_t tau);
   void SetReset(Int_t reset);
   inline Double_t GetEta()      const { return fEta; }
   inline Double_t GetEpsilon()  const { return fEpsilon; }
   inline Double_t GetDelta()    const { return fDelta; }
   inline Double_t GetEtaDecay() const { return fEtaDecay; }
   TMultiLayerPerceptron::ELearningMethod GetLearningMethod() const { return fLearningMethod; }
   inline Double_t GetTau()      const { return fTau; }
   inline Int_t GetReset()       const { return fReset; }
   inline TString GetStructure() const { return fStructure; }
   inline TNeuron::ENeuronType GetType() const { return fType; }
   void DrawResult(Int_t index = 0, Option_t* option = "test") const;
   Bool_t DumpWeights(Option_t* filename = "-") const;
   Bool_t LoadWeights(Option_t* filename = "");
   Double_t Evaluate(Int_t index, Double_t* params) const;
   void Export(Option_t* filename = "NNfunction", Option_t* language = "C++") const;
   virtual void Draw(Option_t *option="");

 protected:
   void AttachData();
   void BuildNetwork();
   void GetEntry(Int_t) const;
   // By design, the learning functions are not const, even though they could be.
   void MLP_Stochastic(Double_t*);                       // one epoch of stochastic minimisation
   void MLP_Batch(Double_t*);                            // one epoch of batch minimisation
   Bool_t LineSearch(Double_t*, Double_t*);              // line search along a given direction
   void SteepestDir(Double_t*);                          // steepest-descent direction
   void ConjugateGradientsDir(Double_t*, Double_t);      // conjugate-gradients direction
   void SetGammaDelta(TMatrixD&, TMatrixD&, Double_t*);  // BFGS helper: gamma and delta vectors
   bool GetBFGSH(TMatrixD&, TMatrixD&, TMatrixD&);       // BFGS estimate of the inverse hessian
   void BFGSDir(TMatrixD&, Double_t*);                   // BFGS direction
   Double_t DerivDir(Double_t*);                         // derivative of the error along a direction
   Double_t GetCrossEntropyBinary() const;               // cross-entropy error (binary case)
   Double_t GetCrossEntropy() const;                     // cross-entropy error
   Double_t GetSumSquareError() const;                   // sum-of-squares error

 private:
   TMultiLayerPerceptron(const TMultiLayerPerceptron&); // Not implemented
   TMultiLayerPerceptron& operator=(const TMultiLayerPerceptron&); // Not implemented
   void ExpandStructure();
   void BuildFirstLayer(TString&);
   void BuildHiddenLayers(TString&);
   void BuildOneHiddenLayer(const TString& sNumNodes, Int_t& layer,
                            Int_t& prevStart, Int_t& prevStop,
                            Bool_t lastLayer);
   void BuildLastLayer(TString&, Int_t);
   void Shuffle(Int_t*, Int_t) const;
   void MLP_Line(Double_t*, Double_t*, Double_t);

   TTree* fData;                   //! pointer to the TTree used as data source
   Int_t fCurrentTree;             //! index of the current tree in a chain
   Double_t fCurrentTreeWeight;    //! weight of the current tree in a chain
   TObjArray fNetwork;             // Collection of all the neurons in the network
   TObjArray fFirstLayer;          // Collection of the input neurons; subset of fNetwork
   TObjArray fLastLayer;           // Collection of the output neurons; subset of fNetwork
   TObjArray fSynapses;            // Collection of all the synapses in the network
   TString fStructure;             // String containing the network structure
   TString fWeight;                // String containing the event weight
   TNeuron::ENeuronType fType;     // Type of hidden neurons
   TNeuron::ENeuronType fOutType;  // Type of output neurons
   TString fextF;                  // Name of the external function (for kExternal neurons)
   TString fextD;                  // Name of its derivative
   TEventList *fTraining;          //! EventList defining the events in the training dataset
   TEventList *fTest;              //! EventList defining the events in the test dataset
   ELearningMethod fLearningMethod; //! The Learning Method
   TTreeFormula* fEventWeight;     //! formula representing the event weight
   TTreeFormulaManager* fManager;  //! TTreeFormulaManager for the weight and neurons
   Double_t fEta;                  //! Eta - used in stochastic minimisation - Default=0.1
   Double_t fEpsilon;              //! Epsilon - used in stochastic minimisation - Default=0.
   Double_t fDelta;                //! Delta - used in stochastic minimisation - Default=0.
   Double_t fEtaDecay;             //! EtaDecay - Eta *= EtaDecay at each epoch - Default=1.
   Double_t fTau;                  //! Tau - used in line search - Default=3.
   Double_t fLastAlpha;            //! internal parameter used in line search
   Int_t fReset;                   //! number of epochs between two resets of the search direction to the steepest descent - Default=50
   Bool_t fTrainingOwner;          //! internal flag whether one has to delete fTraining or not
   Bool_t fTestOwner;              //! internal flag whether one has to delete fTest or not
   ClassDef(TMultiLayerPerceptron, 4)   // a Neural Network
};

#endif