MethodCFMlpANN_Utils.h
// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis *
 * Package: TMVA *
 * Class : MethodCFMlpANN_Utils *
 * Web : http://tmva.sourceforge.net *
 * *
 * Reference for the original FORTRAN version "mlpl3.F": *
 * Authors : J. Proriol and contributions from ALEPH-Clermont-Fd *
 * Team members *
 * Copyright: Laboratoire Physique Corpusculaire *
 * Universite de Blaise Pascal, IN2P3/CNRS *
 * Description: *
 * Utility routine, obtained via f2c from original mlpl3.F FORTRAN routine *
 * *
 * Authors (alphabetical): *
 * Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland *
 * Xavier Prudent <prudent@lapp.in2p3.fr> - LAPP, France *
 * Helge Voss <Helge.Voss@cern.ch> - MPI-K Heidelberg, Germany *
 * Kai Voss <Kai.Voss@cern.ch> - U. of Victoria, Canada *
 * *
 * Copyright (c) 2005: *
 * CERN, Switzerland *
 * U. of Victoria, Canada *
 * MPI-K Heidelberg, Germany *
 * LAPP, Annecy, France *
 * *
 * Redistribution and use in source and binary forms, with or without *
 * modification, are permitted according to the terms listed in LICENSE *
 * (http://tmva.sourceforge.net/LICENSE) *
 **********************************************************************************/

#ifndef ROOT_TMVA_MethodCFMlpANN_Utils
#define ROOT_TMVA_MethodCFMlpANN_Utils

#ifndef ROOT_TMVA_MethodCFMlpANN_def
#include "TMVA/MethodCFMlpANN_def.h"
#endif
#ifndef ROOT_TMVA_MsgLogger
#include "TMVA/MsgLogger.h"
#endif

#ifndef ROOT_Rtypes
#include "Rtypes.h"
#endif

#include <cstdlib>
//////////////////////////////////////////////////////////////////////////
//                                                                      //
// MethodCFMlpANN_Utils                                                 //
//                                                                      //
// Implementation of Clermont-Ferrand artificial neural network         //
//                                                                      //
//////////////////////////////////////////////////////////////////////////

namespace TMVA {

   class MethodCFMlpANN_Utils {

   public:

      MethodCFMlpANN_Utils();
      virtual ~MethodCFMlpANN_Utils();

   protected:

      void Train_nn( Double_t *tin2, Double_t *tout2, Int_t *ntrain,
                     Int_t *ntest, Int_t *nvar2, Int_t *nlayer,
                     Int_t *nodes, Int_t *ncycle );

      void Entree_new( Int_t *, char *, Int_t *ntrain, Int_t *ntest,
                       Int_t *numlayer, Int_t *nodes, Int_t *numcycle,
                       Int_t );

      virtual Int_t DataInterface( Double_t*, Double_t*, Int_t*, Int_t*, Int_t*, Int_t*,
                                   Double_t*, Int_t*, Int_t* ) = 0;

      Double_t Fdecroi(Int_t *i__);
      Double_t Sen3a(void);

      void Wini      ();
      void En_avant  (Int_t *ievent);
      void En_avant2 (Int_t *ievent);
      void En_arriere(Int_t *ievent);
      void Leclearn  (Int_t *ktest, Double_t *tout2, Double_t *tin2);
      void Out       (Int_t *iii, Int_t *maxcycle);
      void Cout      (Int_t *, Double_t *xxx);
      void Innit     (char *det, Double_t *tout2, Double_t *tin2, Int_t );
      void TestNN    ();
      void Inl       ();
      void GraphNN   (Int_t *ilearn, Double_t *, Double_t *, char *, Int_t);
      void Foncf     (Int_t *i__, Double_t *u, Double_t *f);
      void Cout2     (Int_t * /*i1*/, Double_t *yyy);
      void Lecev2    (Int_t *ktest, Double_t *tout2, Double_t *tin2);
      void Arret     (const char* mot );
      void CollectVar(Int_t *nvar, Int_t *class__, Double_t *xpg);

   protected:

      static Int_t             fg_100;          // constant
      static Int_t             fg_0;            // constant
      static const Int_t       fg_max_nVar_;    // static maximum number of input variables
      static const Int_t       fg_max_nNodes_;  // maximum number of nodes per variable
      static Int_t             fg_999;          // constant
      static const char* const fg_MethodName;   // method name for print

      Double_t W_ref(const Double_t wNN[], Int_t a_1, Int_t a_2, Int_t a_3) const {
         return wNN [(a_3*max_nNodes_ + a_2)*max_nLayers_ + a_1 - 187];
      }
      Double_t& W_ref(Double_t wNN[], Int_t a_1, Int_t a_2, Int_t a_3) {
         return wNN [((a_3)*max_nNodes_ + (a_2))*max_nLayers_ + a_1 - 187];
      }

      Double_t Ww_ref(const Double_t wwNN[], Int_t a_1, Int_t a_2) const {
         return wwNN[(a_2)*max_nLayers_ + a_1 - 7];
      }
      Double_t& Ww_ref(Double_t wwNN[], Int_t a_1, Int_t a_2) {
         return wwNN[(a_2)*max_nLayers_ + a_1 - 7];
      }

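      // Note on the index arithmetic above: the f2c-translated kernel addresses the weight
      // arrays with Fortran-style 1-based indices (a_1, a_2, a_3). Assuming the dimensions
      // max_nLayers_ = 6 and max_nNodes_ = 30 from MethodCFMlpANN_def (an assumption, they
      // are not visible in this header), the first element W(1,1,1) maps to
      //    (1*max_nNodes_ + 1)*max_nLayers_ + 1 = (1*30 + 1)*6 + 1 = 187,
      // so subtracting 187 places it at wNN[0]; likewise Ww(1,1) gives 1*6 + 1 = 7,
      // hence the "- 7" offset for wwNN[0].
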
      // ANN training parameters
      struct {
         // ... (data members not shown in this listing)
      } fParam_1;

      // ANN training results
      struct {
         // ... (data members not shown in this listing)
      } fVarn_1;

      // dynamic data table
      class VARn2 {
      public:
         VARn2() : fNevt(0), fNvar(0) {
            fxx = 0;
         }
         ~VARn2() {
            Delete();
         }
         void Create( Int_t nevt, Int_t nvar ) {
            fNevt = nevt+1; fNvar = nvar+1; // fortran array style 1...N
            fxx = new Double_t*[fNevt];
            for (Int_t i=0; i<fNevt; i++) fxx[i] = new Double_t[fNvar];
         }
         Double_t operator=( Double_t val ) { return val; } // no-op: returns the value, the table itself is not modified
         Double_t &operator()( Int_t ievt, Int_t ivar ) const {
            if (0 != fxx && ievt < fNevt && ivar < fNvar) return fxx[ievt][ivar];
            else {
               printf( "*** ERROR in varn3_(): fxx is zero pointer ==> abort ***\n" );
               std::exit(1);
               return fxx[0][0];
            }
         }
         void Delete( void ) {
            if (0 != fxx) for (Int_t i=0; i<fNevt; i++) if (0 != fxx[i]) delete [] fxx[i];
            delete[] fxx;
            fxx = 0;
         }

         Double_t** fxx;
         Int_t      fNevt;
         Int_t      fNvar;
      } fVarn2_1, fVarn3_1;
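
      // A minimal usage sketch for VARn2 (illustration only, not part of the class):
      //    VARn2 table;
      //    table.Create( nevt, nvar );       // allocates (nevt+1) x (nvar+1) doubles, Fortran-style 1..N indexing
      //    table( ievt, ivar ) = value;      // operator() returns a Double_t& usable for read and write
      //    Double_t v = table( ievt, ivar ); // access with fxx unset or an index out of range exits with code 1
      //    table.Delete();                   // frees the table; also done by the destructor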

      // ANN weights
      struct {
         // ... (data members not shown in this listing)
      } fNeur_1;

      // ANN weights
      struct {
         // ... (data members not shown in this listing)
      } fDel_1;

      // flags and stuff (don't ask me...)
      struct {
         // ... (data members not shown in this listing)
      } fCost_1;

      void SetLogger(MsgLogger *l) { fLogger = l; }

   private:
      MsgLogger* fLogger;   // message logger
      MsgLogger& ULog() { if (fLogger) return *fLogger; return *(fLogger = new MsgLogger("CFMLP_Utils")); } // avoids a "control reaches end of non-void function" warning

   public:

      ClassDef(MethodCFMlpANN_Utils,0) // Implementation of Clermont-Ferrand artificial neural network
   };

} // namespace TMVA

#endif
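
MethodCFMlpANN_Utils is an abstract base class: the f2c-translated training kernel (Train_nn, Entree_new and the En_avant/En_arriere routines) obtains its events through the pure virtual DataInterface callback, which a concrete method class has to implement. The sketch below illustrates that wiring under stated assumptions: the class name MyCFMlpMethod, the node layout and the use of dummy data buffers are illustrative choices (they mirror how TMVA's own MethodCFMlpANN appears to drive the kernel, with the event data flowing through DataInterface rather than through the buffers), not behaviour guaranteed by this header.

   #include "TMVA/MethodCFMlpANN_Utils.h"

   // Hypothetical concrete class, for illustration only.
   class MyCFMlpMethod : public TMVA::MethodCFMlpANN_Utils {
   public:
      // Called back by the training kernel; the argument layout follows the
      // f2c calling convention of mlpl3.F.
      virtual Int_t DataInterface( Double_t*, Double_t*, Int_t*, Int_t*, Int_t*, Int_t*,
                                   Double_t*, Int_t*, Int_t* ) {
         // A real implementation copies the requested event's input variables and
         // class index into the buffers passed by the kernel. Left empty in this sketch.
         return 0;
      }

      void TrainNetwork( Int_t ntrain, Int_t ntest, Int_t nvar ) {
         Int_t    nlayers  = 3;
         Int_t    nodes[3] = { nvar, 10, 1 };  // assumed layout: input, hidden, output node counts
         Int_t    ncycles  = 100;
         Double_t dummy    = 0;                // event data is delivered via DataInterface
         Train_nn( &dummy, &dummy, &ntrain, &ntest, &nvar, &nlayers, nodes, &ncycles );
      }
   };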