MethodCFMlpANN_Utils.h
// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss

/***********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis        *
 * Package: TMVA                                                                   *
 * Class  : MethodCFMlpANN_Utils                                                   *
 * Web    : http://tmva.sourceforge.net                                            *
 *                                                                                 *
 * Reference for the original FORTRAN version "mlpl3.F":                           *
 *      Authors  : J. Proriol and contributions from ALEPH-Clermont-Fd             *
 *                 Team members                                                    *
 *      Copyright: Laboratoire Physique Corpusculaire                              *
 *                 Universite de Blaise Pascal, IN2P3/CNRS                         *
 * Description:                                                                    *
 *      Utility routine, obtained via f2c from original mlpl3.F FORTRAN routine    *
 *                                                                                 *
 * Authors (alphabetical):                                                         *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland               *
 *      Xavier Prudent  <prudent@lapp.in2p3.fr>  - LAPP, France                    *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany       *
 *      Kai Voss        <Kai.Voss@cern.ch>       - U. of Victoria, Canada          *
 *                                                                                 *
 * Copyright (c) 2005:                                                             *
 *      CERN, Switzerland                                                          *
 *      U. of Victoria, Canada                                                     *
 *      MPI-K Heidelberg, Germany                                                  *
 *      LAPP, Annecy, France                                                       *
 *                                                                                 *
 * Redistribution and use in source and binary forms, with or without              *
 * modification, are permitted according to the terms listed in LICENSE            *
 * (http://tmva.sourceforge.net/LICENSE)                                           *
 ***********************************************************************************/

#ifndef ROOT_TMVA_MethodCFMlpANN_Utils
#define ROOT_TMVA_MethodCFMlpANN_Utils

#include "TMVA/MethodCFMlpANN_def.h"   // defines the max_nVar_, max_nNodes_, max_nLayers_, max_Events_ constants used below
#include "TMVA/MsgLogger.h"

#include "Rtypes.h"

#include <cstdlib>
//////////////////////////////////////////////////////////////////////////
//                                                                        //
// MethodCFMlpANN_Utils                                                   //
//                                                                        //
// Implementation of Clermont-Ferrand artificial neural network           //
//                                                                        //
//////////////////////////////////////////////////////////////////////////

namespace TMVA {

   class MethodCFMlpANN_Utils {

   public:

      MethodCFMlpANN_Utils();
      virtual ~MethodCFMlpANN_Utils();

   protected:

      void Train_nn( Double_t *tin2, Double_t *tout2, Int_t *ntrain,
                     Int_t *ntest, Int_t *nvar2, Int_t *nlayer,
                     Int_t *nodes, Int_t *ncycle );

      void Entree_new( Int_t *, char *, Int_t *ntrain, Int_t *ntest,
                       Int_t *numlayer, Int_t *nodes, Int_t *numcycle,
                       Int_t );

      virtual Int_t DataInterface( Double_t*, Double_t*, Int_t*, Int_t*, Int_t*, Int_t*,
                                   Double_t*, Int_t*, Int_t* ) = 0;

      Double_t Fdecroi(Int_t *i__);
      Double_t Sen3a(void);

      void Wini      ();
      void En_avant  (Int_t *ievent);
      void En_avant2 (Int_t *ievent);
      void En_arriere(Int_t *ievent);
      void Leclearn  (Int_t *ktest, Double_t *tout2, Double_t *tin2);
      void Out       (Int_t *iii, Int_t *maxcycle);
      void Cout      (Int_t *, Double_t *xxx);
      void Innit     (char *det, Double_t *tout2, Double_t *tin2, Int_t );
      void TestNN    ();
      void Inl       ();
      void GraphNN   (Int_t *ilearn, Double_t *, Double_t *, char *, Int_t);
      void Foncf     (Int_t *i__, Double_t *u, Double_t *f);
      void Cout2     (Int_t * /*i1*/, Double_t *yyy);
      void Lecev2    (Int_t *ktest, Double_t *tout2, Double_t *tin2);
      void Arret     (const char* mot );
      void CollectVar(Int_t *nvar, Int_t *class__, Double_t *xpg);

   protected:

      Int_t fg_100;                             // constant
      Int_t fg_0;                               // constant
      static const Int_t fg_max_nVar_;          // static maximum number of input variables
      static const Int_t fg_max_nNodes_;        // maximum number of nodes per variable
      Int_t fg_999;                             // constant
      static const char* const fg_MethodName;   // method name for print

      // Fortran-style access to the flat weight arrays: a_1, a_2 (, a_3) are
      // 1-based (layer, node [, node]) indices mapped onto the f2c memory layout
      // of the original mlpl3.F arrays (see the standalone sketch after this listing).
      Double_t W_ref(const Double_t wNN[], Int_t a_1, Int_t a_2, Int_t a_3) const {
         return wNN [(a_3*max_nNodes_ + a_2)*max_nLayers_ + a_1 - 187];
      }
      Double_t& W_ref(Double_t wNN[], Int_t a_1, Int_t a_2, Int_t a_3) {
         return wNN [((a_3)*max_nNodes_ + (a_2))*max_nLayers_ + a_1 - 187];
      }

      Double_t Ww_ref(const Double_t wwNN[], Int_t a_1, Int_t a_2) const {
         return wwNN[(a_2)*max_nLayers_ + a_1 - 7];
      }
      Double_t& Ww_ref(Double_t wwNN[], Int_t a_1, Int_t a_2) {
         return wwNN[(a_2)*max_nLayers_ + a_1 - 7];
      }

      // ANN training parameters
      struct {
         // ...
      } fParam_1;

      // ANN training results
      struct {
         Double_t xmax[max_nVar_], xmin[max_nVar_];
         Int_t nclass[max_Events_], mclass[max_Events_], iclass;
      } fVarn_1;

      // dynamic data table (a small usage sketch is given after this listing)
      class VARn2 {
      public:
         VARn2() : fNevt(0), fNvar(0) {
            fxx = 0;
         }
         ~VARn2() {
            Delete();
         }
         void Create( Int_t nevt, Int_t nvar ) {
            fNevt = nevt+1; fNvar = nvar+1; // fortran array style 1...N
            fxx = new Double_t*[fNevt];
            for (Int_t i=0; i<fNevt; i++) fxx[i] = new Double_t[fNvar];
         }
         Double_t operator=( Double_t val ) { return val; }
         Double_t &operator()( Int_t ievt, Int_t ivar ) const {
            if (0 != fxx && ievt < fNevt && ivar < fNvar) return fxx[ievt][ivar];
            else {
               printf( "*** ERROR in varn3_(): fxx is zero pointer ==> abort ***\n" );
               std::exit(1);
               return fxx[0][0];
            }
         }
         void Delete( void ) {
            if (0 != fxx) for (Int_t i=0; i<fNevt; i++) if (0 != fxx[i]) delete [] fxx[i];
            delete[] fxx;
            fxx = 0;
         }

         Double_t** fxx;
         Int_t      fNevt;
         Int_t      fNvar;
      } fVarn2_1, fVarn3_1;

      // ANN weights
      struct {
         Double_t x[max_nLayers_*max_nNodes_];
         Double_t y[max_nLayers_*max_nNodes_];
         Double_t o[max_nNodes_];
         Double_t w[max_nLayers_*max_nNodes_*max_nNodes_];
         Double_t ww[max_nLayers_*max_nNodes_];
         Double_t cut[max_nNodes_];
         Double_t deltaww[max_nLayers_*max_nNodes_];
         Int_t neuron[max_nLayers_];
      } fNeur_1;

      // ANN weight updates (deltas used during learning)
      struct {
         Double_t coef[max_nNodes_], temp[max_nLayers_], demin, demax;
         Double_t del[max_nLayers_*max_nNodes_];
         Double_t delw[max_nLayers_*max_nNodes_*max_nNodes_];
         Double_t delta[max_nLayers_*max_nNodes_*max_nNodes_];
         Double_t delww[max_nLayers_*max_nNodes_];
         // ...
      } fDel_1;

      // flags and stuff (don't ask me...)
      struct {
         // ...
      } fCost_1;

   private:
      MsgLogger* fLogger;
      MsgLogger& ULog() { if (fLogger) return *fLogger; return *(fLogger = new MsgLogger("CFMLP_Utils")); } // avoids a "control reaches end of non-void function" warning

   public:

      ClassDef(MethodCFMlpANN_Utils,0); // Implementation of Clermont-Ferrand artificial neural network
   };

} // namespace TMVA

#endif
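Note on W_ref/Ww_ref: the constant offsets (- 187 and - 7) are the usual f2c translation of 1-based, column-major Fortran array indexing. The standalone sketch below reproduces that mapping and checks it against the expression used in W_ref. It is only an illustration: the dimension values max_nLayers_ = 6 and max_nNodes_ = 30 are assumptions inferred from the two offsets (the actual constants live in MethodCFMlpANN_def.h), and FortranOffset3/WRefOffset are made-up helper names, not TMVA functions.

// Standalone sketch (not part of the header above): verifies the f2c index
// mapping used by W_ref/Ww_ref. The dimension values below are assumptions
// inferred from the offsets 187 = 1 + 6 + 6*30 and 7 = 1 + 6.
#include <cassert>

namespace {
   const int max_nLayers_ = 6;   // assumed value of the constant from MethodCFMlpANN_def.h
   const int max_nNodes_  = 30;  // assumed value of the constant from MethodCFMlpANN_def.h

   // Column-major offset of Fortran element W(layer, node1, node2) for an
   // array declared as W(max_nLayers_, max_nNodes_, max_nNodes_), indices 1..N.
   int FortranOffset3(int layer, int node1, int node2) {
      return (layer - 1)
           + (node1 - 1) * max_nLayers_
           + (node2 - 1) * max_nLayers_ * max_nNodes_;
   }

   // The index expression used by W_ref, written out for comparison.
   int WRefOffset(int a_1, int a_2, int a_3) {
      return (a_3 * max_nNodes_ + a_2) * max_nLayers_ + a_1 - 187;
   }
}

int main() {
   // The two expressions agree for every valid 1-based index triple, which is
   // where the magic constant 187 comes from; the same reasoning for a 2-D
   // array W(max_nLayers_, max_nNodes_) gives the offset 7 used in Ww_ref.
   for (int layer = 1; layer <= max_nLayers_; layer++)
      for (int i = 1; i <= max_nNodes_; i++)
         for (int j = 1; j <= max_nNodes_; j++)
            assert(FortranOffset3(layer, i, j) == WRefOffset(layer, i, j));
   return 0;
}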
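Note on VARn2: it is a small hand-rolled event/variable table that keeps the Fortran 1-based indexing of the translated code. Create(nevt, nvar) allocates (nevt+1) x (nvar+1) entries so that indices 1..nevt and 1..nvar are valid, operator() returns a reference (and aborts if the table was never created), and Delete() frees the rows. The usage sketch below follows the same pattern with a hypothetical standalone Table class and plain double/int, since VARn2 itself is a protected nested type and is not reachable outside MethodCFMlpANN_Utils.

// Hypothetical standalone re-implementation of the VARn2 pattern, for
// illustration only (plain double/int instead of ROOT's Double_t/Int_t).
#include <cstdio>
#include <cstdlib>

class Table {
public:
   Table() : fxx(0), fNevt(0), fNvar(0) {}
   ~Table() { Delete(); }

   // Fortran array style 1...N: allocate one extra row and column so that
   // indices 1..nevt and 1..nvar are valid (index 0 is simply unused).
   void Create(int nevt, int nvar) {
      fNevt = nevt + 1; fNvar = nvar + 1;
      fxx = new double*[fNevt];
      for (int i = 0; i < fNevt; i++) fxx[i] = new double[fNvar];
   }

   double& operator()(int ievt, int ivar) const {
      if (fxx != 0 && ievt < fNevt && ivar < fNvar) return fxx[ievt][ivar];
      std::printf("*** ERROR: table not created or index out of range ==> abort ***\n");
      std::exit(1);
      return fxx[0][0]; // never reached; silences missing-return warnings
   }

   void Delete() {
      if (fxx != 0) for (int i = 0; i < fNevt; i++) delete [] fxx[i];
      delete [] fxx;
      fxx = 0;
   }

private:
   double** fxx;
   int      fNevt;
   int      fNvar;
};

int main() {
   Table t;
   t.Create(3, 2);    // table for 3 "events" with 2 "variables" each
   t(1, 1) = 0.5;     // first event, first variable -- 1-based, as in the FORTRAN code
   t(3, 2) = 1.5;     // last event, last variable
   std::printf("t(1,1) = %g, t(3,2) = %g\n", t(1, 1), t(3, 2));
   return 0;          // Delete() is called by the destructor
}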