using std::stringstream;
MethodCFMlpANN_nsel(0)   // constructor initialiser list: reset the event counter used by DataInterface
Int_t currentHiddenLayer = 1;
TString layerSpec(fLayerSpec);
while (layerSpec.Length()>0) {
   TString sToAdd = "";
   if (layerSpec.First(',')<0) {
      sToAdd    = layerSpec;
      layerSpec = "";
   }
   else {
      sToAdd    = layerSpec(0,layerSpec.First(','));
      layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
   }
   Int_t nNodes = 0;
   // a leading "N"/"n" stands for the number of input variables
   if (sToAdd.BeginsWith("N") || sToAdd.BeginsWith("n")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
   nNodes += atoi(sToAdd);
   fNodes[currentHiddenLayer++] = nNodes;
}
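// Aside: a minimal standalone mirror of the parser above, using std::string
// instead of TString; ParseLayerSpec and nVar are illustrative names only.
// For a spec like "N,N-1" with nVar input variables, the two hidden layers
// get nVar and nVar-1 nodes.
#include <cstdlib>
#include <iostream>
#include <string>
#include <vector>

std::vector<int> ParseLayerSpec(std::string layerSpec, int nVar)
{
   std::vector<int> nodesPerLayer;
   while (!layerSpec.empty()) {
      std::string sToAdd;
      std::size_t comma = layerSpec.find(',');
      if (comma == std::string::npos) { sToAdd = layerSpec; layerSpec.clear(); }
      else { sToAdd = layerSpec.substr(0, comma); layerSpec = layerSpec.substr(comma + 1); }
      int nNodes = 0;
      if (!sToAdd.empty() && (sToAdd[0] == 'N' || sToAdd[0] == 'n')) {
         sToAdd.erase(0, 1);   // "N" -> number of input variables
         nNodes = nVar;
      }
      nNodes += std::atoi(sToAdd.c_str());   // atoi("") == 0, so plain "N" works too
      nodesPerLayer.push_back(nNodes);
   }
   return nodesPerLayer;
}

int main()
{
   for (int n : ParseLayerSpec("N,N-1", 4)) std::cout << n << " ";   // prints: 4 3
   std::cout << std::endl;
   return 0;
}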
if (IgnoreEventsWithNegWeightsInTraining()) {
   Log() << kFATAL << "Mechanism to ignore events with negative weights in training not yet available for method: "
         << GetMethodTypeName()
         << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string." << Endl;
}

Log() << kINFO << "Use configuration (nodes per layer): in=";
for (Int_t i=0; i<fNlayers-1; i++) Log() << kINFO << fNodes[i] << ":";
Log() << kINFO << fNodes[fNlayers-1] << "=out" << Endl;
// data look-up tables for the training events
fData  = new TMatrix( nEvtTrain, GetNvar() );
fClass = new std::vector<Int_t>( nEvtTrain );

for (Int_t ievt=0; ievt<nEvtTrain; ievt++) {
   const Event* ev = GetEvent(ievt);
   (*fClass)[ievt] = DataInfo().IsSignal(ev) ? 1 : 2;   // signal = 1, background = 2
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      (*fData)( ievt, ivar ) = ev->GetValue(ivar);
   }
}
// training of the Clermont-Ferrand NN classifier
Double_t dumDat(0);
Int_t ntrain(Data()->GetNTrainingEvents());
Int_t ntest(0), nvar(GetNvar()), nlayers(fNlayers), ncycles(fNcycles);
Int_t* nodes = new Int_t[nlayers];
for (Int_t i=0; i<nlayers; i++) nodes[i] = fNodes[i];   // full copy of class member

// allocate the per-layer output buffer used during evaluation
fYNN = new Double_t*[nlayers];
for (Int_t layer=0; layer<nlayers; layer++)
   fYNN[layer] = new Double_t[fNodes[layer]];

#ifndef R__WIN32
Train_nn( &dumDat, &dumDat, &ntrain, &ntest, &nvar, &nlayers, nodes, &ncycles );
#else
Log() << kWARNING << "<Train> sorry CFMlpANN does not run on Windows" << Endl;
#endif

delete[] nodes;
// returns the CFMlpANN output, normalised within [0,1]
Bool_t isOK = kTRUE;
const Event* ev = GetEvent();
std::vector<Double_t> inputVec( GetNvar() );
for (UInt_t ivar=0; ivar<GetNvar(); ivar++) inputVec[ivar] = ev->GetValue(ivar);
Double_t myMVA = EvalANN( inputVec, isOK );
if (!isOK) Log() << kFATAL << "EvalANN returns (!isOK) for event " << Endl;
NoErrorCalc(err, errUpper);   // no error estimate available
return myMVA;
// hard copy of the input variables (they are modified below)
Double_t* xeev = new Double_t[GetNvar()];
for (UInt_t ivar=0; ivar<GetNvar(); ivar++) xeev[ivar] = inVar[ivar];

isOK = kTRUE;
for (UInt_t jvar=0; jvar<GetNvar(); jvar++) {
   // clamp to the range seen during training
   if (fVarn_1.xmax[jvar] < xeev[jvar]) xeev[jvar] = fVarn_1.xmax[jvar];
   if (fVarn_1.xmin[jvar] > xeev[jvar]) xeev[jvar] = fVarn_1.xmin[jvar];
   if (fVarn_1.xmax[jvar] == fVarn_1.xmin[jvar]) {
      isOK = kFALSE;
      xeev[jvar] = 0;
   }
   else {
      // centre and scale to [-1,1]
      xeev[jvar] = xeev[jvar] - ((fVarn_1.xmax[jvar] + fVarn_1.xmin[jvar])/2);
      xeev[jvar] = xeev[jvar] / ((fVarn_1.xmax[jvar] - fVarn_1.xmin[jvar])/2);
   }
}
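// Aside: a minimal standalone sketch (illustrative names) of the clamp-and-
// scale step above, which maps the training range [xmin, xmax] linearly onto
// [-1, +1]: the midpoint goes to 0 and the half-width to 1.
#include <algorithm>
#include <cassert>

double NormaliseToUnitRange(double x, double xmin, double xmax)
{
   x = std::min(std::max(x, xmin), xmax);        // clamp to the training range
   const double centre    = (xmax + xmin) / 2.0;
   const double halfWidth = (xmax - xmin) / 2.0;
   return (x - centre) / halfWidth;
}

int main()
{
   assert(NormaliseToUnitRange( 5.0, 0.0, 10.0) ==  0.0);   // midpoint -> 0
   assert(NormaliseToUnitRange(12.0, 0.0, 10.0) ==  1.0);   // clamped to xmax
   assert(NormaliseToUnitRange( 0.0, 0.0, 10.0) == -1.0);   // lower edge -> -1
   return 0;
}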
// feed the (already normalised) input variables into the first layer
for (Int_t ivar=0; ivar<fNeur_1.neuron[0]; ivar++) fYNN[0][ivar] = xeev[ivar];
// sigmoid-type activation; the cut-off at |u/T| = 170 protects exp() from overflow
if      (u/fDel_1.temp[i] >  170) f = +1;
else if (u/fDel_1.temp[i] < -170) f = -1;
else {
   Double_t yy = TMath::Exp(-u/fDel_1.temp[i]);
   f = (1 - yy)/(1 + yy);
}
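// Aside: a standalone sketch of the activation above, with the layer
// "temperature" fDel_1.temp[i] passed in as T (illustrative name).
// Algebraically (1 - e^(-u/T)) / (1 + e^(-u/T)) == tanh(u/(2T)); the +/-170
// cut-off only guards the exponential against overflow.
#include <cassert>
#include <cmath>

double Activation(double u, double T)
{
   if      (u/T >  170) return +1.0;
   else if (u/T < -170) return -1.0;
   const double yy = std::exp(-u/T);
   return (1.0 - yy) / (1.0 + yy);
}

int main()
{
   for (double u = -5.0; u <= 5.0; u += 0.5)
      assert(std::fabs(Activation(u, 1.0) - std::tanh(u/2.0)) < 1e-12);
   return 0;
}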
// read the number of variables and output classes written at training time
UInt_t nva(0), lclass(0);
istr >> nva >> lclass;

if (GetNvar() != nva)   // wrong file: number of variables does not match
   Log() << kFATAL << "<ReadWeightsFromFile> mismatch in number of variables" << Endl;

if (lclass != 2)        // the number of output classes must be 2
   Log() << kFATAL << "<ReadWeightsFromFile> mismatch in number of classes" << Endl;

if (istr.eof())         // make sure we are not already at the end of the file
   Log() << kFATAL << "<ReadWeightsFromStream> reached EOF prematurely " << Endl;
const Int_t nchar( 100 );
char* dumchar = new char[nchar];

for (Int_t layer=1; layer<=fParam_1.layerm-1; layer++) {
   // the weights are written in blocks of at most 10 columns per line
   Int_t nq = fNeur_1.neuron[layer]/10;
   Int_t nr = fNeur_1.neuron[layer] - nq*10;
   Int_t kk = (nr==0) ? nq : nq+1;
   for (Int_t k=1; k<=kk; k++) {
      Int_t jmin = 10*k - 9;
      Int_t jmax = 10*k;
      if (fNeur_1.neuron[layer] < jmax) jmax = fNeur_1.neuron[layer];
      for (Int_t j=jmin; j<=jmax; j++) istr >> Ww_ref(fNeur_1.ww, layer+1, j);
      for (Int_t i=1; i<=fNeur_1.neuron[layer-1]; i++)
         for (Int_t j=jmin; j<=jmax; j++) istr >> W_ref(fNeur_1.w, layer+1, j, i);
      istr.getline( dumchar, nchar );   // swallow the rest of the line
   }
}

for (Int_t layer=0; layer<fParam_1.layerm; layer++) {
   istr.getline( dumchar, nchar );      // skip two empty lines
   istr.getline( dumchar, nchar );
   istr >> fDel_1.temp[layer];          // layer "temperature"
}
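// Aside: standalone illustration of the block arithmetic above. A layer of
// nNeurons columns is written in kk blocks of at most 10 columns; block k
// covers columns jmin..jmax (nNeurons = 23 here is an arbitrary example).
#include <cstdio>

int main()
{
   const int nNeurons = 23;
   const int nq = nNeurons / 10;
   const int nr = nNeurons - nq * 10;
   const int kk = (nr == 0) ? nq : nq + 1;   // 3 blocks for 23 columns
   for (int k = 1; k <= kk; k++) {
      int jmin = 10*k - 9;
      int jmax = 10*k;
      if (nNeurons < jmax) jmax = nNeurons;
      std::printf("block %d: columns %d..%d\n", k, jmin, jmax);
      // block 1: columns 1..10 / block 2: columns 11..20 / block 3: columns 21..23
   }
   return 0;
}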
// sanity check: the zeroth (input) layer must match the number of variables
if ((Int_t)GetNvar() != fNeur_1.neuron[0])
   Log() << kFATAL << "<ReadWeightsFromFile> mismatch in zeroth layer:"
         << GetNvar() << " " << fNeur_1.neuron[0] << Endl;

// sanity checks in the data interface to the f2c-translated network code
if (0 == xpg)
   Log() << kFATAL << "ERROR in MethodCFMlpANN_DataInterface zero pointer xpg" << Endl;
if (*nvar != (Int_t)GetNvar())
   Log() << kFATAL << "ERROR in MethodCFMlpANN_DataInterface mismatch in num of variables: "
         << *nvar << " " << GetNvar() << Endl;

// write the variable ranges with full precision (AddWeightsXMLTo)
for (Int_t ivar=0; ivar<fParam_1.nvar; ivar++)
   s << std::scientific << fVarn_1.xmin[ivar] << " " << fVarn_1.xmax[ivar] << " ";
// write the number of neurons per layer
for (Int_t layer=0; layer<fParam_1.layerm; layer++)
   n << std::scientific << fNeur_1.neuron[layer] << " ";
void* neuronnode = NULL;
for (Int_t neuron=0; neuron<fNeur_1.neuron[layer]; neuron++) {
   stringstream weights;
   weights.precision( 16 );   // full double precision for the weights
   // ... stream this neuron's bias and incoming weights, then attach the line ...
}

// write the layer "temperatures" used by the activation function
stringstream temp;
temp.precision( 16 );
for (Int_t layer=0; layer<fParam_1.layerm; layer++)
   temp << std::scientific << fDel_1.temp[layer] << " ";
// parse the variable ranges back from the XML weight node
stringstream content(minmaxcontent);
for (UInt_t ivar=0; ivar<GetNvar(); ivar++)
   content >> fVarn_1.xmin[ivar] >> fVarn_1.xmax[ivar];
// parse the number of neurons per layer
stringstream ncontent(neuronscontent);
for (Int_t layer=0; layer<fParam_1.layerm; layer++)
   ncontent >> fNeur_1.neuron[layer];
void* neuronnode = NULL;
stringstream weights(neuronweights);   // this neuron's bias and incoming weights
stringstream t(temp);                  // the layer "temperatures"
o << "Number of vars " << fParam_1.nvar << std::endl;
o << "Output nodes " << fParam_1.lclass << std::endl;
for (Int_t ivar=0; ivar<fParam_1.nvar; ivar++)
   o << "Var " << ivar << " [" << fVarn_1.xmin[ivar] << " - " << fVarn_1.xmax[ivar] << "]" << std::endl;
o << "Number of layers " << fParam_1.layerm << std::endl;
o << "Nodes per layer ";
for (Int_t layer=0; layer<fParam_1.layerm; layer++)
   o << fNeur_1.neuron[layer] << " ";
o << std::endl;
for (Int_t k=1; k<=kk; k++) {   // blocks of at most 10 columns, as in the reader
   Int_t jmin = 10*k - 9;
   Int_t jmax = 10*k;
   if (fNeur_1.neuron[layer] < jmax) jmax = fNeur_1.neuron[layer];
   for (j=jmin; j<=jmax; j++)
      o << Ww_ref(fNeur_1.ww, layer+1, j) << "   ";
   o << std::endl;
   for (i=1; i<=fNeur_1.neuron[layer-1]; i++) {
      for (j=jmin; j<=jmax; j++)
         o << W_ref(fNeur_1.w, layer+1, j, i) << "   ";
      o << std::endl;
   }
}
o << "Del.temp in layer " << layer << " : " << fDel_1.temp[layer] << std::endl;
// the exported standalone response class is not implemented for CFMlpANN
fout << "   // not implemented for class: \"" << className << "\"" << std::endl;
fout << "};" << std::endl;