MethodANNBase.cxx
1 // @(#)root/tmva $Id$
2 // Author: Andreas Hoecker, Peter Speckmayer, Matt Jachowski, Jan Therhaag, Jiahang Zhong
3 
4 /**********************************************************************************
5  * Project: TMVA - a Root-integrated toolkit for multivariate data analysis *
6  * Package: TMVA *
7  * Class : MethodANNBase *
8  * Web : http://tmva.sourceforge.net *
9  * *
10  * Description: *
11  * Artificial neural network base class for the discrimination of signal *
12  * from background. *
13  * *
14  * Authors (alphabetical): *
15  * Krzysztof Danielowski <danielow@cern.ch> - IFJ & AGH, Poland *
16  * Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland *
17  * Matt Jachowski <jachowski@stanford.edu> - Stanford University, USA *
18  * Kamil Kraszewski <kalq@cern.ch> - IFJ & UJ, Poland *
19  * Maciej Kruk <mkruk@cern.ch> - IFJ & AGH, Poland *
20  * Peter Speckmayer <peter.speckmayer@cern.ch> - CERN, Switzerland *
21  * Joerg Stelzer <stelzer@cern.ch> - DESY, Germany *
22  * Jan Therhaag <Jan.Therhaag@cern.ch> - U of Bonn, Germany *
23  * Jiahang Zhong <Jiahang.Zhong@cern.ch> - Academia Sinica, Taipei *
24  * *
25  * Copyright (c) 2005-2011: *
26  * CERN, Switzerland *
27  * U. of Bonn, Germany *
28  * *
29  * Redistribution and use in source and binary forms, with or without *
30  * modification, are permitted according to the terms listed in LICENSE *
31  * (http://tmva.sourceforge.net/LICENSE) *
32  **********************************************************************************/
33 
34 /*! \class TMVA::MethodANNBase
35 \ingroup TMVA
36 
37 Base class for all TMVA methods using artificial neural networks.
38 
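Concrete ANN methods derived from this class (for instance the MLP) are booked through
the usual TMVA Factory interface, and the option keywords declared in DeclareOptions()
are passed via the booking string. A minimal sketch (illustrative only; `outputFile` and
`dataloader` are assumed to exist, and the option values are examples, not defaults):

~~~ {.cpp}
TMVA::Factory factory("TMVAClassification", outputFile,
                      "!V:!Silent:AnalysisType=Classification");
factory.BookMethod(dataloader, TMVA::Types::kMLP, "MLP",
                   "NCycles=600:HiddenLayers=N,N-1:NeuronType=sigmoid:EstimatorType=CE");
~~~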
39 */
40 
41 #include "TMVA/MethodBase.h"
42 
43 #include "TMVA/Configurable.h"
44 #include "TMVA/DataSetInfo.h"
45 #include "TMVA/MethodANNBase.h"
46 #include "TMVA/MsgLogger.h"
47 #include "TMVA/TNeuron.h"
48 #include "TMVA/TSynapse.h"
50 #include "TMVA/TActivationTanh.h"
51 #include "TMVA/Types.h"
52 #include "TMVA/Tools.h"
54 #include "TMVA/Ranking.h"
55 #include "TMVA/Version.h"
56 
57 #include "TString.h"
58 #include "TDirectory.h"
59 #include "TRandom3.h"
60 #include "TH2F.h"
61 #include "TH1.h"
62 #include "TMath.h"
63 #include "TMatrixT.h"
64 
65 #include <iostream>
66 #include <vector>
67 #include <cstdlib>
68 #include <stdexcept>
69 #if __cplusplus > 199711L
70 #include <atomic>
71 #endif
72 
73 
74 using std::vector;
75 
76 ClassImp(TMVA::MethodANNBase);
77 
78 ////////////////////////////////////////////////////////////////////////////////
79 /// standard constructor
80 /// Note: Right now it is an option to choose the neuron input function,
81 /// but only the input function "sum" leads to weight convergence --
82 /// otherwise the weights diverge to NaN and the training aborts.
83 
84 TMVA::MethodANNBase::MethodANNBase( const TString& jobName,
85  Types::EMVA methodType,
86  const TString& methodTitle,
87  DataSetInfo& theData,
88  const TString& theOption )
89 : TMVA::MethodBase( jobName, methodType, methodTitle, theData, theOption)
90  , fEstimator(kMSE)
91  , fUseRegulator(kFALSE)
92  , fRandomSeed(0)
93 {
94  InitANNBase();
95 
96  DeclareOptions();
97 }
98 
99 ////////////////////////////////////////////////////////////////////////////////
100 /// construct the Method from the weight file
101 
102 TMVA::MethodANNBase::MethodANNBase( Types::EMVA methodType,
103  DataSetInfo& theData,
104  const TString& theWeightFile)
105  : TMVA::MethodBase( methodType, theData, theWeightFile)
106  , fEstimator(kMSE)
107  , fUseRegulator(kFALSE)
108  , fRandomSeed(0)
109 {
110  InitANNBase();
111 
112  DeclareOptions();
113 }
114 
115 ////////////////////////////////////////////////////////////////////////////////
116 /// define the options (their key words) that can be set in the option string
117 /// here the options common to all ANN-based MVA methods are declared.
118 ///
119 /// known options:
120 ///
121 /// - NCycles=xx : the number of training cycles
122 /// - Normalize=kTRUE,kFALSE : whether normalised input variables should be used
123 /// - HiddenLayers="N-1,N-2" : the specification of the hidden layers
124 /// - NeuronType=sigmoid,tanh,radial,linear : the type of activation function
125 /// used at the neuron
126 
127 void TMVA::MethodANNBase::DeclareOptions()
128 {
129  DeclareOptionRef( fNcycles = 500, "NCycles", "Number of training cycles" );
130  DeclareOptionRef( fLayerSpec = "N,N-1", "HiddenLayers", "Specification of hidden layer architecture" );
131  DeclareOptionRef( fNeuronType = "sigmoid", "NeuronType", "Neuron activation function type" );
132  DeclareOptionRef( fRandomSeed = 1, "RandomSeed", "Random seed for initial synapse weights (0 means unique seed for each run; default value '1')");
133 
134  DeclareOptionRef(fEstimatorS="MSE", "EstimatorType",
135  "MSE (Mean Square Estimator) for Gaussian Likelihood or CE(Cross-Entropy) for Bernoulli Likelihood" ); //zjh
136  AddPreDefVal(TString("MSE")); //zjh
137  AddPreDefVal(TString("CE")); //zjh
138 
139 
140  TActivationChooser aChooser;
141  std::vector<TString>* names = aChooser.GetAllActivationNames();
142  Int_t nTypes = names->size();
143  for (Int_t i = 0; i < nTypes; i++)
144  AddPreDefVal(names->at(i));
145  delete names;
146 
147  DeclareOptionRef(fNeuronInputType="sum", "NeuronInputType","Neuron input function type");
148  TNeuronInputChooser iChooser;
149  names = iChooser.GetAllNeuronInputNames();
150  nTypes = names->size();
151  for (Int_t i = 0; i < nTypes; i++) AddPreDefVal(names->at(i));
152  delete names;
153 }
154 
155 
156 ////////////////////////////////////////////////////////////////////////////////
157 /// process the options: select the estimator type and build the network from the layout string
158 
159 void TMVA::MethodANNBase::ProcessOptions()
160 {
161  if ( DoRegression() || DoMulticlass()) fEstimatorS = "MSE"; //zjh
162  else fEstimatorS = "CE" ; //hhv
163  if (fEstimatorS == "MSE" ) fEstimator = kMSE;
164  else if (fEstimatorS == "CE") fEstimator = kCE; //zjh
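   // Schematic meaning of the two estimator types (illustrative note; the concrete
   // estimator evaluation is implemented in the derived method classes):
   //   kMSE : sum-of-squares error,  E ~ Sum_k (y_k - t_k)^2                         (Gaussian likelihood)
   //   kCE  : cross-entropy,         E ~ -Sum_k [ t_k ln y_k + (1-t_k) ln(1-y_k) ]   (Bernoulli likelihood)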
165  std::vector<Int_t>* layout = ParseLayoutString(fLayerSpec);
166  BuildNetwork(layout);
167  delete layout;
168 }
169 
170 ////////////////////////////////////////////////////////////////////////////////
171 /// parse layout specification string and return a vector, each entry
172 /// containing the number of neurons to go in each successive layer
173 
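// Worked example (illustrative): with 4 input variables and layerSpec = "N,N-1",
// a classification network obtains the layout {4, 4, 3, 1} -- the first entry is
// the number of inputs, "N" expands to 4, "N-1" to 3, and a single output node
// is appended for signal/background classification.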
174 std::vector<Int_t>* TMVA::MethodANNBase::ParseLayoutString(TString layerSpec)
175 {
176  std::vector<Int_t>* layout = new std::vector<Int_t>();
177  layout->push_back((Int_t)GetNvar());
178  while(layerSpec.Length()>0) {
179  TString sToAdd="";
180  if (layerSpec.First(',')<0) {
181  sToAdd = layerSpec;
182  layerSpec = "";
183  }
184  else {
185  sToAdd = layerSpec(0,layerSpec.First(','));
186  layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
187  }
188  int nNodes = 0;
189  if (sToAdd.BeginsWith("n") || sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
190  nNodes += atoi(sToAdd);
191  layout->push_back(nNodes);
192  }
193  if( DoRegression() )
194  layout->push_back( DataInfo().GetNTargets() ); // one output node for each target
195  else if( DoMulticlass() )
196  layout->push_back( DataInfo().GetNClasses() ); // one output node for each class
197  else
198  layout->push_back(1); // one output node (for signal/background classification)
199 
200  int n = 0;
201  for( std::vector<Int_t>::iterator it = layout->begin(); it != layout->end(); ++it ){
202  n++;
203  }
204 
205  return layout;
206 }
207 
208 ////////////////////////////////////////////////////////////////////////////////
209 /// initialize ANNBase object
210 
211 void TMVA::MethodANNBase::InitANNBase()
212 {
213  fNetwork = NULL;
214  frgen = NULL;
215  fActivation = NULL;
216  fOutput = NULL; //zjh
217  fIdentity = NULL;
218  fInputCalculator = NULL;
219  fSynapses = NULL;
220  fEstimatorHistTrain = NULL;
221  fEstimatorHistTest = NULL;
222 
223  // reset monitoring histogram vectors
224  fEpochMonHistS.clear();
225  fEpochMonHistB.clear();
226  fEpochMonHistW.clear();
227 
228  // these will be set in BuildNetwork()
229  fInputLayer = NULL;
230  fOutputNeurons.clear();
231 
232  frgen = new TRandom3(fRandomSeed);
233 
234  fSynapses = new TObjArray();
235 }
236 
237 ////////////////////////////////////////////////////////////////////////////////
238 /// destructor
239 
240 TMVA::MethodANNBase::~MethodANNBase()
241 {
242  DeleteNetwork();
243 }
244 
245 ////////////////////////////////////////////////////////////////////////////////
246 /// delete/clear network
247 
248 void TMVA::MethodANNBase::DeleteNetwork()
249 {
250  if (fNetwork != NULL) {
251  TObjArray *layer;
252  Int_t numLayers = fNetwork->GetEntriesFast();
253  for (Int_t i = 0; i < numLayers; i++) {
254  layer = (TObjArray*)fNetwork->At(i);
255  DeleteNetworkLayer(layer);
256  }
257  delete fNetwork;
258  }
259 
260  if (frgen != NULL) delete frgen;
261  if (fActivation != NULL) delete fActivation;
262  if (fOutput != NULL) delete fOutput; //zjh
263  if (fIdentity != NULL) delete fIdentity;
264  if (fInputCalculator != NULL) delete fInputCalculator;
265  if (fSynapses != NULL) delete fSynapses;
266 
267  fNetwork = NULL;
268  frgen = NULL;
269  fActivation = NULL;
270  fOutput = NULL; //zjh
271  fIdentity = NULL;
272  fInputCalculator = NULL;
273  fSynapses = NULL;
274 }
275 
276 ////////////////////////////////////////////////////////////////////////////////
277 /// delete a network layer
278 
279 void TMVA::MethodANNBase::DeleteNetworkLayer( TObjArray*& layer )
280 {
281  TNeuron* neuron;
282  Int_t numNeurons = layer->GetEntriesFast();
283  for (Int_t i = 0; i < numNeurons; i++) {
284  neuron = (TNeuron*)layer->At(i);
285  neuron->DeletePreLinks();
286  delete neuron;
287  }
288  delete layer;
289 }
290 
291 ////////////////////////////////////////////////////////////////////////////////
292 /// build network given a layout (number of neurons in each layer)
293 /// and optional weights array
294 
295 void TMVA::MethodANNBase::BuildNetwork( std::vector<Int_t>* layout, std::vector<Double_t>* weights, Bool_t fromFile )
296 {
297  if (fEstimatorS == "MSE") fEstimator = kMSE; //zjh
298  else if (fEstimatorS == "CE") fEstimator = kCE; //zjh
299  else Log()<<kWARNING<<"fEstimator="<<fEstimator<<"\tfEstimatorS="<<fEstimatorS<<Endl;
300  if (fEstimator!=kMSE && fEstimator!=kCE) Log()<<kWARNING<<"Estimator type unspecified \t"<<Endl; //zjh
301 
302 
303  Log() << kHEADER << "Building Network. " << Endl;
304 
305  DeleteNetwork();
306  InitANNBase();
307 
308  // set activation and input functions
309  TActivationChooser aChooser;
310  fActivation = aChooser.CreateActivation(fNeuronType);
311  fIdentity = aChooser.CreateActivation("linear");
312  if (fEstimator==kMSE) fOutput = aChooser.CreateActivation("linear"); //zjh
313  else if (fEstimator==kCE) fOutput = aChooser.CreateActivation("sigmoid"); //zjh
314  TNeuronInputChooser iChooser;
315  fInputCalculator = iChooser.CreateNeuronInput(fNeuronInputType);
316 
317  fNetwork = new TObjArray();
318  fRegulatorIdx.clear();
319  fRegulators.clear();
320  BuildLayers( layout, fromFile );
321 
322  // cache input layer and output neuron for fast access
323  fInputLayer = (TObjArray*)fNetwork->At(0);
324  TObjArray* outputLayer = (TObjArray*)fNetwork->At(fNetwork->GetEntriesFast()-1);
325  fOutputNeurons.clear();
326  for (Int_t i = 0; i < outputLayer->GetEntries(); i++) {
327  fOutputNeurons.push_back( (TNeuron*)outputLayer->At(i) );
328  }
329 
330  if (weights == NULL) InitWeights();
331  else ForceWeights(weights);
332 }
333 
334 ////////////////////////////////////////////////////////////////////////////////
335 /// build the network layers
336 
337 void TMVA::MethodANNBase::BuildLayers( std::vector<Int_t>* layout, Bool_t fromFile )
338 {
339  TObjArray* curLayer;
340  TObjArray* prevLayer = NULL;
341 
342  Int_t numLayers = layout->size();
343 
344  for (Int_t i = 0; i < numLayers; i++) {
345  curLayer = new TObjArray();
346  BuildLayer(layout->at(i), curLayer, prevLayer, i, numLayers, fromFile);
347  prevLayer = curLayer;
348  fNetwork->Add(curLayer);
349  }
350 
351  // cache pointers to synapses for fast access, the order matters
352  for (Int_t i = 0; i < numLayers; i++) {
353  TObjArray* layer = (TObjArray*)fNetwork->At(i);
354  Int_t numNeurons = layer->GetEntriesFast();
355  if (i!=0 && i!=numLayers-1) fRegulators.push_back(0.); //zjh
356  for (Int_t j = 0; j < numNeurons; j++) {
357  if (i==0) fRegulators.push_back(0.);//zjh
358  TNeuron* neuron = (TNeuron*)layer->At(j);
359  Int_t numSynapses = neuron->NumPostLinks();
360  for (Int_t k = 0; k < numSynapses; k++) {
361  TSynapse* synapse = neuron->PostLinkAt(k);
362  fSynapses->Add(synapse);
363  fRegulatorIdx.push_back(fRegulators.size()-1);//zjh
364  }
365  }
366  }
367 }
368 
369 ////////////////////////////////////////////////////////////////////////////////
370 /// build a single layer with neurons and synapses connecting this
371 /// layer to the previous layer
372 
373 void TMVA::MethodANNBase::BuildLayer( Int_t numNeurons, TObjArray* curLayer,
374  TObjArray* prevLayer, Int_t layerIndex,
375  Int_t numLayers, Bool_t fromFile )
376 {
377  TNeuron* neuron;
378  for (Int_t j = 0; j < numNeurons; j++) {
379  if (fromFile && (layerIndex != numLayers-1) && (j==numNeurons-1)){
380  neuron = new TNeuron();
381  neuron->SetActivationEqn(fIdentity);
382  neuron->SetBiasNeuron();
383  neuron->ForceValue(1.0);
384  curLayer->Add(neuron);
385  }
386  else {
387  neuron = new TNeuron();
388  neuron->SetInputCalculator(fInputCalculator);
389 
390  // input layer
391  if (layerIndex == 0) {
392  neuron->SetActivationEqn(fIdentity);
393  neuron->SetInputNeuron();
394  }
395  else {
396  // output layer
397  if (layerIndex == numLayers-1) {
398  neuron->SetOutputNeuron();
399  neuron->SetActivationEqn(fOutput); //zjh
400  }
401  // hidden layers
402  else neuron->SetActivationEqn(fActivation);
403  AddPreLinks(neuron, prevLayer);
404  }
405 
406  curLayer->Add(neuron);
407  }
408  }
409 
410 // add bias neuron (except to the output layer)
411  if(!fromFile){
412  if (layerIndex != numLayers-1) {
413  neuron = new TNeuron();
414  neuron->SetActivationEqn(fIdentity);
415  neuron->SetBiasNeuron();
416  neuron->ForceValue(1.0);
417  curLayer->Add(neuron);
418  }
419  }
420 }
421 
422 ////////////////////////////////////////////////////////////////////////////////
423 /// add synapses connecting a neuron to its preceding layer
424 
425 void TMVA::MethodANNBase::AddPreLinks( TNeuron* neuron, TObjArray* prevLayer )
426 {
427  TSynapse* synapse;
428  int numNeurons = prevLayer->GetEntriesFast();
429  TNeuron* preNeuron;
430 
431  for (Int_t i = 0; i < numNeurons; i++) {
432  preNeuron = (TNeuron*)prevLayer->At(i);
433  synapse = new TSynapse();
434  synapse->SetPreNeuron(preNeuron);
435  synapse->SetPostNeuron(neuron);
436  preNeuron->AddPostLink(synapse);
437  neuron->AddPreLink(synapse);
438  }
439 }
440 
441 ////////////////////////////////////////////////////////////////////////////////
442 /// initialize the synapse weights randomly
443 
444 void TMVA::MethodANNBase::InitWeights()
445 {
446  PrintMessage("Initializing weights");
447 
448  // init synapse weights
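   // (each weight is drawn uniformly in (-2, 2) from the TRandom3 generator seeded with fRandomSeed)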
449  Int_t numSynapses = fSynapses->GetEntriesFast();
450  TSynapse* synapse;
451  for (Int_t i = 0; i < numSynapses; i++) {
452  synapse = (TSynapse*)fSynapses->At(i);
453  synapse->SetWeight(4.0*frgen->Rndm() - 2.0);
454  }
455 }
456 
457 ////////////////////////////////////////////////////////////////////////////////
458 /// force the synapse weights
459 
460 void TMVA::MethodANNBase::ForceWeights(std::vector<Double_t>* weights)
461 {
462  PrintMessage("Forcing weights");
463 
464  Int_t numSynapses = fSynapses->GetEntriesFast();
465  TSynapse* synapse;
466  for (Int_t i = 0; i < numSynapses; i++) {
467  synapse = (TSynapse*)fSynapses->At(i);
468  synapse->SetWeight(weights->at(i));
469  }
470 }
471 
472 ////////////////////////////////////////////////////////////////////////////////
473 /// force the input values of the input neurons:
474 /// set each input neuron to the corresponding event value (the variable at ignoreIndex, if any, is set to zero)
475 
476 void TMVA::MethodANNBase::ForceNetworkInputs( const Event* ev, Int_t ignoreIndex )
477 {
478  Double_t x;
479  TNeuron* neuron;
480 
481  // const Event* ev = GetEvent();
482  for (UInt_t j = 0; j < GetNvar(); j++) {
483 
484  x = (j != (UInt_t)ignoreIndex)?ev->GetValue(j):0;
485 
486  neuron = GetInputNeuron(j);
487  neuron->ForceValue(x);
488  }
489 }
490 
491 ////////////////////////////////////////////////////////////////////////////////
492 /// calculate input values to each neuron
493 
494 void TMVA::MethodANNBase::ForceNetworkCalculations()
495 {
496  TObjArray* curLayer;
497  TNeuron* neuron;
498  Int_t numLayers = fNetwork->GetEntriesFast();
499  Int_t numNeurons;
500 
501  for (Int_t i = 0; i < numLayers; i++) {
502  curLayer = (TObjArray*)fNetwork->At(i);
503  numNeurons = curLayer->GetEntriesFast();
504 
505  for (Int_t j = 0; j < numNeurons; j++) {
506  neuron = (TNeuron*) curLayer->At(j);
507  neuron->CalculateValue();
508  neuron->CalculateActivationValue();
509 
510  }
511  }
512 }
513 
514 ////////////////////////////////////////////////////////////////////////////////
515 /// print messages, turn off printing by setting verbose and debug flag appropriately
516 
517 void TMVA::MethodANNBase::PrintMessage( TString message, Bool_t force ) const
518 {
519  if (Verbose() || Debug() || force) Log() << kINFO << message << Endl;
520 }
521 
522 ////////////////////////////////////////////////////////////////////////////////
523 /// wait for keyboard input, for debugging
524 
525 void TMVA::MethodANNBase::WaitForKeyboard()
526 {
527  std::string dummy;
528  Log() << kINFO << "***Type anything to continue (q to quit): ";
529  std::getline(std::cin, dummy);
530  if (dummy == "q" || dummy == "Q") {
531  PrintMessage( "quit" );
532  delete this;
533  exit(0);
534  }
535 }
536 
537 ////////////////////////////////////////////////////////////////////////////////
538 /// print network representation, for debugging
539 
540 void TMVA::MethodANNBase::PrintNetwork() const
541 {
542  if (!Debug()) return;
543 
544  Log() << kINFO << Endl;
545  PrintMessage( "Printing network " );
546  Log() << kINFO << "-------------------------------------------------------------------" << Endl;
547 
548  TObjArray* curLayer;
549  Int_t numLayers = fNetwork->GetEntriesFast();
550 
551  for (Int_t i = 0; i < numLayers; i++) {
552 
553  curLayer = (TObjArray*)fNetwork->At(i);
554  Int_t numNeurons = curLayer->GetEntriesFast();
555 
556  Log() << kINFO << "Layer #" << i << " (" << numNeurons << " neurons):" << Endl;
557  PrintLayer( curLayer );
558  }
559 }
560 
561 ////////////////////////////////////////////////////////////////////////////////
562 /// print a single layer, for debugging
563 
564 void TMVA::MethodANNBase::PrintLayer( TObjArray* layer ) const
565 {
566  Int_t numNeurons = layer->GetEntriesFast();
567  TNeuron* neuron;
568 
569  for (Int_t j = 0; j < numNeurons; j++) {
570  neuron = (TNeuron*) layer->At(j);
571  Log() << kINFO << "\tNeuron #" << j << " (LinksIn: " << neuron->NumPreLinks()
572  << " , LinksOut: " << neuron->NumPostLinks() << ")" << Endl;
573  PrintNeuron( neuron );
574  }
575 }
576 
577 ////////////////////////////////////////////////////////////////////////////////
578 /// print a neuron, for debugging
579 
580 void TMVA::MethodANNBase::PrintNeuron( TNeuron* neuron ) const
581 {
582  Log() << kINFO
583  << "\t\tValue:\t" << neuron->GetValue()
584  << "\t\tActivation: " << neuron->GetActivationValue()
585  << "\t\tDelta: " << neuron->GetDelta() << Endl;
586  Log() << kINFO << "\t\tActivationEquation:\t";
587  neuron->PrintActivationEqn();
588  Log() << kINFO << "\t\tLinksIn:" << Endl;
589  neuron->PrintPreLinks();
590  Log() << kINFO << "\t\tLinksOut:" << Endl;
591  neuron->PrintPostLinks();
592 }
593 
594 ////////////////////////////////////////////////////////////////////////////////
595 /// get the mva value generated by the NN
596 
597 Double_t TMVA::MethodANNBase::GetMvaValue( Double_t* err, Double_t* errUpper )
598 {
599  TNeuron* neuron;
600 
601  TObjArray* inputLayer = (TObjArray*)fNetwork->At(0);
602 
603  const Event * ev = GetEvent();
604 
605  for (UInt_t i = 0; i < GetNvar(); i++) {
606  neuron = (TNeuron*)inputLayer->At(i);
607  neuron->ForceValue( ev->GetValue(i) );
608  }
609  ForceNetworkCalculations();
610 
611  // check the output of the network
612  TObjArray* outputLayer = (TObjArray*)fNetwork->At( fNetwork->GetEntriesFast()-1 );
613  neuron = (TNeuron*)outputLayer->At(0);
614 
615  // cannot determine error
616  NoErrorCalc(err, errUpper);
617 
618  return neuron->GetActivationValue();
619 }
620 
621 ////////////////////////////////////////////////////////////////////////////////
622 /// get the regression value generated by the NN
623 
624 const std::vector<Float_t> &TMVA::MethodANNBase::GetRegressionValues()
625 {
626  TNeuron* neuron;
627 
628  TObjArray* inputLayer = (TObjArray*)fNetwork->At(0);
629 
630  const Event * ev = GetEvent();
631 
632  for (UInt_t i = 0; i < GetNvar(); i++) {
633  neuron = (TNeuron*)inputLayer->At(i);
634  neuron->ForceValue( ev->GetValue(i) );
635  }
636  ForceNetworkCalculations();
637 
638  // check the output of the network
639  TObjArray* outputLayer = (TObjArray*)fNetwork->At( fNetwork->GetEntriesFast()-1 );
640 
641  if (fRegressionReturnVal == NULL) fRegressionReturnVal = new std::vector<Float_t>();
642  fRegressionReturnVal->clear();
643 
644  Event * evT = new Event(*ev);
645  UInt_t ntgts = outputLayer->GetEntriesFast();
646  for (UInt_t itgt = 0; itgt < ntgts; itgt++) {
647  evT->SetTarget(itgt,((TNeuron*)outputLayer->At(itgt))->GetActivationValue());
648  }
649 
650  const Event* evT2 = GetTransformationHandler().InverseTransform( evT );
651  for (UInt_t itgt = 0; itgt < ntgts; itgt++) {
652  fRegressionReturnVal->push_back( evT2->GetTarget(itgt) );
653  }
654 
655  delete evT;
656 
657  return *fRegressionReturnVal;
658 }
659 
660 ////////////////////////////////////////////////////////////////////////////////
661 /// get the multiclass classification values generated by the NN
662 
663 const std::vector<Float_t> &TMVA::MethodANNBase::GetMulticlassValues()
664 {
665  TNeuron* neuron;
666 
667  TObjArray* inputLayer = (TObjArray*)fNetwork->At(0);
668 
669  const Event * ev = GetEvent();
670 
671  for (UInt_t i = 0; i < GetNvar(); i++) {
672  neuron = (TNeuron*)inputLayer->At(i);
673  neuron->ForceValue( ev->GetValue(i) );
674  }
675  ForceNetworkCalculations();
676 
677  // check the output of the network
678 
679  if (fMulticlassReturnVal == NULL) fMulticlassReturnVal = new std::vector<Float_t>();
680  fMulticlassReturnVal->clear();
681  std::vector<Float_t> temp;
682 
683  UInt_t nClasses = DataInfo().GetNClasses();
684  for (UInt_t icls = 0; icls < nClasses; icls++) {
685  temp.push_back(GetOutputNeuron( icls )->GetActivationValue() );
686  }
687 
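   // The loop below computes the softmax of the output-node activations,
   //    p_i = exp(t_i) / Sum_j exp(t_j),
   // written in the equivalent form 1 / (1 + Sum_{j != i} exp(t_j - t_i)).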
688  for(UInt_t iClass=0; iClass<nClasses; iClass++){
689  Double_t norm = 0.0;
690  for(UInt_t j=0;j<nClasses;j++){
691  if(iClass!=j)
692  norm+=exp(temp[j]-temp[iClass]);
693  }
694  (*fMulticlassReturnVal).push_back(1.0/(1.0+norm));
695  }
696 
697 
698 
699  return *fMulticlassReturnVal;
700 }
701 
702 
703 ////////////////////////////////////////////////////////////////////////////////
704 /// create XML description of ANN classifier
705 
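// Schematic structure of the XML written by this method (attribute values are illustrative):
//
//   <Weights>
//     <Layout NLayers="3">
//       <Layer Index="0" NNeurons="5">
//         <Neuron NSynapses="4"> w0 w1 w2 w3 </Neuron>
//       </Layer>
//     </Layout>
//     <InverseHessian NElements="..." NRows="..." NCols="...">   (written only if available)
//       <Row Index="0"> ... </Row>
//     </InverseHessian>
//   </Weights>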
706 void TMVA::MethodANNBase::AddWeightsXMLTo( void* parent ) const
707 {
708  Int_t numLayers = fNetwork->GetEntriesFast();
709  void* wght = gTools().xmlengine().NewChild(parent, 0, "Weights");
710  void* xmlLayout = gTools().xmlengine().NewChild(wght, 0, "Layout");
711  gTools().xmlengine().NewAttr(xmlLayout, 0, "NLayers", gTools().StringFromInt(fNetwork->GetEntriesFast()) );
712  TString weights = "";
713  for (Int_t i = 0; i < numLayers; i++) {
714  TObjArray* layer = (TObjArray*)fNetwork->At(i);
715  Int_t numNeurons = layer->GetEntriesFast();
716  void* layerxml = gTools().xmlengine().NewChild(xmlLayout, 0, "Layer");
717  gTools().xmlengine().NewAttr(layerxml, 0, "Index", gTools().StringFromInt(i) );
718  gTools().xmlengine().NewAttr(layerxml, 0, "NNeurons", gTools().StringFromInt(numNeurons) );
719  for (Int_t j = 0; j < numNeurons; j++) {
720  TNeuron* neuron = (TNeuron*)layer->At(j);
721  Int_t numSynapses = neuron->NumPostLinks();
722  void* neuronxml = gTools().AddChild(layerxml, "Neuron");
723  gTools().AddAttr(neuronxml, "NSynapses", gTools().StringFromInt(numSynapses) );
724  if(numSynapses==0) continue;
725  std::stringstream s("");
726  s.precision( 16 );
727  for (Int_t k = 0; k < numSynapses; k++) {
728  TSynapse* synapse = neuron->PostLinkAt(k);
729  s << std::scientific << synapse->GetWeight() << " ";
730  }
731  gTools().AddRawLine( neuronxml, s.str().c_str() );
732  }
733  }
734 
735  // if inverse hessian exists, write inverse hessian to weight file
736  if( fInvHessian.GetNcols()>0 ){
737  void* xmlInvHessian = gTools().xmlengine().NewChild(wght, 0, "InverseHessian");
738 
739  // get the matrix dimensions
740  Int_t nElements = fInvHessian.GetNoElements();
741  Int_t nRows = fInvHessian.GetNrows();
742  Int_t nCols = fInvHessian.GetNcols();
743  gTools().xmlengine().NewAttr(xmlInvHessian, 0, "NElements", gTools().StringFromInt(nElements) );
744  gTools().xmlengine().NewAttr(xmlInvHessian, 0, "NRows", gTools().StringFromInt(nRows) );
745  gTools().xmlengine().NewAttr(xmlInvHessian, 0, "NCols", gTools().StringFromInt(nCols) );
746 
747 // copy the matrix elements into a plain array
748  Double_t* elements = new Double_t[nElements+10];
749  fInvHessian.GetMatrix2Array( elements );
750 
751  // store the matrix elements row-wise
752  Int_t index = 0;
753  for( Int_t row = 0; row < nRows; ++row ){
754  void* xmlRow = gTools().xmlengine().NewChild(xmlInvHessian, 0, "Row");
755  gTools().xmlengine().NewAttr(xmlRow, 0, "Index", gTools().StringFromInt(row) );
756 
757  // create the rows
758  std::stringstream s("");
759  s.precision( 16 );
760  for( Int_t col = 0; col < nCols; ++col ){
761  s << std::scientific << (*(elements+index)) << " ";
762  ++index;
763  }
764  gTools().xmlengine().AddRawLine( xmlRow, s.str().c_str() );
765  }
766  delete[] elements;
767  }
768 }
769 
770 
771 ////////////////////////////////////////////////////////////////////////////////
772 /// read MLP from xml weight file
773 
774 void TMVA::MethodANNBase::ReadWeightsFromXML( void* wghtnode )
775 {
776  // build the layout first
777  Bool_t fromFile = kTRUE;
778  std::vector<Int_t>* layout = new std::vector<Int_t>();
779 
780  void* xmlLayout = NULL;
781  xmlLayout = gTools().GetChild(wghtnode, "Layout");
782  if( !xmlLayout )
783  xmlLayout = wghtnode;
784 
785  UInt_t nLayers;
786  gTools().ReadAttr( xmlLayout, "NLayers", nLayers );
787  layout->resize( nLayers );
788 
789  void* ch = gTools().xmlengine().GetChild(xmlLayout);
790  UInt_t index;
791  UInt_t nNeurons;
792  while (ch) {
793  gTools().ReadAttr( ch, "Index", index );
794  gTools().ReadAttr( ch, "NNeurons", nNeurons );
795  layout->at(index) = nNeurons;
796  ch = gTools().GetNextChild(ch);
797  }
798 
799  BuildNetwork( layout, NULL, fromFile );
800 // use 'slow' (exact) TanH if processing old weight file to ensure 100% compatible results
801 // otherwise use the new default, the 'fast tanh' approximation
802  if (GetTrainingTMVAVersionCode() < TMVA_VERSION(4,2,1) && fActivation->GetExpression().Contains("tanh")){
803  TActivationTanh* act = dynamic_cast<TActivationTanh*>( fActivation );
804  if (act) act->SetSlow();
805  }
806 
807  // fill the weights of the synapses
808  UInt_t nSyn;
809  Float_t weight;
810  ch = gTools().xmlengine().GetChild(xmlLayout);
811  UInt_t iLayer = 0;
812  while (ch) { // layers
813  TObjArray* layer = (TObjArray*)fNetwork->At(iLayer);
814  gTools().ReadAttr( ch, "Index", index );
815  gTools().ReadAttr( ch, "NNeurons", nNeurons );
816 
817  void* nodeN = gTools().GetChild(ch);
818  UInt_t iNeuron = 0;
819  while( nodeN ){ // neurons
820  TNeuron *neuron = (TNeuron*)layer->At(iNeuron);
821  gTools().ReadAttr( nodeN, "NSynapses", nSyn );
822  if( nSyn > 0 ){
823  const char* content = gTools().GetContent(nodeN);
824  std::stringstream s(content);
825  for (UInt_t iSyn = 0; iSyn<nSyn; iSyn++) { // synapses
826 
827  TSynapse* synapse = neuron->PostLinkAt(iSyn);
828  s >> weight;
829  //Log() << kWARNING << neuron << " " << weight << Endl;
830  synapse->SetWeight(weight);
831  }
832  }
833  nodeN = gTools().GetNextChild(nodeN);
834  iNeuron++;
835  }
836  ch = gTools().GetNextChild(ch);
837  iLayer++;
838  }
839 
840  delete layout;
841 
842  void* xmlInvHessian = NULL;
843  xmlInvHessian = gTools().GetChild(wghtnode, "InverseHessian");
844  if( !xmlInvHessian )
845  // no inverse hessian available
846  return;
847 
848  fUseRegulator = kTRUE;
849 
850  Int_t nElements = 0;
851  Int_t nRows = 0;
852  Int_t nCols = 0;
853  gTools().ReadAttr( xmlInvHessian, "NElements", nElements );
854  gTools().ReadAttr( xmlInvHessian, "NRows", nRows );
855  gTools().ReadAttr( xmlInvHessian, "NCols", nCols );
856 
857  // adjust the matrix dimensions
858  fInvHessian.ResizeTo( nRows, nCols );
859 
860  // prepare an array to read in the values
861  Double_t* elements;
862  if (nElements > std::numeric_limits<int>::max()-100){
863  Log() << kFATAL << "you tried to read a hessian matrix with " << nElements << " elements, --> too large, guess s.th. went wrong reading from the weight file" << Endl;
864  return;
865  } else {
866  elements = new Double_t[nElements+10];
867  }
868 
869 
870 
871  void* xmlRow = gTools().xmlengine().GetChild(xmlInvHessian);
872  Int_t row = 0;
873  index = 0;
874  while (xmlRow) { // rows
875  gTools().ReadAttr( xmlRow, "Index", row );
876 
877  const char* content = gTools().xmlengine().GetNodeContent(xmlRow);
878 
879  std::stringstream s(content);
880  for (Int_t iCol = 0; iCol<nCols; iCol++) { // columns
881  s >> (*(elements+index));
882  ++index;
883  }
884  xmlRow = gTools().xmlengine().GetNext(xmlRow);
885  ++row;
886  }
887 
888  fInvHessian.SetMatrixArray( elements );
889 
890  delete[] elements;
891 }
892 
893 ////////////////////////////////////////////////////////////////////////////////
894 /// destroy/clear the network then read it back in from the weights file
895 
896 void TMVA::MethodANNBase::ReadWeightsFromStream( std::istream & istr)
897 {
898  // delete network so we can reconstruct network from scratch
899 
900  TString dummy;
901 
902  // synapse weights
903  Double_t weight;
904  std::vector<Double_t>* weights = new std::vector<Double_t>();
905  istr>> dummy;
906  while (istr>> dummy >> weight) weights->push_back(weight); // use w/ slower write-out
907 
908  ForceWeights(weights);
909 
910 
911  delete weights;
912 }
913 
914 ////////////////////////////////////////////////////////////////////////////////
915 /// compute ranking of input variables by summing function of weights
916 
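// For each input variable the importance reported below is, schematically,
//    importance_i = <x_i>^2 * Sum_j (w_ij)^2
// where <x_i> = (|mean_S| + |mean_B|)/2 (bounded from below by the average RMS, and
// mapped to the normalised range if normalisation is used), and the sum runs over
// the synapses leaving the corresponding input neuron.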
917 const TMVA::Ranking* TMVA::MethodANNBase::CreateRanking()
918 {
919  // create the ranking object
920  fRanking = new Ranking( GetName(), "Importance" );
921 
922  TNeuron* neuron;
923  TSynapse* synapse;
924  Double_t importance, avgVal;
925  TString varName;
926 
927  for (UInt_t ivar = 0; ivar < GetNvar(); ivar++) {
928 
929  neuron = GetInputNeuron(ivar);
930  Int_t numSynapses = neuron->NumPostLinks();
931  importance = 0;
932  varName = GetInputVar(ivar); // fix this line
933 
934  // figure out average value of variable i
935  Double_t meanS, meanB, rmsS, rmsB, xmin, xmax;
936  Statistics( TMVA::Types::kTraining, varName,
937  meanS, meanB, rmsS, rmsB, xmin, xmax );
938 
939  avgVal = (TMath::Abs(meanS) + TMath::Abs(meanB))/2.0;
940  double meanrms = (TMath::Abs(rmsS) + TMath::Abs(rmsB))/2.;
941  if (avgVal<meanrms) avgVal = meanrms;
942  if (IsNormalised()) avgVal = 0.5*(1 + gTools().NormVariable( avgVal, GetXmin( ivar ), GetXmax( ivar )));
943 
944  for (Int_t j = 0; j < numSynapses; j++) {
945  synapse = neuron->PostLinkAt(j);
946  importance += synapse->GetWeight() * synapse->GetWeight();
947  }
948 
949  importance *= avgVal * avgVal;
950 
951  fRanking->AddRank( Rank( varName, importance ) );
952  }
953 
954  return fRanking;
955 }
956 
957 ////////////////////////////////////////////////////////////////////////////////
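/// fill one 2-D histogram per pair of consecutive layers with the synapse weights
/// connecting them; bulkname is used as the prefix of the histogram names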
958 
959 void TMVA::MethodANNBase::CreateWeightMonitoringHists( const TString& bulkname,
960  std::vector<TH1*>* hv ) const
961 {
962  TH2F* hist;
963  Int_t numLayers = fNetwork->GetEntriesFast();
964 
965  for (Int_t i = 0; i < numLayers-1; i++) {
966 
967  TObjArray* layer1 = (TObjArray*)fNetwork->At(i);
968  TObjArray* layer2 = (TObjArray*)fNetwork->At(i+1);
969  Int_t numNeurons1 = layer1->GetEntriesFast();
970  Int_t numNeurons2 = layer2->GetEntriesFast();
971 
972  TString name = Form("%s%i%i", bulkname.Data(), i, i+1);
973  hist = new TH2F(name + "", name + "",
974  numNeurons1, 0, numNeurons1, numNeurons2, 0, numNeurons2);
975 
976  for (Int_t j = 0; j < numNeurons1; j++) {
977 
978  TNeuron* neuron = (TNeuron*)layer1->At(j);
979  Int_t numSynapses = neuron->NumPostLinks();
980 
981  for (Int_t k = 0; k < numSynapses; k++) {
982 
983  TSynapse* synapse = neuron->PostLinkAt(k);
984  hist->SetBinContent(j+1, k+1, synapse->GetWeight());
985 
986  }
987  }
988 
989  if (hv) hv->push_back( hist );
990  else {
991  hist->Write();
992  delete hist;
993  }
994  }
995 }
996 
997 ////////////////////////////////////////////////////////////////////////////////
998 /// write histograms to file
999 
1000 void TMVA::MethodANNBase::WriteMonitoringHistosToFile() const
1001 {
1002  PrintMessage(Form("Write special histos to file: %s", BaseDir()->GetPath()), kTRUE);
1003 
1004  if (fEstimatorHistTrain) fEstimatorHistTrain->Write();
1005  if (fEstimatorHistTest ) fEstimatorHistTest ->Write();
1006 
1007  // histograms containing weights for architecture plotting (used in macro "network.cxx")
1008  CreateWeightMonitoringHists( "weights_hist" );
1009 
1010  // now save all the epoch-wise monitoring information
1011 #if __cplusplus > 199711L
1012  static std::atomic<int> epochMonitoringDirectoryNumber{0};
1013 #else
1014  static int epochMonitoringDirectoryNumber = 0;
1015 #endif
1016  int epochVal = epochMonitoringDirectoryNumber++;
1017  TDirectory* epochdir = NULL;
1018  if( epochVal == 0 )
1019  epochdir = BaseDir()->mkdir( "EpochMonitoring" );
1020  else
1021  epochdir = BaseDir()->mkdir( Form("EpochMonitoring_%4d",epochVal) );
1022 
1023  epochdir->cd();
1024  for (std::vector<TH1*>::const_iterator it = fEpochMonHistS.begin(); it != fEpochMonHistS.end(); ++it) {
1025  (*it)->Write();
1026  delete (*it);
1027  }
1028  for (std::vector<TH1*>::const_iterator it = fEpochMonHistB.begin(); it != fEpochMonHistB.end(); ++it) {
1029  (*it)->Write();
1030  delete (*it);
1031  }
1032  for (std::vector<TH1*>::const_iterator it = fEpochMonHistW.begin(); it != fEpochMonHistW.end(); ++it) {
1033  (*it)->Write();
1034  delete (*it);
1035  }
1036  BaseDir()->cd();
1037 }
1038 
1039 ////////////////////////////////////////////////////////////////////////////////
1040 /// write specific classifier response
1041 
1042 void TMVA::MethodANNBase::MakeClassSpecific( std::ostream& fout, const TString& className ) const
1043 {
1044  Int_t numLayers = fNetwork->GetEntries();
1045 
1046  fout << std::endl;
1047  fout << " double ActivationFnc(double x) const;" << std::endl;
1048  fout << " double OutputActivationFnc(double x) const;" << std::endl; //zjh
1049  fout << std::endl;
1050  int numNodesFrom = -1;
1051  for (Int_t lIdx = 0; lIdx < numLayers; lIdx++) {
1052  int numNodesTo = ((TObjArray*)fNetwork->At(lIdx))->GetEntries();
1053  if (numNodesFrom<0) { numNodesFrom=numNodesTo; continue; }
1054  fout << " double fWeightMatrix" << lIdx-1 << "to" << lIdx << "[" << numNodesTo << "][" << numNodesFrom << "];";
1055  fout << " // weight matrix from layer " << lIdx-1 << " to " << lIdx << std::endl;
1056  numNodesFrom = numNodesTo;
1057  }
1058  fout << std::endl;
1059  fout << "};" << std::endl;
1060 
1061  fout << std::endl;
1062 
1063  fout << "inline void " << className << "::Initialize()" << std::endl;
1064  fout << "{" << std::endl;
1065  fout << " // build network structure" << std::endl;
1066 
1067  for (Int_t i = 0; i < numLayers-1; i++) {
1068  fout << " // weight matrix from layer " << i << " to " << i+1 << std::endl;
1069  TObjArray* layer = (TObjArray*)fNetwork->At(i);
1070  Int_t numNeurons = layer->GetEntriesFast();
1071  for (Int_t j = 0; j < numNeurons; j++) {
1072  TNeuron* neuron = (TNeuron*)layer->At(j);
1073  Int_t numSynapses = neuron->NumPostLinks();
1074  for (Int_t k = 0; k < numSynapses; k++) {
1075  TSynapse* synapse = neuron->PostLinkAt(k);
1076  fout << " fWeightMatrix" << i << "to" << i+1 << "[" << k << "][" << j << "] = " << synapse->GetWeight() << ";" << std::endl;
1077  }
1078  }
1079  }
1080 
1081  fout << "}" << std::endl;
1082  fout << std::endl;
1083 
1084  // writing of the GetMvaValue__ method
1085  fout << "inline double " << className << "::GetMvaValue__( const std::vector<double>& inputValues ) const" << std::endl;
1086  fout << "{" << std::endl;
1087  fout << " if (inputValues.size() != (unsigned int)" << ((TObjArray *)fNetwork->At(0))->GetEntries() - 1 << ") {"
1088  << std::endl;
1089  fout << " std::cout << \"Input vector needs to be of size \" << "
1090  << ((TObjArray *)fNetwork->At(0))->GetEntries() - 1 << " << std::endl;" << std::endl;
1091  fout << " return 0;" << std::endl;
1092  fout << " }" << std::endl;
1093  fout << std::endl;
1094  for (Int_t lIdx = 1; lIdx < numLayers; lIdx++) {
1095  TObjArray *layer = (TObjArray *)fNetwork->At(lIdx);
1096  int numNodes = layer->GetEntries();
1097  fout << " std::array<double, " << numNodes << "> fWeights" << lIdx << " {{}};" << std::endl;
1098  }
1099  for (Int_t lIdx = 1; lIdx < numLayers - 1; lIdx++) {
1100  fout << " fWeights" << lIdx << ".back() = 1.;" << std::endl;
1101  }
1102  fout << std::endl;
1103  for (Int_t i = 0; i < numLayers - 1; i++) {
1104  fout << " // layer " << i << " to " << i + 1 << std::endl;
1105  if (i + 1 == numLayers - 1) {
1106  fout << " for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries() << "; o++) {" << std::endl;
1107  } else {
1108  fout << " for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries() - 1 << "; o++) {"
1109  << std::endl;
1110  }
1111  if (0 == i) {
1112  fout << " std::array<double, " << ((TObjArray *)fNetwork->At(i))->GetEntries()
1113  << "> buffer; // no need to initialise" << std::endl;
1114  fout << " for (int i = 0; i<" << ((TObjArray *)fNetwork->At(i))->GetEntries() << " - 1; i++) {"
1115  << std::endl;
1116  fout << " buffer[i] = fWeightMatrix" << i << "to" << i + 1 << "[o][i] * inputValues[i];" << std::endl;
1117  fout << " } // loop over i" << std::endl;
1118  fout << " buffer.back() = fWeightMatrix" << i << "to" << i + 1 << "[o]["
1119  << ((TObjArray *)fNetwork->At(i))->GetEntries() - 1 << "];" << std::endl;
1120  } else {
1121  fout << " std::array<double, " << ((TObjArray *)fNetwork->At(i))->GetEntries()
1122  << "> buffer; // no need to initialise" << std::endl;
1123  fout << " for (int i=0; i<" << ((TObjArray *)fNetwork->At(i))->GetEntries() << "; i++) {" << std::endl;
1124  fout << " buffer[i] = fWeightMatrix" << i << "to" << i + 1 << "[o][i] * fWeights" << i << "[i];"
1125  << std::endl;
1126  fout << " } // loop over i" << std::endl;
1127  }
1128  fout << " for (int i=0; i<" << ((TObjArray *)fNetwork->At(i))->GetEntries() << "; i++) {" << std::endl;
1129  if (fNeuronInputType == "sum") {
1130  fout << " fWeights" << i + 1 << "[o] += buffer[i];" << std::endl;
1131  } else if (fNeuronInputType == "sqsum") {
1132  fout << " fWeights" << i + 1 << "[o] += buffer[i]*buffer[i];" << std::endl;
1133  } else { // fNeuronInputType == TNeuronInputChooser::kAbsSum
1134  fout << " fWeights" << i + 1 << "[o] += fabs(buffer[i]);" << std::endl;
1135  }
1136  fout << " } // loop over i" << std::endl;
1137  fout << " } // loop over o" << std::endl;
1138  if (i + 1 == numLayers - 1) {
1139  fout << " for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries() << "; o++) {" << std::endl;
1140  } else {
1141  fout << " for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries() - 1 << "; o++) {"
1142  << std::endl;
1143  }
1144  if (i+1 != numLayers-1) // in the last layer no activation function is applied
1145  fout << " fWeights" << i + 1 << "[o] = ActivationFnc(fWeights" << i + 1 << "[o]);" << std::endl;
1146  else
1147  fout << " fWeights" << i + 1 << "[o] = OutputActivationFnc(fWeights" << i + 1 << "[o]);"
1148  << std::endl; // zjh
1149  fout << " } // loop over o" << std::endl;
1150  }
1151  fout << std::endl;
1152  fout << " return fWeights" << numLayers - 1 << "[0];" << std::endl;
1153  fout << "}" << std::endl;
1154 
1155  fout << std::endl;
1156  TString fncName = className+"::ActivationFnc";
1157  fActivation->MakeFunction(fout, fncName);
1158  fncName = className+"::OutputActivationFnc"; //zjh
1159  fOutput->MakeFunction(fout, fncName);//zjh
1160 
1161  fout << std::endl;
1162  fout << "// Clean up" << std::endl;
1163  fout << "inline void " << className << "::Clear()" << std::endl;
1164  fout << "{" << std::endl;
1165  fout << "}" << std::endl;
1166 }
1167 
1168 ////////////////////////////////////////////////////////////////////////////////
1169 /// who the hell makes such strange Debug flags that even use "global pointers"..
1170 
1171 Bool_t TMVA::MethodANNBase::Debug() const
1172 {
1173  return fgDEBUG;
1174 }