Code fragments from MethodANNBase.h:

   #ifndef ROOT_TMVA_MethodANNBase
   #define ROOT_TMVA_MethodANNBase

   template <typename WriteIterator>
   void GetLayerActivation( size_t layer, WriteIterator writeIterator );

   virtual void BuildNetwork( std::vector<Int_t>* layout, std::vector<Double_t>* weights = nullptr,
                              Bool_t fromFile = kFALSE );

   // inside the GetLayerActivation() template implementation, each neuron's
   // activation value is written through the iterator:
   (*writeIterator) = ((TNeuron*)layer->At(iNode))->GetActivationValue();
 
Base class for all TMVA methods using artificial neural networks.
 
TString fLayerSpec
layout specification option
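
The layer specification normally reaches this class through the option string of a concrete ANN method booked with the Factory (the DataLoader-based booking interface of recent ROOT versions is assumed). A usage sketch; the option names HiddenLayers, NeuronType, NCycles and RandomSeed are those of the MLP method, and their mapping onto fLayerSpec, fNeuronType, fNcycles and fRandomSeed is inferred from the member names:

   #include "TMVA/Factory.h"
   #include "TMVA/DataLoader.h"
   #include "TMVA/Types.h"

   // Sketch: booking the MLP method with an explicit layer specification.
   void BookMlpSketch(TMVA::Factory& factory, TMVA::DataLoader& loader)
   {
      factory.BookMethod(&loader, TMVA::Types::kMLP, "MLP",
                         "NeuronType=tanh:NCycles=600:HiddenLayers=N,N-1:RandomSeed=1");
   }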
 
std::vector< TH1 * > fEpochMonHistW
 
std::vector< TH1 * > fEpochMonHistB
 
std::vector< Int_t > * ParseLayoutString(TString layerSpec)
parse the layout specification string and return a vector, each entry containing the number of neurons to place in the corresponding layer
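
A minimal parsing sketch, assuming a plain comma-separated list of neuron counts such as "5,3"; the real option string also supports shorthands relative to the number of input variables (e.g. "N,N-1"), and the real parser presumably also accounts for the input and output layers, neither of which is handled here. ParseLayoutSketch is an illustrative name, not part of the class:

   #include <vector>
   #include "TString.h"
   #include "TObjArray.h"
   #include "TObjString.h"

   // Sketch only: split a comma-separated layout spec into neuron counts.
   std::vector<Int_t>* ParseLayoutSketch(const TString& layerSpec)
   {
      auto* layout = new std::vector<Int_t>;
      TObjArray* tokens = layerSpec.Tokenize(",");
      for (Int_t i = 0; i < tokens->GetEntriesFast(); ++i)
         layout->push_back(((TObjString*)tokens->At(i))->GetString().Atoi());
      delete tokens;
      return layout;
   }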
 
virtual void ProcessOptions()
do nothing specific at this moment
 
TNeuronInput * fInputCalculator
neuron input calculator
 
virtual ~MethodANNBase()
destructor
 
virtual Double_t GetMvaValue(Double_t *err=nullptr, Double_t *errUpper=nullptr)
get the mva value generated by the NN
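
Conceptually the value comes from the forward pass documented further down this page; a sketch of that sequence in a hypothetical derived class (MyAnnMethod and EvaluateEventSketch are illustrative names; the real GetMvaValue() works on the current event and may apply additional output transformations):

   // Sketch combining members documented on this page.
   Double_t MyAnnMethod::EvaluateEventSketch(const TMVA::Event* ev)
   {
      ForceNetworkInputs(ev);      // copy the event's input values into the input neurons
      ForceNetworkCalculations();  // propagate the inputs through all layers
      return GetNetworkOutput();   // activation value of the output neuron
   }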
 
std::vector< TNeuron * > fOutputNeurons
cache this for fast access
 
void DeleteNetworkLayer(TObjArray *&layer)
delete a network layer
 
virtual void BuildNetwork(std::vector< Int_t > *layout, std::vector< Double_t > *weights=nullptr, Bool_t fromFile=kFALSE)
build network given a layout (number of neurons in each layer) and optional weights array
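
Together with ParseLayoutString above, this suggests the usual construction sequence; a sketch in a hypothetical derived class (MyAnnMethod is an illustrative name; whether BuildNetwork itself initialises the weights when none are passed is not stated on this page, so InitWeights is called explicitly):

   // Sketch: build the network from the layout option, then randomise the weights.
   void MyAnnMethod::SetupNetworkSketch()
   {
      std::vector<Int_t>* layout = ParseLayoutString(fLayerSpec); // neurons per layer
      BuildNetwork(layout);   // no weights passed
      InitWeights();          // random initial synapse weights (seeded via fRandomSeed)
      delete layout;
   }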
 
const Ranking * CreateRanking()
compute the ranking of the input variables by summing a function of their weights
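
A sketch of that idea: rank each input variable by the summed squared weights of the synapses leaving its input neuron. The accessors NumPostLinks(), PostLinkAt() and GetWeight() on TNeuron/TSynapse are assumptions here, not taken from this page:

   #include "TMVA/TNeuron.h"
   #include "TMVA/TSynapse.h"

   // Sketch only: importance of one input variable (accessor names assumed).
   Double_t InputImportanceSketch(TMVA::TNeuron* inputNeuron)
   {
      Double_t sum = 0;
      for (Int_t i = 0; i < inputNeuron->NumPostLinks(); ++i) {
         const Double_t w = inputNeuron->PostLinkAt(i)->GetWeight();
         sum += w * w;
      }
      return sum;
   }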
 
TNeuron * GetInputNeuron(Int_t index)
return the neuron at the given index of the input layer
 
static const Bool_t fgDEBUG
debug flag
 
void DeleteNetwork()
delete/clear network
 
void GetLayerActivation(size_t layer, WriteIterator writeIterator)
write the activation values of the neurons in the given layer through the supplied output iterator
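
Since the target is a generic output iterator, a std::back_inserter works naturally. A usage sketch, assuming the method is accessible from the call site (otherwise it belongs in a derived class) and that "ann" is a MethodANNBase-derived object with at least two layers:

   #include <vector>
   #include <iterator>
   #include "Rtypes.h"

   std::vector<Double_t> activations;
   ann.GetLayerActivation(1, std::back_inserter(activations)); // activations of layer 1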
 
void WaitForKeyboard()
wait for keyboard input, for debugging
 
MethodANNBase(const TString &jobName, Types::EMVA methodType, const TString &methodTitle, DataSetInfo &theData, const TString &theOption)
standard constructor. Note: right now it is an option to choose the neuron input function, ...
 
TNeuron * GetOutputNeuron(Int_t index=0)
return the neuron at the given index of the output layer
 
void AddPreLinks(TNeuron *neuron, TObjArray *prevLayer)
add synapses connecting a neuron to its preceding layer
 
void PrintNeuron(TNeuron *neuron) const
print a neuron, for debugging
 
void PrintMessage(TString message, Bool_t force=kFALSE) const
print messages; printing can be turned off by setting the verbose and debug flags appropriately
 
void AddWeightsXMLTo(void *parent) const
create XML description of ANN classifier
 
TString fNeuronType
name of neuron activation function class
 
void InitANNBase()
initialize ANNBase object
 
Double_t GetNetworkOutput()
return the activation value of the output neuron
 
std::vector< Int_t > fRegulatorIdx
 
std::vector< Double_t > fRegulators
 
TH1F * fEstimatorHistTest
 
void SetNeuronInputCalculator(TNeuronInput *inputCalculator)
set the neuron input calculator
 
TObjArray * fInputLayer
cache this for fast access
 
std::vector< TH1 * > fEpochMonHistS
 
Int_t fRandomSeed
random seed for initial synapse weights
 
void PrintLayer(TObjArray *layer) const
print a single layer, for debugging
 
void InitWeights()
initialize the synapse weights randomly
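
A sketch of the idea, using ROOT's TRandom3 seeded like the documented fRandomSeed; the distribution and range are assumptions, not taken from the implementation:

   #include <vector>
   #include "TRandom3.h"

   // Sketch only: produce nSynapses reproducible random initial weights.
   std::vector<Double_t> RandomWeightsSketch(Int_t nSynapses, UInt_t seed)
   {
      TRandom3 rng(seed);
      std::vector<Double_t> weights(nSynapses);
      for (auto& w : weights) w = rng.Uniform(-0.5, 0.5); // assumed range
      return weights;
   }

A vector of this shape could also be handed to ForceWeights() below to impose a specific set of synapse weights.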
 
virtual void DeclareOptions()
define the options (their key words) that can be set in the option string; here the options valid for ...
 
virtual void ReadWeightsFromStream(std::istream &istr)
destroy/clear the network then read it back in from the weights file
 
void BuildLayers(std::vector< Int_t > *layout, Bool_t from_file=false)
build the network layers
 
TH1F * fEstimatorHistTrain
 
virtual void MakeClassSpecific(std::ostream &, const TString &) const
write specific classifier response
 
void ForceWeights(std::vector< Double_t > *weights)
force the synapse weights to the given values
 
void BuildLayer(Int_t numNeurons, TObjArray *curLayer, TObjArray *prevLayer, Int_t layerIndex, Int_t numLayers, Bool_t from_file=false)
build a single layer with neurons and synapses connecting this layer to the previous layer
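
BuildLayers() presumably loops over the layout and calls BuildLayer() once per layer; a sketch of that loop in a hypothetical derived class (where the created layers are stored afterwards is not shown on this page, so the sketch only wires them up):

   // Sketch: one TObjArray per layer, each wired to the previous one.
   void MyAnnMethod::BuildLayersSketch(std::vector<Int_t>* layout)
   {
      const Int_t numLayers = static_cast<Int_t>(layout->size());
      TObjArray* prevLayer = nullptr;
      for (Int_t i = 0; i < numLayers; ++i) {
         auto* curLayer = new TObjArray();
         BuildLayer((*layout)[i], curLayer, prevLayer, i, numLayers);
         prevLayer = curLayer;
      }
   }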
 
void ForceNetworkCalculations()
calculate input values to each neuron
 
void ForceNetworkInputs(const Event *ev, Int_t ignoreIndex=-1)
force the input values of the input neurons, setting the value of each input neuron from the given event
 
TString fNeuronInputType
name of neuron input calculator class
 
virtual const std::vector< Float_t > & GetMulticlassValues()
get the multiclass classification values generated by the NN
 
void ReadWeightsFromXML(void *wghtnode)
read MLP from xml weight file
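
On the application side, the XML weight file written by AddWeightsXMLTo() is normally read back through the Reader, which ends up in this method. A usage sketch (variable names and the weight-file path are placeholders):

   #include "TMVA/Reader.h"

   void ApplyMlpSketch()
   {
      float var1 = 0.f, var2 = 0.f;
      TMVA::Reader reader("!Color:!Silent");
      reader.AddVariable("var1", &var1);   // must match the training variables
      reader.AddVariable("var2", &var2);
      reader.BookMVA("MLP", "dataset/weights/TMVAClassification_MLP.weights.xml");
      var1 = 1.2f; var2 = -0.4f;
      const double mvaValue = reader.EvaluateMVA("MLP");
      (void)mvaValue;
   }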
 
Int_t fNcycles
number of epochs to train
 
Bool_t Debug() const
return the debug flag (the static fgDEBUG member)
 
virtual void WriteMonitoringHistosToFile() const
write histograms to file
 
virtual const std::vector< Float_t > & GetRegressionValues()
get the regression value generated by the NN
 
virtual void PrintNetwork() const
print network representation, for debugging
 
void SetActivation(TActivation *activation)
set the neuron activation function
 
TActivation * fActivation
neuron activation function
 
void CreateWeightMonitoringHists(const TString &bulkname, std::vector< TH1 * > *hv=nullptr) const
create histograms for monitoring the synapse weights
 
Related classes referenced above:

MethodBase: virtual base class for all MVA methods.

Ranking: ranking of variables in a method (implementation).

TActivation: interface for TNeuron activation function classes.

TNeuron: neuron class used by TMVA artificial neural network methods.
 