Excerpts from the MethodANNBase header:

#ifndef ROOT_TMVA_MethodANNBase
#define ROOT_TMVA_MethodANNBase

virtual void Train() = 0;

template <typename WriteIterator>
void GetLayerActivation( size_t layer, WriteIterator writeIterator );

virtual void BuildNetwork( std::vector<Int_t>* layout, std::vector<Double_t>* weights=NULL,
                           Bool_t fromFile=kFALSE );

template <typename WriteIterator>
inline void TMVA::MethodANNBase::GetLayerActivation( size_t layerNumber, WriteIterator writeIterator )
{
   // copy the activation value of each neuron in the requested layer to the output iterator
   TObjArray* layer  = (TObjArray*)fNetwork->At( layerNumber );
   UInt_t     nNodes = layer->GetEntriesFast();
   for (UInt_t iNode = 0; iNode < nNodes; iNode++) {
      (*writeIterator) = ((TNeuron*)layer->At( iNode ))->GetActivationValue();
      ++writeIterator;
   }
}
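A minimal usage sketch for GetLayerActivation, assuming it is called from code that has access to the member (e.g. a class derived from MethodANNBase); the layer index is illustrative:

   #include <vector>
   #include <iterator>

   // collect the activation values of layer 1 into a vector
   // (Double_t comes from the ROOT types already pulled in by the TMVA headers)
   std::vector<Double_t> activations;
   GetLayerActivation( 1, std::back_inserter(activations) );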
void WaitForKeyboard()
wait for keyboard input, for debugging
virtual void WriteMonitoringHistosToFile() const
write histograms to file
virtual Double_t GetMvaValue(Double_t *err=0, Double_t *errUpper=0)
get the MVA value generated by the NN
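A sketch of evaluating the trained network for the current event; here `method` is a placeholder for a pointer to a trained ANN-based method (not defined in this page), and the error arguments are optional:

   Double_t err = 0., errUpper = 0.;
   Double_t mva = method->GetMvaValue( &err, &errUpper );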
void BuildLayer(Int_t numNeurons, TObjArray *curLayer, TObjArray *prevLayer, Int_t layerIndex, Int_t numLayers, Bool_t from_file=false)
build a single layer with neurons and synapses connecting this layer to the previous layer ...
Random number generator class based on M. Matsumoto and T. Nishimura's Mersenne Twister algorithm.
void SetActivation(TActivation *activation)
void ForceNetworkCalculations()
calculate input values to each neuron
void CreateWeightMonitoringHists(const TString &bulkname, std::vector< TH1 *> *hv=0) const
void DeleteNetwork()
delete/clear network
MethodANNBase(const TString &jobName, Types::EMVA methodType, const TString &methodTitle, DataSetInfo &theData, const TString &theOption)
standard constructor. Note: choosing the neuron input function is currently an option, but only the input function "sum" leads to weight convergence; with any other choice the weights go to NaN and trigger an abort.
TActivation * fActivation
void AddPreLinks(TNeuron *neuron, TObjArray *prevLayer)
add synapses connecting a neuron to its preceding layer
const Ranking * CreateRanking()
compute ranking of input variables by summing a function of the weights
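A short sketch of retrieving the ranking after training; `method` is again a placeholder pointer to a trained ANN-based method, and the Print call assumes the standard TMVA::Ranking interface:

   #include "TMVA/Ranking.h"

   const TMVA::Ranking* ranking = method->CreateRanking();
   if (ranking) ranking->Print();   // print the variable-importance table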
virtual void ReadWeightsFromStream(std::istream &istr)
destroy/clear the network then read it back in from the weights file
Virtual base class for all MVA methods.
virtual void MakeClassSpecific(std::ostream &, const TString &) const
write specific classifier response
TNeuronInput * fInputCalculator
1-D histogram with a float per channel (see TH1 documentation)
Ranking for variables in method (implementation)
void GetLayerActivation(size_t layer, WriteIterator writeIterator)
Double_t GetActivationValue() const
void AddWeightsXMLTo(void *parent) const
create XML description of ANN classifier
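Application-time sketch: the XML written by AddWeightsXMLTo ends up in the usual TMVA weight file, which the Reader parses again (via ReadWeightsFromXML) when the classifier is booked. The variable names and the weight-file path below are placeholders that depend on the actual training setup:

   #include "TMVA/Reader.h"

   TMVA::Reader reader( "!Color:!Silent" );
   Float_t var1 = 0.f, var2 = 0.f;                 // placeholder input variables
   reader.AddVariable( "var1", &var1 );
   reader.AddVariable( "var2", &var2 );
   reader.BookMVA( "MLP", "dataset/weights/TMVAClassification_MLP.weights.xml" );

   var1 = 1.2f;  var2 = -0.4f;                     // fill with the current event
   Double_t mva = reader.EvaluateMVA( "MLP" );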
virtual void DeclareOptions()
define the options (their key words) that can be set in the option string; here the options valid for ANN methods are declared
TObject * At(Int_t idx) const
static const Bool_t fgDEBUG
void ForceWeights(std::vector< Double_t > *weights)
force the synapse weights
Bool_t Debug() const
who the hell makes such strange Debug flags that even use "global pointers"..
#define ClassDef(name, id)
std::vector< TH1 * > fEpochMonHistB
void PrintMessage(TString message, Bool_t force=kFALSE) const
print messages, turn off printing by setting verbose and debug flag appropriately ...
Neuron class used by TMVA artificial neural network methods.
virtual void ProcessOptions()
do nothing specific at this moment
Class that contains all the data information.
virtual void BuildNetwork(std::vector< Int_t > *layout, std::vector< Double_t > *weights=NULL, Bool_t fromFile=kFALSE)
build network given a layout (number of neurons in each layer) and optional weights array ...
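A minimal sketch of setting up a network from a layout, assuming it runs inside a class derived from MethodANNBase (BuildNetwork is not a user-level entry point); the layer sizes are illustrative:

   #include <vector>

   // input layer (4 neurons), one hidden layer (5 neurons), one output neuron
   std::vector<Int_t> layout = { 4, 5, 1 };
   BuildNetwork( &layout );   // no weights given; see InitWeights()/ForceWeights() for the two initialisation paths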
void SetNeuronInputCalculator(TNeuronInput *inputCalculator)
void ReadWeightsFromXML(void *wghtnode)
read MLP from xml weight file
virtual void PrintNetwork() const
print network representation, for debugging
Int_t GetEntriesFast() const
std::vector< TH1 * > fEpochMonHistW
std::vector< Double_t > fRegulators
TNeuron * GetInputNeuron(Int_t index)
std::vector< Int_t > fRegulatorIdx
void InitWeights()
initialize the synapse weights randomly
std::vector< Int_t > * ParseLayoutString(TString layerSpec)
parse layout specification string and return a vector, each entry containing the number of neurons to go in each successive layer
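A sketch of where that layout string comes from: the hidden-layer specification is usually passed through the HiddenLayers option when the method is booked, with "N" expanding to the number of input variables, and the input/output layer sizes added around it internally. This assumes the ROOT 6 DataLoader-based Factory interface; the job name, file name, dataset name, and option values are illustrative:

   #include "TFile.h"
   #include "TMVA/Factory.h"
   #include "TMVA/DataLoader.h"
   #include "TMVA/Types.h"

   TFile* outFile = TFile::Open( "TMVA_ANN.root", "RECREATE" );
   TMVA::Factory    factory( "TMVAClassification", outFile, "!V:!Silent:AnalysisType=Classification" );
   TMVA::DataLoader loader ( "dataset" );
   // ... AddVariable / AddSignalTree / AddBackgroundTree / PrepareTrainingAndTestTree ...

   // "N,N-1": two hidden layers with nVars and nVars-1 neurons respectively;
   // NeuronInputType=sum matches the convergence note for the constructor above
   factory.BookMethod( &loader, TMVA::Types::kMLP, "MLP",
                       "HiddenLayers=N,N-1:NeuronType=tanh:NeuronInputType=sum" );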
virtual const std::vector< Float_t > & GetMulticlassValues()
get the multiclass classification values generated by the NN
TNeuron * GetOutputNeuron(Int_t index=0)
void ForceNetworkInputs(const Event *ev, Int_t ignoreIndex=-1)
force the input values of the input neurons; set the value for each input neuron from the event
std::vector< TNeuron * > fOutputNeurons
virtual ~MethodANNBase()
destructor
Double_t GetNetworkOutput()
Abstract ClassifierFactory template that handles arbitrary types.
virtual const std::vector< Float_t > & GetRegressionValues()
get the regression value generated by the NN
void PrintNeuron(TNeuron *neuron) const
print a neuron, for debugging
std::vector< TH1 * > fEpochMonHistS
TH1F * fEstimatorHistTrain
void DeleteNetworkLayer(TObjArray *&layer)
delete a network layer
Interface for TNeuron activation function classes.
virtual void ReadWeightsFromStream(std::istream &)=0
void BuildLayers(std::vector< Int_t > *layout, Bool_t from_file=false)
build the network layers
Base class for all TMVA methods using artificial neural networks.
TH1F * fEstimatorHistTest
void PrintLayer(TObjArray *layer) const
print a single layer, for debugging
void InitANNBase()
initialize ANNBase object