#ifndef ROOT_TMVA_MethodANNBase
#define ROOT_TMVA_MethodANNBase
#ifndef ROOT_TString
#include "TString.h"
#endif
#include <vector>
#ifndef ROOT_TTree
#include "TTree.h"
#endif
#ifndef ROOT_TObjArray
#include "TObjArray.h"
#endif
#ifndef ROOT_TRandom3
#include "TRandom3.h"
#endif
#ifndef ROOT_TMatrixD
#include "TMatrixD.h"   // TMatrixD is held by value (fInvHessian)
#endif
#ifndef ROOT_TMVA_MethodBase
#include "TMVA/MethodBase.h"
#endif
#ifndef ROOT_TMVA_TActivation
#include "TMVA/TActivation.h"
#endif
#ifndef ROOT_TMVA_TNeuron
#include "TMVA/TNeuron.h"
#endif
#ifndef ROOT_TMVA_TNeuronInput
#include "TMVA/TNeuronInput.h"
#endif
class TH1;
class TH1F;
namespace TMVA {

// Base class for all TMVA methods based on artificial neural networks.
// It owns the network topology (layers of TNeuron objects stored in
// TObjArrays) together with the activation functions and synapse weights,
// and provides the common machinery for building the network, reading and
// writing weights, evaluating events and producing monitoring histograms.
// Concrete ANN methods derive from this class and implement the training loop.
class MethodANNBase : public MethodBase {
public:
// standard constructor: configure the method from a job option string
MethodANNBase( const TString& jobName,
               Types::EMVA methodType,
               const TString& methodTitle,
               DataSetInfo& theData,
               const TString& theOption,
               TDirectory* theTargetDir );

// constructor used when an already trained network is read back from a weight file
MethodANNBase( Types::EMVA methodType,
               DataSetInfo& theData,
               const TString& theWeightFile,
               TDirectory* theTargetDir );
virtual ~MethodANNBase();

// common initialisation of the base-class data members
void InitANNBase();

// install the activation function and the neuron input calculator;
// any previously set object is deleted, i.e. the class takes ownership
void SetActivation(TActivation* activation) {
   if (fActivation != NULL) delete fActivation;
   fActivation = activation;
}
void SetNeuronInputCalculator(TNeuronInput* inputCalculator) {
   if (fInputCalculator != NULL) delete fInputCalculator;
   fInputCalculator = inputCalculator;
}
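// Illustrative sketch only (the names "myActivation" and "myInputCalculator"
// are hypothetical, not part of TMVA): a derived method would typically
// install its helpers once during setup, e.g.
//
//    SetActivation( myActivation );                   // previous object is deleted
//    SetNeuronInputCalculator( myInputCalculator );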
// the training loop itself is implemented by the concrete ANN methods
virtual void Train() = 0;

// print the network topology and the synapse weights (debugging aid)
virtual void PrintNetwork() const;

// copy the activation values of all neurons in the given layer to an output iterator
template <typename WriteIterator>
void GetLayerActivation (size_t layer, WriteIterator writeIterator);

using MethodBase::ReadWeightsFromStream;

// write weights to / read weights from XML, and read weights from a plain text stream
void AddWeightsXMLTo( void* parent ) const;
void ReadWeightsFromXML( void* wghtnode );
virtual void ReadWeightsFromStream( std::istream& istr );

// network response for the current event: classification value (with optional
// error estimate), regression targets and multiclass outputs
virtual Double_t GetMvaValue( Double_t* err = 0, Double_t* errUpper = 0 );
virtual const std::vector<Float_t> &GetRegressionValues();
virtual const std::vector<Float_t> &GetMulticlassValues();

// write method-specific monitoring histograms to the output file
virtual void WriteMonitoringHistosToFile() const;

// ranking of the input variables
const Ranking* CreateRanking();

// option declaration and processing
virtual void DeclareOptions();
virtual void ProcessOptions();

Bool_t Debug() const;

// error estimator used during training: mean squared error or cross entropy
enum EEstimator { kMSE = 0, kCE };
protected:
// write the method-specific part of the standalone C++ response class
virtual void MakeClassSpecific( std::ostream&, const TString& ) const;

// parse the layer layout specification string into the number of neurons per layer
std::vector<Int_t>* ParseLayoutString( TString layerSpec );

// build the network from the parsed layout, optionally forcing a given set of weights
virtual void BuildNetwork( std::vector<Int_t>* layout, std::vector<Double_t>* weights = NULL,
                           Bool_t fromFile = kFALSE );

// load the current event's variable values into the input neurons
void ForceNetworkInputs( const Event* ev, Int_t ignoreIndex = -1 );

// value of the (first) output neuron after a forward pass
Double_t GetNetworkOutput() { return GetOutputNeuron()->GetActivationValue(); }

// verbose/debug helpers
void PrintMessage( TString message, Bool_t force = kFALSE ) const;

// propagate the input values through all layers of the network
void ForceNetworkCalculations();

void WaitForKeyboard();

// accessors
Int_t NumCycles() { return fNcycles; }
TNeuron* GetInputNeuron (Int_t index) { return (TNeuron*)fInputLayer->At(index); }
TNeuron* GetOutputNeuron(Int_t index = 0) { return fOutputNeurons.at(index); }
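// Typical evaluation sequence inside this class (orientation sketch only;
// 'ev' stands for the current TMVA::Event):
//
//    ForceNetworkInputs( ev );          // copy the event variables into the input neurons
//    ForceNetworkCalculations();        // forward pass through all layers
//    Double_t response = GetNetworkOutput();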
// basic network objects
TObjArray* fNetwork;              // array of layers, each layer a TObjArray of TNeuron objects
TObjArray* fSynapses;             // array of all synapses in the network
TActivation* fActivation;         // activation function used for the hidden layers
TActivation* fOutput;             // activation function used for the output layer
TActivation* fIdentity;           // identity activation, used for the input layer
TRandom3* frgen;                  // internal random number generator
TNeuronInput* fInputCalculator;   // rule for combining the incoming synapse signals of a neuron

// optional regularisation of the synapse weights
std::vector<Int_t> fRegulatorIdx;
std::vector<Double_t> fRegulators;

// training error estimator and its monitoring histograms
EEstimator fEstimator;
TString fEstimatorS;
TH1F* fEstimatorHistTrain;        // estimator value vs. training epoch (training sample)
TH1F* fEstimatorHistTest;         // estimator value vs. training epoch (test sample)

// monitoring histograms filled during training
void CreateWeightMonitoringHists( const TString& bulkname, std::vector<TH1*>* hv = 0 ) const;
std::vector<TH1*> fEpochMonHistS;
std::vector<TH1*> fEpochMonHistB;
std::vector<TH1*> fEpochMonHistW;

TMatrixD fInvHessian;             // approximate inverse Hessian, used when the regulator is enabled
bool fUseRegulator;               // switch for the regularised training mode
protected:
Int_t fRandomSeed;          // seed for the internal random number generator
Int_t fNcycles;             // number of training epochs
TString fNeuronType;        // name of the neuron activation function
TString fNeuronInputType;   // name of the neuron input combination rule
private:
// network construction helpers
void BuildLayers(std::vector<Int_t>* layout, Bool_t from_file = false);
void BuildLayer(Int_t numNeurons, TObjArray* curLayer, TObjArray* prevLayer,
                Int_t layerIndex, Int_t numLayers, Bool_t from_file = false);
void AddPreLinks(TNeuron* neuron, TObjArray* prevLayer);
void InitWeights();
void ForceWeights(std::vector<Double_t>* weights);

// cleanup
void DeleteNetwork();
void DeleteNetworkLayer(TObjArray*& layer);

// debug printing
void PrintLayer(TObjArray* layer) const;
void PrintNeuron(TNeuron* neuron) const;

TObjArray* fInputLayer;                 // array of input neurons
std::vector<TNeuron*> fOutputNeurons;   // output neurons (one per class or regression target)
TString fLayerSpec;                     // layout specification option string
static const Bool_t fgDEBUG = kTRUE;    // debug flag

ClassDef(MethodANNBase,0) // Base class for all TMVA methods using artificial neural networks
};
// copy the activation values of all neurons in layer 'layerNumber' of the
// network to the position given by the output iterator
template <typename WriteIterator>
inline void MethodANNBase::GetLayerActivation (size_t layerNumber, WriteIterator writeIterator)
{
   // silently ignore requests for layers that do not exist
   if (layerNumber >= (size_t)fNetwork->GetEntriesFast())
      return;

   TObjArray* layer = (TObjArray*)fNetwork->At(layerNumber);
   UInt_t nNodes = layer->GetEntriesFast();
   for (UInt_t iNode = 0; iNode < nNodes; iNode++)
   {
      (*writeIterator) = ((TNeuron*)layer->At(iNode))->GetActivationValue();
      ++writeIterator;
   }
}
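// Usage sketch (illustrative only, the variable names are hypothetical):
// a derived method can collect the activations of one layer after a forward
// pass, e.g.
//
//    std::vector<Double_t> activations;
//    ForceNetworkInputs( ev );
//    ForceNetworkCalculations();
//    GetLayerActivation( 1, std::back_inserter(activations) );  // assuming layer 0 is the input layer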
}
#endif