TMVA::MethodMLP

class TMVA::MethodMLP: public TMVA::MethodANNBase, public TMVA::IFitterTarget, public TMVA::ConvergenceTest


 Multilayer Perceptron class, built on MethodANNBase

Function Members (Methods)

public:
virtual ~MethodMLP()
void TObject::AbstractMethod(const char* method) const
void TMVA::Configurable::AddOptionsXMLTo(void* parent) const
void TMVA::MethodBase::AddOutput(TMVA::Types::ETreeType type, TMVA::Types::EAnalysisType analysisType)
virtual void TMVA::MethodANNBase::AddWeightsXMLTo(void* parent) const
virtual void TObject::AppendPad(Option_t* option = "")
TDirectory* TMVA::MethodBase::BaseDir() const
virtual void TObject::Browse(TBrowser* b)
void TMVA::Configurable::CheckForUnusedOptions() const
virtual void TMVA::MethodBase::CheckSetup()
static TClass* Class()
virtual const char* TObject::ClassName() const
virtual void TObject::Clear(Option_t* = "")
virtual TObject* TObject::Clone(const char* newname = "") const
virtual Int_t TObject::Compare(const TObject* obj) const
Double_t ComputeEstimator(vector<Double_t>& parameters)
TMVA::Configurable TMVA::Configurable::Configurable(const TString& theOption = "")
virtual void TObject::Copy(TObject& object) const
virtual const TMVA::Ranking* TMVA::MethodANNBase::CreateRanking()
TMVA::DataSet* TMVA::MethodBase::Data() const
TMVA::DataSetInfo& TMVA::MethodBase::DataInfo() const
Bool_t TMVA::MethodANNBase::Debug() const
virtual void TMVA::MethodBase::DeclareCompatibilityOptions()
virtual void TObject::Delete(Option_t* option = "")
void TMVA::MethodBase::DisableWriting(Bool_t setter)
virtual Int_t TObject::DistancetoPrimitive(Int_t px, Int_t py)
Bool_t TMVA::MethodBase::DoMulticlass() const
Bool_t TMVA::MethodBase::DoRegression() const
virtual void TObject::Draw(Option_t* option = "")
virtual void TObject::DrawClass() const
virtual TObject* TObject::DrawClone(Option_t* option = "") const
virtual void TObject::Dump() const
virtual void TObject::Error(const char* method, const char* msgfmt) const
virtual Double_t EstimatorFunction(vector<Double_t>& parameters)
virtual void TObject::Execute(const char* method, const char* params, Int_t* error = 0)
virtual void TObject::Execute(TMethod* method, TObjArray* params, Int_t* error = 0)
virtual void TObject::ExecuteEvent(Int_t event, Int_t px, Int_t py)
virtual void TObject::Fatal(const char* method, const char* msgfmt) const
virtual TObject* TObject::FindObject(const char* name) const
virtual TObject* TObject::FindObject(const TObject* obj) const
TMVA::Types::EAnalysisType TMVA::MethodBase::GetAnalysisType() const
const char* TMVA::Configurable::GetConfigDescription() const
const char* TMVA::Configurable::GetConfigName() const
virtual Option_t* TObject::GetDrawOption() const
static Long_t TObject::GetDtorOnly()
virtual Double_t TMVA::MethodBase::GetEfficiency(const TString&, TMVA::Types::ETreeType, Double_t& err)
const TMVA::Event* TMVA::MethodBase::GetEvent() const
const TMVA::Event* TMVA::MethodBase::GetEvent(const TMVA::Event* ev) const
const TMVA::Event* TMVA::MethodBase::GetEvent(Long64_t ievt) const
const TMVA::Event* TMVA::MethodBase::GetEvent(Long64_t ievt, TMVA::Types::ETreeType type) const
const vector<TMVA::Event*>& TMVA::MethodBase::GetEventCollection(TMVA::Types::ETreeType type)
virtual const char* TObject::GetIconName() const
const TString& TMVA::MethodBase::GetInputLabel(Int_t i) const
const TString& TMVA::MethodBase::GetInputTitle(Int_t i) const
const TString& TMVA::MethodBase::GetInputVar(Int_t i) const
const TString& TMVA::MethodBase::GetJobName() const
virtual Double_t TMVA::MethodBase::GetMaximumSignificance(Double_t SignalEvents, Double_t BackgroundEvents, Double_t& optimal_significance_value) const
Double_t TMVA::MethodBase::GetMean(Int_t ivar) const
const TString& TMVA::MethodBase::GetMethodName() const
TMVA::Types::EMVA TMVA::MethodBase::GetMethodType() const
TString TMVA::MethodBase::GetMethodTypeName() const
virtual vector<Float_t> TMVA::MethodBase::GetMulticlassEfficiency(vector<std::vector<Float_t> >& purity)
virtual vector<Float_t> TMVA::MethodBase::GetMulticlassTrainingEfficiency(vector<std::vector<Float_t> >& purity)
virtual const vector<Float_t>& TMVA::MethodANNBase::GetMulticlassValues()
virtual Double_t GetMvaValue(Double_t* err = 0, Double_t* errUpper = 0)
virtual const char* TMVA::MethodBase::GetName() const
UInt_t TMVA::MethodBase::GetNEvents() const
UInt_t TMVA::MethodBase::GetNTargets() const
UInt_t TMVA::MethodBase::GetNvar() const
UInt_t TMVA::MethodBase::GetNVariables() const
virtual char* TObject::GetObjectInfo(Int_t px, Int_t py) const
static Bool_t TObject::GetObjectStat()
virtual Option_t* TObject::GetOption() const
const TString& TMVA::Configurable::GetOptions() const
virtual Double_t TMVA::MethodBase::GetProba(Double_t mvaVal, Double_t ap_sig)
const TString TMVA::MethodBase::GetProbaName() const
virtual Double_t TMVA::MethodBase::GetRarity(Double_t mvaVal, TMVA::Types::ESBType reftype = Types::kBackground) const
virtual void TMVA::MethodBase::GetRegressionDeviation(UInt_t tgtNum, TMVA::Types::ETreeType type, Double_t& stddev, Double_t& stddev90Percent) const
virtual const vector<Float_t>& TMVA::MethodANNBase::GetRegressionValues()
Double_t TMVA::MethodBase::GetRMS(Int_t ivar) const
virtual Double_t TMVA::MethodBase::GetROCIntegral(TH1F* histS, TH1F* histB) const
virtual Double_t TMVA::MethodBase::GetROCIntegral(TMVA::PDF* pdfS = 0, TMVA::PDF* pdfB = 0) const
virtual Double_t TMVA::MethodBase::GetSeparation(TH1*, TH1*) const
virtual Double_t TMVA::MethodBase::GetSeparation(TMVA::PDF* pdfS = 0, TMVA::PDF* pdfB = 0) const
Double_t TMVA::MethodBase::GetSignalReferenceCut() const
Double_t TMVA::MethodBase::GetSignalReferenceCutOrientation() const
virtual Double_t TMVA::MethodBase::GetSignificance() const
const TMVA::Event* TMVA::MethodBase::GetTestingEvent(Long64_t ievt) const
Double_t TMVA::MethodBase::GetTestTime() const
const TString& TMVA::MethodBase::GetTestvarName() const
virtual const char* TObject::GetTitle() const
virtual Double_t TMVA::MethodBase::GetTrainingEfficiency(const TString&)
const TMVA::Event* TMVA::MethodBase::GetTrainingEvent(Long64_t ievt) const
UInt_t TMVA::MethodBase::GetTrainingROOTVersionCode() const
TString TMVA::MethodBase::GetTrainingROOTVersionString() const
UInt_t TMVA::MethodBase::GetTrainingTMVAVersionCode() const
TString TMVA::MethodBase::GetTrainingTMVAVersionString() const
Double_t TMVA::MethodBase::GetTrainTime() const
TMVA::TransformationHandler& TMVA::MethodBase::GetTransformationHandler(Bool_t takeReroutedIfAvailable = true)
const TMVA::TransformationHandler& TMVA::MethodBase::GetTransformationHandler(Bool_t takeReroutedIfAvailable = true) const
virtual UInt_t TObject::GetUniqueID() const
TString TMVA::MethodBase::GetWeightFileName() const
Double_t TMVA::MethodBase::GetXmax(Int_t ivar) const
Double_t TMVA::MethodBase::GetXmin(Int_t ivar) const
virtual Bool_t TObject::HandleTimer(TTimer* timer)
virtual Bool_t HasAnalysisType(TMVA::Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets)
virtual ULong_t TObject::Hash() const
bool HasInverseHessian()
Bool_t TMVA::MethodBase::HasMVAPdfs() const
virtual void TObject::Info(const char* method, const char* msgfmt) const
virtual Bool_t TObject::InheritsFrom(const char* classname) const
virtual Bool_t TObject::InheritsFrom(const TClass* cl) const
void TMVA::MethodANNBase::InitANNBase()
virtual void TObject::Inspect() const
void TObject::InvertBit(UInt_t f)
virtual TClass* IsA() const
virtual Bool_t TObject::IsEqual(const TObject* obj) const
virtual Bool_t TObject::IsFolder() const
Bool_t TObject::IsOnHeap() const
virtual Bool_t TMVA::MethodBase::IsSignalLike()
virtual Bool_t TMVA::MethodBase::IsSignalLike(Double_t mvaVal)
virtual Bool_t TObject::IsSortable() const
Bool_t TObject::IsZombie() const
virtual void TObject::ls(Option_t* option = "") const
virtual void TMVA::MethodBase::MakeClass(const TString& classFileName = TString("")) const
void TObject::MayNotUse(const char* method) const
TDirectory* TMVA::MethodBase::MethodBaseDir() const
TMVA::MethodMLP MethodMLP(TMVA::DataSetInfo& theData, const TString& theWeightFile, TDirectory* theTargetDir = 0)
TMVA::MethodMLP MethodMLP(const TString& jobName, const TString& methodTitle, TMVA::DataSetInfo& theData, const TString& theOption, TDirectory* theTargetDir = 0)
virtual Bool_t TMVA::MethodBase::MonitorBoost(TMVA::MethodBoost*)
virtual Bool_t TObject::Notify()
void TObject::Obsolete(const char* method, const char* asOfVers, const char* removedFromVers) const
static void TObject::operator delete(void* ptr)
static void TObject::operator delete(void* ptr, void* vp)
static void TObject::operator delete[](void* ptr)
static void TObject::operator delete[](void* ptr, void* vp)
void* TObject::operator new(size_t sz)
void* TObject::operator new(size_t sz, void* vp)
void* TObject::operator new[](size_t sz)
void* TObject::operator new[](size_t sz, void* vp)
TMVA::IMethod& TMVA::IMethod::operator=(const TMVA::IMethod&)
virtual map<TString,Double_t> TMVA::MethodBase::OptimizeTuningParameters(TString fomType = "ROCIntegral", TString fitType = "FitGA")
virtual void TObject::Paint(Option_t* option = "")
virtual void TMVA::Configurable::ParseOptions()
virtual void TObject::Pop()
virtual void TObject::Print(Option_t* option = "") const
virtual void TMVA::MethodBase::PrintHelpMessage() const
virtual void TMVA::MethodANNBase::PrintNetwork() const
void TMVA::Configurable::PrintOptions() const
void TMVA::MethodBase::ProcessSetup()
virtual void TMVA::IFitterTarget::ProgressNotifier(TString, TString)
virtual Int_t TObject::Read(const char* name)
void TMVA::Configurable::ReadOptionsFromStream(istream& istr)
void TMVA::Configurable::ReadOptionsFromXML(void* node)
void TMVA::MethodBase::ReadStateFromFile()
void TMVA::MethodBase::ReadStateFromStream(istream& tf)
void TMVA::MethodBase::ReadStateFromStream(TFile& rf)
void TMVA::MethodBase::ReadStateFromXMLString(const char* xmlstr)
virtual void TMVA::MethodANNBase::ReadWeightsFromStream(istream& istr)
virtual void TMVA::MethodANNBase::ReadWeightsFromXML(void* wghtnode)
virtual void TObject::RecursiveRemove(TObject* obj)
void TMVA::MethodBase::RerouteTransformationHandler(TMVA::TransformationHandler* fTargetTransformation)
virtual void TMVA::MethodBase::Reset()
void TObject::ResetBit(UInt_t f)
virtual void TObject::SaveAs(const char* filename = "", Option_t* option = "") const
virtual void TObject::SavePrimitive(ostream& out, Option_t* option = "")
void TMVA::MethodANNBase::SetActivation(TMVA::TActivation* activation)
virtual void TMVA::MethodBase::SetAnalysisType(TMVA::Types::EAnalysisType type)
void TMVA::MethodBase::SetBaseDir(TDirectory* methodDir)
void TObject::SetBit(UInt_t f)
void TObject::SetBit(UInt_t f, Bool_t set)
void TMVA::Configurable::SetConfigDescription(const char* d)
void TMVA::Configurable::SetConfigName(const char* n)
virtual void TMVA::MethodBase::SetCurrentEvent(Long64_t ievt) const
virtual void TObject::SetDrawOption(Option_t* option = "")
static void TObject::SetDtorOnly(void* obj)
void TMVA::MethodBase::SetMethodBaseDir(TDirectory* methodDir)
void TMVA::MethodBase::SetMethodDir(TDirectory* methodDir)
void TMVA::Configurable::SetMsgType(TMVA::EMsgType t)
void TMVA::MethodANNBase::SetNeuronInputCalculator(TMVA::TNeuronInput* inputCalculator)
static void TObject::SetObjectStat(Bool_t stat)
void TMVA::Configurable::SetOptions(const TString& s)
void TMVA::MethodBase::SetSignalReferenceCut(Double_t cut)
void TMVA::MethodBase::SetSignalReferenceCutOrientation(Double_t cutOrientation)
void TMVA::MethodBase::SetTestTime(Double_t testTime)
void TMVA::MethodBase::SetTestvarName(const TString& v = "")
void TMVA::MethodBase::SetTrainTime(Double_t trainTime)
virtual void TMVA::MethodBase::SetTuneParameters(map<TString,Double_t> tuneParameters)
virtual void TObject::SetUniqueID(UInt_t uid)
void TMVA::MethodBase::SetupMethod()
virtual void ShowMembers(TMemberInspector& insp)
virtual void Streamer(TBuffer& b)
void StreamerNVirtual(TBuffer& b)
virtual void TObject::SysError(const char* method, const char* msgfmt) const
Bool_t TObject::TestBit(UInt_t f) const
Int_t TObject::TestBits(UInt_t f) const
virtual void TMVA::MethodBase::TestClassification()
virtual void TMVA::MethodBase::TestMulticlass()
virtual void TMVA::MethodBase::TestRegression(Double_t& bias, Double_t& biasT, Double_t& dev, Double_t& devT, Double_t& rms, Double_t& rmsT, Double_t& mInf, Double_t& mInfT, Double_t& corr, TMVA::Types::ETreeType type)
virtual void Train()
void TMVA::MethodBase::TrainMethod()
virtual void TObject::UseCurrentStyle()
virtual void TObject::Warning(const char* method, const char* msgfmt) const
virtual Int_t TObject::Write(const char* name = 0, Int_t option = 0, Int_t bufsize = 0)
virtual Int_t TObject::Write(const char* name = 0, Int_t option = 0, Int_t bufsize = 0) const
virtual void TMVA::MethodBase::WriteEvaluationHistosToFile(TMVA::Types::ETreeType treetype)
virtual void TMVA::MethodANNBase::WriteMonitoringHistosToFile() const
void TMVA::Configurable::WriteOptionsToStream(ostream& o, const TString& prefix) const
void TMVA::MethodBase::WriteStateToFile() const
protected:
virtual void TMVA::MethodANNBase::BuildNetwork(vector<Int_t>* layout, vector<Double_t>* weights = NULL, Bool_t fromFile = kFALSE)
void TMVA::MethodANNBase::CreateWeightMonitoringHists(const TString& bulkname, vector<TH1*>* hv = 0) const
virtual void TObject::DoError(int level, const char* location, const char* fmt, va_list va) const
void TMVA::Configurable::EnableLooseOptions(Bool_t b = kTRUE)
void TMVA::MethodANNBase::ForceNetworkCalculations()
void TMVA::MethodANNBase::ForceNetworkInputs(const TMVA::Event* ev, Int_t ignoreIndex = -1)
virtual void GetHelpMessage() const
TMVA::TNeuron* TMVA::MethodANNBase::GetInputNeuron(Int_t index)
const TString& TMVA::MethodBase::GetInternalVarName(Int_t ivar) const
Double_t TMVA::MethodANNBase::GetNetworkOutput()
const TString& TMVA::MethodBase::GetOriginalVarName(Int_t ivar) const
TMVA::TNeuron* TMVA::MethodANNBase::GetOutputNeuron(Int_t index = 0)
const TString& TMVA::Configurable::GetReferenceFile() const
static TMVA::MethodBase* TMVA::MethodBase::GetThisBase()
Float_t TMVA::MethodBase::GetTWeight(const TMVA::Event* ev) const
const TString& TMVA::MethodBase::GetWeightFileDir() const
Bool_t TMVA::MethodBase::HasTrainingTree() const
Bool_t TMVA::MethodBase::Help() const
Bool_t TMVA::MethodBase::IgnoreEventsWithNegWeightsInTraining() const
Bool_t TMVA::MethodBase::IsConstructedFromWeightFile() const
Bool_t TMVA::MethodBase::IsNormalised() const
TMVA::MsgLogger& TMVA::Configurable::Log() const
Bool_t TMVA::Configurable::LooseOptionCheckingEnabled() const
virtual void MakeClassSpecific(ostream&, const TString&) const
virtual void TMVA::MethodBase::MakeClassSpecificHeader(ostream&, const TString& = "") const
void TObject::MakeZombie()
void TMVA::MethodBase::NoErrorCalc(Double_t *const err, Double_t *const errUpper)
Int_t TMVA::MethodANNBase::NumCycles()
vector<Int_t>* TMVA::MethodANNBase::ParseLayoutString(TString layerSpec)
void TMVA::MethodANNBase::PrintMessage(TString message, Bool_t force = kFALSE) const
void TMVA::Configurable::ResetSetFlag()
void TMVA::MethodBase::SetNormalised(Bool_t norm)
void TMVA::MethodBase::SetWeightFileDir(TString fileDir)
void TMVA::MethodBase::SetWeightFileName(TString)
void TMVA::MethodBase::Statistics(TMVA::Types::ETreeType treeType, const TString& theVarName, Double_t&, Double_t&, Double_t&, Double_t&, Double_t&, Double_t&)
Bool_t TMVA::MethodBase::TxtWeightsOnly() const
Bool_t TMVA::MethodBase::Verbose() const
void TMVA::MethodANNBase::WaitForKeyboard()
void TMVA::Configurable::WriteOptionsReferenceToFile()
private:
void AdjustSynapseWeights()
void BackPropagationMinimize(Int_t nEpochs)
void BFGSMinimize(Int_t nEpochs)
Double_t CalculateEstimator(TMVA::Types::ETreeType treeType = Types::kTraining, Int_t iEpoch = -1)
void CalculateNeuronDeltas()
void ComputeDEDw()
void DecaySynapseWeights(Bool_t lateEpoch)
virtual void DeclareOptions()
Double_t DerivDir(TMatrixD& Dir)
void GeneticMinimize()
void GetApproxInvHessian(TMatrixD& InvHessian, bool regulate = true)
Double_t GetCEErr(const TMVA::Event* ev, UInt_t index = 0)
Double_t GetDesiredOutput(const TMVA::Event* ev)
Double_t GetError()
Bool_t GetHessian(TMatrixD& Hessian, TMatrixD& Gamma, TMatrixD& Delta)
Double_t GetMSEErr(const TMVA::Event* ev, UInt_t index = 0)
virtual void Init()
void InitializeLearningRates()
Bool_t LineSearch(TMatrixD& Dir, vector<Double_t>& Buffer, Double_t* dError = 0)
virtual void ProcessOptions()
void SetDir(TMatrixD& Hessian, TMatrixD& Dir)
void SetDirWeights(vector<Double_t>& Origin, TMatrixD& Dir, Double_t alpha)
void SetGammaDelta(TMatrixD& Gamma, TMatrixD& Delta, vector<Double_t>& Buffer)
void Shuffle(Int_t* index, Int_t n)
void SimulateEvent(const TMVA::Event* ev)
void SteepestDir(TMatrixD& Dir)
void Train(Int_t nEpochs)
void TrainOneEpoch()
void TrainOneEvent(Int_t ievt)
void TrainOneEventFast(Int_t ievt, Float_t*& branchVar, Int_t& type)
void UpdateNetwork(Double_t desired, Double_t eventWeight = 1.0)
void UpdateNetwork(vector<Float_t>& desired, Double_t eventWeight = 1.0)
void UpdatePriors()
void UpdateRegulators()
void UpdateSynapses()

Data Members

public:
enum ETrainingMethod { kBP, kBFGS, kGA };
enum EBPTrainingMode { kSequential, kBatch };
enum TMVA::MethodANNBase::EEstimator { kMSE, kCE };
enum TMVA::MethodBase::EWeightFileType { kROOT, kTEXT };
enum TObject::EStatusBits { kCanDelete, kMustCleanup, kObjInCanvas, kIsReferenced, kHasUUID, kCannotPick, kNoContextMenu, kInvalidObject };
enum TObject::[unnamed] { kIsOnHeap, kNotDeleted, kZombie, kBitMask, kSingleKey, kOverwrite, kWriteDelete };
public:
Bool_t TMVA::MethodBase::fSetupCompleted    is method setup
const TMVA::Event* TMVA::MethodBase::fTmpEvent    ! temporary event when testing on a different DataSet than the own one
protected:
TMVA::TActivation* TMVA::MethodANNBase::fActivation    activation function to be used for hidden layers
TMVA::Types::EAnalysisType TMVA::MethodBase::fAnalysisType    method mode: true --> regression, false --> classification
UInt_t TMVA::MethodBase::fBackgroundClass    index of the Background-class
vector<TH1*> TMVA::MethodANNBase::fEpochMonHistB    epoch monitoring histograms for background
vector<TH1*> TMVA::MethodANNBase::fEpochMonHistS    epoch monitoring histograms for signal
vector<TH1*> TMVA::MethodANNBase::fEpochMonHistW    epoch monitoring histograms for weights
TMVA::MethodANNBase::EEstimator TMVA::MethodANNBase::fEstimator
TH1F* TMVA::MethodANNBase::fEstimatorHistTest    monitors convergence of independent test sample
TH1F* TMVA::MethodANNBase::fEstimatorHistTrain    monitors convergence of training sample
TString TMVA::MethodANNBase::fEstimatorS
TMVA::TActivation* TMVA::MethodANNBase::fIdentity    activation for input and output layers
TMVA::TNeuronInput* TMVA::MethodANNBase::fInputCalculator    input calculator for all neurons
vector<TString>* TMVA::MethodBase::fInputVars    vector of input variables used in MVA
TMatrixD TMVA::MethodANNBase::fInvHessian    zjh
vector<Float_t>* TMVA::MethodBase::fMulticlassReturnVal    holds the return values for the multiclass classification
Int_t TMVA::MethodBase::fNbins    number of bins in input variable histograms
Int_t TMVA::MethodBase::fNbinsH    number of bins in evaluation histograms
Int_t TMVA::MethodBase::fNbinsMVAoutput    number of bins in MVA output histograms
TObjArray* TMVA::MethodANNBase::fNetwork    TObjArray of TObjArrays representing the network
TMVA::TActivation* TMVA::MethodANNBase::fOutput    activation function to be used for output layers, depending on estimator
Int_t TMVA::MethodANNBase::fRandomSeed    random seed for initial synapse weights
TMVA::Ranking* TMVA::MethodBase::fRanking    pointer to ranking object (created by derived classifiers)
vector<Float_t>* TMVA::MethodBase::fRegressionReturnVal    holds the return values for the regression
vector<Int_t> TMVA::MethodANNBase::fRegulatorIdx    index to the different priors for every synapse
vector<Double_t> TMVA::MethodANNBase::fRegulators    the priors as regulator
UInt_t TMVA::MethodBase::fSignalClass    index of the Signal-class
TObjArray* TMVA::MethodANNBase::fSynapses    array of pointers to synapses, no structural data
bool TMVA::MethodANNBase::fUseRegulator    zjh
TRandom3* TMVA::MethodANNBase::frgen    random number generator for various uses
private:
TMVA::MethodMLP::EBPTrainingMode fBPMode    backprop learning mode (sequential or batch)
Int_t fBatchSize    batch size, only matters in batch learning mode
TString fBpModeS    backprop learning mode option string (sequential or batch)
bool fCalculateErrors    compute inverse Hessian matrix at the end of the training
Double_t fDecayRate    decay rate for the above learning rate
vector<std::pair<Float_t,Float_t> >* fDeviationsFromTargets    deviation from the targets, event weight
Bool_t fEpochMon    create and fill epoch-wise monitoring histograms (makes the output file big!)
Double_t fGA_SC_factor    GA settings: SC_factor
Int_t fGA_SC_rate    GA settings: SC_rate
Int_t fGA_SC_steps    GA settings: SC_steps
Int_t fGA_nsteps    GA settings: number of steps
Int_t fGA_preCalc    GA settings: number of pre-calc steps
Double_t fLastAlpha    line search variable
Double_t fLearnRate    learning rate for synapse weight adjustments
Double_t fPrior    zjh
vector<Double_t> fPriorDev    zjh
Int_t fResetStep    reset time (how often the Hessian matrix is cleared)
Float_t fSamplingEpoch    fraction of epochs where sampling is used
Float_t fSamplingFraction    fraction of events which is sampled for training
Bool_t fSamplingTesting    the testing sample is sampled
Bool_t fSamplingTraining    the training sample is sampled
Float_t fSamplingWeight    changing factor for event weights when sampling is turned on
Double_t fTau    line search variable
Int_t fTestRate    test for overtraining performed at every #th epoch
TString fTrainMethodS    training method option parameter
TMVA::MethodMLP::ETrainingMethod fTrainingMethod    method of training, BP or GA
Int_t fUpdateLimit    zjh
bool fUseRegulator    zjh
Float_t fWeightRange    suppress outliers for the estimator calculation
static const Bool_t fgPRINT_BATCH    debug flags
static const Int_t fgPRINT_ESTIMATOR_INC    debug flags
static const Bool_t fgPRINT_SEQ    debug flags

Function documentation

MethodMLP(const TString& jobName, const TString& methodTitle, TMVA::DataSetInfo& theData, const TString& theOption, TDirectory* theTargetDir = 0)
 standard constructor
MethodMLP(TMVA::DataSetInfo& theData, const TString& theWeightFile, TDirectory* theTargetDir = 0)
 constructor from a weight file
~MethodMLP()
 destructor; nothing to be done
Bool_t HasAnalysisType(TMVA::Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets)
 MLP can handle classification with 2 classes and regression with one regression-target
void Init()
 default initializations
void DeclareOptions()
 define the options (their key words) that can be set in the option string
 known options:
 TrainingMethod  <string>     Training method
    available values are:         BP   Back-Propagation <default>
                                  GA   Genetic Algorithm (takes a LONG time)

 LearningRate    <float>      NN learning rate parameter
 DecayRate       <float>      Decay rate for learning parameter
 TestRate        <int>        Test for overtraining performed at every #th epoch

 BPMode          <string>     Back-propagation learning mode
    available values are:         sequential <default>
                                  batch

 BatchSize       <int>        Batch size: number of events/batch, only set if in Batch Mode,
                                          -1 for BatchSize=number_of_events
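
 As an illustration, the options above are passed as one colon-separated string when the
 method is booked. A minimal sketch, assuming the classic (pre-DataLoader) TMVA::Factory
 interface; the numeric values are purely illustrative, not necessarily the defaults:

    // book an MLP trained with back-propagation in sequential mode
    factory->BookMethod( TMVA::Types::kMLP, "MLP",
                         "TrainingMethod=BP:LearningRate=0.02:DecayRate=0.01:"
                         "TestRate=10:BPMode=sequential:BatchSize=-1" );

 Network-layout options such as HiddenLayers and NCycles are declared in MethodANNBase and
 are therefore not listed here.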
void ProcessOptions()
 process user options
void InitializeLearningRates()
 initialize learning rates of synapses, used only by backpropagation
Double_t CalculateEstimator(TMVA::Types::ETreeType treeType = Types::kTraining, Int_t iEpoch = -1)
 calculate the estimator that training is attempting to minimize
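 For reference, the two estimator choices exposed through EEstimator (kMSE, kCE) correspond
 to the usual mean-squared-error and cross-entropy forms; a sketch, where y_a is the network
 output, \hat y_a the desired output and w_a the event weight (the exact weight
 normalisation used internally is an assumption):

    E_{\mathrm{MSE}} = \frac{1}{\sum_a w_a} \sum_a \frac{w_a}{2} \left( y_a - \hat y_a \right)^2
    E_{\mathrm{CE}}  = -\frac{1}{\sum_a w_a} \sum_a w_a \left[ \hat y_a \ln y_a + (1 - \hat y_a) \ln (1 - y_a) \right]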
void Train(Int_t nEpochs)
void BFGSMinimize(Int_t nEpochs)
 train network with BFGS algorithm
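 The helper methods below (SetGammaDelta, GetHessian, SetDir, DerivDir, LineSearch) provide
 the usual quasi-Newton ingredients. A hedged sketch of the textbook BFGS update they
 suggest, with \delta_k = w_{k+1} - w_k and \gamma_k = \nabla E_{k+1} - \nabla E_k (whether
 TMVA uses exactly this rank-two form is an assumption):

    H^{-1}_{k+1} = H^{-1}_k
                 + \left( 1 + \frac{\gamma_k^{\mathsf T} H^{-1}_k \gamma_k}{\delta_k^{\mathsf T} \gamma_k} \right)
                   \frac{\delta_k \delta_k^{\mathsf T}}{\delta_k^{\mathsf T} \gamma_k}
                 - \frac{H^{-1}_k \gamma_k \delta_k^{\mathsf T} + \delta_k \gamma_k^{\mathsf T} H^{-1}_k}{\delta_k^{\mathsf T} \gamma_k}

 with search direction d_k = -H^{-1}_k \nabla E_k and the step length along d_k chosen by
 LineSearch.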
void SetGammaDelta(TMatrixD& Gamma, TMatrixD& Delta, vector<Double_t>& Buffer)
void ComputeDEDw()
void SimulateEvent(const TMVA::Event* ev)
void SteepestDir(TMatrixD& Dir)
Bool_t GetHessian(TMatrixD& Hessian, TMatrixD& Gamma, TMatrixD& Delta)
void SetDir(TMatrixD& Hessian, TMatrixD& Dir)
Double_t DerivDir(TMatrixD& Dir)
Bool_t LineSearch(TMatrixD& Dir, vector<Double_t>& Buffer, Double_t* dError = 0)
void SetDirWeights(vector<Double_t>& Origin, TMatrixD& Dir, Double_t alpha)
Double_t GetError()
Double_t GetMSEErr(const TMVA::Event* ev, UInt_t index = 0)
Double_t GetCEErr(const TMVA::Event* ev, UInt_t index = 0)
void BackPropagationMinimize(Int_t nEpochs)
 minimize estimator / train network with backpropagation algorithm
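 In sequential (online) mode each training event a triggers an immediate update of every
 synapse weight, with the learning rate \eta (fLearnRate) reduced over the epochs via
 fDecayRate (see DecaySynapseWeights). A sketch of the assumed update, with w_a the event
 weight:

    w_{ij} \leftarrow w_{ij} - \eta \, w_a \, \frac{\partial E_a}{\partial w_{ij}}, \qquad
    \eta \leftarrow \eta \, (1 - \mathrm{DecayRate}) \quad \text{once per epoch (assumed form)}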
void TrainOneEpoch()
 train the network over a single epoch/cycle of events
void Shuffle(Int_t* index, Int_t n)
 Input:
   index: the array to shuffle
   n: the size of the array
 Output:
   index: the shuffled indexes
 This method is used for sequential training
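 A minimal sketch of how such an in-place shuffle can be written with the class's TRandom3
 generator (frgen); the function name ShuffleSketch and the explicit rng argument are
 illustrative only, not the actual private implementation:

    #include "TRandom3.h"
    #include <utility>   // std::swap

    void ShuffleSketch(Int_t* index, Int_t n, TRandom3& rng)
    {
       // Fisher-Yates: walk backwards, swapping each slot with a random earlier one
       for (Int_t i = n - 1; i > 0; --i) {
          Int_t j = static_cast<Int_t>(rng.Integer(i + 1));   // uniform in [0, i]
          std::swap(index[i], index[j]);
       }
    }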
void DecaySynapseWeights(Bool_t lateEpoch)
 decay synapse weights
 in the last 10 epochs, lower the learning rate even more to find a good minimum
void TrainOneEventFast(Int_t ievt, Float_t*& branchVar, Int_t& type)
 fast per-event training
void TrainOneEvent(Int_t ievt)
 train network over a single event
 this uses the new event model
Double_t GetDesiredOutput(const TMVA::Event* ev)
 get the desired output of this event
void UpdateNetwork(Double_t desired, Double_t eventWeight = 1.0)
 update the network based on how closely
 the output matched the desired output
void UpdateNetwork(vector<Float_t>& desired, Double_t eventWeight = 1.0)
 update the network based on how closely
 the output matched the desired output
void CalculateNeuronDeltas()
 have each neuron calculate its delta by backpropagation
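 Together, UpdateNetwork and CalculateNeuronDeltas realise standard error back-propagation.
 A sketch of the familiar recursion, with f the neuron activation, a_j its weighted input
 and o_i the output of the preceding neuron (the index conventions are assumptions):

    \delta_k^{\mathrm{out}} = \left( y_k - \hat y_k \right) f'(a_k), \qquad
    \delta_j = f'(a_j) \sum_k w_{jk} \, \delta_k, \qquad
    \Delta w_{ij} = -\eta \, \delta_j \, o_i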
void GeneticMinimize()
 creates a genetics class similar to GeneticCut,
 gives it a vector of parameter ranges (parameters = weights),
 links the fitness function of this class to ComputeEstimator,
 instantiates the GA (see MethodCuts) and runs it;
 the same interface should eventually serve GA, Minuit and random sampling
Double_t EstimatorFunction(vector<Double_t>& parameters)
 interface to the estimator
Double_t ComputeEstimator(vector<Double_t>& parameters)
 this function is called by GeneticANN for GA optimization
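 Since MethodMLP also inherits from TMVA::IFitterTarget (see the class declaration at the
 top), a fitter rates a candidate weight configuration through EstimatorFunction, which
 forwards to ComputeEstimator. A hypothetical illustration of that call path (mlp,
 candidateWeights and nSynapses are placeholder names):

    // one entry per synapse; the fitter proposes the values
    std::vector<Double_t> candidateWeights(nSynapses, 0.0);      // nSynapses: assumed count
    Double_t fitness = mlp.EstimatorFunction(candidateWeights);  // forwards to ComputeEstimator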
void UpdateSynapses()
 update synapse error fields and adjust the weights (if in sequential mode)
void AdjustSynapseWeights()
 just adjust the synapse weights (should be called in batch mode)
void UpdatePriors()
void UpdateRegulators()
void GetApproxInvHessian(TMatrixD& InvHessian, bool regulate = true)
Double_t GetMvaValue(Double_t* err = 0, Double_t* errUpper = 0)
void MakeClassSpecific(ostream& , const TString& ) const
 write specific classifier response
void GetHelpMessage() const
 get help message text

 typical length of text line:
         "|--------------------------------------------------------------|"
void Train()
bool HasInverseHessian()
{ return fCalculateErrors; }
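 HasInverseHessian() reports whether the approximate inverse Hessian was computed
 (fCalculateErrors), which is what makes the optional error arguments of GetMvaValue
 meaningful. A hedged usage sketch (mlp is a placeholder pointer to the method):

    Double_t err = 0., errUpper = 0.;
    Double_t mva = mlp->GetMvaValue(&err, &errUpper);
    // err/errUpper are presumably only filled when HasInverseHessian() is true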