
class TMVA::MethodCFMlpANN: public TMVA::MethodBase, private TMVA::MethodCFMlpANN_Utils



/* Interface to the Clermont-Ferrand artificial neural network

The CFMlpANN belongs to the class of multilayer perceptrons (MLP), which are feed-forward networks following this propagation schema:

[Figure: schema of an artificial neural network]

The input layer contains as many neurons as input variables used in the MVA. The output layer contains two neurons for the signal and background event classes. In between the input and output layers are a variable number k of hidden layers with arbitrary numbers of neurons. (While the structure of the input and output layers is determined by the problem, the hidden layers can be configured by the user through the option string of the method booking.)
As indicated in the sketch, all neuron inputs to a layer are linear combinations of the neuron outputs of the previous layer. The transfer from input to output within a neuron is performed by means of an "activation function". In general, the activation function of a neuron can be zero (deactivated), one (linear), or non-linear. The above example uses a sigmoid activation function. The transfer function of the output layer is usually linear. As a consequence, an ANN without a hidden layer should give identical discrimination power to a linear discriminant analysis (Fisher). With one hidden layer, the ANN computes a linear combination of sigmoid functions.
The learning method used by the CFMlpANN is stochastic only. */
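For orientation, booking this method in a TMVA training macro might look like the following sketch (assuming an already configured TMVA::Factory with input variables and trees registered; the option values shown are illustrative, not defaults):

   // Sketch: book the CFMlpANN method with a TMVA Factory (ROOT 5-style API).
   // "factory" is assumed to be set up elsewhere; the NCycles and HiddenLayers
   // values are examples (N = number of input variables).
   #include "TMVA/Factory.h"
   #include "TMVA/Types.h"

   void BookCFMlpANN(TMVA::Factory* factory)
   {
      factory->BookMethod( TMVA::Types::kCFMlpANN, "CFMlpANN",
                           "!H:!V:NCycles=2000:HiddenLayers=N+1,N" );
   }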

Function Members (Methods)

public:
virtual ~MethodCFMlpANN()
void TObject::AbstractMethod(const char* method) const
void TMVA::Configurable::AddOptionsXMLTo(void* parent) const
void TMVA::MethodBase::AddOutput(TMVA::Types::ETreeType type, TMVA::Types::EAnalysisType analysisType)
virtual void AddWeightsXMLTo(void* parent) const
virtual void TObject::AppendPad(Option_t* option = "")
TDirectory* TMVA::MethodBase::BaseDir() const
virtual void TObject::Browse(TBrowser* b)
void TMVA::Configurable::CheckForUnusedOptions() const
virtual void TMVA::MethodBase::CheckSetup()
static TClass* Class()
virtual const char* TObject::ClassName() const
virtual void TObject::Clear(Option_t* = "")
virtual TObject* TObject::Clone(const char* newname = "") const
virtual Int_t TObject::Compare(const TObject* obj) const
TMVA::Configurable TMVA::Configurable::Configurable(const TString& theOption = "")
virtual void TObject::Copy(TObject& object) const
virtual const TMVA::Ranking* CreateRanking()
TMVA::DataSet* TMVA::MethodBase::Data() const
TMVA::DataSetInfo& TMVA::MethodBase::DataInfo() const
virtual void TMVA::MethodBase::DeclareCompatibilityOptions()
virtual void TObject::Delete(Option_t* option = "")
void TMVA::MethodBase::DisableWriting(Bool_t setter)
virtual Int_t TObject::DistancetoPrimitive(Int_t px, Int_t py)
Bool_t TMVA::MethodBase::DoMulticlass() const
Bool_t TMVA::MethodBase::DoRegression() const
virtual void TObject::Draw(Option_t* option = "")
virtual void TObject::DrawClass() const
virtual TObject* TObject::DrawClone(Option_t* option = "") const
virtual void TObject::Dump() const
virtual void TObject::Error(const char* method, const char* msgfmt) const
virtual void TObject::Execute(const char* method, const char* params, Int_t* error = 0)
virtual void TObject::Execute(TMethod* method, TObjArray* params, Int_t* error = 0)
virtual void TObject::ExecuteEvent(Int_t event, Int_t px, Int_t py)
virtual void TObject::Fatal(const char* method, const char* msgfmt) const
virtual TObject* TObject::FindObject(const char* name) const
virtual TObject* TObject::FindObject(const TObject* obj) const
TMVA::Types::EAnalysisType TMVA::MethodBase::GetAnalysisType() const
Int_t GetClass(Int_t ivar) const
const char* TMVA::Configurable::GetConfigDescription() const
const char* TMVA::Configurable::GetConfigName() const
Double_t GetData(Int_t isel, Int_t ivar) const
virtual Option_t* TObject::GetDrawOption() const
static Long_t TObject::GetDtorOnly()
virtual Double_t TMVA::MethodBase::GetEfficiency(const TString&, TMVA::Types::ETreeType, Double_t& err)
const TMVA::Event* TMVA::MethodBase::GetEvent() const
const TMVA::Event* TMVA::MethodBase::GetEvent(const TMVA::Event* ev) const
const TMVA::Event* TMVA::MethodBase::GetEvent(Long64_t ievt) const
const TMVA::Event* TMVA::MethodBase::GetEvent(Long64_t ievt, TMVA::Types::ETreeType type) const
const vector<TMVA::Event*>& TMVA::MethodBase::GetEventCollection(TMVA::Types::ETreeType type)
virtual const char* TObject::GetIconName() const
const TString& TMVA::MethodBase::GetInputLabel(Int_t i) const
const TString& TMVA::MethodBase::GetInputTitle(Int_t i) const
const TString& TMVA::MethodBase::GetInputVar(Int_t i) const
const TString& TMVA::MethodBase::GetJobName() const
virtual Double_t TMVA::MethodBase::GetMaximumSignificance(Double_t SignalEvents, Double_t BackgroundEvents, Double_t& optimal_significance_value) const
Double_t TMVA::MethodBase::GetMean(Int_t ivar) const
const TString& TMVA::MethodBase::GetMethodName() const
TMVA::Types::EMVA TMVA::MethodBase::GetMethodType() const
TString TMVA::MethodBase::GetMethodTypeName() const
virtual vector<Float_t> TMVA::MethodBase::GetMulticlassEfficiency(vector<std::vector<Float_t> >& purity)
virtual vector<Float_t> TMVA::MethodBase::GetMulticlassTrainingEfficiency(vector<std::vector<Float_t> >& purity)
virtual const vector<Float_t>& TMVA::MethodBase::GetMulticlassValues()
virtual Double_t GetMvaValue(Double_t* err = 0, Double_t* errUpper = 0)
virtual const char* TMVA::MethodBase::GetName() const
UInt_t TMVA::MethodBase::GetNEvents() const
UInt_t TMVA::MethodBase::GetNTargets() const
UInt_t TMVA::MethodBase::GetNvar() const
UInt_t TMVA::MethodBase::GetNVariables() const
virtual char* TObject::GetObjectInfo(Int_t px, Int_t py) const
static Bool_t TObject::GetObjectStat()
virtual Option_t* TObject::GetOption() const
const TString& TMVA::Configurable::GetOptions() const
virtual Double_t TMVA::MethodBase::GetProba(Double_t mvaVal, Double_t ap_sig)
const TString TMVA::MethodBase::GetProbaName() const
virtual Double_t TMVA::MethodBase::GetRarity(Double_t mvaVal, TMVA::Types::ESBType reftype = Types::kBackground) const
virtual void TMVA::MethodBase::GetRegressionDeviation(UInt_t tgtNum, TMVA::Types::ETreeType type, Double_t& stddev, Double_t& stddev90Percent) const
virtual const vector<Float_t>& TMVA::MethodBase::GetRegressionValues()
Double_t TMVA::MethodBase::GetRMS(Int_t ivar) const
virtual Double_t TMVA::MethodBase::GetROCIntegral(TH1F* histS, TH1F* histB) const
virtual Double_t TMVA::MethodBase::GetROCIntegral(TMVA::PDF* pdfS = 0, TMVA::PDF* pdfB = 0) const
virtual Double_t TMVA::MethodBase::GetSeparation(TH1*, TH1*) const
virtual Double_t TMVA::MethodBase::GetSeparation(TMVA::PDF* pdfS = 0, TMVA::PDF* pdfB = 0) const
Double_t TMVA::MethodBase::GetSignalReferenceCut() const
Double_t TMVA::MethodBase::GetSignalReferenceCutOrientation() const
virtual Double_t TMVA::MethodBase::GetSignificance() const
const TMVA::Event* TMVA::MethodBase::GetTestingEvent(Long64_t ievt) const
Double_t TMVA::MethodBase::GetTestTime() const
const TString& TMVA::MethodBase::GetTestvarName() const
virtual const char* TObject::GetTitle() const
virtual Double_t TMVA::MethodBase::GetTrainingEfficiency(const TString&)
const TMVA::Event* TMVA::MethodBase::GetTrainingEvent(Long64_t ievt) const
UInt_t TMVA::MethodBase::GetTrainingROOTVersionCode() const
TString TMVA::MethodBase::GetTrainingROOTVersionString() const
UInt_t TMVA::MethodBase::GetTrainingTMVAVersionCode() const
TString TMVA::MethodBase::GetTrainingTMVAVersionString() const
Double_t TMVA::MethodBase::GetTrainTime() const
TMVA::TransformationHandler& TMVA::MethodBase::GetTransformationHandler(Bool_t takeReroutedIfAvailable = true)
const TMVA::TransformationHandler& TMVA::MethodBase::GetTransformationHandler(Bool_t takeReroutedIfAvailable = true) const
virtual UInt_t TObject::GetUniqueID() const
TString TMVA::MethodBase::GetWeightFileName() const
Double_t TMVA::MethodBase::GetXmax(Int_t ivar) const
Double_t TMVA::MethodBase::GetXmin(Int_t ivar) const
virtual Bool_t TObject::HandleTimer(TTimer* timer)
virtual Bool_t HasAnalysisType(TMVA::Types::EAnalysisType type, UInt_t numberClasses, UInt_t)
virtual ULong_t TObject::Hash() const
Bool_t TMVA::MethodBase::HasMVAPdfs() const
virtual void TObject::Info(const char* method, const char* msgfmt) const
virtual Bool_t TObject::InheritsFrom(const char* classname) const
virtual Bool_t TObject::InheritsFrom(const TClass* cl) const
virtual void TObject::Inspect() const
void TObject::InvertBit(UInt_t f)
virtual TClass* IsA() const
virtual Bool_t TObject::IsEqual(const TObject* obj) const
virtual Bool_t TObject::IsFolder() const
Bool_t TObject::IsOnHeap() const
virtual Bool_t TMVA::MethodBase::IsSignalLike()
virtual Bool_t TMVA::MethodBase::IsSignalLike(Double_t mvaVal)
virtual Bool_t TObject::IsSortable() const
Bool_t TObject::IsZombie() const
virtual void TObject::ls(Option_t* option = "") const
virtual void TMVA::MethodBase::MakeClass(const TString& classFileName = TString("")) const
void TObject::MayNotUse(const char* method) const
TDirectory* TMVA::MethodBase::MethodBaseDir() const
TMVA::MethodCFMlpANN MethodCFMlpANN(TMVA::DataSetInfo& theData, const TString& theWeightFile, TDirectory* theTargetDir = NULL)
TMVA::MethodCFMlpANN MethodCFMlpANN(const TString& jobName, const TString& methodTitle, TMVA::DataSetInfo& theData, const TString& theOption = "3000:N-1:N-2", TDirectory* theTargetDir = 0)
virtual Bool_t TMVA::MethodBase::MonitorBoost(TMVA::MethodBoost*)
virtual Bool_t TObject::Notify()
void TObject::Obsolete(const char* method, const char* asOfVers, const char* removedFromVers) const
static void TObject::operator delete(void* ptr)
static void TObject::operator delete(void* ptr, void* vp)
static void TObject::operator delete[](void* ptr)
static void TObject::operator delete[](void* ptr, void* vp)
void* TObject::operator new(size_t sz)
void* TObject::operator new(size_t sz, void* vp)
void* TObject::operator new[](size_t sz)
void* TObject::operator new[](size_t sz, void* vp)
TMVA::IMethod& TMVA::IMethod::operator=(const TMVA::IMethod&)
virtual map<TString,Double_t> TMVA::MethodBase::OptimizeTuningParameters(TString fomType = "ROCIntegral", TString fitType = "FitGA")
virtual void TObject::Paint(Option_t* option = "")
virtual void TMVA::Configurable::ParseOptions()
virtual void TObject::Pop()
virtual void TObject::Print(Option_t* option = "") const
virtual void TMVA::MethodBase::PrintHelpMessage() const
void TMVA::Configurable::PrintOptions() const
void TMVA::MethodBase::ProcessSetup()
virtual Int_t TObject::Read(const char* name)
void TMVA::Configurable::ReadOptionsFromStream(istream& istr)
void TMVA::Configurable::ReadOptionsFromXML(void* node)
void TMVA::MethodBase::ReadStateFromFile()
void TMVA::MethodBase::ReadStateFromStream(istream& tf)
void TMVA::MethodBase::ReadStateFromStream(TFile& rf)
void TMVA::MethodBase::ReadStateFromXMLString(const char* xmlstr)
virtual void ReadWeightsFromStream(istream& istr)
virtual void ReadWeightsFromXML(void* wghtnode)
virtual void TObject::RecursiveRemove(TObject* obj)
void TMVA::MethodBase::RerouteTransformationHandler(TMVA::TransformationHandler* fTargetTransformation)
virtual void TMVA::MethodBase::Reset()
void TObject::ResetBit(UInt_t f)
virtual void TObject::SaveAs(const char* filename = "", Option_t* option = "") const
virtual void TObject::SavePrimitive(ostream& out, Option_t* option = "")
virtual void TMVA::MethodBase::SetAnalysisType(TMVA::Types::EAnalysisType type)
void TMVA::MethodBase::SetBaseDir(TDirectory* methodDir)
void TObject::SetBit(UInt_t f)
void TObject::SetBit(UInt_t f, Bool_t set)
void TMVA::Configurable::SetConfigDescription(const char* d)
void TMVA::Configurable::SetConfigName(const char* n)
virtual void TMVA::MethodBase::SetCurrentEvent(Long64_t ievt) const
virtual void TObject::SetDrawOption(Option_t* option = "")
static void TObject::SetDtorOnly(void* obj)
void TMVA::MethodBase::SetMethodBaseDir(TDirectory* methodDir)
void TMVA::MethodBase::SetMethodDir(TDirectory* methodDir)
void TMVA::Configurable::SetMsgType(TMVA::EMsgType t)
static void TObject::SetObjectStat(Bool_t stat)
void TMVA::Configurable::SetOptions(const TString& s)
void TMVA::MethodBase::SetSignalReferenceCut(Double_t cut)
void TMVA::MethodBase::SetSignalReferenceCutOrientation(Double_t cutOrientation)
void TMVA::MethodBase::SetTestTime(Double_t testTime)
void TMVA::MethodBase::SetTestvarName(const TString& v = "")
void TMVA::MethodBase::SetTrainTime(Double_t trainTime)
virtual void TMVA::MethodBase::SetTuneParameters(map<TString,Double_t> tuneParameters)
virtual void TObject::SetUniqueID(UInt_t uid)
void TMVA::MethodBase::SetupMethod()
virtual void ShowMembers(TMemberInspector& insp)
virtual void Streamer(TBuffer& b)
void StreamerNVirtual(TBuffer& b)
virtual void TObject::SysError(const char* method, const char* msgfmt) const
Bool_t TObject::TestBit(UInt_t f) const
Int_t TObject::TestBits(UInt_t f) const
virtual void TMVA::MethodBase::TestClassification()
virtual void TMVA::MethodBase::TestMulticlass()
virtual void TMVA::MethodBase::TestRegression(Double_t& bias, Double_t& biasT, Double_t& dev, Double_t& devT, Double_t& rms, Double_t& rmsT, Double_t& mInf, Double_t& mInfT, Double_t& corr, TMVA::Types::ETreeType type)
static TMVA::MethodCFMlpANN* This()
virtual void Train()
void TMVA::MethodBase::TrainMethod()
virtual void TObject::UseCurrentStyle()
virtual void TObject::Warning(const char* method, const char* msgfmt) const
virtual Int_t TObject::Write(const char* name = 0, Int_t option = 0, Int_t bufsize = 0)
virtual Int_t TObject::Write(const char* name = 0, Int_t option = 0, Int_t bufsize = 0) const
virtual void TMVA::MethodBase::WriteEvaluationHistosToFile(TMVA::Types::ETreeType treetype)
virtual void TMVA::MethodBase::WriteMonitoringHistosToFile() const
void TMVA::Configurable::WriteOptionsToStream(ostream& o, const TString& prefix) const
void TMVA::MethodBase::WriteStateToFile() const
protected:
virtual Int_t DataInterface(Double_t*, Double_t*, Int_t*, Int_t*, Int_t*, Int_t*, Double_t*, Int_t*, Int_t*)
virtual void TObject::DoError(int level, const char* location, const char* fmt, va_list va) const
void TMVA::Configurable::EnableLooseOptions(Bool_t b = kTRUE)
virtual void GetHelpMessage() const
const TString& TMVA::MethodBase::GetInternalVarName(Int_t ivar) const
const TString& TMVA::MethodBase::GetOriginalVarName(Int_t ivar) const
const TString& TMVA::Configurable::GetReferenceFile() const
static TMVA::MethodBase* TMVA::MethodBase::GetThisBase()
Float_t TMVA::MethodBase::GetTWeight(const TMVA::Event* ev) const
const TString& TMVA::MethodBase::GetWeightFileDir() const
Bool_t TMVA::MethodBase::HasTrainingTree() const
Bool_t TMVA::MethodBase::Help() const
Bool_t TMVA::MethodBase::IgnoreEventsWithNegWeightsInTraining() const
Bool_t TMVA::MethodBase::IsConstructedFromWeightFile() const
Bool_t TMVA::MethodBase::IsNormalised() const
TMVA::MsgLogger& TMVA::Configurable::Log() const
Bool_t TMVA::Configurable::LooseOptionCheckingEnabled() const
virtual void MakeClassSpecific(ostream&, const TString&) const
virtual void MakeClassSpecificHeader(ostream&, const TString& = "") const
void TObject::MakeZombie()
void TMVA::MethodBase::NoErrorCalc(Double_t *const err, Double_t *const errUpper)
void TMVA::Configurable::ResetSetFlag()
void TMVA::MethodBase::SetNormalised(Bool_t norm)
void TMVA::MethodBase::SetWeightFileDir(TString fileDir)
void TMVA::MethodBase::SetWeightFileName(TString)
void TMVA::MethodBase::Statistics(TMVA::Types::ETreeType treeType, const TString& theVarName, Double_t&, Double_t&, Double_t&, Double_t&, Double_t&, Double_t&)
Bool_t TMVA::MethodBase::TxtWeightsOnly() const
Bool_t TMVA::MethodBase::Verbose() const
void TMVA::Configurable::WriteOptionsReferenceToFile()
private:
void TMVA::MethodCFMlpANN_Utils::Arret(const char* mot)
void TMVA::MethodCFMlpANN_Utils::CollectVar(Int_t* nvar, Int_t* class__, Double_t* xpg)
void TMVA::MethodCFMlpANN_Utils::Cout(Int_t*, Double_t* xxx)
void TMVA::MethodCFMlpANN_Utils::Cout2(Int_t*, Double_t* yyy)
virtual void DeclareOptions()
void TMVA::MethodCFMlpANN_Utils::En_arriere(Int_t* ievent)
void TMVA::MethodCFMlpANN_Utils::En_avant(Int_t* ievent)
void TMVA::MethodCFMlpANN_Utils::En_avant2(Int_t* ievent)
void TMVA::MethodCFMlpANN_Utils::Entree_new(Int_t*, char*, Int_t* ntrain, Int_t* ntest, Int_t* numlayer, Int_t* nodes, Int_t* numcycle, Int_t)
Double_t EvalANN(vector<Double_t>&, Bool_t& isOK)
Double_t TMVA::MethodCFMlpANN_Utils::Fdecroi(Int_t* i__)
void TMVA::MethodCFMlpANN_Utils::Foncf(Int_t* i__, Double_t* u, Double_t* f)
void TMVA::MethodCFMlpANN_Utils::GraphNN(Int_t* ilearn, Double_t*, Double_t*, char*, Int_t)
virtual void Init()
void TMVA::MethodCFMlpANN_Utils::Inl()
void TMVA::MethodCFMlpANN_Utils::Innit(char* det, Double_t* tout2, Double_t* tin2, Int_t)
void TMVA::MethodCFMlpANN_Utils::Lecev2(Int_t* ktest, Double_t* tout2, Double_t* tin2)
void TMVA::MethodCFMlpANN_Utils::Leclearn(Int_t* ktest, Double_t* tout2, Double_t* tin2)
void NN_ava(Double_t*)
Double_t NN_fonc(Int_t, Double_t) const
void TMVA::MethodCFMlpANN_Utils::Out(Int_t* iii, Int_t* maxcycle)
void PrintWeights(ostream& o) const
virtual void ProcessOptions()
Double_t TMVA::MethodCFMlpANN_Utils::Sen3a()
void TMVA::MethodCFMlpANN_Utils::SetLogger(TMVA::MsgLogger* l)
void TMVA::MethodCFMlpANN_Utils::TestNN()
void TMVA::MethodCFMlpANN_Utils::Train_nn(Double_t* tin2, Double_t* tout2, Int_t* ntrain, Int_t* ntest, Int_t* nvar2, Int_t* nlayer, Int_t* nodes, Int_t* ncycle)
Double_t TMVA::MethodCFMlpANN_Utils::W_ref(const Double_t* wNN, Int_t a_1, Int_t a_2, Int_t a_3) const
Double_t& TMVA::MethodCFMlpANN_Utils::W_ref(Double_t* wNN, Int_t a_1, Int_t a_2, Int_t a_3)
void TMVA::MethodCFMlpANN_Utils::Wini()
Double_t TMVA::MethodCFMlpANN_Utils::Ww_ref(const Double_t* wwNN, Int_t a_1, Int_t a_2) const
Double_t& TMVA::MethodCFMlpANN_Utils::Ww_ref(Double_t* wwNN, Int_t a_1, Int_t a_2)

Data Members

public:
enum TMVA::MethodBase::EWeightFileType { kROOT, kTEXT };
enum TObject::EStatusBits { kCanDelete, kMustCleanup, kObjInCanvas, kIsReferenced, kHasUUID, kCannotPick, kNoContextMenu, kInvalidObject };
enum TObject::[unnamed] { kIsOnHeap, kNotDeleted, kZombie, kBitMask, kSingleKey, kOverwrite, kWriteDelete };
public:
Bool_t TMVA::MethodBase::fSetupCompleted   is method setup
const TMVA::Event* TMVA::MethodBase::fTmpEvent   ! temporary event when testing on a different DataSet than the own one
protected:
TMVA::Types::EAnalysisType TMVA::MethodBase::fAnalysisType   method-mode : true --> regression, false --> classification
UInt_t TMVA::MethodBase::fBackgroundClass   index of the Background-class
vector<TString>* TMVA::MethodBase::fInputVars   vector of input variables used in MVA
vector<Float_t>* TMVA::MethodBase::fMulticlassReturnVal   holds the return-values for the multiclass classification
Int_t TMVA::MethodBase::fNbins   number of bins in input variable histograms
Int_t TMVA::MethodBase::fNbinsH   number of bins in evaluation histograms
Int_t TMVA::MethodBase::fNbinsMVAoutput   number of bins in MVA output histograms
TMVA::Ranking* TMVA::MethodBase::fRanking   pointer to ranking object (created by derived classifiers)
vector<Float_t>* TMVA::MethodBase::fRegressionReturnVal   holds the return-values for the regression
UInt_t TMVA::MethodBase::fSignalClass   index of the Signal-class
private:
vector<Int_t>* fClass   the event class (1=signal, 2=background)
TMVA::MethodCFMlpANN_Utils::[unnamed] TMVA::MethodCFMlpANN_Utils::fCost_1
TMatrixF* fData   the (data,var) string
TMVA::MethodCFMlpANN_Utils::[unnamed] TMVA::MethodCFMlpANN_Utils::fDel_1
TString fLayerSpec   the hidden layer specification string
Int_t fNcycles   number of training cycles
TMVA::MethodCFMlpANN_Utils::[unnamed] TMVA::MethodCFMlpANN_Utils::fNeur_1
Int_t fNlayers   number of layers (including input and output layers)
Int_t* fNodes   number of nodes per layer
TMVA::MethodCFMlpANN_Utils::[unnamed] TMVA::MethodCFMlpANN_Utils::fParam_1
TMVA::MethodCFMlpANN_Utils::VARn2 TMVA::MethodCFMlpANN_Utils::fVarn2_1
TMVA::MethodCFMlpANN_Utils::VARn2 TMVA::MethodCFMlpANN_Utils::fVarn3_1
TMVA::MethodCFMlpANN_Utils::[unnamed] TMVA::MethodCFMlpANN_Utils::fVarn_1
Double_t** fYNN   weights
static TMVA::MethodCFMlpANN* fgThis   this carrier
static Int_t TMVA::MethodCFMlpANN_Utils::fg_0   constant
static Int_t TMVA::MethodCFMlpANN_Utils::fg_100   constant
static Int_t TMVA::MethodCFMlpANN_Utils::fg_999   constant
static const char* TMVA::MethodCFMlpANN_Utils::fg_MethodName   method name for print
static Int_t TMVA::MethodCFMlpANN_Utils::fg_max_nNodes_   maximum number of nodes per variable
static Int_t TMVA::MethodCFMlpANN_Utils::fg_max_nVar_   static maximum number of input variables


Function documentation

MethodCFMlpANN(const TString& jobName, const TString& methodTitle, TMVA::DataSetInfo& theData, const TString& theOption = "3000:N-1:N-2", TDirectory* theTargetDir = 0)
 standard constructor
 option string: "n_training_cycles:n_nodes_hidden_1:n_nodes_hidden_2:..." (one field per hidden layer)
 default is:  n_training_cycles = 3000, n_layers = 4

 * note that the number of hidden layers in the NN is:
   n_hidden_layers = n_layers - 2

 * since there is one input and one output layer. The number of
   nodes (neurons) is predefined to be:
   n_nodes[i] = nvars + 1 - i
   (where i = 1..n_layers-1; the output layer always has 2 neurons)

   with nvars being the number of variables used in the NN.

 Hence, the default case is: n_neurons(layer 1 (input)) : nvars
                             n_neurons(layer 2 (hidden)): nvars-1
                             n_neurons(layer 3 (hidden)): nvars-2
                             n_neurons(layer 4 (out))   : 2
 (a short numeric sketch of this layout is given after this description)

 This artificial neural network usually needs a relatively large
 number of cycles to converge (8000 and more). Overtraining can
 be efficiently tested by comparing the signal and background
 output of the NN for the events that were used for training and
 for an independent data sample (with equal properties). If the
 separation performance is significantly better for the training
 sample, the NN has picked up statistical fluctuations of the
 training data and is hence overtrained. In this case, the number
 of cycles should be reduced, or the size of the training sample
 increased.
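 As a concrete illustration of the default layout described above, a small
 helper (hypothetical, not part of the class) that prints the node counts:

    // Illustrative only: default CFMlpANN layer layout for nvars input variables,
    // following n_nodes[i] = nvars + 1 - i for the input and hidden layers and a
    // fixed 2-node output layer. E.g. nvars = 4 gives 4, 3, 2 and 2 nodes.
    #include <cstdio>

    void PrintDefaultLayout(int nvars, int nlayers = 4)
    {
       for (int i = 1; i < nlayers; ++i)
          std::printf("layer %d: %d nodes\n", i, nvars + 1 - i);
       std::printf("layer %d (output): 2 nodes\n", nlayers);
    }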
MethodCFMlpANN(TMVA::DataSetInfo& theData, const TString& theWeightFile, TDirectory* theTargetDir = NULL)
 constructor from weight file
Bool_t HasAnalysisType(TMVA::Types::EAnalysisType type, UInt_t numberClasses, UInt_t )
 CFMlpANN can handle classification with 2 classes
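 Consistent with this statement, the check presumably reduces to accepting
 two-class classification and rejecting everything else; a minimal sketch,
 not a verbatim copy of the implementation:

    // Sketch of the analysis-type check: only two-class classification is accepted.
    Bool_t HasAnalysisTypeSketch( TMVA::Types::EAnalysisType type, UInt_t numberClasses )
    {
       return (type == TMVA::Types::kClassification && numberClasses == 2);
    }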
void DeclareOptions()
 define the options (their key words) that can be set in the option string
 known options: NCycles=xx              :the number of training cycles
                HiddenLayers="N-1,N-2"  :the specification of the hidden layers
void ProcessOptions()
 decode the options in the option string
void Init( void )
 default initialisation called by all constructors
~MethodCFMlpANN( void )
 destructor
void Train( void )
 training of the Clermont-Ferrand NN classifier
Double_t GetMvaValue(Double_t* err = 0, Double_t* errUpper = 0)
 returns CFMlpANN output (normalised within [0,1])
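 In a typical application the classifier is not called through GetMvaValue()
 directly but evaluated via a TMVA::Reader; a minimal sketch, assuming two
 input variables and a weight file produced by the training step (variable
 names and file path are placeholders):

    #include <cstdio>
    #include "TMVA/Reader.h"

    // Sketch: book the trained CFMlpANN from its weight file and evaluate one event.
    void EvaluateCFMlpANNExample()
    {
       Float_t var1 = 0.f, var2 = 0.f;
       TMVA::Reader reader("!Color:!Silent");
       reader.AddVariable("var1", &var1);
       reader.AddVariable("var2", &var2);
       reader.BookMVA("CFMlpANN", "weights/TMVAClassification_CFMlpANN.weights.xml");

       var1 = 0.5f; var2 = -1.2f;                      // values of the current event
       Double_t mva = reader.EvaluateMVA("CFMlpANN");  // output, normalised to [0,1]
       std::printf("CFMlpANN response: %f\n", mva);
    }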
Double_t EvalANN(vector<Double_t>& , Bool_t& isOK)
 evaluates NN value as function of input variables
void NN_ava(Double_t* )
 auxiliary function used in the network evaluation
Double_t NN_fonc(Int_t , Double_t ) const
 activation function
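 The exact internal form is not documented here; as an illustration of a
 sigmoid-type transfer function of the kind mentioned in the class
 description (a sketch, not the actual NN_fonc code):

    #include <cmath>

    // Illustrative sigmoid-type transfer function: maps any real input smoothly
    // into (-1, 1); equivalent to tanh(u / (2*temperature)).
    double SigmoidLike(double u, double temperature = 1.0)
    {
       const double y = std::exp(-u / temperature);
       return (1.0 - y) / (1.0 + y);
    }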
void ReadWeightsFromStream(istream& istr)
 read back the weights obtained from the training from file (stream)
Int_t DataInterface(Double_t* , Double_t* , Int_t* , Int_t* , Int_t* , Int_t* , Double_t* , Int_t* , Int_t* )
 data interface function
void AddWeightsXMLTo(void* parent) const
 write weights to xml file
void ReadWeightsFromXML(void* wghtnode)
 read weights from xml file
void PrintWeights(ostream& o) const
 write the weights of the neural net
TMVA::MethodCFMlpANN* This( void )
 static pointer to this object (required for external functions)
void MakeClassSpecific(ostream& , const TString& ) const
 write specific classifier response
void MakeClassSpecificHeader(ostream& , const TString& = "") const
 write specific classifier response for header
void GetHelpMessage() const
 get help message text

 typical length of text line:
         "|--------------------------------------------------------------|"
Double_t GetData(Int_t isel, Int_t ivar) const
 data accessors for external functions
{ return (*fData)(isel, ivar); }
Int_t GetClass(Int_t ivar) const
{ return (*fClass)[ivar]; }
const Ranking* CreateRanking()
 ranking of input variables (not implemented for CFMlpANN; returns a null pointer)
{ return 0; }