
class TNeuron: public TNamed


 TNeuron

 This class describes an elementary neuron, which is the basic
 element of a neural network.
 A network is built by connecting neurons with synapses.
 There are different types of neurons: linear (a+bx),
 sigmoid (1/(1+exp(-x))), tanh or Gaussian.
 An external function can also be used, together with its derivative.
 In a Multi-Layer Perceptron, the input layer is made of
 inactive neurons (returning the normalized input) and the output
 neurons are linear. Hidden neurons may be of any type, the default being sigmoid.

 This implementation provides several methods to compute the value,
 the derivative, the DeDw, etc.
 Values are stored in local buffers. The SetNewEvent() method is
 used to signal that the buffered values are outdated.
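
 As an illustration (a minimal sketch, not the usual workflow: the network
 class normally creates neurons and synapses itself), a small chain of
 neurons could be wired as follows. The TSynapse(pre, post) constructor is
 assumed here to register itself with both neurons.

 #include "TNeuron.h"
 #include "TSynapse.h"

 void neuron_sketch()
 {
    // One neuron per layer type mentioned above.
    TNeuron input(TNeuron::kOff);       // input neuron: returns the normalized input
    TNeuron hidden(TNeuron::kSigmoid);  // default hidden type
    TNeuron output(TNeuron::kLinear);   // output neurons are linear

    // Connect input -> hidden -> output with synapses.
    TSynapse s1(&input, &hidden);
    TSynapse s2(&hidden, &output);

    hidden.SetWeight(0.1);              // the neuron weight is the bias (see SetWeight)
 }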


Function Members (Methods)

public:
TNeuron(TNeuron::ENeuronType type = kSigmoid, const char* name = "", const char* title = "", const char* extF = "", const char* extD = "")
virtual ~TNeuron()
void TObject::AbstractMethod(const char* method) const
void AddInLayer(TNeuron*)
virtual void TObject::AppendPad(Option_t* option = "")
virtual void TObject::Browse(TBrowser* b)
static TClass* Class()
virtual const char* TObject::ClassName() const
virtual void TNamed::Clear(Option_t* option = "")
virtual TObject* TNamed::Clone(const char* newname = "") const
virtual Int_t TNamed::Compare(const TObject* obj) const
virtual void TNamed::Copy(TObject& named) const
virtual void TObject::Delete(Option_t* option = "")
virtual Int_t TObject::DistancetoPrimitive(Int_t px, Int_t py)
virtual void TObject::Draw(Option_t* option = "")
virtual void TObject::DrawClass() const
virtual TObject* TObject::DrawClone(Option_t* option = "") const
virtual void TObject::Dump() const
virtual void TObject::Error(const char* method, const char* msgfmt) const
virtual void TObject::Execute(const char* method, const char* params, Int_t* error = 0)
virtual void TObject::Execute(TMethod* method, TObjArray* params, Int_t* error = 0)
virtual void TObject::ExecuteEvent(Int_t event, Int_t px, Int_t py)
virtual void TObject::Fatal(const char* method, const char* msgfmt) const
virtual void TNamed::FillBuffer(char*& buffer)
virtual TObject* TObject::FindObject(const char* name) const
virtual TObject* TObject::FindObject(const TObject* obj) const
void ForceExternalValue(Double_t value)
Double_t GetBranch() const
Double_t GetDeDw() const
Double_t GetDEDw() const
Double_t GetDerivative() const
virtual Option_t* TObject::GetDrawOption() const
static Long_t TObject::GetDtorOnly()
Double_t GetError() const
virtual const char* TObject::GetIconName() const
TNeuron* GetInLayer(Int_t n) const
Double_t GetInput() const
virtual const char* TNamed::GetName() const
const Double_t* GetNormalisation() const
virtual char* TObject::GetObjectInfo(Int_t px, Int_t py) const
static Bool_t TObject::GetObjectStat()
virtual Option_t* TObject::GetOption() const
TSynapse* GetPost(Int_t n) const
TSynapse* GetPre(Int_t n) const
Double_t GetTarget() const
virtual const char* TNamed::GetTitle() const
TNeuron::ENeuronType GetType() const
virtual UInt_t TObject::GetUniqueID() const
Double_t GetValue() const
Double_t GetWeight() const
virtual Bool_t TObject::HandleTimer(TTimer* timer)
virtual ULong_t TNamed::Hash() const
virtual void TObject::Info(const char* method, const char* msgfmt) const
virtual Bool_t TObject::InheritsFrom(const char* classname) const
virtual Bool_t TObject::InheritsFrom(const TClass* cl) const
virtual void TObject::Inspect() const
void TObject::InvertBit(UInt_t f)
virtual TClass* IsA() const
virtual Bool_t TObject::IsEqual(const TObject* obj) const
virtual Bool_t TObject::IsFolder() const
Bool_t TObject::IsOnHeap() const
virtual Bool_t TNamed::IsSortable() const
Bool_t TObject::IsZombie() const
virtual void TNamed::ls(Option_t* option = "") const
void TObject::MayNotUse(const char* method) const
virtual Bool_t TObject::Notify()
void TObject::Obsolete(const char* method, const char* asOfVers, const char* removedFromVers) const
static void TObject::operator delete(void* ptr)
static void TObject::operator delete(void* ptr, void* vp)
static void TObject::operator delete[](void* ptr)
static void TObject::operator delete[](void* ptr, void* vp)
void* TObject::operator new(size_t sz)
void* TObject::operator new(size_t sz, void* vp)
void* TObject::operator new[](size_t sz)
void* TObject::operator new[](size_t sz, void* vp)
virtual void TObject::Paint(Option_t* option = "")
virtual void TObject::Pop()
virtual void TNamed::Print(Option_t* option = "") const
virtual Int_t TObject::Read(const char* name)
virtual void TObject::RecursiveRemove(TObject* obj)
void TObject::ResetBit(UInt_t f)
virtual void TObject::SaveAs(const char* filename = "", Option_t* option = "") const
virtual void TObject::SavePrimitive(ostream& out, Option_t* option = "")
void TObject::SetBit(UInt_t f)
void TObject::SetBit(UInt_t f, Bool_t set)
void SetDEDw(Double_t in)
virtual void TObject::SetDrawOption(Option_t* option = "")
static void TObject::SetDtorOnly(void* obj)
virtual void TNamed::SetName(const char* name)
virtual void TNamed::SetNameTitle(const char* name, const char* title)
void SetNewEvent() const
void SetNormalisation(Double_t mean, Double_t RMS)
static void TObject::SetObjectStat(Bool_t stat)
virtual void TNamed::SetTitle(const char* title = "")
virtual void TObject::SetUniqueID(UInt_t uid)
void SetWeight(Double_t w)
virtual void ShowMembers(TMemberInspector& insp)
virtual Int_t TNamed::Sizeof() const
virtual void Streamer(TBuffer& b)
void StreamerNVirtual(TBuffer& b)
virtual void TObject::SysError(const char* method, const char* msgfmt) const
Bool_t TObject::TestBit(UInt_t f) const
Int_t TObject::TestBits(UInt_t f) const
TTreeFormula* UseBranch(TTree*, const char*)
virtual void TObject::UseCurrentStyle()
virtual void TObject::Warning(const char* method, const char* msgfmt) const
virtual Int_t TObject::Write(const char* name = 0, Int_t option = 0, Int_t bufsize = 0)
virtual Int_t TObject::Write(const char* name = 0, Int_t option = 0, Int_t bufsize = 0) const
protected:
void AddPost(TSynapse*)
void AddPre(TSynapse*)
virtual void TObject::DoError(int level, const char* location, const char* fmt, va_list va) const
Double_t DSigmoid(Double_t x) const
void TObject::MakeZombie()
Double_t Sigmoid(Double_t x) const
private:
TNeuron(const TNeuron&)
TNeuron& operator=(const TNeuron&)

Data Members

public:
enum ENeuronType { kOff, kLinear, kSigmoid, kTanh, kGauss, kSoftmax, kExternal };
enum TObject::EStatusBits { kCanDelete, kMustCleanup, kObjInCanvas, kIsReferenced, kHasUUID, kCannotPick, kNoContextMenu, kInvalidObject };
enum TObject::[unnamed] { kIsOnHeap, kNotDeleted, kZombie, kBitMask, kSingleKey, kOverwrite, kWriteDelete };
protected:
TString TNamed::fName    object identifier
TString TNamed::fTitle   object title
private:
Double_t fDEDw               ! buffer containing the sum over all examples of DeDw
Double_t fDeDw               ! buffer containing the last derivative of the error
Double_t fDerivative         ! buffer containing the last neuron derivative
TFormula* fExtD              derivative (external mode)
TFormula* fExtF              function (external mode)
TTreeFormula* fFormula       ! formula to be used for inputs and outputs
Int_t fIndex                 ! index in the formula
Double_t fInput              ! buffer containing the last neuron input
Bool_t fNewDeDw              ! do we need to compute fDeDw again?
Bool_t fNewDeriv             ! do we need to compute fDerivative again?
Bool_t fNewInput             ! do we need to compute fInput again?
Bool_t fNewValue             ! do we need to compute fValue again?
Double_t fNorm[2]            normalisation to mean=0, RMS=1
TNeuron::ENeuronType fType   neuron type
Double_t fValue              ! buffer containing the last neuron output
Double_t fWeight             weight used for computation
TObjArray flayer             pointers to the current level in a network (neurons, not synapses)
TObjArray fpost              pointers to the next level in a network
TObjArray fpre               pointers to the previous level in a network


Function documentation

TNeuron(TNeuron::ENeuronType type = kSigmoid, const char* name = "", const char* title = "", const char* extF = "", const char* extD = "")
 Usual constructor
Double_t Sigmoid(Double_t x) const
 The Sigmoid.
 Fast computation of the values of the sigmoid function.
 Uses values of the function up to the seventh order
 tabulated at 700 points.
 Values were computed in long double precision (16 bytes,
 precision to about 37 digits) on an HP computer.
 Some values were checked with Mathematica.
 The result should be correct to ~15 digits (about double
 precision).

 From the mlpfit package (J. Schwindling, 20-Jul-1999).
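
 For reference, the function being approximated is the sigmoid given in the
 class description, 1/(1+exp(-x)). A direct (slower) evaluation, shown purely
 for illustration, would be:

 #include <cmath>

 // Plain sigmoid, for comparison with the tabulated fast version above.
 double sigmoid_ref(double x) { return 1.0 / (1.0 + std::exp(-x)); }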
Double_t DSigmoid(Double_t x) const
 The Derivative of the Sigmoid.
void AddPre(TSynapse* )
 Adds a synapse to the neuron as an input.
 This method is used by the TSynapse while
 connecting two neurons.
void AddPost(TSynapse* )
 Adds a synapse to the neuron as an output.
 This method is used by the TSynapse while
 connecting two neurons.
void AddInLayer(TNeuron* )
 Tells a neuron which neurons form its layer (including itself).
 This is needed for self-normalizing functions, like Softmax.
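
 As an illustration of why the layer is needed, a standard softmax divides
 each neuron's exponentiated input by the sum over the whole layer. The
 sketch below is conceptual only and does not show the exact convention
 used by this class:

 #include <cmath>
 #include <vector>

 // Conceptual softmax: the output of one neuron depends on the inputs of
 // every neuron in its layer, hence the need for AddInLayer().
 double softmax_value(double ownInput, const std::vector<double>& layerInputs)
 {
    double sum = 0.;
    for (double x : layerInputs)   // layerInputs includes ownInput
       sum += std::exp(x);
    return std::exp(ownInput) / sum;
 }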
TTreeFormula* UseBranch(TTree* , const char* )
 Sets a formula that can be used to make the neuron an input.
 The formula is automatically normalized to mean=0, RMS=1.
 This normalisation is used by GetValue() (input neurons)
 and GetError() (output neurons).
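
 A usage sketch is shown below; the file, tree and branch names ("data.root",
 "tree", "x") are hypothetical, while UseBranch(), SetNewEvent() and
 GetValue() are the methods documented on this page.

 #include "TFile.h"
 #include "TTree.h"
 #include "TNeuron.h"

 void input_sketch()
 {
    TFile f("data.root");
    TTree* t = (TTree*) f.Get("tree");

    TNeuron input(TNeuron::kOff, "in1");
    input.UseBranch(t, "x");         // formula "x", normalized to mean=0, RMS=1

    t->GetEntry(0);                  // load an event
    input.SetNewEvent();             // buffered values are now outdated
    Double_t v = input.GetValue();   // (branch - mean)/RMS for an input neuron
    (void) v;
 }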
Double_t GetBranch() const
 Returns the formula value.
Double_t GetInput() const
 Returns the neuron input.
Double_t GetValue() const
 Computes the output using the appropriate function and all
 the weighted inputs, or uses the branch as input.
 In that case, the branch normalisation is also used.
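
 Conceptually, for a non-input neuron the value is the activation applied to
 the neuron weight (the bias, see SetWeight()) plus the weighted sum of the
 inputs. The sketch below illustrates this for a sigmoid neuron; it is not
 the actual implementation and all names are made up:

 #include <cmath>
 #include <vector>

 // Value of a sigmoid neuron given its bias, its input values and the
 // weights of the incoming synapses (conceptual sketch only).
 double neuron_value(double bias,
                     const std::vector<double>& inputs,
                     const std::vector<double>& synapseWeights)
 {
    double x = bias;
    for (size_t i = 0; i < inputs.size(); ++i)
       x += synapseWeights[i] * inputs[i];
    return 1.0 / (1.0 + std::exp(-x));   // sigmoid activation
 }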
Double_t GetDerivative() const
 computes the derivative for the appropriate function
 at the working point
Double_t GetError() const
 Computes the error for output neurons.
 Returns 0 for other neurons.
Double_t GetTarget() const
 Computes the normalized target pattern for output neurons.
 Returns 0 for other neurons.
Double_t GetDeDw() const
 Computes the derivative of the error wrt the neuron weight.
void ForceExternalValue(Double_t value)
 Uses the branch type to force an external value.
void SetNormalisation(Double_t mean, Double_t RMS)
 Sets the normalization variables.
 Any input neuron will return (branch-mean)/RMS.
 When UseBranch is called, mean and RMS are automatically set
 to the actual branch mean and RMS.
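
 A small sketch (the mean and RMS values here are made up for illustration):

 #include "TNeuron.h"

 void normalisation_sketch()
 {
    TNeuron input(TNeuron::kOff);
    input.SetNormalisation(5., 2.);   // hypothetical mean = 5, RMS = 2
    // The input neuron will now return (branch - 5.) / 2.
 }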
void SetWeight(Double_t w)
 Sets the neuron weight to w.
 The neuron weight corresponds to the bias in the
 linear combination of the inputs.
void SetNewEvent() const
 Informs the neuron that the inputs of the network have changed,
 so that the buffered values have to be recomputed.
void SetDEDw(Double_t in)
 Sets the derivative of the total error wrt the neuron weight.
TNeuron(TNeuron::ENeuronType type = kSigmoid, const char* name = "", const char* title = "", const char* extF = "", const char* extD = "")
{}
TSynapse* GetPre(Int_t n) const
{ return (TSynapse*) fpre.At(n); }
TSynapse* GetPost(Int_t n) const
{ return (TSynapse*) fpost.At(n); }
TNeuron* GetInLayer(Int_t n) const
{ return (TNeuron*) flayer.At(n); }
ENeuronType GetType() const
Double_t GetWeight() const
{ return fWeight; }
const Double_t* GetNormalisation() const
{ return fNorm; }
Double_t GetDEDw() const
{ return fDEDw; }
TNeuron& operator=(const TNeuron& )