ROOT 6.08/07 Reference Guide
TActivationReLU.h
// @(#)root/tmva $Id$
// Author: Matt Jachowski

/***********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis        *
 * Package: TMVA                                                                    *
 * Class  : TMVA::TActivationReLU                                                   *
 * Web    : http://tmva.sourceforge.net                                             *
 *                                                                                  *
 * Description:                                                                     *
 *       Rectified Linear Unit (ReLU) activation function for TNeuron              *
 *                                                                                  *
 * Authors (alphabetical):                                                          *
 *       Matt Jachowski  <jachowski@stanford.edu> - Stanford University, USA        *
 *                                                                                  *
 * Copyright (c) 2005:                                                              *
 *       CERN, Switzerland                                                          *
 *                                                                                  *
 * Redistribution and use in source and binary forms, with or without               *
 * modification, are permitted according to the terms listed in LICENSE             *
 * (http://tmva.sourceforge.net/LICENSE)                                            *
 **********************************************************************************/

#ifndef ROOT_TMVA_TActivationReLU
#define ROOT_TMVA_TActivationReLU

//////////////////////////////////////////////////////////////////////////
//                                                                      //
// TActivationReLU                                                      //
//                                                                      //
// Rectified Linear Unit activation function for TNeuron                //
//                                                                      //
//////////////////////////////////////////////////////////////////////////

#ifndef ROOT_TFormula
#include "TFormula.h"
#endif
#ifndef ROOT_TString
#include "TString.h"
#endif

#ifndef ROOT_TMVA_TActivation
#include "TMVA/TActivation.h"
#endif

namespace TMVA {

   class TActivationReLU : public TActivation {

   public:

      TActivationReLU();
      ~TActivationReLU();

      // evaluate the activation function
      Double_t Eval(Double_t arg) { return arg>0 ? arg : 0; }

      // evaluate the derivative of the activation function
      Double_t EvalDerivative(Double_t arg) { return arg>0 ? 1 : 0; }

      // minimum of the range of the activation function
      Double_t GetMin() { return -1; }

      // maximum of the range of the activation function
      Double_t GetMax() { return 1; }

      // expression for the activation function
      TString GetExpression();

      // writer of function code
      virtual void MakeFunction(std::ostream& fout, const TString& fncName);

   private:

      ClassDef(TActivationReLU,0); // Rectified Linear Unit (ReLU) activation function for TNeuron
   };

} // namespace TMVA

#endif // ROOT_TMVA_TActivationReLU
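
For illustration, the following is a minimal sketch of how the class behaves. The macro name relu_demo and its file are assumptions, not part of TMVA; it presumes a ROOT build in which the TMVA library is loadable, since the constructor, destructor, GetExpression and MakeFunction are defined out of line in TActivationReLU.cxx.

// relu_demo.C -- hypothetical standalone ROOT macro exercising TMVA::TActivationReLU
#include <iostream>

#include "TMVA/TActivationReLU.h"

void relu_demo()
{
   TMVA::TActivationReLU relu;

   // Eval() passes positive arguments through unchanged and clips negatives to zero.
   std::cout << relu.Eval( 2.5) << std::endl;   // prints 2.5
   std::cout << relu.Eval(-1.0) << std::endl;   // prints 0

   // EvalDerivative() returns the corresponding slope: 1 above zero, 0 below.
   std::cout << relu.EvalDerivative( 2.5) << std::endl;   // prints 1
   std::cout << relu.EvalDerivative(-1.0) << std::endl;   // prints 0
}

Running it as `root -l relu_demo.C` (assuming the file name above) would print the four values in order.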