TActivationReLU.h
// @(#)root/tmva $Id$
// Author: Matt Jachowski

/***********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis        *
 * Package: TMVA                                                                    *
 * Class  : TMVA::TActivationReLU                                                   *
 * Web    : http://tmva.sourceforge.net                                             *
 *                                                                                  *
 * Description:                                                                     *
 *      Rectified Linear Unit activation function for TNeuron                       *
 *                                                                                  *
 * Authors (alphabetical):                                                          *
 *      Matt Jachowski <jachowski@stanford.edu> - Stanford University, USA          *
 *                                                                                  *
 * Copyright (c) 2005:                                                              *
 *      CERN, Switzerland                                                           *
 *                                                                                  *
 * Redistribution and use in source and binary forms, with or without               *
 * modification, are permitted according to the terms listed in LICENSE             *
 * (http://tmva.sourceforge.net/LICENSE)                                            *
 ***********************************************************************************/

#ifndef ROOT_TMVA_TActivationReLU
#define ROOT_TMVA_TActivationReLU

//////////////////////////////////////////////////////////////////////////
//                                                                        //
// TActivationReLU                                                        //
//                                                                        //
// Rectified Linear Unit activation function for TNeuron                  //
//                                                                        //
//////////////////////////////////////////////////////////////////////////

#include "TString.h"

#include "TMVA/TActivation.h"

namespace TMVA {

   class TActivationReLU : public TActivation {

   public:
      TActivationReLU() {}
      ~TActivationReLU() {}

      // evaluate the activation function
      Double_t Eval(Double_t arg) { return arg>0 ? arg : 0;}

      // evaluate the derivative of the activation function
      Double_t EvalDerivative(Double_t arg) { return arg>0 ? 1 : 0;}

      // minimum of the range of the activation function
      Double_t GetMin() { return -1; }

      // maximum of the range of the activation function
      Double_t GetMax() { return 1; }

      // expression for the activation function
      TString GetExpression();

      // writer of function code
      virtual void MakeFunction(std::ostream& fout, const TString& fncName);

   private:
      ClassDef(TActivationReLU, 0); // Rectified Linear Unit activation function for TNeuron
   };

} // namespace TMVA

#endif
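
Below is a minimal usage sketch (not part of the header) illustrating the behaviour declared above: Eval computes ReLU(x) = max(0, x) and EvalDerivative returns 1 for x > 0 and 0 otherwise. It assumes a working ROOT/TMVA installation so that the TMVA/TActivationReLU.h header and the TMVA library are available; the program name and values are only illustrative.

// Sketch only: exercises TMVA::TActivationReLU directly (assumes a ROOT/TMVA build).
#include <iostream>

#include "TMVA/TActivationReLU.h"

int main()
{
   TMVA::TActivationReLU relu;

   // ReLU(x) = max(0, x)
   std::cout << relu.Eval( 2.5) << std::endl;   // prints 2.5
   std::cout << relu.Eval(-1.0) << std::endl;   // prints 0

   // derivative of ReLU: 1 for x > 0, otherwise 0
   std::cout << relu.EvalDerivative( 2.5) << std::endl;   // prints 1
   std::cout << relu.EvalDerivative(-1.0) << std::endl;   // prints 0

   return 0;
}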