template <typename AFloat>
template <typename AFloat>
template <typename AFloat>
template <typename AFloat>
template <typename AFloat>
template <typename AFloat>
template <typename AFloat>
template <typename AFloat>
template <typename AFloat>
template <typename AFloat>
template <typename AFloat>
template <typename AFloat>
template <typename AFloat>
template <typename AFloat>
template <typename AFloat>
cudaStream_t GetComputeStream() const
const AFloat * GetDataPointer() const
void SetComputeStream(cudaStream_t stream)
static void SoftSignDerivative(Tensor_t &B, const Tensor_t &A)
static void SymmetricReluDerivative(Tensor_t &B, const Tensor_t &A)
static void IdentityDerivative(Tensor_t &B, const Tensor_t &A)
static void ActivationFunctionForward(Tensor_t &X, EActivationFunction activFunct, const ActivationDescriptor_t activationDescr, const double coef=0.0, const AFloat alpha=1, const AFloat beta=0)
static void SoftSign(Tensor_t &B)
static void Gauss(Tensor_t &B)
static void Sigmoid(Tensor_t &B)
static void Tanh(Tensor_t &B)
static void ActivationFunctionBackward(Tensor_t &dX, const Tensor_t &Y, const Tensor_t &dY, const Tensor_t &X, EActivationFunction activFunct, const ActivationDescriptor_t activationDescr, const AFloat alpha=1, const AFloat beta=0)
Computes the gradient of the activation function.
CudaActivationDescriptor ActivationDescriptor_t
TCudaTensor< AFloat > Tensor_t
static void ReluDerivative(Tensor_t &B, const Tensor_t &A)
static void Hadamard(Tensor_t &A, const Tensor_t &B)
In-place Hadamard (element-wise) product of matrices A and B with the result being written into A.
static void GaussDerivative(Tensor_t &B, const Tensor_t &A)
static void Relu(Tensor_t &B)
static void SymmetricRelu(Tensor_t &B)
static void SigmoidDerivative(Tensor_t &B, const Tensor_t &A)
static void TanhDerivative(Tensor_t &B, const Tensor_t &A)
static dim3 BlockDims2D()
static dim3 GridDims2D(int nrows, int ncols)
__global__ void SymmetricRelu(AFloat *A, int m, int n)
__global__ void Sigmoid(AFloat *A, int m, int n)
__global__ void SigmoidDerivative(AFloat *B, const AFloat *A, int m, int n)
__global__ void IdentityDerivative(AFloat *A, int m, int n)
__global__ void Relu(AFloat *A, int m, int n)
__global__ void ReluDerivative(AFloat *B, const AFloat *A, int m, int n)
__global__ void GaussDerivative(AFloat *B, const AFloat *A, int m, int n)
__global__ void Tanh(AFloat *A, int m, int n)
__global__ void TanhDerivative(AFloat *B, const AFloat *A, int m, int n)
__global__ void Gauss(AFloat *A, int m, int n)
__global__ void SymmetricReluDerivative(AFloat *B, const AFloat *A, int m, int n)
__global__ void SoftSignDerivative(AFloat *B, const AFloat *A, int m, int n)
__global__ void SoftSign(AFloat *A, int m, int n)
void evaluate(typename Architecture_t::Tensor_t &A, EActivationFunction f)
Apply the given activation function to each value in the given tensor A.
EActivationFunction
Enum that represents layer activation functions.
void evaluateDerivative(typename Architecture_t::Tensor_t &B, EActivationFunction f, const typename Architecture_t::Tensor_t &A)
Compute the first partial derivative of the activation function for the values given in tensor A and write the results into tensor B.
create variable transformations