|
template<typename ItValue , typename ItFunction > |
void | TMVA::DNN::applyFunctions (ItValue itValue, ItValue itValueEnd, ItFunction itFunction) |
|
template<typename ItValue , typename ItFunction , typename ItInverseFunction , typename ItGradient > |
void | TMVA::DNN::applyFunctions (ItValue itValue, ItValue itValueEnd, ItFunction itFunction, ItInverseFunction itInverseFunction, ItGradient itGradient) |
|
template<typename ItSource , typename ItWeight , typename ItTarget > |
void | TMVA::DNN::applyWeights (ItSource itSourceBegin, ItSource itSourceEnd, ItWeight itWeight, ItTarget itTargetBegin, ItTarget itTargetEnd) |
|
template<typename ItSource , typename ItWeight , typename ItPrev > |
void | TMVA::DNN::applyWeightsBackwards (ItSource itCurrBegin, ItSource itCurrEnd, ItWeight itWeight, ItPrev itPrevBegin, ItPrev itPrevEnd) |
|
template<typename LAYERDATA > |
void | TMVA::DNN::backward (LAYERDATA &prevLayerData, LAYERDATA &currLayerData) |
| backward application of the weights (back-propagation of the error) More...
|
|
template<typename ItProbability , typename ItTruth , typename ItDelta , typename ItInvActFnc > |
double | TMVA::DNN::crossEntropy (ItProbability itProbabilityBegin, ItProbability itProbabilityEnd, ItTruth itTruthBegin, ItTruth, ItDelta itDelta, ItDelta itDeltaEnd, ItInvActFnc, double patternWeight) |
| cross entropy error function More...
|
|
template<typename LAYERDATA > |
void | TMVA::DNN::forward (const LAYERDATA &prevLayerData, LAYERDATA &currLayerData) |
| apply the weights (and functions) in forward direction of the DNN More...
|
|
double | TMVA::DNN::gaussDouble (double mean, double sigma) |
|
template<typename T > |
bool | TMVA::DNN::isFlagSet (T flag, T value) |
|
ModeOutputValues | TMVA::DNN::operator& (ModeOutputValues lhs, ModeOutputValues rhs) |
|
ModeOutputValues | TMVA::DNN::operator&= (ModeOutputValues &lhs, ModeOutputValues rhs) |
|
ModeOutputValues | TMVA::DNN::operator| (ModeOutputValues lhs, ModeOutputValues rhs) |
|
ModeOutputValues | TMVA::DNN::operator|= (ModeOutputValues &lhs, ModeOutputValues rhs) |
|
int | TMVA::DNN::randomInt (int maxValue) |
|
template<typename ItOutput , typename ItTruth , typename ItDelta , typename ItInvActFnc > |
double | TMVA::DNN::softMaxCrossEntropy (ItOutput itProbabilityBegin, ItOutput itProbabilityEnd, ItTruth itTruthBegin, ItTruth, ItDelta itDelta, ItDelta itDeltaEnd, ItInvActFnc, double patternWeight) |
| soft-max-cross-entropy error function (for mutually exclusive cross-entropy) More...
|
|
template<typename ItOutput , typename ItTruth , typename ItDelta , typename ItInvActFnc > |
double | TMVA::DNN::sumOfSquares (ItOutput itOutputBegin, ItOutput itOutputEnd, ItTruth itTruthBegin, ItTruth itTruthEnd, ItDelta itDelta, ItDelta itDeltaEnd, ItInvActFnc itInvActFnc, double patternWeight) |
|
double | TMVA::DNN::uniformDouble (double minValue, double maxValue) |
|
template<typename ItSource , typename ItDelta , typename ItTargetGradient , typename ItGradient > |
void | TMVA::DNN::update (ItSource itSource, ItSource itSourceEnd, ItDelta itTargetDeltaBegin, ItDelta itTargetDeltaEnd, ItTargetGradient itTargetGradientBegin, ItGradient itGradient) |
| update the gradients More...
|
|
template<EnumRegularization Regularization, typename ItSource , typename ItDelta , typename ItTargetGradient , typename ItGradient , typename ItWeight > |
void | TMVA::DNN::update (ItSource itSource, ItSource itSourceEnd, ItDelta itTargetDeltaBegin, ItDelta itTargetDeltaEnd, ItTargetGradient itTargetGradientBegin, ItGradient itGradient, ItWeight itWeight, double weightDecay) |
| update the gradients, using regularization More...
|
|
template<typename LAYERDATA > |
void | TMVA::DNN::update (const LAYERDATA &prevLayerData, LAYERDATA &currLayerData, double factorWeightDecay, EnumRegularization regularization) |
| update the node values More...
|
|
template<typename ItWeight > |
double | TMVA::DNN::weightDecay (double error, ItWeight itWeight, ItWeight itWeightEnd, double factorWeightDecay, EnumRegularization eRegularization) |
| compute the weight decay for regularization (L1 or L2) More...
|
|