// TMVA::MethodANNBase -- base class for all TMVA methods using artificial
// neural networks (excerpts)

#if __cplusplus > 199711L
#include <atomic>
#endif

// ...

////////////////////////////////////////////////////////////////////////////////
/// standard constructor

TMVA::MethodANNBase::MethodANNBase( const TString& jobName, Types::EMVA methodType,
                                    const TString& methodTitle, DataSetInfo& theData,
                                    const TString& theOption )
   : TMVA::MethodBase( jobName, methodType, methodTitle, theData, theOption )
{
   // ...
}
////////////////////////////////////////////////////////////////////////////////
/// define the options (their key words) that can be set in the option string

void TMVA::MethodANNBase::DeclareOptions()
{
   DeclareOptionRef( fNcycles    = 500,       "NCycles",      "Number of training cycles" );
   DeclareOptionRef( fLayerSpec  = "N,N-1",   "HiddenLayers", "Specification of hidden layer architecture" );
   DeclareOptionRef( fNeuronType = "sigmoid", "NeuronType",   "Neuron activation function type" );
   DeclareOptionRef( fRandomSeed = 1, "RandomSeed",
                     "Random seed for initial synapse weights (0 means unique seed for each run; default value '1')" );
   // ...
   DeclareOptionRef( fEstimatorS = "MSE", "EstimatorType",
                     "MSE (Mean Square Estimator) for Gaussian Likelihood or CE (Cross-Entropy) for Bernoulli Likelihood" );
   // ...
   // register the names of all known activation functions as allowed values
   Int_t nTypes = names->size();
   for (Int_t i = 0; i < nTypes; i++)
      AddPreDefVal(names->at(i));
   // ...
   DeclareOptionRef( fNeuronInputType = "sum", "NeuronInputType", "Neuron input function type" );
   // ...
   // same for the names of all known neuron input functions
   nTypes = names->size();
   for (Int_t i = 0; i < nTypes; i++) AddPreDefVal(names->at(i));
   // ...
}
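// ---------------------------------------------------------------------------
// [Illustration, not part of MethodANNBase.cxx] The option keys declared
// above are what users pass in the booking string of an ANN-based method.
// A hedged usage sketch, assuming a Factory `factory` and a DataLoader
// `dataloader` already exist (the exact booking API depends on the
// TMVA/ROOT version):
//
//    factory->BookMethod( dataloader, TMVA::Types::kMLP, "MLP",
//                         "NCycles=600:HiddenLayers=N+5,N:NeuronType=tanh:"
//                         "EstimatorType=CE:RandomSeed=1" );
// ---------------------------------------------------------------------------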
////////////////////////////////////////////////////////////////////////////////
/// do nothing specific at this moment

void TMVA::MethodANNBase::ProcessOptions()
{
   if ( DoRegression() || DoMulticlass() ) fEstimatorS = "MSE";
   else                                    fEstimatorS = "CE";
   if      (fEstimatorS == "MSE") fEstimator = kMSE;
   else if (fEstimatorS == "CE")  fEstimator = kCE;

   std::vector<Int_t>* layout = ParseLayoutString(fLayerSpec);
   BuildNetwork(layout);
   // ...
}
////////////////////////////////////////////////////////////////////////////////
/// parse layout specification string and return a vector, each entry
/// containing the number of neurons in the corresponding layer

std::vector<Int_t>* TMVA::MethodANNBase::ParseLayoutString( TString layerSpec )
{
   std::vector<Int_t>* layout = new std::vector<Int_t>();
   layout->push_back((Int_t)GetNvar());
   while (layerSpec.Length()>0) {
      TString sToAdd = "";
      if (layerSpec.First(',')<0) {
         sToAdd = layerSpec;
         layerSpec = "";
      }
      else {
         sToAdd = layerSpec(0,layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
      }
      Int_t nNodes = 0;
      // an "N" (or "n") in the spec stands for the number of input variables
      if (sToAdd.BeginsWith("n") || sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
      nNodes += atoi(sToAdd);
      layout->push_back(nNodes);
   }
   if      (DoRegression())
      layout->push_back( DataInfo().GetNTargets() );   // one output node per target
   else if (DoMulticlass())
      layout->push_back( DataInfo().GetNClasses() );   // one output node per class
   else
      layout->push_back(1);                            // single output node

   for (std::vector<Int_t>::iterator it = layout->begin(); it != layout->end(); ++it) {
      // ...
   }
   return layout;
}
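// ---------------------------------------------------------------------------
// [Illustration, not part of MethodANNBase.cxx] Worked example: with four
// input variables, the default spec "N,N-1" expands to the layout
// {4, 4, 3, 1} -- input layer, hidden layers of N=4 and N-1=3 neurons, and a
// single classification output. A minimal standalone re-implementation of
// the same convention (hypothetical helper, plain C++):

#include <cstdlib>
#include <iostream>
#include <string>
#include <vector>

std::vector<int> parseLayoutSketch(std::string spec, int nVar)
{
   std::vector<int> layout{nVar};                     // input layer
   while (!spec.empty()) {
      std::size_t comma = spec.find(',');
      std::string tok = spec.substr(0, comma);
      spec = (comma == std::string::npos) ? "" : spec.substr(comma + 1);
      int nodes = 0;
      if (!tok.empty() && (tok[0] == 'N' || tok[0] == 'n')) { nodes = nVar; tok.erase(0, 1); }
      nodes += std::atoi(tok.c_str());                // "" -> 0, "-1" -> -1, "+5" -> 5
      layout.push_back(nodes);
   }
   layout.push_back(1);                               // single output neuron
   return layout;
}

int main()
{
   for (int n : parseLayoutSketch("N,N-1", 4)) std::cout << n << ' ';   // 4 4 3 1
   std::cout << '\n';
}
// ---------------------------------------------------------------------------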
////////////////////////////////////////////////////////////////////////////////
/// initialize ANNBase object

void TMVA::MethodANNBase::InitANNBase()
{
   // ...
   fInputCalculator    = NULL;
   // ...
   fEstimatorHistTrain = NULL;
   fEstimatorHistTest  = NULL;
   // ...
   // reset the epoch monitoring histogram vectors
   fEpochMonHistS.clear();
   fEpochMonHistB.clear();
   fEpochMonHistW.clear();
   // ...
   fOutputNeurons.clear();
   // ...
}
////////////////////////////////////////////////////////////////////////////////
/// delete/clear network

void TMVA::MethodANNBase::DeleteNetwork()
{
   if (fNetwork != NULL) {
      // ...
      Int_t numLayers = fNetwork->GetEntriesFast();
      for (Int_t i = 0; i < numLayers; i++) {
         // ...
         DeleteNetworkLayer(layer);
      }
      // ...
   }

   if (frgen != NULL)            delete frgen;
   if (fActivation != NULL)      delete fActivation;
   if (fOutput != NULL)          delete fOutput;
   if (fIdentity != NULL)        delete fIdentity;
   if (fInputCalculator != NULL) delete fInputCalculator;
   if (fSynapses != NULL)        delete fSynapses;
   // ...
   fInputCalculator = NULL;
   // ...
}
////////////////////////////////////////////////////////////////////////////////
/// delete a network layer

void TMVA::MethodANNBase::DeleteNetworkLayer( TObjArray*& layer )
{
   // ...
   for (Int_t i = 0; i < numNeurons; i++) {
      // delete each neuron of the layer (dropping its pre-links first)
      // ...
   }
   // ...
}
////////////////////////////////////////////////////////////////////////////////
/// build network given a layout (number of neurons in each layer)
/// and optional weights array

void TMVA::MethodANNBase::BuildNetwork( std::vector<Int_t>* layout,
                                        std::vector<Double_t>* weights, Bool_t fromFile )
{
   if      (fEstimatorS == "MSE") fEstimator = kMSE;
   else if (fEstimatorS == "CE")  fEstimator = kCE;
   else Log() << kWARNING << "fEstimator=" << fEstimator
              << "\tfEstimatorS=" << fEstimatorS << Endl;
   if (fEstimator != kMSE && fEstimator != kCE)
      Log() << kWARNING << "Estimator type unspecified \t" << Endl;

   Log() << kHEADER << "Building Network. " << Endl;
   // ...
   fRegulatorIdx.clear();
   // ...
   BuildLayers( layout, fromFile );

   // cache the input layer and the output neurons for fast access
   fInputLayer = (TObjArray*)fNetwork->At(0);
   TObjArray* outputLayer = (TObjArray*)fNetwork->At(fNetwork->GetEntriesFast()-1);
   fOutputNeurons.clear();
   for (Int_t i = 0; i < outputLayer->GetEntries(); i++) {
      fOutputNeurons.push_back( (TNeuron*)outputLayer->At(i) );
   }

   if (weights == NULL) InitWeights();
   else                 ForceWeights(weights);
}
////////////////////////////////////////////////////////////////////////////////
/// build the network layers

void TMVA::MethodANNBase::BuildLayers( std::vector<Int_t>* layout, Bool_t fromFile )
{
   // ...
   Int_t numLayers = layout->size();

   for (Int_t i = 0; i < numLayers; i++) {
      // ...
      BuildLayer(layout->at(i), curLayer, prevLayer, i, numLayers, fromFile);
      prevLayer = curLayer;
      fNetwork->Add(curLayer);
   }

   // cache the synapses (and set up their regulators) for fast access
   for (Int_t i = 0; i < numLayers; i++) {
      // ...
      if (i!=0 && i!=numLayers-1) fRegulators.push_back(0.);
      for (Int_t j = 0; j < numNeurons; j++) {
         if (i==0) fRegulators.push_back(0.);
         // ...
         for (Int_t k = 0; k < numSynapses; k++) {
            // ...
            fSynapses->Add(synapse);
            fRegulatorIdx.push_back(fRegulators.size()-1);
         }
      }
   }
}
////////////////////////////////////////////////////////////////////////////////
/// build a single layer with neurons and synapses connecting this
/// layer to the previous layer

void TMVA::MethodANNBase::BuildLayer( Int_t numNeurons, TObjArray* curLayer,
                                      TObjArray* prevLayer, Int_t layerIndex,
                                      Int_t numLayers, Bool_t fromFile )
{
   // ...
   for (Int_t j = 0; j < numNeurons; j++) {
      if (fromFile && (layerIndex != numLayers-1) && (j==numNeurons-1)) {
         // when rebuilding from file, the last neuron of each non-output
         // layer is the bias neuron
         // ...
         curLayer->Add(neuron);
      }
      else {
         // ...
         if (layerIndex == 0) {
            // input neurons
            // ...
         }
         else {
            if (layerIndex == numLayers-1) {
               // output neurons
               // ...
            }
            // ...
            AddPreLinks(neuron, prevLayer);
         }
         curLayer->Add(neuron);
      }
   }

   // add a bias neuron to all but the output layer
   if (layerIndex != numLayers-1) {
      // ...
      curLayer->Add(neuron);
   }
}
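// ---------------------------------------------------------------------------
// [Illustration, not part of MethodANNBase.cxx] BuildLayer above appends one
// bias neuron to every layer except the output, and only non-input, non-bias
// neurons receive pre-links, so a fully connected net over a layout
// {n_0, ..., n_L} carries sum_l (n_{l-1}+1)*n_l synapses. A standalone check
// (hypothetical helper, plain C++):

#include <iostream>
#include <vector>

int countSynapsesSketch(const std::vector<int>& layout)
{
   int total = 0;
   for (std::size_t l = 1; l < layout.size(); l++)
      total += (layout[l-1] + 1) * layout[l];   // "+1" for the bias neuron;
                                                // bias neurons have no pre-links
   return total;
}

int main()
{
   std::cout << countSynapsesSketch({4, 4, 3, 1}) << '\n';   // prints 39
}
// ---------------------------------------------------------------------------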
////////////////////////////////////////////////////////////////////////////////
/// add synapses connecting a neuron to its preceding layer

void TMVA::MethodANNBase::AddPreLinks( TNeuron* neuron, TObjArray* prevLayer )
{
   // ...
   for (Int_t i = 0; i < numNeurons; i++) {
      // ...
   }
}
////////////////////////////////////////////////////////////////////////////////
/// initialize the synapse weights randomly

void TMVA::MethodANNBase::InitWeights()
{
   PrintMessage("Initializing weights");

   // init synapse weights uniformly in [-2, 2)
   TSynapse* synapse;
   Int_t numSynapses = fSynapses->GetEntriesFast();
   for (Int_t i = 0; i < numSynapses; i++) {
      synapse = (TSynapse*)fSynapses->At(i);
      synapse->SetWeight(4.0*frgen->Rndm() - 2.0);
   }
}
////////////////////////////////////////////////////////////////////////////////
/// force the synapse weights

void TMVA::MethodANNBase::ForceWeights( std::vector<Double_t>* weights )
{
   PrintMessage("Forcing weights");

   TSynapse* synapse;
   Int_t numSynapses = fSynapses->GetEntriesFast();
   for (Int_t i = 0; i < numSynapses; i++) {
      synapse = (TSynapse*)fSynapses->At(i);
      // ...
   }
}
////////////////////////////////////////////////////////////////////////////////
/// force the values of the input neurons

void TMVA::MethodANNBase::ForceNetworkInputs( const Event* ev, Int_t ignoreIndex )
{
   // ...
   for (UInt_t j = 0; j < GetNvar(); j++) {
      // ...
      neuron = GetInputNeuron(j);
      neuron->ForceValue( ((Int_t)j == ignoreIndex) ? 0 : ev->GetValue(j) );
   }
}
////////////////////////////////////////////////////////////////////////////////
/// calculate input values to each neuron

void TMVA::MethodANNBase::ForceNetworkCalculations()
{
   // ...
   Int_t numLayers = fNetwork->GetEntriesFast();
   for (Int_t i = 0; i < numLayers; i++) {
      // ...
      for (Int_t j = 0; j < numNeurons; j++) {
         // ...
         neuron->CalculateValue();
         neuron->CalculateActivationValue();
      }
   }
}
////////////////////////////////////////////////////////////////////////////////
/// print messages; printing is suppressed unless the verbose or debug
/// flag is set or the message is forced

void TMVA::MethodANNBase::PrintMessage( TString message, Bool_t force ) const
{
   if (Verbose() || Debug() || force) Log() << kINFO << message << Endl;
}
////////////////////////////////////////////////////////////////////////////////
/// wait for keyboard input, for debugging

void TMVA::MethodANNBase::WaitForKeyboard()
{
   std::string dummy;
   Log() << kINFO << "***Type anything to continue (q to quit): ";
   std::getline(std::cin, dummy);
   if (dummy == "q" || dummy == "Q") {
      PrintMessage( "quit" );
      // ...
   }
}
////////////////////////////////////////////////////////////////////////////////
/// print network representation, for debugging

void TMVA::MethodANNBase::PrintNetwork() const
{
   if (!Debug()) return;

   Log() << kINFO << Endl;
   PrintMessage( "Printing network " );
   Log() << kINFO << "-------------------------------------------------------------------" << Endl;

   // ...
   Int_t numLayers = fNetwork->GetEntriesFast();
   for (Int_t i = 0; i < numLayers; i++) {
      // ...
      Log() << kINFO << "Layer #" << i << " (" << numNeurons << " neurons):" << Endl;
      PrintLayer( curLayer );
   }
}
////////////////////////////////////////////////////////////////////////////////
/// print a single layer, for debugging

void TMVA::MethodANNBase::PrintLayer( TObjArray* layer ) const
{
   // ...
   for (Int_t j = 0; j < numNeurons; j++) {
      // ...
      Log() << kINFO << "\tNeuron #" << j << " (LinksIn: " << neuron->NumPreLinks()
            << ", LinksOut: " << neuron->NumPostLinks() << ")" << Endl;
      PrintNeuron( neuron );
   }
}
////////////////////////////////////////////////////////////////////////////////
/// print a neuron, for debugging

void TMVA::MethodANNBase::PrintNeuron( TNeuron* neuron ) const
{
   Log() << kINFO
         << "\t\tValue:\t"     << neuron->GetValue()
         << "\t\tActivation: " << neuron->GetActivationValue()
         << "\t\tDelta: "      << neuron->GetDelta() << Endl;
   Log() << kINFO << "\t\tActivationEquation:\t";
   neuron->PrintActivationEqn();
   Log() << kINFO << "\t\tLinksIn:" << Endl;
   neuron->PrintPreLinks();
   Log() << kINFO << "\t\tLinksOut:" << Endl;
   neuron->PrintPostLinks();
}
////////////////////////////////////////////////////////////////////////////////
/// get the mva value generated by the NN

Double_t TMVA::MethodANNBase::GetMvaValue( Double_t* err, Double_t* errUpper )
{
   // ...
   const Event* ev = GetEvent();

   // force the event values onto the input neurons
   for (UInt_t i = 0; i < GetNvar(); i++) {
      // ...
      neuron->ForceValue( ev->GetValue(i) );
   }
   ForceNetworkCalculations();

   // ...
   // cannot determine error
   NoErrorCalc(err, errUpper);

   return GetOutputNeuron()->GetActivationValue();
}
////////////////////////////////////////////////////////////////////////////////
/// get the regression value generated by the NN

const std::vector<Float_t>& TMVA::MethodANNBase::GetRegressionValues()
{
   // ...
   const Event* ev = GetEvent();

   for (UInt_t i = 0; i < GetNvar(); i++) {
      // ...
      neuron->ForceValue( ev->GetValue(i) );
   }
   ForceNetworkCalculations();

   // ...
   if (fRegressionReturnVal == NULL) fRegressionReturnVal = new std::vector<Float_t>();
   fRegressionReturnVal->clear();

   // write the raw network outputs into a copy of the event ...
   // ...
   for (UInt_t itgt = 0; itgt < ntgts; itgt++) {
      evT->SetTarget( itgt, GetOutputNeuron(itgt)->GetActivationValue() );
   }
   // ... and transform them back to the original target range
   const Event* evT2 = GetTransformationHandler().InverseTransform( evT );
   for (UInt_t itgt = 0; itgt < ntgts; itgt++) {
      fRegressionReturnVal->push_back( evT2->GetTarget(itgt) );
   }
   // ...
   return *fRegressionReturnVal;
}
////////////////////////////////////////////////////////////////////////////////
/// get the multiclass classification values generated by the NN

const std::vector<Float_t>& TMVA::MethodANNBase::GetMulticlassValues()
{
   // ...
   const Event* ev = GetEvent();

   for (UInt_t i = 0; i < GetNvar(); i++) {
      // ...
      neuron->ForceValue( ev->GetValue(i) );
   }
   ForceNetworkCalculations();

   // ...
   if (fMulticlassReturnVal == NULL) fMulticlassReturnVal = new std::vector<Float_t>();
   fMulticlassReturnVal->clear();
   std::vector<Float_t> temp;

   UInt_t nClasses = DataInfo().GetNClasses();
   for (UInt_t icls = 0; icls < nClasses; icls++) {
      temp.push_back( GetOutputNeuron( icls )->GetActivationValue() );
   }

   // normalise the raw outputs: 1/(1 + sum_{j!=i} exp(t_j - t_i)) = softmax(t)_i
   for (UInt_t iClass = 0; iClass < nClasses; iClass++) {
      Double_t norm = 0.0;
      for (UInt_t j = 0; j < nClasses; j++) {
         if (iClass != j)
            norm += exp( temp[j] - temp[iClass] );
      }
      (*fMulticlassReturnVal).push_back( 1.0/(1.0+norm) );
   }

   return *fMulticlassReturnVal;
}
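// ---------------------------------------------------------------------------
// [Illustration, not part of MethodANNBase.cxx] The normalisation above is an
// overflow-safe softmax: dividing numerator and denominator by exp(t_i) gives
// 1/(1 + sum_{j!=i} exp(t_j - t_i)) = exp(t_i)/sum_j exp(t_j). A standalone
// numerical check (plain C++):

#include <cassert>
#include <cmath>
#include <vector>

int main()
{
   std::vector<double> t = {0.2, -1.3, 2.7};
   double sum = 0.0;
   for (double tj : t) sum += std::exp(tj);
   for (std::size_t i = 0; i < t.size(); i++) {
      double norm = 0.0;
      for (std::size_t j = 0; j < t.size(); j++)
         if (j != i) norm += std::exp(t[j] - t[i]);
      assert(std::fabs(1.0/(1.0 + norm) - std::exp(t[i])/sum) < 1e-12);
   }
}
// ---------------------------------------------------------------------------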
////////////////////////////////////////////////////////////////////////////////
/// create XML description of ANN classifier

void TMVA::MethodANNBase::AddWeightsXMLTo( void* parent ) const
{
   Int_t numLayers = fNetwork->GetEntriesFast();
   // ...
   for (Int_t i = 0; i < numLayers; i++) {
      // ...
      for (Int_t j = 0; j < numNeurons; j++) {
         // ...
         if (numSynapses==0) continue;
         std::stringstream s("");
         // ...
         for (Int_t k = 0; k < numSynapses; k++) {
            // ...
            s << std::scientific << synapse->GetWeight() << " ";
         }
         // ...
      }
   }

   // if an inverse hessian exists, write it to the weight file as well
   if (fInvHessian.GetNcols()>0) {
      // ...
      // get the matrix dimensions
      Int_t nElements = fInvHessian.GetNoElements();
      Int_t nRows     = fInvHessian.GetNrows();
      Int_t nCols     = fInvHessian.GetNcols();
      // ...
      fInvHessian.GetMatrix2Array( elements );

      // write the matrix row by row
      for (Int_t row = 0; row < nRows; ++row) {
         // ...
         std::stringstream s("");
         // ...
         for (Int_t col = 0; col < nCols; ++col) {
            s << std::scientific << (*(elements+index)) << " ";
            ++index;
         }
         // ...
      }
   }
}
////////////////////////////////////////////////////////////////////////////////
/// read MLP from xml weight file

void TMVA::MethodANNBase::ReadWeightsFromXML( void* wghtnode )
{
   // build the layout first
   std::vector<Int_t>* layout = new std::vector<Int_t>();

   void* xmlLayout = NULL;
   // ...
   xmlLayout = wghtnode;
   // ...
   layout->resize( nLayers );
   // ...
   layout->at(index) = nNeurons;
   // ...
   BuildNetwork( layout, NULL, fromFile );

   // compatibility: weight files written with TMVA < 4.2.1 used the exact
   // ("slow") tanh implementation, so reproduce it when reading such files
   if (GetTrainingTMVAVersionCode() < TMVA_VERSION(4,2,1) &&
       fActivation->GetExpression().Contains("tanh")) {
      // ...
   }

   // read the synapse weights, neuron by neuron
   // ...
   std::stringstream s(content);
   for (UInt_t iSyn = 0; iSyn < nSyn; iSyn++) {
      // ...
   }

   // read the inverse hessian, if present
   void* xmlInvHessian = NULL;
   // ...
   fUseRegulator = kTRUE;
   // ...
   // adjust the matrix dimensions
   fInvHessian.ResizeTo( nRows, nCols );
   // ...
   if (nElements > std::numeric_limits<int>::max()-100) {
      Log() << kFATAL << "you tried to read a hessian matrix with " << nElements
            << " elements, --> too large, guess s.th. went wrong reading from the weight file"
            << Endl;
   }
   // ...
   elements = new Double_t[nElements+10];
   // ...
   std::stringstream s(content);
   for (Int_t iCol = 0; iCol < nCols; iCol++) {
      s >> (*(elements+index));
      // ...
   }
   // ...
   fInvHessian.SetMatrixArray( elements );
   // ...
}
////////////////////////////////////////////////////////////////////////////////
/// destroy/clear the network then read it back in from the weights file

void TMVA::MethodANNBase::ReadWeightsFromStream( std::istream& istr )
{
   // ...
   std::vector<Double_t>* weights = new std::vector<Double_t>();
   // ...
   while (istr >> dummy >> weight) weights->push_back(weight);

   ForceWeights(weights);
   // ...
}
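// ---------------------------------------------------------------------------
// [Illustration, not part of MethodANNBase.cxx] The read loop above implies a
// simple two-column text format: a label token followed by a weight per
// record; the labels are discarded into `dummy`. A standalone sketch of the
// same loop (plain C++, made-up labels and values):

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

int main()
{
   std::istringstream istr("w0 0.125\nw1 -1.750\nw2 2.000\n");
   std::string dummy;
   double weight;
   std::vector<double> weights;
   while (istr >> dummy >> weight) weights.push_back(weight);
   std::cout << weights.size() << " weights read\n";   // prints "3 weights read"
}
// ---------------------------------------------------------------------------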
////////////////////////////////////////////////////////////////////////////////
/// compute ranking of input variables by summing function of weights

const TMVA::Ranking* TMVA::MethodANNBase::CreateRanking()
{
   // create the ranking object
   fRanking = new Ranking( GetName(), "Importance" );

   // ...
   for (UInt_t ivar = 0; ivar < GetNvar(); ivar++) {
      // ...
      neuron = GetInputNeuron(ivar);
      // ...
      varName = GetInputVar(ivar);

      // ...
      Statistics( Types::kTraining, varName,
                  meanS, meanB, rmsS, rmsB, xmin, xmax );
      // ...
      if (avgVal<meanrms) avgVal = meanrms;
      if (IsNormalised()) avgVal = 0.5*(1 + gTools().NormVariable( avgVal, GetXmin(ivar), GetXmax(ivar) ));

      // sum of squared weights of the synapses leaving this input neuron
      for (Int_t j = 0; j < numSynapses; j++) {
         // ...
      }
      // scale the importance by the (average) magnitude of the variable
      importance *= avgVal * avgVal;

      fRanking->AddRank( Rank( varName, importance ) );
   }

   return fRanking;
}
////////////////////////////////////////////////////////////////////////////////
/// create monitoring histograms of the synapse weights, one 2-D map per
/// pair of adjacent layers

void TMVA::MethodANNBase::CreateWeightMonitoringHists( const TString& bulkname,
                                                       std::vector<TH1*>* hv ) const
{
   // ...
   Int_t numLayers = fNetwork->GetEntriesFast();

   for (Int_t i = 0; i < numLayers-1; i++) {
      // ...
      TH2F* hist = new TH2F( name, name,
                             numNeurons1, 0, numNeurons1, numNeurons2, 0, numNeurons2 );
      // ...
      for (Int_t j = 0; j < numNeurons1; j++) {
         // ...
         for (Int_t k = 0; k < numSynapses; k++) {
            // ...
            hist->SetBinContent( j+1, k+1, synapse->GetWeight() );
         }
      }
      // ...
      if (hv) hv->push_back( hist );
      // ...
   }
}
////////////////////////////////////////////////////////////////////////////////
/// write histograms to file

void TMVA::MethodANNBase::WriteMonitoringHistosToFile() const
{
   PrintMessage( Form("Write special histos to file: %s", BaseDir()->GetPath()), kTRUE );

   if (fEstimatorHistTrain) fEstimatorHistTrain->Write();
   if (fEstimatorHistTest ) fEstimatorHistTest ->Write();

   // ...
   CreateWeightMonitoringHists( "weights_hist" );

   // thread-safe directory numbering when compiled as C++11 or later
#if __cplusplus > 199711L
   static std::atomic<int> epochMonitoringDirectoryNumber{0};
#else
   static int epochMonitoringDirectoryNumber = 0;
#endif
   int epochVal = epochMonitoringDirectoryNumber++;
   TDirectory* epochdir = NULL;
   if (epochVal == 0)
      epochdir = BaseDir()->mkdir( "EpochMonitoring" );
   else
      epochdir = BaseDir()->mkdir( Form("EpochMonitoring_%4d", epochVal) );

   epochdir->cd();
   for (std::vector<TH1*>::const_iterator it = fEpochMonHistS.begin(); it != fEpochMonHistS.end(); ++it) {
      (*it)->Write();
   }
   for (std::vector<TH1*>::const_iterator it = fEpochMonHistB.begin(); it != fEpochMonHistB.end(); ++it) {
      (*it)->Write();
   }
   for (std::vector<TH1*>::const_iterator it = fEpochMonHistW.begin(); it != fEpochMonHistW.end(); ++it) {
      (*it)->Write();
   }
   // ...
}
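// ---------------------------------------------------------------------------
// [Illustration, not part of MethodANNBase.cxx] With C++11 the directory
// counter above is a std::atomic<int>, so the post-increment is a single
// fetch-and-add and concurrent callers each obtain a distinct epochVal.
// Minimal demonstration (plain C++11):

#include <atomic>
#include <cstdio>

int main()
{
   std::atomic<int> counter{0};
   int first  = counter++;   // 0 -> directory "EpochMonitoring"
   int second = counter++;   // 1 -> "EpochMonitoring_   1" ("%4d" pads to width 4)
   std::printf("%d %d\n", first, second);
}
// ---------------------------------------------------------------------------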
////////////////////////////////////////////////////////////////////////////////
/// write specific classifier response

void TMVA::MethodANNBase::MakeClassSpecific( std::ostream& fout, const TString& className ) const
{
   Int_t numLayers = fNetwork->GetEntries();

   // declarations for the generated standalone class
   fout << "   double ActivationFnc(double x) const;" << std::endl;
   fout << "   double OutputActivationFnc(double x) const;" << std::endl;
   // ...
   int numNodesFrom = -1;
   for (Int_t lIdx = 0; lIdx < numLayers; lIdx++) {
      int numNodesTo = ((TObjArray*)fNetwork->At(lIdx))->GetEntries();
      if (numNodesFrom < 0) { numNodesFrom = numNodesTo; continue; }
      fout << "   double fWeightMatrix" << lIdx-1 << "to" << lIdx
           << "[" << numNodesTo << "][" << numNodesFrom << "];";
      fout << "   // weight matrix from layer " << lIdx-1 << " to " << lIdx << std::endl;
      numNodesFrom = numNodesTo;
   }
   // ...
   fout << "};" << std::endl;

   // Initialize(): fill the weight matrices with the trained weights
   fout << "inline void " << className << "::Initialize()" << std::endl;
   fout << "{" << std::endl;
   fout << "   // build network structure" << std::endl;
   for (Int_t i = 0; i < numLayers-1; i++) {
      fout << "   // weight matrix from layer " << i << " to " << i+1 << std::endl;
      // ...
      for (Int_t j = 0; j < numNeurons; j++) {
         // ...
         for (Int_t k = 0; k < numSynapses; k++) {
            // ...
            fout << "   fWeightMatrix" << i << "to" << i+1 << "[" << k << "][" << j
                 << "] = " << synapse->GetWeight() << ";" << std::endl;
         }
      }
   }
   // ...
   fout << "}" << std::endl;

   // GetMvaValue__(): evaluate the network on an input vector
   fout << "inline double " << className
        << "::GetMvaValue__( const std::vector<double>& inputValues ) const" << std::endl;
   fout << "{" << std::endl;
   fout << "   if (inputValues.size() != (unsigned int)"
        << ((TObjArray *)fNetwork->At(0))->GetEntries() - 1 << ") {" << std::endl;
   fout << "      std::cout << \"Input vector needs to be of size \" << "
        << ((TObjArray *)fNetwork->At(0))->GetEntries() - 1 << " << std::endl;" << std::endl;
   fout << "      return 0;" << std::endl;
   fout << "   }" << std::endl;
   // ...
   // one buffer per layer; the last entry of each non-output layer is its bias node
   for (Int_t lIdx = 1; lIdx < numLayers; lIdx++) {
      // ...
      fout << "   std::array<double, " << numNodes << "> fWeights" << lIdx << " {{}};" << std::endl;
   }
   for (Int_t lIdx = 1; lIdx < numLayers - 1; lIdx++) {
      fout << "   fWeights" << lIdx << ".back() = 1.;" << std::endl;
   }
   // ...
   for (Int_t i = 0; i < numLayers - 1; i++) {
      fout << "   // layer " << i << " to " << i + 1 << std::endl;
      // the output layer has no bias node, so all of its nodes are computed
      if (i + 1 == numLayers - 1) {
         fout << "   for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries()
              << "; o++) {" << std::endl;
      }
      else {
         fout << "   for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries() - 1
              << "; o++) {" << std::endl;
      }
      if (i == 0) {
         // the first layer reads from inputValues
         fout << "      std::array<double, " << ((TObjArray *)fNetwork->At(i))->GetEntries()
              << "> buffer; // no need to initialise" << std::endl;
         fout << "      for (int i = 0; i<" << ((TObjArray *)fNetwork->At(i))->GetEntries()
              << " - 1; i++) {" << std::endl;
         fout << "         buffer[i] = fWeightMatrix" << i << "to" << i + 1
              << "[o][i] * inputValues[i];" << std::endl;
         fout << "      } // loop over i" << std::endl;
         fout << "      buffer.back() = fWeightMatrix" << i << "to" << i + 1 << "[o]["
              << ((TObjArray *)fNetwork->At(i))->GetEntries() - 1 << "];" << std::endl;
      }
      else {
         // deeper layers read from the previous layer's buffer
         fout << "      std::array<double, " << ((TObjArray *)fNetwork->At(i))->GetEntries()
              << "> buffer; // no need to initialise" << std::endl;
         fout << "      for (int i=0; i<" << ((TObjArray *)fNetwork->At(i))->GetEntries()
              << "; i++) {" << std::endl;
         fout << "         buffer[i] = fWeightMatrix" << i << "to" << i + 1
              << "[o][i] * fWeights" << i << "[i];" << std::endl;
         fout << "      } // loop over i" << std::endl;
      }
      fout << "      for (int i=0; i<" << ((TObjArray *)fNetwork->At(i))->GetEntries()
           << "; i++) {" << std::endl;
      if (fNeuronInputType == "sum") {
         fout << "         fWeights" << i + 1 << "[o] += buffer[i];" << std::endl;
      }
      else if (fNeuronInputType == "sqsum") {
         fout << "         fWeights" << i + 1 << "[o] += buffer[i]*buffer[i];" << std::endl;
      }
      else { // abssum
         fout << "         fWeights" << i + 1 << "[o] += fabs(buffer[i]);" << std::endl;
      }
      fout << "      } // loop over i" << std::endl;
      fout << "   } // loop over o" << std::endl;
      if (i + 1 == numLayers - 1) {
         fout << "   for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries()
              << "; o++) {" << std::endl;
      }
      else {
         fout << "   for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries() - 1
              << "; o++) {" << std::endl;
      }
      // the hidden-layer activation function is not applied in the last layer
      if (i+1 != numLayers-1)
         fout << "      fWeights" << i + 1 << "[o] = ActivationFnc(fWeights" << i + 1
              << "[o]);" << std::endl;
      else
         fout << "      fWeights" << i + 1 << "[o] = OutputActivationFnc(fWeights" << i + 1
              << "[o]);" << std::endl;
      fout << "   } // loop over o" << std::endl;
   }
   // ...
   fout << "   return fWeights" << numLayers - 1 << "[0];" << std::endl;
   fout << "}" << std::endl;

   // emit the activation function implementations
   TString fncName = className + "::ActivationFnc";
   fActivation->MakeFunction(fout, fncName);
   fncName = className + "::OutputActivationFnc";
   fOutput->MakeFunction(fout, fncName);

   fout << "// Clean up" << std::endl;
   fout << "inline void " << className << "::Clear()" << std::endl;
   fout << "{" << std::endl;
   fout << "}" << std::endl;
}