// Excerpts from the TMVA::MethodANNBase implementation -- the base class for
// all TMVA methods using artificial neural networks. Elided code is marked "// ...".

#if __cplusplus > 199711L   // C++11 guard (cf. the std::atomic use further below)
// ...
// standard constructor
TMVA::MethodANNBase::MethodANNBase( const TString& jobName,
                                    Types::EMVA methodType,
                                    const TString& methodTitle,
                                    DataSetInfo& theData,
                                    const TString& theOption )
   : TMVA::MethodBase( jobName, methodType, methodTitle, theData, theOption )
// ...
// TMVA::MethodANNBase::DeclareOptions -- options common to all ANN methods
DeclareOptionRef( fNcycles    = 500,       "NCycles",      "Number of training cycles" );
DeclareOptionRef( fLayerSpec  = "N,N-1",   "HiddenLayers", "Specification of hidden layer architecture" );
DeclareOptionRef( fNeuronType = "sigmoid", "NeuronType",   "Neuron activation function type" );
DeclareOptionRef( fRandomSeed = 1,         "RandomSeed",
                  "Random seed for initial synapse weights (0 means unique seed for each run; default value '1')" );

DeclareOptionRef( fEstimatorS = "MSE",     "EstimatorType",
                  "MSE (Mean Square Estimator) for Gaussian Likelihood or CE (Cross-Entropy) for Bernoulli Likelihood" );
// ...

// register all known activation function names as allowed option values
Int_t nTypes = names->size();
for (Int_t i = 0; i < nTypes; i++)
   AddPreDefVal(names->at(i));
// ...

DeclareOptionRef( fNeuronInputType = "sum", "NeuronInputType", "Neuron input function type" );
// ...
nTypes = names->size();
for (Int_t i = 0; i < nTypes; i++) AddPreDefVal(names->at(i));
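// Usage sketch (illustrative, not part of this file): the options declared above
// are set through the booking string of a TMVA::Factory; "factory" and
// "dataloader" are hypothetical variables.
//
//    factory->BookMethod( dataloader, TMVA::Types::kMLP, "MLP",
//                         "NCycles=600:HiddenLayers=N+1,N:NeuronType=tanh:"
//                         "EstimatorType=CE:RandomSeed=1" );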
// TMVA::MethodANNBase::ProcessOptions
if ( DoRegression() || DoMulticlass()) fEstimatorS = "MSE";
else                                   fEstimatorS = "CE";
if      (fEstimatorS == "MSE") fEstimator = kMSE;
else if (fEstimatorS == "CE")  fEstimator = kCE;

std::vector<Int_t>* layout = ParseLayoutString(fLayerSpec);
BuildNetwork(layout);
// TMVA::MethodANNBase::ParseLayoutString -- parse the layout specification string
// and return a vector with one entry per layer (the number of neurons)
std::vector<Int_t>* layout = new std::vector<Int_t>();
layout->push_back((Int_t)GetNvar());                        // input layer
while (layerSpec.Length()>0) {
   TString sToAdd = "";
   if (layerSpec.First(',')<0) {
      sToAdd = layerSpec;
      layerSpec = "";
   }
   else {
      sToAdd    = layerSpec(0, layerSpec.First(','));
      layerSpec = layerSpec(layerSpec.First(',')+1, layerSpec.Length());
   }
   Int_t nNodes = 0;
   if (sToAdd.BeginsWith("n") || sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
   nNodes += atoi(sToAdd);
   layout->push_back(nNodes);                               // hidden layer
}
if (DoRegression())
   layout->push_back( DataInfo().GetNTargets() );           // output layer
else if (DoMulticlass())
   layout->push_back( DataInfo().GetNClasses() );           // output layer
else
   layout->push_back(1);                                    // output layer

for (std::vector<Int_t>::iterator it = layout->begin(); it != layout->end(); ++it) {
   // ...
}
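// Worked example (illustrative): with GetNvar() == 4 input variables, "N" in the
// spec expands to the number of inputs, so the default "N,N-1" yields
//
//    layout = { 4, 4, 3, 1 }   // input, two hidden layers, one classifier output
//
// For regression the last entry becomes GetNTargets(), for multiclass GetNClasses().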
// TMVA::MethodANNBase::InitANNBase -- initialize ANNBase object
fInputCalculator = NULL;
// ...
fEstimatorHistTrain = NULL;
fEstimatorHistTest  = NULL;
// ...
fEpochMonHistS.clear();
fEpochMonHistB.clear();
fEpochMonHistW.clear();
// ...
fOutputNeurons.clear();
// TMVA::MethodANNBase::DeleteNetwork -- delete/clear network
if (fNetwork != NULL) {
   Int_t numLayers = fNetwork->GetEntriesFast();
   for (Int_t i = 0; i < numLayers; i++) {
      // ...
      DeleteNetworkLayer(layer);
   }
   // ...
}

if (frgen != NULL)            delete frgen;
if (fActivation != NULL)      delete fActivation;
if (fOutput != NULL)          delete fOutput;
if (fIdentity != NULL)        delete fIdentity;
if (fInputCalculator != NULL) delete fInputCalculator;
if (fSynapses != NULL)        delete fSynapses;
// ...
fInputCalculator = NULL;
// ...

// TMVA::MethodANNBase::DeleteNetworkLayer -- delete a network layer
for (Int_t i = 0; i < numNeurons; i++) {
   // ...
}
// TMVA::MethodANNBase::BuildNetwork -- build network given a layout (number of
// neurons in each layer) and optional weights array
if      (fEstimatorS == "MSE") fEstimator = kMSE;
else if (fEstimatorS == "CE")  fEstimator = kCE;
else    Log() << kWARNING << "fEstimator=" << fEstimator << "\tfEstimatorS=" << fEstimatorS << Endl;
if (fEstimator != kMSE && fEstimator != kCE)
   Log() << kWARNING << "Estimator type unspecified \t" << Endl;

Log() << kHEADER << "Building Network. " << Endl;
// ...
fRegulatorIdx.clear();
// ...
BuildLayers( layout, fromFile );

// cache pointers to the input layer and the output neurons for fast access
fInputLayer = (TObjArray*)fNetwork->At(0);
// ...
fOutputNeurons.clear();
for (Int_t i = 0; i < numNeurons; i++)
   fOutputNeurons.push_back( (TNeuron*)outputLayer->At(i) );
// ...
if (weights == NULL) InitWeights();
else                 ForceWeights(weights);
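// Note: the layout vector consumed here is the one built by ParseLayoutString
// above (one entry per layer, input and output included). Weights are drawn at
// random by InitWeights() unless an explicit weight vector, e.g. one read back
// from a weight file, is forced onto the synapses.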
// TMVA::MethodANNBase::BuildLayers -- build the network layers
Int_t numLayers = layout->size();
// ...
for (Int_t i = 0; i < numLayers; i++) {
   // ...
   BuildLayer(layout->at(i), curLayer, prevLayer, i, numLayers, fromFile);
   prevLayer = curLayer;
   fNetwork->Add(curLayer);
}

// book one regulator entry per layer/neuron and register all synapses
for (Int_t i = 0; i < numLayers; i++) {
   // ...
   if (i != 0 && i != numLayers-1) fRegulators.push_back(0.);
   for (Int_t j = 0; j < numNeurons; j++) {
      if (i == 0) fRegulators.push_back(0.);
      // ...
      for (Int_t k = 0; k < numSynapses; k++) {
         // ...
         fSynapses->Add(synapse);
         fRegulatorIdx.push_back(fRegulators.size()-1);
      }
   }
}
// TMVA::MethodANNBase::BuildLayer -- build a single layer with neurons and
// synapses connecting this layer to the previous layer
for (Int_t j = 0; j < numNeurons; j++) {
   if (fromFile && (layerIndex != numLayers-1) && (j == numNeurons-1)) {
      // ...
      curLayer->Add(neuron);
   }
   else {
      // ...
      if (layerIndex == 0) {
         // ...
      }
      else {
         if (layerIndex == numLayers-1) {
            // ...
         }
         // ...
         AddPreLinks(neuron, prevLayer);
      }
      curLayer->Add(neuron);
   }
}

// add bias neuron (except to output layer)
if (layerIndex != numLayers-1) {
   // ...
   curLayer->Add(neuron);
}
// TMVA::MethodANNBase::AddPreLinks -- add synapses connecting a neuron to its
// preceding layer
for (Int_t i = 0; i < numNeurons; i++) {
   // ...
}
// TMVA::MethodANNBase::InitWeights -- initialize the synapse weights randomly
PrintMessage("Initializing weights");

Int_t numSynapses = fSynapses->GetEntriesFast();
// ...
for (Int_t i = 0; i < numSynapses; i++) {
   synapse = (TSynapse*)fSynapses->At(i);
   synapse->SetWeight(4.0*frgen->Rndm() - 2.0);
}
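// Note: frgen->Rndm() is uniform in (0, 1), so 4.0*frgen->Rndm() - 2.0 draws the
// initial synapse weights uniformly from the interval (-2, 2).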
// TMVA::MethodANNBase::ForceWeights -- force the synapse weights
PrintMessage("Forcing weights");

Int_t numSynapses = fSynapses->GetEntriesFast();
// ...
for (Int_t i = 0; i < numSynapses; i++) {
   synapse = (TSynapse*)fSynapses->At(i);
   // ...
}
// TMVA::MethodANNBase::ForceNetworkInputs -- force the value for each input neuron
for (UInt_t j = 0; j < GetNvar(); j++) {
   // ...
   neuron = GetInputNeuron(j);
   // ...
}
// TMVA::MethodANNBase::ForceNetworkCalculations -- calculate input values to each neuron
Int_t numLayers = fNetwork->GetEntriesFast();
// ...
for (Int_t i = 0; i < numLayers; i++) {
   // ...
   for (Int_t j = 0; j < numNeurons; j++) {
      // ...
      neuron->CalculateValue();
      neuron->CalculateActivationValue();
   }
}
// TMVA::MethodANNBase::WaitForKeyboard -- wait for keyboard input, for debugging
Log() << kINFO << "***Type anything to continue (q to quit): ";
std::getline(std::cin, dummy);
if (dummy == "q" || dummy == "Q") {
   PrintMessage( "quit" );
   // ...
}
// TMVA::MethodANNBase::PrintNetwork -- print network representation, for debugging
if (!Debug()) return;

PrintMessage( "Printing network " );
Log() << kINFO << "-------------------------------------------------------------------" << Endl;
// ...
for (Int_t i = 0; i < numLayers; i++) {
   // ...
   Log() << kINFO << "Layer #" << i << " (" << numNeurons << " neurons):" << Endl;
   PrintLayer( curLayer );
}
// TMVA::MethodANNBase::PrintLayer -- print a single layer, for debugging
for (Int_t j = 0; j < numNeurons; j++) {
   // ...
   Log() << kINFO << "\tNeuron #" << j << " (LinksIn: " << neuron->NumPreLinks()
         << ", LinksOut: " << neuron->NumPostLinks() << ")" << Endl;
   PrintNeuron( neuron );
}

// TMVA::MethodANNBase::PrintNeuron -- print a neuron, for debugging
Log() << kINFO
      // ...
      << "\t\tValue:\t" << neuron->GetValue()
      // ...
      << Endl;
Log() << kINFO << "\t\tActivationEquation:\t";
neuron->PrintActivationEqn();
Log() << kINFO << "\t\tLinksIn:" << Endl;
neuron->PrintPreLinks();
Log() << kINFO << "\t\tLinksOut:" << Endl;
neuron->PrintPostLinks();
// TMVA::MethodANNBase::GetMvaValue -- get the MVA value generated by the NN
const Event* ev = GetEvent();
// ...
for (UInt_t i = 0; i < GetNvar(); i++) {
   // ...
}
// ...
ForceNetworkCalculations();
// ...
NoErrorCalc(err, errUpper);
// TMVA::MethodANNBase::GetRegressionValues -- get the regression values generated by the NN
const Event* ev = GetEvent();
// ...
for (UInt_t i = 0; i < GetNvar(); i++) {
   // ...
}
// ...
ForceNetworkCalculations();
// ...
if (fRegressionReturnVal == NULL) fRegressionReturnVal = new std::vector<Float_t>();
fRegressionReturnVal->clear();
// ...
for (UInt_t itgt = 0; itgt < ntgts; itgt++) {
   // ...
}
// ...
const Event* evT2 = GetTransformationHandler().InverseTransform( evT );
for (UInt_t itgt = 0; itgt < ntgts; itgt++) {
   fRegressionReturnVal->push_back( evT2->GetTarget(itgt) );
}
// ...
return *fRegressionReturnVal;
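// Note: the raw network outputs are stored as targets of a copy of the event,
// which is then passed through the inverse of the target transformation, so
// the values returned here live on the original (untransformed) target scale.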
// TMVA::MethodANNBase::GetMulticlassValues -- get the multiclass classification
// values generated by the NN
const Event* ev = GetEvent();
// ...
for (UInt_t i = 0; i < GetNvar(); i++) {
   // ...
}
// ...
ForceNetworkCalculations();
// ...
if (fMulticlassReturnVal == NULL) fMulticlassReturnVal = new std::vector<Float_t>();
fMulticlassReturnVal->clear();

std::vector<Float_t> temp;
// ...
UInt_t nClasses = DataInfo().GetNClasses();
for (UInt_t icls = 0; icls < nClasses; icls++) {
   temp.push_back( GetOutputNeuron( icls )->GetActivationValue() );
}
// ...
for (UInt_t iClass = 0; iClass < nClasses; iClass++) {
   Double_t norm = 0.0;
   for (UInt_t j = 0; j < nClasses; j++) {
      if (iClass != j)
         norm += exp(temp[j]-temp[iClass]);
   }
   (*fMulticlassReturnVal).push_back(1.0/(1.0+norm));
}
// ...
return *fMulticlassReturnVal;
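// Note on the normalisation above: with norm = sum_{j != iClass} exp(temp[j] - temp[iClass]),
// the returned value 1/(1+norm) equals exp(temp[iClass]) / sum_j exp(temp[j]),
// i.e. the softmax of the raw output-neuron activations, computed from
// differences only so that the common scale of the activations cancels.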
// TMVA::MethodANNBase::AddWeightsXMLTo -- create XML description of ANN classifier
Int_t numLayers = fNetwork->GetEntriesFast();
// ...
for (Int_t i = 0; i < numLayers; i++) {
   // ...
   for (Int_t j = 0; j < numNeurons; j++) {
      // ...
      if (numSynapses == 0) continue;
      std::stringstream s("");
      // ...
      for (Int_t k = 0; k < numSynapses; k++) {
         // ...
         s << std::scientific << synapse->GetWeight() << " ";
      }
      // ...
   }
}

// also persist the inverse Hessian matrix, if available (filled when the regulator is used)
if (fInvHessian.GetNcols() > 0) {
   // ...
   Int_t nElements = fInvHessian.GetNoElements();
   Int_t nRows     = fInvHessian.GetNrows();
   Int_t nCols     = fInvHessian.GetNcols();
   // ...
   fInvHessian.GetMatrix2Array( elements );
   // ...
   // write the matrix row by row
   for (Int_t row = 0; row < nRows; ++row) {
      // ...
      std::stringstream s("");
      // ...
      for (Int_t col = 0; col < nCols; ++col) {
         s << std::scientific << (*(elements+index)) << " ";
         // ...
      }
      // ...
   }
}
// TMVA::MethodANNBase::ReadWeightsFromXML -- read MLP from xml weight file
std::vector<Int_t>* layout = new std::vector<Int_t>();

void* xmlLayout = NULL;
// ...
xmlLayout = wghtnode;
// ...
layout->resize( nLayers );
// ...
layout->at(index) = nNeurons;
// ...
BuildNetwork( layout, NULL, fromFile );

// weight files written before TMVA 4.2.1 with a tanh activation need the exact
// ("slow") tanh rather than the fast approximation, to reproduce their responses
if (GetTrainingTMVAVersionCode() < TMVA_VERSION(4,2,1) &&
    fActivation->GetExpression().Contains("tanh")) {
   // ...
}

// read the synapse weights of each neuron
std::stringstream s(content);
for (UInt_t iSyn = 0; iSyn < nSyn; iSyn++) {
   // ...
}
// ...

// read back the inverse Hessian matrix, if present
void* xmlInvHessian = NULL;
// ...
fUseRegulator = kTRUE;
// ...
fInvHessian.ResizeTo( nRows, nCols );
// ...
if (nElements > std::numeric_limits<int>::max()-100) {
   Log() << kFATAL << "you tried to read a hessian matrix with " << nElements
         << " elements, --> too large, guess s.th. went wrong reading from the weight file"
         << Endl;
}
// ...
elements = new Double_t[nElements+10];
// ...
// read the matrix back row by row
for (Int_t row = 0; row < nRows; ++row) {
   // ...
   std::stringstream s(content);
   for (Int_t iCol = 0; iCol < nCols; iCol++) {
      s >> (*(elements+index));
      // ...
   }
}
// ...
fInvHessian.SetMatrixArray( elements );
// TMVA::MethodANNBase::ReadWeightsFromStream -- destroy/clear the network, then
// read it back in from the (plain-text) weights file
std::vector<Double_t>* weights = new std::vector<Double_t>();
// ...
while (istr >> dummy >> weight) weights->push_back(weight);
// ...
ForceWeights(weights);
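// Note: the plain-text stream format is one "<label> <weight>" pair per synapse;
// all weights are collected first and then forced onto the synapses in the fixed
// order in which BuildLayers() created them.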
// TMVA::MethodANNBase::CreateRanking -- compute ranking of input variables by
// summing function of weights
for (UInt_t ivar = 0; ivar < GetNvar(); ivar++) {
   // ...
   neuron = GetInputNeuron(ivar);
   // ...
   varName = GetInputVar(ivar);

   // figure out the average value of variable ivar
   // ...
   Statistics( TMVA::Types::kTraining, varName,
               meanS, meanB, rmsS, rmsB, xmin, xmax );
   // ...
   if (avgVal < meanrms) avgVal = meanrms;
   if (IsNormalised()) avgVal = 0.5*(1 + gTools().NormVariable( avgVal, GetXmin( ivar ), GetXmax( ivar )));

   for (Int_t j = 0; j < numSynapses; j++) {
      // ...
   }
   importance *= avgVal * avgVal;

   fRanking->AddRank( Rank( varName, importance ) );
}
// create weight monitoring histograms
void TMVA::MethodANNBase::CreateWeightMonitoringHists( const TString& bulkname,
                                                       std::vector<TH1*>* hv ) const
// ...
Int_t numLayers = fNetwork->GetEntriesFast();
// ...
for (Int_t i = 0; i < numLayers-1; i++) {
   // ...
   hist = new TH2F( /* name, title, */
                    numNeurons1, 0, numNeurons1, numNeurons2, 0, numNeurons2 );
   // ...
   for (Int_t j = 0; j < numNeurons1; j++) {
      // ...
      for (Int_t k = 0; k < numSynapses; k++) {
         // ...
      }
   }
   // ...
   if (hv) hv->push_back( hist );
}
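// Each booked TH2F maps the weight matrix between two consecutive layers: one
// bin per (neuron j of layer i, neuron k of layer i+1) pair, hence the
// numNeurons1 x numNeurons2 binning above, with the synapse weight as content.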
// TMVA::MethodANNBase::WriteMonitoringHistosToFile -- write histograms to file
PrintMessage( Form("Write special histos to file: %s", BaseDir()->GetPath()), kTRUE );
// ...
if (fEstimatorHistTrain) fEstimatorHistTrain->Write();
if (fEstimatorHistTest ) fEstimatorHistTest ->Write();
// ...
CreateWeightMonitoringHists( "weights_hist" );
// ...
#if __cplusplus > 199711L
static std::atomic<int> epochMonitoringDirectoryNumber{0};
#else
static int epochMonitoringDirectoryNumber = 0;
#endif
int epochVal = epochMonitoringDirectoryNumber++;
// ...
if (epochVal == 0)
   epochdir = BaseDir()->mkdir( "EpochMonitoring" );
else
   epochdir = BaseDir()->mkdir( Form("EpochMonitoring_%4d", epochVal) );

for (std::vector<TH1*>::const_iterator it = fEpochMonHistS.begin(); it != fEpochMonHistS.end(); ++it) {
   (*it)->Write();
   // ...
}
for (std::vector<TH1*>::const_iterator it = fEpochMonHistB.begin(); it != fEpochMonHistB.end(); ++it) {
   (*it)->Write();
   // ...
}
for (std::vector<TH1*>::const_iterator it = fEpochMonHistW.begin(); it != fEpochMonHistW.end(); ++it) {
   (*it)->Write();
   // ...
}
// TMVA::MethodANNBase::MakeClassSpecific -- write specific classifier response:
// generate a standalone C++ class that reproduces the trained network
Int_t numLayers = fNetwork->GetEntries();
// ...
fout << "   double ActivationFnc(double x) const;" << std::endl;
fout << "   double OutputActivationFnc(double x) const;" << std::endl;
// ...
int numNodesFrom = -1;
for (Int_t lIdx = 0; lIdx < numLayers; lIdx++) {
   int numNodesTo = ((TObjArray*)fNetwork->At(lIdx))->GetEntries();
   if (numNodesFrom < 0) { numNodesFrom = numNodesTo; continue; }
   fout << "   double fWeightMatrix" << lIdx-1 << "to" << lIdx
        << "[" << numNodesTo << "][" << numNodesFrom << "];";
   fout << "   // weight matrix from layer " << lIdx-1 << " to " << lIdx << std::endl;
   numNodesFrom = numNodesTo;
}
// ...
fout << "};" << std::endl;
// generate the Initialize() method that fills the weight matrices
fout << "inline void " << className << "::Initialize()" << std::endl;
fout << "{" << std::endl;
fout << "   // build network structure" << std::endl;
// ...
for (Int_t i = 0; i < numLayers-1; i++) {
   fout << "   // weight matrix from layer " << i << " to " << i+1 << std::endl;
   // ...
   for (Int_t j = 0; j < numNeurons; j++) {
      // ...
      for (Int_t k = 0; k < numSynapses; k++) {
         // ...
         fout << "   fWeightMatrix" << i << "to" << i+1 << "[" << k << "][" << j << "] = "
              << synapse->GetWeight() << ";" << std::endl;
      }
   }
}
// ...
fout << "}" << std::endl;
// generate the GetMvaValue__() method that evaluates the network
fout << "inline double " << className << "::GetMvaValue__( const std::vector<double>& inputValues ) const" << std::endl;
fout << "{" << std::endl;
fout << "   if (inputValues.size() != (unsigned int)"
     << ((TObjArray *)fNetwork->At(0))->GetEntries() - 1 << ") {" << std::endl;
fout << "      std::cout << \"Input vector needs to be of size \" << "
     << ((TObjArray *)fNetwork->At(0))->GetEntries() - 1 << " << std::endl;" << std::endl;
fout << "      return 0;" << std::endl;
fout << "   }" << std::endl;
// ...
for (Int_t lIdx = 1; lIdx < numLayers; lIdx++) {
   // ...
   fout << "   std::array<double, " << numNodes << "> fWeights" << lIdx << " {{}};" << std::endl;
}
for (Int_t lIdx = 1; lIdx < numLayers - 1; lIdx++) {
   fout << "   fWeights" << lIdx << ".back() = 1.;" << std::endl;   // bias node
}
for (Int_t i = 0; i < numLayers - 1; i++) {
   fout << "   // layer " << i << " to " << i + 1 << std::endl;
   if (i + 1 == numLayers - 1) {
      fout << "   for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries()
           << "; o++) {" << std::endl;
   }
   else {
      fout << "   for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries() - 1
           << "; o++) {" << std::endl;
   }
   if (i == 0) {   // first layer reads from the input vector
      fout << "      std::array<double, " << ((TObjArray *)fNetwork->At(i))->GetEntries()
           << "> buffer; // no need to initialise" << std::endl;
      fout << "      for (int i = 0; i<" << ((TObjArray *)fNetwork->At(i))->GetEntries()
           << " - 1; i++) {" << std::endl;
      fout << "         buffer[i] = fWeightMatrix" << i << "to" << i + 1 << "[o][i] * inputValues[i];" << std::endl;
      fout << "      } // loop over i" << std::endl;
      fout << "      buffer.back() = fWeightMatrix" << i << "to" << i + 1 << "[o]["
           << ((TObjArray *)fNetwork->At(i))->GetEntries() - 1 << "];" << std::endl;
   }
   else {          // deeper layers read the previous layer's activations
      fout << "      std::array<double, " << ((TObjArray *)fNetwork->At(i))->GetEntries()
           << "> buffer; // no need to initialise" << std::endl;
      fout << "      for (int i=0; i<" << ((TObjArray *)fNetwork->At(i))->GetEntries()
           << "; i++) {" << std::endl;
      fout << "         buffer[i] = fWeightMatrix" << i << "to" << i + 1 << "[o][i] * fWeights" << i << "[i];" << std::endl;
      fout << "      } // loop over i" << std::endl;
   }
   fout << "      for (int i=0; i<" << ((TObjArray *)fNetwork->At(i))->GetEntries()
        << "; i++) {" << std::endl;
   if (fNeuronInputType == "sum") {
      fout << "         fWeights" << i + 1 << "[o] += buffer[i];" << std::endl;
   }
   else if (fNeuronInputType == "sqsum") {
      fout << "         fWeights" << i + 1 << "[o] += buffer[i]*buffer[i];" << std::endl;
   }
   else {   // remaining neuron input type: absolute sum
      fout << "         fWeights" << i + 1 << "[o] += fabs(buffer[i]);" << std::endl;
   }
   fout << "      } // loop over i" << std::endl;
   fout << "   } // loop over o" << std::endl;

   if (i + 1 == numLayers - 1) {
      fout << "   for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries()
           << "; o++) {" << std::endl;
   }
   else {
      fout << "   for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries() - 1
           << "; o++) {" << std::endl;
   }
   if (i+1 != numLayers-1)   // hidden layers
      fout << "      fWeights" << i + 1 << "[o] = ActivationFnc(fWeights" << i + 1 << "[o]);" << std::endl;
   else                      // last layer uses the output activation function
      fout << "      fWeights" << i + 1 << "[o] = OutputActivationFnc(fWeights" << i + 1 << "[o]);" << std::endl;
   fout << "   } // loop over o" << std::endl;
}
// ...
fout << "   return fWeights" << numLayers - 1 << "[0];" << std::endl;
fout << "}" << std::endl;
// write the activation function implementations
TString fncName = className + "::ActivationFnc";
fActivation->MakeFunction(fout, fncName);
fncName = className + "::OutputActivationFnc";
fOutput->MakeFunction(fout, fncName);
// ...
fout << "// Clean up" << std::endl;
fout << "inline void " << className << "::Clear()" << std::endl;
fout << "{" << std::endl;
fout << "}" << std::endl;