43 #if __cplusplus > 199711L
49 #include "TDirectory.h"
78 TMVA::MethodANNBase::MethodANNBase( const
TString& jobName,
79 Types::EMVA methodType,
84 :
TMVA::MethodBase( jobName, methodType, methodTitle, theData, theOption, theTargetDir )
101 :
TMVA::
MethodBase( methodType, theData, theWeightFile, theTargetDir )
123 DeclareOptionRef( fNcycles = 500,
"NCycles",
"Number of training cycles" );
124 DeclareOptionRef( fLayerSpec =
"N,N-1",
"HiddenLayers",
"Specification of hidden layer architecture" );
125 DeclareOptionRef( fNeuronType =
"sigmoid",
"NeuronType",
"Neuron activation function type" );
126 DeclareOptionRef( fRandomSeed = 1,
"RandomSeed",
"Random seed for initial synapse weights (0 means unique seed for each run; default value '1')");
128 DeclareOptionRef(fEstimatorS=
"MSE",
"EstimatorType",
129 "MSE (Mean Square Estimator) for Gaussian Likelihood or CE(Cross-Entropy) for Bernoulli Likelihood" );
136 Int_t nTypes = names->size();
137 for (
Int_t i = 0; i < nTypes; i++)
138 AddPreDefVal(names->at(i));
141 DeclareOptionRef(fNeuronInputType=
"sum",
"NeuronInputType",
"Neuron input function type");
144 nTypes = names->size();
145 for (
Int_t i = 0; i < nTypes; i++) AddPreDefVal(names->at(i));
155 if ( DoRegression() || DoMulticlass()) fEstimatorS =
"MSE";
156 else fEstimatorS =
"CE" ;
157 if (fEstimatorS ==
"MSE" ) fEstimator = kMSE;
158 else if (fEstimatorS ==
"CE") fEstimator = kCE;
159 std::vector<Int_t>* layout = ParseLayoutString(fLayerSpec);
160 BuildNetwork(layout);
170 std::vector<Int_t>* layout =
new std::vector<Int_t>();
171 layout->push_back((
Int_t)GetNvar());
172 while(layerSpec.
Length()>0) {
174 if (layerSpec.
First(
',')<0) {
179 sToAdd = layerSpec(0,layerSpec.
First(
','));
180 layerSpec = layerSpec(layerSpec.First(
',')+1,layerSpec.Length());
184 nNodes += atoi(sToAdd);
185 layout->push_back(nNodes);
188 layout->push_back( DataInfo().GetNTargets() );
189 else if( DoMulticlass() )
190 layout->push_back( DataInfo().GetNClasses() );
192 layout->push_back(1);
195 for( std::vector<Int_t>::iterator it = layout->begin(); it != layout->end(); it++ ){
212 fInputCalculator =
NULL;
214 fEstimatorHistTrain =
NULL;
215 fEstimatorHistTest =
NULL;
218 fEpochMonHistS.clear();
219 fEpochMonHistB.clear();
220 fEpochMonHistW.clear();
224 fOutputNeurons.clear();
244 if (fNetwork !=
NULL) {
247 for (
Int_t i = 0; i < numLayers; i++) {
249 DeleteNetworkLayer(layer);
254 if (frgen !=
NULL)
delete frgen;
255 if (fActivation !=
NULL)
delete fActivation;
256 if (fOutput !=
NULL)
delete fOutput;
257 if (fIdentity !=
NULL)
delete fIdentity;
258 if (fInputCalculator !=
NULL)
delete fInputCalculator;
259 if (fSynapses !=
NULL)
delete fSynapses;
266 fInputCalculator =
NULL;
277 for (
Int_t i = 0; i < numNeurons; i++) {
291 if (fEstimatorS ==
"MSE") fEstimator = kMSE;
292 else if (fEstimatorS ==
"CE") fEstimator = kCE;
293 else Log()<<
kWARNING<<
"fEstimator="<<fEstimator<<
"\tfEstimatorS="<<fEstimatorS<<
Endl;
294 if (fEstimator!=kMSE && fEstimator!=kCE)
Log()<<
kWARNING<<
"Estimator type unspecified \t"<<
Endl;
311 fRegulatorIdx.clear();
313 BuildLayers( layout, fromFile );
316 fInputLayer = (
TObjArray*)fNetwork->At(0);
318 fOutputNeurons.clear();
320 fOutputNeurons.push_back( (
TNeuron*)outputLayer->
At(i) );
323 if (weights ==
NULL) InitWeights();
324 else ForceWeights(weights);
338 Int_t numLayers = layout->size();
340 for (
Int_t i = 0; i < numLayers; i++) {
342 BuildLayer(layout->at(i), curLayer, prevLayer, i, numLayers, fromFile);
343 prevLayer = curLayer;
344 fNetwork->
Add(curLayer);
348 for (
Int_t i = 0; i < numLayers; i++) {
351 if (i!=0 && i!=numLayers-1) fRegulators.push_back(0.);
352 for (
Int_t j = 0; j < numNeurons; j++) {
353 if (i==0) fRegulators.push_back(0.);
356 for (
Int_t k = 0; k < numSynapses; k++) {
358 fSynapses->Add(synapse);
359 fRegulatorIdx.push_back(fRegulators.size()-1);
374 for (
Int_t j = 0; j < numNeurons; j++) {
375 if (fromFile && (layerIndex != numLayers-1) && (j==numNeurons-1)){
380 curLayer->
Add(neuron);
387 if (layerIndex == 0) {
393 if (layerIndex == numLayers-1) {
399 AddPreLinks(neuron, prevLayer);
402 curLayer->
Add(neuron);
408 if (layerIndex != numLayers-1) {
413 curLayer->
Add(neuron);
427 for (
Int_t i = 0; i < numNeurons; i++) {
442 PrintMessage(
"Initializing weights");
445 Int_t numSynapses = fSynapses->GetEntriesFast();
447 for (
Int_t i = 0; i < numSynapses; i++) {
448 synapse = (
TSynapse*)fSynapses->At(i);
449 synapse->
SetWeight(4.0*frgen->Rndm() - 2.0);
458 PrintMessage(
"Forcing weights");
460 Int_t numSynapses = fSynapses->GetEntriesFast();
462 for (
Int_t i = 0; i < numSynapses; i++) {
463 synapse = (
TSynapse*)fSynapses->At(i);
478 for (
UInt_t j = 0; j < GetNvar(); j++) {
482 neuron = GetInputNeuron(j);
494 Int_t numLayers = fNetwork->GetEntriesFast();
497 for (
Int_t i = 0; i < numLayers; i++) {
501 for (
Int_t j = 0; j < numNeurons; j++) {
524 Log() <<
kINFO <<
"***Type anything to continue (q to quit): ";
525 std::getline(std::cin, dummy);
526 if (dummy ==
"q" || dummy ==
"Q") {
527 PrintMessage(
"quit" );
538 if (!
Debug())
return;
541 PrintMessage(
"Printing network " );
542 Log() <<
kINFO <<
"-------------------------------------------------------------------" <<
Endl;
547 for (
Int_t i = 0; i < numLayers; i++) {
552 Log() <<
kINFO <<
"Layer #" << i <<
" (" << numNeurons <<
" neurons):" <<
Endl;
553 PrintLayer( curLayer );
565 for (
Int_t j = 0; j < numNeurons; j++) {
569 PrintNeuron( neuron );
579 <<
"\t\tValue:\t" << neuron->
GetValue()
582 Log() <<
kINFO <<
"\t\tActivationEquation:\t";
599 const Event * ev = GetEvent();
601 for (
UInt_t i = 0; i < GetNvar(); i++) {
605 ForceNetworkCalculations();
612 NoErrorCalc(err, errUpper);
626 const Event * ev = GetEvent();
628 for (
UInt_t i = 0; i < GetNvar(); i++) {
632 ForceNetworkCalculations();
637 if (fRegressionReturnVal ==
NULL) fRegressionReturnVal =
new std::vector<Float_t>();
638 fRegressionReturnVal->clear();
642 for (
UInt_t itgt = 0; itgt < ntgts; itgt++) {
646 const Event* evT2 = GetTransformationHandler().InverseTransform( evT );
647 for (
UInt_t itgt = 0; itgt < ntgts; itgt++) {
648 fRegressionReturnVal->push_back( evT2->
GetTarget(itgt) );
653 return *fRegressionReturnVal;
673 const Event * ev = GetEvent();
675 for (
UInt_t i = 0; i < GetNvar(); i++) {
679 ForceNetworkCalculations();
683 if (fMulticlassReturnVal ==
NULL) fMulticlassReturnVal =
new std::vector<Float_t>();
684 fMulticlassReturnVal->clear();
685 std::vector<Float_t> temp;
687 UInt_t nClasses = DataInfo().GetNClasses();
688 for (
UInt_t icls = 0; icls < nClasses; icls++) {
689 temp.push_back(GetOutputNeuron( icls )->GetActivationValue() );
692 for(
UInt_t iClass=0; iClass<nClasses; iClass++){
694 for(
UInt_t j=0;j<nClasses;j++){
696 norm+=
exp(temp[j]-temp[iClass]);
698 (*fMulticlassReturnVal).push_back(1.0/(1.0+norm));
703 return *fMulticlassReturnVal;
712 Int_t numLayers = fNetwork->GetEntriesFast();
717 for (
Int_t i = 0; i < numLayers; i++) {
723 for (
Int_t j = 0; j < numNeurons; j++) {
728 if(numSynapses==0)
continue;
729 std::stringstream s(
"");
731 for (
Int_t k = 0; k < numSynapses; k++) {
733 s << std::scientific << synapse->
GetWeight() <<
" ";
740 if( fInvHessian.GetNcols()>0 ){
744 Int_t nElements = fInvHessian.GetNoElements();
745 Int_t nRows = fInvHessian.GetNrows();
746 Int_t nCols = fInvHessian.GetNcols();
753 fInvHessian.GetMatrix2Array( elements );
757 for(
Int_t row = 0; row < nRows; ++row ){
762 std::stringstream s(
"");
764 for(
Int_t col = 0; col < nCols; ++col ){
765 s << std::scientific << (*(elements+index)) <<
" ";
782 std::vector<Int_t>* layout =
new std::vector<Int_t>();
784 void* xmlLayout =
NULL;
787 xmlLayout = wghtnode;
791 layout->resize( nLayers );
799 layout->at(index) = nNeurons;
803 BuildNetwork( layout,
NULL, fromFile );
806 if (GetTrainingTMVAVersionCode() <
TMVA_VERSION(4,2,1) && fActivation->GetExpression().Contains(
"tanh")){
828 std::stringstream s(content);
829 for (
UInt_t iSyn = 0; iSyn<nSyn; iSyn++) {
846 void* xmlInvHessian =
NULL;
852 fUseRegulator =
kTRUE;
862 fInvHessian.ResizeTo( nRows, nCols );
867 Log() <<
kFATAL <<
"you tried to read a hessian matrix with " << nElements <<
" elements, --> too large, guess s.th. went wrong reading from the weight file" <<
Endl;
870 elements =
new Double_t[nElements+10];
883 std::stringstream s(content);
884 for (
Int_t iCol = 0; iCol<nCols; iCol++) {
885 s >> (*(elements+index));
892 fInvHessian.SetMatrixArray( elements );
909 std::vector<Double_t>* weights =
new std::vector<Double_t>();
911 while (istr>> dummy >> weight) weights->push_back(weight);
913 ForceWeights(weights);
925 fRanking =
new Ranking( GetName(),
"Importance" );
932 for (
UInt_t ivar = 0; ivar < GetNvar(); ivar++) {
934 neuron = GetInputNeuron(ivar);
937 varName = GetInputVar(ivar);
942 meanS, meanB, rmsS, rmsB, xmin, xmax );
946 if (avgVal<meanrms) avgVal = meanrms;
947 if (IsNormalised()) avgVal = 0.5*(1 +
gTools().
NormVariable( avgVal, GetXmin( ivar ), GetXmax( ivar )));
949 for (
Int_t j = 0; j < numSynapses; j++) {
954 importance *= avgVal * avgVal;
956 fRanking->AddRank(
Rank( varName, importance ) );
965 std::vector<TH1*>* hv )
const
968 Int_t numLayers = fNetwork->GetEntriesFast();
970 for (
Int_t i = 0; i < numLayers-1; i++) {
978 hist =
new TH2F(name +
"", name +
"",
979 numNeurons1, 0, numNeurons1, numNeurons2, 0, numNeurons2);
981 for (
Int_t j = 0; j < numNeurons1; j++) {
986 for (
Int_t k = 0; k < numSynapses; k++) {
989 hist->SetBinContent(j+1, k+1, synapse->
GetWeight());
994 if (hv) hv->push_back( hist );
1007 PrintMessage(
Form(
"Write special histos to file: %s", BaseDir()->GetPath()),
kTRUE);
1009 if (fEstimatorHistTrain) fEstimatorHistTrain->Write();
1010 if (fEstimatorHistTest ) fEstimatorHistTest ->Write();
1013 CreateWeightMonitoringHists(
"weights_hist" );
1016 #if __cplusplus > 199711L
1017 static std::atomic<int> epochMonitoringDirectoryNumber{0};
1019 static int epochMonitoringDirectoryNumber = 0;
1021 int epochVal = epochMonitoringDirectoryNumber++;
1024 epochdir = BaseDir()->
mkdir(
"EpochMonitoring" );
1026 epochdir = BaseDir()->
mkdir(
Form(
"EpochMonitoring_%4d",epochVal) );
1029 for (std::vector<TH1*>::const_iterator it = fEpochMonHistS.begin(); it != fEpochMonHistS.end(); it++) {
1033 for (std::vector<TH1*>::const_iterator it = fEpochMonHistB.begin(); it != fEpochMonHistB.end(); it++) {
1037 for (std::vector<TH1*>::const_iterator it = fEpochMonHistW.begin(); it != fEpochMonHistW.end(); it++) {
1049 Int_t numLayers = fNetwork->GetEntries();
1052 fout <<
" double ActivationFnc(double x) const;" << std::endl;
1053 fout <<
" double OutputActivationFnc(double x) const;" << std::endl;
1055 fout <<
" int fLayers;" << std::endl;
1056 fout <<
" int fLayerSize["<<numLayers<<
"];" << std::endl;
1057 int numNodesFrom = -1;
1058 for (
Int_t lIdx = 0; lIdx < numLayers; lIdx++) {
1059 int numNodesTo = ((
TObjArray*)fNetwork->At(lIdx))->GetEntries();
1060 if (numNodesFrom<0) { numNodesFrom=numNodesTo;
continue; }
1061 fout <<
" double fWeightMatrix" << lIdx-1 <<
"to" << lIdx <<
"[" << numNodesTo <<
"][" << numNodesFrom <<
"];";
1062 fout <<
" // weight matrix from layer " << lIdx-1 <<
" to " << lIdx << std::endl;
1063 numNodesFrom = numNodesTo;
1066 fout <<
" double * fWeights["<<numLayers<<
"];" << std::endl;
1067 fout <<
"};" << std::endl;
1071 fout <<
"inline void " << className <<
"::Initialize()" << std::endl;
1072 fout <<
"{" << std::endl;
1073 fout <<
" // build network structure" << std::endl;
1074 fout <<
" fLayers = " << numLayers <<
";" << std::endl;
1075 for (
Int_t lIdx = 0; lIdx < numLayers; lIdx++) {
1078 fout <<
" fLayerSize[" << lIdx <<
"] = " << numNodes <<
"; fWeights["<<lIdx<<
"] = new double["<<numNodes<<
"]; " << std::endl;
1081 for (
Int_t i = 0; i < numLayers-1; i++) {
1082 fout <<
" // weight matrix from layer " << i <<
" to " << i+1 << std::endl;
1085 for (
Int_t j = 0; j < numNeurons; j++) {
1088 for (
Int_t k = 0; k < numSynapses; k++) {
1090 fout <<
" fWeightMatrix" << i <<
"to" << i+1 <<
"[" << k <<
"][" << j <<
"] = " << synapse->
GetWeight() <<
";" << std::endl;
1095 fout <<
"}" << std::endl;
1099 fout <<
"inline double " << className <<
"::GetMvaValue__( const std::vector<double>& inputValues ) const" << std::endl;
1100 fout <<
"{" << std::endl;
1101 fout <<
" if (inputValues.size() != (unsigned int)fLayerSize[0]-1) {" << std::endl;
1102 fout <<
" std::cout << \"Input vector needs to be of size \" << fLayerSize[0]-1 << std::endl;" << std::endl;
1103 fout <<
" return 0;" << std::endl;
1104 fout <<
" }" << std::endl;
1106 fout <<
" for (int l=0; l<fLayers; l++)" << std::endl;
1107 fout <<
" for (int i=0; i<fLayerSize[l]; i++) fWeights[l][i]=0;" << std::endl;
1109 fout <<
" for (int l=0; l<fLayers-1; l++)" << std::endl;
1110 fout <<
" fWeights[l][fLayerSize[l]-1]=1;" << std::endl;
1112 fout <<
" for (int i=0; i<fLayerSize[0]-1; i++)" << std::endl;
1113 fout <<
" fWeights[0][i]=inputValues[i];" << std::endl;
1115 for (
Int_t i = 0; i < numLayers-1; i++) {
1116 fout <<
" // layer " << i <<
" to " << i+1 << std::endl;
1117 if (i+1 == numLayers-1) {
1118 fout <<
" for (int o=0; o<fLayerSize[" << i+1 <<
"]; o++) {" << std::endl;
1121 fout <<
" for (int o=0; o<fLayerSize[" << i+1 <<
"]-1; o++) {" << std::endl;
1123 fout <<
" for (int i=0; i<fLayerSize[" << i <<
"]; i++) {" << std::endl;
1124 fout <<
" double inputVal = fWeightMatrix" << i <<
"to" << i+1 <<
"[o][i] * fWeights[" << i <<
"][i];" << std::endl;
1126 if ( fNeuronInputType ==
"sum") {
1127 fout <<
" fWeights[" << i+1 <<
"][o] += inputVal;" << std::endl;
1129 else if ( fNeuronInputType ==
"sqsum") {
1130 fout <<
" fWeights[" << i+1 <<
"][o] += inputVal*inputVal;" << std::endl;
1133 fout <<
" fWeights[" << i+1 <<
"][o] += fabs(inputVal);" << std::endl;
1135 fout <<
" }" << std::endl;
1136 if (i+1 != numLayers-1)
1137 fout <<
" fWeights[" << i+1 <<
"][o] = ActivationFnc(fWeights[" << i+1 <<
"][o]);" << std::endl;
1138 else fout <<
" fWeights[" << i+1 <<
"][o] = OutputActivationFnc(fWeights[" << i+1 <<
"][o]);" << std::endl;
1139 fout <<
" }" << std::endl;
1142 fout <<
" return fWeights[" << numLayers-1 <<
"][0];" << std::endl;
1143 fout <<
"}" << std::endl;
1146 TString fncName = className+
"::ActivationFnc";
1147 fActivation->MakeFunction(fout, fncName);
1148 fncName = className+
"::OutputActivationFnc";
1149 fOutput->MakeFunction(fout, fncName);
1151 fout <<
" " << std::endl;
1152 fout <<
"// Clean up" << std::endl;
1153 fout <<
"inline void " << className <<
"::Clear() " << std::endl;
1154 fout <<
"{" << std::endl;
1155 fout <<
" // clean up the arrays" << std::endl;
1156 fout <<
" for (int lIdx = 0; lIdx < "<<numLayers<<
"; lIdx++) {" << std::endl;
1157 fout <<
" delete[] fWeights[lIdx];" << std::endl;
1158 fout <<
" }" << std::endl;
1159 fout <<
"}" << std::endl;
void WaitForKeyboard()
wait for keyboard input, for debugging
Double_t GetDelta() const
virtual Double_t GetMvaValue(Double_t *err=0, Double_t *errUpper=0)
get the mva value generated by the NN
void BuildLayer(Int_t numNeurons, TObjArray *curLayer, TObjArray *prevLayer, Int_t layerIndex, Int_t numLayers, Bool_t from_file=false)
build a single layer with neurons and synapses connecting this layer to the previous layer ...
void AddWeightsXMLTo(void *parent) const
create XML description of ANN classifier
Random number generator class based on M. Matsumoto and T. Nishimura's Mersenne Twister algorithm.
MsgLogger & Endl(MsgLogger &ml)
void ForceNetworkCalculations()
calculate input values to each neuron
void ForceValue(Double_t value)
force the value, typically for input and bias neurons
void DeleteNetwork()
delete/clear network
void AddPreLinks(TNeuron *neuron, TObjArray *prevLayer)
add synapses connecting a neuron to its preceding layer
Double_t GetValue() const
const Ranking * CreateRanking()
compute ranking of input variables by summing function of weights
void SetPostNeuron(TNeuron *post)
virtual void ReadWeightsFromStream(std::istream &istr)
destroy/clear the network then read it back in from the weights file
XMLNodePointer_t GetNext(XMLNodePointer_t xmlnode, Bool_t realnode=kTRUE)
return the node following xmlnode; if realnode==kTRUE, any special nodes in between will be skipped
virtual TDirectory * mkdir(const char *name, const char *title="")
Create a sub-directory and return a pointer to the created directory.
void PrintPostLinks() const
Int_t GetEntriesFast() const
virtual void DeclareOptions()
define the options (their key words) that can be set in the option string here the options valid for ...
void SetActivationEqn(TActivation *activation)
set activation equation
Bool_t BeginsWith(const char *s, ECaseCompare cmp=kExact) const
Float_t GetValue(UInt_t ivar) const
return value of i'th variable
const char * GetNodeContent(XMLNodePointer_t xmlnode)
get contents (if any) of xml node
void ForceWeights(std::vector< Double_t > *weights)
force the synapse weights
const char * Data() const
void PrintLayer(TObjArray *layer) const
print a single layer, for debugging
void PrintMessage(TString message, Bool_t force=kFALSE) const
print messages, turn off printing by setting verbose and debug flag appropriately ...
virtual void ProcessOptions()
do nothing specific at this moment
virtual void BuildNetwork(std::vector< Int_t > *layout, std::vector< Double_t > *weights=NULL, Bool_t fromFile=kFALSE)
build network given a layout (number of neurons in each layer) and optional weights array ...
TActivation * CreateActivation(EActivationType type) const
ClassImp(TMVA::MethodANNBase) TMVA
standard constructor Note: Right now it is an option to choose the neuron input function, but only the input function "sum" leads to weight convergence – otherwise the weights go to nan and lead to an ABORT.
void ReadWeightsFromXML(void *wghtnode)
read MLP from xml weight file
Double_t GetActivationValue() const
Int_t NumPostLinks() const
Bool_t AddRawLine(XMLNodePointer_t parent, const char *line)
Add a raw line into the XML file. The line should have correct XML syntax so that it can later be decoded by the XML parser.
virtual void WriteMonitoringHistosToFile() const
write histograms to file
2-D histogram with a float per channel (see TH1 documentation)
Bool_t Debug() const
who the hell makes such strange Debug flags that even use "global pointers"..
void PrintActivationEqn()
print activation equation, for debugging
char * Form(const char *fmt,...)
void SetTarget(UInt_t itgt, Float_t value)
set the target value (dimension itgt) to value
void PrintPreLinks() const
void CreateWeightMonitoringHists(const TString &bulkname, std::vector< TH1 * > *hv=0) const
void Debug(Int_t level, const char *va_(fmt),...)
void InitWeights()
initialize the synapse weights randomly
TString & Remove(Ssiz_t pos)
#define TMVA_VERSION(a, b, c)
std::vector< Int_t > * ParseLayoutString(TString layerSpec)
parse layout specification string and return a vector, each entry containing the number of neurons to go in each successive layer
XMLAttrPointer_t NewAttr(XMLNodePointer_t xmlnode, XMLNsPointer_t, const char *name, const char *value)
creates new attribute for xmlnode, namespaces are not supported for attributes
virtual const std::vector< Float_t > & GetMulticlassValues()
get the multiclass classification values generated by the NN
Describe directory structure in memory.
void ForceNetworkInputs(const Event *ev, Int_t ignoreIndex=-1)
force the input values of the input neurons force the value for each input neuron ...
static RooMathCoreReg dummy
void CalculateValue()
calculate neuron input
std::vector< TString > * GetAllActivationNames() const
Int_t GetEntries() const
Return the number of objects in array (i.e. the number of non-empty slots).
RooCmdArg Verbose(Bool_t flag=kTRUE)
virtual ~MethodANNBase()
destructor
static Vc_ALWAYS_INLINE int_v max(const int_v &x, const int_v &y)
void SetPreNeuron(TNeuron *pre)
virtual void PrintNetwork() const
print network representation, for debugging
void SetWeight(Double_t weight)
set synapse weight
Float_t GetTarget(UInt_t itgt) const
void CalculateActivationValue()
calculate neuron activation/output
Abstract ClassifierFactory template that handles arbitrary types.
void AddPostLink(TSynapse *post)
add synapse as a post-link to this neuron
MethodANNBase(const TString &jobName, Types::EMVA methodType, const TString &methodTitle, DataSetInfo &theData, const TString &theOption, TDirectory *theTargetDir)
virtual Bool_t cd(const char *path=0)
Change current directory to "this" directory.
XMLNodePointer_t GetChild(XMLNodePointer_t xmlnode, Bool_t realnode=kTRUE)
returns first child of xml node
XMLNodePointer_t NewChild(XMLNodePointer_t parent, XMLNsPointer_t ns, const char *name, const char *content=0)
create new child element for parent node
virtual const std::vector< Float_t > & GetRegressionValues()
get the regression value generated by the NN
TSynapse * PostLinkAt(Int_t index) const
Int_t NumPreLinks() const
TObject * At(Int_t idx) const
void AddPreLink(TSynapse *pre)
add synapse as a pre-link to this neuron
void DeletePreLinks()
delete all pre-links
void DeleteNetworkLayer(TObjArray *&layer)
delete a network layer
void BuildLayers(std::vector< Int_t > *layout, Bool_t from_file=false)
build the network layers
double norm(double *x, double *p)
virtual void MakeClassSpecific(std::ostream &, const TString &) const
write specific classifier response
void SetInputCalculator(TNeuronInput *calculator)
set input calculator
Ssiz_t First(char c) const
Find first occurrence of a character c.
void PrintNeuron(TNeuron *neuron) const
print a neuron, for debugging
void InitANNBase()
initialize ANNBase object