// MethodANNBase constructor: the base-class initializer forwards job name,
// method type/title, data set and option string
#if __cplusplus > 199711L
// ...
: TMVA::MethodBase( jobName, methodType, methodTitle, theData, theOption )
// DeclareOptions(): options common to all ANN methods
DeclareOptionRef( fRandomSeed = 1, "RandomSeed",
                  "Random seed for initial synapse weights (0 means unique seed for each run; default value '1')" );
// ...
                  "MSE (Mean Square Estimator) for Gaussian Likelihood or CE(Cross-Entropy) for Bernoulli Likelihood" );
// register the allowed activation-function and neuron-input-type names
Int_t nTypes = names->size();
for (Int_t i = 0; i < nTypes; i++)
// ...
nTypes = names->size();
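The elided lines around this fragment register every known activation name as an allowed value of the NeuronType option. A hedged reconstruction of that pattern (fNeuronType and the "sigmoid" default are assumptions; the calls mirror the reference entries listed further below):

// Hedged reconstruction of the elided option registration; not verbatim source.
DeclareOptionRef( fNeuronType = "sigmoid", "NeuronType",
                  "Neuron activation function type" );
TActivationChooser aChooser;
std::vector<TString>* names = aChooser.GetAllActivationNames();
Int_t nTypes = names->size();
for (Int_t i = 0; i < nTypes; i++)
   AddPreDefVal( names->at(i) );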
// ParseLayoutString(): convert a layout specification such as "N,N-1" into a
// vector holding the number of neurons per layer
std::vector<Int_t>* layout = new std::vector<Int_t>();
// ...
while (layerSpec.Length() > 0) {
   // ...
   if (layerSpec.First(',') < 0) {   // last token: no comma left
   // ...
   sToAdd = layerSpec(0, layerSpec.First(','));
   layerSpec = layerSpec(layerSpec.First(',') + 1, layerSpec.Length());
   // ...
   nNodes += atoi(sToAdd);
   layout->push_back(nNodes);
// ...
layout->push_back( DataInfo().GetNClasses() );   // multiclass: one output node per class
// ...
layout->push_back(1);                            // otherwise a single output node
// ...
for (std::vector<Int_t>::iterator it = layout->begin(); it != layout->end(); it++) {
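For illustration, a minimal standalone sketch of the same comma-splitting logic, using std::string in place of TString (hypothetical helper, not part of TMVA; the real method additionally resolves tokens such as "N" to the number of input variables and appends the output layer itself):

#include <cstdlib>
#include <string>
#include <vector>

// Hypothetical helper mirroring ParseLayoutString's splitting: "4,3,1" -> {4, 3, 1}.
std::vector<int> ParseLayout(std::string spec)
{
   std::vector<int> layout;
   while (!spec.empty()) {
      std::string::size_type pos = spec.find(',');
      std::string token = (pos == std::string::npos) ? spec : spec.substr(0, pos);
      spec = (pos == std::string::npos) ? "" : spec.substr(pos + 1);
      layout.push_back(std::atoi(token.c_str()));
   }
   return layout;
}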
// DeleteNetwork(): free every layer
for (Int_t i = 0; i < numLayers; i++) {
// ...
// DeleteNetworkLayer(): free the neurons of one layer
for (Int_t i = 0; i < numNeurons; i++) {
// ...
// BuildNetwork()
Log() << kHEADER << "Building Network. " << Endl;
// BuildLayers(): create each layer and connect it to its predecessor
Int_t numLayers = layout->size();
for (Int_t i = 0; i < numLayers; i++) {
   // ...
   BuildLayer(layout->at(i), curLayer, prevLayer, i, numLayers, fromFile);
   prevLayer = curLayer;
// ...
// second pass: collect all synapses and set up the regulators
for (Int_t i = 0; i < numLayers; i++) {
   // ...
   if (i != 0 && i != numLayers-1) fRegulators.push_back(0.);
   for (Int_t j = 0; j < numNeurons; j++) {
      // ...
      for (Int_t k = 0; k < numSynapses; k++) {
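Since each non-output layer carries an extra bias neuron (see the fromFile special case in BuildLayer below) and adjacent layers are fully connected, the synapse count between layers of n_i and n_{i+1} regular neurons is (n_i + 1) * n_{i+1}. A hedged standalone sketch under that assumption:

#include <cstdio>
#include <vector>

// Hypothetical helper: count the synapses of a fully connected TMVA-style
// network in which every layer except the output one carries a bias neuron.
long CountSynapses(const std::vector<int>& layout)
{
   long total = 0;
   for (std::size_t i = 0; i + 1 < layout.size(); ++i)
      total += (long)(layout[i] + 1) * layout[i + 1];   // +1 for the bias neuron
   return total;
}

int main()
{
   std::printf("%ld\n", CountSynapses({4, 5, 1}));   // (4+1)*5 + (5+1)*1 = 31
   return 0;
}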
// BuildLayer(): create the neurons of a single layer; the last neuron of every
// non-output layer acts as the bias neuron
for (Int_t j = 0; j < numNeurons; j++) {
   if (fromFile && (layerIndex != numLayers-1) && (j == numNeurons-1)) {
      // ...
      curLayer->Add(neuron);
// ...
if (layerIndex == 0) {             // input layer
// ...
if (layerIndex == numLayers-1) {   // output layer
// ...
curLayer->Add(neuron);
// ...
if (layerIndex != numLayers-1) {   // append the bias neuron
// ...
curLayer->Add(neuron);
// AddPreLinks(): connect a neuron to every neuron of the previous layer
for (Int_t i = 0; i < numNeurons; i++) {
// ...
// InitWeights(): randomize all synapse weights
for (Int_t i = 0; i < numSynapses; i++) {
// ...
// ForceWeights(): overwrite all synapse weights with given values
for (Int_t i = 0; i < numSynapses; i++) {
// ForceNetworkCalculations(): update every neuron, layer by layer
for (Int_t i = 0; i < numLayers; i++) {
   // ...
   for (Int_t j = 0; j < numNeurons; j++) {
// ...
// WaitForKeyboard(): pause for debugging
Log() << kINFO << "***Type anything to continue (q to quit): ";
std::getline(std::cin, dummy);
if (dummy == "q" || dummy == "Q") {
// PrintNetwork(): dump the network structure (debug mode only)
if (!Debug()) return;
// ...
Log() << kINFO << "-------------------------------------------------------------------" << Endl;
// ...
for (Int_t i = 0; i < numLayers; i++) {
   // ...
   Log() << kINFO << "Layer #" << i << " (" << numNeurons << " neurons):" << Endl;
// PrintLayer() / PrintNeuron(): per-neuron debug output
for (Int_t j = 0; j < numNeurons; j++) {
   // ...
   Log() << kINFO << "\tNeuron #" << j << " (LinksIn: " << neuron->NumPreLinks()
   // ...
         << "\t\tValue:\t" << neuron->GetValue()
// ...
Log() << kINFO << "\t\tActivationEquation:\t";
// ...
Log() << kINFO << "\t\tLinksIn:" << Endl;
// ...
Log() << kINFO << "\t\tLinksOut:" << Endl;
// GetRegressionValues(): evaluate, then fill one value per regression target
for (UInt_t itgt = 0; itgt < ntgts; itgt++) {
// ...
for (UInt_t itgt = 0; itgt < ntgts; itgt++) {
// GetMulticlassValues(): turn the raw per-class outputs into probabilities
std::vector<Float_t> temp;
// ...
for (UInt_t icls = 0; icls < nClasses; icls++) {
// ...
for (UInt_t iClass = 0; iClass < nClasses; iClass++) {
   // ...
   for (UInt_t j = 0; j < nClasses; j++) {
      if (iClass != j)   // the diagonal term is excluded
         norm += exp(temp[j] - temp[iClass]);
   // ...
   (*fMulticlassReturnVal).push_back(1.0/(1.0 + norm));
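Since exp(0) = 1, multiplying numerator and denominator by exp(temp[iClass]) shows that 1/(1+norm) is exactly the softmax exp(t_i) / sum_j exp(t_j), written with differences so the raw outputs are never exponentiated directly (and if exp() still overflows to infinity, the probability degrades gracefully to 0). A minimal standalone sketch of the same normalization:

#include <cmath>
#include <cstdio>
#include <vector>

// Softmax written the way the fragment above computes it, from pairwise
// differences rather than the raw outputs.
std::vector<double> Softmax(const std::vector<double>& t)
{
   std::vector<double> p;
   for (std::size_t i = 0; i < t.size(); ++i) {
      double norm = 0.0;
      for (std::size_t j = 0; j < t.size(); ++j)
         if (j != i) norm += std::exp(t[j] - t[i]);
      p.push_back(1.0 / (1.0 + norm));   // == exp(t[i]) / sum_j exp(t[j])
   }
   return p;
}

int main()
{
   std::vector<double> p = Softmax({1.0, 2.0, 3.0});
   std::printf("%.3f %.3f %.3f\n", p[0], p[1], p[2]);   // 0.090 0.245 0.665
   return 0;
}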
// AddWeightsXMLTo(): write one whitespace-separated weight string per neuron
for (Int_t i = 0; i < numLayers; i++) {
   // ...
   for (Int_t j = 0; j < numNeurons; j++) {
      // ...
      if (numSynapses == 0) continue;
      std::stringstream s("");
      // ...
      for (Int_t k = 0; k < numSynapses; k++) {
         // ...
         s << std::scientific << synapse->GetWeight() << " ";
// AddWeightsXMLTo(): serialize the inverse Hessian row by row
for (Int_t row = 0; row < nRows; ++row) {
   // ...
   std::stringstream s("");
   // ...
   for (Int_t col = 0; col < nCols; ++col) {
      s << std::scientific << (*(elements + index)) << " ";
// ReadWeightsFromXML(): recover the layout, then the per-neuron weight strings
std::vector<Int_t>* layout = new std::vector<Int_t>();
// ...
void* xmlLayout = NULL;
// ...
xmlLayout = wghtnode;   // fallback when no dedicated layout node exists
// ...
layout->resize( nLayers );
// ...
layout->at(index) = nNeurons;
// ...
std::stringstream s(content);
for (UInt_t iSyn = 0; iSyn < nSyn; iSyn++) {
// ReadWeightsFromXML(): read the inverse Hessian back, rejecting an
// implausible element count before allocating
void* xmlInvHessian = NULL;
// ...
if (nElements > std::numeric_limits<int>::max() - 100) {
   Log() << kFATAL << "you tried to read a hessian matrix with " << nElements
         << " elements, --> too large, guess s.th. went wrong reading from the weight file" << Endl;
// ...
elements = new Double_t[nElements + 10];
// ...
std::stringstream s(content);
for (Int_t iCol = 0; iCol < nCols; iCol++) {
   s >> (*(elements + index));
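The kFATAL branch above is a plausibility check on a size read from an external file before it is used to allocate memory. A compact standalone version of the pattern (names are illustrative):

#include <limits>
#include <stdexcept>

// Illustrative only: refuse to allocate when a size read from an external
// file is implausible, instead of letting new[] fail much later.
double* AllocateChecked(long long nElements)
{
   if (nElements < 0 || nElements > std::numeric_limits<int>::max() - 100)
      throw std::runtime_error("element count from weight file is implausible");
   return new double[static_cast<std::size_t>(nElements) + 10];   // +10 slack, as in the fragment
}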
// ReadWeightsFromStream(): plain-text format, read (label, weight) pairs
// until the stream is exhausted
std::vector<Double_t>* weights = new std::vector<Double_t>();
// ...
while (istr >> dummy >> weight) weights->push_back(weight);
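The while (istr >> dummy >> weight) idiom consumes (label, value) pairs until extraction fails, which doubles as the end-of-stream test. A self-contained sketch:

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

int main()
{
   std::istringstream istr("w0 0.5 w1 -1.25 w2 3.0");
   std::vector<double> weights;
   std::string dummy;   // label column, discarded just as in the fragment
   double weight;
   while (istr >> dummy >> weight) weights.push_back(weight);
   std::cout << weights.size() << " weights read\n";   // prints: 3 weights read
   return 0;
}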
// CreateRanking(): importance of an input variable from the squared weights
// of its outgoing synapses, scaled by the variable's typical size
Statistics( Types::kTraining, GetInputVar(i),
            meanS, meanB, rmsS, rmsB, xmin, xmax );
// ...
if (avgVal < meanrms) avgVal = meanrms;
// ...
for (Int_t j = 0; j < numSynapses; j++) {
   // ...
}
importance *= avgVal * avgVal;
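In other words, a variable's rank is driven by the squared weights of the synapses leaving its input neuron, scaled by the square of the variable's typical magnitude (floored by its RMS). A hedged standalone sketch of that reduction, with all names illustrative:

#include <algorithm>
#include <vector>

// Illustrative reduction following the fragment: sum the squared first-layer
// weights leaving an input variable, then scale by its squared size.
double VariableImportance(double avgVal, double meanrms,
                          const std::vector<double>& outgoingWeights)
{
   avgVal = std::max(avgVal, meanrms);   // if (avgVal < meanrms) avgVal = meanrms;
   double importance = 0.0;
   for (double w : outgoingWeights) importance += w * w;
   return importance * avgVal * avgVal;
}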
// CreateWeightMonitoringHists(): one TH2F of weights per pair of adjacent layers
void TMVA::MethodANNBase::CreateWeightMonitoringHists( const TString& bulkname,
                                                       std::vector<TH1*>* hv ) const
// ...
for (Int_t i = 0; i < numLayers-1; i++) {
   // ...
   hist = new TH2F(name + "", name + "",
                   numNeurons1, 0, numNeurons1, numNeurons2, 0, numNeurons2);
   // ...
   for (Int_t j = 0; j < numNeurons1; j++) {
      // ...
      for (Int_t k = 0; k < numSynapses; k++) {
         // ...
         hist->SetBinContent(j+1, k+1, synapse->GetWeight());
// ...
if (hv) hv->push_back( hist );
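Outside of TMVA, the same bookkeeping takes only a few lines of ROOT. A minimal sketch (histogram name and weight values invented) following the bin convention of the fragment, with bin (j+1, k+1) holding the weight from neuron j of the lower layer to neuron k of the upper one:

#include "TH2F.h"

void FillWeightHist()
{
   const int nFrom = 3, nTo = 2;
   double w[3][2] = { {0.1, -0.2}, {0.3, 0.0}, {-1.0, 0.5} };
   TH2F* hist = new TH2F("weights_hist0to1", "weights layer 0 to 1",
                         nFrom, 0, nFrom, nTo, 0, nTo);
   for (int j = 0; j < nFrom; ++j)
      for (int k = 0; k < nTo; ++k)
         hist->SetBinContent(j + 1, k + 1, w[j][k]);   // bins are 1-based
}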
// WriteMonitoringHistosToFile(): give each epoch-monitoring directory a unique number
#if __cplusplus > 199711L
static std::atomic<int> epochMonitoringDirectoryNumber{0};
#else
static int epochMonitoringDirectoryNumber = 0;
#endif
int epochVal = epochMonitoringDirectoryNumber++;
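With C++11 available, the post-increment on a std::atomic<int> is a single fetch-add, so concurrent writers can never obtain the same directory number. A compact demonstration:

#include <atomic>
#include <iostream>
#include <set>
#include <thread>
#include <vector>

int main()
{
   std::atomic<int> counter{0};
   std::vector<std::thread> pool;
   std::vector<int> got(8);
   for (int t = 0; t < 8; ++t)
      pool.emplace_back([&, t] { got[t] = counter++; });   // atomic fetch-add
   for (auto& th : pool) th.join();
   std::set<int> unique(got.begin(), got.end());
   std::cout << unique.size() << " distinct ids\n";   // always prints "8 distinct ids"
   return 0;
}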
// MakeClassSpecific(): emit a standalone C++ class carrying the trained weights
fout << "   double ActivationFnc(double x) const;" << std::endl;
fout << "   double OutputActivationFnc(double x) const;" << std::endl;
// ...
fout << "   int fLayers;" << std::endl;
fout << "   int fLayerSize[" << numLayers << "];" << std::endl;
int numNodesFrom = -1;
for (Int_t lIdx = 0; lIdx < numLayers; lIdx++) {
   // ...
   if (numNodesFrom < 0) { numNodesFrom = numNodesTo; continue; }
   fout << "   double fWeightMatrix" << lIdx-1 << "to" << lIdx
        << "[" << numNodesTo << "][" << numNodesFrom << "];";
   fout << "   // weight matrix from layer " << lIdx-1 << " to " << lIdx << std::endl;
   numNodesFrom = numNodesTo;
// ...
fout << "};" << std::endl;
fout << "inline void " << className << "::Initialize()" << std::endl;
fout << "{" << std::endl;
fout << "   // build network structure" << std::endl;
fout << "   fLayers = " << numLayers << ";" << std::endl;
for (Int_t lIdx = 0; lIdx < numLayers; lIdx++) {
   // ...
   fout << "   fLayerSize[" << lIdx << "] = " << numNodes << ";" << std::endl;
// ...
for (Int_t i = 0; i < numLayers-1; i++) {
   fout << "   // weight matrix from layer " << i << " to " << i+1 << std::endl;
   // ...
   for (Int_t j = 0; j < numNeurons; j++) {
      // ...
      for (Int_t k = 0; k < numSynapses; k++) {
         // ...
         fout << "   fWeightMatrix" << i << "to" << i+1
              << "[" << k << "][" << j << "] = " << synapse->GetWeight()
              << ";" << std::endl;
// ...
fout << "}" << std::endl;
fout << "inline double " << className
     << "::GetMvaValue__( const std::vector<double>& inputValues ) const" << std::endl;
fout << "{" << std::endl;
fout << "   if (inputValues.size() != (unsigned int)fLayerSize[0]-1) {" << std::endl;
fout << "      std::cout << \"Input vector needs to be of size \" << fLayerSize[0]-1 << std::endl;" << std::endl;
fout << "      return 0;" << std::endl;
fout << "   }" << std::endl;
// ...
for (Int_t lIdx = 0; lIdx < numLayers; lIdx++) {
   // ...
   fout << "   std::array<double, " << numNodes << "> fWeights" << lIdx << " {{}};" << std::endl;
// ...
for (Int_t lIdx = 0; lIdx < numLayers - 1; lIdx++) {
   fout << "   fWeights" << lIdx << ".back() = 1.;" << std::endl;   // the bias node
// ...
fout << "   for (int i=0; i<fLayerSize[0]-1; i++)" << std::endl;
fout << "      fWeights0[i]=inputValues[i];" << std::endl;
// ...
for (Int_t i = 0; i < numLayers-1; i++) {
   fout << "   // layer " << i << " to " << i+1 << std::endl;
   if (i+1 == numLayers-1) {
      fout << "   for (int o=0; o<fLayerSize[" << i+1 << "]; o++) {" << std::endl;
   // ... (hidden layers skip their bias node)
      fout << "   for (int o=0; o<fLayerSize[" << i+1 << "]-1; o++) {" << std::endl;
   // ...
   fout << "      for (int i=0; i<fLayerSize[" << i << "]; i++) {" << std::endl;
   fout << "         double inputVal = fWeightMatrix" << i << "to" << i+1
        << "[o][i] * fWeights" << i << "[i];" << std::endl;
   // ... depending on the neuron input type: sum, sum of squares, or sum of |x|
   fout << "         fWeights" << i+1 << "[o] += inputVal;" << std::endl;
   // ...
   fout << "         fWeights" << i+1 << "[o] += inputVal*inputVal;" << std::endl;
   // ...
   fout << "         fWeights" << i+1 << "[o] += fabs(inputVal);" << std::endl;
   // ...
   fout << "      } // loop over i" << std::endl;
   if (i+1 != numLayers-1)
      fout << "      fWeights" << i+1 << "[o] = ActivationFnc(fWeights" << i+1 << "[o]);" << std::endl;
   else
      fout << "      fWeights" << i+1 << "[o] = OutputActivationFnc(fWeights" << i+1 << "[o]);" << std::endl;
   fout << "   } // loop over o" << std::endl;
// ...
fout << "   return fWeights" << numLayers - 1 << "[0];" << std::endl;
fout << "}" << std::endl;
// emit the activation-function bodies via the TActivation's MakeFunction()
TString fncName = className + "::ActivationFnc";
// ...
fncName = className + "::OutputActivationFnc";
// ...
fout << "   " << std::endl;
fout << "// Clean up" << std::endl;
fout << "inline void " << className << "::Clear() " << std::endl;
fout << "{" << std::endl;
fout << "}" << std::endl;
void WaitForKeyboard()
wait for keyboard input, for debugging
virtual void WriteMonitoringHistosToFile() const
write histograms to file
virtual Int_t Write(const char *name=0, Int_t option=0, Int_t bufsize=0)
Write this object to the current directory.
virtual Double_t GetMvaValue(Double_t *err=0, Double_t *errUpper=0)
get the mva value generated by the NN
void BuildLayer(Int_t numNeurons, TObjArray *curLayer, TObjArray *prevLayer, Int_t layerIndex, Int_t numLayers, Bool_t from_file=false)
build a single layer with neurons and synapses connecting this layer to the previous layer ...
Random number generator class based on M.
MsgLogger & Endl(MsgLogger &ml)
void ForceNetworkCalculations()
calculate input values to each neuron
virtual TString GetExpression()=0
void CreateWeightMonitoringHists(const TString &bulkname, std::vector< TH1 *> *hv=0) const
virtual TMatrixTBase< Element > & SetMatrixArray(const Element *data, Option_t *option="")
Copy array data to matrix.
void DeleteNetwork()
delete/clear network
virtual Double_t Rndm()
Machine independent random number generator.
MethodANNBase(const TString &jobName, Types::EMVA methodType, const TString &methodTitle, DataSetInfo &theData, const TString &theOption)
standard constructor. Note: right now it is an option to choose the neuron input function, but only the input function "sum" leads to weight convergence – otherwise the weights go to nan and lead to an ABORT.
TActivation * fActivation
void AddPreLinks(TNeuron *neuron, TObjArray *prevLayer)
add synapses connecting a neuron to its preceding layer
Synapse class used by TMVA artificial neural network methods.
const Ranking * CreateRanking()
compute ranking of input variables by summing function of weights
void SetPostNeuron(TNeuron *post)
OptionBase * DeclareOptionRef(T &ref, const TString &name, const TString &desc="")
void PrintActivationEqn()
print activation equation, for debugging
virtual void ReadWeightsFromStream(std::istream &istr)
destroy/clear the network then read it back in from the weights file
void ForceValue(Double_t value)
force the value, typically for input and bias neurons
Virtual base Class for all MVA method.
virtual void MakeClassSpecific(std::ostream &, const TString &) const
write specific classifier response
TNeuronInput * fInputCalculator
XMLNodePointer_t GetNext(XMLNodePointer_t xmlnode, Bool_t realnode=kTRUE)
return the node next to xmlnode; if realnode==kTRUE, any special nodes in between will be skipped ...
TransformationHandler & GetTransformationHandler(Bool_t takeReroutedIfAvailable=true)
Ranking for variables in method (implementation)
virtual TDirectory * mkdir(const char *name, const char *title="")
Create a sub-directory "a" or a hierarchy of sub-directories "a/b/c/...".
UInt_t GetNClasses() const
void DeletePreLinks()
delete all pre-links
UInt_t GetNTargets() const
Double_t GetActivationValue() const
void AddWeightsXMLTo(void *parent) const
create XML description of ANN classifier
virtual void DeclareOptions()
define the options (their key words) that can be set in the option string; here, the options valid for ...
TObject * At(Int_t idx) const
virtual TMatrixTBase< Element > & ResizeTo(Int_t nrows, Int_t ncols, Int_t=-1)
Set size of the matrix to nrows x ncols. New dynamic elements are created, the overlapping part of the...
void SetInputCalculator(TNeuronInput *calculator)
set input calculator
static const Bool_t fgDEBUG
const char * GetNodeContent(XMLNodePointer_t xmlnode)
get contents (if any) of xmlnode
Int_t GetNoElements() const
void ForceWeights(std::vector< Double_t > *weights)
force the synapse weights
Bool_t Debug() const
who the hell makes such strange Debug flags that even use "global pointers"..
const TString & GetInputVar(Int_t i) const
std::vector< TH1 * > fEpochMonHistB
TSynapse * PostLinkAt(Int_t index) const
UInt_t GetTrainingTMVAVersionCode() const
void PrintMessage(TString message, Bool_t force=kFALSE) const
print messages, turn off printing by setting verbose and debug flag appropriately ...
const Event * GetEvent() const
Neuron class used by TMVA artificial neural network methods.
Double_t GetXmin(Int_t ivar) const
DataSetInfo & DataInfo() const
Bool_t DoRegression() const
virtual void ProcessOptions()
do nothing specific at this moment
Ssiz_t First(char c) const
Find first occurrence of a character c.
Class that contains all the data information.
void AddPostLink(TSynapse *post)
add synapse as a post-link to this neuron
Int_t NumPreLinks() const
virtual void BuildNetwork(std::vector< Int_t > *layout, std::vector< Double_t > *weights=NULL, Bool_t fromFile=kFALSE)
build network given a layout (number of neurons in each layer) and optional weights array ...
void CalculateActivationValue()
calculate neuron activation/output
TActivation * CreateActivation(EActivationType type) const
instantiate the correct activation object according to the type chosen (given as the enumeration type...
void ReadWeightsFromXML(void *wghtnode)
read MLP from xml weight file
Double_t GetXmax(Int_t ivar) const
Bool_t DoMulticlass() const
Tanh activation function for ANN.
Int_t NumPostLinks() const
virtual void PrintNetwork() const
print network representation, for debugging
Float_t GetTarget(UInt_t itgt) const
virtual void MakeFunction(std::ostream &fout, const TString &fncName)=0
Bool_t AddRawLine(XMLNodePointer_t parent, const char *line)
Add just a line into the xml file. The line should have correct xml syntax so that it can later be decoded by the xml pa...
void SetWeight(Double_t weight)
set synapse weight
const char * GetName() const
2-D histogram with a float per channel (see TH1 documentation)
void Statistics(Types::ETreeType treeType, const TString &theVarName, Double_t &, Double_t &, Double_t &, Double_t &, Double_t &, Double_t &)
calculates rms, mean, xmin, xmax of the event variable; this can either be done for the variables as th...
Bool_t BeginsWith(const char *s, ECaseCompare cmp=kExact) const
void AddPreLink(TSynapse *pre)
add synapse as a pre-link to this neuron
Double_t GetDelta() const
Int_t GetEntriesFast() const
char * Form(const char *fmt,...)
virtual void GetMatrix2Array(Element *data, Option_t *option="") const
Copy matrix data to array.
std::vector< TH1 * > fEpochMonHistW
void PrintPostLinks() const
void SetTarget(UInt_t itgt, Float_t value)
set the target value (dimension itgt) to value
std::vector< Double_t > fRegulators
TNeuron * GetInputNeuron(Int_t index)
std::vector< Int_t > fRegulatorIdx
void InitWeights()
initialize the synapse weights randomly
Double_t GetValue() const
Float_t GetValue(UInt_t ivar) const
return value of i'th variable
TString & Remove(Ssiz_t pos)
#define TMVA_VERSION(a, b, c)
std::vector< Int_t > * ParseLayoutString(TString layerSpec)
parse layout specification string and return a vector, each entry containing the number of neurons to...
XMLAttrPointer_t NewAttr(XMLNodePointer_t xmlnode, XMLNsPointer_t, const char *name, const char *value)
creates new attribute for xmlnode, namespaces are not supported for attributes
virtual const std::vector< Float_t > & GetMulticlassValues()
get the multiclass classification values generated by the NN
TNeuron * GetOutputNeuron(Int_t index=0)
Describe directory structure in memory.
std::vector< Float_t > * fMulticlassReturnVal
void PrintPreLinks() const
void CalculateValue()
calculate neuron input
Bool_t IsNormalised() const
void ForceNetworkInputs(const Event *ev, Int_t ignoreIndex=-1)
force the input values of the input neurons, i.e. force the value for each input neuron ...
Bool_t Contains(const char *pat, ECaseCompare cmp=kExact) const
void AddPreDefVal(const T &)
std::vector< TNeuron * > fOutputNeurons
virtual ~MethodANNBase()
destructor
void SetPreNeuron(TNeuron *pre)
std::vector< TString > * GetAllActivationNames() const
returns the names of all known activation functions
Abstract ClassifierFactory template that handles arbitrary types.
virtual Bool_t cd(const char *path=0)
Change current directory to "this" directory.
XMLNodePointer_t GetChild(XMLNodePointer_t xmlnode, Bool_t realnode=kTRUE)
returns first child of xmlnode
TDirectory * BaseDir() const
returns the ROOT directory where info/histograms etc of the corresponding MVA method instance are sto...
virtual void AddRank(const Rank &rank)
Add a new rank; take ownership of it.
XMLNodePointer_t NewChild(XMLNodePointer_t parent, XMLNsPointer_t ns, const char *name, const char *content=0)
create new child element for parent node
virtual const std::vector< Float_t > & GetRegressionValues()
get the regression value generated by the NN
void PrintNeuron(TNeuron *neuron) const
print a neuron, for debugging
Int_t GetEntries() const
Return the number of objects in array (i.e.
std::vector< TH1 * > fEpochMonHistS
std::vector< Float_t > * fRegressionReturnVal
Class for easily choosing activation functions.
TH1F * fEstimatorHistTrain
void DeleteNetworkLayer(TObjArray *&layer)
delete a network layer
THist< 2, float, THistStatContent, THistStatUncertainty > TH2F
void BuildLayers(std::vector< Int_t > *layout, Bool_t from_file=false)
build the network layers
Base class for all TMVA methods using artificial neural networks.
TH1F * fEstimatorHistTest
void NoErrorCalc(Double_t *const err, Double_t *const errUpper)
void PrintLayer(TObjArray *layer) const
print a single layer, for debugging
void InitANNBase()
initialize ANNBase object
void SetActivationEqn(TActivation *activation)
set activation equation
const char * Data() const
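For context, MethodANNBase sits behind the concrete network methods such as kMLP, and the options declared above are set through the usual booking string. A sketch in the style of the ROOT 6 TMVAClassification tutorial (file, dataset and variable setup are placeholders):

#include "TFile.h"
#include "TMVA/DataLoader.h"
#include "TMVA/Factory.h"
#include "TMVA/Types.h"

void BookMLP()
{
   TFile* out = TFile::Open("TMVA_MLP.root", "RECREATE");
   TMVA::Factory factory("TMVAClassification", out, "!V:!Silent:AnalysisType=Classification");
   TMVA::DataLoader loader("dataset");
   // ... AddVariable() / AddSignalTree() / AddBackgroundTree() calls go here ...

   // "HiddenLayers" is parsed by ParseLayoutString(); "RandomSeed" is the
   // option declared by DeclareOptions() above.
   factory.BookMethod(&loader, TMVA::Types::kMLP, "MLP",
                      "H:!V:NeuronType=tanh:NCycles=600:HiddenLayers=N+5:RandomSeed=1");
   // factory.TrainAllMethods(); factory.TestAllMethods(); factory.EvaluateAllMethods();
   out->Close();
}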