#include "TSynapse.h"

// (overloaded constructor signature fragments; each ends with the optional
// external activation function/derivative parameters)
                       const char * extF, const char * extD)
                       const char * extF, const char * extD)
                       const char * training,
                       const char * extF, const char * extD)
   if (testcut == "") testcut = Form("!(%s)", training);
   data->Draw(Form(">>fTestList_%lu", (ULong_t)this), (const char *) testcut, "goff");
   Warning("TMultiLayerPerceptron::TMultiLayerPerceptron",
           "Data not set. Cannot define datasets");
                       const char * training,
                       const char * extF, const char * extD)
   if (testcut == "") testcut = Form("!(%s)", training);
   data->Draw(Form(">>fTestList_%lu", (ULong_t)this), (const char *) testcut, "goff");
   Warning("TMultiLayerPerceptron::TMultiLayerPerceptron",
           "Data not set. Cannot define datasets");
   std::cerr << "Error: data already defined." << std::endl;
   Warning("TMultiLayerPerceptron::TMultiLayerPerceptron",
           "Data not set. Cannot define datasets");
   Warning("TMultiLayerPerceptron::TMultiLayerPerceptron",
           "Data not set. Cannot define datasets");
   Bool_t minE_Train = false;
   Int_t displayStepping = 1;
   displayStepping = atoi(out.Data() + 7);   // skips "update=" in the option string
   TGraph *train_residual_plot = 0;
   TGraph *test_residual_plot = 0;
   Error("Train", "Training/Test samples still not defined. Cannot train the neural network");
   Info("Train", "Using %d train and %d test entries.",
   std::cout << "Training the Neural Network" << std::endl;
   canvas = new TCanvas("NNtraining", "Neural Net training");
   if (!canvas) canvas = new TCanvas("NNtraining", "Neural Net training");
   train_residual_plot = new TGraph(nEpoch);
   test_residual_plot  = new TGraph(nEpoch);
   residual_plot->Add(train_residual_plot);
   residual_plot->Add(test_residual_plot);
   residual_plot->Draw("LA");
   for (i = 0; i < els; i++)
   TMatrixD bfgsh(matrix_size, matrix_size);
   for (Int_t iepoch = 0;
        (iepoch < nEpoch) && (!minE_Train || training_E > minE)
                          && (!minE_Test  || test_E > minE); iepoch++) {
      for (i = 0; i < els; i++)
         onorm += dir[i] * dir[i];
      prod -= dir[idx++] * neuron->GetDEDw();
      prod -= dir[idx++] * synapse->GetDEDw();
      for (i = 0; i < els; i++)
         onorm += dir[i] * dir[i];
      if (GetBFGSH(bfgsh, gamma, delta)) {
         Error("TMultiLayerPerceptron::Train()", "Line search fail");
         Error("TMultiLayerPerceptron::Train()", "Stop.");
      if ((verbosity % 2) && ((!(iepoch % displayStepping)) || (iepoch == nEpoch - 1))) {
         std::cout << "Epoch: " << iepoch
                   << " learn=" << training_E
                   << " test="  << test_E
         train_residual_plot->SetPoint(iepoch, iepoch, training_E);
         test_residual_plot->SetPoint(iepoch, iepoch, test_E);
         for (i = 1; i < nEpoch; i++) {
            train_residual_plot->SetPoint(i, i, trp);
            test_residual_plot->SetPoint(i, i, tep);
         if ((!(iepoch % displayStepping)) || (iepoch == nEpoch - 1)) {
   std::cout << "Training done." << std::endl;
   "Training sample", "L");   // legend entry for the residual plot
   if (nEntries == 0) return 0.0;
   for (i = 0; i < nEvents; i++) {
   for (i = 0; i < nEvents; i++) {
   return (error / 2.);
   if (target < DBL_EPSILON) {
   if ((1 - target) < DBL_EPSILON) {
   if (output == 0.0 || output == 1.0)
   error -= target * TMath::Log(output / target)
            + (1 - target) * TMath::Log((1 - output) / (1 - target));
   if (target > DBL_EPSILON) {
      error -= target * TMath::Log(output / target);
   for (i = 0; i < nEvents; i++) {
   for (i = 0; i < nEvents; i++) {
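Pulled together, the binary cross-entropy branch above amounts to the following self-contained helper; a sketch mirroring the DBL_EPSILON guards in the fragments, not a drop-in replacement for the class method.

#include <cfloat>
#include <cmath>

// Cross-entropy contribution of one sigmoid output neuron.
double CrossEntropyBinary(double output, double target)
{
   if (target < DBL_EPSILON) {                    // target == 0
      return (output == 1.0) ? DBL_MAX : -std::log(1 - output);
   }
   if ((1 - target) < DBL_EPSILON) {              // target == 1
      return (output == 0.0) ? DBL_MAX : -std::log(output);
   }
   if (output == 0.0 || output == 1.0)            // saturated output
      return DBL_MAX;
   return -(target * std::log(output / target)
            + (1 - target) * std::log((1 - output) / (1 - target)));
}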
   Bool_t normalize = false;
   Int_t maxop, maxpar, maxconst;
   for (i = 0; i < nneurons; i++) {
   Warning("TMultiLayerPerceptron::ExpandStructure()",
           "Variable size arrays cannot be used to build implicitly an input layer. The index 0 will be assumed.");
   if (i || j) newInput += ",";
   if (i) newInput += ",";
   fStructure = newInput + ":" + hiddenAndOutput;
   hidden(hidden.Last(':') + 1,
          hidden.Length() - (hidden.Last(':') + 1))).Data());
   if (input.Length() == 0) {
      Error("BuildNetwork()", "malformed structure. No input layer.");
   if (output.Length() == 0) {
      Error("BuildNetwork()", "malformed structure. No output layer.");
   for (i = 0; i < nneurons; i++) {
   Int_t prevStart = 0;
   end = hidden.Index(":", beg + 1);
   Error("BuildOneHiddenLayer",
         "The specification '%s' for hidden layer %d must contain only numbers!",
         sNumNodes.Data(), layer - 1);
   for (Int_t i = 0; i < num; i++) {
      name.Form("HiddenL%d:N%d", layer, i);
      for (Int_t j = prevStart; j < prevStop; j++) {
   for (Int_t i = prevStop; i < nEntries; i++) {
      for (Int_t j = prevStop; j < nEntries; j++)
   prevStart = prevStop;
   Int_t prevStart = prevStop - prev;
   for (i = 0; i < nneurons; i++) {
   name = output(pos, nextpos - pos);
   for (j = prevStart; j < prevStop; j++) {
   for (i = prevStop; i < nEntries; i++) {
      for (j = prevStop; j < nEntries; j++)
      Error("DrawResult()", "no such output.");
   new TCanvas("NNresult", "Neural Net output");
   setname = Form("train%d", index);
   setname = Form("test%d", index);
   if ((!fData) || (!events)) {
      Error("DrawResult()", "no dataset.");
   TString title = "Neural Net Output control. ";
   setname = "MLP_" + setname + "_comp";
   hist = new TH2D(setname.Data(), title.Data(), 50, -1, 1, 50, -1, 1);
   for (i = 0; i < nEvents; i++) {
   TString title = "Neural Net Output. ";
   setname = "MLP_" + setname;
   hist = new TH1D(setname, title, 50, 1, -1);   // reversed limits: automatic range
   for (i = 0; i < nEvents; i++)
   hist = new TH1D(setname, title, 50, 1, -1);
   nEvents = events->GetN();
   for (i = 0; i < nEvents; i++)
      Error("TMultiLayerPerceptron::DumpWeights()", "Invalid file name");
   output = &std::cout;
   output = new std::ofstream(filen.Data());
   *output << "#input normalization" << std::endl;
   *output << neuron->GetNormalisation()[0] << " "
           << neuron->GetNormalisation()[1] << std::endl;
   *output << "#output normalization" << std::endl;
   *output << neuron->GetNormalisation()[0] << " "
           << neuron->GetNormalisation()[1] << std::endl;
   *output << "#neurons weights" << std::endl;
   *output << neuron->GetWeight() << std::endl;
   *output << "#synapses weights" << std::endl;
   *output << synapse->GetWeight() << std::endl;
   ((std::ofstream *) output)->close();
      Error("TMultiLayerPerceptron::LoadWeights()", "Invalid file name");
   char *buff = new char[100];
   std::ifstream input(filen.Data());
   input.getline(buff, 100);   // skips headers and reads values line by line
   input.getline(buff, 100);
   input.getline(buff, 100);
   input.getline(buff, 100);
   input.getline(buff, 100);
   input.getline(buff, 100);
   input.getline(buff, 100);
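Round-tripping the text format written above (assumes a trained mlp; the file name is arbitrary):

   mlp.DumpWeights("weights.txt");   // writes the "#...normalization" and weight blocks
   mlp.LoadWeights("weights.txt");   // reads them back with the getline() calls above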
   Warning("TMultiLayerPerceptron::Export",
           "Request to export a network using an external function");
   TString basefilename = filename;
   TString classname = basefilename;
   std::ofstream headerfile(header);
   std::ofstream sourcefile(source);
   headerfile << "#ifndef " << basefilename << "_h" << std::endl;
   headerfile << "#define " << basefilename << "_h" << std::endl << std::endl;
   headerfile << "class " << classname << " { " << std::endl;
   headerfile << "public:" << std::endl;
   headerfile << "   " << classname << "() {}" << std::endl;
   headerfile << "   ~" << classname << "() {}" << std::endl;
   sourcefile << "#include \"" << header << "\"" << std::endl;
   sourcefile << "#include <cmath>" << std::endl << std::endl;
   headerfile << "   double Value(int index";
   sourcefile << "double " << classname << "::Value(int index";
   headerfile << ",double in" << i;
   sourcefile << ",double in" << i;
   headerfile << ");" << std::endl;
   sourcefile << ") {" << std::endl;
   sourcefile << "   input" << i << " = (in" << i << " - "
   sourcefile << "   switch(index) {" << std::endl;
   sourcefile << "     case " << idx++ << ":" << std::endl
              << "         return neuron" << neuron << "();" << std::endl;
   sourcefile << "     default:" << std::endl
              << "         return 0.;" << std::endl << "   }"
   sourcefile << "}" << std::endl << std::endl;
   headerfile << "   double Value(int index, double* input);" << std::endl;
   sourcefile << "double " << classname << "::Value(int index, double* input) {" << std::endl;
   sourcefile << "   input" << i << " = (input[" << i << "] - "
   sourcefile << "   switch(index) {" << std::endl;
   sourcefile << "     case " << idx++ << ":" << std::endl
              << "         return neuron" << neuron << "();" << std::endl;
   sourcefile << "     default:" << std::endl
              << "         return 0.;" << std::endl << "   }"
   sourcefile << "}" << std::endl << std::endl;
   headerfile << "private:" << std::endl;
   headerfile << "   double input" << i << ";" << std::endl;
   if (!neuron->GetPre(0)) {
      headerfile << "   double neuron" << neuron << "();" << std::endl;
      sourcefile << "double " << classname << "::neuron" << neuron
                 << "() {" << std::endl;
      sourcefile << "   return input" << idx++ << ";" << std::endl;
      sourcefile << "}" << std::endl << std::endl;
   headerfile << "   double input" << neuron << "();" << std::endl;
   sourcefile << "double " << classname << "::input" << neuron
              << "() {" << std::endl;
   sourcefile << "   double input = " << neuron->GetWeight()
              << ";" << std::endl;
   while ((syn = neuron->GetPre(n++))) {
      sourcefile << "   input += synapse" << syn << "();" << std::endl;
   sourcefile << "   return input;" << std::endl;
   sourcefile << "}" << std::endl << std::endl;
   headerfile << "   double neuron" << neuron << "();" << std::endl;
   sourcefile << "double " << classname << "::neuron" << neuron << "() {" << std::endl;
   sourcefile << "   double input = input" << neuron << "();" << std::endl;
   sourcefile << "   return ((input < -709. ? 0. : (1/(1+exp(-input)))) * ";
   sourcefile << "   return (input * ";
   sourcefile << "   return (tanh(input) * ";
   sourcefile << "   return (exp(-input*input) * ";
   sourcefile << "   return (exp(input) / (";
   sourcefile << "exp(input" << side << "())";
   sourcefile << " + exp(input" << side << "())";
   sourcefile << ") * ";
   sourcefile << "   return (0.0 * ";
   sourcefile << "}" << std::endl << std::endl;
   headerfile << "   double synapse" << synapse << "();" << std::endl;
   sourcefile << "double " << classname << "::synapse"
              << synapse << "() {" << std::endl;
   sourcefile << "   return (neuron" << synapse->GetPre()
              << "()*" << synapse->GetWeight() << ");" << std::endl;
   sourcefile << "}" << std::endl << std::endl;
   headerfile << "};" << std::endl << std::endl;
   headerfile << "#endif // " << basefilename << "_h" << std::endl << std::endl;
   std::cout << header << " and " << source << " created." << std::endl;
   else if (lg == "FORTRAN") {
      TString implicit = "      implicit double precision (a-h,n-z)\n";
      std::ofstream sigmoid("sigmoid.f");
      sigmoid << "      double precision FUNCTION SIGMOID(X)" << std::endl
              << "      IF(X.GT.37.) THEN" << std::endl
              << "         SIGMOID = 1." << std::endl
              << "      ELSE IF(X.LT.-709.) THEN" << std::endl
              << "         SIGMOID = 0." << std::endl
              << "      ELSE" << std::endl
              << "         SIGMOID = 1./(1.+EXP(-X))" << std::endl
              << "      ENDIF" << std::endl
              << "      END" << std::endl;
      std::ofstream sourcefile(source);
      sourcefile << "      double precision function " << filename
                 << "(x, index)" << std::endl;
      sourcefile << implicit;
      sourcefile << "      double precision x(" <<
      sourcefile << "C --- Last Layer" << std::endl;
      TString ifelseif = "      if (index.eq.";
      sourcefile << ifelseif.Data() << idx++ << ") then" << std::endl
                 << "=neuron" << neuron << "(x);" << std::endl;
      ifelseif = "      else if (index.eq.";
      sourcefile << "      else" << std::endl
                 << "          " << filename << "=0.d0" << std::endl
                 << "      endif" << std::endl;
      sourcefile << "      end" << std::endl;
      sourcefile << "C --- First and Hidden layers" << std::endl;
      sourcefile << "      double precision function neuron"
                 << neuron << "(x)" << std::endl
      sourcefile << "      double precision x("
      if (!neuron->GetPre(0)) {
         sourcefile << "      neuron" << neuron
                    << " = (x(" << idx+1 << ") - "
                    // ...
                    << "d0" << std::endl;
         sourcefile << "      neuron" << neuron
                    << " = " << neuron->GetWeight() << "d0" << std::endl;
      while ((syn = neuron->GetPre(n++)))
         sourcefile << "      neuron" << neuron
                    << " = neuron" << neuron
                    << " + synapse" << syn << "(x)" << std::endl;
      sourcefile << "      neuron" << neuron
                 << "= (sigmoid(neuron" << neuron << ")*";
      sourcefile << "      neuron" << neuron
                 << "= (tanh(neuron" << neuron << ")*";
      sourcefile << "      neuron" << neuron
                 << "= (exp(-neuron" << neuron << "*neuron"
      sourcefile << "      div = exp(neuron" << side << "())" << std::endl;
      sourcefile << "      div = div + exp(neuron" << side << "())" << std::endl;
      sourcefile << "      neuron" << neuron;
      sourcefile << "= (exp(neuron" << neuron << ") / div * ";
      sourcefile << "      neuron " << neuron << "= 0.";
      sourcefile << "      end" << std::endl;
      sourcefile << "C --- Synapses" << std::endl;
      sourcefile << "      double precision function " << "synapse"
                 << synapse << "(x)\n" << implicit;
      sourcefile << "      double precision x("
      sourcefile << "      synapse" << synapse
                 << "=neuron" << synapse->GetPre()
                 << "(x)*" << synapse->GetWeight() << "d0" << std::endl;
      sourcefile << "      end" << std::endl << std::endl;
      std::cout << source << " created." << std::endl;
   else if (lg == "PYTHON") {
      std::ofstream pythonfile(pyfile);
      pythonfile << "from math import exp" << std::endl << std::endl;
      pythonfile << "from math import tanh" << std::endl << std::endl;
      pythonfile << "class " << classname << ":" << std::endl;
      pythonfile << "\tdef value(self,index";
      pythonfile << ",in" << i;
      pythonfile << "):" << std::endl;
      pythonfile << "\t\tself.input" << i << " = (in" << i << " - "
      pythonfile << "\t\tif index==" << idx++
                 << ": return self.neuron" << neuron << "();" << std::endl;
      pythonfile << "\t\treturn 0." << std::endl;
      pythonfile << "\tdef neuron" << neuron << "(self):" << std::endl;
      pythonfile << "\t\treturn self.input" << idx++ << std::endl;
      pythonfile << "\t\tinput = " << neuron->GetWeight() << std::endl;
      while ((syn = neuron->GetPre(n++)))
         pythonfile << "\t\tinput = input + self.synapse"
                    << syn << "()" << std::endl;
      pythonfile << "\t\tif input<-709. : return " << neuron->GetNormalisation()[1] << std::endl;
      pythonfile << "\t\treturn ((1/(1+exp(-input)))*";
      pythonfile << "\t\treturn (input*";
      pythonfile << "\t\treturn (tanh(input)*";
      pythonfile << "\t\treturn (exp(-input*input)*";
      pythonfile << "\t\treturn (exp(input) / (";
      pythonfile << "exp(self.neuron" << side << "())";
      pythonfile << " + exp(self.neuron" << side << "())";
      pythonfile << ") * ";
      pythonfile << "\t\treturn 0.";
      pythonfile << "\tdef synapse" << synapse << "(self):" << std::endl;
      pythonfile << "\t\treturn (self.neuron" << synapse->GetPre()
                 << "()*" << synapse->GetWeight() << ")" << std::endl;
      std::cout << pyfile << " created." << std::endl;
   // Shuffle(): random index swaps
   for (Int_t i = 0; i < n; i++) {
      j = (Int_t) (rnd.Rndm() * a);
      index[j] = index[i];
   for (i = 0; i < nEvents; i++)
   for (i = 0; i < nEvents; i++) {
   // MLP_Line(): weights = origin + dist * dir
   neuron->SetWeight(origin[idx] + (dir[idx] * dist));
   synapse->SetWeight(origin[idx] + (dir[idx] * dist));
   // SteepestDir(): dir = -gradient
   dir[idx++] = -neuron->GetDEDw();
   dir[idx++] = -synapse->GetDEDw();
   MLP_Line(origin, direction, alpha2);
   for (icount = 0; icount < 100; icount++) {
      MLP_Line(origin, direction, alpha3);
   for (icount = 0; icount < 100; icount++) {
      MLP_Line(origin, direction, alpha2);
   // parabolic estimate of the line minimum from the three bracketing points:
   (err3 - err1) / ((err3 - err2) / (alpha3 - alpha2)
                  - (err2 - err1) / (alpha2 - alpha1)));
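The closing expression is the vertex of the parabola through the three bracketing samples (alpha1, err1), (alpha2, err2), (alpha3, err3). As a standalone helper (a sketch of the same closed form):

// Minimum of the interpolating parabola through three (alpha, error) points;
// this is the estimate the line search above settles on.
double ParabolaMin(double a1, double e1, double a2, double e2,
                   double a3, double e3)
{
   return 0.5 * (a1 + a3 - (e3 - e1) / ((e3 - e2) / (a3 - a2)
                                        - (e2 - e1) / (a2 - a1)));
}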
   buffer[idx] = neuron->GetWeight() - origin[idx];
   buffer[idx] = synapse->GetWeight() - origin[idx];
   dir[idx] = -neuron->GetDEDw() + beta * dir[idx];
   dir[idx] = -synapse->GetDEDw() + beta * dir[idx];
   gamma[idx++][0] = -neuron->GetDEDw();
   gamma[idx++][0] = -synapse->GetDEDw();
   for (Int_t i = 0; i < els; i++)
      delta[i].Assign(buffer[i]);
   gamma[idx++][0] += neuron->GetDEDw();
   gamma[idx++][0] += synapse->GetDEDw();
   output += neuron->GetDEDw() * dir[idx++];
   output += synapse->GetDEDw() * dir[idx++];
   dedw[idx++][0] = neuron->GetDEDw();
   dedw[idx++][0] = synapse->GetDEDw();
   for (Int_t i = 0; i < els; i++)
      dir[i] = -direction[i][0];
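The conjugate-gradients fragments above combine the steepest-descent direction with the previous direction. A minimal sketch of that update (beta as in Fletcher-Reeves, ||g_{t+1}||^2 / ||g_t||^2; names are illustrative):

#include <cstddef>
#include <vector>

// dir <- -grad + beta * dir, applied to every weight of the network.
void ConjugateDir(std::vector<double> &dir,
                  const std::vector<double> &grad, double beta)
{
   for (std::size_t i = 0; i < dir.size(); ++i)
      dir[i] = -grad[i] + beta * dir[i];
}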
#define NeuronSize 2.5

   Float_t xStep = 1./(nLayers+1.);
   for (layer = 0; layer < nLayers-1; layer++) {
      Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
      end = hidden.Index(":", beg + 1);
      if (layer == cnt) nNeurons_this = num;
      if (layer == cnt) nNeurons_this = num;
      if (layer == nLayers-2) {
         nNeurons_next = output.CountChar(',') + 1;
      Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
      end = hidden.Index(":", beg + 1);
      if (layer+1 == cnt) nNeurons_next = num;
      if (layer+1 == cnt) nNeurons_next = num;
      Float_t yStep_this = 1./(nNeurons_this+1.);
      Float_t yStep_next = 1./(nNeurons_next+1.);
      maxWeight = maxWeight < theSynapse->GetWeight() ? theSynapse->GetWeight() : maxWeight;
      for (Int_t neuron1 = 0; neuron1 < nNeurons_this; neuron1++) {
         for (Int_t neuron2 = 0; neuron2 < nNeurons_next; neuron2++) {
            TLine* synapse = new TLine(xStep*(layer+1), yStep_this*(neuron1+1),
                                       xStep*(layer+2), yStep_next*(neuron2+1));
            if (!theSynapse) continue;
   for (layer = 0; layer < nLayers; layer++) {
      else if (layer == nLayers-1) {
      Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
      end = hidden.Index(":", beg + 1);
      if (layer == cnt) nNeurons = num;
      if (layer == cnt) nNeurons = num;
      Float_t yStep = 1./(nNeurons+1.);
      for (Int_t neuron = 0; neuron < nNeurons; neuron++) {
      Float_t yStep = 1./(nrItems+1);
      for (Int_t item = 0; item < nrItems; item++) {
         TText* label = new TText(0.5*xStep, yStep*(item+1), brName.Data());
      yStep = 1./(numOutNodes+1);
      for (Int_t outnode = 0; outnode < numOutNodes; outnode++) {
         if (neuron && neuron->GetName()) {
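Invoked simply as below; layers are spaced by xStep, neurons by yStep, and each synapse is drawn as a TLine whose appearance reflects its weight, as the maxWeight scan above suggests.

   mlp.Draw();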