const char* extF, const char* extD)

const char * weight, TTree * data,
const char* extF, const char* extD)

const char * training,
const char* extF, const char* extD)
if (testcut == "") testcut = Form("!(%s)", training);
data->Draw(Form(">>fTrainingList_%zu", (size_t)this), training, "goff");
data->Draw(Form(">>fTestList_%zu", (size_t)this), (const char*)testcut, "goff");
Warning("TMultiLayerPerceptron::TMultiLayerPerceptron", "Data not set. Cannot define datasets");
const char * weight, TTree * data,
const char * training,
const char* extF, const char* extD)
if (testcut == "") testcut = Form("!(%s)", training);
data->Draw(Form(">>fTrainingList_%zu", (size_t)this), training, "goff");
data->Draw(Form(">>fTestList_%zu", (size_t)this), (const char*)testcut, "goff");
Warning("TMultiLayerPerceptron::TMultiLayerPerceptron", "Data not set. Cannot define datasets");

std::cerr << "Error: data already defined." << std::endl;
fData->Draw(Form(">>fTrainingList_%zu", (size_t)this), train, "goff");

Warning("TMultiLayerPerceptron::TMultiLayerPerceptron", "Data not set. Cannot define datasets");
if (fTest)
   if (strncmp(fTest->GetName(), Form("fTestList_%zu", (size_t)this), 10))
      delete fTest;

fData->Draw(Form(">>fTestList_%zu", (size_t)this), test, "goff");

Warning("TMultiLayerPerceptron::TMultiLayerPerceptron", "Data not set. Cannot define datasets");
fData->GetEntry(entry);

Bool_t minE_Train = false;

Int_t displayStepping = 1;
displayStepping = atoi(out.Data() + 7);

TGraph *train_residual_plot = nullptr;
TGraph *test_residual_plot = nullptr;

Error("Train", "Training/Test samples still not defined. Cannot train the neural network");
Info("Train", "Using %d train and %d test entries.", fTraining->GetN(), fTest->GetN());

std::cout << "Training the Neural Network" << std::endl;
canvas = new TCanvas("NNtraining", "Neural Net training");
if (!canvas) canvas = new TCanvas("NNtraining", "Neural Net training");

train_residual_plot = new TGraph(nEpoch);
test_residual_plot = new TGraph(nEpoch);
residual_plot->Add(train_residual_plot);
residual_plot->Add(test_residual_plot);
residual_plot->Draw("LA");
for (i = 0; i < els; i++)

TMatrixD bfgsh(matrix_size, matrix_size);

for (Int_t iepoch = 0; (iepoch < nEpoch) && (!minE_Train || training_E > minE) && (!minE_Test || test_E > minE); iepoch++) {
for (i = 0; i < els; i++)
   onorm += dir[i] * dir[i];

prod -= dir[idx++] * neuron->GetDEDw();
prod -= dir[idx++] * synapse->GetDEDw();

for (i = 0; i < els; i++)
   onorm += dir[i] * dir[i];
if (GetBFGSH(bfgsh, gamma, delta)) {

Error("TMultiLayerPerceptron::Train()", "Line search fail");

Error("TMultiLayerPerceptron::Train()", "Stop.");
if ((verbosity % 2) && ((!(iepoch % displayStepping)) || (iepoch == nEpoch - 1))) {
   std::cout << "Epoch: " << iepoch
             << " learn=" << training_E
             << " test=" << test_E

train_residual_plot->SetPoint(iepoch, iepoch, training_E);
test_residual_plot->SetPoint(iepoch, iepoch, test_E);
for (i = 1; i < nEpoch; i++) {
   train_residual_plot->SetPoint(i, i, trp);
   test_residual_plot->SetPoint(i, i, tep);
if ((!(iepoch % displayStepping)) || (iepoch == nEpoch - 1)) {

std::cout << "Training done." << std::endl;
if (verbosity / 2) {
   "Training sample", "L");
   "Test sample", "L");
if (nEntries == 0)
   return 0.0;

Int_t nEvents = list->GetN();
for (i = 0; i < nEvents; i++) {
   error += GetError(list->GetEntry(i));

for (i = 0; i < nEvents; i++) {

return (error / 2.);
if (target < DBL_EPSILON) {

if ((1 - target) < DBL_EPSILON) {

if (output == 0.0 || output == 1.0)

error -= target * TMath::Log(output / target)
       + (1 - target) * TMath::Log((1 - output) / (1 - target));

if (target > DBL_EPSILON) {
   error -= target * TMath::Log(output / target);
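// The error terms above implement, for a target t and a network output o, the
// cross-entropy contributions (summed over events and output neurons):
//
//    sigmoid outputs:  d = -t*ln(o/t) - (1-t)*ln((1-o)/(1-t))
//    softmax outputs:  d = -t*ln(o/t)
//
// with the t -> 0 and t -> 1 limits handled explicitly via the DBL_EPSILON
// checks, since ln(o/t) is undefined at t = 0.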
for (i = 0; i < nEvents; i++) {
for (i = 0; i < nEvents; i++) {

Bool_t normalize = false;

Int_t maxop, maxpar, maxconst;
for (i = 0; i < nneurons; i++) {
   if (f.GetMultiplicity() == 1 && f.GetNdata() > 1) {
      Warning("TMultiLayerPerceptron::ExpandStructure()",
              "Variable size arrays cannot be used to build implicitly an input layer. The index 0 will be assumed.");

   else if (f.GetNdata() > 1) {
      for (Int_t j = 0; j < f.GetNdata(); j++) {
         if (i || j) newInput += ",";

   if (i) newInput += ",";

fStructure = newInput + ":" + hiddenAndOutput;
hidden(hidden.Last(':') + 1,

if (input.Length() == 0) {
   Error("BuildNetwork()", "malformed structure. No input layer.");

if (output.Length() == 0) {
   Error("BuildNetwork()", "malformed structure. No output layer.");
for (i = 0; i < nneurons; i++) {

Int_t prevStart = 0;

end = hidden.Index(":", beg + 1);

Error("BuildOneHiddenLayer",
      "The specification '%s' for hidden layer %d must contain only numbers!",
      sNumNodes.Data(), layer - 1);
for (Int_t i = 0; i < num; i++) {
   name.Form("HiddenL%d:N%d", layer, i);

   for (Int_t j = prevStart; j < prevStop; j++) {

for (Int_t i = prevStop; i < nEntries; i++) {
   for (Int_t j = prevStop; j < nEntries; j++)

prevStart = prevStop;
prevStop = fNetwork.GetEntriesFast();
Int_t prevStart = prevStop - prev;

for (i = 0; i < nneurons; i++) {
   name = output(pos, nextpos - pos);

   for (j = prevStart; j < prevStop; j++) {

for (i = prevStop; i < nEntries; i++) {
   for (j = prevStop; j < nEntries; j++)
Error("DrawResult()", "no such output.");

new TCanvas("NNresult", "Neural Net output");

setname = Form("train%d", index);
setname = Form("test%d", index);

if ((!fData) || (!events)) {
   Error("DrawResult()", "no dataset.");
TString title = "Neural Net Output control. ";
setname = "MLP_" + setname + "_comp";
hist = new TH2D(setname.Data(), title.Data(), 50, -1, 1, 50, -1, 1);
for (i = 0; i < nEvents; i++) {

TString title = "Neural Net Output. ";
setname = "MLP_" + setname;
hist = new TH1D(setname, title, 50, 1, -1);
for (i = 0; i < nEvents; i++)

hist = new TH1D(setname, title, 50, 1, -1);
nEvents = events->GetN();
for (i = 0; i < nEvents; i++)
std::ostream * output;

Error("TMultiLayerPerceptron::DumpWeights()", "Invalid file name");

output = &std::cout;
output = new std::ofstream(filen.Data());

*output << "#input normalization" << std::endl;
*output << "#output normalization" << std::endl;
*output << "#neurons weights" << std::endl;
*output << neuron->GetWeight() << std::endl;
*output << "#synapses weights" << std::endl;
*output << synapse->GetWeight() << std::endl;

((std::ofstream *) output)->close();
Error("TMultiLayerPerceptron::LoadWeights()", "Invalid file name");

char *buff = new char[100];
std::ifstream input(filen.Data());
input.getline(buff, 100);
input.getline(buff, 100);
input.getline(buff, 100);
input.getline(buff, 100);
input.getline(buff, 100);
input.getline(buff, 100);
input.getline(buff, 100);
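// Hedged usage sketch: the getline() calls above step over the same section
// headers ("#input normalization", "#neurons weights", ...) that DumpWeights()
// emits, so the two methods round-trip:
//
//    mlp.DumpWeights("weights.txt");
//    mlp.LoadWeights("weights.txt");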
Warning("TMultiLayerPerceptron::Export", "Request to export a network using an external function");

TString basefilename = filename;
TString classname = basefilename;
std::ofstream headerfile(header);
std::ofstream sourcefile(source);
headerfile << "#ifndef " << basefilename << "_h" << std::endl;
headerfile << "#define " << basefilename << "_h" << std::endl << std::endl;
headerfile << "class " << classname << " { " << std::endl;
headerfile << "public:" << std::endl;
headerfile << "   " << classname << "() {}" << std::endl;
headerfile << "   ~" << classname << "() {}" << std::endl;
sourcefile << "#include \"" << header << "\"" << std::endl;
sourcefile << "#include <cmath>" << std::endl << std::endl;
headerfile << "   double Value(int index";
sourcefile << "double " << classname << "::Value(int index";
for (i = 0; i < fFirstLayer.GetEntriesFast(); i++) {
   headerfile << ",double in" << i;
   sourcefile << ",double in" << i;

headerfile << ");" << std::endl;
sourcefile << ") {" << std::endl;
for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
   sourcefile << "   input" << i << " = (in" << i << " - "

sourcefile << "   switch(index) {" << std::endl;

sourcefile << "     case " << idx++ << ":" << std::endl
           << "         return neuron" << neuron << "();" << std::endl;
sourcefile << "     default:" << std::endl
           << "         return 0.;" << std::endl << "   }"

sourcefile << "}" << std::endl << std::endl;
headerfile << "   double Value(int index, double* input);" << std::endl;
sourcefile << "double " << classname << "::Value(int index, double* input) {" << std::endl;
for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
   sourcefile << "   input" << i << " = (input[" << i << "] - "

sourcefile << "   switch(index) {" << std::endl;

sourcefile << "     case " << idx++ << ":" << std::endl
           << "         return neuron" << neuron << "();" << std::endl;
sourcefile << "     default:" << std::endl
           << "         return 0.;" << std::endl << "   }"

sourcefile << "}" << std::endl << std::endl;
headerfile << "private:" << std::endl;
for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
   headerfile << "   double input" << i << ";" << std::endl;
if (!neuron->GetPre(0)) {
   headerfile << "   double neuron" << neuron << "();" << std::endl;
   sourcefile << "double " << classname << "::neuron" << neuron
              << "() {" << std::endl;
   sourcefile << "   return input" << idx++ << ";" << std::endl;
   sourcefile << "}" << std::endl << std::endl;

headerfile << "   double input" << neuron << "();" << std::endl;
sourcefile << "double " << classname << "::input" << neuron
           << "() {" << std::endl;
sourcefile << "   double input = " << neuron->GetWeight()
           << ";" << std::endl;

while ((syn = neuron->GetPre(n++))) {
   sourcefile << "   input += synapse" << syn << "();" << std::endl;

sourcefile << "   return input;" << std::endl;
sourcefile << "}" << std::endl << std::endl;
headerfile << "   double neuron" << neuron << "();" << std::endl;
sourcefile << "double " << classname << "::neuron" << neuron << "() {" << std::endl;
sourcefile << "   double input = input" << neuron << "();" << std::endl;

sourcefile << "   return ((input < -709. ? 0. : (1/(1+exp(-input)))) * ";
sourcefile << "   return (input * ";
sourcefile << "   return (tanh(input) * ";
sourcefile << "   return (exp(-input*input) * ";
sourcefile << "   return (exp(input) / (";

sourcefile << "exp(input" << side << "())";
sourcefile << " + exp(input" << side << "())";
sourcefile << ") * ";

sourcefile << "   return (0.0 * ";

sourcefile << "}" << std::endl << std::endl;
headerfile << "   double synapse" << synapse << "();" << std::endl;
sourcefile << "double " << classname << "::synapse"
           << synapse << "() {" << std::endl;
sourcefile << "   return (neuron" << synapse->GetPre()
           << "()*" << synapse->GetWeight() << ");" << std::endl;
sourcefile << "}" << std::endl << std::endl;

headerfile << "};" << std::endl << std::endl;
headerfile << "#endif // " << basefilename << "_h" << std::endl << std::endl;

std::cout << header << " and " << source << " created." << std::endl;
else if (lg == "FORTRAN") {
   TString implicit = "      implicit double precision (a-h,n-z)\n";
   std::ofstream sigmoid("sigmoid.f");
   sigmoid << "      double precision FUNCTION SIGMOID(X)" << std::endl
           << "      IF(X.GT.37.) THEN" << std::endl
           << "         SIGMOID = 1." << std::endl
           << "      ELSE IF(X.LT.-709.) THEN" << std::endl
           << "         SIGMOID = 0." << std::endl
           << "      ELSE" << std::endl
           << "         SIGMOID = 1./(1.+EXP(-X))" << std::endl
           << "      ENDIF" << std::endl
           << "      END" << std::endl;
std::ofstream sourcefile(source);

sourcefile << "      double precision function " << filename
           << "(x, index)" << std::endl;
sourcefile << implicit;
sourcefile << "      double precision x("
           << fFirstLayer.GetEntriesFast() << ")" << std::endl << std::endl;

sourcefile << "C --- Last Layer" << std::endl;
TString ifelseif = "      if (index.eq.";

sourcefile << ifelseif.Data() << idx++ << ") then" << std::endl
           << "=neuron" << neuron << "(x);" << std::endl;
ifelseif = "      else if (index.eq.";

sourcefile << "      else" << std::endl
           << "          " << filename << "=0.d0" << std::endl
           << "      endif" << std::endl;
sourcefile << "      end" << std::endl;
sourcefile << "C --- First and Hidden layers" << std::endl;

sourcefile << "      double precision function neuron"
           << neuron << "(x)" << std::endl
sourcefile << "      double precision x("
           << fFirstLayer.GetEntriesFast() << ")" << std::endl << std::endl;
if (!neuron->GetPre(0)) {
   sourcefile << "      neuron" << neuron
              << " = (x(" << idx+1 << ") - "
              << "d0" << std::endl;

sourcefile << "      neuron" << neuron
           << " = " << neuron->GetWeight() << "d0" << std::endl;

while ((syn = neuron->GetPre(n++)))
   sourcefile << "      neuron" << neuron
              << " = neuron" << neuron
              << " + synapse" << syn << "(x)" << std::endl;

sourcefile << "      neuron" << neuron
           << "= (sigmoid(neuron" << neuron << ")*";

sourcefile << "      neuron" << neuron
           << "= (tanh(neuron" << neuron << ")*";

sourcefile << "      neuron" << neuron
           << "= (exp(-neuron" << neuron << "*neuron"

sourcefile << "      div = exp(neuron" << side << "())" << std::endl;
sourcefile << "      div = div + exp(neuron" << side << "())" << std::endl;
sourcefile << "      neuron" << neuron;
sourcefile << "= (exp(neuron" << neuron << ") / div * ";

sourcefile << "      neuron " << neuron << "= 0.";

sourcefile << "      end" << std::endl;
sourcefile << "C --- Synapses" << std::endl;

sourcefile << "      double precision function " << "synapse"
           << synapse << "(x)\n" << implicit;
sourcefile << "      double precision x("
           << fFirstLayer.GetEntriesFast() << ")" << std::endl << std::endl;
sourcefile << "      synapse" << synapse
           << "=neuron" << synapse->GetPre()
           << "(x)*" << synapse->GetWeight() << "d0" << std::endl;
sourcefile << "      end" << std::endl << std::endl;

std::cout << source << " created." << std::endl;
else if (lg == "PYTHON") {

   std::ofstream pythonfile(pyfile);
   pythonfile << "from math import exp" << std::endl << std::endl;
   pythonfile << "from math import tanh" << std::endl << std::endl;
   pythonfile << "class " << classname << ":" << std::endl;
   pythonfile << "\tdef value(self,index";
   for (i = 0; i < fFirstLayer.GetEntriesFast(); i++) {
      pythonfile << ",in" << i;

   pythonfile << "):" << std::endl;
   for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
      pythonfile << "\t\tself.input" << i << " = (in" << i << " - "

   pythonfile << "\t\tif index==" << idx++
              << ": return self.neuron" << neuron << "();" << std::endl;
   pythonfile << "\t\treturn 0." << std::endl;

   pythonfile << "\tdef neuron" << neuron << "(self):" << std::endl;
   pythonfile << "\t\treturn self.input" << idx++ << std::endl;
   pythonfile << "\t\tinput = " << neuron->GetWeight() << std::endl;

   while ((syn = neuron->GetPre(n++)))
      pythonfile << "\t\tinput = input + self.synapse"
                 << syn << "()" << std::endl;

   pythonfile << "\t\tif input<-709. : return " << neuron->GetNormalisation()[1] << std::endl;
   pythonfile << "\t\treturn ((1/(1+exp(-input)))*";
   pythonfile << "\t\treturn (input*";
   pythonfile << "\t\treturn (tanh(input)*";
   pythonfile << "\t\treturn (exp(-input*input)*";
   pythonfile << "\t\treturn (exp(input) / (";

   pythonfile << "exp(self.neuron" << side << "())";
   pythonfile << " + exp(self.neuron" << side << "())";
   pythonfile << ") * ";

   pythonfile << "\t\treturn 0.";

   pythonfile << "\tdef synapse" << synapse << "(self):" << std::endl;
   pythonfile << "\t\treturn (self.neuron" << synapse->GetPre()
              << "()*" << synapse->GetWeight() << ")" << std::endl;

   std::cout << pyfile << " created." << std::endl;
for (Int_t i = 0; i < n; i++) {
   index[j] = index[i];

for (i = 0; i < nEvents; i++)
for (i = 0; i < nEvents; i++) {
neuron->SetWeight(origin[idx] + (dir[idx] * dist));
synapse->SetWeight(origin[idx] + (dir[idx] * dist));

dir[idx++] = -neuron->GetDEDw();
dir[idx++] = -synapse->GetDEDw();
MLP_Line(origin, direction, alpha2);
for (icount = 0; icount < 100; icount++) {
   MLP_Line(origin, direction, alpha3);
for (icount = 0; icount < 100; icount++) {
   MLP_Line(origin, direction, alpha2);

(err3 - err1) / ((err3 - err2) / (alpha3 - alpha2)
               - (err2 - err1) / (alpha2 - alpha1)));
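// The expression above is the tail of the three-point parabolic interpolation
// used by the line search: for errors e1, e2, e3 measured at step lengths
// a1, a2, a3, the abscissa of the fitted parabola's minimum is
//
//    a_min = 0.5 * (a1 + a3 - (e3 - e1) / ((e3 - e2)/(a3 - a2)
//                                        - (e2 - e1)/(a2 - a1)))
//
// which becomes the next trial step.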
buffer[idx] = neuron->GetWeight() - origin[idx];
buffer[idx] = synapse->GetWeight() - origin[idx];

dir[idx] = -neuron->GetDEDw() + beta * dir[idx];
dir[idx] = -synapse->GetDEDw() + beta * dir[idx];

gamma[idx++][0] = -neuron->GetDEDw();
gamma[idx++][0] = -synapse->GetDEDw();
for (Int_t i = 0; i < els; i++)
   delta[i].Assign(buffer[i]);

gamma[idx++][0] += neuron->GetDEDw();
gamma[idx++][0] += synapse->GetDEDw();

output += neuron->GetDEDw() * dir[idx++];
output += synapse->GetDEDw() * dir[idx++];

dedw[idx++][0] = neuron->GetDEDw();
dedw[idx++][0] = synapse->GetDEDw();

for (Int_t i = 0; i < els; i++)
   dir[i] = -direction[i][0];
#define NeuronSize 2.5

Float_t xStep = 1./(nLayers+1.);
for (layer = 0; layer < nLayers-1; layer++) {

   Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
   end = hidden.Index(":", beg + 1);
   if (layer == cnt) nNeurons_this = num;
   if (layer == cnt) nNeurons_this = num;

   if (layer == nLayers-2) {
      nNeurons_next = output.CountChar(',')+1;

   Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
   end = hidden.Index(":", beg + 1);
   if (layer+1 == cnt) nNeurons_next = num;
   if (layer+1 == cnt) nNeurons_next = num;

   Float_t yStep_this = 1./(nNeurons_this+1.);
   Float_t yStep_next = 1./(nNeurons_next+1.);

   maxWeight = maxWeight < theSynapse->GetWeight() ? theSynapse->GetWeight() : maxWeight;

   for (Int_t neuron1 = 0; neuron1 < nNeurons_this; neuron1++) {
      for (Int_t neuron2 = 0; neuron2 < nNeurons_next; neuron2++) {
         TLine* synapse = new TLine(xStep*(layer+1), yStep_this*(neuron1+1), xStep*(layer+2), yStep_next*(neuron2+1));

         if (!theSynapse) continue;

for (layer = 0; layer < nLayers; layer++) {

   else if (layer == nLayers-1) {

   Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
   end = hidden.Index(":", beg + 1);
   if (layer == cnt) nNeurons = num;
   if (layer == cnt) nNeurons = num;

   Float_t yStep = 1./(nNeurons+1.);
   for (Int_t neuron = 0; neuron < nNeurons; neuron++) {
      m->SetMarkerColor(4);

Float_t yStep = 1./(nrItems+1);
for (Int_t item = 0; item < nrItems; item++) {
   TText* label = new TText(0.5*xStep, yStep*(item+1), brName.Data());

yStep = 1./(numOutNodes+1);
for (Int_t outnode = 0; outnode < numOutNodes; outnode++) {
   if (neuron && neuron->GetName()) {