// standard constructor (excerpt): member initialisation
   fLocalTrainingTree(0),
   fValidationFraction(0.5),

// constructor from weight file (excerpt)
                                    const TString& theWeightFile) :
   fLocalTrainingTree(0),
   fValidationFraction(0.5),
   fLearningMethod( "" )
 
// destructor: dispose of the owned network
   if (fMLP) delete fMLP;
 
// CreateMLPOptions: parse the comma-separated "HiddenLayers" specification
// into the layer string understood by TMultiLayerPerceptron
   while (layerSpec.Length()>0) {
      if (layerSpec.First(',')<0) {
         ...
      }
      else {
         sToAdd = layerSpec(0,layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
      }
      ...
      nNodes += atoi(sToAdd);
      fHiddenLayer = Form( "%s%i:", (const char*)fHiddenLayer, nNodes );
   }
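
For illustration, a sketch of what the parsing above resolves: with a hypothetical four-variable input (so N = 4), the default specification "N,N-1" yields nNodes = 4 and 3 on the two passes, leaving

   // fHiddenLayer == ":4:3:"   i.e. two hidden layers with 4 and 3 nodes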
 
// collect the input variable names, then append the hidden-layer string and
// the target branch name to form the final TMultiLayerPerceptron build options
   std::vector<TString>::iterator itrVar    = (*fInputVars).begin();
   std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
   fMLPBuildOptions = "";
   for (; itrVar != itrVarEnd; ++itrVar) {
      if (EnforceNormalization__) fMLPBuildOptions += "@"; // request input normalisation
      TString myVar = *itrVar;
      fMLPBuildOptions += myVar;
      fMLPBuildOptions += ",";
   }
   fMLPBuildOptions.Chop(); // remove the trailing ","

   fMLPBuildOptions += fHiddenLayer;
   fMLPBuildOptions += "type";   // name of the branch holding the target

   Log() << kINFO << "Use " << fNcycles << " training cycles" << Endl;
   Log() << kINFO << "Use configuration (nodes per hidden layer): " << fHiddenLayer << Endl;
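
A minimal standalone sketch of the string assembly above, assuming two hypothetical input variables and a hidden-layer string ":3:2:" already parsed from "3,2":

   #include <cstdio>
   #include <string>
   #include <vector>

   int main() {
      std::vector<std::string> inputVars = { "var1", "var2" };  // hypothetical inputs
      std::string hiddenLayer = ":3:2:";                        // parsed from "3,2"
      std::string buildOptions;
      for (const auto& v : inputVars) buildOptions += v + ",";
      buildOptions.pop_back();                // Chop(): drop the trailing ','
      buildOptions += hiddenLayer + "type";   // append layers and target branch
      std::printf("%s\n", buildOptions.c_str());  // prints: var1,var2:3:2:type
      return 0;
   }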
 
// DeclareOptions: define the option keys that can be set in the booking string
   DeclareOptionRef( fNcycles    = 200,       "NCycles",      "Number of training cycles" );
   DeclareOptionRef( fLayerSpec  = "N,N-1",   "HiddenLayers",
                     "Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );
   DeclareOptionRef( fValidationFraction = 0.5, "ValidationFraction",
                     "Fraction of events in training tree used for cross validation" );

   DeclareOptionRef( fLearningMethod = "Stochastic", "LearningMethod", "Learning method" );
   AddPreDefVal( TString("Stochastic") );
   AddPreDefVal( TString("Batch") );
   AddPreDefVal( TString("SteepestDescent") );
   AddPreDefVal( TString("RibierePolak") );
   AddPreDefVal( TString("FletcherReeves") );
   AddPreDefVal( TString("BFGS") );
 
// ProcessOptions: build the network as specified by the user
   CreateMLPOptions(fLayerSpec);

   if (IgnoreEventsWithNegWeightsInTraining()) {
      Log() << kFATAL << "Mechanism to ignore events with negative weights in training not available for method"
            << GetMethodTypeName()
            << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
            << Endl;
   }

// GetMvaValue: evaluate the network response for the current event
// (the elided loop body is sketched below)
   const Event* ev = GetEvent();
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      ...
   }

   // no error estimate is available for this method
   NoErrorCalc(err, errUpper);
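
A minimal sketch of the elided evaluation step, assuming the event values are copied into a flat buffer (the name d is hypothetical) before querying output neuron 0:

   std::vector<Double_t> d(Data()->GetNVariables());
   for (UInt_t ivar = 0; ivar < Data()->GetNVariables(); ivar++)
      d[ivar] = (Double_t)ev->GetValue(ivar);
   Double_t mvaVal = fMLP->Evaluate(0, d.data());  // TMultiLayerPerceptron::Evaluate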
 
// Train: fill a local TTree in the format expected by TMultiLayerPerceptron
   const Long_t basketsize = 128000;

   TTree *localTrainingTree = new TTree( "TMLPtrain", "Local training tree for TMlpANN" );
   localTrainingTree->Branch( "type",       &type,        "type/I",        basketsize );
   localTrainingTree->Branch( "weight",     &weight,      "weight/F",      basketsize );

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      const char* myVar = GetInternalVarName(ivar).Data();
      localTrainingTree->Branch( myVar, &vArr[ivar], Form("Var%02i/F", ivar), basketsize );
   }

   for (UInt_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
      const Event *ev = GetEvent(ievt);
      for (UInt_t i=0; i<GetNvar(); i++) {
         vArr[i] = ev->GetValue( i );
      }
      type   = DataInfo().IsSignal( ev ) ? 1 : 0;
      weight = ev->GetWeight();
      localTrainingTree->Fill();
   }
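
The local tree thus holds one entry per training event, with a layout along these lines (branch names follow the Branch() calls above; the two-variable problem is a hypothetical example):

   // TMLPtrain tree layout:
   //   type   /I   1 for signal, 0 for background
   //   weight /F   event weight
   //   var1   /F   first input variable  (internal name, leaf "Var00")
   //   var2   /F   second input variable (internal name, leaf "Var01")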
 
// assemble the entry-list selections that split the local tree into training
// and validation parts (signal events come first in the tree, then background)
   TString trainList = "Entry$<";
   trainList += 1.0-fValidationFraction;
   trainList += "*";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " || (Entry$>";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " && Entry$<";
   trainList += (Int_t)(Data()->GetNEvtSigTrain() + (1.0 - fValidationFraction)*Data()->GetNEvtBkgdTrain());
   trainList += ")";
   TString testList = TString("!(") + trainList + ")";

   Log() << kHEADER << "Requirement for training   events: \"" << trainList << "\"" << Endl;
   Log() << kINFO   << "Requirement for validation events: \"" << testList  << "\"" << Endl;
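
For concreteness, a sketch of the selections this produces for hypothetical counts of 1000 signal and 1000 background training events with ValidationFraction=0.5:

   // trainList: "Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500)"
   // testList:  "!(Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500))"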
 
// create the network on the local tree and train it
   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(),
                                     localTrainingTree,
                                     trainList,
                                     testList );
   fMLP->SetEventWeight( "weight" );

   // map the LearningMethod option onto the TMultiLayerPerceptron enum
   // (the elided chain is sketched after this block)
   fLearningMethod.ToLower();
   ...
      Log() << kFATAL << "Unknown Learning Method: \"" << fLearningMethod << "\"" << Endl;
   ...
   fMLP->SetLearningMethod( learningMethod );

   fMLP->Train(fNcycles, "" );

   delete localTrainingTree;
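
The elided lines above select the learning method from the lower-cased option string; a sketch of that mapping (only part of the chain is shown, the remaining predefined values are handled analogously):

   TMultiLayerPerceptron::ELearningMethod learningMethod = TMultiLayerPerceptron::kStochastic;
   if      (fLearningMethod == "stochastic") learningMethod = TMultiLayerPerceptron::kStochastic;
   else if (fLearningMethod == "batch"     ) learningMethod = TMultiLayerPerceptron::kBatch;
   else if (fLearningMethod == "bfgs"      ) learningMethod = TMultiLayerPerceptron::kBFGS;
   // ... SteepestDescent, RibierePolak, FletcherReeves analogously ...
   else Log() << kFATAL << "Unknown Learning Method: \"" << fLearningMethod << "\"" << Endl;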
 
// AddWeightsXMLTo: store the build options in the architecture node, then
// dump the weights to a temporary text file and copy them into the XML tree
   gTools().AddAttr( arch, "BuildOptions", fMLPBuildOptions.Data() );

   const TString tmpfile = GetWeightFileDir()+"/TMlp.nn.weights.temp";
   fMLP->DumpWeights( tmpfile.Data() );
   std::ifstream inf( tmpfile.Data() );
   while (inf.getline(temp,256)) {
      TString dummy(temp);
      if (dummy.BeginsWith('#')) {            // a "#..." header opens a new XML child
         ...
         dummy = dummy(0,dummy.First(' '));   // child name = first word of the header
         ...
      }
      data += (dummy + " ");                  // everything else is payload
   }
 
// ReadWeightsFromXML: rebuild the temporary text file from the XML weight
// file, section by section, so that TMultiLayerPerceptron can load it
   const TString fname = GetWeightFileDir()+"/TMlp.nn.weights.temp";
   std::ofstream fout( fname.Data() );
   double temp1=0, temp2=0;
   ...
      std::stringstream content(nodecontent);
      if (strcmp(gTools().GetName(ch),"input")==0) {
         fout << "#input normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"output")==0) {
         fout << "#output normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"neurons")==0) {
         fout << "#neurons weights" << std::endl;
         while (content >> temp1) {
            fout << temp1 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"synapses")==0) {
         fout << "#synapses weights";
         while (content >> temp1) {
            fout << std::endl << temp1;
         }
      }
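
The temporary text file reconstructed this way follows the DumpWeights format, approximately (all numbers here are hypothetical):

   // #input normalization
   // 0.53 2.31          (one pair per input variable)
   // #output normalization
   // 0.50 1.00
   // #neurons weights
   // 0.12
   // ...
   // #synapses weights
   // -0.07
   // ...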
 
// create a dummy tree so that a minimal network can be instantiated for
// testing, evaluation and application, then load the weights into it
   TTree * dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetInternalName();
      ...
   }

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
   fMLP->LoadWeights( fname );
 
// ReadWeightsFromStream: TMultiLayerPerceptron cannot read weights from a
// stream, so the stream content is first copied to a temporary file
   std::ofstream fout( "./TMlp.nn.weights.temp" );
   fout << istr.rdbuf();

   Log() << kINFO << "Load TMLP weights into " << fMLP << Endl;

   TTree * dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetLabel();
      ...
   }

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP->LoadWeights( "./TMlp.nn.weights.temp" );
 
// MakeClass: let TMultiLayerPerceptron export a standalone response class
   TString classFileName = "";
   if (theClassFileName == "")
      classFileName = GetWeightFileDir() + "/" + GetJobName() + "_" + GetMethodName() + ".class";
   else
      classFileName = theClassFileName;

   classFileName.ReplaceAll(".class","");
   Log() << kINFO << "Creating specific (TMultiLayerPerceptron) standalone response class: " << classFileName << Endl;
   fMLP->Export( classFileName.Data() );
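
TMultiLayerPerceptron::Export writes a standalone C++ class named after the file; a rough usage sketch (file name, class name and input count are hypothetical, and the exact generated interface depends on the ROOT version):

   #include "myJob_TMlpANN.cxx"          // hypothetical exported source

   double respond(double x1, double x2)
   {
      myJob_TMlpANN net;                 // class name matches the exported file
      return net.Value(0, x1, x2);       // response of output neuron 0
   }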
 
// GetHelpMessage: short description of the method
   Log() << "This feed-forward multilayer perceptron neural network is the " << Endl;
   Log() << "standard implementation distributed with ROOT (class TMultiLayerPerceptron)." << Endl;

   Log() << "Detailed information is available here:" << Endl;
   if (gConfig().WriteOptionsReference()) {
      Log() << "<a href=\"http://root.cern.ch/root/html/TMultiLayerPerceptron.html\">";
      Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html</a>" << Endl;
   }
   else Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html" << Endl;
 
#define REGISTER_METHOD(CLASS)
macro for registering a TMVA method with the classifier factory
 
const Bool_t EnforceNormalization__
 
char * Form(const char *fmt,...)
Formats a string in a circular formatting buffer.
 
Class that contains all the data information.
 
Float_t GetValue(UInt_t ivar) const
return value of i'th variable
 
Double_t GetWeight() const
return the event weight, depending on whether the flag IgnoreNegWeightsInTraining is set or not
 
Virtual base class for all MVA methods.
 
This is the TMVA TMultiLayerPerceptron interface class.
 
void ReadWeightsFromStream(std::istream &istr)
read weights from stream; since the MLP cannot read from a stream, the weights are first written to a temporary file that is then loaded into the MLP
 
void Init(void)
default initialisations
 
virtual Bool_t HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets)
TMlpANN can handle classification with 2 classes.
 
void Train(void)
performs TMlpANN training; available learning methods: Stochastic, Batch, SteepestDescent, RibierePolak, FletcherReeves, BFGS
 
Double_t GetMvaValue(Double_t *err=nullptr, Double_t *errUpper=nullptr)
calculate the value of the neural net for the current event
 
void DeclareOptions()
define the options (their key words) that can be set in the option string
 
void CreateMLPOptions(TString)
translates options from option string into TMlpANN language
 
void ReadWeightsFromXML(void *wghtnode)
rebuild temporary textfile from xml weightfile and load this file into MLP
 
MethodTMlpANN(const TString &jobName, const TString &methodTitle, DataSetInfo &theData, const TString &theOption="3000:N-1:N-2")
standard constructor
 
void ProcessOptions()
builds the neural network as specified by the user
 
void MakeClassSpecific(std::ostream &, const TString &) const
write specific classifier response; nothing to do here, all is taken care of by TMultiLayerPerceptron
 
void AddWeightsXMLTo(void *parent) const
write weights to xml file
 
void MakeClass(const TString &classFileName=TString("")) const
create reader class for classifier; overwrites the base-class function to create a specific class for TMultiLayerPerceptron
 
virtual ~MethodTMlpANN(void)
destructor
 
void GetHelpMessage() const
get help message text
 
Singleton class for Global types used by TMVA.
 
This class describes a neural network.
 
TSubString Strip(EStripType s=kTrailing, char c=' ') const
Return a substring of self stripped at beginning and/or end.
 
Ssiz_t First(char c) const
Find first occurrence of a character c.
 
const char * Data() const
 
TString & ReplaceAll(const TString &s1, const TString &s2)
 
Bool_t BeginsWith(const char *s, ECaseCompare cmp=kExact) const
 
TString & Remove(Ssiz_t pos)
 
A TTree represents a columnar dataset.
 
virtual Int_t Fill()
Fill all branches.
 
TBranch * Branch(const char *name, T *obj, Int_t bufsize=32000, Int_t splitlevel=99)
Add a new branch, and infer the data type from the type of obj being passed.
 
 
MsgLogger & Endl(MsgLogger &ml)