MethodTMlpANN.cxx
// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne
/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis      *
 * Package: TMVA                                                                  *
 * Class  : MethodTMlpANN                                                         *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation (see header for description)                               *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland              *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany      *
 *      Kai Voss        <Kai.Voss@cern.ch>       - U. of Victoria, Canada         *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         *
 *      U. of Victoria, Canada                                                    *
 *      MPI-K Heidelberg, Germany                                                 *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/LICENSE)                                          *
 **********************************************************************************/

////////////////////////////////////////////////////////////////////////////////

/* Begin_Html

 This is the TMVA TMultiLayerPerceptron interface class. It provides
 training and testing of the ROOT-internal MLP class in the TMVA framework.<br>
33 
34  Available learning methods:<br>
35  <ul>
36  <li>Stochastic </li>
37  <li>Batch </li>
38  <li>SteepestDescent </li>
39  <li>RibierePolak </li>
40  <li>FletcherReeves </li>
41  <li>BFGS </li>
42  </ul>
43 End_Html */
44 //
45 // See the TMultiLayerPerceptron class description
46 // for details on this ANN.
47 //
48 //_______________________________________________________________________
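
// Usage sketch (illustrative, not part of the original source): the method
// is typically booked via the TMVA::Factory. "outputFile" and the option
// values are hypothetical; the option keys are the ones declared in
// DeclareOptions() below.
//
//    TMVA::Factory factory( "TMVAClassification", outputFile,
//                           "!V:!Silent:AnalysisType=Classification" );
//    factory.BookMethod( TMVA::Types::kTMlpANN, "TMlpANN",
//                        "!H:!V:NCycles=200:HiddenLayers=N,N-1:"
//                        "ValidationFraction=0.3:LearningMethod=BFGS" );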

#include <cstdlib>
#include <iostream>
#include <fstream>

#include "Riostream.h"
#include "TLeaf.h"
#include "TEventList.h"
#include "TObjString.h"
#include "TROOT.h"
#include "TMultiLayerPerceptron.h"

#include "TMVA/Config.h"
#include "TMVA/MethodTMlpANN.h"

#include "TMVA/ClassifierFactory.h"
#ifndef ROOT_TMVA_Tools
#include "TMVA/Tools.h"
#endif

using std::atoi;

// some additional TMlpANN options
const Bool_t EnforceNormalization__=kTRUE;
#if ROOT_VERSION_CODE > ROOT_VERSION(5,13,06)
//const TMultiLayerPerceptron::ELearningMethod LearningMethod__= TMultiLayerPerceptron::kStochastic;
// const TMultiLayerPerceptron::ELearningMethod LearningMethod__= TMultiLayerPerceptron::kBatch;
#else
//const TMultiLayerPerceptron::LearningMethod LearningMethod__= TMultiLayerPerceptron::kStochastic;
#endif

REGISTER_METHOD(TMlpANN)

ClassImp(TMVA::MethodTMlpANN)

////////////////////////////////////////////////////////////////////////////////
/// standard constructor

TMVA::MethodTMlpANN::MethodTMlpANN( const TString& jobName,
                                    const TString& methodTitle,
                                    DataSetInfo& theData,
                                    const TString& theOption,
                                    TDirectory* theTargetDir) :
   TMVA::MethodBase( jobName, Types::kTMlpANN, methodTitle, theData, theOption, theTargetDir ),
   fMLP(0),
   fLocalTrainingTree(0),
   fNcycles(100),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
}

////////////////////////////////////////////////////////////////////////////////
/// constructor from weight file

TMVA::MethodTMlpANN::MethodTMlpANN( DataSetInfo& theData,
                                    const TString& theWeightFile,
                                    TDirectory* theTargetDir ) :
   TMVA::MethodBase( Types::kTMlpANN, theData, theWeightFile, theTargetDir ),
   fMLP(0),
   fLocalTrainingTree(0),
   fNcycles(100),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
}

////////////////////////////////////////////////////////////////////////////////
/// TMlpANN can handle classification with 2 classes

Bool_t TMVA::MethodTMlpANN::HasAnalysisType( Types::EAnalysisType type, UInt_t numberClasses,
                                             UInt_t /*numberTargets*/ )
{
   if (type == Types::kClassification && numberClasses == 2) return kTRUE;
   return kFALSE;
}


////////////////////////////////////////////////////////////////////////////////
/// default initialisations

void TMVA::MethodTMlpANN::Init( void )
{
}

////////////////////////////////////////////////////////////////////////////////
/// destructor

TMVA::MethodTMlpANN::~MethodTMlpANN( void )
{
   if (fMLP) delete fMLP;
}

////////////////////////////////////////////////////////////////////////////////
/// translates options from option string into TMlpANN language

void TMVA::MethodTMlpANN::CreateMLPOptions( TString layerSpec )
{
   fHiddenLayer = ":";

   while (layerSpec.Length()>0) {
      TString sToAdd="";
      if (layerSpec.First(',')<0) {
         sToAdd = layerSpec;
         layerSpec = "";
      }
      else {
         sToAdd = layerSpec(0,layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
      }
      int nNodes = 0;
      if (sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
      nNodes += atoi(sToAdd);
      fHiddenLayer = Form( "%s%i:", (const char*)fHiddenLayer, nNodes );
   }

   // set input vars
   std::vector<TString>::iterator itrVar    = (*fInputVars).begin();
   std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
   fMLPBuildOptions = "";
   for (; itrVar != itrVarEnd; itrVar++) {
      if (EnforceNormalization__) fMLPBuildOptions += "@";
      TString myVar = *itrVar;
      fMLPBuildOptions += myVar;
      fMLPBuildOptions += ",";
   }
   fMLPBuildOptions.Chop(); // remove last ","

   // prepare final options for MLP kernel
   fMLPBuildOptions += fHiddenLayer;
   fMLPBuildOptions += "type";

   Log() << kINFO << "Use " << fNcycles << " training cycles" << Endl;
   Log() << kINFO << "Use configuration (nodes per hidden layer): " << fHiddenLayer << Endl;
}
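
// Worked example (a sketch following the code above, with hypothetical
// variable names): for four input variables v1..v4 and HiddenLayers="N,N-1",
// CreateMLPOptions() produces
//    fHiddenLayer     == ":4:3:"
//    fMLPBuildOptions == "@v1,@v2,@v3,@v4:4:3:type"
// The '@' prefixes (added when EnforceNormalization__ is set) request input
// normalization, and the trailing "type" names the target branch of the
// local training tree filled in Train().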

////////////////////////////////////////////////////////////////////////////////
/// define the options (their key words) that can be set in the option string
/// known options:
///    NCycles <integer>      Number of training cycles (too many cycles could overtrain the network)
///    HiddenLayers <string>  Layout of the hidden layers (nodes per layer)
///      * specifications for each hidden layer are separated by commas
///      * for each layer the number of nodes can be either absolute (simply a number)
///        or relative to the number of input nodes to the neural net (N)
///      * there is always a single node in the output layer
/// example: a net with 6 input nodes and "HiddenLayers=N-1,N-2" has 6,5,4,1 nodes in
/// layers 1,2,3,4, respectively

void TMVA::MethodTMlpANN::DeclareOptions()
{
   DeclareOptionRef( fNcycles   = 200,     "NCycles",      "Number of training cycles" );
   DeclareOptionRef( fLayerSpec = "N,N-1", "HiddenLayers", "Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );

   DeclareOptionRef( fValidationFraction = 0.5, "ValidationFraction",
                     "Fraction of events in training tree used for cross validation" );

   DeclareOptionRef( fLearningMethod = "Stochastic", "LearningMethod", "Learning method" );
   AddPreDefVal( TString("Stochastic") );
   AddPreDefVal( TString("Batch") );
   AddPreDefVal( TString("SteepestDescent") );
   AddPreDefVal( TString("RibierePolak") );
   AddPreDefVal( TString("FletcherReeves") );
   AddPreDefVal( TString("BFGS") );
}
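
// Example (sketch): a booking string that spells out exactly the defaults
// declared above would read
//    "NCycles=200:HiddenLayers=N,N-1:ValidationFraction=0.5:LearningMethod=Stochastic"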

////////////////////////////////////////////////////////////////////////////////
/// builds the neural network as specified by the user

void TMVA::MethodTMlpANN::ProcessOptions()
{
   CreateMLPOptions(fLayerSpec);

   if (IgnoreEventsWithNegWeightsInTraining()) {
      Log() << kFATAL << "Mechanism to ignore events with negative weights in training not available for method "
            << GetMethodTypeName()
            << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
            << Endl;
   }
}

////////////////////////////////////////////////////////////////////////////////
/// calculate the value of the neural net for the current event

Double_t TMVA::MethodTMlpANN::GetMvaValue( Double_t* err, Double_t* errUpper )
{
   const Event* ev = GetEvent();
   TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);

   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      d[ivar] = (Double_t)ev->GetValue(ivar);
   }
   Double_t mvaVal = fMLP->Evaluate(0,d);

   // cannot determine error
   NoErrorCalc(err, errUpper);

   return mvaVal;
}
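
// Application sketch (assumes the standard TMVA::Reader API; variable and
// file names are hypothetical):
//
//    TMVA::Reader reader( "!Color:!Silent" );
//    Float_t v1, v2;
//    reader.AddVariable( "var1", &v1 );
//    reader.AddVariable( "var2", &v2 );
//    reader.BookMVA( "TMlpANN", "weights/TMVAClassification_TMlpANN.weights.xml" );
//    // ... fill v1, v2 for the current event, then:
//    Double_t mva = reader.EvaluateMVA( "TMlpANN" );   // invokes GetMvaValue()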

////////////////////////////////////////////////////////////////////////////////
/// performs TMlpANN training
/// available learning methods:
///
///    TMultiLayerPerceptron::kStochastic
///    TMultiLayerPerceptron::kBatch
///    TMultiLayerPerceptron::kSteepestDescent
///    TMultiLayerPerceptron::kRibierePolak
///    TMultiLayerPerceptron::kFletcherReeves
///    TMultiLayerPerceptron::kBFGS
///
/// TMultiLayerPerceptron wants the training and test trees at once,
/// so merge the training and testing trees from the MVA factory first:

void TMVA::MethodTMlpANN::Train( void )
{
   Int_t type;
   Float_t weight;
   const Long_t basketsize = 128000;
   Float_t* vArr = new Float_t[GetNvar()];

   TTree *localTrainingTree = new TTree( "TMLPtrain", "Local training tree for TMlpANN" );
   localTrainingTree->Branch( "type",   &type,   "type/I",   basketsize );
   localTrainingTree->Branch( "weight", &weight, "weight/F", basketsize );

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      const char* myVar = GetInternalVarName(ivar).Data();
      localTrainingTree->Branch( myVar, &vArr[ivar], Form("Var%02i/F", ivar), basketsize );
   }

   for (UInt_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
      const Event *ev = GetEvent(ievt);
      for (UInt_t i=0; i<GetNvar(); i++) {
         vArr[i] = ev->GetValue( i );
      }
      type   = DataInfo().IsSignal( ev ) ? 1 : 0;
      weight = ev->GetWeight();
      localTrainingTree->Fill();
   }

   // These are the event lists for the mlp train method
   // first events in the tree are for training
   // the rest for internal testing (cross validation)...
   // NOTE: the training events are ordered: first part is signal, second part background
   TString trainList = "Entry$<";
   trainList += 1.0-fValidationFraction;
   trainList += "*";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " || (Entry$>";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " && Entry$<";
   trainList += (Int_t)(Data()->GetNEvtSigTrain() + (1.0 - fValidationFraction)*Data()->GetNEvtBkgdTrain());
   trainList += ")";
   TString testList = TString("!(") + trainList + ")";
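
   // Worked example (sketch): with 1000 signal and 1000 background training
   // events and ValidationFraction=0.5, these selections become
   //    trainList == "Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500)"
   //    testList  == "!(Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500))"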

   // print the requirements
   Log() << kINFO << "Requirement for training events: \"" << trainList << "\"" << Endl;
   Log() << kINFO << "Requirement for validation events: \"" << testList << "\"" << Endl;

   // localTrainingTree->Print();

   // create NN
   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(),
                                     localTrainingTree,
                                     trainList,
                                     testList );
   fMLP->SetEventWeight( "weight" );

   // set learning method
#if ROOT_VERSION_CODE > ROOT_VERSION(5,13,06)
   TMultiLayerPerceptron::ELearningMethod learningMethod = TMultiLayerPerceptron::kStochastic;
#else
   TMultiLayerPerceptron::LearningMethod  learningMethod = TMultiLayerPerceptron::kStochastic;
#endif

   fLearningMethod.ToLower();
   if      (fLearningMethod == "stochastic"      ) learningMethod = TMultiLayerPerceptron::kStochastic;
   else if (fLearningMethod == "batch"           ) learningMethod = TMultiLayerPerceptron::kBatch;
   else if (fLearningMethod == "steepestdescent" ) learningMethod = TMultiLayerPerceptron::kSteepestDescent;
   else if (fLearningMethod == "ribierepolak"    ) learningMethod = TMultiLayerPerceptron::kRibierePolak;
   else if (fLearningMethod == "fletcherreeves"  ) learningMethod = TMultiLayerPerceptron::kFletcherReeves;
   else if (fLearningMethod == "bfgs"            ) learningMethod = TMultiLayerPerceptron::kBFGS;
   else {
      Log() << kFATAL << "Unknown Learning Method: \"" << fLearningMethod << "\"" << Endl;
   }
   fMLP->SetLearningMethod( learningMethod );

   // train NN
   fMLP->Train(fNcycles, "text,update=50" );

   // write weights to file;
   // this is not nice, but fMLP gets deleted at the end of Train()
   delete localTrainingTree;
   delete [] vArr;
}


////////////////////////////////////////////////////////////////////////////////
/// write weights to xml file

void TMVA::MethodTMlpANN::AddWeightsXMLTo( void* parent ) const
{
   // first the architecture
   void *wght = gTools().AddChild(parent, "Weights");
   void* arch = gTools().AddChild( wght, "Architecture" );
   gTools().AddAttr( arch, "BuildOptions", fMLPBuildOptions.Data() );

   // dump weights first in temporary txt file, read from there into xml
   fMLP->DumpWeights( "weights/TMlp.nn.weights.temp" );
   std::ifstream inf( "weights/TMlp.nn.weights.temp" );
   char temp[256];
   TString data("");
   void *ch=NULL;
   while (inf.getline(temp,256)) {
      TString dummy(temp);
      //std::cout << dummy << std::endl; // remove annoying debug printout with std::cout
      if (dummy.BeginsWith('#')) {
         if (ch!=0) gTools().AddRawLine( ch, data.Data() );
         dummy = dummy.Strip(TString::kLeading, '#');
         dummy = dummy(0,dummy.First(' '));
         ch = gTools().AddChild(wght, dummy);
         data.Resize(0);
         continue;
      }
      data += (dummy + " ");
   }
   if (ch != 0) gTools().AddRawLine( ch, data.Data() );

   inf.close();
}
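
// Sketch of the XML layout produced above (the BuildOptions value is
// hypothetical; the child names "input", "output", "neurons" and "synapses"
// are the '#'-header names written by TMultiLayerPerceptron::DumpWeights):
//
//    <Weights>
//      <Architecture BuildOptions="@v1,@v2:2:1:type"/>
//      <input>    ... input normalization constants ...  </input>
//      <output>   ... output normalization constants ... </output>
//      <neurons>  ... neuron weights ...                 </neurons>
//      <synapses> ... synapse weights ...                </synapses>
//    </Weights>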

////////////////////////////////////////////////////////////////////////////////
/// rebuild temporary textfile from xml weightfile and load this
/// file into MLP

void TMVA::MethodTMlpANN::ReadWeightsFromXML( void* wghtnode )
{
   void* ch = gTools().GetChild(wghtnode);
   gTools().ReadAttr( ch, "BuildOptions", fMLPBuildOptions );

   ch = gTools().GetNextChild(ch);
   const char* fname = "weights/TMlp.nn.weights.temp";
   std::ofstream fout( fname );
   double temp1=0,temp2=0;
   while (ch) {
      const char* nodecontent = gTools().GetContent(ch);
      std::stringstream content(nodecontent);
      if (strcmp(gTools().GetName(ch),"input")==0) {
         fout << "#input normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"output")==0) {
         fout << "#output normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"neurons")==0) {
         fout << "#neurons weights" << std::endl;
         while (content >> temp1) {
            fout << temp1 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"synapses")==0) {
         fout << "#synapses weights" ;
         while (content >> temp1) {
            fout << std::endl << temp1 ;
         }
      }
      ch = gTools().GetNextChild(ch);
   }
   fout.close();

   // Here we create a dummy tree necessary to create a minimal NN
   // to be used for testing, evaluation and application
   TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);
   TTHREAD_TLS(Int_t) type;

   gROOT->cd();
   TTree * dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetInternalName();
      dummyTree->Branch(Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()));
   }
   dummyTree->Branch("type", &type, "type/I");

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
   fMLP->LoadWeights( fname );
}

////////////////////////////////////////////////////////////////////////////////
/// read weights from stream
/// since the MLP cannot read from the stream, we
/// 1st: write the weights to temporary file

void TMVA::MethodTMlpANN::ReadWeightsFromStream( std::istream& istr )
{
   std::ofstream fout( "./TMlp.nn.weights.temp" );
   fout << istr.rdbuf();
   fout.close();
   // 2nd: load the weights from the temporary file into the MLP
   // the MLP is already built
   Log() << kINFO << "Load TMLP weights into " << fMLP << Endl;

   Double_t* d = new Double_t[Data()->GetNVariables()];
   Int_t type;
   gROOT->cd();
   TTree * dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetLabel();
      dummyTree->Branch(Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()));
   }
   dummyTree->Branch("type", &type, "type/I");

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );

   fMLP->LoadWeights( "./TMlp.nn.weights.temp" );
   // here we can delete the temporary file
   // how?
   delete [] d;
}

////////////////////////////////////////////////////////////////////////////////
/// create reader class for classifier -> overwrites base class function
/// create specific class for TMultiLayerPerceptron

void TMVA::MethodTMlpANN::MakeClass( const TString& theClassFileName ) const
{
   // the default consists of
   TString classFileName = "";
   if (theClassFileName == "")
      classFileName = GetWeightFileDir() + "/" + GetJobName() + "_" + GetMethodName() + ".class";
   else
      classFileName = theClassFileName;

   classFileName.ReplaceAll(".class","");
   Log() << kINFO << "Creating specific (TMultiLayerPerceptron) standalone response class: " << classFileName << Endl;
   fMLP->Export( classFileName.Data() );
}
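
// Usage sketch for the exported standalone class (assumptions: the exact
// file and class names depend on the job and method names, and Value() is
// the evaluation method generated by TMultiLayerPerceptron::Export for C++):
//
//    #include "weights/MyJob_TMlpANN.cxx"   // hypothetical generated file
//    MyJob_TMlpANN net;                     // hypothetical generated class
//    double mva = net.Value( 0, v1, v2 );   // 0 = index of the output neuron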

////////////////////////////////////////////////////////////////////////////////
/// write specific classifier response
/// nothing to do here - all taken care of by TMultiLayerPerceptron

void TMVA::MethodTMlpANN::MakeClassSpecific( std::ostream& /*fout*/, const TString& /*className*/ ) const
{
}

////////////////////////////////////////////////////////////////////////////////
/// get help message text
///
/// typical length of text line:
///         "|--------------------------------------------------------------|"

void TMVA::MethodTMlpANN::GetHelpMessage() const
{
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Short description:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "This feed-forward multilayer perceptron neural network is the " << Endl;
   Log() << "standard implementation distributed with ROOT (class TMultiLayerPerceptron)." << Endl;
   Log() << Endl;
   Log() << "Detailed information is available here:" << Endl;
   if (gConfig().WriteOptionsReference()) {
      Log() << "<a href=\"http://root.cern.ch/root/html/TMultiLayerPerceptron.html\">";
      Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html</a>" << Endl;
   }
   else Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html" << Endl;
   Log() << Endl;
}