// standard constructor
TMVA::MethodTMlpANN::MethodTMlpANN( const TString& jobName, const TString& methodTitle,
                                    DataSetInfo& theData, const TString& theOption ) :
   TMVA::MethodBase( jobName, Types::kTMlpANN, methodTitle, theData, theOption ),
   fMLP(0), fLocalTrainingTree(0), fValidationFraction(0.5), fLearningMethod( "" )
{
}

// constructor from weight file
TMVA::MethodTMlpANN::MethodTMlpANN( DataSetInfo& theData, const TString& theWeightFile ) :
   TMVA::MethodBase( Types::kTMlpANN, theData, theWeightFile ),
   fMLP(0), fLocalTrainingTree(0), fValidationFraction(0.5), fLearningMethod( "" )
{
}
TMVA::MethodTMlpANN::~MethodTMlpANN( void )
{
   if (fMLP) delete fMLP;
}
// translates options from option string into TMlpANN language
void TMVA::MethodTMlpANN::CreateMLPOptions( TString layerSpec )
{
   fHiddenLayer = ":";
   while (layerSpec.Length()>0) {
      TString sToAdd = "";
      if (layerSpec.First(',')<0) {
         sToAdd = layerSpec;
         layerSpec = "";
      }
      else {
         sToAdd = layerSpec(0,layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
      }
      // "N" stands for the number of input variables
      Int_t nNodes = 0;
      if (sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
      nNodes += atoi(sToAdd);
      fHiddenLayer = Form( "%s%i:", (const char*)fHiddenLayer, nNodes );
   }
   // concatenate the input variable names, comma-separated
   std::vector<TString>::iterator itrVar    = (*fInputVars).begin();
   std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
   fMLPBuildOptions = "";
   for (; itrVar != itrVarEnd; ++itrVar) {
      TString myVar = *itrVar;
      fMLPBuildOptions += myVar;
      fMLPBuildOptions += ",";
   }
   fMLPBuildOptions.Chop(); // remove trailing ","

   // append the hidden layers and the output layer ("type")
   fMLPBuildOptions += fHiddenLayer;
   fMLPBuildOptions += "type";
   Log() << kINFO << "Use " << fNcycles << " training cycles" << Endl;
   Log() << kINFO << "Use configuration (nodes per hidden layer): " << fHiddenLayer << Endl;
}
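// Worked example (illustrative, not in the original source): with four input
// variables var1..var4 and the default HiddenLayers="N,N-1", the loop above
// produces fHiddenLayer = ":4:3:" ("N" -> 4, "N-1" -> 3), and after appending
// the inputs and the output the final build string reads
//
//    fMLPBuildOptions = "var1,var2,var3,var4:4:3:type"
//
// which is the layout string expected by the TMultiLayerPerceptron constructor.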
// define the options (their key words) that can be set in the option string
void TMVA::MethodTMlpANN::DeclareOptions()
{
   DeclareOptionRef( fNcycles   = 200,     "NCycles",      "Number of training cycles" );
   DeclareOptionRef( fLayerSpec = "N,N-1", "HiddenLayers",
                     "Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );

   DeclareOptionRef( fValidationFraction = 0.5, "ValidationFraction",
                     "Fraction of events in training tree used for cross validation" );

   DeclareOptionRef( fLearningMethod = "Stochastic", "LearningMethod", "Learning method" );
   AddPreDefVal( TString("Stochastic") );
   AddPreDefVal( TString("Batch") );
   AddPreDefVal( TString("SteepestDescent") );
   AddPreDefVal( TString("RibierePolak") );
   AddPreDefVal( TString("FletcherReeves") );
   AddPreDefVal( TString("BFGS") );
}
// builds the neural network as specified by the user
void TMVA::MethodTMlpANN::ProcessOptions()
{
   CreateMLPOptions(fLayerSpec);

   if (IgnoreEventsWithNegWeightsInTraining()) {
      Log() << kFATAL << "Mechanism to ignore events with negative weights in training not available for method: "
            << GetMethodTypeName()
            << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string." << Endl;
   }
}
// calculate the value of the neural net for the current event
Double_t TMVA::MethodTMlpANN::GetMvaValue( Double_t* err, Double_t* errUpper )
{
   const Event* ev = GetEvent();
   std::vector<Double_t> d( Data()->GetNVariables() );
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++)
      d[ivar] = (Double_t)ev->GetValue( ivar );

   NoErrorCalc(err, errUpper); // no error estimate available

   return fMLP->Evaluate( 0, &d[0] );
}
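// Application sketch (assumption): user code normally obtains this value
// through a TMVA::Reader rather than by calling GetMvaValue() directly, e.g.
//
//    TMVA::Reader reader( "!Color:!Silent" );
//    Float_t var1, var2;
//    reader.AddVariable( "var1", &var1 );
//    reader.AddVariable( "var2", &var2 );
//    reader.BookMVA( "TMlpANN", "weights/TMVAClassification_TMlpANN.weights.xml" );
//    Double_t mva = reader.EvaluateMVA( "TMlpANN" );   // weight-file path is illustrative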
// performs TMlpANN training; TMultiLayerPerceptron wants training and
// validation events in a single tree, so copy all training events into a
// local tree first
void TMVA::MethodTMlpANN::Train( void )
{
   Int_t   type;
   Float_t weight;
   const Long_t basketsize = 128000;
   Float_t* vArr = new Float_t[GetNvar()];

   TTree *localTrainingTree = new TTree( "TMLPtrain", "Local training tree for TMlpANN" );
   localTrainingTree->Branch( "type",   &type,   "type/I",   basketsize );
   localTrainingTree->Branch( "weight", &weight, "weight/F", basketsize );

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      const char* myVar = GetInternalVarName(ivar).Data();
      localTrainingTree->Branch( myVar, &vArr[ivar], Form("Var%02i/F", ivar), basketsize );
   }
   for (UInt_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
      const Event *ev = GetEvent(ievt);
      for (UInt_t i=0; i<GetNvar(); i++) {
         vArr[i] = ev->GetValue( i );
      }
      type   = DataInfo().IsSignal( ev ) ? 1 : 0;
      weight = ev->GetWeight();
      localTrainingTree->Fill();
   }
   // event lists for the MLP: the training events are ordered signal first,
   // then background; within each block the first (1 - fValidationFraction)
   // part is used for training, the rest for internal validation
   TString trainList = "Entry$<";
   trainList += 1.0-fValidationFraction;
   trainList += "*";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " || (Entry$>";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " && Entry$<";
   trainList += (Int_t)(Data()->GetNEvtSigTrain() + (1.0 - fValidationFraction)*Data()->GetNEvtBkgdTrain());
   trainList += ")";
   TString testList = TString("!(") + trainList + ")";
   Log() << kHEADER << "Requirement for training events:   \"" << trainList << "\"" << Endl;
   Log() << kINFO   << "Requirement for validation events: \"" << testList  << "\"" << Endl;
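   // Worked example (illustrative numbers): with fValidationFraction = 0.5,
   // 1000 signal and 800 background training events, the lists become
   //
   //    trainList = "Entry$<0.5*1000 || (Entry$>1000 && Entry$<1400)"
   //    testList  = "!(Entry$<0.5*1000 || (Entry$>1000 && Entry$<1400))"
   //
   // i.e. the first half of the signal block and the first half of the
   // background block train the network, the remainder is used for validation.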
   // create the network from the build options, the local tree and the event lists
   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), localTrainingTree,
                                     trainList, testList );
   fMLP->SetEventWeight( "weight" );

   // set learning method
   TMultiLayerPerceptron::ELearningMethod learningMethod = TMultiLayerPerceptron::kStochastic;
   fLearningMethod.ToLower();
   if      (fLearningMethod == "stochastic"     ) learningMethod = TMultiLayerPerceptron::kStochastic;
   else if (fLearningMethod == "batch"          ) learningMethod = TMultiLayerPerceptron::kBatch;
   else if (fLearningMethod == "steepestdescent") learningMethod = TMultiLayerPerceptron::kSteepestDescent;
   else if (fLearningMethod == "ribierepolak"   ) learningMethod = TMultiLayerPerceptron::kRibierePolak;
   else if (fLearningMethod == "fletcherreeves" ) learningMethod = TMultiLayerPerceptron::kFletcherReeves;
   else if (fLearningMethod == "bfgs"           ) learningMethod = TMultiLayerPerceptron::kBFGS;
   else Log() << kFATAL << "Unknown Learning Method: \"" << fLearningMethod << "\"" << Endl;
   fMLP->SetLearningMethod( learningMethod );

   // train the network
   fMLP->Train( fNcycles, "" );

   delete localTrainingTree;
   delete[] vArr;
}
// write weights to xml file
void TMVA::MethodTMlpANN::AddWeightsXMLTo( void* parent ) const
{
   void* wght = gTools().AddChild( parent, "Weights" );
   void* arch = gTools().AddChild( wght, "Architecture" );
   gTools().AddAttr( arch, "BuildOptions", fMLPBuildOptions.Data() );

   // dump the weights to a temporary text file first, then copy them into the XML node
   const TString tmpfile = GetWeightFileDir()+"/TMlp.nn.weights.temp";
   fMLP->DumpWeights( tmpfile.Data() );
   std::ifstream inf( tmpfile.Data() );
   char temp[256];
   TString data("");
   void* ch = 0;
   while (inf.getline(temp,256)) {
      TString dummy(temp);
      if (dummy.BeginsWith('#')) { // a new section begins: flush the previous one
         if (ch != 0) gTools().AddRawLine( ch, data.Data() );
         dummy = dummy.Strip(TString::kLeading, '#');
         dummy = dummy(0,dummy.First(' '));
         ch = gTools().AddChild(wght, dummy);
         data.Resize(0);
         continue;
      }
      data += (dummy + " ");
   }
   if (ch != 0) gTools().AddRawLine( ch, data.Data() );
   inf.close();
}
// rebuild temporary text file from xml weight file and load this file into the MLP
void TMVA::MethodTMlpANN::ReadWeightsFromXML( void* wghtnode )
{
   void* ch = gTools().GetChild(wghtnode);
   gTools().ReadAttr( ch, "BuildOptions", fMLPBuildOptions );

   ch = gTools().GetNextChild(ch);
   const TString fname = GetWeightFileDir()+"/TMlp.nn.weights.temp";
   std::ofstream fout( fname.Data() );
   double temp1=0, temp2=0;
   while (ch) {
      const char* nodecontent = gTools().GetContent(ch);
      std::stringstream content(nodecontent);
      if (strcmp(gTools().GetName(ch),"input")==0) {
         fout << "#input normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"output")==0) {
         fout << "#output normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"neurons")==0) {
         fout << "#neurons weights" << std::endl;
         while (content >> temp1) fout << temp1 << std::endl;
      }
      if (strcmp(gTools().GetName(ch),"synapses")==0) {
         fout << "#synapses weights";
         while (content >> temp1) fout << std::endl << temp1;
      }
      ch = gTools().GetNextChild(ch);
   }
   fout.close();
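   // The temporary file rebuilt above mirrors the TMultiLayerPerceptron dump
   // format read back by LoadWeights(), e.g. (illustrative values):
   //
   //    #input normalization
   //    1.2 0.4
   //    0.9 -0.1
   //    #output normalization
   //    1 0
   //    #neurons weights
   //    0.13
   //    -0.42
   //    #synapses weights
   //    -0.27
   //    0.08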
   // create a dummy tree, needed to build a minimal network for testing,
   // evaluation and application
   static Double_t* d = new Double_t[Data()->GetNVariables()];
   static Int_t type = 0;
   gROOT->cd();
   TTree* dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetInternalName();
      dummyTree->Branch(Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()));
   }
   dummyTree->Branch("type", &type, "type/I");

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
   fMLP->LoadWeights( fname );
}
// read weights from stream; since the MLP can not read from the stream, we
// first write the weights to a temporary file and then load that file
void TMVA::MethodTMlpANN::ReadWeightsFromStream( std::istream& istr )
{
   std::ofstream fout( "./TMlp.nn.weights.temp" );
   fout << istr.rdbuf();
   fout.close();

   Log() << kINFO << "Load TMLP weights into " << fMLP << Endl;
   Double_t* d = new Double_t[Data()->GetNVariables()];
   Int_t type = 0;
   gROOT->cd();
   TTree* dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetLabel();
      dummyTree->Branch(Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()));
   }
   dummyTree->Branch("type", &type, "type/I");

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
   fMLP->LoadWeights( "./TMlp.nn.weights.temp" );
}
// create reader class for classifier: overwrites the base class function to
// create a specific class for TMultiLayerPerceptron
void TMVA::MethodTMlpANN::MakeClass( const TString& theClassFileName ) const
{
   TString classFileName = "";
   if (theClassFileName == "")
      classFileName = GetWeightFileDir() + "/" + GetJobName() + "_" + GetMethodName() + ".class";
   else
      classFileName = theClassFileName;

   classFileName.ReplaceAll(".class","");
   Log() << kINFO << "Creating specific (TMultiLayerPerceptron) standalone response class: " << classFileName << Endl;
   fMLP->Export( classFileName.Data() );
}
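// Usage sketch (assumption): TMultiLayerPerceptron::Export writes standalone
// C++ sources (<classFileName>.h/.cxx) defining a class of the same name with
// Value(...) methods, usable without ROOT:
//
//    #include "myJob_TMlpANN.h"              // hypothetical generated header
//    myJob_TMlpANN net;
//    double in[4] = { 0.1, 0.2, 0.3, 0.4 };  // one value per input variable
//    double response = net.Value( 0, in );   // 0 = output neuron index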
// get help message text
void TMVA::MethodTMlpANN::GetHelpMessage() const
{
   Log() << "This feed-forward multilayer perceptron neural network is the " << Endl;
   Log() << "standard implementation distributed with ROOT (class TMultiLayerPerceptron)." << Endl;
   Log() << Endl;
   Log() << "Detailed information is available here:" << Endl;
   if (gConfig().WriteOptionsReference()) {
      Log() << "<a href=\"http://root.cern.ch/root/html/TMultiLayerPerceptron.html\">";
      Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html</a>" << Endl;
   }
   else Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html" << Endl;
}