Logo ROOT  
Reference Guide
 
Loading...
Searching...
No Matches
MethodTMlpANN.cxx
Go to the documentation of this file.
1// @(#)root/tmva $Id$
2// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne
3/**********************************************************************************
4 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis *
5 * Package: TMVA *
6 * Class : MethodTMlpANN *
7 * Web : http://tmva.sourceforge.net *
8 * *
9 * Description: *
10 * Implementation (see header for description) *
11 * *
12 * Authors (alphabetical): *
13 * Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland *
14 * Helge Voss <Helge.Voss@cern.ch> - MPI-K Heidelberg, Germany *
15 * Kai Voss <Kai.Voss@cern.ch> - U. of Victoria, Canada *
16 * *
17 * Copyright (c) 2005: *
18 * CERN, Switzerland *
19 * U. of Victoria, Canada *
20 * MPI-K Heidelberg, Germany *
21 * *
22 * Redistribution and use in source and binary forms, with or without *
23 * modification, are permitted according to the terms listed in LICENSE *
24 * (http://tmva.sourceforge.net/LICENSE) *
25 **********************************************************************************/
26
27/*! \class TMVA::MethodTMlpANN
28\ingroup TMVA
29
30This is the TMVA TMultiLayerPerceptron interface class. It provides the
31training and testing of the ROOT-internal MLP class in the TMVA framework.
32
33Available learning methods:<br>
34
35 - Stochastic
36 - Batch
37 - SteepestDescent
38 - RibierePolak
39 - FletcherReeves
40 - BFGS
41
42See the TMultiLayerPerceptron class description
43for details on this ANN.
44*/
45
46#include "TMVA/MethodTMlpANN.h"
47
48#include "TMVA/Config.h"
49#include "TMVA/Configurable.h"
50#include "TMVA/DataSet.h"
51#include "TMVA/DataSetInfo.h"
52#include "TMVA/IMethod.h"
53#include "TMVA/MethodBase.h"
54#include "TMVA/MsgLogger.h"
55#include "TMVA/Types.h"
56#include "TMVA/VariableInfo.h"
57
59#include "TMVA/Tools.h"
60
61#include "TLeaf.h"
62#include "TEventList.h"
63#include "TROOT.h"
65#include "ThreadLocalStorage.h"
66
#include <cstdlib>
#include <cstring>
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>
70
71
72using std::atoi;
73
74// some additional TMlpANN options
76
77REGISTER_METHOD(TMlpANN)
78
80
81////////////////////////////////////////////////////////////////////////////////
82/// standard constructor
83
85 const TString& methodTitle,
86 DataSetInfo& theData,
87 const TString& theOption) :
88 TMVA::MethodBase( jobName, Types::kTMlpANN, methodTitle, theData, theOption),
89 fMLP(0),
90 fLocalTrainingTree(0),
91 fNcycles(100),
92 fValidationFraction(0.5),
93 fLearningMethod( "" )
94{
95}
96
97////////////////////////////////////////////////////////////////////////////////
98/// constructor from weight file
99
101 const TString& theWeightFile) :
102 TMVA::MethodBase( Types::kTMlpANN, theData, theWeightFile),
103 fMLP(0),
104 fLocalTrainingTree(0),
105 fNcycles(100),
106 fValidationFraction(0.5),
107 fLearningMethod( "" )
108{
109}
110
111////////////////////////////////////////////////////////////////////////////////
112/// TMlpANN can handle classification with 2 classes
113
115 UInt_t /*numberTargets*/ )
116{
117 if (type == Types::kClassification && numberClasses == 2) return kTRUE;
118 return kFALSE;
119}
120
121
122////////////////////////////////////////////////////////////////////////////////
123/// default initialisations
124
126{
127}
128
129////////////////////////////////////////////////////////////////////////////////
130/// destructor
131
133{
134 if (fMLP) delete fMLP;
135}
136
137////////////////////////////////////////////////////////////////////////////////
138/// translates options from option string into TMlpANN language
139
141{
142 fHiddenLayer = ":";
143
144 while (layerSpec.Length()>0) {
145 TString sToAdd="";
146 if (layerSpec.First(',')<0) {
147 sToAdd = layerSpec;
148 layerSpec = "";
149 }
150 else {
151 sToAdd = layerSpec(0,layerSpec.First(','));
152 layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
153 }
154 int nNodes = 0;
155 if (sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
156 nNodes += atoi(sToAdd);
157 fHiddenLayer = Form( "%s%i:", (const char*)fHiddenLayer, nNodes );
158 }
159
160 // set input vars
161 std::vector<TString>::iterator itrVar = (*fInputVars).begin();
162 std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
163 fMLPBuildOptions = "";
164 for (; itrVar != itrVarEnd; ++itrVar) {
165 if (EnforceNormalization__) fMLPBuildOptions += "@";
166 TString myVar = *itrVar; ;
167 fMLPBuildOptions += myVar;
168 fMLPBuildOptions += ",";
169 }
170 fMLPBuildOptions.Chop(); // remove last ","
171
172 // prepare final options for MLP kernel
173 fMLPBuildOptions += fHiddenLayer;
174 fMLPBuildOptions += "type";
175
176 Log() << kINFO << "Use " << fNcycles << " training cycles" << Endl;
177 Log() << kINFO << "Use configuration (nodes per hidden layer): " << fHiddenLayer << Endl;
178}
179
180////////////////////////////////////////////////////////////////////////////////
181/// define the options (their key words) that can be set in the option string
182///
183/// known options:
184///
185/// - NCycles <integer> Number of training cycles (too many cycles could overtrain the network)
186/// - HiddenLayers <string> Layout of the hidden layers (nodes per layer)
187/// * specifications for each hidden layer are separated by comma
188/// * for each layer the number of nodes can be either absolute (simply a number)
189/// or relative to the number of input nodes to the neural net (N)
190/// * there is always a single node in the output layer
191///
192/// example: a net with 6 input nodes and "Hiddenlayers=N-1,N-2" has 6,5,4,1 nodes in the
193/// layers 1,2,3,4, respectively
194
196{
197 DeclareOptionRef( fNcycles = 200, "NCycles", "Number of training cycles" );
198 DeclareOptionRef( fLayerSpec = "N,N-1", "HiddenLayers", "Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );
199
200 DeclareOptionRef( fValidationFraction = 0.5, "ValidationFraction",
201 "Fraction of events in training tree used for cross validation" );
202
203 DeclareOptionRef( fLearningMethod = "Stochastic", "LearningMethod", "Learning method" );
204 AddPreDefVal( TString("Stochastic") );
205 AddPreDefVal( TString("Batch") );
206 AddPreDefVal( TString("SteepestDescent") );
207 AddPreDefVal( TString("RibierePolak") );
208 AddPreDefVal( TString("FletcherReeves") );
209 AddPreDefVal( TString("BFGS") );
210}
211
212////////////////////////////////////////////////////////////////////////////////
213/// builds the neural network as specified by the user
214
216{
217 CreateMLPOptions(fLayerSpec);
218
219 if (IgnoreEventsWithNegWeightsInTraining()) {
220 Log() << kFATAL << "Mechanism to ignore events with negative weights in training not available for method"
221 << GetMethodTypeName()
222 << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
223 << Endl;
224 }
225}
226
227////////////////////////////////////////////////////////////////////////////////
228/// calculate the value of the neural net for the current event
229
231{
232 const Event* ev = GetEvent();
233 TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);
234
235 for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
236 d[ivar] = (Double_t)ev->GetValue(ivar);
237 }
238 Double_t mvaVal = fMLP->Evaluate(0,d);
239
240 // cannot determine error
241 NoErrorCalc(err, errUpper);
242
243 return mvaVal;
244}
245
246////////////////////////////////////////////////////////////////////////////////
247/// performs TMlpANN training
248/// available learning methods:
249///
250/// - TMultiLayerPerceptron::kStochastic
251/// - TMultiLayerPerceptron::kBatch
252/// - TMultiLayerPerceptron::kSteepestDescent
253/// - TMultiLayerPerceptron::kRibierePolak
254/// - TMultiLayerPerceptron::kFletcherReeves
255/// - TMultiLayerPerceptron::kBFGS
256///
257/// TMultiLayerPerceptron wants test and training tree at once
258/// so merge the training and testing trees from the MVA factory first:
259
261{
262 Int_t type;
263 Float_t weight;
264 const Long_t basketsize = 128000;
265 Float_t* vArr = new Float_t[GetNvar()];
266
267 TTree *localTrainingTree = new TTree( "TMLPtrain", "Local training tree for TMlpANN" );
268 localTrainingTree->Branch( "type", &type, "type/I", basketsize );
269 localTrainingTree->Branch( "weight", &weight, "weight/F", basketsize );
270
271 for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
272 const char* myVar = GetInternalVarName(ivar).Data();
273 localTrainingTree->Branch( myVar, &vArr[ivar], Form("Var%02i/F", ivar), basketsize );
274 }
275
276 for (UInt_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
277 const Event *ev = GetEvent(ievt);
278 for (UInt_t i=0; i<GetNvar(); i++) {
279 vArr[i] = ev->GetValue( i );
280 }
281 type = DataInfo().IsSignal( ev ) ? 1 : 0;
282 weight = ev->GetWeight();
283 localTrainingTree->Fill();
284 }
285
286 // These are the event lists for the mlp train method
287 // first events in the tree are for training
288 // the rest for internal testing (cross validation)...
289 // NOTE: the training events are ordered: first part is signal, second part background
290 TString trainList = "Entry$<";
291 trainList += 1.0-fValidationFraction;
292 trainList += "*";
293 trainList += (Int_t)Data()->GetNEvtSigTrain();
294 trainList += " || (Entry$>";
295 trainList += (Int_t)Data()->GetNEvtSigTrain();
296 trainList += " && Entry$<";
297 trainList += (Int_t)(Data()->GetNEvtSigTrain() + (1.0 - fValidationFraction)*Data()->GetNEvtBkgdTrain());
298 trainList += ")";
299 TString testList = TString("!(") + trainList + ")";
300
301 // print the requirements
302 Log() << kHEADER << "Requirement for training events: \"" << trainList << "\"" << Endl;
303 Log() << kINFO << "Requirement for validation events: \"" << testList << "\"" << Endl;
304
305 // localTrainingTree->Print();
306
307 // create NN
308 if (fMLP != 0) { delete fMLP; fMLP = 0; }
309 fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(),
310 localTrainingTree,
311 trainList,
312 testList );
313 fMLP->SetEventWeight( "weight" );
314
315 // set learning method
317
318 fLearningMethod.ToLower();
319 if (fLearningMethod == "stochastic" ) learningMethod = TMultiLayerPerceptron::kStochastic;
320 else if (fLearningMethod == "batch" ) learningMethod = TMultiLayerPerceptron::kBatch;
321 else if (fLearningMethod == "steepestdescent" ) learningMethod = TMultiLayerPerceptron::kSteepestDescent;
322 else if (fLearningMethod == "ribierepolak" ) learningMethod = TMultiLayerPerceptron::kRibierePolak;
323 else if (fLearningMethod == "fletcherreeves" ) learningMethod = TMultiLayerPerceptron::kFletcherReeves;
324 else if (fLearningMethod == "bfgs" ) learningMethod = TMultiLayerPerceptron::kBFGS;
325 else {
326 Log() << kFATAL << "Unknown Learning Method: \"" << fLearningMethod << "\"" << Endl;
327 }
328 fMLP->SetLearningMethod( learningMethod );
329
330 // train NN
331 fMLP->Train(fNcycles, "" ); //"text,update=50" );
332
333 // write weights to File;
334 // this is not nice, but fMLP gets deleted at the end of Train()
335 delete localTrainingTree;
336 delete [] vArr;
337}
338
339////////////////////////////////////////////////////////////////////////////////
340/// write weights to xml file
341
342void TMVA::MethodTMlpANN::AddWeightsXMLTo( void* parent ) const
343{
344 // first the architecture
345 void *wght = gTools().AddChild(parent, "Weights");
346 void* arch = gTools().AddChild( wght, "Architecture" );
347 gTools().AddAttr( arch, "BuildOptions", fMLPBuildOptions.Data() );
348
349 // dump weights first in temporary txt file, read from there into xml
350 const TString tmpfile=GetWeightFileDir()+"/TMlp.nn.weights.temp";
351 fMLP->DumpWeights( tmpfile.Data() );
352 std::ifstream inf( tmpfile.Data() );
353 char temp[256];
354 TString data("");
355 void *ch=NULL;
356 while (inf.getline(temp,256)) {
357 TString dummy(temp);
358 //std::cout << dummy << std::endl; // remove annoying debug printout with std::cout
359 if (dummy.BeginsWith('#')) {
360 if (ch!=0) gTools().AddRawLine( ch, data.Data() );
361 dummy = dummy.Strip(TString::kLeading, '#');
362 dummy = dummy(0,dummy.First(' '));
363 ch = gTools().AddChild(wght, dummy);
364 data.Resize(0);
365 continue;
366 }
367 data += (dummy + " ");
368 }
369 if (ch != 0) gTools().AddRawLine( ch, data.Data() );
370
371 inf.close();
372}
373
374////////////////////////////////////////////////////////////////////////////////
375/// rebuild temporary textfile from xml weightfile and load this
376/// file into MLP
377
379{
380 void* ch = gTools().GetChild(wghtnode);
381 gTools().ReadAttr( ch, "BuildOptions", fMLPBuildOptions );
382
383 ch = gTools().GetNextChild(ch);
384 const TString fname = GetWeightFileDir()+"/TMlp.nn.weights.temp";
385 std::ofstream fout( fname.Data() );
386 double temp1=0,temp2=0;
387 while (ch) {
388 const char* nodecontent = gTools().GetContent(ch);
389 std::stringstream content(nodecontent);
390 if (strcmp(gTools().GetName(ch),"input")==0) {
391 fout << "#input normalization" << std::endl;
392 while ((content >> temp1) &&(content >> temp2)) {
393 fout << temp1 << " " << temp2 << std::endl;
394 }
395 }
396 if (strcmp(gTools().GetName(ch),"output")==0) {
397 fout << "#output normalization" << std::endl;
398 while ((content >> temp1) &&(content >> temp2)) {
399 fout << temp1 << " " << temp2 << std::endl;
400 }
401 }
402 if (strcmp(gTools().GetName(ch),"neurons")==0) {
403 fout << "#neurons weights" << std::endl;
404 while (content >> temp1) {
405 fout << temp1 << std::endl;
406 }
407 }
408 if (strcmp(gTools().GetName(ch),"synapses")==0) {
409 fout << "#synapses weights" ;
410 while (content >> temp1) {
411 fout << std::endl << temp1 ;
412 }
413 }
414 ch = gTools().GetNextChild(ch);
415 }
416 fout.close();;
417
418 // Here we create a dummy tree necessary to create a minimal NN
419 // to be used for testing, evaluation and application
420 TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);
421 TTHREAD_TLS(Int_t) type;
422
423 gROOT->cd();
424 TTree * dummyTree = new TTree("dummy","Empty dummy tree", 1);
425 for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
426 TString vn = DataInfo().GetVariableInfo(ivar).GetInternalName();
427 dummyTree->Branch(Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()));
428 }
429 dummyTree->Branch("type", &type, "type/I");
430
431 if (fMLP != 0) { delete fMLP; fMLP = 0; }
432 fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
433 fMLP->LoadWeights( fname );
434}
435
436////////////////////////////////////////////////////////////////////////////////
437/// read weights from stream
438/// since the MLP can not read from the stream, we
439/// 1st: write the weights to temporary file
440
442{
443 std::ofstream fout( "./TMlp.nn.weights.temp" );
444 fout << istr.rdbuf();
445 fout.close();
446 // 2nd: load the weights from the temporary file into the MLP
447 // the MLP is already build
448 Log() << kINFO << "Load TMLP weights into " << fMLP << Endl;
449
450 Double_t* d = new Double_t[Data()->GetNVariables()] ;
451 Int_t type;
452 gROOT->cd();
453 TTree * dummyTree = new TTree("dummy","Empty dummy tree", 1);
454 for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
455 TString vn = DataInfo().GetVariableInfo(ivar).GetLabel();
456 dummyTree->Branch(Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()));
457 }
458 dummyTree->Branch("type", &type, "type/I");
459
460 if (fMLP != 0) { delete fMLP; fMLP = 0; }
461 fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
462
463 fMLP->LoadWeights( "./TMlp.nn.weights.temp" );
464 // here we can delete the temporary file
465 // how?
466 delete [] d;
467}
468
469////////////////////////////////////////////////////////////////////////////////
470/// create reader class for classifier -> overwrites base class function
471/// create specific class for TMultiLayerPerceptron
472
473void TMVA::MethodTMlpANN::MakeClass( const TString& theClassFileName ) const
474{
475 // the default consists of
476 TString classFileName = "";
477 if (theClassFileName == "")
478 classFileName = GetWeightFileDir() + "/" + GetJobName() + "_" + GetMethodName() + ".class";
479 else
480 classFileName = theClassFileName;
481
482 classFileName.ReplaceAll(".class","");
483 Log() << kINFO << "Creating specific (TMultiLayerPerceptron) standalone response class: " << classFileName << Endl;
484 fMLP->Export( classFileName.Data() );
485}
486
487////////////////////////////////////////////////////////////////////////////////
488/// write specific classifier response
489/// nothing to do here - all taken care of by TMultiLayerPerceptron
490
491void TMVA::MethodTMlpANN::MakeClassSpecific( std::ostream& /*fout*/, const TString& /*className*/ ) const
492{
493}
494
495////////////////////////////////////////////////////////////////////////////////
496/// get help message text
497///
498/// typical length of text line:
499/// "|--------------------------------------------------------------|"
500
502{
503 Log() << Endl;
504 Log() << gTools().Color("bold") << "--- Short description:" << gTools().Color("reset") << Endl;
505 Log() << Endl;
506 Log() << "This feed-forward multilayer perceptron neural network is the " << Endl;
507 Log() << "standard implementation distributed with ROOT (class TMultiLayerPerceptron)." << Endl;
508 Log() << Endl;
509 Log() << "Detailed information is available here:" << Endl;
510 if (gConfig().WriteOptionsReference()) {
511 Log() << "<a href=\"http://root.cern.ch/root/html/TMultiLayerPerceptron.html\">";
512 Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html</a>" << Endl;
513 }
514 else Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html" << Endl;
515 Log() << Endl;
516}
#define REGISTER_METHOD(CLASS)
for example
const Bool_t EnforceNormalization__
#define d(i)
Definition RSha256.hxx:102
int Int_t
Definition RtypesCore.h:45
const Bool_t kFALSE
Definition RtypesCore.h:92
long Long_t
Definition RtypesCore.h:54
bool Bool_t
Definition RtypesCore.h:63
double Double_t
Definition RtypesCore.h:59
float Float_t
Definition RtypesCore.h:57
const Bool_t kTRUE
Definition RtypesCore.h:91
#define ClassImp(name)
Definition Rtypes.h:364
int type
Definition TGX11.cxx:121
#define gROOT
Definition TROOT.h:406
char * Form(const char *fmt,...)
Class that contains all the data information.
Definition DataSetInfo.h:62
Float_t GetValue(UInt_t ivar) const
return value of i'th variable
Definition Event.cxx:236
Double_t GetWeight() const
return the event weight - depending on whether the flag IgnoreNegWeightsInTraining is or not.
Definition Event.cxx:381
Virtual base Class for all MVA method.
Definition MethodBase.h:111
This is the TMVA TMultiLayerPerceptron interface class.
void ReadWeightsFromStream(std::istream &istr)
read weights from stream since the MLP can not read from the stream, we 1st: write the weights to tem...
void Init(void)
default initialisations
virtual Bool_t HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets)
TMlpANN can handle classification with 2 classes.
void Train(void)
performs TMlpANN training available learning methods:
Double_t GetMvaValue(Double_t *err=0, Double_t *errUpper=0)
calculate the value of the neural net for the current event
void DeclareOptions()
define the options (their key words) that can be set in the option string
void CreateMLPOptions(TString)
translates options from option string into TMlpANN language
void ReadWeightsFromXML(void *wghtnode)
rebuild temporary textfile from xml weightfile and load this file into MLP
MethodTMlpANN(const TString &jobName, const TString &methodTitle, DataSetInfo &theData, const TString &theOption="3000:N-1:N-2")
standard constructor
void ProcessOptions()
builds the neural network as specified by the user
void MakeClassSpecific(std::ostream &, const TString &) const
write specific classifier response nothing to do here - all taken care of by TMultiLayerPerceptron
void AddWeightsXMLTo(void *parent) const
write weights to xml file
void MakeClass(const TString &classFileName=TString("")) const
create reader class for classifier -> overwrites base class function create specific class for TMulti...
virtual ~MethodTMlpANN(void)
destructor
void GetHelpMessage() const
get help message text
void * GetNextChild(void *prevchild, const char *childname=0)
XML helpers.
Definition Tools.cxx:1174
void * AddChild(void *parent, const char *childname, const char *content=0, bool isRootNode=false)
add child node
Definition Tools.cxx:1136
Bool_t AddRawLine(void *node, const char *raw)
XML helpers.
Definition Tools.cxx:1202
const TString & Color(const TString &)
human readable color strings
Definition Tools.cxx:840
const char * GetContent(void *node)
XML helpers.
Definition Tools.cxx:1186
void * GetChild(void *parent, const char *childname=0)
get child node
Definition Tools.cxx:1162
void ReadAttr(void *node, const char *, T &value)
read attribute from xml
Definition Tools.h:335
void AddAttr(void *node, const char *, const T &value, Int_t precision=16)
add attribute to xml
Definition Tools.h:353
Singleton class for Global types used by TMVA.
Definition Types.h:73
@ kClassification
Definition Types.h:129
This class describes a neural network.
Basic string class.
Definition TString.h:136
Ssiz_t Length() const
Definition TString.h:410
TSubString Strip(EStripType s=kTrailing, char c=' ') const
Return a substring of self stripped at beginning and/or end.
Definition TString.cxx:1126
Ssiz_t First(char c) const
Find first occurrence of a character c.
Definition TString.cxx:519
const char * Data() const
Definition TString.h:369
TString & Chop()
Definition TString.h:679
TString & ReplaceAll(const TString &s1, const TString &s2)
Definition TString.h:692
void Resize(Ssiz_t n)
Resize the string. Truncate or add blanks as necessary.
Definition TString.cxx:1115
@ kLeading
Definition TString.h:267
Bool_t BeginsWith(const char *s, ECaseCompare cmp=kExact) const
Definition TString.h:615
TString & Remove(Ssiz_t pos)
Definition TString.h:673
A TTree represents a columnar dataset.
Definition TTree.h:79
virtual Int_t Fill()
Fill all branches.
Definition TTree.cxx:4590
TBranch * Branch(const char *name, T *obj, Int_t bufsize=32000, Int_t splitlevel=99)
Add a new branch, and infer the data type from the type of obj being passed.
Definition TTree.h:350
create variable transformations
Config & gConfig()
Tools & gTools()
MsgLogger & Endl(MsgLogger &ml)
Definition MsgLogger.h:158