MethodTMlpANN.cxx
// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne
/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis      *
 * Package: TMVA                                                                  *
 * Class  : MethodTMlpANN                                                         *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation (see header for description)                               *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland              *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany      *
 *      Kai Voss        <Kai.Voss@cern.ch>       - U. of Victoria, Canada         *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         *
 *      U. of Victoria, Canada                                                    *
 *      MPI-K Heidelberg, Germany                                                 *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without            *
 * modification, are permitted according to the terms listed in LICENSE          *
 * (see tmva/doc/LICENSE)                                                        *
 **********************************************************************************/

/*! \class TMVA::MethodTMlpANN
\ingroup TMVA

This is the TMVA TMultiLayerPerceptron interface class. It provides
training and testing of ROOT's internal MLP class (TMultiLayerPerceptron)
within the TMVA framework.

Available learning methods:

 - Stochastic
 - Batch
 - SteepestDescent
 - RibierePolak
 - FletcherReeves
 - BFGS

See the TMultiLayerPerceptron class description
for details on this ANN.
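
A minimal booking sketch (not part of this file; `factory` and `dataloader`
are assumed to be an existing TMVA::Factory and TMVA::DataLoader, and the
option values shown correspond to the defaults declared in DeclareOptions()):

~~~{.cpp}
// book the TMlpANN method with the factory
factory->BookMethod( dataloader, TMVA::Types::kTMlpANN, "TMlpANN",
                     "NCycles=200:HiddenLayers=N,N-1:"
                     "LearningMethod=Stochastic:ValidationFraction=0.5" );
~~~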
*/

#include "TMVA/MethodTMlpANN.h"

#include "TMVA/Config.h"
#include "TMVA/Configurable.h"
#include "TMVA/DataSet.h"
#include "TMVA/DataSetInfo.h"
#include "TMVA/IMethod.h"
#include "TMVA/MethodBase.h"
#include "TMVA/MsgLogger.h"
#include "TMVA/Types.h"
#include "TMVA/VariableInfo.h"

#include "TMVA/ClassifierFactory.h"
#include "TMVA/Tools.h"

#include "TLeaf.h"
#include "TEventList.h"
#include "TROOT.h"
#include "TMultiLayerPerceptron.h"
#include "ThreadLocalStorage.h"

#include <cstdlib>
#include <cstring>
#include <iostream>
#include <fstream>
#include <sstream>

using std::atoi;

// some additional TMlpANN options
const Bool_t EnforceNormalization__=kTRUE;

REGISTER_METHOD(TMlpANN)

ClassImp(TMVA::MethodTMlpANN);

////////////////////////////////////////////////////////////////////////////////
/// standard constructor

TMVA::MethodTMlpANN::MethodTMlpANN( const TString& jobName,
                                    const TString& methodTitle,
                                    DataSetInfo& theData,
                                    const TString& theOption) :
   TMVA::MethodBase( jobName, Types::kTMlpANN, methodTitle, theData, theOption),
   fMLP(0),
   fLocalTrainingTree(0),
   fNcycles(100),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
}

////////////////////////////////////////////////////////////////////////////////
/// constructor from weight file

TMVA::MethodTMlpANN::MethodTMlpANN( DataSetInfo& theData,
                                    const TString& theWeightFile) :
   TMVA::MethodBase( Types::kTMlpANN, theData, theWeightFile),
   fMLP(0),
   fLocalTrainingTree(0),
   fNcycles(100),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
}

////////////////////////////////////////////////////////////////////////////////
/// TMlpANN can handle classification with 2 classes

Bool_t TMVA::MethodTMlpANN::HasAnalysisType( Types::EAnalysisType type, UInt_t numberClasses,
                                             UInt_t /*numberTargets*/ )
{
   if (type == Types::kClassification && numberClasses == 2) return kTRUE;
   return kFALSE;
}

////////////////////////////////////////////////////////////////////////////////
/// default initialisations

void TMVA::MethodTMlpANN::Init( void )
{
}

////////////////////////////////////////////////////////////////////////////////
/// destructor

TMVA::MethodTMlpANN::~MethodTMlpANN( void )
{
   if (fMLP) delete fMLP;
}

////////////////////////////////////////////////////////////////////////////////
/// translates options from option string into TMlpANN language
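///
/// Illustrative sketch (hypothetical variable names, not from the original
/// source): for two input variables var1, var2 and "HiddenLayers=N,N-1",
/// the resulting TMultiLayerPerceptron build string is
/// ~~~
/// @var1,@var2:2:1:type
/// ~~~
/// i.e. normalized inputs ("@"), hidden layers with 2 and 1 nodes, and the
/// output neuron named "type".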

void TMVA::MethodTMlpANN::CreateMLPOptions( TString layerSpec )
{
   fHiddenLayer = ":";

   while (layerSpec.Length()>0) {
      TString sToAdd="";
      if (layerSpec.First(',')<0) {
         sToAdd = layerSpec;
         layerSpec = "";
      }
      else {
         sToAdd = layerSpec(0,layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
      }
      int nNodes = 0;
      if (sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
      nNodes += atoi(sToAdd);
      fHiddenLayer = TString::Format( "%s%i:", (const char*)fHiddenLayer, nNodes );
   }

   // set input vars
   std::vector<TString>::iterator itrVar    = (*fInputVars).begin();
   std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
   fMLPBuildOptions = "";
   for (; itrVar != itrVarEnd; ++itrVar) {
      if (EnforceNormalization__) fMLPBuildOptions += "@";
      TString myVar = *itrVar;
      fMLPBuildOptions += myVar;
      fMLPBuildOptions += ",";
   }
   fMLPBuildOptions.Chop(); // remove last ","

   // prepare final options for MLP kernel: hidden layers and the "type" output neuron
   fMLPBuildOptions += fHiddenLayer;
   fMLPBuildOptions += "type";

   Log() << kINFO << "Use " << fNcycles << " training cycles" << Endl;
   Log() << kINFO << "Use configuration (nodes per hidden layer): " << fHiddenLayer << Endl;
}

////////////////////////////////////////////////////////////////////////////////
/// define the options (their key words) that can be set in the option string
///
/// known options:
///
///  - NCycles `<integer>`: Number of training cycles (too many cycles could overtrain the network)
///  - HiddenLayers `<string>`: Layout of the hidden layers (nodes per layer)
///    * specifications for each hidden layer are separated by commas
///    * for each layer the number of nodes can be either absolute (simply a number)
///      or relative to the number of input nodes to the neural net (N)
///    * there is always a single node in the output layer
///
/// example: a net with 6 input nodes and "HiddenLayers=N-1,N-2" has 6,5,4,1 nodes in
/// layers 1,2,3,4, respectively

void TMVA::MethodTMlpANN::DeclareOptions()
{
   DeclareOptionRef( fNcycles   = 200,     "NCycles",      "Number of training cycles" );
   DeclareOptionRef( fLayerSpec = "N,N-1", "HiddenLayers", "Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );

   DeclareOptionRef( fValidationFraction = 0.5, "ValidationFraction",
                     "Fraction of events in training tree used for cross validation" );

   DeclareOptionRef( fLearningMethod = "Stochastic", "LearningMethod", "Learning method" );
   AddPreDefVal( TString("Stochastic") );
   AddPreDefVal( TString("Batch") );
   AddPreDefVal( TString("SteepestDescent") );
   AddPreDefVal( TString("RibierePolak") );
   AddPreDefVal( TString("FletcherReeves") );
   AddPreDefVal( TString("BFGS") );
}

////////////////////////////////////////////////////////////////////////////////
/// builds the neural network as specified by the user

void TMVA::MethodTMlpANN::ProcessOptions()
{
   CreateMLPOptions(fLayerSpec);

   if (IgnoreEventsWithNegWeightsInTraining()) {
      Log() << kFATAL << "Mechanism to ignore events with negative weights in training not available for method "
            << GetMethodTypeName()
            << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
            << Endl;
   }
}

////////////////////////////////////////////////////////////////////////////////
/// calculate the value of the neural net for the current event
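///
/// The event's input variables are copied into a plain Double_t array and
/// passed to TMultiLayerPerceptron::Evaluate(0, d), where index 0 selects
/// the single output neuron. No MVA error estimate is available.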

Double_t TMVA::MethodTMlpANN::GetMvaValue( Double_t* err, Double_t* errUpper )
{
   const Event* ev = GetEvent();
   TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);

   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      d[ivar] = (Double_t)ev->GetValue(ivar);
   }
   Double_t mvaVal = fMLP->Evaluate(0,d);

   // cannot determine error
   NoErrorCalc(err, errUpper);

   return mvaVal;
}

////////////////////////////////////////////////////////////////////////////////
/// performs TMlpANN training
/// available learning methods:
///
///  - TMultiLayerPerceptron::kStochastic
///  - TMultiLayerPerceptron::kBatch
///  - TMultiLayerPerceptron::kSteepestDescent
///  - TMultiLayerPerceptron::kRibierePolak
///  - TMultiLayerPerceptron::kFletcherReeves
///  - TMultiLayerPerceptron::kBFGS
///
/// TMultiLayerPerceptron wants the test and training trees at once,
/// so the training and testing trees from the MVA factory are merged first:
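///
/// Worked example (hypothetical numbers, not from the original source): with
/// 1000 signal and 1000 background training events and ValidationFraction=0.5,
/// the selection strings built below become
/// ~~~
/// trainList = "Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500)"
/// testList  = "!(Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500))"
/// ~~~
/// i.e. the first half of the signal block and the first half of the
/// background block are used for training, the rest for validation.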

void TMVA::MethodTMlpANN::Train( void )
{
   Int_t type;
   Float_t weight;
   const Long_t basketsize = 128000;
   Float_t* vArr = new Float_t[GetNvar()];

   TTree *localTrainingTree = new TTree( "TMLPtrain", "Local training tree for TMlpANN" );
   localTrainingTree->Branch( "type",   &type,   "type/I",   basketsize );
   localTrainingTree->Branch( "weight", &weight, "weight/F", basketsize );

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      TString myVar = GetInternalVarName(ivar);
      TString myTyp = TString::Format("Var%02i/F", ivar);
      localTrainingTree->Branch( myVar.Data(), &vArr[ivar], myTyp.Data(), basketsize );
   }

   for (UInt_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
      const Event *ev = GetEvent(ievt);
      for (UInt_t i=0; i<GetNvar(); i++) {
         vArr[i] = ev->GetValue( i );
      }
      type   = DataInfo().IsSignal( ev ) ? 1 : 0;
      weight = ev->GetWeight();
      localTrainingTree->Fill();
   }

   // These are the event lists for the mlp train method.
   // The first events in the tree are used for training,
   // the rest for internal testing (cross validation).
   // NOTE: the training events are ordered: first part is signal, second part background
   TString trainList = "Entry$<";
   trainList += 1.0-fValidationFraction;
   trainList += "*";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " || (Entry$>";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " && Entry$<";
   trainList += (Int_t)(Data()->GetNEvtSigTrain() + (1.0 - fValidationFraction)*Data()->GetNEvtBkgdTrain());
   trainList += ")";
   TString testList = TString("!(") + trainList + ")";

   // print the requirements
   Log() << kHEADER << "Requirement for training events: \"" << trainList << "\"" << Endl;
   Log() << kINFO << "Requirement for validation events: \"" << testList << "\"" << Endl;

   // localTrainingTree->Print();

   // create NN
   if (fMLP) { delete fMLP; fMLP = nullptr; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(),
                                     localTrainingTree,
                                     trainList,
                                     testList );
   fMLP->SetEventWeight( "weight" );

   // set learning method
   TMultiLayerPerceptron::ELearningMethod learningMethod = TMultiLayerPerceptron::kStochastic;

   fLearningMethod.ToLower();
   if      (fLearningMethod == "stochastic"      ) learningMethod = TMultiLayerPerceptron::kStochastic;
   else if (fLearningMethod == "batch"           ) learningMethod = TMultiLayerPerceptron::kBatch;
   else if (fLearningMethod == "steepestdescent" ) learningMethod = TMultiLayerPerceptron::kSteepestDescent;
   else if (fLearningMethod == "ribierepolak"    ) learningMethod = TMultiLayerPerceptron::kRibierePolak;
   else if (fLearningMethod == "fletcherreeves"  ) learningMethod = TMultiLayerPerceptron::kFletcherReeves;
   else if (fLearningMethod == "bfgs"            ) learningMethod = TMultiLayerPerceptron::kBFGS;
   else {
      Log() << kFATAL << "Unknown Learning Method: \"" << fLearningMethod << "\"" << Endl;
   }
   fMLP->SetLearningMethod( learningMethod );

   // train NN
   fMLP->Train(fNcycles, "" ); // "text,update=50"

   // clean up the local training tree and buffer; the trained fMLP is kept
   // as a class member for subsequent evaluation
   delete localTrainingTree;
   delete [] vArr;
}

////////////////////////////////////////////////////////////////////////////////
/// write weights to xml file
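///
/// The weights are first dumped by TMultiLayerPerceptron::DumpWeights() into a
/// temporary text file and then parsed into XML. Sketch of that text format,
/// as implied by the '#' headers written back in ReadWeightsFromXML():
/// ~~~
/// #input normalization
/// #output normalization
/// #neurons weights
/// #synapses weights
/// ~~~
/// Each '#' header starts a new XML child node named after its first word.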

void TMVA::MethodTMlpANN::AddWeightsXMLTo( void* parent ) const
{
   // first the architecture
   void *wght = gTools().AddChild(parent, "Weights");
   void* arch = gTools().AddChild( wght, "Architecture" );
   gTools().AddAttr( arch, "BuildOptions", fMLPBuildOptions.Data() );

   // dump weights first in temporary txt file, read from there into xml
   const TString tmpfile=GetWeightFileDir()+"/TMlp.nn.weights.temp";
   fMLP->DumpWeights( tmpfile.Data() );
   std::ifstream inf( tmpfile.Data() );
   char temp[256];
   TString data("");
   void *ch = nullptr;
   while (inf.getline(temp,256)) {
      TString dummy(temp);
      if (dummy.BeginsWith('#')) {
         if (ch) gTools().AddRawLine( ch, data.Data() );
         dummy = dummy.Strip(TString::kLeading, '#');
         dummy = dummy(0,dummy.First(' '));
         ch = gTools().AddChild(wght, dummy);
         data.Resize(0);
         continue;
      }
      data += (dummy + " ");
   }
   if (ch) gTools().AddRawLine( ch, data.Data() );

   inf.close();
}

////////////////////////////////////////////////////////////////////////////////
/// rebuild the temporary text file from the xml weight file and load it
/// into the MLP
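///
/// Expected XML structure (sketch, mirroring what AddWeightsXMLTo() writes):
/// ~~~
/// <Weights>
///   <Architecture BuildOptions="..."/>
///   <input>    ... </input>
///   <output>   ... </output>
///   <neurons>  ... </neurons>
///   <synapses> ... </synapses>
/// </Weights>
/// ~~~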

void TMVA::MethodTMlpANN::ReadWeightsFromXML( void* wghtnode )
{
   void* ch = gTools().GetChild(wghtnode);
   gTools().ReadAttr( ch, "BuildOptions", fMLPBuildOptions );

   ch = gTools().GetNextChild(ch);
   const TString fname = GetWeightFileDir()+"/TMlp.nn.weights.temp";
   std::ofstream fout( fname.Data() );
   double temp1=0,temp2=0;
   while (ch) {
      const char* nodecontent = gTools().GetContent(ch);
      std::stringstream content(nodecontent);
      if (strcmp(gTools().GetName(ch),"input")==0) {
         fout << "#input normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"output")==0) {
         fout << "#output normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"neurons")==0) {
         fout << "#neurons weights" << std::endl;
         while (content >> temp1) {
            fout << temp1 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"synapses")==0) {
         fout << "#synapses weights";
         while (content >> temp1) {
            fout << std::endl << temp1;
         }
      }
      ch = gTools().GetNextChild(ch);
   }
   fout.close();

   // create a dummy tree, necessary to build a minimal NN
   // to be used for testing, evaluation and application
   TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);
   TTHREAD_TLS(Int_t) type;

   gROOT->cd();
   TTree * dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetInternalName();
      TString vt = TString::Format("%s/D", vn.Data());
      dummyTree->Branch(vn.Data(), d+ivar, vt.Data());
   }
   dummyTree->Branch("type", &type, "type/I");

   if (fMLP) { delete fMLP; fMLP = nullptr; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
   fMLP->LoadWeights( fname );
}

////////////////////////////////////////////////////////////////////////////////
/// read weights from stream
/// since the MLP cannot read from the stream directly, we
/// 1st: write the weights to a temporary file

void TMVA::MethodTMlpANN::ReadWeightsFromStream( std::istream& istr )
{
   std::ofstream fout( "./TMlp.nn.weights.temp" );
   fout << istr.rdbuf();
   fout.close();
   // 2nd: load the weights from the temporary file into the MLP
   // the MLP is already built
   Log() << kINFO << "Load TMLP weights into " << fMLP << Endl;

   Double_t *d = new Double_t[Data()->GetNVariables()] ;
   Int_t type;
   gROOT->cd();
   TTree * dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetLabel();
      TString vt = TString::Format("%s/D", vn.Data());
      dummyTree->Branch(vn.Data(), d+ivar, vt.Data());
   }
   dummyTree->Branch("type", &type, "type/I");

   if (fMLP) { delete fMLP; fMLP = nullptr; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );

   fMLP->LoadWeights( "./TMlp.nn.weights.temp" );
   // the temporary file could be deleted here
   delete [] d;
}

////////////////////////////////////////////////////////////////////////////////
/// create reader class for classifier -> overrides base class function;
/// creates a specific class for TMultiLayerPerceptron
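///
/// Note: this delegates to TMultiLayerPerceptron::Export(), which, for the
/// default C++ language target, writes standalone <classFileName>.h and
/// <classFileName>.cxx files.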

void TMVA::MethodTMlpANN::MakeClass( const TString& theClassFileName ) const
{
   // determine the file name; a default is constructed if none is given
   TString classFileName = "";
   if (theClassFileName == "")
      classFileName = GetWeightFileDir() + "/" + GetJobName() + "_" + GetMethodName() + ".class";
   else
      classFileName = theClassFileName;

   classFileName.ReplaceAll(".class","");
   Log() << kINFO << "Creating specific (TMultiLayerPerceptron) standalone response class: " << classFileName << Endl;
   fMLP->Export( classFileName.Data() );
}

////////////////////////////////////////////////////////////////////////////////
/// write specific classifier response
/// nothing to do here - all taken care of by TMultiLayerPerceptron

void TMVA::MethodTMlpANN::MakeClassSpecific( std::ostream& /*fout*/, const TString& /*className*/ ) const
{
}

////////////////////////////////////////////////////////////////////////////////
/// get help message text
///
/// typical length of text line:
/// "|--------------------------------------------------------------|"

void TMVA::MethodTMlpANN::GetHelpMessage() const
{
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Short description:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "This feed-forward multilayer perceptron neural network is the " << Endl;
   Log() << "standard implementation distributed with ROOT (class TMultiLayerPerceptron)." << Endl;
   Log() << Endl;
   Log() << "Detailed information is available here:" << Endl;
   if (gConfig().WriteOptionsReference()) {
      Log() << "<a href=\"https://root.cern/doc/master/classTMultiLayerPerceptron.html\">";
      Log() << "https://root.cern/doc/master/classTMultiLayerPerceptron.html</a>" << Endl;
   }
   else Log() << "https://root.cern/doc/master/classTMultiLayerPerceptron.html" << Endl;
   Log() << Endl;
}