#ifndef ROOT_TMVA_Tools
#include "TMVA/Tools.h"
#endif

// enforce "normalized" input variables ("@" prefix in the MLP build options)
const Bool_t EnforceNormalization__ = kTRUE;

// the learning-method enum of TMultiLayerPerceptron was renamed after ROOT 5.13/06
#if ROOT_VERSION_CODE > ROOT_VERSION(5,13,06)
const TMultiLayerPerceptron::ELearningMethod LearningMethod__ = TMultiLayerPerceptron::kStochastic;
#else
const TMultiLayerPerceptron::TLearningMethod LearningMethod__ = TMultiLayerPerceptron::kStochastic;
#endif

REGISTER_METHOD(TMlpANN)

ClassImp(TMVA::MethodTMlpANN)
//_______________________________________________________________________
TMVA::MethodTMlpANN::MethodTMlpANN( const TString& jobName,
                                    const TString& methodTitle,
                                    DataSetInfo& theData,
                                    const TString& theOption,
                                    TDirectory* theTargetDir ) :
   TMVA::MethodBase( jobName, Types::kTMlpANN, methodTitle, theData, theOption, theTargetDir ),
   fMLP(0),
   fLocalTrainingTree(0),
   fNcycles(100),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
   // standard constructor
}

//_______________________________________________________________________
TMVA::MethodTMlpANN::MethodTMlpANN( DataSetInfo& theData,
                                    const TString& theWeightFile,
                                    TDirectory* theTargetDir ) :
   TMVA::MethodBase( Types::kTMlpANN, theData, theWeightFile, theTargetDir ),
   fMLP(0),
   fLocalTrainingTree(0),
   fNcycles(100),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
   // constructor from weight file
}
//_______________________________________________________________________
TMVA::MethodTMlpANN::~MethodTMlpANN( void )
{
   // destructor
   if (fMLP) delete fMLP;
}
//_______________________________________________________________________
void TMVA::MethodTMlpANN::CreateMLPOptions( TString layerSpec )
{
   // translates options from option string into TMlpANN language

   fHiddenLayer = ":";

   while (layerSpec.Length()>0) {
      TString sToAdd = "";
      if (layerSpec.First(',')<0) {
         sToAdd    = layerSpec;
         layerSpec = "";
      }
      else {
         sToAdd    = layerSpec(0,layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
      }
      Int_t nNodes = 0;
      if (sToAdd.BeginsWith("N") || sToAdd.BeginsWith("n")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
      nNodes += atoi(sToAdd);
      fHiddenLayer = Form( "%s%i:", (const char*)fHiddenLayer, nNodes );
   }

   // set input variables
   std::vector<TString>::iterator itrVar    = (*fInputVars).begin();
   std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
   fMLPBuildOptions = "";
   for (; itrVar != itrVarEnd; itrVar++) {
      if (EnforceNormalization__) fMLPBuildOptions += "@";
      TString myVar = *itrVar;
      fMLPBuildOptions += myVar;
      fMLPBuildOptions += ",";
   }
   fMLPBuildOptions.Chop(); // remove last ","

   // prepare final options for the MLP kernel
   fMLPBuildOptions += fHiddenLayer;
   fMLPBuildOptions += "type";

   Log() << kINFO << "Use " << fNcycles << " training cycles" << Endl;
   Log() << kINFO << "Use configuration (nodes per hidden layer): " << fHiddenLayer << Endl;
}
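// Illustrative note (not in the original source): for two input variables
// "var1" and "var2" and HiddenLayers="N,N-1" (so GetNvar()==2), the
// translation above produces the TMultiLayerPerceptron build string
//
//    "@var1,@var2:2:1:type"
//
// where "@" requests input normalization (EnforceNormalization__) and the
// trailing "type" names the target branch of the local training tree.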
//_______________________________________________________________________
void TMVA::MethodTMlpANN::DeclareOptions()
{
   // define the options (their key words) that can be set in the option string
   DeclareOptionRef( fNcycles   = 200,     "NCycles",      "Number of training cycles" );
   DeclareOptionRef( fLayerSpec = "N,N-1", "HiddenLayers", "Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );

   DeclareOptionRef( fValidationFraction = 0.5, "ValidationFraction",
                     "Fraction of events in training tree used for cross validation" );

   DeclareOptionRef( fLearningMethod = "Stochastic", "LearningMethod", "Learning method" );
   AddPreDefVal( TString("Stochastic") );
   AddPreDefVal( TString("Batch") );
   AddPreDefVal( TString("SteepestDescent") );
   AddPreDefVal( TString("RibierePolak") );
   AddPreDefVal( TString("FletcherReeves") );
   AddPreDefVal( TString("BFGS") );
}
//_______________________________________________________________________
void TMVA::MethodTMlpANN::ProcessOptions()
{
   // builds the neural network as specified by the user
   CreateMLPOptions(fLayerSpec);

   if (IgnoreEventsWithNegWeightsInTraining()) {
      Log() << kFATAL << "Mechanism to ignore events with negative weights in training not available for method"
            << GetMethodTypeName()
            << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
            << Endl;
   }
}
//_______________________________________________________________________
Double_t TMVA::MethodTMlpANN::GetMvaValue( Double_t* err, Double_t* errUpper )
{
   // calculate the value of the neural net for the current event
   const Event* ev = GetEvent();
   static Double_t* d = new Double_t[Data()->GetNVariables()];
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      d[ivar] = (Double_t)ev->GetValue(ivar);
   }
   Double_t mvaVal = fMLP->Evaluate(0,d);

   // cannot determine error
   NoErrorCalc(err, errUpper);

   return mvaVal;
}
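// Application-side sketch (assumes a hypothetical TMVA::Reader instance
// "reader" with the input variables already registered and the weight file
// booked under the method name "TMlpANN"):
//
//    Double_t mva = reader->EvaluateMVA( "TMlpANN" );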
//_______________________________________________________________________
void TMVA::MethodTMlpANN::Train( void )
{
   // performs TMlpANN training

   // TMultiLayerPerceptron wants training and validation tree at once, so
   // copy the training events into a local tree first
   Int_t type;
   Float_t weight;
   const Long_t basketsize = 128000;
   Float_t* vArr = new Float_t[GetNvar()];

   TTree *localTrainingTree = new TTree( "TMLPtrain", "Local training tree for TMlpANN" );
   localTrainingTree->Branch( "type",   &type,   "type/I",   basketsize );
   localTrainingTree->Branch( "weight", &weight, "weight/F", basketsize );

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      const char* myVar = GetInternalVarName(ivar).Data();
      localTrainingTree->Branch( myVar, &vArr[ivar], Form("Var%02i/F", ivar), basketsize );
   }

   for (UInt_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
      const Event *ev = GetEvent(ievt);
      for (UInt_t i=0; i<GetNvar(); i++) {
         vArr[i] = ev->GetValue( i );
      }
      type   = DataInfo().IsSignal( ev ) ? 1 : 0;
      weight = ev->GetWeight();
      localTrainingTree->Fill();
   }

   // event lists for the MLP train method: the first events of each class
   // block are used for training, the rest for internal cross validation
   TString trainList = "Entry$<";
   trainList += 1.0-fValidationFraction;
   trainList += "*";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " || (Entry$>";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " && Entry$<";
   trainList += (Int_t)(Data()->GetNEvtSigTrain() + (1.0 - fValidationFraction)*Data()->GetNEvtBkgdTrain());
   trainList += ")";
   TString testList = TString("!(") + trainList + ")";

   // print the requirements
   Log() << kINFO << "Requirement for training events: \"" << trainList << "\"" << Endl;
   Log() << kINFO << "Requirement for validation events: \"" << testList << "\"" << Endl;

   // create NN
   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), localTrainingTree, trainList, testList );
   fMLP->SetEventWeight( "weight" );

   // set learning method
#if ROOT_VERSION_CODE > ROOT_VERSION(5,13,06)
   TMultiLayerPerceptron::ELearningMethod learningMethod = TMultiLayerPerceptron::kStochastic;
#else
   TMultiLayerPerceptron::TLearningMethod learningMethod = TMultiLayerPerceptron::kStochastic;
#endif

   fLearningMethod.ToLower();
   if      (fLearningMethod == "stochastic"     ) learningMethod = TMultiLayerPerceptron::kStochastic;
   else if (fLearningMethod == "batch"          ) learningMethod = TMultiLayerPerceptron::kBatch;
   else if (fLearningMethod == "steepestdescent") learningMethod = TMultiLayerPerceptron::kSteepestDescent;
   else if (fLearningMethod == "ribierepolak"   ) learningMethod = TMultiLayerPerceptron::kRibierePolak;
   else if (fLearningMethod == "fletcherreeves" ) learningMethod = TMultiLayerPerceptron::kFletcherReeves;
   else if (fLearningMethod == "bfgs"           ) learningMethod = TMultiLayerPerceptron::kBFGS;
   else {
      Log() << kFATAL << "Unknown Learning Method: \"" << fLearningMethod << "\"" << Endl;
   }
   fMLP->SetLearningMethod( learningMethod );

   // train NN
   fMLP->Train( fNcycles, "text,update=50" );

   delete localTrainingTree;
   delete [] vArr;
}
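// Worked example (illustration only): with 1000 signal and 1000 background
// training events and ValidationFraction=0.5, the strings assembled above are
//
//    trainList = "Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500)"
//    testList  = "!(Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500))"
//
// i.e. the first half of each class block trains the net, the remainder is
// used by TMultiLayerPerceptron for internal cross validation.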
//_______________________________________________________________________
void TMVA::MethodTMlpANN::AddWeightsXMLTo( void* parent ) const
{
   // write weights to xml file
   void* wght = gTools().AddChild( parent, "Weights" );
   void* arch = gTools().AddChild( wght, "Architecture" );
   gTools().AddAttr( arch, "BuildOptions", fMLPBuildOptions.Data() );

   // dump weights first into a temporary text file, read from there into xml
   fMLP->DumpWeights( "weights/TMlp.nn.weights.temp" );
   std::ifstream inf( "weights/TMlp.nn.weights.temp" );
   char temp[256];
   TString data("");
   void* ch = NULL;
   while (inf.getline(temp,256)) {
      TString dummy(temp);
      if (dummy.BeginsWith("#")) { // each "#..." header opens a new xml child
         if (ch != 0) gTools().AddRawLine( ch, data.Data() );
         dummy = dummy.Strip(TString::kLeading, '#');
         dummy = dummy(0,dummy.First(' '));
         ch = gTools().AddChild(wght, dummy);
         data.Resize(0);
         continue;
      }
      data += (dummy + " ");
   }
   if (ch != 0) gTools().AddRawLine( ch, data.Data() );

   inf.close();
}
//_______________________________________________________________________
void TMVA::MethodTMlpANN::ReadWeightsFromXML( void* wghtnode )
{
   // rebuild temporary textfile from xml weightfile and load this file into MLP
   void* ch = gTools().GetChild(wghtnode);
   gTools().ReadAttr( ch, "BuildOptions", fMLPBuildOptions );

   ch = gTools().GetNextChild(ch);
   const char* fname = "weights/TMlp.nn.weights.temp";
   std::ofstream fout( fname );
   double temp1=0,temp2=0;
   while (ch) {
      const char* nodecontent = gTools().GetContent(ch);
      std::stringstream content(nodecontent);
      if (strcmp(gTools().GetName(ch),"input")==0) {
         fout << "#input normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"output")==0) {
         fout << "#output normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"neurons")==0) {
         fout << "#neurons weights" << std::endl;
         while (content >> temp1) {
            fout << temp1 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"synapses")==0) {
         fout << "#synapses weights";
         while (content >> temp1) {
            fout << std::endl << temp1;
         }
      }
      ch = gTools().GetNextChild(ch);
   }
   fout.close();

   // build a dummy tree to create a minimal NN for evaluation and application
   Double_t* d = new Double_t[Data()->GetNVariables()];
   Int_t type;
   gROOT->cd();
   TTree* dummyTree = new TTree( "dummy", "Empty dummy tree", 1 );
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetInternalName();
      dummyTree->Branch( Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()) );
   }
   dummyTree->Branch( "type", &type, "type/I" );

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
   fMLP->LoadWeights( fname );
}
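// For reference (sketch): the temporary file rebuilt above follows the plain
// text format of TMultiLayerPerceptron::DumpWeights/LoadWeights, e.g.
//
//    #input normalization
//    1.23 4.56
//    ...
//    #synapses weights
//    0.0789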
//_______________________________________________________________________
void TMVA::MethodTMlpANN::ReadWeightsFromStream( std::istream& istr )
{
   // since the MLP can not read from the stream, write the weights
   // to a temporary file first, then load that file into the MLP
   std::ofstream fout( "./TMlp.nn.weights.temp" );
   fout << istr.rdbuf();
   fout.close();

   Log() << kINFO << "Load TMLP weights into " << fMLP << Endl;

   Double_t* d = new Double_t[Data()->GetNVariables()];
   Int_t type;
   gROOT->cd();
   TTree* dummyTree = new TTree( "dummy", "Empty dummy tree", 1 );
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetLabel();
      dummyTree->Branch( Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()) );
   }
   dummyTree->Branch( "type", &type, "type/I" );

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
   fMLP->LoadWeights( "./TMlp.nn.weights.temp" );
}
//_______________________________________________________________________
void TMVA::MethodTMlpANN::MakeClass( const TString& theClassFileName ) const
{
   // create reader class for classifier -> overwrites base class function;
   // creates specific class for TMultiLayerPerceptron
   TString classFileName = "";
   if (theClassFileName == "")
      classFileName = GetWeightFileDir() + "/" + GetJobName() + "_" + GetMethodName() + ".class";
   else
      classFileName = theClassFileName;

   classFileName.ReplaceAll(".class","");
   Log() << kINFO << "Creating specific (TMultiLayerPerceptron) standalone response class: " << classFileName << Endl;
   fMLP->Export( classFileName.Data() );
}
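// Usage sketch for the exported standalone class (assumption: Export() writes
// <classFileName>.h/.cxx defining a class of the same name with a Value()
// method; the names below are hypothetical):
//
//    TMlpANN net;                                   // exported class
//    double response = net.Value( 0, var1, var2 );  // output neuron 0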
//_______________________________________________________________________
void TMVA::MethodTMlpANN::GetHelpMessage() const
{
   // get help message text
   Log() << Endl;
   Log() << "This feed-forward multilayer perceptron neural network is the " << Endl;
   Log() << "standard implementation distributed with ROOT (class TMultiLayerPerceptron)." << Endl;
   Log() << Endl;
   Log() << "Detailed information is available here:" << Endl;
   if (gConfig().WriteOptionsReference()) {
      Log() << "<a href=\"http://root.cern.ch/root/html/TMultiLayerPerceptron.html\">";
      Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html</a>" << Endl;
   }
   else Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html" << Endl;
   Log() << Endl;
}