100 const TString& theWeightFile) :
143 while (layerSpec.
Length()>0) {
145 if (layerSpec.
First(
',')<0) {
150 sToAdd = layerSpec(0,layerSpec.
First(
','));
151 layerSpec = layerSpec(layerSpec.
First(
',')+1,layerSpec.
Length());
155 nNodes += atoi(sToAdd);
160 std::vector<TString>::iterator itrVar = (*fInputVars).begin();
161 std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
163 for (; itrVar != itrVarEnd; ++itrVar) {
197 DeclareOptionRef(
fLayerSpec =
"N,N-1",
"HiddenLayers",
"Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );
200 "Fraction of events in training tree used for cross validation" );
219 Log() << kFATAL <<
"Mechanism to ignore events with negative weights in training not available for method"
221 <<
" --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
234 for (
UInt_t ivar = 0; ivar<
Data()->GetNVariables(); ivar++) {
263 const Long_t basketsize = 128000;
266 TTree *localTrainingTree =
new TTree(
"TMLPtrain",
"Local training tree for TMlpANN" );
267 localTrainingTree->
Branch(
"type", &type,
"type/I", basketsize );
268 localTrainingTree->
Branch(
"weight", &weight,
"weight/F", basketsize );
273 localTrainingTree->
Branch( myVar.
Data(), &vArr[ivar], myTyp.
Data(), basketsize );
276 for (
UInt_t ievt=0; ievt<
Data()->GetNEvents(); ievt++) {
281 type =
DataInfo().IsSignal( ev ) ? 1 : 0;
283 localTrainingTree->
Fill();
293 trainList += (
Int_t)
Data()->GetNEvtSigTrain();
294 trainList +=
" || (Entry$>";
295 trainList += (
Int_t)
Data()->GetNEvtSigTrain();
296 trainList +=
" && Entry$<";
302 Log() << kHEADER <<
"Requirement for training events: \"" << trainList <<
"\"" <<
Endl;
303 Log() << kINFO <<
"Requirement for validation events: \"" << testList <<
"\"" <<
Endl;
313 fMLP->SetEventWeight(
"weight" );
328 fMLP->SetLearningMethod( learningMethod );
335 delete localTrainingTree;
351 fMLP->DumpWeights( tmpfile.
Data() );
352 std::ifstream inf( tmpfile.
Data() );
356 while (inf.getline(temp,256)) {
362 dummy = dummy(0,dummy.
First(
' '));
367 data += (dummy +
" ");
385 std::ofstream fout( fname.
Data() );
386 double temp1=0,temp2=0;
389 std::stringstream content(nodecontent);
391 fout <<
"#input normalization" << std::endl;
392 while ((content >> temp1) &&(content >> temp2)) {
393 fout << temp1 <<
" " << temp2 << std::endl;
397 fout <<
"#output normalization" << std::endl;
398 while ((content >> temp1) &&(content >> temp2)) {
399 fout << temp1 <<
" " << temp2 << std::endl;
403 fout <<
"#neurons weights" << std::endl;
404 while (content >> temp1) {
405 fout << temp1 << std::endl;
409 fout <<
"#synapses weights" ;
410 while (content >> temp1) {
411 fout << std::endl << temp1 ;
421 TTHREAD_TLS(
Int_t) type;
424 TTree * dummyTree =
new TTree(
"dummy",
"Empty dummy tree", 1);
425 for (
UInt_t ivar = 0; ivar<
Data()->GetNVariables(); ivar++) {
430 dummyTree->
Branch(
"type", &type,
"type/I");
434 fMLP->LoadWeights( fname );
444 std::ofstream fout(
"./TMlp.nn.weights.temp" );
445 fout << istr.rdbuf();
449 Log() << kINFO <<
"Load TMLP weights into " <<
fMLP <<
Endl;
454 TTree * dummyTree =
new TTree(
"dummy",
"Empty dummy tree", 1);
455 for (
UInt_t ivar = 0; ivar<
Data()->GetNVariables(); ivar++) {
460 dummyTree->
Branch(
"type", &type,
"type/I");
465 fMLP->LoadWeights(
"./TMlp.nn.weights.temp" );
479 if (theClassFileName ==
"")
482 classFileName = theClassFileName;
485 Log() << kINFO <<
"Creating specific (TMultiLayerPerceptron) standalone response class: " << classFileName <<
Endl;
486 fMLP->Export( classFileName.
Data() );
508 Log() <<
"This feed-forward multilayer perceptron neural network is the " <<
Endl;
509 Log() <<
"standard implementation distributed with ROOT (class TMultiLayerPerceptron)." <<
Endl;
511 Log() <<
"Detailed information is available here:" <<
Endl;
512 if (
gConfig().WriteOptionsReference()) {
513 Log() <<
"<a href=\"https://root.cern/doc/master/classTMultiLayerPerceptron.html\">";
514 Log() <<
"https://root.cern/doc/master/classTMultiLayerPerceptron.html</a>" <<
Endl;
516 else Log() <<
"https://root.cern/doc/master/classTMultiLayerPerceptron.html" <<
Endl;
#define REGISTER_METHOD(CLASS)
for example
const Bool_t EnforceNormalization__
int Int_t
Signed integer 4 bytes (int).
unsigned int UInt_t
Unsigned integer 4 bytes (unsigned int).
long Long_t
Signed long integer (long). Size depends on architecture: typically 4 bytes on 32-bit systems and 8 bytes on 64-bit Unix-like systems.
bool Bool_t
Boolean (0=false, 1=true) (bool).
double Double_t
Double 8 bytes.
float Float_t
Float 4 bytes (float).
OptionBase * DeclareOptionRef(T &ref, const TString &name, const TString &desc="")
void AddPreDefVal(const T &)
Class that contains all the data information.
Float_t GetValue(UInt_t ivar) const
return value of i'th variable
Double_t GetWeight() const
return the event weight - depending on whether the flag IgnoreNegWeightsInTraining is set or not.
MethodBase(const TString &jobName, Types::EMVA methodType, const TString &methodTitle, DataSetInfo &dsi, const TString &theOption="")
standard constructor
const char * GetName() const override
TString GetMethodTypeName() const
const TString & GetJobName() const
Bool_t IgnoreEventsWithNegWeightsInTraining() const
const TString & GetWeightFileDir() const
const TString & GetMethodName() const
const Event * GetEvent() const
DataSetInfo & DataInfo() const
UInt_t GetNVariables() const
void NoErrorCalc(Double_t *const err, Double_t *const errUpper)
const TString & GetInternalVarName(Int_t ivar) const
Double_t fValidationFraction
fraction of events in training tree used for cross validation
void ReadWeightsFromStream(std::istream &istr) override
read weights from stream; since the MLP cannot read from the stream, we first write the weights to a tem...
void MakeClass(const TString &classFileName=TString("")) const override
create reader class for classifier -> overrides base class function; creates a specific class for TMulti...
void Train(void) override
performs TMlpANN training; available learning methods:
TString fLearningMethod
the learning method (given via option string)
TString fMLPBuildOptions
option string to build the mlp
void ReadWeightsFromXML(void *wghtnode) override
rebuild temporary textfile from xml weightfile and load this file into MLP
void AddWeightsXMLTo(void *parent) const override
write weights to xml file
void DeclareOptions() override
define the options (their key words) that can be set in the option string
TTree * fLocalTrainingTree
local copy of training tree
void CreateMLPOptions(TString)
translates options from option string into TMlpANN language
void ProcessOptions() override
builds the neural network as specified by the user
MethodTMlpANN(const TString &jobName, const TString &methodTitle, DataSetInfo &theData, const TString &theOption="3000:N-1:N-2")
standard constructor
TMultiLayerPerceptron * fMLP
the TMLP
Double_t GetMvaValue(Double_t *err=nullptr, Double_t *errUpper=nullptr) override
calculate the value of the neural net for the current event
TString fLayerSpec
Layer specification option.
void Init(void) override
default initialisations
virtual ~MethodTMlpANN(void)
destructor
Int_t fNcycles
number of training cycles
Bool_t HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets) override
TMlpANN can handle classification with 2 classes.
void GetHelpMessage() const override
get help message text
void MakeClassSpecific(std::ostream &, const TString &) const override
write specific classifier response nothing to do here - all taken care of by TMultiLayerPerceptron
TString fHiddenLayer
string containing the hidden layer structure
Singleton class for Global types used by TMVA.
This class describes a neural network.
TSubString Strip(EStripType s=kTrailing, char c=' ') const
Return a substring of self stripped at beginning and/or end.
Ssiz_t First(char c) const
Find first occurrence of a character c.
const char * Data() const
TString & ReplaceAll(const TString &s1, const TString &s2)
Bool_t BeginsWith(const char *s, ECaseCompare cmp=kExact) const
TString & Remove(Ssiz_t pos)
static TString Format(const char *fmt,...)
Static method which formats a string using a printf style format descriptor and return a TString.
A TTree represents a columnar dataset.
virtual Int_t Fill()
Fill all branches.
TBranch * Branch(const char *name, T *obj, Int_t bufsize=32000, Int_t splitlevel=99)
Add a new branch, and infer the data type from the type of obj being passed.
create variable transformations
MsgLogger & Endl(MsgLogger &ml)