// Excerpts from TMVA::MethodTMlpANN, TMVA's interface to ROOT's
// TMultiLayerPerceptron neural network.

#ifndef ROOT_TMVA_Tools
#include "TMVA/Tools.h"
#endif

// --- constructors (standard, and from a weight file) ---
// both initialise the same members:
TMVA::MethodTMlpANN::MethodTMlpANN( DataSetInfo& theData,
                                    const TString& theWeightFile ) :
   // ...
   fLocalTrainingTree(0),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
}

// --- CreateMLPOptions( TString layerSpec ) ---
// translates options from the option string into the TMlpANN language:
   while (layerSpec.Length()>0) {
      TString sToAdd = "";
      if (layerSpec.First(',')<0) {
         sToAdd    = layerSpec;
         layerSpec = "";
      }
      else {
         sToAdd    = layerSpec(0,layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
      }
      // "N" stands for the number of input variables; any integer offset
      // in the token (e.g. "N-1") is then added via atoi
      Int_t nNodes = 0;
      if (sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
      nNodes += atoi(sToAdd);
      // ...
   }

   // set the input variables
   std::vector<TString>::iterator itrVar    = (*fInputVars).begin();
   std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
   for (; itrVar != itrVarEnd; ++itrVar) {
      // ...
   }
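
To make the translation concrete, here is a standalone distillation of the
loop above. It is illustrative only: nvar = 4 is an assumed input-variable
count standing in for GetNvar(), and only the hidden-layer part of the build
option is assembled.

#include <cstdlib>
#include <cstdio>
#include "TString.h"

int main() {
   TString layerSpec("N,N-1");   // default value of the HiddenLayers option
   const Int_t nvar = 4;         // assumed number of input variables
   TString hidden(":");
   while (layerSpec.Length() > 0) {
      TString sToAdd = "";
      if (layerSpec.First(',') < 0) { sToAdd = layerSpec; layerSpec = ""; }
      else {
         sToAdd    = layerSpec(0, layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1, layerSpec.Length());
      }
      Int_t nNodes = 0;
      if (sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = nvar; }
      nNodes += atoi(sToAdd.Data());     // "N" -> 4, "N-1" -> 4 + (-1) = 3
      hidden += TString::Format("%i:", nNodes);
   }
   printf("%s\n", hidden.Data());        // prints ":4:3:"
   return 0;
}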

// --- DeclareOptions() ---
// defines the options (key words) that can be set in the booking string:
   DeclareOptionRef( fLayerSpec = "N,N-1", "HiddenLayers",
                     "Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );
   DeclareOptionRef( fValidationFraction = 0.5, "ValidationFraction",
                     "Fraction of events in training tree used for cross validation" );

// --- ProcessOptions() ---
// builds the neural network as specified by the user; events with negative
// weights cannot be ignored for this method:
   if (IgnoreEventsWithNegWeightsInTraining()) {
      Log() << kFATAL << "Mechanism to ignore events with negative weights in training not available for method"
            << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string." << Endl;
   }

// --- Train() ---
// TMultiLayerPerceptron needs training and validation events in a single
// tree, so a local copy of the dataset is built first:
   Int_t   type;
   Float_t weight;
   const Long_t basketsize = 128000;
   Float_t* vArr = new Float_t[GetNvar()];

   TTree *localTrainingTree = new TTree( "TMLPtrain", "Local training tree for TMlpANN" );
   localTrainingTree->Branch( "type",   &type,   "type/I",   basketsize );
   localTrainingTree->Branch( "weight", &weight, "weight/F", basketsize );

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      const char* myVar = GetInternalVarName(ivar).Data();
      localTrainingTree->Branch( myVar, &vArr[ivar], Form("Var%02i/F", ivar), basketsize );
   }

   // copy type, weight and variable values of each training event into the
   // local tree
   for (UInt_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
      // ...
      localTrainingTree->Fill();
   }

   // selection strings for the mlp train method: the first
   // (1-fValidationFraction) of the signal block and of the background block
   // are used for training, the rest for internal validation (entries are
   // ordered signal first, then background)
   TString trainList = "Entry$<";
   // ... (signal and background training-event counts are inserted here)
   trainList += " || (Entry$>";
   // ...
   trainList += " && Entry$<";
   // ...
   TString testList = TString("!(") + trainList + ")";

   // print the requirements
   Log() << kHEADER << "Requirement for training events: \"" << trainList << "\"" << Endl;
   Log() << kINFO << "Requirement for validation events: \"" << testList << "\"" << Endl;
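
For illustration, with assumed counts of 1000 signal and 800 background
training events (standing in for the elided event-count insertions above) and
the default ValidationFraction=0.5, the two requirements would come out
roughly as:

Requirement for training events: "Entry$<0.5*1000 || (Entry$>1000 && Entry$<1400)"
Requirement for validation events: "!(Entry$<0.5*1000 || (Entry$>1000 && Entry$<1400))"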

   // set the learning method; the name of the TMultiLayerPerceptron enum
   // changed with the ROOT version, hence the guard:
#if ROOT_VERSION_CODE > ROOT_VERSION(5,13,06)
   // ... (TMultiLayerPerceptron::ELearningMethod in recent versions)
#endif
   // ...
   delete localTrainingTree;

// --- AddWeightsXMLTo( void* parent ) const ---
// writes the weights to the XML file; the MLP can only dump its weights to a
// plain text file, so that temporary file is written first and then parsed
// into XML child nodes (one per '#' section header):
   std::ifstream inf( tmpfile.Data() );
   char temp[256];
   TString data("");
   while (inf.getline(temp,256)) {
      TString dummy(temp);
      // ... ('#' lines open a new child node named after the first word)
      data += (dummy + " ");
   }

// --- ReadWeightsFromXML( void* wghtnode ) ---
// rebuilds the temporary text file from the XML weight file and loads it
// into the MLP:
   std::ofstream fout( fname.Data() );
   double temp1=0, temp2=0;
   while (ch) {   // loop over the XML child nodes
      const char* nodecontent = gTools().GetContent(ch);
      std::stringstream content(nodecontent);
      if (strcmp(gTools().GetName(ch),"input")==0) {
         fout << "#input normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"output")==0) {
         fout << "#output normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << tem2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"neurons")==0) {
         fout << "#neurons weights" << std::endl;
         while (content >> temp1) {
            fout << temp1 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"synapses")==0) {
         fout << "#synapses weights";
         while (content >> temp1) {
            fout << std::endl << temp1;
         }
      }
      ch = gTools().GetNextChild(ch);
   }

   // create a dummy tree, needed only to build a minimal network for
   // testing, evaluation and application
   TTree* dummyTree = new TTree( "dummy", "Empty dummy tree", 1 );
   // ...
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
   fMLP->LoadWeights( fname );

// --- ReadWeightsFromStream( std::istream& istr ) ---
// the MLP cannot read from a stream, so 1st: write the weights to a
// temporary text file,
   std::ofstream fout( "./TMlp.nn.weights.temp" );
   fout << istr.rdbuf();
   fout.close();
// 2nd: rebuild a minimal network around a dummy tree and load the weights
// from that file:
   TTree* dummyTree = new TTree( "dummy", "Empty dummy tree", 1 );
   // ... (one branch per input variable)
   dummyTree->Branch( "type", &type, "type/I" );
   // ...

// --- MakeClass( const TString& theClassFileName ) const ---
// creates the standalone response class; overwrites the base-class function,
// since TMultiLayerPerceptron brings its own export mechanism:
   TString classFileName = "";
   if (theClassFileName == "")
      classFileName = GetWeightFileDir() + "/" + GetJobName() + "_" + GetMethodName() + ".class";
   else
      classFileName = theClassFileName;

   classFileName.ReplaceAll(".class","");
   Log() << kINFO << "Creating specific (TMultiLayerPerceptron) standalone response class: "
         << classFileName << Endl;
   fMLP->Export( classFileName.Data() );
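
Export() writes the network as ROOT-independent source files. A hypothetical
usage sketch follows; the class name myNN and the Value(index, inputs)
signature are assumptions based on TMultiLayerPerceptron's documented C++
export format, not taken from this file:

#include "myNN.h"                         // generated by fMLP->Export("myNN")

int main() {
   myNN net;                              // exported standalone network
   double in[4] = { 0.1, -0.5, 1.2, 0.7 };   // one event's inputs (4 assumed)
   double response = net.Value( 0, in );     // response of output neuron 0
   (void)response;
   return 0;
}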

// --- GetHelpMessage() const ---
// prints the help message text:
   // ...
   Log() << "This feed-forward multilayer perceptron neural network is the " << Endl;
   Log() << "standard implementation distributed with ROOT (class TMultiLayerPerceptron)." << Endl;
   // ...
   Log() << "Detailed information is available here:" << Endl;
   if (gConfig().WriteOptionsReference()) {
      Log() << "<a href=\"http://root.cern.ch/root/html/TMultiLayerPerceptron.html\">";
      Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html</a>" << Endl;
   }
   else Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html" << Endl;

Member reference for TMVA::MethodTMlpANN (brief descriptions from the class
documentation):

MethodTMlpANN(const TString &jobName, const TString &methodTitle, DataSetInfo &theData, const TString &theOption="3000:N-1:N-2")
   standard constructor
virtual ~MethodTMlpANN(void)
   destructor
virtual Bool_t HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets)
   TMlpANN can handle classification with 2 classes
void Init(void)
   default initialisations
void DeclareOptions()
   define the options (their key words) that can be set in the option string; known options: NCycles <integer>, HiddenLayers <string>, ValidationFraction <float>, LearningMethod <string>
void ProcessOptions()
   builds the neural network as specified by the user
void CreateMLPOptions(TString)
   translates options from the option string into TMlpANN language
void Train(void)
   performs the TMlpANN training; the available learning methods are those of TMultiLayerPerceptron
Double_t GetMvaValue(Double_t *err=0, Double_t *errUpper=0)
   calculate the value of the neural net for the current event
void AddWeightsXMLTo(void *parent) const
   write weights to the xml weight file
void ReadWeightsFromXML(void *wghtnode)
   rebuild the temporary text file from the xml weight file and load it into the MLP
void ReadWeightsFromStream(std::istream &istr)
   read weights from a stream; since the MLP cannot read from the stream, the weights are first written to a temporary file
void MakeClass(const TString &classFileName=TString("")) const
   create a reader class for the classifier; overwrites the base-class function to create a class specific to TMultiLayerPerceptron
void MakeClassSpecific(std::ostream &, const TString &) const
   write the specific classifier response; nothing to do here, all is taken care of by TMultiLayerPerceptron
void GetHelpMessage() const
   get help message text
TMultiLayerPerceptron *fMLP
Double_t fValidationFraction

The source file also defines the file-scope constant EnforceNormalization__
and registers the method with TMVA's ClassifierFactory via the
REGISTER_METHOD(TMlpANN) macro.
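
For completeness, a hypothetical application sketch: a trained TMlpANN is
evaluated on new events through the standard TMVA::Reader interface, which
calls GetMvaValue() internally. The variable names and the weight-file path
below are placeholders:

#include "TMVA/Reader.h"

int main() {
   TMVA::Reader reader( "!Color:!Silent" );
   Float_t var1 = 0, var2 = 0;            // placeholder input variables
   reader.AddVariable( "var1", &var1 );
   reader.AddVariable( "var2", &var2 );
   // placeholder path to the weight file written by AddWeightsXMLTo
   reader.BookMVA( "TMlpANN", "weights/TMVAClassification_TMlpANN.weights.xml" );
   var1 = 0.3f; var2 = -1.2f;             // fill with one event's values
   Double_t mva = reader.EvaluateMVA( "TMlpANN" );
   (void)mva;
   return 0;
}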