   // constructor: store the tuning setup; regression is not yet supported
   fTuneParameters(tuneParameters),
   // ...
   fOptimizationFitType(optimizationFitType),
   // ...
{
   // name used for this optimizer's message logger
   std::string name = "OptimizeConfigParameters_";
   // ...
   if (fMethod->DoRegression()) {
      Log() << kFATAL << " ERROR: Sorry, Regression is not yet implemented for automatic parameter optimization"
            << " --> exit" << Endl;
   }
   Log() << kINFO << "Automatic optimisation of tuning parameters in "
         << GetMethod()->GetName() << " uses:" << Endl;

   std::map<TString,TMVA::Interval*>::iterator it;
   for (it = fTuneParameters.begin(); it != fTuneParameters.end(); it++) {
      Log() << kINFO << it->first
            << " in range from: " << it->second->GetMin()
            << " to: " << it->second->GetMax()
            << " in : " << it->second->GetNbins() << " steps"
            << Endl;
   }
   // ...

// from the destructor: determine the y-axis range of the FOM-vs-iteration graph that is stored
      if (ymin > y[i]) ymin = y[i];
      if (ymax < y[i]) ymax = y[i];
// from optimize(): handle an unsupported fOptimizationFitType, then report the result
      Log() << kFATAL << "You have chosen as optimization type " << fOptimizationFitType
            << " that is not (yet) coded --> exit()" << Endl;
   // ...
   // print the tuned parameter values that were found
   std::map<TString,Double_t>::iterator it;
   for (it = fTunedParameters.begin(); it != fTunedParameters.end(); it++) {
      Log() << kINFO << it->first << " = " << it->second << Endl;
   }
   return fTunedParameters;
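// Illustrative usage sketch (not part of this file): how the class is typically driven once a
// method has been booked. The parameter names and interval settings below are examples only;
// TMVA::Interval takes (min, max, nbins), and optimize() returns the tuned values (they are
// also propagated to the method via SetTuneParameters):
//
//   std::map<TString,TMVA::Interval*> tuneParameters;
//   tuneParameters.insert(std::pair<TString,TMVA::Interval*>("MaxDepth", new TMVA::Interval(2, 4, 3)));
//   tuneParameters.insert(std::pair<TString,TMVA::Interval*>("NTrees",   new TMVA::Interval(50, 500, 10)));
//
//   TMVA::OptimizeConfigParameters optimizer(method, tuneParameters, "ROCIntegral", "FitGA");
//   std::map<TString,Double_t> tuned = optimizer.optimize();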
// GetScanIndices(val, base): helper to enumerate all combinations in the parameter-scan grid
   std::vector<int> indices;
   for (UInt_t i = 0; i < base.size(); i++) {
      indices.push_back(val % base[i]);
      val = int( floor( float(val) / float(base[i]) ) );
   }
   return indices;
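// Worked example (illustrative values): GetScanIndices performs a mixed-radix decomposition of
// the linear scan counter. With base = {5, 4, 3}, i.e. three tuning parameters scanned in 5, 4
// and 3 steps, the counter val = 37 decomposes as
//   indices[0] = 37 % 5 = 2,   val -> floor(37/5) = 7
//   indices[1] =  7 % 4 = 3,   val -> floor(7/4)  = 1
//   indices[2] =  1 % 3 = 1
// so this grid point uses element 2 of the first parameter, 3 of the second and 1 of the third;
// counting val from 0 to 5*4*3 - 1 = 59 visits every combination exactly once.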
// optimizeScan(): evaluate the figure of merit on a full grid over the tuning parameters
   Double_t bestFOM = -1000000, currentFOM;

   std::map<TString,Double_t> currentParameters;
   std::map<TString,TMVA::Interval*>::iterator it;

   // start every parameter at the lower edge of its interval
   currentParameters.clear();
   for (it = fTuneParameters.begin(); it != fTuneParameters.end(); it++) {
      currentParameters.insert(std::pair<TString,Double_t>(it->first, it->second->GetMin()));
      fTunedParameters.insert(std::pair<TString,Double_t>(it->first, it->second->GetMin()));
   }

   // collect, for each parameter, the list of grid points to be scanned
   std::vector< std::vector<Double_t> > v;
   for (it = fTuneParameters.begin(); it != fTuneParameters.end(); it++) {
      std::vector<Double_t> tmp;
      for (Int_t k = 0; k < it->second->GetNbins(); k++) {
         tmp.push_back(it->second->GetElement(k));
      }
      v.push_back(tmp);
   }

   // total number of combinations and number of scan steps per parameter
   Int_t Ntot = 1;
   std::vector<int> Nindividual;
   for (UInt_t i = 0; i < v.size(); i++) {
      Ntot *= v[i].size();
      Nindividual.push_back(v[i].size());
   }
   // loop over all combinations of the scan grid
   for (int i = 0; i < Ntot; i++) {
      UInt_t index = 0;
      std::vector<int> indices = GetScanIndices(i, Nindividual);
      // translate the per-parameter indices into the actual parameter values
      for (it = fTuneParameters.begin(), index = 0; index < indices.size(); index++, it++) {
         currentParameters[it->first] = v[index][indices[index]];
      }
      Log() << kINFO << "--------------------------" << Endl;
      Log() << kINFO << "Settings being evaluated:" << Endl;
      for (std::map<TString,Double_t>::iterator it_print = currentParameters.begin();
           it_print != currentParameters.end(); it_print++) {
         Log() << kINFO << "  " << it_print->first << " = " << it_print->second << Endl;
      }
      // the input transformations only need to be calculated once
      if (i == 0) GetMethod()->GetTransformationHandler().CalcTransformations(
                     GetMethod()->Data()->GetEventCollection());
      // ... train the method with the current parameter settings, then evaluate it
      currentFOM = GetFOM();
      Log() << kINFO << "FOM was found : " << currentFOM << "; current best is " << bestFOM << Endl;

      if (currentFOM > bestFOM) {
         bestFOM = currentFOM;
         for (std::map<TString,Double_t>::iterator iter = currentParameters.begin();
              iter != currentParameters.end(); iter++) {
            fTunedParameters[iter->first] = iter->second;
         }
      }
   }
// optimizeFit(): let a fitter (Minuit or a genetic algorithm) search the parameter space
   std::vector<TMVA::Interval*> ranges;        // fit ranges, one Interval per tuning parameter
   std::map<TString, TMVA::Interval*>::iterator it;
   std::vector<Double_t> pars;                 // starting values of the fit parameters
   for (it = fTuneParameters.begin(); it != fTuneParameters.end(); it++) {
      ranges.push_back(new TMVA::Interval(*(it->second)));
      pars.push_back( (it->second)->GetMean() );   // keep the same ordering as the fTuneParameters iterator
   }

   // create the fitter according to the requested fOptimizationFitType
   FitterBase* fitter = NULL;
   if (fOptimizationFitType == "Minuit") {
      TString opt = "FitStrategy=0:UseImprove=False:UseMinos=False:Tolerance=100";
      fitter = new MinuitFitter( *this,
                                 "FitterMinuit_BDTOptimize",
                                 ranges, opt );
   } else if (fOptimizationFitType == "FitGA") {
      TString opt = "PopSize=20:Steps=30:Cycles=3:ConvCrit=0.01:SaveBestCycle=5";
      fitter = new GeneticFitter( *this,
                                  "FitterGA_BDTOptimize",
                                  ranges, opt );
   } else {
      Log() << kWARNING << " you did not specify a valid OptimizationFitType,"
            << " will use the default (FitGA) " << Endl;
      TString opt = "PopSize=20:Steps=30:Cycles=3:ConvCrit=0.01:SaveBestCycle=5";
      fitter = new GeneticFitter( *this,
                                  "FitterGA_BDTOptimize",
                                  ranges, opt );
   }

   fitter->CheckForUnusedOptions();
   fitter->Run(pars);                           // pars returns holding the fitted parameter values

   for (UInt_t ipar = 0; ipar < ranges.size(); ipar++) delete ranges[ipar];

   // copy the fitted values back into the map of tuned parameters
   Int_t jcount = 0;
   for (it = fTuneParameters.begin(); it != fTuneParameters.end(); it++) {
      fTunedParameters.insert(std::pair<TString,Double_t>(it->first, pars[jcount++]));
   }
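// Note on the hard-coded option strings above (descriptive only; see the TMVA fitter
// documentation for the authoritative meaning): for the genetic-algorithm fitter, PopSize is
// roughly the number of individuals per generation, Steps the number of generations tried
// before a convergence test, Cycles the number of independent GA runs, ConvCrit the improvement
// threshold of the convergence test, and SaveBestCycle the number of best results remembered
// per cycle. The GA is also the fallback choice, presumably because the figure of merit is a
// noisy, non-smooth function of the (partly discrete) tuning parameters, for which a Minuit
// minimisation is less reliable.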
// EstimatorFunction(pars): FOM estimator called by the fitter for a given parameter vector
   // first check the cache of already trained combinations (fAlreadyTrainedParCombination)
   std::map< std::vector<Double_t>, Double_t >::const_iterator iter;
   // ...
   std::map<TString,Double_t> currentParameters;
   Int_t icount = 0;    // map the flat fit-parameter vector "pars" onto the named tuning parameters,
                        // keeping exactly the iteration order of fTuneParameters
   std::map<TString, TMVA::Interval*>::iterator it;
   for (it = fTuneParameters.begin(); it != fTuneParameters.end(); it++) {
      currentParameters[it->first] = pars[icount++];
   }
   // ...
   GetMethod()->GetTransformationHandler().
      CalcTransformations(GetMethod()->Data()->GetEventCollection());
// GetFOM(): return the figure of merit selected via fFOMType
   if (fMethod->DoRegression()) {
      std::cout << " ERROR: Sorry, Regression is not yet implemented for automatic parameter optimisation"
                << " --> exit" << std::endl;
   }
   // ... (classification: dispatch on fFOMType, see the sketch below; an unknown type is fatal)
   Log() << kFATAL << " ERROR, you've specified as Figure of Merit in the"
         << " parameter optimisation " << fFOMType << " which has not"
         << " been implemented yet!! ---> exit " << Endl;
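// Sketch of the fFOMType dispatch elided above (reconstruction for orientation only; the exact
// option strings may differ): each supported figure of merit maps onto one of the helper
// functions implemented further below, e.g.
//   if      (fFOMType == "Separation")        fom = GetSeparation();
//   else if (fFOMType == "ROCIntegral")       fom = GetROCIntegral();
//   else if (fFOMType == "SigEffAtBkgEff0.1") fom = GetSigEffAtBkgEff(0.1);
//   else if (fFOMType == "BkgRejAtSigEff0.5") fom = GetBkgRejAtSigEff(0.5);
//   else if (fFOMType == "BkgEffAtSigEff0.5") fom = GetBkgEffAtSigEff(0.5);
// and any other string falls through to the kFATAL message above.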
// GetMVADists(): fill the signal/background MVA-output histograms from the test events
   for (UInt_t iev = 0; iev < events.size(); iev++) {
      if (events[iev]->GetClass() == signalClassNr) {
         // ... fill the signal histograms
      } else {
         // ... fill the background histograms
      }
   }
// GetSeparation(): warning printed by the histogram-based (non-PDF) branch
      std::cout << "Separation calculation via histograms (not PDFs) still seems to give strange results!! Don't do that, check!!" << std::endl;
      // GetROCIntegral(), PDF-based branch: scan cut values in nsteps steps
      for (UInt_t i = 0; i < nsteps; i++) {
         // ...
      }
   // ...
      // sanity check: the fine-binned signal and background histograms must share the same binning
      std::cout << " Error in OptimizeConfigParameters GetROCIntegral, unequal histograms for sig and bkg.." << std::endl;
   // ...
      // histogram-based branch: normalise the signal distribution ...
      for (Int_t ibin = 1; ibin <= nbins; ibin++) {
         // ... sum of (bin content x bin width) of the signal histogram
      }
      // ... then accumulate the ROC integral bin by bin (see the note below)
      for (Int_t ibin = 1; ibin <= nbins; ibin++) {
         // ... background cumulative x normalised signal content x bin width
      }
// GetSigEffAtBkgEff(bkgEff): signal efficiency at a fixed background efficiency
      std::cout << " Error in OptimizeConfigParameters GetSigEffAt, unequal histograms for sig and bkg.." << std::endl;
   // ...
      // lower the cut until the requested background efficiency is reached
      while (bkgCumulator[nbins-ibin] > (1-bkgEff)) {
         sigEff = sigCumulator[nbins] - sigCumulator[nbins-ibin];
         ibin++;
      }
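// How to read the loop above: bkgCumulator and sigCumulator are the normalised cumulative
// distributions returned by TH1::GetIntegral(), so keeping the top "ibin" bins above the cut
// corresponds to a background efficiency of
//   eff_bkg = 1 - bkgCumulator[nbins-ibin]
// The cut is lowered until eff_bkg reaches (or just exceeds) the requested value, and the signal
// efficiency at that working point is sigCumulator[nbins] - sigCumulator[nbins-ibin].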
// GetBkgEffAtSigEff(sigEff): background efficiency at a fixed signal efficiency
      std::cout << " Error in OptimizeConfigParameters GetBkgEffAt, unequal histograms for sig and bkg.." << std::endl;
   // ...
      // lower the cut until the requested signal efficiency is reached
      while (sigCumulator[nbins] - sigCumulator[nbins-ibin] < sigEff) {
         bkgEff = bkgCumulator[nbins] - bkgCumulator[nbins-ibin];
         ibin++;
      }
// GetBkgRejAtSigEff(sigEff): background rejection at a fixed signal efficiency
      std::cout << " Error in OptimizeConfigParameters GetBkgRejAt, unequal histograms for sig and bkg.." << std::endl;
   // ...
      // lower the cut until the requested signal efficiency is reached
      while (sigCumulator[nbins] - sigCumulator[nbins-ibin] < sigEff) {
         bkgRej = bkgCumulator[nbins-ibin];
         ibin++;
      }
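// Relation to GetBkgEffAtSigEff above: since the cumulative distributions from
// TH1::GetIntegral() are normalised (bkgCumulator[nbins] = 1), the value returned here,
// bkgCumulator[nbins-ibin], equals 1 - (bkgCumulator[nbins] - bkgCumulator[nbins-ibin]),
// i.e. the background rejection is simply one minus the background efficiency at the same
// signal-efficiency working point.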