45 return fROCCurves.get();
54 MsgLogger fLogger(
"HyperParameterOptimisation");
56 for(
UInt_t j=0;
j<fFoldParameters.size(); ++
j) {
57 fLogger<<kHEADER<<
"===========================================================" <<
Endl;
58 fLogger<<kINFO<<
"Optimisation for " << fMethodName <<
" fold " <<
j+1 <<
Endl;
60 for(
auto &it : fFoldParameters.at(
j)) {
61 fLogger<<kINFO<< it.first <<
" " << it.second <<
Endl;
71 fFomType(
"Separation"),
75 fClassifier(
new TMVA::
Factory(
"HyperParameterOptimisation",
"!V:!ROC:Silent:!ModelPersistence:!Color:!DrawProgressBar:AnalysisType=Classification"))
97 for (
auto &
meth : fMethods) {
104 fDataLoader->MakeKFoldDataSet(split);
107 fResults.fMethodName = methodName;
109 for (
UInt_t i = 0; i < fNumFolds; ++i) {
117 auto smethod = fClassifier->BookMethod(fDataLoader.get(), methodName, methodTitle,
methodOptions);
119 auto params =
smethod->OptimizeTuningParameters(fFomType, fFitType);
120 fResults.fFoldParameters.push_back(params);
124 fClassifier->DeleteAllMethods();
126 fClassifier->fMethodsMap.clear();
ROOT::Detail::TRangeCast< T, true > TRangeDynCast
TRangeDynCast is an adapter class that allows typed iteration through a TCollection.
Abstract base class for all high-level ML algorithms; you can book ML methods like BDT, ...
static void SetIsTraining(Bool_t)
when this static function is called, it sets the flag whether events with negative event weight should ...
This is the main MVA steering class.
TMultiGraph * GetROCCurves(Bool_t fLegend=kTRUE)
~HyperParameterOptimisationResult()
HyperParameterOptimisationResult()
virtual void Evaluate()
Virtual method to be implemented with your algorithm.
HyperParameterOptimisation(DataLoader *dataloader)
void SetNumFolds(UInt_t folds)
~HyperParameterOptimisation()
ostringstream derivative to redirect and format output
static void EnableOutput()
A TMultiGraph is a collection of TGraph (or derived) objects.
create variable transformations
MsgLogger & Endl(MsgLogger &ml)