MethodRSVM.cxx

// @(#)root/tmva/rmva $Id$
// Author: Omar Zapata, Lorenzo Moneta, Sergei Gleyzer 2015

/**********************************************************************************
 * Project: TMVA - a ROOT-integrated toolkit for multivariate data analysis
 * Package: TMVA
 * Class  : MethodRSVM
 * Web    : http://oproject.org
 *
 * Description:
 *      Support Vector Machines
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted according to the terms listed in LICENSE
 * (see tmva/doc/LICENSE)
 **********************************************************************************/

#include <iomanip>

#include "TMath.h"
#include "Riostream.h"
#include "TMatrix.h"
#include "TMatrixD.h"
#include "TVectorD.h"

#include "TMVA/MethodRSVM.h"
#include "TMVA/Tools.h"
#include "TMVA/Config.h"
#include "TMVA/Ranking.h"
#include "TMVA/Types.h"
#include "TMVA/PDF.h"
#include "TMVA/ClassifierFactory.h"

#include "TMVA/Results.h"
#include "TMVA/Timer.h"
using namespace TMVA;

REGISTER_METHOD(RSVM)

// create the interpreter instance and try to load the required e1071 package
Bool_t MethodRSVM::IsModuleLoaded = ROOT::R::TRInterface::Instance().Require("e1071");

//_______________________________________________________________________
MethodRSVM::MethodRSVM(const TString &jobName,
                       const TString &methodTitle,
                       DataSetInfo &dsi,
                       const TString &theOption) :
   RMethodBase(jobName, Types::kRSVM, methodTitle, dsi, theOption),
   fMvaCounter(0),
   svm("svm"),
   predict("predict"),
   asfactor("as.factor"),
   fModel(NULL)
{
   // standard constructor for the RSVM
   // booking options
   fScale = kTRUE;
   fType = "C-classification";
   fKernel = "radial";
   fDegree = 3;

   fGamma = (fDfTrain.GetNcols() == 1) ? 1.0 : (1.0 / fDfTrain.GetNcols());
   fCoef0 = 0;
   fCost = 1;
   fNu = 0.5;
   fCacheSize = 40;
   fTolerance = 0.001;
   fEpsilon = 0.1;
   fShrinking = kTRUE;
   fCross = 0;
   fProbability = kTRUE;
   fFitted = kTRUE;
}

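// Illustrative sketch (an addition for this write-up, not part of the original
// class): the gamma default chosen above is 1/(number of input columns) - for
// example, four input variables give gamma = 0.25 - with a fallback to 1.0 when
// the training frame has a single column. The hypothetical helper below merely
// restates that arithmetic.
namespace {
   inline Double_t RSVMDefaultGamma(Int_t ncols)
   {
      // mirrors the constructor logic: 1/ncols, guarded against ncols <= 1
      return (ncols <= 1) ? 1.0 : 1.0 / ncols;
   }
}
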
//_______________________________________________________________________
MethodRSVM::MethodRSVM(DataSetInfo &theData, const TString &theWeightFile) :
   RMethodBase(Types::kRSVM, theData, theWeightFile),
   fMvaCounter(0),
   svm("svm"),
   predict("predict"),
   asfactor("as.factor"),
   fModel(NULL)
{
   // constructor used when reading the method from a weight file
   // booking options
   fScale = kTRUE;
   fType = "C-classification";
   fKernel = "radial";
   fDegree = 3;

   fGamma = (fDfTrain.GetNcols() == 1) ? 1.0 : (1.0 / fDfTrain.GetNcols());
   fCoef0 = 0;
   fCost = 1;
   fNu = 0.5;
   fCacheSize = 40;
   fTolerance = 0.001;
   fEpsilon = 0.1;
   fShrinking = kTRUE;
   fCross = 0;
   fProbability = kTRUE;
   fFitted = kTRUE;
}


//_______________________________________________________________________
MethodRSVM::~MethodRSVM(void)
{
   if (fModel) delete fModel;
}

//_______________________________________________________________________
Bool_t MethodRSVM::HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t /*numberTargets*/)
{
   // RSVM handles two-class classification
   if (type == Types::kClassification && numberClasses == 2) return kTRUE;
   return kFALSE;
}


//_______________________________________________________________________
void MethodRSVM::Init()
{
   if (!IsModuleLoaded) {
      Error("Init", "R's package e1071 can not be loaded.");
      Log() << kFATAL << " R's package e1071 can not be loaded."
            << Endl;
      return;
   }
}

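// A note added for this write-up (not in the original source): Require("e1071")
// only succeeds if the e1071 package is already installed in the R environment
// that ROOT-R picks up. A minimal sketch of preparing that environment from an
// interactive R session, assuming a default CRAN mirror, is:
//
//    install.packages("e1071")   # provides svm() and predict()
//    library(e1071)              # quick check that the package loads
//
// The same check is what the static IsModuleLoaded member above performs:
// ROOT::R::TRInterface::Instance().Require("e1071") returns kFALSE when the
// package is missing, which is exactly the condition Init() reports.
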
void MethodRSVM::Train()
{
   if (Data()->GetNTrainingEvents() == 0) Log() << kFATAL << "<Train> Data() has zero events" << Endl;
   // e1071's svm() expects the class weights as a named vector
   ROOT::R::TRDataFrame ClassWeightsTrain;
   ClassWeightsTrain["background"] = Data()->GetNEvtBkgdTrain();
   ClassWeightsTrain["signal"] = Data()->GetNEvtSigTrain();

   Log() << kINFO
         << " Probability is " << fProbability
         << " Tolerance is " << fTolerance
         << " Type is " << fType
         << Endl;

   SEXP Model = svm(ROOT::R::Label["x"] = fDfTrain,
                    ROOT::R::Label["y"] = asfactor(fFactorTrain),
                    ROOT::R::Label["scale"] = fScale,
                    ROOT::R::Label["type"] = fType,
                    ROOT::R::Label["kernel"] = fKernel,
                    ROOT::R::Label["degree"] = fDegree,
                    ROOT::R::Label["gamma"] = fGamma,
                    ROOT::R::Label["coef0"] = fCoef0,
                    ROOT::R::Label["cost"] = fCost,
                    ROOT::R::Label["nu"] = fNu,
                    ROOT::R::Label["class.weights"] = ClassWeightsTrain,
                    ROOT::R::Label["cachesize"] = fCacheSize,
                    ROOT::R::Label["tolerance"] = fTolerance,
                    ROOT::R::Label["epsilon"] = fEpsilon,
                    ROOT::R::Label["shrinking"] = fShrinking,
                    ROOT::R::Label["cross"] = fCross,
                    ROOT::R::Label["probability"] = fProbability,
                    ROOT::R::Label["fitted"] = fFitted);
   fModel = new ROOT::R::TRObject(Model);

   if (IsModelPersistence())
   {
      TString path = GetWeightFileDir() + "/" + GetName() + ".RData";
      Log() << Endl;
      Log() << gTools().Color("bold") << "--- Saving State File In:" << gTools().Color("reset") << path << Endl;
      Log() << Endl;
      r["RSVMModel"] << Model;
      r << "save(RSVMModel,file='" + path + "')";
   }
}

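// For orientation (added in this write-up, not present in the original file):
// the TRFunctionImport call above is the C++ mirror of an e1071 session of the
// form sketched below, where x is the training data frame and y the vector of
// class labels; the argument names match the Label[...] keys used in Train(),
// and nb/ns stand for the number of background/signal training events.
//
//    library(e1071)
//    RSVMModel <- svm(x, as.factor(y),
//                     scale = TRUE, type = "C-classification", kernel = "radial",
//                     degree = 3, gamma = 1/ncol(x), coef0 = 0, cost = 1, nu = 0.5,
//                     class.weights = c(background = nb, signal = ns),
//                     cachesize = 40, tolerance = 0.001, epsilon = 0.1,
//                     shrinking = TRUE, cross = 0, probability = TRUE, fitted = TRUE)
//    save(RSVMModel, file = "weights/RSVM.RData")
//
// The "weights/RSVM.RData" path is only an example of what
// GetWeightFileDir() + "/" + GetName() + ".RData" expands to.
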
//_______________________________________________________________________
void MethodRSVM::DeclareOptions()
{
   DeclareOptionRef(fScale, "Scale", "A logical vector indicating the variables to be scaled. If "
                    "‘scale’ is of length 1, the value is recycled as many times "
                    "as needed. Per default, data are scaled internally (both ‘x’ "
                    "and ‘y’ variables) to zero mean and unit variance. The center "
                    "and scale values are returned and used for later predictions.");
   DeclareOptionRef(fType, "Type", "‘svm’ can be used as a classification machine, as a "
                    "regression machine, or for novelty detection. Depending on "
                    "whether ‘y’ is a factor or not, the default setting for "
                    "‘type’ is ‘C-classification’ or ‘eps-regression’, "
                    "respectively, but may be overwritten by setting an explicit value. "
                    "Valid options are: "
                    "‘C-classification’, "
                    "‘nu-classification’, "
                    "‘one-classification’ (for novelty detection), "
                    "‘eps-regression’, "
                    "‘nu-regression’");
   DeclareOptionRef(fKernel, "Kernel", "the kernel used in training and predicting. You might "
                    "consider changing some of the following parameters, depending on the kernel type. "
                    "linear: u'*v ; "
                    "polynomial: (gamma*u'*v + coef0)^degree ; "
                    "radial basis: exp(-gamma*|u-v|^2) ; "
                    "sigmoid: tanh(gamma*u'*v + coef0)");
   DeclareOptionRef(fDegree, "Degree", "parameter needed for kernel of type ‘polynomial’ (default: 3)");
   DeclareOptionRef(fGamma, "Gamma", "parameter needed for all kernels except ‘linear’ (default: 1/(data dimension))");
   DeclareOptionRef(fCoef0, "Coef0", "parameter needed for kernels of type ‘polynomial’ and ‘sigmoid’ (default: 0)");
   DeclareOptionRef(fCost, "Cost", "cost of constraints violation (default: 1) - the ‘C’-constant of the regularization term in the Lagrange formulation");
   DeclareOptionRef(fNu, "Nu", "parameter needed for ‘nu-classification’, ‘nu-regression’ and ‘one-classification’");
   DeclareOptionRef(fCacheSize, "CacheSize", "cache memory in MB (default: 40)");
   DeclareOptionRef(fTolerance, "Tolerance", "tolerance of termination criterion (default: 0.001)");
   DeclareOptionRef(fEpsilon, "Epsilon", "epsilon in the insensitive-loss function (default: 0.1)");
   DeclareOptionRef(fShrinking, "Shrinking", "option whether to use the shrinking heuristics (default: TRUE)");
   DeclareOptionRef(fCross, "Cross", "if an integer value k>0 is specified, a k-fold cross validation on the training data is performed to assess the quality of the model: the accuracy rate for classification and the Mean Squared Error for regression");
   DeclareOptionRef(fProbability, "Probability", "logical indicating whether the model should allow for probability predictions");
   DeclareOptionRef(fFitted, "Fitted", "logical indicating whether the fitted values should be computed and included in the model or not (default: TRUE)");
}

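// Usage sketch (added for this write-up, not part of the original source): from a
// training macro the options declared above are passed through the usual TMVA
// option string. The factory, dataloader and file names below are placeholders.
//
//    TMVA::Tools::Instance();
//    auto outputFile = TFile::Open("TMVA_RSVM.root", "RECREATE");
//    TMVA::Factory factory("TMVAClassification", outputFile,
//                          "!V:!Silent:AnalysisType=Classification");
//    TMVA::DataLoader loader("dataset");
//    // ... AddVariable / AddSignalTree / AddBackgroundTree / PrepareTrainingAndTestTree ...
//    factory.BookMethod(&loader, TMVA::Types::kRSVM, "RSVM",
//                       "Kernel=radial:Type=C-classification:Gamma=0.25:Cost=1:Tolerance=0.001");
//    factory.TrainAllMethods();
//    factory.TestAllMethods();
//    factory.EvaluateAllMethods();
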
//_______________________________________________________________________
void MethodRSVM::ProcessOptions()
{
   r["RMVA.RSVM.Scale"] = fScale;
   r["RMVA.RSVM.Type"] = fType;
   r["RMVA.RSVM.Kernel"] = fKernel;
   r["RMVA.RSVM.Degree"] = fDegree;
   r["RMVA.RSVM.Gamma"] = fGamma;
   r["RMVA.RSVM.Coef0"] = fCoef0;
   r["RMVA.RSVM.Cost"] = fCost;
   r["RMVA.RSVM.Nu"] = fNu;
   r["RMVA.RSVM.CacheSize"] = fCacheSize;
   r["RMVA.RSVM.Tolerance"] = fTolerance;
   r["RMVA.RSVM.Epsilon"] = fEpsilon;
   r["RMVA.RSVM.Shrinking"] = fShrinking;
   r["RMVA.RSVM.Cross"] = fCross;
   r["RMVA.RSVM.Probability"] = fProbability;
   r["RMVA.RSVM.Fitted"] = fFitted;
}

//_______________________________________________________________________
void MethodRSVM::TestClassification()
{
   Log() << kINFO << "Testing Classification RSVM METHOD " << Endl;

   MethodBase::TestClassification();
}


//_______________________________________________________________________
Double_t MethodRSVM::GetMvaValue(Double_t *errLower, Double_t *errUpper)
{
   NoErrorCalc(errLower, errUpper);
   Double_t mvaValue;
   const TMVA::Event *ev = GetEvent();
   const UInt_t nvar = DataInfo().GetNVariables();
   ROOT::R::TRDataFrame fDfEvent;
   for (UInt_t i = 0; i < nvar; i++) {
      fDfEvent[DataInfo().GetListOfVariables()[i].Data()] = ev->GetValues()[i];
   }
   // if using persistence model
   if (IsModelPersistence()) ReadStateFromFile();

   ROOT::R::TRObject result = predict(*fModel, fDfEvent, ROOT::R::Label["decision.values"] = kTRUE, ROOT::R::Label["probability"] = kTRUE);
   TVectorD values = result.GetAttribute("decision.values");
   mvaValue = values[0]; // return the decision value of the single event
   return mvaValue;
}

////////////////////////////////////////////////////////////////////////////////
/// get all the MVA values for the events of the current Data type
std::vector<Double_t> MethodRSVM::GetMvaValues(Long64_t firstEvt, Long64_t lastEvt, Bool_t logProgress)
{
   Long64_t nEvents = Data()->GetNEvents();
   if (firstEvt > lastEvt || lastEvt > nEvents) lastEvt = nEvents;
   if (firstEvt < 0) firstEvt = 0;

   nEvents = lastEvt - firstEvt;

   UInt_t nvars = Data()->GetNVariables();

   // use timer
   Timer timer(nEvents, GetName(), kTRUE);
   if (logProgress)
      Log() << kINFO << Form("Dataset[%s] : ", DataInfo().GetName()) << "Evaluation of " << GetMethodName() << " on "
            << (Data()->GetCurrentType() == Types::kTraining ? "training" : "testing") << " sample (" << nEvents << " events)" << Endl;

   // fill the R data frame with the event data
   std::vector<std::vector<Float_t> > inputData(nvars);
   for (UInt_t i = 0; i < nvars; i++) {
      inputData[i] = std::vector<Float_t>(nEvents);
   }

   for (Int_t ievt = firstEvt; ievt < lastEvt; ievt++) {
      Data()->SetCurrentEvent(ievt);
      const TMVA::Event *e = Data()->GetEvent();
      assert(nvars == e->GetNVariables());
      for (UInt_t i = 0; i < nvars; i++) {
         inputData[i][ievt - firstEvt] = e->GetValue(i);
      }
   }

   ROOT::R::TRDataFrame evtData;
   for (UInt_t i = 0; i < nvars; i++) {
      evtData[DataInfo().GetListOfVariables()[i].Data()] = inputData[i];
   }
   // if using persistence model
   if (IsModelPersistence()) ReadStateFromFile();

   std::vector<Double_t> mvaValues(nEvents);

   ROOT::R::TRObject result = predict(*fModel, evtData, ROOT::R::Label["decision.values"] = kTRUE, ROOT::R::Label["probability"] = kTRUE);

   r["result"] << result;
   r << "v2 <- attr(result, \"probabilities\") ";
   int probSize = 0;
   r["length(v2)"] >> probSize;
   if (probSize > 0) {
      std::vector<Double_t> probValues = result.GetAttribute("probabilities");
      // probabilities are returned for both classes
      assert(probValues.size() == 2 * mvaValues.size());
      for (int i = 0; i < nEvents; ++i)
         // R stores matrices column-wise (as in Fortran)
         // and the signal probabilities are the second column
         mvaValues[i] = probValues[nEvents + i];
   }
   // otherwise fall back to the decision values
   else {
      Log() << kINFO << " : Probabilities are not available. Use decision values instead !" << Endl;
      std::vector<Double_t> probValues = result.GetAttribute("decision.values");
      mvaValues = probValues;
   }

   if (logProgress) {
      Log() << kINFO << Form("Dataset[%s] : ", DataInfo().GetName()) << "Elapsed time for evaluation of " << nEvents << " events: "
            << timer.GetElapsedTime() << " " << Endl;
   }

   return mvaValues;
}

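// Illustrative sketch (added for this write-up, not part of the original file):
// the per-class probabilities arrive here as a flat, column-major vector of an
// nEvents x 2 matrix, with the signal probabilities in the second column, hence
// the probValues[nEvents + i] lookup above. The hypothetical helper below just
// spells out that index arithmetic.
namespace {
   inline Double_t RSVMSignalProbability(const std::vector<Double_t> &probColumnMajor,
                                         Long64_t nEvents, Long64_t iEvent)
   {
      // column 0: background probabilities, column 1: signal probabilities
      return probColumnMajor[nEvents + iEvent];
   }
}
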
//_______________________________________________________________________
void MethodRSVM::ReadModelFromFile()
{
   ROOT::R::TRInterface::Instance().Require("e1071");
   TString path = GetWeightFileDir() + "/" + GetName() + ".RData";
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Loading State File From:" << gTools().Color("reset") << path << Endl;
   Log() << Endl;
   r << "load('" + path + "')";
   SEXP Model;
   r["RSVMModel"] >> Model;
   fModel = new ROOT::R::TRObject(Model);
}

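// A note added for this write-up (not in the original source): Train() stores the
// fitted e1071 model in an R workspace file next to the usual TMVA weight files,
// and the function above simply reverses that step. The saved object can also be
// inspected from a plain R session; the path below is only an example of what
// GetWeightFileDir() + "/" + GetName() + ".RData" expands to.
//
//    load("dataset/weights/RSVM.RData")   # restores the object named RSVMModel
//    summary(RSVMModel)                   # kernel, cost, number of support vectors, ...
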
//_______________________________________________________________________
void MethodRSVM::GetHelpMessage() const
{
   // get help message text
   //
   // typical length of text line:
   //         "|--------------------------------------------------------------|"
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Short description:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "Support Vector Machine from the R package e1071" << Endl;
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Performance optimisation:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Performance tuning via configuration options:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "<None>" << Endl;
}
