using std::stringstream;
 
, fSVKernelFunction(0)
, fDoubleSigmaSquared(0)

, fSVKernelFunction(0)
, fDoubleSigmaSquared(0)
 
 
fSupportVectors->clear();

if (fWgSet !=0)           { delete fWgSet; fWgSet=0; }
if (fSVKernelFunction !=0 ) { delete fSVKernelFunction; fSVKernelFunction = 0; }
 
 
fSupportVectors->clear();

if (fWgSet !=0)           { fWgSet=0; }
if (fSVKernelFunction !=0 ) { fSVKernelFunction = 0; }

Data()->DeleteResults(GetMethodName(), Types::kTraining, GetAnalysisType());

Log() << kDEBUG << " successfully(?) reset the method " << Endl;
 
 
SetNormalised( kTRUE );

fInputData      = new std::vector<TMVA::SVEvent*>(0);
fSupportVectors = new std::vector<TMVA::SVEvent*>(0);
 
 
DeclareOptionRef( fTheKernel = "RBF", "Kernel", "Pick which kernel ( RBF or MultiGauss )");

DeclareOptionRef( fGamma = 1., "Gamma", "RBF kernel parameter: Gamma (size of the Kernel)");

DeclareOptionRef( fOrder = 3, "Order", "Polynomial Kernel parameter: polynomial order");
DeclareOptionRef( fTheta = 1., "Theta", "Polynomial Kernel parameter: polynomial theta");

DeclareOptionRef( fGammas = "", "GammaList", "MultiGauss parameters" );

DeclareOptionRef( fTune = "All", "Tune", "Tune Parameters");

DeclareOptionRef( fMultiKernels = "None", "KernelList", "Sum or product of kernels");
DeclareOptionRef( fLoss = "hinge", "Loss", "Loss function");

DeclareOptionRef( fCost,   "C",        "Cost parameter" );
if (DoRegression()) {

DeclareOptionRef( fTolerance = 0.01, "Tol",      "Tolerance parameter" );
DeclareOptionRef( fMaxIter   = 1000, "MaxIter",  "Maximum number of training loops" );
 
 
DeclareOptionRef( fNSubSets  = 1,    "NSubSets", "Number of training subsets" );
DeclareOptionRef( fTheKernel = "Gauss", "Kernel", "Uses kernel function");

DeclareOptionRef( fDoubleSigmaSquared = 2., "Sigma", "Kernel parameter: sigma");

DeclareOptionRef( fOrder = 3, "Order", "Polynomial Kernel parameter: polynomial order");

DeclareOptionRef( fTheta = 1., "Theta", "Sigmoid Kernel parameter: theta");
DeclareOptionRef( fKappa = 1., "Kappa", "Sigmoid Kernel parameter: kappa");
 
 
if (IgnoreEventsWithNegWeightsInTraining()) {

   Log() << kFATAL << "Mechanism to ignore events with negative weights in training not yet available for method: "
         << GetMethodTypeName()
         << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
 
fIPyMaxIter = fMaxIter;

Log() << kDEBUG << "Create event vector" << Endl;

fDataSize = Data()->GetNEvents();

if (GetEvent(ievnt)->GetWeight() != 0){
   if (DataInfo().IsSignal(GetEvent(ievnt))){
 
if( fTheKernel == "RBF"){

else if( fTheKernel == "MultiGauss" ){

   if(fmGamma.size()!=0){ GetMGamma(fmGamma); }

      fmGamma.push_back(1.0);

else if( fTheKernel == "Polynomial" ){

else if( fTheKernel == "Prod" ){

   if(fmGamma.size()!=0){ GetMGamma(fmGamma); }

else if( fTheKernel == "Sum" ){

   if(fmGamma.size()!=0){ GetMGamma(fmGamma); }

   Log() << kWARNING << fTheKernel << " is not a recognised kernel function." << Endl;
 
Log() << kINFO << "Building SVM Working Set...with " << fInputData->size() << " event instances" << Endl;

Log() << kINFO << "Elapsed time for Working Set build: " << bldwstime.GetElapsedTime() << Endl;

Log() << kINFO << "Sorry, no computing time forecast available for SVM, please wait ..." << Endl;

if (fInteractive) fWgSet->SetIPythonInteractive(&fExitFromTraining, &fIPyCurrentIter);

fWgSet->Train(fMaxIter);

Log() << kINFO << "Elapsed time: " << timer.GetElapsedTime()

fBparm          = fWgSet->GetBpar();
fSupportVectors = fWgSet->GetSupportVectors();

if (!fExitFromTraining) fIPyMaxIter = fIPyCurrentIter;
 
 
for (std::vector<TMVA::SVEvent*>::iterator veciter=fSupportVectors->begin();

   temp[0] = (*veciter)->GetNs();
   temp[1] = (*veciter)->GetTypeFlag();
   temp[2] = (*veciter)->GetAlpha();
   temp[3] = (*veciter)->GetAlpha_p();

      temp[ivar+4] = (*(*veciter)->GetDataVector())[ivar];
 
 
std::vector<Float_t>* svector = new std::vector<Float_t>(GetNvar());

if (fMaxVars!=0) delete fMaxVars;
fMaxVars = new TVectorD( GetNvar() );
if (fMinVars!=0) delete fMinVars;
fMinVars = new TVectorD( GetNvar() );
if (fSupportVectors!=0) {
   for (vector< SVEvent* >::iterator it = fSupportVectors->begin(); it!=fSupportVectors->end(); ++it)

   delete fSupportVectors;

fSupportVectors = new std::vector<TMVA::SVEvent*>(0);
 
if (fSVKernelFunction!=0) delete fSVKernelFunction;
if( fTheKernel == "RBF" ){

else if( fTheKernel == "MultiGauss" ){
   SetMGamma(fGammaList);

else if( fTheKernel == "Polynomial" ){

else if( fTheKernel == "Prod" ){
   SetMGamma(fGammaList);

else if( fTheKernel == "Sum" ){
   SetMGamma(fGammaList);

   Log() << kWARNING << fTheKernel << " is not a recognised kernel function." << Endl;
 
 
if (fSupportVectors !=0) { delete fSupportVectors; fSupportVectors = 0; }
fSupportVectors = new std::vector<TMVA::SVEvent*>(0);

fSupportVectors->reserve(fNsupv);

std::vector<Float_t>* svector = new std::vector<Float_t>(GetNvar());

fMaxVars = new TVectorD( GetNvar() );
fMinVars = new TVectorD( GetNvar() );

delete fSVKernelFunction;
if (fTheKernel == "Gauss" ) {

      Log() << kFATAL << "Unknown kernel function found in weight file!" << Endl;

   fSVKernelFunction->setCompatibilityParams(k, fOrder, fTheta, fKappa);
 
 
myMVA += ( fSupportVectors->at(ievt)->GetAlpha()
           * fSupportVectors->at(ievt)->GetTypeFlag()
           * fSVKernelFunction->Evaluate( fSupportVectors->at(ievt), ev ) );
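Rejoined, this accumulation is the standard SVM decision function. As a sketch in conventional notation (assuming GetTypeFlag() returns the class label y_i in {-1,+1}, and with b the bias fBparm handled after the loop):

   f(x) = \sum_{i \in SV} \alpha_i \, y_i \, K(x_i, x) - b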
 
 
if( fRegressionReturnVal == NULL )
   fRegressionReturnVal = new std::vector<Float_t>();
fRegressionReturnVal->clear();

myMVA += ( fSupportVectors->at(ievt)->GetDeltaAlpha()
           * fSVKernelFunction->Evaluate( fSupportVectors->at(ievt), ev ) );

const Event* evT2 = GetTransformationHandler().InverseTransform( evT );

fRegressionReturnVal->push_back(evT2->GetTarget(0));

return *fRegressionReturnVal;
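The regression path differs only in the coefficients: GetDeltaAlpha() plays the role of the difference of the two Lagrange multipliers of epsilon-insensitive SVR, so the estimate has the familiar form (a sketch; the sign convention for the bias fBparm follows the code above):

   f(x) = \sum_{i \in SV} (\alpha_i - \alpha_i^*) \, K(x_i, x) + b

The raw value is then pushed through GetTransformationHandler().InverseTransform() so the returned target lives on the original scale.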
 
 
const int fNsupv = fSupportVectors->size();
fout << "   // not implemented for class: \"" << className << "\"" << std::endl;
fout << "   float        fBparameter;" << std::endl;
fout << "   int          fNOfSuppVec;" << std::endl;
fout << "   static float fAllSuppVectors[][" << fNsupv << "];" << std::endl;
fout << "   static float fAlphaTypeCoef[" << fNsupv << "];" << std::endl;

fout << "   // Kernel parameter(s) " << std::endl;
fout << "   float fGamma;"  << std::endl;
fout << "};" << std::endl;
fout << "" << std::endl;

fout << "inline void " << className << "::Initialize() " << std::endl;
fout << "{" << std::endl;
fout << "   fBparameter = " << fBparm << ";" << std::endl;
fout << "   fNOfSuppVec = " << fNsupv << ";" << std::endl;
fout << "   fGamma = " << fGamma << ";" << std::endl;
fout << "}" << std::endl;

fout << "inline double " << className << "::GetMvaValue__(const std::vector<double>& inputValues ) const" << std::endl;
fout << "{" << std::endl;
fout << "   double mvaval = 0; " << std::endl;
fout << "   double temp = 0; " << std::endl;

fout << "   for (int ievt = 0; ievt < fNOfSuppVec; ievt++ ){" << std::endl;
fout << "      temp = 0;" << std::endl;
fout << "      for ( unsigned int ivar = 0; ivar < GetNvar(); ivar++ ) {" << std::endl;

fout << "         temp += (fAllSuppVectors[ivar][ievt] - inputValues[ivar])  " << std::endl;
fout << "               * (fAllSuppVectors[ivar][ievt] - inputValues[ivar]); " << std::endl;
fout << "      }" << std::endl;
fout << "      mvaval += fAlphaTypeCoef[ievt] * exp( -fGamma * temp ); " << std::endl;

fout << "   }" << std::endl;
fout << "   mvaval -= fBparameter;" << std::endl;
fout << "   return 1./(1. + exp(mvaval));" << std::endl;
fout << "}" << std::endl;
fout << "// Clean up" << std::endl;
fout << "inline void " << className << "::Clear() " << std::endl;
fout << "{" << std::endl;
fout << "   // nothing to clear " << std::endl;
fout << "}" << std::endl;
fout << "" << std::endl;

fout << "float " << className << "::fAlphaTypeCoef[] =" << std::endl;

fout << fSupportVectors->at(isv)->GetDeltaAlpha() * fSupportVectors->at(isv)->GetTypeFlag();

fout << " };" << std::endl << std::endl;

fout << "float " << className << "::fAllSuppVectors[][" << fNsupv << "] =" << std::endl;

fout << fSupportVectors->at(isv)->GetDataVector()->at(ivar);

if (ivar < GetNvar()-1) fout << ", " << std::endl;
else                    fout << std::endl;

fout << "};" << std::endl << std::endl;
 
 
Log() << "The Support Vector Machine (SVM) builds a hyperplane separating" << Endl;
Log() << "signal and background events (vectors) using the minimal subset of " << Endl;
Log() << "all vectors used for training (support vectors). The extension to" << Endl;
Log() << "the non-linear case is performed by mapping input vectors into a " << Endl;
Log() << "higher-dimensional feature space in which linear separation is " << Endl;
Log() << "possible. The use of the kernel functions thereby eliminates the " << Endl;
Log() << "explicit transformation to the feature space. The implemented SVM " << Endl;
Log() << "algorithm performs the classification tasks using linear, polynomial, " << Endl;
Log() << "Gaussian and sigmoidal kernel functions. The Gaussian kernel allows " << Endl;
Log() << "the application of any discriminant shape in the input space." << Endl;
 
Log() << "SVM is a general purpose non-linear classification method, which " << Endl;
Log() << "does not require data preprocessing like decorrelation or Principal " << Endl;
Log() << "Component Analysis. It generalises quite well and can handle analyses " << Endl;
Log() << "with large numbers of input variables." << Endl;
 
Log() << "Optimal performance requires primarily a proper choice of the kernel " << Endl;
Log() << "parameters (the width \"Sigma\" in case of Gaussian kernel) and the" << Endl;
Log() << "cost parameter \"C\". The user must optimise them empirically by running" << Endl;
Log() << "SVM several times with different parameter sets. The time needed for " << Endl;
Log() << "each evaluation scales like the square of the number of training " << Endl;
Log() << "events so that a coarse preliminary tuning should be performed on " << Endl;
Log() << "reduced data sets." << Endl;
 
 
std::map< TString,std::vector<Double_t> > optVars;

std::map< TString,std::vector<Double_t> >::iterator iter;

Log() << kINFO << "Using the " << fTheKernel << " kernel." << Endl;

if( fTheKernel == "RBF" ){

   if( iter->first == "Gamma" || iter->first == "C"){
      tuneParameters.insert(std::pair<TString,Interval*>(iter->first, new Interval(iter->second.at(0),iter->second.at(1),iter->second.at(2))));

      Log() << kWARNING << iter->first << " is not a recognised tuneable parameter." << Endl;

else if( fTheKernel == "Polynomial" ){

   if( iter->first == "Theta" || iter->first == "C"){
      tuneParameters.insert(std::pair<TString,Interval*>(iter->first, new Interval(iter->second.at(0),iter->second.at(1),iter->second.at(2))));
   else if( iter->first == "Order"){
      tuneParameters.insert(std::pair<TString,Interval*>(iter->first, new Interval(iter->second.at(0),iter->second.at(1),iter->second.at(2))));

      Log() << kWARNING << iter->first << " is not a recognised tuneable parameter." << Endl;
 
else if( fTheKernel == "MultiGauss" ){

   for(int i=0; i<fNumVars; i++){

      s << fVarNames.at(i);
      string str = "Gamma_" + s.str();

   if( iter->first == "GammaList"){
      for(int j=0; j<fNumVars; j++){

         s << fVarNames.at(j);
         string str = "Gamma_" + s.str();
         tuneParameters.insert(std::pair<TString,Interval*>(str, new Interval(iter->second.at(0),iter->second.at(1),iter->second.at(2))));

   else if( iter->first == "C"){
      tuneParameters.insert(std::pair<TString,Interval*>(iter->first, new Interval(iter->second.at(0),iter->second.at(1),iter->second.at(2))));

      Log() << kWARNING << iter->first << " is not a recognised tuneable parameter." << Endl;
 
else if( fTheKernel == "Prod" ){

   else if(value == "MultiGauss"){
      for(int i=0; i<fNumVars; i++){

         s << fVarNames.at(i);
         string str = "Gamma_" + s.str();

   else if(value == "Polynomial"){

      Log() << kWARNING << value << " is not a recognised kernel function." << Endl;

else if( fTheKernel == "Sum" ){

   else if(value == "MultiGauss"){
      for(int i=0; i<fNumVars; i++){

         s << fVarNames.at(i);
         string str = "Gamma_" + s.str();

   else if(value == "Polynomial"){

      Log() << kWARNING << value << " is not a recognised kernel function." << Endl;

   Log() << kWARNING << fTheKernel << " is not a recognised kernel function." << Endl;

Log() << kINFO << " the following SVM parameters will be tuned on the respective *grid*\n" << Endl;

std::map<TString,TMVA::Interval*>::iterator it;

   Log() << kWARNING << it->first << Endl;
   std::ostringstream oss;
 
 
std::map<TString,Double_t>::iterator it;
if( fTheKernel == "RBF" ){

   Log() << kWARNING << it->first << " = " << it->second << Endl;
   if (it->first == "Gamma"){
      SetGamma (it->second);

   else if(it->first == "C"){
      SetCost (it->second);

      Log() << kFATAL << " SetParameter for " << it->first << " not implemented " << Endl;

else if( fTheKernel == "MultiGauss" ){

   for(int i=0; i<fNumVars; i++){

      s << fVarNames.at(i);
      string str = "Gamma_" + s.str();

   if (it->first == "C"){
      Log() << kWARNING << it->first << " = " << it->second << Endl;
 
else if( fTheKernel == "Polynomial" ){

   Log() << kWARNING << it->first << " = " << it->second << Endl;
   if (it->first == "Order"){
      SetOrder(it->second);

   else if (it->first == "Theta"){
      SetTheta(it->second);

   else if(it->first == "C"){ SetCost (it->second);

   else if(it->first == "Mult"){

      Log() << kFATAL << " SetParameter for " << it->first << " not implemented " << Endl;

else if( fTheKernel == "Prod" || fTheKernel == "Sum"){

   Log() << kWARNING << it->first << " = " << it->second << Endl;
   for(int i=0; i<fNumVars; i++){

      s << fVarNames.at(i);
      string str = "Gamma_" + s.str();
      if(it->first == str){
         fmGamma.push_back(it->second);

   if (it->first == "Gamma"){
      SetGamma (it->second);

   else if (it->first == "Order"){
      SetOrder (it->second);

   else if (it->first == "Theta"){
      SetTheta (it->second);

   else if (it->first == "C"){ SetCost (it->second);
      SetCost (it->second);

         Log() << kFATAL << " SetParameter for " << it->first << " not implemented " << Endl;

   Log() << kWARNING << fTheKernel << " is not a recognised kernel function." << Endl;
 
 
fmGamma.push_back(value);

if(i!=(gammas.size()-1)){
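The two fragments above come from SetMGamma and GetMGamma, which convert between the GammaList option string and the fmGamma vector. Below is a minimal self-contained sketch of that round trip; the function names are illustrative, not the class members, but the comma-separated convention and the last-element check mirror the fragments.

#include <sstream>
#include <string>
#include <vector>

// Sketch: parse "0.5,0.7,1.0" into a vector of gammas.
std::vector<float> parseGammaList(const std::string& mg)
{
   std::vector<float> gammas;
   std::stringstream in(mg);
   float value;
   while (in >> value) {                 // read one gamma
      gammas.push_back(value);
      if (in.peek() == ',') in.ignore(); // skip the delimiter
   }
   return gammas;
}

// Sketch: serialise the vector back into the option-string form.
std::string makeGammaList(const std::vector<float>& gammas)
{
   std::ostringstream out;
   for (size_t i = 0; i < gammas.size(); ++i) {
      out << gammas[i];
      if (i != gammas.size() - 1) out << ",";  // same check as in the fragment above
   }
   return out.str();
}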
 
 
std::vector<TMVA::SVKernelFunction::EKernelType> kernelsList;

   else if(value == "MultiGauss"){

      Log() << kWARNING << value << " is not a recognised kernel function." << Endl;

   else if(value == "MultiGauss"){

      Log() << kWARNING << value << " is not a recognised kernel function." << Endl;

   Log() << kWARNING << "Unable to split MultiKernels. Delimiters */+ required." << Endl;
 
 
std::map< TString,std::vector<Double_t> > optVars;

unsigned first = value.find('[')+1;
unsigned last  = value.find_last_of(']');

std::stringstream strNew (value.substr(first,last-first));

std::vector<Double_t> tempVec;

if (strNew.peek() == ';'){

if(i != 3 && i == tempVec.size()){

      Log() << kWARNING << optParam << " is not a recognised tuneable parameter." << Endl;
 
 
if(DataInfo().IsSignal(ev)){

      Log() << kWARNING << lossFunction << " is not a recognised loss function." << Endl;

      Log() << kWARNING << lossFunction << " is not a recognised loss function." << Endl;
 
 