ROOT 6.12/07 Reference Guide — TMVAClassification_LD.class.C
(Generated documentation page; go to the documentation of this file.)
1 // Class: ReadLD
2 // Automatically generated by MethodBase::MakeClass
3 //
4 
5 /* configuration options =====================================================
6 
7 #GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-
8 
9 Method : LD::LD
10 TMVA Release : 4.2.1 [262657]
11 ROOT Release : 6.12/07 [396295]
12 Creator : sftnight
13 Date : Sat Sep 29 23:25:13 2018
14 Host : Linux ec-ubuntu-14-04-x86-64-2 3.13.0-157-generic #207-Ubuntu SMP Mon Aug 20 16:44:59 UTC 2018 x86_64 x86_64 x86_64 GNU/Linux
15 Dir : /mnt/build/workspace/root-makedoc-v612/rootspi/rdoc/src/v6-12-00-patches/documentation/doxygen
16 Training events: 2000
17 Analysis type : [Classification]
18 
19 
20 #OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-
21 
22 # Set by User:
23 V: "False" [Verbose output (short form of "VerbosityLevel" below - overrides the latter one)]
24 VarTransform: "None" [List of variable transformations performed before training, e.g., "D_Background,P_Signal,G,N_AllClasses" for: "Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)"]
25 H: "True" [Print method-specific help message]
26 CreateMVAPdfs: "True" [Create PDFs for classifier outputs (signal and background)]
27 # Default:
28 VerbosityLevel: "Default" [Verbosity level]
29 IgnoreNegWeightsInTraining: "False" [Events with negative weights are ignored in the training (but are included for testing and performance evaluation)]
30 ##
31 
32 
33 #VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-
34 
35 NVar 4
36 var1+var2 myvar1 myvar1 myvar1 'F' [-8.14423561096,7.26972866058]
37 var1-var2 myvar2 myvar2 Expression 2 'F' [-3.96643972397,4.0258936882]
38 var3 var3 var3 Variable 3 units 'F' [-5.03730010986,4.27845287323]
39 var4 var4 var4 Variable 4 units 'F' [-5.95050764084,4.64035463333]
40 NSpec 2
41 var1*2 spec1 spec1 Spectator 1 units 'F' [-9.91655540466,8.67800140381]
42 var1*3 spec2 spec2 Spectator 2 units 'F' [-14.874833107,13.0170021057]
43 
44 
45 ============================================================================ */
46 
47 #include <array>
48 #include <vector>
49 #include <cmath>
50 #include <string>
51 #include <iostream>
52 
53 #ifndef IClassifierReader__def
54 #define IClassifierReader__def
55 
56 class IClassifierReader {
57 
58  public:
59 
60  // constructor
61  IClassifierReader() : fStatusIsClean( true ) {}
62  virtual ~IClassifierReader() {}
63 
64  // return classifier response
65  virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;
66 
67  // returns classifier status
68  bool IsStatusClean() const { return fStatusIsClean; }
69 
70  protected:
71 
72  bool fStatusIsClean;
73 };
74 
75 #endif
76 
77 class ReadLD : public IClassifierReader {
78 
79  public:
80 
81  // constructor
82  ReadLD( std::vector<std::string>& theInputVars )
83  : IClassifierReader(),
84  fClassName( "ReadLD" ),
85  fNvars( 4 ),
86  fIsNormalised( false )
87  {
88  // the training input variables
89  const char* inputVars[] = { "var1+var2", "var1-var2", "var3", "var4" };
90 
91  // sanity checks
92  if (theInputVars.size() <= 0) {
93  std::cout << "Problem in class \"" << fClassName << "\": empty input vector" << std::endl;
94  fStatusIsClean = false;
95  }
96 
97  if (theInputVars.size() != fNvars) {
98  std::cout << "Problem in class \"" << fClassName << "\": mismatch in number of input values: "
99  << theInputVars.size() << " != " << fNvars << std::endl;
100  fStatusIsClean = false;
101  }
102 
103  // validate input variables
104  for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {
105  if (theInputVars[ivar] != inputVars[ivar]) {
106  std::cout << "Problem in class \"" << fClassName << "\": mismatch in input variable names" << std::endl
107  << " for variable [" << ivar << "]: " << theInputVars[ivar].c_str() << " != " << inputVars[ivar] << std::endl;
108  fStatusIsClean = false;
109  }
110  }
111 
112  // initialize min and max vectors (for normalisation)
113  fVmin[0] = 0;
114  fVmax[0] = 0;
115  fVmin[1] = 0;
116  fVmax[1] = 0;
117  fVmin[2] = 0;
118  fVmax[2] = 0;
119  fVmin[3] = 0;
120  fVmax[3] = 0;
121 
122  // initialize input variable types
123  fType[0] = 'F';
124  fType[1] = 'F';
125  fType[2] = 'F';
126  fType[3] = 'F';
127 
128  // initialize constants
129  Initialize();
130 
131  }
132 
133  // destructor
134  virtual ~ReadLD() {
135  Clear(); // method-specific
136  }
137 
138  // the classifier response
139  // "inputValues" is a vector of input values in the same order as the
140  // variables given to the constructor
141  double GetMvaValue( const std::vector<double>& inputValues ) const;
142 
143  private:
144 
145  // method-specific destructor
146  void Clear();
147 
148  // common member variables
149  const char* fClassName;
150 
151  const size_t fNvars;
152  size_t GetNvar() const { return fNvars; }
153  char GetType( int ivar ) const { return fType[ivar]; }
154 
155  // normalisation of input variables
156  const bool fIsNormalised;
157  bool IsNormalised() const { return fIsNormalised; }
158  double fVmin[4];
159  double fVmax[4];
160  double NormVariable( double x, double xmin, double xmax ) const {
161  // normalise to output range: [-1, 1]
162  return 2*(x - xmin)/(xmax - xmin) - 1.0;
163  }
164 
165  // type of input variable: 'F' or 'I'
166  char fType[4];
167 
168  // initialize internal variables
169  void Initialize();
170  double GetMvaValue__( const std::vector<double>& inputValues ) const;
171 
172  // private members (method specific)
173  std::vector<double> fLDCoefficients;
174 };
175 
176 inline void ReadLD::Initialize()
177 {
178  fLDCoefficients.push_back( -0.0555695127335 );
179  fLDCoefficients.push_back( -0.32641367671 );
180  fLDCoefficients.push_back( -0.0803002889612 );
181  fLDCoefficients.push_back( -0.194829607347 );
182  fLDCoefficients.push_back( 0.757665927943 );
183 
184  // sanity check
185  if (fLDCoefficients.size() != fNvars+1) {
186  std::cout << "Problem in class \"" << fClassName << "\"::Initialize: mismatch in number of input values"
187  << fLDCoefficients.size() << " != " << fNvars+1 << std::endl;
188  fStatusIsClean = false;
189  }
190 }
191 
192 inline double ReadLD::GetMvaValue__( const std::vector<double>& inputValues ) const
193 {
194  double retval = fLDCoefficients[0];
195  for (size_t ivar = 1; ivar < fNvars+1; ivar++) {
196  retval += fLDCoefficients[ivar]*inputValues[ivar-1];
197  }
198 
199  return retval;
200 }
201 
202 // Clean up
203 inline void ReadLD::Clear()
204 {
205  // clear coefficients
206  fLDCoefficients.clear();
207 }
208  inline double ReadLD::GetMvaValue( const std::vector<double>& inputValues ) const
209  {
210  // classifier response value
211  double retval = 0;
212 
213  // classifier response, sanity check first
214  if (!IsStatusClean()) {
215  std::cout << "Problem in class \"" << fClassName << "\": cannot return classifier response"
216  << " because status is dirty" << std::endl;
217  retval = 0;
218  }
219  else {
220  if (IsNormalised()) {
221  // normalise variables
222  std::vector<double> iV;
223  iV.reserve(inputValues.size());
224  int ivar = 0;
225  for (std::vector<double>::const_iterator varIt = inputValues.begin();
226  varIt != inputValues.end(); varIt++, ivar++) {
227  iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));
228  }
229  retval = GetMvaValue__( iV );
230  }
231  else {
232  retval = GetMvaValue__( inputValues );
233  }
234  }
235 
236  return retval;
237  }
Cross-references (doxygen hover definitions):
- float xmin — Definition: THbookFile.cxx:93
- Type GetType(const std::string &Name) — Definition: Systematics.cxx:34
- Double_t x[n] — Definition: legend1.C:17
- void Initialize(Bool_t useTMVAStyle=kTRUE) — Definition: tmvaglob.cxx:176
- float xmax — Definition: THbookFile.cxx:93
- PyObject * fType