#include "TMVA/MethodHMatrix.h"
#include "TMVA/Tools.h"
#include "TMatrix.h"
#include "Riostream.h"
#include <algorithm>
#include <vector>
ClassImp(TMVA::MethodHMatrix);
//Begin_Html
/*
H-Matrix method, which is implemented as a simple comparison of
chi-squared estimators for signal and background, taking into
account the linear correlations between the input variables.
This MVA approach is used by the DØ collaboration (FNAL) for the
purpose of electron identification (see, e.g.,
<a href="http://arxiv.org/abs/hep-ex/9507007">hep-ex/9507007</a>).
As implemented in TMVA, it is usually equivalent to, or worse than,
the Fisher-Mahalanobis discriminant, and it has only been added for
the sake of completeness.
Two χ<sup>2</sup> estimators are computed for an event, one for the
signal and one for the background hypothesis, using the means and
covariance matrices estimated from the training sample:<br>
<center>
<img vspace=6 src="gif/tmva_chi2.gif" align="bottom" >
</center>
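Explicitly, the estimator computed for each class <i>U</i> = <i>S</i>, <i>B</i>
(this is the quantity evaluated in <tt>GetChi2</tt> below) is<br>
<center>
χ<sub>U</sub><sup>2</sup>(i) = ∑<sub>k,l</sub> ( x<sub>k</sub>(i) − m<sub>U,k</sub> ) ( C<sub>U</sub><sup>−1</sup> )<sub>kl</sub> ( x<sub>l</sub>(i) − m<sub>U,l</sub> ),
</center>
where <i>m<sub>U</sub></i> and <i>C<sub>U</sub></i> denote the class mean vector and covariance matrix
estimated from the training sample.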
TMVA then uses as normalised analyser for event (<i>i</i>) the ratio:
(<i>χ<sub>B</sub><sup>2</sup>(i) − χ<sub>S</sub><sup>2</sup>(i)</i>) /
(<i>χ<sub>S</sub><sup>2</sup>(i) + χ<sub>B</sub><sup>2</sup>(i)</i>).
*/
//End_Html
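//
// A typical way to book this method from a TMVA::Factory is sketched below;
// the factory pointer and the option string are illustrative only:
//
//    factory->BookMethod( TMVA::Types::kHMatrix, "HMatrix", "!V" );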
TMVA::MethodHMatrix::MethodHMatrix( TString jobName, TString methodTitle, DataSet& theData,
TString theOption, TDirectory* theTargetDir )
: TMVA::MethodBase( jobName, methodTitle, theData, theOption, theTargetDir )
{
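// standard constructor for the H-Matrix method: process the user options and initialise the method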
ProcessOptions();
InitHMatrix();
}
TMVA::MethodHMatrix::MethodHMatrix( DataSet & theData,
TString theWeightFile,
TDirectory* theTargetDir )
: TMVA::MethodBase( theData, theWeightFile, theTargetDir )
{
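// constructor used when reading the method from an existing weight file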
InitHMatrix();
}
void TMVA::MethodHMatrix::InitHMatrix( void )
{
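// default initialisation called by all constructors: allocate the inverse covariance
// matrices and the mean vectors for signal and background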
SetMethodName( "HMatrix" );
SetMethodType( TMVA::Types::kHMatrix );
SetTestvarName();
fNormaliseInputVars = kTRUE;
fInvHMatrixS = new TMatrixD( GetNvar(), GetNvar() );
fInvHMatrixB = new TMatrixD( GetNvar(), GetNvar() );
fVecMeanS = new TVectorD( GetNvar() );
fVecMeanB = new TVectorD( GetNvar() );
SetSignalReferenceCut( 0.0 );
}
TMVA::MethodHMatrix::~MethodHMatrix( void )
{
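// destructor: delete the inverse covariance matrices and the mean vectors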
if (NULL != fInvHMatrixS) delete fInvHMatrixS;
if (NULL != fInvHMatrixB) delete fInvHMatrixB;
if (NULL != fVecMeanS ) delete fVecMeanS;
if (NULL != fVecMeanB ) delete fVecMeanB;
}
void TMVA::MethodHMatrix::DeclareOptions()
{
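// define the configuration options: the H-Matrix method has no method-specific options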
}
void TMVA::MethodHMatrix::ProcessOptions()
{
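// process the configuration options: only the base-class options apply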
MethodBase::ProcessOptions();
}
void TMVA::MethodHMatrix::Train( void )
{
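// H-Matrix training: compute the signal and background mean vectors and the inverses
// of the corresponding covariance matrices from the training sample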
if (!CheckSanity()) fLogger << kFATAL << "<Train> sanity check failed" << Endl;
Double_t meanS, meanB, rmsS, rmsB, xmin, xmax;
for (Int_t ivar=0; ivar<GetNvar(); ivar++) {
Statistics( TMVA::Types::kTraining, (*fInputVars)[ivar],
meanS, meanB, rmsS, rmsB, xmin, xmax, fNormaliseInputVars );
(*fVecMeanS)(ivar) = meanS;
(*fVecMeanB)(ivar) = meanB;
}
this->ComputeCovariance( kTRUE, fInvHMatrixS );
this->ComputeCovariance( kFALSE, fInvHMatrixB );
if ( TMath::Abs(fInvHMatrixS->Determinant()) < 10E-24 ) {
fLogger << kWARNING << "<Train> H-matrix S is almost singular with determinant= "
<< TMath::Abs(fInvHMatrixS->Determinant())
<< "; did you use variables that are linear combinations or highly correlated?"
<< Endl;
}
if ( TMath::Abs(fInvHMatrixB->Determinant()) < 10E-24 ) {
fLogger << kWARNING << "<Train> H-matrix B is almost singular with determinant= "
<< TMath::Abs(fInvHMatrixB->Determinant())
<< "; did you use variables that are linear combinations or highly correlated?"
<< Endl;
}
if ( TMath::Abs(fInvHMatrixS->Determinant()) < 10E-120 ) {
fLogger << kFATAL << "<Train> H-matrix S is singular with determinant= "
<< TMath::Abs(fInvHMatrixS->Determinant())
<< "; did you use variables that are linear combinations?"
<< Endl;
}
if ( TMath::Abs(fInvHMatrixB->Determinant()) < 10E-120 ) {
fLogger << kFATAL << "<Train> H-matrix B is singular with determinant= "
<< TMath::Abs(fInvHMatrixB->Determinant())
<< "; did you use variables that are linear combinations?"
<< Endl;
}
fInvHMatrixS->Invert();
fInvHMatrixB->Invert();
}
void TMVA::MethodHMatrix::ComputeCovariance( Bool_t isSignal, TMatrixD* mat )
{
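// accumulate sums and sums of products of the (optionally normalised) input variables
// over the training events of the requested class, and fill 'mat' with the resulting
// covariance matrix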
UInt_t nvar = GetNvar(), ivar, jvar;
TVectorD vec(nvar);
TMatrixD mat2(nvar, nvar);
for (ivar=0; ivar<nvar; ivar++) {
vec(ivar) = 0;
for (jvar=0; jvar<nvar; jvar++) {
mat2(ivar, jvar) = 0;
}
}
Int_t ic = 0;
for (Int_t i=0; i<Data().GetNEvtTrain(); i++) {
ReadTrainingEvent(i);
if (Data().Event().IsSignal() == isSignal) {
ic++;
for (ivar=0; ivar<nvar; ivar++) {
Double_t xi = (fNormaliseInputVars) ? GetEventValNormalized(ivar) : GetEventVal(ivar);
vec(ivar) += xi;
mat2(ivar, ivar) += (xi*xi);
for (jvar=ivar+1; jvar<nvar; jvar++) {
Double_t xj = (fNormaliseInputVars) ? GetEventValNormalized(jvar) : GetEventVal(jvar);
mat2(ivar, jvar) += (xi*xj);
mat2(jvar, ivar) = mat2(ivar, jvar);
}
}
}
}
Double_t n = (Double_t)ic;
for (ivar=0; ivar<nvar; ivar++) {
for (jvar=0; jvar<nvar; jvar++) {
(*mat)(ivar, jvar) = mat2(ivar, jvar)/n - vec(ivar)*vec(jvar)/(n*n);
}
}
}
Double_t TMVA::MethodHMatrix::GetMvaValue()
{
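// return the H-Matrix discriminant for the current event: the normalised difference of
// the background and signal chi2 estimators, (chi2_B - chi2_S)/(chi2_S + chi2_B)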
Double_t s = GetChi2( Types::kSignal );
Double_t b = GetChi2( Types::kBackground );
if ((s+b) < 0) fLogger << kFATAL << "<GetMvaValue> negative sum of chi2 estimators: s+b = " << s+b << Endl;
return (b - s)/(s + b);
}
Double_t TMVA::MethodHMatrix::GetChi2( TMVA::Event *e, Types::ESBType type ) const
{
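// compute the chi2 estimator of the given event with respect to the signal or background hypothesis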
Int_t ivar,jvar;
std::vector<Double_t> val( GetNvar() );
for (ivar=0; ivar<GetNvar(); ivar++) {
val[ivar] = e->GetVal(ivar);
if (fNormaliseInputVars)
val[ivar] = __N__( val[ivar], GetXmin( ivar ), GetXmax( ivar ) );
}
Double_t chi2 = 0;
for (ivar=0; ivar<GetNvar(); ivar++) {
for (jvar=0; jvar<GetNvar(); jvar++) {
if (type == Types::kSignal)
chi2 += ( (val[ivar] - (*fVecMeanS)(ivar))*(val[jvar] - (*fVecMeanS)(jvar))
* (*fInvHMatrixS)(ivar,jvar) );
else
chi2 += ( (val[ivar] - (*fVecMeanB)(ivar))*(val[jvar] - (*fVecMeanB)(jvar))
* (*fInvHMatrixB)(ivar,jvar) );
}
}
if (chi2 < 0) fLogger << kFATAL << "<GetChi2> negative chi2: " << chi2 << Endl;
return chi2;
}
Double_t TMVA::MethodHMatrix::GetChi2( Types::ESBType type ) const
{
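// compute the chi2 estimator of the current event with respect to the signal or background hypothesis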
Int_t ivar,jvar;
std::vector<Double_t> val( GetNvar() );
for (ivar=0; ivar<GetNvar(); ivar++)
val[ivar] = fNormaliseInputVars?GetEventValNormalized(ivar):Data().Event().GetVal( ivar );
Double_t chi2 = 0;
for (ivar=0; ivar<GetNvar(); ivar++) {
for (jvar=0; jvar<GetNvar(); jvar++) {
if (type == Types::kSignal)
chi2 += ( (val[ivar] - (*fVecMeanS)(ivar))*(val[jvar] - (*fVecMeanS)(jvar))
* (*fInvHMatrixS)(ivar,jvar) );
else
chi2 += ( (val[ivar] - (*fVecMeanB)(ivar))*(val[jvar] - (*fVecMeanB)(jvar))
* (*fInvHMatrixB)(ivar,jvar) );
}
}
if (chi2 < 0) fLogger << kFATAL << "<GetChi2> negative chi2: " << chi2 << Endl;
return chi2;
}
void TMVA::MethodHMatrix::WriteWeightsToStream( ostream & o ) const
{
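// write the variable names and ranges, the mean vectors, and the inverse H-matrices to the output stream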
Int_t ivar,jvar;
o << this->GetMethodName() << endl;
o << "NVars= " << GetNvar() << endl;
for (ivar=0; ivar<GetNvar(); ivar++) {
TString var = (*fInputVars)[ivar];
o << var << " " << GetXmin( var ) << " " << GetXmax( var ) << endl;
}
for (ivar=0; ivar<GetNvar(); ivar++) {
o << (*fVecMeanS)(ivar) << " " << (*fVecMeanB)(ivar) << endl;
}
for (ivar=0; ivar<GetNvar(); ivar++) {
for (jvar=0; jvar<GetNvar(); jvar++) {
o << (*fInvHMatrixS)(ivar,jvar) << " ";
}
o << endl;
}
for (ivar=0; ivar<GetNvar(); ivar++) {
for (jvar=0; jvar<GetNvar(); jvar++) {
o << (*fInvHMatrixB)(ivar,jvar) << " ";
}
o << endl;
}
}
void TMVA::MethodHMatrix::ReadWeightsFromStream( istream & istr )
{
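// read the variable names and ranges, the mean vectors, and the inverse H-matrices from the weight-file stream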
Int_t ivar,jvar;
TString var, dummy;
Double_t xmin, xmax;
Int_t dummyInt;
istr >> dummy;
this->SetMethodName(dummy);
istr >> dummy >> dummyInt; SetNvar(dummyInt);
for (ivar=0; ivar<GetNvar(); ivar++) {
istr >> var >> xmin >> xmax;
if (var != (*fInputVars)[ivar]) {
fLogger << kFATAL << "error while reading weight file; "
<< "unknown variable: " << var << " at position: " << ivar << ". "
<< "Expected variable: " << (*fInputVars)[ivar] << Endl;
}
Data().SetXmin( ivar, xmin );
Data().SetXmax( ivar, xmax );
}
for (ivar=0; ivar<GetNvar(); ivar++)
istr >> (*fVecMeanS)(ivar) >> (*fVecMeanB)(ivar);
for (ivar=0; ivar<GetNvar(); ivar++)
for (jvar=0; jvar<GetNvar(); jvar++)
istr >> (*fInvHMatrixS)(ivar,jvar);
for (ivar=0; ivar<GetNvar(); ivar++)
for (jvar=0; jvar<GetNvar(); jvar++)
istr >> (*fInvHMatrixB)(ivar,jvar);
}