// Standard constructor for the "Fisher" method
TMVA::MethodFisher::MethodFisher( const TString& jobName,
                                  const TString& methodTitle,
                                  DataSetInfo& dsi,
                                  const TString& theOption,
                                  TDirectory* theTargetDir ) :
   MethodBase( jobName, Types::kFisher, methodTitle, dsi, theOption, theTargetDir ),
   // ...
   fTheMethod    ( "Fisher" ),
   fFisherMethod ( kFisher ),
   // ...
// Second constructor (set-up from a weight file): same defaults in the initialiser list
   fTheMethod    ( "Fisher" ),
   fFisherMethod ( kFisher ),
   // ...
// Init(): default initialization called by all constructors
   fFisherCoeff = new std::vector<Double_t>( GetNvar() );
   // ...
   // the minimum requirement to declare an event signal-like
   SetSignalReferenceCut( 0.0 );
// DeclareOptions(): the option "Method" selects the discrimination method,
// either "Fisher" or "Mahalanobis"
   DeclareOptionRef( fTheMethod = "Fisher", "Method", "Discrimination method" );
   AddPreDefVal( TString("Fisher") );
   AddPreDefVal( TString("Mahalanobis") );

// ProcessOptions(): process user options
   if (fTheMethod == "Fisher") fFisherMethod = kFisher;
   else                        fFisherMethod = kMahalanobis;
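
For context, here is a minimal booking sketch showing how the "Method" option declared above is typically passed in. The helper name and the pre-configured factory/loader are illustrative assumptions; in older ROOT releases BookMethod() is called without the DataLoader argument.

#include "TMVA/Factory.h"
#include "TMVA/DataLoader.h"
#include "TMVA/Types.h"

// Illustrative helper: book a Fisher (or Mahalanobis) discriminant.
// 'factory' and 'loader' are assumed to have been configured elsewhere.
void BookFisherSketch( TMVA::Factory* factory, TMVA::DataLoader* loader )
{
   factory->BookMethod( loader, TMVA::Types::kFisher, "Fisher",
                        "!H:!V:Method=Fisher" );   // or "Method=Mahalanobis"
}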
   if (fBetw       ) { delete fBetw;        fBetw        = 0; }
   if (fWith       ) { delete fWith;        fWith        = 0; }
   if (fCov        ) { delete fCov;         fCov         = 0; }
   if (fDiscrimPow ) { delete fDiscrimPow;  fDiscrimPow  = 0; }
   if (fFisherCoeff) { delete fFisherCoeff; fFisherCoeff = 0; }
// Train(): computation of the Fisher coefficients by a series of matrix operations
   // ...
   // covariance of the events around the mean of their own class
   GetCov_WithinClass();
   // ...
   // covariance of the class means around the overall mean
   GetCov_BetweenClass();
   // ...
// GetMvaValue(): returns the Fisher value (no fixed range)
   const Event* ev = GetEvent();
   // ...
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++)
      result += (*fFisherCoeff)[ivar]*ev->GetValue(ivar);
   // ...
   // cannot determine error
   NoErrorCalc(err, errUpper);
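
The value returned is the linear form below; the offset is folded in as well, as the standalone code emitted by MakeClassSpecific() further down makes explicit:

\[
  y_{\mathrm{Fi}}(\mathbf{x}) \;=\; F_0 \;+\; \sum_{i=1}^{N_{\mathrm{var}}} c_i\, x_i
\]

where the c_i are the entries of fFisherCoeff and F_0 is the offset fF0 computed in GetFisherCoeff().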
// InitMatrices(): create global matrices and vectors
   // average value of each variable for signal (column 0), background (column 1) and both (column 2)
   fMeanMatx = new TMatrixD( GetNvar(), 3 );
   // ...
   // the covariance matrices
   fBetw = new TMatrixD( GetNvar(), GetNvar() );
   fWith = new TMatrixD( GetNvar(), GetNvar() );
   fCov  = new TMatrixD( GetNvar(), GetNvar() );
   // ...
   // discriminating power
   fDiscrimPow = new std::vector<Double_t>( GetNvar() );
// GetMean(): compute the mean values of the variables in each sample, and the overall means
   const UInt_t nvar = DataInfo().GetNVariables();
   // ...
   for (UInt_t ivar=0; ivar<nvar; ivar++) { sumS[ivar] = sumB[ivar] = 0; }
   // ...
   for (Int_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {

      // read the training event
      const Event* ev = GetEvent(ievt);
      Double_t weight = ev->GetWeight();   // event weight

      // sums of weights per class
      if (DataInfo().IsSignal(ev)) fSumOfWeightsS += weight;
      else                         fSumOfWeightsB += weight;

      Double_t* sum = DataInfo().IsSignal(ev) ? sumS : sumB;

      for (UInt_t ivar=0; ivar<nvar; ivar++) sum[ivar] += ev->GetValue( ivar )*weight;
   }

   for (UInt_t ivar=0; ivar<nvar; ivar++) {
      (*fMeanMatx)( ivar, 2 ) = sumS[ivar];
      (*fMeanMatx)( ivar, 0 ) = sumS[ivar]/fSumOfWeightsS;

      (*fMeanMatx)( ivar, 2 ) += sumB[ivar];
      (*fMeanMatx)( ivar, 1 ) = sumB[ivar]/fSumOfWeightsB;

      // overall (signal + background) mean
      (*fMeanMatx)( ivar, 2 ) /= (fSumOfWeightsS + fSumOfWeightsB);
   }
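
Written out, the loops above fill the three columns of fMeanMatx with the weighted class means and the overall mean:

\[
  \bar{x}^{\,S}_{i} = \frac{1}{W_S}\sum_{k\in S} w_k\, x_{i,k}, \qquad
  \bar{x}^{\,B}_{i} = \frac{1}{W_B}\sum_{k\in B} w_k\, x_{i,k}, \qquad
  \bar{x}_{i} = \frac{W_S\,\bar{x}^{\,S}_{i} + W_B\,\bar{x}^{\,B}_{i}}{W_S + W_B}
\]

with W_S = fSumOfWeightsS and W_B = fSumOfWeightsB.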
// GetCov_WithinClass(): dispersion of the events around the mean of their own class
   assert( fSumOfWeightsS > 0 && fSumOfWeightsB > 0 );
   // ...
   const Int_t nvar  = GetNvar();
   const Int_t nvar2 = nvar*nvar;
   // ...
   memset( sumSig, 0, nvar2*sizeof(Double_t) );
   memset( sumBgd, 0, nvar2*sizeof(Double_t) );
   // ...
   for (Int_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {

      // read the training event
      const Event* ev = GetEvent(ievt);
      // ... (variable values cached in xval[], loop over variable pairs x, y)
      if (DataInfo().IsSignal(ev)) {
         Double_t v = ( (xval[x] - (*fMeanMatx)(x, 0))*(xval[y] - (*fMeanMatx)(y, 0)) )*weight;
         // ...
      }
      else {
         Double_t v = ( (xval[x] - (*fMeanMatx)(x, 1))*(xval[y] - (*fMeanMatx)(y, 1)) )*weight;
         // ...
      }
   }
   // ... (k is the linear index of the pair (x, y))
   (*fWith)(x, y) = sumSig[k]/fSumOfWeightsS + sumBgd[k]/fSumOfWeightsB;
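
The resulting within-class matrix is the sum of the two class covariances, each computed around its own class mean:

\[
  W_{xy} \;=\; \frac{1}{W_S}\sum_{k\in S} w_k\,(x_k-\bar{x}^{\,S})(y_k-\bar{y}^{\,S})
        \;+\; \frac{1}{W_B}\sum_{k\in B} w_k\,(x_k-\bar{x}^{\,B})(y_k-\bar{y}^{\,B})
\]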
// GetCov_BetweenClass(): dispersion of the class means around the overall mean
   assert( fSumOfWeightsS > 0 && fSumOfWeightsB > 0 );
   // ...
   prodSig = ( ((*fMeanMatx)(x, 0) - (*fMeanMatx)(x, 2))*
               ((*fMeanMatx)(y, 0) - (*fMeanMatx)(y, 2)) );
   prodBgd = ( ((*fMeanMatx)(x, 1) - (*fMeanMatx)(x, 2))*
               ((*fMeanMatx)(y, 1) - (*fMeanMatx)(y, 2)) );
   // ...
   (*fBetw)(x, y) = (fSumOfWeightsS*prodSig + fSumOfWeightsB*prodBgd) / (fSumOfWeightsS + fSumOfWeightsB);
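
Correspondingly, the between-class matrix measures the weighted dispersion of the two class means around the overall mean:

\[
  B_{xy} \;=\; \frac{W_S\,(\bar{x}^{\,S}-\bar{x})(\bar{y}^{\,S}-\bar{y})
              \;+\; W_B\,(\bar{x}^{\,B}-\bar{x})(\bar{y}^{\,B}-\bar{y})}{W_S + W_B}
\]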
// GetCov_Full(): total covariance is the sum of the within- and between-class matrices
   (*fCov)(x, y) = (*fWith)(x, y) + (*fBetw)(x, y);
// GetFisherCoeff(): Fisher = Sum { [coeff]*[variables] }
   assert( fSumOfWeightsS > 0 && fSumOfWeightsB > 0 );
   // ...
   // invert the matrix selected by the chosen method
   switch (GetFisherMethod()) {
   // ... (kFisher and kMahalanobis cases elided)
   default:
      Log() << kFATAL << "<GetFisherCoeff> undefined method" << GetFisherMethod() << Endl;
   }
   // ...
   Log() << kWARNING << "<GetFisherCoeff> matrix is almost singular with determinant="
         // ...
         << " did you use variables that are linear combinations or highly correlated?"
         << Endl;
   // ...
   Log() << kFATAL << "<GetFisherCoeff> matrix is singular with determinant="
         // ...
         << " did you use variables that are linear combinations? \n"
         << " do you have any clue as to what went wrong in the above printout of the covariance matrix? "
         << Endl;
   // ...
   // global normalisation factor
   Double_t xfact = TMath::Sqrt( fSumOfWeightsS*fSumOfWeightsB ) / (fSumOfWeightsS + fSumOfWeightsB);
   // ...
   std::vector<Double_t> diffMeans( GetNvar() );
   // ...
   for (ivar=0; ivar<GetNvar(); ivar++) {
      (*fFisherCoeff)[ivar] = 0;
      // ...
      for (jvar=0; jvar<GetNvar(); jvar++) {
         Double_t d = (*fMeanMatx)(jvar, 0) - (*fMeanMatx)(jvar, 1);
         (*fFisherCoeff)[ivar] += invCov(ivar, jvar)*d;
      }
      // ...
      (*fFisherCoeff)[ivar] *= xfact;
   }
   // ...
   // offset: accumulate coeff * (signal mean + background mean) per variable
   for (ivar=0; ivar<GetNvar(); ivar++) {
      fF0 += (*fFisherCoeff)[ivar]*((*fMeanMatx)(ivar, 0) + (*fMeanMatx)(ivar, 1));
   }
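
In matrix notation, the double loop above implements the standard Fisher solution, where invCov is the inverse of the matrix selected by the method option (within-class or full covariance):

\[
  c_i \;=\; \frac{\sqrt{W_S\,W_B}}{W_S + W_B}\,\sum_{j}\bigl(C^{-1}\bigr)_{ij}\,
            \bigl(\bar{x}^{\,S}_{j}-\bar{x}^{\,B}_{j}\bigr)
\]

The last loop accumulates the offset fF0 from the coefficients and the sum of the two class means; it is used to centre the Fisher response between the classes (the final scaling of fF0 sits in lines elided here).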
// GetDiscrimPower(): ratio of between-class to total dispersion, per variable
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      if ((*fCov)(ivar, ivar) != 0)
         (*fDiscrimPow)[ivar] = (*fBetw)(ivar, ivar)/(*fCov)(ivar, ivar);
      else
         (*fDiscrimPow)[ivar] = 0;
   }
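
With the total covariance C = W + B from GetCov_Full(), the per-variable discrimination power computed above is simply

\[
  P_i \;=\; \frac{B_{ii}}{C_{ii}} \;=\; \frac{B_{ii}}{W_{ii}+B_{ii}}
\]

A variable whose within-class dispersion W_ii is small relative to its between-class dispersion B_ii therefore gets a power close to one.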
// CreateRanking(): rank the input variables by their discriminating power
   fRanking = new Ranking( GetName(), "Discr. power" );
   // ...
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      fRanking->AddRank( Rank( GetInputLabel(ivar), (*fDiscrimPow)[ivar] ) );
   }
// PrintCoefficients(): display the Fisher coefficients and discriminating power of each variable
   Log() << kINFO << "Results for Fisher coefficients:" << Endl;
   // ...
   if (GetTransformationHandler().GetTransformationList().GetSize() != 0) {
      Log() << kINFO << "NOTE: The coefficients must be applied to TRANSFORMED variables" << Endl;
      Log() << kINFO << "  List of the transformations: " << Endl;
      TListIter trIt(&GetTransformationHandler().GetTransformationList());
      // ...
   }
   // ...
   std::vector<TString>  vars;
   std::vector<Double_t> coeffs;
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      vars  .push_back( GetInputLabel(ivar) );
      coeffs.push_back( (*fFisherCoeff)[ivar] );
   }
   // ...
   vars  .push_back( "(offset)" );
   coeffs.push_back( fF0 );
   // ...
   if (IsNormalised()) {
      Log() << kINFO << "NOTE: You have chosen to use the \"Normalise\" booking option. Hence, the" << Endl;
      Log() << kINFO << "      coefficients must be applied to NORMALISED (') variables as follows:" << Endl;
      // find the longest variable name, for printout alignment
      Int_t maxL = 0;
      for (UInt_t ivar=0; ivar<GetNvar(); ivar++)
         if (GetInputLabel(ivar).Length() > maxL) maxL = GetInputLabel(ivar).Length();
      // ...
      for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
         Log() << kINFO
               << std::setw(maxL+9) << TString("[") + GetInputLabel(ivar) + "]' = 2*("
               << std::setw(maxL+2) << TString("[") + GetInputLabel(ivar) + "]"
               << std::setw(3)      << (GetXmin(ivar) > 0 ? " - " : " + ")
               << std::setw(6)      << TMath::Abs(GetXmin(ivar)) << std::setw(3) << ")/"
               << std::setw(6)      << (GetXmax(ivar) - GetXmin(ivar))
               << std::setw(3)      << " - 1"
               << Endl;
      }
      Log() << kINFO << "The TMVA Reader will properly account for this normalisation, but if the" << Endl;
      Log() << kINFO << "Fisher classifier is applied outside the Reader, the transformation must be" << Endl;
      Log() << kINFO << "implemented -- or the \"Normalise\" option is removed and Fisher retrained." << Endl;
      // ...
   }
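
The per-variable normalisation printed in the loop above is the usual mapping of each input onto [-1, 1]:

\[
  x' \;=\; 2\,\frac{x - x_{\min}}{x_{\max} - x_{\min}} \;-\; 1
\]

so coefficients quoted for "Normalise"d training must be applied to x', not to the raw x.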
// ReadWeightsFromStream(): read Fisher coefficients from a text weight file
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) istr >> (*fFisherCoeff)[ivar];
// AddWeightsXMLTo(): create the XML description of the Fisher classifier
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      // ...
      gTools().AddAttr( coeffxml, "Value", (*fFisherCoeff)[ivar] );
   }
// ReadWeightsFromXML(): read Fisher coefficients from an XML weight file;
// the coefficient with index 0 is the offset fF0
   fFisherCoeff->resize(ncoeff-1);
   // ...
   if (coeffidx==0) fF0 = coeff;
   else             (*fFisherCoeff)[coeffidx-1] = coeff;
// MakeClassSpecific(): write the Fisher-specific standalone classifier response
   Int_t dp = fout.precision();
   fout << "   double              fFisher0;" << std::endl;
   fout << "   std::vector<double> fFisherCoefficients;" << std::endl;
   fout << "};" << std::endl;
   fout << "" << std::endl;
   fout << "inline void " << className << "::Initialize() " << std::endl;
   fout << "{" << std::endl;
   fout << "   fFisher0 = " << std::setprecision(12) << fF0 << ";" << std::endl;
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      fout << "   fFisherCoefficients.push_back( " << std::setprecision(12) << (*fFisherCoeff)[ivar] << " );" << std::endl;
   }
   // ...
   fout << "   // sanity check" << std::endl;
   fout << "   if (fFisherCoefficients.size() != fNvars) {" << std::endl;
   fout << "      std::cout << \"Problem in class \\\"\" << fClassName << \"\\\"::Initialize: mismatch in number of input values\"" << std::endl;
   fout << "                << fFisherCoefficients.size() << \" != \" << fNvars << std::endl;" << std::endl;
   fout << "      fStatusIsClean = false;" << std::endl;
   fout << "   }" << std::endl;
   fout << "}" << std::endl;
   // ...
   fout << "inline double " << className << "::GetMvaValue__( const std::vector<double>& inputValues ) const" << std::endl;
   fout << "{" << std::endl;
   fout << "   double retval = fFisher0;" << std::endl;
   fout << "   for (size_t ivar = 0; ivar < fNvars; ivar++) {" << std::endl;
   fout << "      retval += fFisherCoefficients[ivar]*inputValues[ivar];" << std::endl;
   fout << "   }" << std::endl;
   // ...
   fout << "   return retval;" << std::endl;
   fout << "}" << std::endl;
   // ...
   fout << "// Clean up" << std::endl;
   fout << "inline void " << className << "::Clear() " << std::endl;
   fout << "{" << std::endl;
   fout << "   // clear coefficients" << std::endl;
   fout << "   fFisherCoefficients.clear(); " << std::endl;
   fout << "}" << std::endl;
   fout << std::setprecision(dp);
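
To make the stream output above easier to read, here is a self-contained sketch of the shape of the code that lands in the generated standalone file. The class name, member layout and numerical values are illustrative placeholders, not actual generated output.

#include <cstddef>
#include <vector>

// Sketch of the generated response class (illustrative names and values)
class ReadFisherSketch {
public:
   ReadFisherSketch() : fNvars(2), fFisher0(0) { Initialize(); }

   // Linear Fisher response: offset plus coefficient-weighted inputs
   double GetMvaValue__( const std::vector<double>& inputValues ) const
   {
      double retval = fFisher0;
      for (size_t ivar = 0; ivar < fNvars; ivar++)
         retval += fFisherCoefficients[ivar]*inputValues[ivar];
      return retval;
   }

private:
   void Initialize()
   {
      fFisher0 = -0.1;                          // placeholder offset F0
      fFisherCoefficients.push_back(  0.5 );    // placeholder coefficients,
      fFisherCoefficients.push_back( -1.2 );    // one per input variable
   }

   size_t              fNvars;
   double              fFisher0;
   std::vector<double> fFisherCoefficients;
};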
// GetHelpMessage(): get help message text
   Log() << "Fisher discriminants select events by distinguishing the mean " << Endl;
   Log() << "values of the signal and background distributions in a trans- " << Endl;
   Log() << "formed variable space where linear correlations are removed." << Endl;
   // ...
   Log() << " (More precisely: the \"linear discriminator\" determines" << Endl;
   Log() << " an axis in the (correlated) hyperspace of the input " << Endl;
   Log() << " variables such that, when projecting the output classes " << Endl;
   Log() << " (signal and background) upon this axis, they are pushed " << Endl;
   Log() << " as far as possible away from each other, while events" << Endl;
   Log() << " of the same class are confined in a close vicinity. The " << Endl;
   Log() << " linearity property of this classifier is reflected in the " << Endl;
   Log() << " metric with which \"far apart\" and \"close vicinity\" are " << Endl;
   Log() << " determined: the covariance matrix of the discriminating" << Endl;
   Log() << " variable space.)" << Endl;
   // ...
   Log() << "Optimal performance for Fisher discriminants is obtained for " << Endl;
   Log() << "linearly correlated Gaussian-distributed variables. Any deviation" << Endl;
   Log() << "from this ideal reduces the achievable separation power. In " << Endl;
   Log() << "particular, no discrimination at all is achieved for a variable" << Endl;
   Log() << "that has the same sample mean for signal and background, even if " << Endl;
   Log() << "the shapes of the distributions are very different. Thus, Fisher " << Endl;
   Log() << "discriminants often benefit from suitable transformations of the " << Endl;
   Log() << "input variables. For example, if a variable x in [-1,1] has a " << Endl;
   Log() << "parabolic signal distribution and a uniform background" << Endl;
   Log() << "distribution, their mean value is zero in both cases, leading " << Endl;
   Log() << "to no separation. The simple transformation x -> |x| renders this " << Endl;
   Log() << "variable powerful for use in a Fisher discriminant." << Endl;
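
As a quick numerical check of the |x| example in the help text, take (purely for illustration) a normalised parabolic signal density p_S(x) = 3x^2/2 and a flat background density p_B(x) = 1/2 on [-1, 1]:

\[
  \langle x\rangle_S = \langle x\rangle_B = 0, \qquad
  \langle |x|\rangle_S = \int_{-1}^{1}|x|\,\tfrac{3}{2}x^2\,dx = \tfrac{3}{4}, \qquad
  \langle |x|\rangle_B = \int_{-1}^{1}|x|\,\tfrac{1}{2}\,dx = \tfrac{1}{2}
\]

The raw variable carries no Fisher separation, while |x| has distinct class means and does.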
Member functions of TMVA::MethodFisher referenced above:

MethodFisher( const TString& jobName, const TString& methodTitle, DataSetInfo& dsi, const TString& theOption = "Fisher", TDirectory* theTargetDir = 0 )
standard constructor for the "Fisher" method
virtual ~MethodFisher( void )
destructor
void Init( void )
default initialization called by all constructors
void DeclareOptions()
MethodFisher options; format and syntax of the option string: "type", where type is "Fisher" or "Mahalanobis"
void ProcessOptions()
process user options
virtual Bool_t HasAnalysisType( Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets )
Fisher can only handle classification with two classes
void Train( void )
computation of the Fisher coefficients by a series of matrix operations
void InitMatrices( void )
initialization method; creates global matrices and vectors
void GetMean( void )
compute the mean values of the variables in each sample, and the overall means
void GetCov_WithinClass( void )
the 'within class' covariance matrix reflects the dispersion of the events around the mean of their own class
void GetCov_BetweenClass( void )
the 'between class' covariance matrix reflects the dispersion of the class means around the overall sample mean
void GetCov_Full( void )
compute the full covariance matrix as the sum of the within-class and between-class matrices
void GetFisherCoeff( void )
Fisher = Sum { [coeff]*[variables] }
void GetDiscrimPower( void )
computation of the discrimination power indicator for each variable; a small within-class dispersion relative to the between-class dispersion indicates high discrimination power
const Ranking* CreateRanking()
computes the ranking of the input variables
void PrintCoefficients( void )
display the Fisher coefficients and the discriminating power of each variable
Double_t GetMvaValue( Double_t* err = 0, Double_t* errUpper = 0 )
returns the Fisher value (no fixed range)
void ReadWeightsFromStream( std::istream& i )
read Fisher coefficients from a text weight file
void AddWeightsXMLTo( void* parent ) const
create the XML description of the Fisher classifier
void ReadWeightsFromXML( void* wghtnode )
read Fisher coefficients from an XML weight file
void MakeClassSpecific( std::ostream&, const TString& ) const
write the Fisher-specific classifier response
void GetHelpMessage() const
get help message text