// @(#)root/tmva $Id: MethodPDERS.cxx 29195 2009-06-24 10:39:49Z brun $
// Author: Andreas Hoecker, Yair Mahalalel, Joerg Stelzer, Helge Voss, Kai Voss

/***********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis        *
 * Package: TMVA                                                                   *
 * Class  : MethodPDERS                                                            *
 * Web    : http://tmva.sourceforge.net                                            *
 *                                                                                 *
 * Description:                                                                    *
 *      Implementation                                                             *
 *                                                                                 *
 * Authors (alphabetical):                                                         *
 *      Krzysztof Danielowski <danielow@cern.ch>       - IFJ PAN & AGH, Poland     *     
 *      Andreas Hoecker       <Andreas.Hocker@cern.ch> - CERN, Switzerland         *
 *      Kamil Kraszewski      <kalq@cern.ch>           - IFJ PAN & UJ, Poland      *
 *      Maciej Kruk           <mkruk@cern.ch>          - IFJ PAN & AGH, Poland     *
 *      Yair Mahalalel        <Yair.Mahalalel@cern.ch> - CERN, Switzerland         *
 *      Helge Voss            <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany *
 *      Kai Voss              <Kai.Voss@cern.ch>       - U. of Victoria, Canada    *
 *                                                                                 *
 * Copyright (c) 2005:                                                             *
 *      CERN, Switzerland                                                          *
 *      U. of Victoria, Canada                                                     *
 *      MPI-K Heidelberg, Germany                                                  *
 *                                                                                 *
 * Redistribution and use in source and binary forms, with or without              *
 * modification, are permitted according to the terms listed in LICENSE            *
 * (http://tmva.sourceforge.net/LICENSE)                                           *
 ***********************************************************************************/

//_______________________________________________________________________
// Begin_Html
/*
  This is a generalization of the above Likelihood methods to <i>N</i><sub>var</sub>
  dimensions, where <i>N</i><sub>var</sub> is the number of input variables
  used in the MVA. If the multi-dimensional probability density functions 
  (PDFs) for signal and background were known, this classifier would contain 
  the entire physical information and would therefore be optimal. In practice, 
  kernel estimation methods are used to approximate the PDFs using the events 
  from the training sample. <br><p></p>
   
  A very simple probability density estimator (PDE) has been suggested
  in <a href="http://arxiv.org/abs/hep-ex/0211019">hep-ex/0211019</a>. The
  PDE for a given test event is obtained from counting the (normalized) 
  number of signal and background (training) events that occur in the 
  "vicinity" of the test event. The volume that describes "vicinity" is 
  user-defined. A <a href="http://arxiv.org/abs/hep-ex/0211019">search 
  method based on binary-trees</a> is used to effectively reduce the 
  selection time for the range search. Three different volume definitions
  are available: <br><p></p>
  <ul>
  <li><u>MinMax:</u>
  the volume is defined in each dimension with respect 
  to the full variable range found in the training sample. </li>
  <li><u>RMS:</u>
  the volume is defined in each dimension with respect 
  to the RMS estimated from the training sample. </li>
  <li><u>Adaptive:</u>
  a volume element is defined in each dimension with 
  respect to the RMS estimated from the training sample. The overall 
  scale of the volume element is then determined for each event so 
  that the total number of events confined in the volume lies within 
  a user-defined range.</li>
  </ul><p></p>
  The adaptive range search is used by default.
  // End_Html
  */
//_______________________________________________________________________
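
// Usage sketch (illustrative only, not part of this implementation): booking
// PDERS via the TMVA Factory with an explicit option string. The option keys
// are those declared in DeclareOptions() below; the factory pointer, the
// function name and the chosen values are assumptions made for the example.
//
//    #include "TMVA/Factory.h"
//    #include "TMVA/Types.h"
//
//    void BookPDERS( TMVA::Factory* factory )
//    {
//       // adaptive volume with a Gaussian kernel; the values are examples only
//       factory->BookMethod( TMVA::Types::kPDERS, "PDERS",
//                            "VolumeRangeMode=Adaptive:KernelEstimator=Gauss:"
//                            "NEventsMin=100:NEventsMax=200:GaussSigma=0.1" );
//    }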

#include <assert.h>
#include <algorithm>

#include "TFile.h"
#include "TObjString.h"
#include "TMath.h"

#include "TMVA/ClassifierFactory.h"
#include "TMVA/MethodPDERS.h"
#include "TMVA/Tools.h"
#include "TMVA/RootFinder.h"

#define TMVA_MethodPDERS__countByHand__Debug__
#undef  TMVA_MethodPDERS__countByHand__Debug__

namespace TMVA {
   const Bool_t MethodPDERS_UseFindRoot = kFALSE; 
};

TMVA::MethodPDERS* TMVA::MethodPDERS::fgThisPDERS = NULL;

REGISTER_METHOD(PDERS)

ClassImp(TMVA::MethodPDERS)

//_______________________________________________________________________
TMVA::MethodPDERS::MethodPDERS( const TString& jobName,
                                const TString& methodTitle,
                                DataSetInfo& theData, 
                                const TString& theOption,
                                TDirectory* theTargetDir ) :
   MethodBase( jobName, Types::kPDERS, methodTitle, theData, theOption, theTargetDir ),
   fDelta(0),
   fShift(0)
{
   // standard constructor for the PDERS method
}

//_______________________________________________________________________
TMVA::MethodPDERS::MethodPDERS( DataSetInfo& theData,
                                const TString& theWeightFile,
                                TDirectory* theTargetDir ) :
   MethodBase( Types::kPDERS, theData, theWeightFile, theTargetDir ),
   fDelta( 0 ),
   fShift( 0 )
{
   // construct MethodPDERS from an existing weight file
}

//_______________________________________________________________________
Bool_t TMVA::MethodPDERS::HasAnalysisType( Types::EAnalysisType type, UInt_t numberClasses, UInt_t /*numberTargets*/ )
{
   // PDERS can handle classification with 2 classes and regression with one or more regression-targets
   if (type == Types::kClassification && numberClasses == 2) return kTRUE;
   if (type == Types::kRegression) return kTRUE;
   return kFALSE;
}

//_______________________________________________________________________
void TMVA::MethodPDERS::Init( void )
{
   // default initialisation routine called by all constructors

   fBinaryTree = NULL;

   UpdateThis();

   // default options
   fDeltaFrac       = 3.0;
   fVRangeMode      = kAdaptive;
   fKernelEstimator = kBox;

   // special options for Adaptive mode
   fNEventsMin      = 100;
   fNEventsMax      = 200;
   fMaxVIterations  = 150;
   fInitialScale    = 0.99;
   fGaussSigma      = 0.1;
   fNormTree        = kFALSE;
    
   fkNNMin      = Int_t(fNEventsMin);
   fkNNMax      = Int_t(fNEventsMax);

   fInitializedVolumeEle = kFALSE;
   fAverageRMS.clear();

   // the minimum requirement to declare an event signal-like
   SetSignalReferenceCut( 0.0 );
}

//_______________________________________________________________________
TMVA::MethodPDERS::~MethodPDERS( void )
{
   // destructor
   if (fDelta) delete fDelta;
   if (fShift) delete fShift;

   if (NULL != fBinaryTree) delete fBinaryTree;
}

//_______________________________________________________________________
void TMVA::MethodPDERS::DeclareOptions() 
{
   // define the options (their key words) that can be set in the option string 
   // known options:
   // VolumeRangeMode   <string>  Method to determine volume range
   //    available values are:        MinMax 
   //                                 Unscaled
   //                                 RMS
   //                                 kNN
   //                                 Adaptive <default>
   //
   // KernelEstimator   <string>  Kernel estimation function
   //    available values are:        Box <default>
   //                                 Sphere
   //                                 Teepee
   //                                 Gauss
   //                                 Sinc3
   //                                 Sinc5
   //                                 Sinc7
   //                                 Sinc9
   //                                 Sinc11
   //                                 Lanczos2
   //                                 Lanczos3
   //                                 Lanczos5
   //                                 Lanczos8
   //                                 Trim
   //
   // DeltaFrac         <float>   Fraction of the RMS (RMS mode) or of |max - min| (MinMax mode) used as the volume size
   // NEventsMin        <int>     Minimum number of events for adaptive volume range             
   // NEventsMax        <int>     Maximum number of events for adaptive volume range
   // MaxVIterations    <int>     Maximum number of iterations for adaptive volume range
   // InitialScale      <float>   Initial scale for adaptive volume range           
   // GaussSigma        <float>   Width with respect to the volume size of Gaussian kernel estimator
   DeclareOptionRef(fVolumeRange="Adaptive", "VolumeRangeMode", "Method to determine volume size");
   AddPreDefVal(TString("Unscaled"));
   AddPreDefVal(TString("MinMax"));
   AddPreDefVal(TString("RMS"));
   AddPreDefVal(TString("Adaptive"));
   AddPreDefVal(TString("kNN"));

   DeclareOptionRef(fKernelString="Box", "KernelEstimator", "Kernel estimation function");
   AddPreDefVal(TString("Box"));
   AddPreDefVal(TString("Sphere"));
   AddPreDefVal(TString("Teepee"));
   AddPreDefVal(TString("Gauss"));
   AddPreDefVal(TString("Sinc3"));
   AddPreDefVal(TString("Sinc5"));
   AddPreDefVal(TString("Sinc7"));
   AddPreDefVal(TString("Sinc9"));
   AddPreDefVal(TString("Sinc11"));
   AddPreDefVal(TString("Lanczos2"));
   AddPreDefVal(TString("Lanczos3"));
   AddPreDefVal(TString("Lanczos5"));
   AddPreDefVal(TString("Lanczos8"));
   AddPreDefVal(TString("Trim"));

   DeclareOptionRef(fDeltaFrac     , "DeltaFrac",      "Fraction of RMS or |max-min| used as volume size (RMS/MinMax modes)");
   DeclareOptionRef(fNEventsMin    , "NEventsMin",     "nEventsMin for adaptive volume range");
   DeclareOptionRef(fNEventsMax    , "NEventsMax",     "nEventsMax for adaptive volume range");
   DeclareOptionRef(fMaxVIterations, "MaxVIterations", "MaxVIterations for adaptive volume range");
   DeclareOptionRef(fInitialScale  , "InitialScale",   "InitialScale for adaptive volume range");
   DeclareOptionRef(fGaussSigma    , "GaussSigma",     "Width (wrt volume size) of Gaussian kernel estimator");
   DeclareOptionRef(fNormTree      , "NormTree",       "Normalize binary search tree");
}
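
// Example option string (illustrative): the keys declared above can be
// combined in the booking string, e.g. to widen the adaptive event-count
// window and to switch the kernel; the particular values below are
// assumptions chosen only for the example.
//
//    "VolumeRangeMode=Adaptive:KernelEstimator=Teepee:"
//    "NEventsMin=50:NEventsMax=400:MaxVIterations=150:InitialScale=0.99"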

//_______________________________________________________________________
void TMVA::MethodPDERS::ProcessOptions() 
{
   // process the options specified by the user
   
   if (IgnoreEventsWithNegWeightsInTraining()) {
      Log() << kFATAL << "Mechanism to ignore events with negative weights in training not yet available for method: "
            << GetMethodTypeName() 
            << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
            << Endl;
   }

   fGaussSigmaNorm = fGaussSigma; // * TMath::Sqrt( Double_t(GetNvar()) );

   fVRangeMode = MethodPDERS::kUnsupported;

   if      (fVolumeRange == "MinMax"    ) fVRangeMode = kMinMax;
   else if (fVolumeRange == "RMS"       ) fVRangeMode = kRMS;
   else if (fVolumeRange == "Adaptive"  ) fVRangeMode = kAdaptive;
   else if (fVolumeRange == "Unscaled"  ) fVRangeMode = kUnscaled;
   else if (fVolumeRange == "kNN"   ) fVRangeMode = kkNN;
   else {
      Log() << kFATAL << "VolumeRangeMode parameter '" << fVolumeRange << "' unknown" << Endl;
   }

   if      (fKernelString == "Box"      ) fKernelEstimator = kBox;
   else if (fKernelString == "Sphere"   ) fKernelEstimator = kSphere;
   else if (fKernelString == "Teepee"   ) fKernelEstimator = kTeepee;
   else if (fKernelString == "Gauss"    ) fKernelEstimator = kGauss;
   else if (fKernelString == "Sinc3"    ) fKernelEstimator = kSinc3;
   else if (fKernelString == "Sinc5"    ) fKernelEstimator = kSinc5;
   else if (fKernelString == "Sinc7"    ) fKernelEstimator = kSinc7;
   else if (fKernelString == "Sinc9"    ) fKernelEstimator = kSinc9;
   else if (fKernelString == "Sinc11"   ) fKernelEstimator = kSinc11;
   else if (fKernelString == "Lanczos2" ) fKernelEstimator = kLanczos2;
   else if (fKernelString == "Lanczos3" ) fKernelEstimator = kLanczos3;
   else if (fKernelString == "Lanczos5" ) fKernelEstimator = kLanczos5;
   else if (fKernelString == "Lanczos8" ) fKernelEstimator = kLanczos8;
   else if (fKernelString == "Trim"     ) fKernelEstimator = kTrim;
   else {
      Log() << kFATAL << "KernelEstimator parameter '" << fKernelString << "' unknown" << Endl;
   }

   // TODO: Add parameter validation

   Log() << kVERBOSE << "interpreted option string: vRangeMethod: '"
         << (const char*)((fVRangeMode == kMinMax  ) ? "MinMax"   :
                          (fVRangeMode == kUnscaled) ? "Unscaled" :
                          (fVRangeMode == kRMS     ) ? "RMS"      :
                          (fVRangeMode == kkNN     ) ? "kNN"      : "Adaptive") << "'" << Endl;
   if (fVRangeMode == kMinMax || fVRangeMode == kRMS)
      Log() << kVERBOSE << "deltaFrac: " << fDeltaFrac << Endl;
   else
      Log() << kVERBOSE << "nEventsMin/Max, maxVIterations, initialScale: "
              << fNEventsMin << "  " << fNEventsMax
              << "  " << fMaxVIterations << "  " << fInitialScale << Endl;
   Log() << kVERBOSE << "KernelEstimator = " << fKernelString << Endl;
}

//_______________________________________________________________________
void TMVA::MethodPDERS::Train( void )
{
   // this is a dummy training: the only preparation work is the construction
   // of the binary tree as a pointer chain. It is easier to directly save the
   // training tree in the weight file and to rebuild the binary tree from
   // scratch in the test phase

   if (IsNormalised()) Log() << kFATAL << "\"Normalise\" option cannot be used with PDERS; " 
                               << "please remove the option from the configuration string, or "
                               << "use \"!Normalise\""
                               << Endl;

   CreateBinarySearchTree( Types::kTraining );
    
   CalcAverages();
   SetVolumeElement();

   fInitializedVolumeEle = kTRUE;
}

//_______________________________________________________________________
Double_t TMVA::MethodPDERS::GetMvaValue( Double_t* err )
{
   // init the size of a volume element using a defined fraction of the
   // volume containing all events
   if (fInitializedVolumeEle == kFALSE) {
      fInitializedVolumeEle = kTRUE;

      // binary trees must exist
      assert( fBinaryTree );

      CalcAverages();
      SetVolumeElement();
   }

   // cannot determine error
   if (err != 0) *err = -1;

   return this->CRScalc( *GetEvent() );
}
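
// Application sketch (illustrative): evaluating a trained PDERS classifier
// with the TMVA Reader; the variable names and the weight-file path are
// assumptions made for the example.
//
//    #include "TMVA/Reader.h"
//
//    Float_t var1, var2;
//    TMVA::Reader reader( "!Color:!Silent" );
//    reader.AddVariable( "var1", &var1 );
//    reader.AddVariable( "var2", &var2 );
//    reader.BookMVA( "PDERS", "weights/TMVAClassification_PDERS.weights.xml" );
//    // ... fill var1 and var2 for the current event, then:
//    Double_t mvaValue = reader.EvaluateMVA( "PDERS" );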

//_______________________________________________________________________
const std::vector< Float_t >& TMVA::MethodPDERS::GetRegressionValues()
{
   if (fRegressionReturnVal == 0) fRegressionReturnVal = new std::vector<Float_t>;
   fRegressionReturnVal->clear();
   // init the size of a volume element using a defined fraction of the
   // volume containing all events
   if (fInitializedVolumeEle == kFALSE) {
      fInitializedVolumeEle = kTRUE;

      // binary trees must exist
      assert( fBinaryTree );

      CalcAverages();

      SetVolumeElement();
   }

   const Event* ev = GetEvent();
   this->RRScalc( *ev, fRegressionReturnVal );

   Event * evT = new Event(*ev);
   UInt_t ivar = 0;
   for (std::vector<Float_t>::iterator it = fRegressionReturnVal->begin(); it != fRegressionReturnVal->end(); it++ ) {
      evT->SetTarget(ivar,(*it));
      ivar++;
   }

   const Event* evT2 = GetTransformationHandler().InverseTransform( evT );
   fRegressionReturnVal->clear();

   for (ivar = 0; ivar<evT2->GetNTargets(); ivar++) {
      fRegressionReturnVal->push_back(evT2->GetTarget(ivar));
   }

   delete evT;


   return (*fRegressionReturnVal);
}
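
// Regression sketch (illustrative): with a Reader booked from a regression
// weight file (analogous to the classification sketch above), the regression
// targets for the current event are obtained via
//
//    const std::vector<Float_t>& targets = reader.EvaluateRegression( "PDERS" );
//
// the method tag and the weight-file handling are assumptions following the
// usual Reader workflow.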

//_______________________________________________________________________
void TMVA::MethodPDERS::CalcAverages()
{
   // compute the average RMS values required for the RMS, adaptive and kNN volume modes
   if (fVRangeMode == kAdaptive || fVRangeMode == kRMS || fVRangeMode == kkNN  ) {
      fAverageRMS.clear();
      fBinaryTree->CalcStatistics();

      for (UInt_t ivar=0; ivar<GetNvar(); ivar++) { 
         if (!DoRegression()) { // for classification, average the signal and background RMS
            Float_t rmsS = fBinaryTree->RMS(Types::kSignal, ivar);
            Float_t rmsB = fBinaryTree->RMS(Types::kBackground, ivar);
            fAverageRMS.push_back( (rmsS + rmsB)*0.5 );
         } 
         else {
            Float_t rms = fBinaryTree->RMS( ivar );
            fAverageRMS.push_back( rms );
         }
      }
   }
}   

//_______________________________________________________________________
void TMVA::MethodPDERS::CreateBinarySearchTree( Types::ETreeType type )
{
   // create binary search trees for signal and background
   if (NULL != fBinaryTree) delete fBinaryTree;
   fBinaryTree = new BinarySearchTree();
   if (fNormTree) {
      fBinaryTree->SetNormalize( kTRUE );
   }

   fBinaryTree->Fill( GetEventCollection(type) );

   if (fNormTree) {
      fBinaryTree->NormalizeTree();
   }

   if (!DoRegression()) {
      // these are the signal and background scales for the weights
      fScaleS = 1.0/fBinaryTree->GetSumOfWeights( Types::kSignal );
      fScaleB = 1.0/fBinaryTree->GetSumOfWeights( Types::kBackground );

      Log() << kVERBOSE << "Signal and background scales: " << fScaleS << " " << fScaleB << Endl;
   }
}

//_______________________________________________________________________
void TMVA::MethodPDERS::SetVolumeElement( void ) {
   // defines volume dimensions

   if (GetNvar()==0) {
      Log() << kFATAL << "GetNvar() == 0" << Endl;
      return;
   }

   // init relative scales
   fkNNMin      = Int_t(fNEventsMin);
   fkNNMax      = Int_t(fNEventsMax);   

   if (fDelta) delete fDelta;
   if (fShift) delete fShift;
   fDelta = new std::vector<Float_t>( GetNvar() );
   fShift = new std::vector<Float_t>( GetNvar() );

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      switch (fVRangeMode) {
         
      case kRMS:
      case kkNN:
      case kAdaptive:
         // sanity check
         if (fAverageRMS.size() != GetNvar())
            Log() << kFATAL << "<SetVolumeElement> RMS not computed: " << fAverageRMS.size() << Endl;
         (*fDelta)[ivar] = fAverageRMS[ivar]*fDeltaFrac;
         Log() << kVERBOSE << "delta of var[" << (*fInputVars)[ivar]
                 << "\t]: " << fAverageRMS[ivar]
                 << "\t  |  comp with |max - min|: " << (GetXmax( ivar ) - GetXmin( ivar ))
                 << Endl;
         break;
      case kMinMax:
         (*fDelta)[ivar] = (GetXmax( ivar ) - GetXmin( ivar ))*fDeltaFrac;
         break;
      case kUnscaled:
         (*fDelta)[ivar] = fDeltaFrac;
         break;
      default:
         Log() << kFATAL << "<SetVolumeElement> unknown range-set mode: "
                 << fVRangeMode << Endl;
      }
      (*fShift)[ivar] = 0.5; // volume is centered around test value
   }

}

//_______________________________________________________________________
Double_t TMVA::MethodPDERS::IGetVolumeContentForRoot( Double_t scale )
{
   // Interface to RootFinder
   return ThisPDERS()->GetVolumeContentForRoot( scale );
}

//_______________________________________________________________________
Double_t TMVA::MethodPDERS::GetVolumeContentForRoot( Double_t scale )
{
   // count number of events in rescaled volume

   Volume v( *fHelpVolume );
   v.ScaleInterval( scale );

   Double_t count = GetBinaryTree()->SearchVolume( &v );

   v.Delete();
   return count;
}

//_______________________________________________________________________
void TMVA::MethodPDERS::GetSample( const Event& e,
                                   std::vector<const BinarySearchTreeNode*>& events,
                                   Volume *volume )
{
   Float_t count = 0;

   // -------------------------------------------------------------------------
   //
   // ==== test of volume search =====
   //
   // #define TMVA_MethodPDERS__countByHand__Debug__

#ifdef  TMVA_MethodPDERS__countByHand__Debug__

   // starting values
   count = fBinaryTree->SearchVolume( volume );

   Int_t in = 0;
   UInt_t nvar = GetNvar();
   for (UInt_t ievt_=0; ievt_<Data()->GetNTrainingEvents(); ievt_++) {
      const Event * ev = GetTrainingEvent(ievt_);
      Bool_t inV = kTRUE;
      for (UInt_t ivar=0; ivar<nvar; ivar++) {
         Float_t x = ev->GetVal(ivar);
         inV = (x > (*volume->fLower)[ivar] && x <= (*volume->fUpper)[ivar]);
         if (!inV) break;
      }
      if (inV) in++;
   }
   Log() << kVERBOSE << "debug: count by hand: " << in << Endl;
   Log() << kVERBOSE << "debug: binary tree  : " << count << Endl << Endl;

#endif

   // -------------------------------------------------------------------------

   if (fVRangeMode == kRMS || fVRangeMode == kMinMax || fVRangeMode == kUnscaled) { // Constant volume

      std::vector<Double_t> *lb = new std::vector<Double_t>( GetNvar() );
      for (UInt_t ivar=0; ivar<GetNvar(); ivar++) (*lb)[ivar] = e.GetVal(ivar);
      std::vector<Double_t> *ub = new std::vector<Double_t>( *lb );
      for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
         (*lb)[ivar] -= (*fDelta)[ivar]*(1.0 - (*fShift)[ivar]);
         (*ub)[ivar] += (*fDelta)[ivar]*(*fShift)[ivar];
      }
      Volume* svolume = new Volume( lb, ub );

      fBinaryTree->SearchVolume( svolume, &events );

      delete svolume;
      delete lb;
      delete ub;
   } 
   else if (fVRangeMode == kAdaptive) {      // adaptive volume

      // -----------------------------------------------------------------------

      // TODO: optimize, perhaps multi-stage with broadening limits, or a
      // different root-finding method entirely, which would not need to search
      // through a large volume, where the bottleneck probably is
      if (MethodPDERS_UseFindRoot) { 

         fHelpVolume = volume;

         UpdateThis(); // necessary update of static pointer
         RootFinder rootFinder( &IGetVolumeContentForRoot, 0.01, 50, 200, 10 );
         Double_t scale = rootFinder.Root( (fNEventsMin + fNEventsMax)/2.0 );

         volume->ScaleInterval( scale );

         fBinaryTree->SearchVolume( volume, &events );

         fHelpVolume = NULL;
      }
      // -----------------------------------------------------------------------
      else {

         // starting values
         count = fBinaryTree->SearchVolume( volume );

         Float_t nEventsO = count;
         Int_t i_=0;

         while (nEventsO < fNEventsMin) { // this isn't a sane start ... try again
            volume->ScaleInterval( 1.15 );
            count = fBinaryTree->SearchVolume( volume );
            nEventsO = count;
            i_++;
         }
         if (i_ > 50) Log() << kWARNING << "warning in event: " << e
                              << ": adaptive volume pre-adjustment reached "
                              << ">50 iterations in while loop (" << i_ << ")" << Endl;

         Float_t nEventsN    = nEventsO;
         Float_t nEventsE    = 0.5*(fNEventsMin + fNEventsMax);
         Float_t scaleO      = 1.0;
         Float_t scaleN      = fInitialScale;
         Float_t scale       = scaleN;
         Float_t scaleBest   = scaleN;
         Float_t nEventsBest = nEventsN;

         for (Int_t ic=1; ic<fMaxVIterations; ic++) {
            if (nEventsN < fNEventsMin || nEventsN > fNEventsMax) {

               // search for events in rescaled volume
               Volume* v = new Volume( *volume );
               v->ScaleInterval( scale );
               nEventsN  = fBinaryTree->SearchVolume( v );

               // determine the next iteration (linear approximation)
               if (nEventsN > 1 && nEventsN - nEventsO != 0) {
                  if (scaleN - scaleO != 0) scale += (scaleN - scaleO)/(nEventsN - nEventsO)*(nEventsE - nEventsN);
                  else                      scale += -0.01; // should not actually occur...
               }
               else scale += 0.5; // use a much larger volume

               // remember the scale just tried
               scaleN   = scale;

               // take it if better (but don't accept it if the number of events is too small)
               if (TMath::Abs(nEventsN - nEventsE) < TMath::Abs(nEventsBest - nEventsE) &&
                   (nEventsN >= fNEventsMin || nEventsBest < nEventsN)) {
                  nEventsBest = nEventsN;
                  scaleBest   = scale;
               }

               v->Delete();
               delete v;
            }
            else break;
         }

         // last sanity check
         nEventsN = nEventsBest;
         // include "1" to cover float precision
         if (nEventsN < fNEventsMin-1 || nEventsN > fNEventsMax+1)
            Log() << kWARNING << "warning in event " << e
                  << ": adaptive volume adjustment reached "
                  << "max. #iterations (" << fMaxVIterations << ") "
                  << "[ nEvents: " << nEventsN << "  " << fNEventsMin << "  " << fNEventsMax << " ]"
                  << Endl;

         volume->ScaleInterval( scaleBest );
         fBinaryTree->SearchVolume( volume, &events );
      }
        
   } // end of adaptive method
   else if (fVRangeMode == kkNN)
      {
         Volume v(*volume);
         
         events.clear();
         // check the number of events in the initial volume;
         Int_t kNNcount = fBinaryTree->SearchVolumeWithMaxLimit( &v, &events, fkNNMax+1 );
         // if this number is too large, the search stops at fkNNMax+1

         Int_t t_times = 0;  // number of iterations
      
         while ( !(kNNcount >= fkNNMin && kNNcount <= fkNNMax) ) {
            if (kNNcount < fkNNMin) {         // too few points: enlarge the volume
               Float_t scale = 2;
               volume->ScaleInterval( scale );
            }
            else if (kNNcount > fkNNMax) {    // too many points: shrink the volume
               Float_t scale = 0.1;
               volume->ScaleInterval( scale );
            }
            events.clear();
          
            v = *volume ;
         
            kNNcount = fBinaryTree->SearchVolumeWithMaxLimit( &v, &events, fkNNMax+1 );  //new search
         
            t_times++;
         
            if (t_times == fMaxVIterations) {
               Log() << kWARNING << "warning in event " << e
                     << ": kNN volume adjustment reached "
                     << "max. #iterations (" << fMaxVIterations << ") "
                     << "[ kNN: " << fkNNMin << " " << fkNNMax << " ]" << Endl;
               break;
            }
         }
      
         //vector to normalize distance in each dimension
         Double_t *dim_normalization = new Double_t[GetNvar()];
         for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
            dim_normalization [ivar] = 1.0 / ((*v.fUpper)[ivar] - (*v.fLower)[ivar]);
         }      

         std::vector<const BinarySearchTreeNode*> tempVector;    // temporary vector for events
      
         if (kNNcount >= fkNNMin) {
            std::vector<Double_t> *distances = new std::vector<Double_t>( kNNcount );
         
            // compute the distance for each event
            for (Int_t j=0;j< Int_t(events.size()) ;j++)
               (*distances)[j] = GetNormalizedDistance ( e, *events[j], dim_normalization );
         
            // find the fkNNMin-th smallest distance
            std::vector<Double_t>::iterator wsk = distances->begin();
            for (Int_t j=0;j<fkNNMin-1;j++) wsk++;
            std::nth_element( distances->begin(), wsk, distances->end() );
         
            // keep all events that are not farther away than the fkNNMin-th element
            for (Int_t j=0;j<Int_t(events.size());j++) {
               Double_t dist = GetNormalizedDistance( e, *events[j], dim_normalization );
               
               if (dist <= (*distances)[fkNNMin-1])        
                  tempVector.push_back( events[j] );
            }      
            fMax_distance = (*distances)[fkNNMin-1];
            delete distances;
         }
         delete [] dim_normalization;
         events = tempVector;
      }
   else {

      // troubles ahead...
      Log() << kFATAL << "<GetSample> unknown RangeMode: " << fVRangeMode << Endl;
   }
   // -----------------------------------------------------------------------
}
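
// The adaptive branch above adjusts the volume rescaling factor with a linear
// (secant-like) step towards the target event count
// N_E = (fNEventsMin + fNEventsMax)/2:
//
//    scale_{k+1} = scale_k + (scale_N - scale_O)/(N_N - N_O) * (N_E - N_N)
//
// where (scale_O, N_O) and (scale_N, N_N) are the previous and the current
// scale/event-count pairs.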

//_______________________________________________________________________
Double_t TMVA::MethodPDERS::CRScalc( const Event& e )
{
   std::vector<const BinarySearchTreeNode*> events;

   // computes event weight by counting number of signal and background 
   // events (of reference sample) that are found within given volume
   // defined by the event
   std::vector<Double_t> *lb = new std::vector<Double_t>( GetNvar() );
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) (*lb)[ivar] = e.GetVal(ivar);

   std::vector<Double_t> *ub = new std::vector<Double_t>( *lb );
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      (*lb)[ivar] -= (*fDelta)[ivar]*(1.0 - (*fShift)[ivar]);
      (*ub)[ivar] += (*fDelta)[ivar]*(*fShift)[ivar];
   }

   Volume *volume = new Volume( lb, ub );
   
   GetSample( e, events, volume );
   Double_t count = CKernelEstimate( e, events, *volume );
   delete volume;
   delete lb;
   delete ub;

   return count;
}

//_______________________________________________________________________
void TMVA::MethodPDERS::RRScalc( const Event& e, std::vector<Float_t>* count )
{
   std::vector<const BinarySearchTreeNode*> events;

   // computes event weight by counting number of signal and background 
   // events (of reference sample) that are found within given volume
   // defined by the event
   std::vector<Double_t> *lb = new std::vector<Double_t>( GetNvar() );
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) (*lb)[ivar] = e.GetVal(ivar);

   std::vector<Double_t> *ub = new std::vector<Double_t>( *lb );
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      (*lb)[ivar] -= (*fDelta)[ivar]*(1.0 - (*fShift)[ivar]);
      (*ub)[ivar] += (*fDelta)[ivar]*(*fShift)[ivar];
   }
   Volume *volume = new Volume( lb, ub );
   
   GetSample( e, events, volume );
   RKernelEstimate( e, events, *volume, count );

   delete volume;

   return;
}
//_______________________________________________________________________
Double_t TMVA::MethodPDERS::CKernelEstimate( const Event & event,
                                             std::vector<const BinarySearchTreeNode*>& events, Volume& v )
{
   // normalization factors so we can work with radius 1 hyperspheres
   Double_t *dim_normalization = new Double_t[GetNvar()];
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++)
      dim_normalization [ivar] = 2 / ((*v.fUpper)[ivar] - (*v.fLower)[ivar]);
  
   Double_t pdfSumS = 0;
   Double_t pdfSumB = 0;

   // iteration over sample points
   for (std::vector<const BinarySearchTreeNode*>::iterator iev = events.begin(); iev != events.end(); iev++) {

      // first switch to the one-dimensional normalized distance
      Double_t normalized_distance = GetNormalizedDistance (event, *(*iev), dim_normalization);

      // always working within the hyperellipsoid, except for the box kernel;
      // note that the rejection ratio goes to 1 as nvar goes to infinity
      if (normalized_distance > 1 && fKernelEstimator != kBox) continue;

      if ( (*iev)->IsSignal() )
         pdfSumS += ApplyKernelFunction (normalized_distance) * (*iev)->GetWeight();
      else
         pdfSumB += ApplyKernelFunction (normalized_distance) * (*iev)->GetWeight();
   }
   delete [] dim_normalization;

   pdfSumS = KernelNormalization( pdfSumS < 0. ? 0. : pdfSumS );
   pdfSumB = KernelNormalization( pdfSumB < 0. ? 0. : pdfSumB );

   if (pdfSumS < 1e-20 && pdfSumB < 1e-20) return 0.5;
   if (pdfSumB < 1e-20) return 1.0;
   if (pdfSumS < 1e-20) return 0.0;

   Float_t r = pdfSumB*fScaleB/(pdfSumS*fScaleS);
   return 1.0/(r + 1.0);   // TODO: propagate errors from here
}
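
// Written out, the value returned above is (with f_S = fScaleS, f_B = fScaleB,
// K the kernel function and d_i the normalized distance of training event i):
//
//    y_{PDERS} = \frac{ f_S \sum_{i \in S} K(d_i)\, w_i }
//                     { f_S \sum_{i \in S} K(d_i)\, w_i  +  f_B \sum_{i \in B} K(d_i)\, w_i }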

//_______________________________________________________________________
void TMVA::MethodPDERS::RKernelEstimate( const Event & event,
					 std::vector<const BinarySearchTreeNode*>& events, Volume& v,
					 std::vector<Float_t>* pdfSum )
{
   // normalization factors so we can work with radius 1 hyperspheres
   Double_t *dim_normalization = new Double_t[GetNvar()];
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++)
      dim_normalization [ivar] = 2 / ((*v.fUpper)[ivar] - (*v.fLower)[ivar]);

   pdfSum->clear();
   Float_t pdfDiv = 0;
   fNRegOut = 1; // for now, regression is just for 1 dimension

   for (Int_t ivar = 0; ivar < fNRegOut ; ivar++)
      pdfSum->push_back( 0 );

   // Iteration over sample points
   for (std::vector<const BinarySearchTreeNode*>::iterator iev = events.begin(); iev != events.end(); iev++) {
    
      // first switch to the one-dimensional normalized distance
      Double_t normalized_distance = GetNormalizedDistance (event, *(*iev), dim_normalization);

      // always working within the hyperellipsoid, except for the box kernel;
      // note that the rejection ratio goes to 1 as nvar goes to infinity
      if (normalized_distance > 1 && fKernelEstimator != kBox) continue;      
      
      for (Int_t ivar = 0; ivar < fNRegOut ; ivar++) {
         pdfSum->at(ivar) += ApplyKernelFunction (normalized_distance) * (*iev)->GetWeight() * (*iev)->GetTargets()[ivar];
         pdfDiv           += ApplyKernelFunction (normalized_distance) * (*iev)->GetWeight();
      } 
   }
   delete [] dim_normalization;

   if (pdfDiv == 0)
      return;

   for (Int_t ivar = 0; ivar < fNRegOut ; ivar++)
      pdfSum->at(ivar) /= pdfDiv;

   return;
}

//_______________________________________________________________________
Double_t TMVA::MethodPDERS::ApplyKernelFunction (Double_t normalized_distance) 
{
   // from the normalized Euclidean distance calculate the kernel value
   // for a given kernel type
   switch (fKernelEstimator) {
   case kBox:
   case kSphere:
      return 1;
      break;
   case kTeepee:
      return (1 - normalized_distance);
      break;
   case kGauss:
      return TMath::Gaus( normalized_distance, 0, fGaussSigmaNorm, kFALSE);
      break;
   case kSinc3:
   case kSinc5:
   case kSinc7:
   case kSinc9:
   case kSinc11: {
      Double_t side_crossings = 2 + ((int) fKernelEstimator) - ((int) kSinc3);
      return NormSinc (side_crossings * normalized_distance);
   }
      break;
   case kLanczos2:
      return LanczosFilter (2, normalized_distance);
      break;
   case kLanczos3:
      return LanczosFilter (3, normalized_distance);
      break;
   case kLanczos5:
      return LanczosFilter (5, normalized_distance);
      break;
   case kLanczos8:
      return LanczosFilter (8, normalized_distance);
      break;
   case kTrim: {
      Double_t x = normalized_distance / fMax_distance;
      x = 1 - x*x*x;
      return x*x*x;
   }
      break;
   default:
      Log() << kFATAL << "Kernel estimation function unsupported. Enumerator is " << fKernelEstimator << Endl;
      break;
   }

   return 0;
}
      
//_______________________________________________________________________
Double_t TMVA::MethodPDERS::KernelNormalization (Double_t pdf) 
{
   // Calculating the normalization factor only once (might need a reset at
   // some point; can the method be restarted with different parameters?)

   // Caching is initialised to 1, which short-circuits the calculation below
   // and effectively disables the normalization; it is not really useful
   // after all, badly implemented and untested :-)
   static Double_t ret = 1.0; 
   
   if (ret != 0.0) return ret*pdf; 

   // We first normalize by the volume of the hypersphere.
   switch (fKernelEstimator) {
   case kBox:
   case kSphere:
      ret = 1.;
      break;
   case kTeepee:
      ret =   (GetNvar() * (GetNvar() + 1) * TMath::Gamma (((Double_t) GetNvar()) / 2.)) /
         ( TMath::Power (2., (Double_t) GetNvar() + 1) * TMath::Power (TMath::Pi(), ((Double_t) GetNvar()) / 2.));
      break;
   case kGauss:
      // We use full range integral here. Reasonable because of the fast function decay.
      ret = 1. / TMath::Power ( 2 * TMath::Pi() * fGaussSigmaNorm * fGaussSigmaNorm, ((Double_t) GetNvar()) / 2.);
      break;
   case kSinc3:
   case kSinc5:
   case kSinc7:
   case kSinc9:
   case kSinc11:
   case kLanczos2:
   case kLanczos3:
   case kLanczos5:
   case kLanczos8:
      // We use the full range integral here. Reasonable because the central lobe dominates it.
      ret = 1 / TMath::Power ( 2., (Double_t) GetNvar() );
      break;
   default:
      Log() << kFATAL << "Kernel estimation function unsupported. Enumerator is " << fKernelEstimator << Endl;
   }

   // Normalizing by the full volume
   ret *= ( TMath::Power (2., GetNvar()) * TMath::Gamma (1 + (((Double_t) GetNvar()) / 2.)) ) /
      TMath::Power (TMath::Pi(), ((Double_t) GetNvar()) / 2.);

   return ret*pdf;
}
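
// The final factor applied above,
//
//    \frac{ 2^{N_{var}} \, \Gamma(1 + N_{var}/2) }{ \pi^{N_{var}/2} },
//
// is the ratio of the volume of the enclosing hypercube [-1,1]^{N_{var}}
// (= 2^{N_{var}}) to the volume of the unit hypersphere
// (= \pi^{N_{var}/2} / \Gamma(1 + N_{var}/2)).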

//_______________________________________________________________________
Double_t TMVA::MethodPDERS::GetNormalizedDistance ( const Event &base_event,
                                                    const BinarySearchTreeNode &sample_event,
                                                    Double_t *dim_normalization) 
{
   // We use the Euclidean metric here. It might not be the best or most efficient choice.
   Double_t ret=0;
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      Double_t dist = dim_normalization[ivar] * (sample_event.GetEventV()[ivar] - base_event.GetVal(ivar));
      ret += dist*dist;
   }
   // DD 26.11.2008: bugfix: division by (GetNvar()) was missing for correct normalisation
   ret /= GetNvar();
   return TMath::Sqrt (ret);
}
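
// In formula form, with w_i the per-dimension factors passed in as
// dim_normalization:
//
//    d(x, y) = \sqrt{ \frac{1}{N_{var}} \sum_{i=1}^{N_{var}} \big( w_i (x_i - y_i) \big)^2 }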

//_______________________________________________________________________
Double_t TMVA::MethodPDERS::NormSinc (Double_t x)
{
   // NormSinc
   if (x < 10e-10 && x > -10e-10) {
      return 1; // Poor man's l'Hopital
   }

   Double_t pix = TMath::Pi() * x;
   Double_t sinc = TMath::Sin(pix) / pix;
   Double_t ret;

   if (GetNvar() % 2)
      ret = TMath::Power (sinc, GetNvar());
   else
      ret = TMath::Abs (sinc) * TMath::Power (sinc, GetNvar() - 1);

   return ret;
}

//_______________________________________________________________________
Double_t TMVA::MethodPDERS::LanczosFilter (Int_t level, Double_t x)
{
   // Lanczos Filter
   if (x < 10e-10 && x > -10e-10) {
      return 1; // Poor man's l'Hopital
   }

   Double_t pix = TMath::Pi() * x;
   Double_t pixtimesn = pix * ((Double_t) level);
   Double_t lanczos = (TMath::Sin(pix) / pix) * (TMath::Sin(pixtimesn) / pixtimesn);
   Double_t ret;

   if (GetNvar() % 2) ret = TMath::Power (lanczos, GetNvar());
   else               ret = TMath::Abs (lanczos) * TMath::Power (lanczos, GetNvar() - 1);

   return ret;
}
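
// Both kernels above are built from sinc(t) = sin(\pi t) / (\pi t):
//
//    NormSinc(x)         = sinc(x)^{N_{var}}
//    LanczosFilter(l, x) = ( sinc(x) \, sinc(l x) )^{N_{var}}
//
// for even N_{var} the |.|*(.)^{N_{var}-1} form is used so that the sign of
// the underlying sinc (or Lanczos) value is preserved.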

//_______________________________________________________________________
Float_t TMVA::MethodPDERS::GetError( Float_t countS, Float_t countB,
                                     Float_t sumW2S, Float_t sumW2B ) const
{
   // statistical error estimate for RS estimator

   Float_t c = fScaleB/fScaleS;
   Float_t d = countS + c*countB; d *= d;

   if (d < 1e-10) return 1; // Error is zero because of B = S = 0

   Float_t f = c*c/d/d;
   Float_t err = f*countB*countB*sumW2S + f*countS*countS*sumW2B;

   if (err < 1e-10) return 1; // Error is zero because of B or S = 0

   return sqrt(err);
}
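
// This is standard error propagation for y = S / (S + cB) with c = fScaleB/fScaleS:
//
//    \frac{\partial y}{\partial S} =  \frac{c B}{(S + cB)^2}, \qquad
//    \frac{\partial y}{\partial B} = -\frac{c S}{(S + cB)^2}
//
//    \sigma_y = \frac{c}{(S + cB)^2} \sqrt{ B^2 \sigma_S^2 + S^2 \sigma_B^2 }
//
// where \sigma_S^2 and \sigma_B^2 are the sums of squared weights (sumW2S, sumW2B).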

//_______________________________________________________________________
void TMVA::MethodPDERS::WriteWeightsToStream( ostream& o ) const
{
   // write only a short comment to file
   if (TxtWeightsOnly()) {
      if (fBinaryTree)
         o << *fBinaryTree;
      else
         Log() << kFATAL << "Signal and background binary search tree not available" << Endl; 
   } 
   else {
      TString rfname( GetWeightFileName() ); rfname.ReplaceAll( ".txt", ".root" );
      o << "# weights stored in root i/o file: " << rfname << std::endl;  
   }
}

//_______________________________________________________________________
void TMVA::MethodPDERS::AddWeightsXMLTo( void* parent ) const 
{
   // write weights to xml file
   void* wght = gTools().xmlengine().NewChild(parent, 0, "Weights");
   if (fBinaryTree)
      fBinaryTree->AddXMLTo(wght);
   else
      Log() << kFATAL << "Signal and background binary search tree not available" << Endl; 
   //Log() << kFATAL << "Please implement writing of weights as XML" << Endl;
}

//_______________________________________________________________________
void TMVA::MethodPDERS::ReadWeightsFromXML( void* wghtnode)
{
   if (NULL != fBinaryTree) delete fBinaryTree; 
   void* treenode = gTools().xmlengine().GetChild(wghtnode);
   fBinaryTree = dynamic_cast<BinarySearchTree*>(TMVA::BinaryTree::CreateFromXML(treenode));
   fBinaryTree->SetPeriode( GetNvar() );
   fBinaryTree->CalcStatistics();
   fBinaryTree->CountNodes();
   fScaleS = 1.0/fBinaryTree->GetSumOfWeights( Types::kSignal );
   fScaleB = 1.0/fBinaryTree->GetSumOfWeights( Types::kBackground );
   Log() << kINFO << "signal and background scales: " << fScaleS << " " << fScaleB << Endl;
   CalcAverages();
   SetVolumeElement();
   fInitializedVolumeEle = kTRUE;
}

//_______________________________________________________________________
void TMVA::MethodPDERS::ReadWeightsFromStream( istream& istr)
{
   // read weight info from file
   if (TxtWeightsOnly()) {
      if (NULL != fBinaryTree) delete fBinaryTree;

      fBinaryTree = new BinarySearchTree();

      istr >> *fBinaryTree;

      fBinaryTree->SetPeriode( GetNvar() );

      fBinaryTree->CalcStatistics();

      fBinaryTree->CountNodes();

      // these are the signal and background scales for the weights
      fScaleS = 1.0/fBinaryTree->GetSumOfWeights( Types::kSignal );
      fScaleB = 1.0/fBinaryTree->GetSumOfWeights( Types::kBackground );

      Log() << kINFO << "signal and background scales: " << fScaleS << " " << fScaleB << Endl;

      CalcAverages();

      SetVolumeElement();

      fInitializedVolumeEle = kTRUE;
   }
}

//_______________________________________________________________________
void TMVA::MethodPDERS::WriteWeightsToStream( TFile& ) const
{
   // write training sample (TTree) to file
}

//_______________________________________________________________________
void TMVA::MethodPDERS::ReadWeightsFromStream( TFile& /*rf*/ )
{
   // read training sample from file
}

//_______________________________________________________________________
void TMVA::MethodPDERS::MakeClassSpecific( std::ostream& fout, const TString& className ) const
{
   // write specific classifier response
   fout << "   // not implemented for class: \"" << className << "\"" << std::endl;
   fout << "};" << std::endl;
}

//_______________________________________________________________________
void TMVA::MethodPDERS::GetHelpMessage() const
{
   // get help message text
   //
   // typical length of text line: 
   //         "|--------------------------------------------------------------|"
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Short description:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "PDERS is a generalization of the projective likelihood classifier " << Endl;
   Log() << "to N dimensions, where N is the number of input variables used." << Endl;
   Log() << "In its adaptive form it is mostly equivalent to k-Nearest-Neighbor" << Endl;
   Log() << "(k-NN) methods. If the multidimensional PDF for signal and background" << Endl;
   Log() << "were known, this classifier would exploit the full information" << Endl;
   Log() << "contained in the input variables, and would hence be optimal. In " << Endl;
   Log() << "practice however, huge training samples are necessary to sufficiently " << Endl;
   Log() << "populate the multidimensional phase space. " << Endl;
   Log() << Endl;
   Log() << "The simplest implementation of PDERS counts the number of signal" << Endl;
   Log() << "and background events in the vicinity of a test event, and returns" << Endl;
   Log() << "a weight according to the majority species of the neighboring events." << Endl;
   Log() << "A more involved version of PDERS (selected by the option \"KernelEstimator\")" << Endl;
   Log() << "uses Kernel estimation methods to approximate the shape of the PDF." << Endl;
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Performance optimisation:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "PDERS can be very powerful in case of strongly non-linear problems, " << Endl;
   Log() << "e.g., distinct islands of signal and background regions. Because of " << Endl;
   Log() << "the exponential growth of the phase space, it is important to restrict" << Endl;
   Log() << "the number of input variables (dimension) to the strictly necessary." << Endl;
   Log() << Endl;
   Log() << "Note that PDERS is a slowly responding classifier. Moreover, the necessity" << Endl;
   Log() << "to store the entire binary tree in memory, to avoid accessing virtual " << Endl;
   Log() << "memory, limits the number of training events that can effectively be " << Endl;
   Log() << "used to model the multidimensional PDF." << Endl;
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Performance tuning via configuration options:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "If the PDERS response is found too slow when using the adaptive volume " << Endl;
   Log() << "size (option \"VolumeRangeMode=Adaptive\"), it might be found beneficial" << Endl;
   Log() << "to reduce the number of events required in the volume, and/or to enlarge" << Endl;
   Log() << "the allowed range (\"NeventsMin/Max\"). PDERS is relatively insensitive" << Endl;
   Log() << "to the width (\"GaussSigma\") of the Gaussian kernel (if used)." << Endl;
}
 MethodPDERS.cxx:710
 MethodPDERS.cxx:711
 MethodPDERS.cxx:712
 MethodPDERS.cxx:713
 MethodPDERS.cxx:714
 MethodPDERS.cxx:715
 MethodPDERS.cxx:716
 MethodPDERS.cxx:717
 MethodPDERS.cxx:718
 MethodPDERS.cxx:719
 MethodPDERS.cxx:720
 MethodPDERS.cxx:721
 MethodPDERS.cxx:722
 MethodPDERS.cxx:723
 MethodPDERS.cxx:724
 MethodPDERS.cxx:725
 MethodPDERS.cxx:726
 MethodPDERS.cxx:727
 MethodPDERS.cxx:728
 MethodPDERS.cxx:729
 MethodPDERS.cxx:730
 MethodPDERS.cxx:731
 MethodPDERS.cxx:732
 MethodPDERS.cxx:733
 MethodPDERS.cxx:734
 MethodPDERS.cxx:735
 MethodPDERS.cxx:736
 MethodPDERS.cxx:737
 MethodPDERS.cxx:738
 MethodPDERS.cxx:739
 MethodPDERS.cxx:740
 MethodPDERS.cxx:741
 MethodPDERS.cxx:742
 MethodPDERS.cxx:743
 MethodPDERS.cxx:744
 MethodPDERS.cxx:745
 MethodPDERS.cxx:746
 MethodPDERS.cxx:747
 MethodPDERS.cxx:748
 MethodPDERS.cxx:749
 MethodPDERS.cxx:750
 MethodPDERS.cxx:751
 MethodPDERS.cxx:752
 MethodPDERS.cxx:753
 MethodPDERS.cxx:754
 MethodPDERS.cxx:755
 MethodPDERS.cxx:756
 MethodPDERS.cxx:757
 MethodPDERS.cxx:758
 MethodPDERS.cxx:759
 MethodPDERS.cxx:760
 MethodPDERS.cxx:761
 MethodPDERS.cxx:762
 MethodPDERS.cxx:763
 MethodPDERS.cxx:764
 MethodPDERS.cxx:765
 MethodPDERS.cxx:766
 MethodPDERS.cxx:767
 MethodPDERS.cxx:768
 MethodPDERS.cxx:769
 MethodPDERS.cxx:770
 MethodPDERS.cxx:771
 MethodPDERS.cxx:772
 MethodPDERS.cxx:773
 MethodPDERS.cxx:774
 MethodPDERS.cxx:775
 MethodPDERS.cxx:776
 MethodPDERS.cxx:777
 MethodPDERS.cxx:778
 MethodPDERS.cxx:779
 MethodPDERS.cxx:780
 MethodPDERS.cxx:781
 MethodPDERS.cxx:782
 MethodPDERS.cxx:783
 MethodPDERS.cxx:784
 MethodPDERS.cxx:785
 MethodPDERS.cxx:786
 MethodPDERS.cxx:787
 MethodPDERS.cxx:788
 MethodPDERS.cxx:789
 MethodPDERS.cxx:790
 MethodPDERS.cxx:791
 MethodPDERS.cxx:792
 MethodPDERS.cxx:793
 MethodPDERS.cxx:794
 MethodPDERS.cxx:795
 MethodPDERS.cxx:796
 MethodPDERS.cxx:797
 MethodPDERS.cxx:798
 MethodPDERS.cxx:799
 MethodPDERS.cxx:800
 MethodPDERS.cxx:801
 MethodPDERS.cxx:802
 MethodPDERS.cxx:803
 MethodPDERS.cxx:804
 MethodPDERS.cxx:805
 MethodPDERS.cxx:806
 MethodPDERS.cxx:807
 MethodPDERS.cxx:808
 MethodPDERS.cxx:809
 MethodPDERS.cxx:810
 MethodPDERS.cxx:811
 MethodPDERS.cxx:812
 MethodPDERS.cxx:813
 MethodPDERS.cxx:814
 MethodPDERS.cxx:815
 MethodPDERS.cxx:816
 MethodPDERS.cxx:817
 MethodPDERS.cxx:818
 MethodPDERS.cxx:819
 MethodPDERS.cxx:820
 MethodPDERS.cxx:821
 MethodPDERS.cxx:822
 MethodPDERS.cxx:823
 MethodPDERS.cxx:824
 MethodPDERS.cxx:825
 MethodPDERS.cxx:826
 MethodPDERS.cxx:827
 MethodPDERS.cxx:828
 MethodPDERS.cxx:829
 MethodPDERS.cxx:830
 MethodPDERS.cxx:831
 MethodPDERS.cxx:832
 MethodPDERS.cxx:833
 MethodPDERS.cxx:834
 MethodPDERS.cxx:835
 MethodPDERS.cxx:836
 MethodPDERS.cxx:837
 MethodPDERS.cxx:838
 MethodPDERS.cxx:839
 MethodPDERS.cxx:840
 MethodPDERS.cxx:841
 MethodPDERS.cxx:842
 MethodPDERS.cxx:843
 MethodPDERS.cxx:844
 MethodPDERS.cxx:845
 MethodPDERS.cxx:846
 MethodPDERS.cxx:847
 MethodPDERS.cxx:848
 MethodPDERS.cxx:849
 MethodPDERS.cxx:850
 MethodPDERS.cxx:851
 MethodPDERS.cxx:852
 MethodPDERS.cxx:853
 MethodPDERS.cxx:854
 MethodPDERS.cxx:855
 MethodPDERS.cxx:856
 MethodPDERS.cxx:857
 MethodPDERS.cxx:858
 MethodPDERS.cxx:859
 MethodPDERS.cxx:860
 MethodPDERS.cxx:861
 MethodPDERS.cxx:862
 MethodPDERS.cxx:863
 MethodPDERS.cxx:864
 MethodPDERS.cxx:865
 MethodPDERS.cxx:866
 MethodPDERS.cxx:867
 MethodPDERS.cxx:868
 MethodPDERS.cxx:869
 MethodPDERS.cxx:870
 MethodPDERS.cxx:871
 MethodPDERS.cxx:872
 MethodPDERS.cxx:873
 MethodPDERS.cxx:874
 MethodPDERS.cxx:875
 MethodPDERS.cxx:876
 MethodPDERS.cxx:877
 MethodPDERS.cxx:878
 MethodPDERS.cxx:879
 MethodPDERS.cxx:880
 MethodPDERS.cxx:881
 MethodPDERS.cxx:882
 MethodPDERS.cxx:883
 MethodPDERS.cxx:884
 MethodPDERS.cxx:885
 MethodPDERS.cxx:886
 MethodPDERS.cxx:887
 MethodPDERS.cxx:888
 MethodPDERS.cxx:889
 MethodPDERS.cxx:890
 MethodPDERS.cxx:891
 MethodPDERS.cxx:892
 MethodPDERS.cxx:893
 MethodPDERS.cxx:894
 MethodPDERS.cxx:895
 MethodPDERS.cxx:896
 MethodPDERS.cxx:897
 MethodPDERS.cxx:898
 MethodPDERS.cxx:899
 MethodPDERS.cxx:900
 MethodPDERS.cxx:901
 MethodPDERS.cxx:902
 MethodPDERS.cxx:903
 MethodPDERS.cxx:904
 MethodPDERS.cxx:905
 MethodPDERS.cxx:906
 MethodPDERS.cxx:907
 MethodPDERS.cxx:908
 MethodPDERS.cxx:909
 MethodPDERS.cxx:910
 MethodPDERS.cxx:911
 MethodPDERS.cxx:912
 MethodPDERS.cxx:913
 MethodPDERS.cxx:914
 MethodPDERS.cxx:915
 MethodPDERS.cxx:916
 MethodPDERS.cxx:917
 MethodPDERS.cxx:918
 MethodPDERS.cxx:919
 MethodPDERS.cxx:920
 MethodPDERS.cxx:921
 MethodPDERS.cxx:922
 MethodPDERS.cxx:923
 MethodPDERS.cxx:924
 MethodPDERS.cxx:925
 MethodPDERS.cxx:926
 MethodPDERS.cxx:927
 MethodPDERS.cxx:928
 MethodPDERS.cxx:929
 MethodPDERS.cxx:930
 MethodPDERS.cxx:931
 MethodPDERS.cxx:932
 MethodPDERS.cxx:933
 MethodPDERS.cxx:934
 MethodPDERS.cxx:935
 MethodPDERS.cxx:936
 MethodPDERS.cxx:937
 MethodPDERS.cxx:938
 MethodPDERS.cxx:939
 MethodPDERS.cxx:940
 MethodPDERS.cxx:941
 MethodPDERS.cxx:942
 MethodPDERS.cxx:943
 MethodPDERS.cxx:944
 MethodPDERS.cxx:945
 MethodPDERS.cxx:946
 MethodPDERS.cxx:947
 MethodPDERS.cxx:948
 MethodPDERS.cxx:949
 MethodPDERS.cxx:950
 MethodPDERS.cxx:951
 MethodPDERS.cxx:952
 MethodPDERS.cxx:953
 MethodPDERS.cxx:954
 MethodPDERS.cxx:955
 MethodPDERS.cxx:956
 MethodPDERS.cxx:957
 MethodPDERS.cxx:958
 MethodPDERS.cxx:959
 MethodPDERS.cxx:960
 MethodPDERS.cxx:961
 MethodPDERS.cxx:962
 MethodPDERS.cxx:963
 MethodPDERS.cxx:964
 MethodPDERS.cxx:965
 MethodPDERS.cxx:966
 MethodPDERS.cxx:967
 MethodPDERS.cxx:968
 MethodPDERS.cxx:969
 MethodPDERS.cxx:970
 MethodPDERS.cxx:971
 MethodPDERS.cxx:972
 MethodPDERS.cxx:973
 MethodPDERS.cxx:974
 MethodPDERS.cxx:975
 MethodPDERS.cxx:976
 MethodPDERS.cxx:977
 MethodPDERS.cxx:978
 MethodPDERS.cxx:979
 MethodPDERS.cxx:980
 MethodPDERS.cxx:981
 MethodPDERS.cxx:982
 MethodPDERS.cxx:983
 MethodPDERS.cxx:984
 MethodPDERS.cxx:985
 MethodPDERS.cxx:986
 MethodPDERS.cxx:987
 MethodPDERS.cxx:988
 MethodPDERS.cxx:989
 MethodPDERS.cxx:990
 MethodPDERS.cxx:991
 MethodPDERS.cxx:992
 MethodPDERS.cxx:993
 MethodPDERS.cxx:994
 MethodPDERS.cxx:995
 MethodPDERS.cxx:996
 MethodPDERS.cxx:997
 MethodPDERS.cxx:998
 MethodPDERS.cxx:999
 MethodPDERS.cxx:1000
 MethodPDERS.cxx:1001
 MethodPDERS.cxx:1002
 MethodPDERS.cxx:1003
 MethodPDERS.cxx:1004
 MethodPDERS.cxx:1005
 MethodPDERS.cxx:1006
 MethodPDERS.cxx:1007
 MethodPDERS.cxx:1008
 MethodPDERS.cxx:1009
 MethodPDERS.cxx:1010
 MethodPDERS.cxx:1011
 MethodPDERS.cxx:1012
 MethodPDERS.cxx:1013
 MethodPDERS.cxx:1014
 MethodPDERS.cxx:1015
 MethodPDERS.cxx:1016
 MethodPDERS.cxx:1017
 MethodPDERS.cxx:1018
 MethodPDERS.cxx:1019
 MethodPDERS.cxx:1020
 MethodPDERS.cxx:1021
 MethodPDERS.cxx:1022
 MethodPDERS.cxx:1023
 MethodPDERS.cxx:1024
 MethodPDERS.cxx:1025
 MethodPDERS.cxx:1026
 MethodPDERS.cxx:1027
 MethodPDERS.cxx:1028
 MethodPDERS.cxx:1029
 MethodPDERS.cxx:1030
 MethodPDERS.cxx:1031
 MethodPDERS.cxx:1032
 MethodPDERS.cxx:1033
 MethodPDERS.cxx:1034
 MethodPDERS.cxx:1035
 MethodPDERS.cxx:1036
 MethodPDERS.cxx:1037
 MethodPDERS.cxx:1038
 MethodPDERS.cxx:1039
 MethodPDERS.cxx:1040
 MethodPDERS.cxx:1041
 MethodPDERS.cxx:1042
 MethodPDERS.cxx:1043
 MethodPDERS.cxx:1044
 MethodPDERS.cxx:1045
 MethodPDERS.cxx:1046
 MethodPDERS.cxx:1047
 MethodPDERS.cxx:1048
 MethodPDERS.cxx:1049
 MethodPDERS.cxx:1050
 MethodPDERS.cxx:1051
 MethodPDERS.cxx:1052
 MethodPDERS.cxx:1053
 MethodPDERS.cxx:1054
 MethodPDERS.cxx:1055
 MethodPDERS.cxx:1056
 MethodPDERS.cxx:1057
 MethodPDERS.cxx:1058
 MethodPDERS.cxx:1059
 MethodPDERS.cxx:1060
 MethodPDERS.cxx:1061
 MethodPDERS.cxx:1062
 MethodPDERS.cxx:1063
 MethodPDERS.cxx:1064
 MethodPDERS.cxx:1065
 MethodPDERS.cxx:1066
 MethodPDERS.cxx:1067
 MethodPDERS.cxx:1068
 MethodPDERS.cxx:1069
 MethodPDERS.cxx:1070
 MethodPDERS.cxx:1071
 MethodPDERS.cxx:1072
 MethodPDERS.cxx:1073
 MethodPDERS.cxx:1074
 MethodPDERS.cxx:1075
 MethodPDERS.cxx:1076
 MethodPDERS.cxx:1077
 MethodPDERS.cxx:1078
 MethodPDERS.cxx:1079
 MethodPDERS.cxx:1080
 MethodPDERS.cxx:1081
 MethodPDERS.cxx:1082
 MethodPDERS.cxx:1083
 MethodPDERS.cxx:1084
 MethodPDERS.cxx:1085
 MethodPDERS.cxx:1086
 MethodPDERS.cxx:1087
 MethodPDERS.cxx:1088
 MethodPDERS.cxx:1089
 MethodPDERS.cxx:1090
 MethodPDERS.cxx:1091
 MethodPDERS.cxx:1092
 MethodPDERS.cxx:1093
 MethodPDERS.cxx:1094
 MethodPDERS.cxx:1095
 MethodPDERS.cxx:1096
 MethodPDERS.cxx:1097
 MethodPDERS.cxx:1098
 MethodPDERS.cxx:1099
 MethodPDERS.cxx:1100
 MethodPDERS.cxx:1101
 MethodPDERS.cxx:1102
 MethodPDERS.cxx:1103
 MethodPDERS.cxx:1104
 MethodPDERS.cxx:1105
 MethodPDERS.cxx:1106
 MethodPDERS.cxx:1107
 MethodPDERS.cxx:1108
 MethodPDERS.cxx:1109
 MethodPDERS.cxx:1110
 MethodPDERS.cxx:1111
 MethodPDERS.cxx:1112
 MethodPDERS.cxx:1113
 MethodPDERS.cxx:1114
 MethodPDERS.cxx:1115
 MethodPDERS.cxx:1116
 MethodPDERS.cxx:1117
 MethodPDERS.cxx:1118
 MethodPDERS.cxx:1119
 MethodPDERS.cxx:1120
 MethodPDERS.cxx:1121
 MethodPDERS.cxx:1122
 MethodPDERS.cxx:1123
 MethodPDERS.cxx:1124
 MethodPDERS.cxx:1125
 MethodPDERS.cxx:1126
 MethodPDERS.cxx:1127
 MethodPDERS.cxx:1128
 MethodPDERS.cxx:1129
 MethodPDERS.cxx:1130
 MethodPDERS.cxx:1131
 MethodPDERS.cxx:1132
 MethodPDERS.cxx:1133
 MethodPDERS.cxx:1134
 MethodPDERS.cxx:1135
 MethodPDERS.cxx:1136
 MethodPDERS.cxx:1137
 MethodPDERS.cxx:1138
 MethodPDERS.cxx:1139
 MethodPDERS.cxx:1140
 MethodPDERS.cxx:1141
 MethodPDERS.cxx:1142
 MethodPDERS.cxx:1143
 MethodPDERS.cxx:1144
 MethodPDERS.cxx:1145
 MethodPDERS.cxx:1146
 MethodPDERS.cxx:1147
 MethodPDERS.cxx:1148
 MethodPDERS.cxx:1149
 MethodPDERS.cxx:1150
 MethodPDERS.cxx:1151
 MethodPDERS.cxx:1152
 MethodPDERS.cxx:1153
 MethodPDERS.cxx:1154
 MethodPDERS.cxx:1155
 MethodPDERS.cxx:1156
 MethodPDERS.cxx:1157
 MethodPDERS.cxx:1158
 MethodPDERS.cxx:1159
 MethodPDERS.cxx:1160
 MethodPDERS.cxx:1161
 MethodPDERS.cxx:1162
 MethodPDERS.cxx:1163
 MethodPDERS.cxx:1164
 MethodPDERS.cxx:1165
 MethodPDERS.cxx:1166
 MethodPDERS.cxx:1167
 MethodPDERS.cxx:1168
 MethodPDERS.cxx:1169