// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Fredrik Tegenfeldt, Helge Voss

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis       *
 * Package: TMVA                                                                  *
 * Class  : RuleFit                                                               *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      A class implementing the RuleFit method.                                  *
 *      It grows a forest of decision trees, extracts an ensemble of rules        *
 *      and fits the rule and (optional) linear-term coefficients.                *
 *      Input: the owning method (MethodBase, in the constructor)                 *
 *                                                                                *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Fredrik Tegenfeldt <Fredrik.Tegenfeldt@cern.ch> - Iowa State U., USA      *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         *
 *      Iowa State U.                                                             *
 *      MPI-K Heidelberg, Germany                                                 *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/LICENSE)                                          *
 **********************************************************************************/

#include <algorithm>

#include "TKey.h"
#include "TRandom3.h"

#include "TMVA/SeparationBase.h"
#include "TMVA/GiniIndex.h"
#include "TMVA/RuleFit.h"
#include "TMVA/MethodRuleFit.h"
#include "TMVA/Timer.h"
#include "TMVA/Tools.h"
#include "TMVA/Factory.h" // for root base dir

ClassImp(TMVA::RuleFit)

//_______________________________________________________________________
TMVA::RuleFit::RuleFit( const MethodBase *rfbase )
: fNTreeSample(0)
   , fNEveEffTrain(0)
   , fMethodRuleFit(0)
   , fMethodBase(0)
   , fVisHistsUseImp( kTRUE )
   , fLogger( new MsgLogger("RuleFit") )
{
   // constructor
   Initialize( rfbase );
   std::srand( randSEED );  // initialize random number generator used by std::random_shuffle
}

//_______________________________________________________________________
TMVA::RuleFit::RuleFit()
   : fNTreeSample(0)
   , fNEveEffTrain(0)
   , fMethodRuleFit(0)
   , fMethodBase(0)
   , fVisHistsUseImp( kTRUE )
   , fLogger( new MsgLogger("RuleFit") )
{
   // default constructor
   std::srand( randSEED ); // initialize random number generator used by std::random_shuffle
}

//_______________________________________________________________________
TMVA::RuleFit::~RuleFit()
{
   // destructor
   delete fLogger;
}

//_______________________________________________________________________
void TMVA::RuleFit::InitNEveEff()
{
   // init effective number of events (using event weights)
   UInt_t neve = fTrainingEvents.size();
   if (neve==0) return;
   //
   fNEveEffTrain = CalcWeightSum( &fTrainingEvents );
   //
}

//_______________________________________________________________________
void TMVA::RuleFit::InitPtrs(  const MethodBase *rfbase )
{
   // initialize pointers
   this->SetMethodBase(rfbase);
   fRuleEnsemble.Initialize( this );
   fRuleFitParams.SetRuleFit( this );
}

//_______________________________________________________________________
void TMVA::RuleFit::Initialize(  const MethodBase *rfbase )
{
   // initialize the parameters of the RuleFit method and make rules
   InitPtrs(rfbase);

   if (fMethodRuleFit){ 
      fMethodRuleFit->Data()->SetCurrentType(Types::kTraining);
      UInt_t nevents = fMethodRuleFit->Data()->GetNTrainingEvents();
      std::vector<const TMVA::Event*> tmp;
      for (UInt_t ievt=0; ievt<nevents; ievt++) {
         const Event *event = fMethodRuleFit->GetEvent(ievt);
         tmp.push_back(event);
      }      
      SetTrainingEvents( tmp );
   }
   //     SetTrainingEvents( fMethodRuleFit->GetTrainingEvents() );

   InitNEveEff();

   MakeForest();

   // Make the model - Rule + Linear (if fDoLinear is true)
   fRuleEnsemble.MakeModel();

   // init rulefit params
   fRuleFitParams.Init();

}

//_______________________________________________________________________
void TMVA::RuleFit::SetMethodBase( const MethodBase *rfbase )
{
   // set MethodBase
   fMethodBase = rfbase;
   fMethodRuleFit = dynamic_cast<const MethodRuleFit *>(rfbase);
}

//_______________________________________________________________________
void TMVA::RuleFit::Copy( const RuleFit& other )
{
   // copy method
   if(this != &other) {
      fMethodRuleFit   = other.GetMethodRuleFit();
      fMethodBase      = other.GetMethodBase();
      fTrainingEvents  = other.GetTrainingEvents();
      //      fSubsampleEvents = other.GetSubsampleEvents();
   
      fForest       = other.GetForest();
      fRuleEnsemble = other.GetRuleEnsemble();
   }
}

//_______________________________________________________________________
Double_t TMVA::RuleFit::CalcWeightSum( const std::vector<const Event *> *events, UInt_t neve )
{
   // calculate the sum of weights
   if (events==0) return 0.0;
   if (neve==0) neve=events->size();
   //
   Double_t sumw=0;
   for (UInt_t ie=0; ie<neve; ie++) {
      sumw += ((*events)[ie])->GetWeight();
   }
   return sumw;
}

//_______________________________________________________________________
void TMVA::RuleFit::SetMsgType( EMsgType t )
{
   // set the current message type for this class and all subtools
   fLogger->SetMinType(t);
   fRuleEnsemble.SetMsgType(t);
   fRuleFitParams.SetMsgType(t);
}

//_______________________________________________________________________
void TMVA::RuleFit::BuildTree( DecisionTree *dt )
{
   // build the decision tree using fNTreeSample events from fTrainingEventsRndm
   if (dt==0) return;
   if (fMethodRuleFit==0) {
      Log() << kFATAL << "RuleFit::BuildTree() - Attempting to build a tree NOT from a MethodRuleFit" << Endl;
   }
   std::vector<const Event *> evevec;
   for (UInt_t ie=0; ie<fNTreeSample; ie++) {
      evevec.push_back(fTrainingEventsRndm[ie]);
   }
   dt->BuildTree(evevec);
   if (fMethodRuleFit->GetPruneMethod() != DecisionTree::kNoPruning) {
      dt->SetPruneMethod(fMethodRuleFit->GetPruneMethod());
      dt->SetPruneStrength(fMethodRuleFit->GetPruneStrength());
      dt->PruneTree();
   }
}

//_______________________________________________________________________
void TMVA::RuleFit::MakeForest()
{
   // make a forest of decisiontrees
   if (fMethodRuleFit==0) {
      Log() << kFATAL << "RuleFit::MakeForest() - Attempting to build a forest NOT from a MethodRuleFit" << Endl;
   }
   Log() << kDEBUG << "Creating a forest with " << fMethodRuleFit->GetNTrees() << " decision trees" << Endl;
   Log() << kDEBUG << "Each tree is built using a random subsample with " << fNTreeSample << " events" << Endl;
   //
   Timer timer( fMethodRuleFit->GetNTrees(), "RuleFit" );

   // Double_t fsig;
   Int_t nsig,nbkg;
   //
   TRandom3 rndGen;
   //
   //
   // First save all event weights.
   // Weights are modified by the boosting;
   // the unmodified weights are needed again for the later fitting.
   //
   Bool_t useBoost = fMethodRuleFit->UseBoost(); // AdaBoost (true) or RandomForest/Tree (false)

   if (useBoost) SaveEventWeights();

   for (Int_t i=0; i<fMethodRuleFit->GetNTrees(); i++) {
      //      timer.DrawProgressBar(i);
      if (!useBoost) ReshuffleEvents();
      nsig=0;
      nbkg=0;
      for (UInt_t ie = 0; ie<fNTreeSample; ie++) {
         if (fMethodBase->DataInfo().IsSignal(fTrainingEventsRndm[ie])) nsig++; // ignore weights here
         else nbkg++;
      }
      // fsig = Double_t(nsig)/Double_t(nsig+nbkg);
      // do not implement the above in this release...just set it to default

      DecisionTree *dt=0;
      Bool_t tryAgain=kTRUE;
      Int_t ntries=0;
      const Int_t ntriesMax=10;
      Double_t frnd=0;
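      // grow a tree with a randomly drawn minimum node size (frnd, in per cent of
      // the sub sample); trees that end up with fewer than 3 nodes are discarded
      // and rebuilt with a new random draw, up to ntriesMax attempts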
      while (tryAgain) {
         frnd = 100*rndGen.Uniform( fMethodRuleFit->GetMinFracNEve(), 0.5*fMethodRuleFit->GetMaxFracNEve() );
         Int_t     iclass = 0; // event class being treated as signal during training
         Bool_t    useRandomisedTree = !useBoost;  
         dt = new DecisionTree( fMethodRuleFit->GetSeparationBase(), frnd, fMethodRuleFit->GetNCuts(), &(fMethodRuleFit->DataInfo()), iclass, useRandomisedTree);
         dt->SetNVars(fMethodBase->GetNvar());

         BuildTree(dt); // reads fNTreeSample events from fTrainingEventsRndm
         if (dt->GetNNodes()<3) {
            delete dt;
            dt=0;
         }
         ntries++;
         tryAgain = ((dt==0) && (ntries<ntriesMax));
      }
      if (dt) {
         fForest.push_back(dt);
         if (useBoost) Boost(dt);
         Log() << kDEBUG << "Built tree with minimum cut at N = " << frnd << "% events"
               << " => N(nodes) = " << fForest.back()->GetNNodes()
               << " ; n(tries) = " << ntries
               << Endl;
      }
      else {
         Log() << kWARNING << "------------------------------------------------------------------" << Endl;
         Log() << kWARNING << " Failed growing a tree even after " << ntriesMax << " trials" << Endl;
         Log() << kWARNING << " Possible solutions: " << Endl;
         Log() << kWARNING << "   1. increase the number of training events" << Endl;
         Log() << kWARNING << "   2. set a lower min fraction cut (fEventsMin)" << Endl;
         Log() << kWARNING << "   3. maybe also decrease the max fraction cut (fEventsMax)" << Endl;
         Log() << kWARNING << " If the above warning occurs only rarely, it can be ignored" << Endl;
         Log() << kWARNING << "------------------------------------------------------------------" << Endl;
      }
   }

   // Now restore event weights
   if (useBoost) RestoreEventWeights();

   // print statistics on the forest created
   ForestStatistics();
}

//_______________________________________________________________________
void TMVA::RuleFit::SaveEventWeights()
{
   // save event weights - must be done before making the forest
   fEventWeights.clear();
   for (std::vector<const Event*>::iterator e=fTrainingEvents.begin(); e!=fTrainingEvents.end(); e++) {
      Double_t w = (*e)->GetBoostWeight();
      fEventWeights.push_back(w);
   }
}

//_______________________________________________________________________
void TMVA::RuleFit::RestoreEventWeights()
{
   // restore the event weights saved by SaveEventWeights() - must be done after the forest is grown
   UInt_t ie=0;
   if (fEventWeights.size() != fTrainingEvents.size()) {
      Log() << kERROR << "RuleFit::RestoreEventWeights() called without having called SaveEventWeights() before!" << Endl;
      return;
   }
   for (std::vector<const Event*>::iterator e=fTrainingEvents.begin(); e!=fTrainingEvents.end(); e++) {
      (*e)->SetBoostWeight(fEventWeights[ie]);
      ie++;
   }
}

//_______________________________________________________________________
void TMVA::RuleFit::Boost( DecisionTree *dt )
{
   // Boost the events. The algorithm below is called AdaBoost.
   // See MethodBDT for details.
   // Actually, this is more or less a copy of MethodBDT::AdaBoost().
   Double_t sumw=0;      // sum of initial weights - all events
   Double_t sumwfalse=0; // idem, only misclassified events
   //
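   // A sketch of the update with exponent beta = 1 (as implemented below):
   //   err         = sum_w(misclassified) / sum_w(all)
   //   boostWeight = (1 - err) / err
   //   w_i        -> w_i * boostWeight   for every misclassified event
   // finally all weights are rescaled so that the total sum is unchanged.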
   std::vector<Char_t> correctSelected; // <--- boolean stored
   //
   for (std::vector<const Event*>::iterator e=fTrainingEvents.begin(); e!=fTrainingEvents.end(); e++) {
      Bool_t isSignalType = (dt->CheckEvent(*e,kTRUE) > 0.5 );
      Double_t w = (*e)->GetWeight();
      sumw += w;
      // 
      if (isSignalType == fMethodBase->DataInfo().IsSignal(*e)) { // correctly classified
         correctSelected.push_back(kTRUE);
      } 
      else {                                // misclassified
         sumwfalse+= w;
         correctSelected.push_back(kFALSE);
      }    
   }
   // misclassification error
   Double_t err = sumwfalse/sumw;
   // calculate the boost weight for misclassified events
   // use for now the exponent beta = 1.0,
   // more generally one could use w = ((1-err)/err)^beta
   Double_t boostWeight = (err>0 ? (1.0-err)/err : 1000.0);
   Double_t newSumw=0.0;
   UInt_t ie=0;
   // set new weights for misclassified events
   for (std::vector<const Event*>::iterator e=fTrainingEvents.begin(); e!=fTrainingEvents.end(); e++) {
      if (!correctSelected[ie])
         (*e)->SetBoostWeight( (*e)->GetBoostWeight() * boostWeight);
      newSumw+=(*e)->GetWeight();    
      ie++;
   }
   // reweight all events
   Double_t scale = sumw/newSumw;
   for (std::vector<const Event*>::iterator e=fTrainingEvents.begin(); e!=fTrainingEvents.end(); e++) {
      (*e)->SetBoostWeight( (*e)->GetBoostWeight() * scale);
   }
   Log() << kDEBUG << "boostWeight = " << boostWeight << "    scale = " << scale << Endl;
}

//_______________________________________________________________________
void TMVA::RuleFit::ForestStatistics()
{
   // summary of statistics of all trees
   // * end-nodes: average and spread
   UInt_t ntrees = fForest.size();
   if (ntrees==0) return;
   const DecisionTree *tree;
   Double_t sumn2 = 0;
   Double_t sumn  = 0;
   Double_t nd;
   for (UInt_t i=0; i<ntrees; i++) {
      tree = fForest[i];
      nd = Double_t(tree->GetNNodes());
      sumn  += nd;
      sumn2 += nd*nd;
   }
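   // standard deviation of the number of end-nodes per tree, computed from the
   // accumulated sum and sum of squares (assuming the usual sample-variance
   // convention for gTools().ComputeVariance)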
   Double_t sig = TMath::Sqrt( gTools().ComputeVariance( sumn2, sumn, ntrees ));
   Log() << kVERBOSE << "Nodes in trees: average & std dev = " << sumn/ntrees << " , " << sig << Endl;
}

//_______________________________________________________________________
void TMVA::RuleFit::FitCoefficients()
{
   //
   // Fit the coefficients for the rule ensemble
   //
   Log() << kVERBOSE << "Fitting rule/linear terms" << Endl;
   fRuleFitParams.MakeGDPath();
}

//_______________________________________________________________________
void TMVA::RuleFit::CalcImportance()
{
   // calculates the importance of each rule

   Log() << kVERBOSE << "Calculating importance" << Endl;
   fRuleEnsemble.CalcImportance();
   fRuleEnsemble.CleanupRules();
   fRuleEnsemble.CleanupLinear();
   fRuleEnsemble.CalcVarImportance();
   Log() << kVERBOSE << "Filling rule statistics" << Endl;
   fRuleEnsemble.RuleResponseStats();
}

//_______________________________________________________________________
Double_t TMVA::RuleFit::EvalEvent( const Event& e )
{
   // evaluate single event
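   // the ensemble response has the RuleFit form
   //    F(x) = a0 + sum_m a_m r_m(x) + sum_i b_i x_i  (linear part optional);
   // the actual evaluation is delegated to RuleEnsemble::EvalEvent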

   return fRuleEnsemble.EvalEvent( e );
}

//_______________________________________________________________________
void TMVA::RuleFit::SetTrainingEvents( const std::vector<const Event *>& el )
{
   // set the training events and create a randomly shuffled copy for subsampling
   if (fMethodRuleFit==0) Log() << kFATAL << "RuleFit::SetTrainingEvents - MethodRuleFit not initialized" << Endl;
   UInt_t neve = el.size();
   if (neve==0) Log() << kWARNING << "An empty sample of training events was given" << Endl;

   // copy vector
   fTrainingEvents.clear();
   fTrainingEventsRndm.clear();
   for (UInt_t i=0; i<neve; i++) {
      fTrainingEvents.push_back(el[i]);
      fTrainingEventsRndm.push_back(el[i]);
   }

   // Re-shuffle the vector, ie, recreate it in a random order
   std::random_shuffle( fTrainingEventsRndm.begin(), fTrainingEventsRndm.end() );
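   // note: std::random_shuffle (seeded via std::srand in the constructors) is
   // deprecated in C++14 and removed in C++17; std::shuffle with an explicit
   // engine is the modern replacement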

   // number of events per tree, from the configured fraction of the sample
   fNTreeSample = static_cast<UInt_t>(neve*fMethodRuleFit->GetTreeEveFrac());
   Log() << kDEBUG << "Number of events per tree : " << fNTreeSample
         << " ( N(events) = " << neve << " )"
         << " randomly drawn without replacement" << Endl;
}

//_______________________________________________________________________
void TMVA::RuleFit::GetRndmSampleEvents(std::vector< const Event * > & evevec, UInt_t nevents)
{
   // draw a random subsample of the training events without replacement
   ReshuffleEvents();
   if ((nevents<fTrainingEventsRndm.size()) && (nevents>0)) {
      evevec.resize(nevents);
      for (UInt_t ie=0; ie<nevents; ie++) {
         evevec[ie] = fTrainingEventsRndm[ie];
      }
   } 
   else {
      Log() << kWARNING << "GetRndmSampleEvents() : requested sub sample size larger than total size (BUG!)." << Endl;
   }
}
//_______________________________________________________________________
void TMVA::RuleFit::NormVisHists(std::vector<TH2F *> & hlist)
{
   // normalize rule importance hists
   //
   // if all weights are positive, the scale will be 1/maxweight
   // if minimum weight < 0, then the scale will be 1/max(maxweight,abs(minweight))
   //
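   // e.g. wmin = -2, wmax = 1  =>  scale = 1/2, histograms end up in [-1.0, 0.5];
   //      wmin =  0, wmax = 4  =>  scale = 1/4, histograms end up in [ 0.0, 1.0]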
   if (hlist.empty()) return;
   //
   Double_t wmin=0;
   Double_t wmax=0;
   Double_t w,wm;
   Double_t awmin;
   Double_t scale;
   for (UInt_t i=0; i<hlist.size(); i++) {
      TH2F *hs = hlist[i];
      w  = hs->GetMaximum();
      wm = hs->GetMinimum();
      if (i==0) {
         wmin=wm;
         wmax=w;
      } 
      else {
         if (w>wmax)  wmax=w;
         if (wm<wmin) wmin=wm;
      }
   }
   awmin = TMath::Abs(wmin);
   Double_t usemin,usemax;
   if (awmin>wmax) {
      scale = 1.0/awmin;
      usemin = -1.0;
      usemax = scale*wmax;
   } 
   else {
      scale = 1.0/wmax;
      usemin = scale*wmin;
      usemax = 1.0;
   }
   
   //
   for (UInt_t i=0; i<hlist.size(); i++) {
      TH2F *hs = hlist[i];
      hs->Scale(scale);
      hs->SetMinimum(usemin);
      hs->SetMaximum(usemax);
   }
}

//_______________________________________________________________________
void TMVA::RuleFit::FillCut(TH2F* h2, const Rule *rule, Int_t vind)
{
   // fill the cut range of variable vind for the given rule,
   // weighted by its importance or coefficient*support

   if (rule==0) return;
   if (h2==0) return;
   //
   Double_t rmin,  rmax;
   Bool_t   dormin,dormax;
   Bool_t ruleHasVar = rule->GetRuleCut()->GetCutRange(vind,rmin,rmax,dormin,dormax);
   if (!ruleHasVar) return;
   //
   Int_t firstbin = h2->GetBin(1,1,1);
   if(firstbin<0) firstbin=0;
   Int_t lastbin = h2->GetBin(h2->GetNbinsX(),1,1);
   Int_t binmin=(dormin ? h2->FindBin(rmin,0.5):firstbin);
   Int_t binmax=(dormax ? h2->FindBin(rmax,0.5):lastbin);
   Int_t fbin;
   Double_t xbinw = h2->GetXaxis()->GetBinWidth(firstbin);
   Double_t fbmin = h2->GetXaxis()->GetBinLowEdge(binmin-firstbin+1);
   Double_t lbmax = h2->GetXaxis()->GetBinLowEdge(binmax-firstbin+1)+xbinw;
   Double_t fbfrac = (dormin ? ((fbmin+xbinw-rmin)/xbinw):1.0);
   Double_t lbfrac = (dormax ? ((rmax-lbmax+xbinw)/xbinw):1.0);
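   // fbfrac and lbfrac are the fractions of the first and last bin covered by
   // the cut range; partially covered edge bins are filled with reduced weight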
   Double_t f;
   Double_t xc;
   Double_t val;

   for (Int_t bin = binmin; bin<binmax+1; bin++) {
      fbin = bin-firstbin+1;
      if (bin==binmin) {
         f = fbfrac;
      }
      else if (bin==binmax) {
         f = lbfrac;
      }
      else {
         f = 1.0;
      }
      xc = h2->GetXaxis()->GetBinCenter(fbin);
      //
      if (fVisHistsUseImp) {
         val = rule->GetImportance();
      } 
      else {
         val = rule->GetCoefficient()*rule->GetSupport();
      }
      h2->Fill(xc,0.5,val*f);
   }
}

//_______________________________________________________________________
void TMVA::RuleFit::FillLin(TH2F* h2,Int_t vind)
{
   // fill the linear term of variable vind, weighted by its importance or coefficient
   if (h2==0) return;
   if (!fRuleEnsemble.DoLinear()) return;
   //
   Int_t firstbin = 1;
   Int_t lastbin = h2->GetNbinsX();
   Double_t xc;
   Double_t val;
   if (fVisHistsUseImp) {
      val = fRuleEnsemble.GetLinImportance(vind);
   }
   else {
      val = fRuleEnsemble.GetLinCoefficients(vind);
   }
   for (Int_t bin = firstbin; bin<lastbin+1; bin++) {
      xc = h2->GetXaxis()->GetBinCenter(bin);
      h2->Fill(xc,0.5,val);
   }
}

//_______________________________________________________________________
void TMVA::RuleFit::FillCorr(TH2F* h2,const Rule *rule,Int_t vx, Int_t vy)
{
   // fill rule correlation between vx and vy, weighted with either the importance or the coefficient
   if (rule==0) return;
   if (h2==0) return;
   Double_t val;
   if (fVisHistsUseImp) {
      val = rule->GetImportance();
   }
   else {
      val = rule->GetCoefficient()*rule->GetSupport();
   }
   //
   Double_t rxmin,   rxmax,   rymin,   rymax;
   Bool_t   dorxmin, dorxmax, dorymin, dorymax;
   //
   // Get range in rule for X and Y
   //
   Bool_t ruleHasVarX = rule->GetRuleCut()->GetCutRange(vx,rxmin,rxmax,dorxmin,dorxmax);
   Bool_t ruleHasVarY = rule->GetRuleCut()->GetCutRange(vy,rymin,rymax,dorymin,dorymax);
   if (!(ruleHasVarX || ruleHasVarY)) return;
   // min max of varX and varY in hist
   Double_t vxmin = (dorxmin ? rxmin:h2->GetXaxis()->GetXmin());
   Double_t vxmax = (dorxmax ? rxmax:h2->GetXaxis()->GetXmax());
   Double_t vymin = (dorymin ? rymin:h2->GetYaxis()->GetXmin());
   Double_t vymax = (dorymax ? rymax:h2->GetYaxis()->GetXmax());
   // min max bin in X and Y
   Int_t binxmin  = h2->GetXaxis()->FindBin(vxmin);
   Int_t binxmax  = h2->GetXaxis()->FindBin(vxmax);
   Int_t binymin  = h2->GetYaxis()->FindBin(vymin);
   Int_t binymax  = h2->GetYaxis()->FindBin(vymax);
   // bin widths
   Double_t xbinw = h2->GetXaxis()->GetBinWidth(binxmin);
   Double_t ybinw = h2->GetYaxis()->GetBinWidth(binymin);
   Double_t xbinmin = h2->GetXaxis()->GetBinLowEdge(binxmin);
   Double_t xbinmax = h2->GetXaxis()->GetBinLowEdge(binxmax)+xbinw;
   Double_t ybinmin = h2->GetYaxis()->GetBinLowEdge(binymin);
   Double_t ybinmax = h2->GetYaxis()->GetBinLowEdge(binymax)+ybinw;
   // fraction of edges
   Double_t fxbinmin = (dorxmin ? ((xbinmin+xbinw-vxmin)/xbinw):1.0);
   Double_t fxbinmax = (dorxmax ? ((vxmax-xbinmax+xbinw)/xbinw):1.0);
   Double_t fybinmin = (dorymin ? ((ybinmin+ybinw-vymin)/ybinw):1.0);
   Double_t fybinmax = (dorymax ? ((vymax-ybinmax+ybinw)/ybinw):1.0);
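   // as in FillCut(): edge bins that are only partially covered by the rule
   // range are filled with the fractional weight fx*fy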
   //
   Double_t fx,fy;
   Double_t xc,yc;
   // fill histo
   for (Int_t binx = binxmin; binx<binxmax+1; binx++) {
      if (binx==binxmin) {
         fx = fxbinmin;
      } 
      else if (binx==binxmax) {
         fx = fxbinmax;
      } 
      else {
         fx = 1.0;
      }
      xc = h2->GetXaxis()->GetBinCenter(binx);
      for (Int_t biny = binymin; biny<binymax+1; biny++) {
         if (biny==binymin) {
            fy = fybinmin;
         } 
         else if (biny==binymax) {
            fy = fybinmax;
         } 
         else {
            fy = 1.0;
         }
         yc = h2->GetYaxis()->GetBinCenter(biny);
         h2->Fill(xc,yc,val*fx*fy);
      }
   }
}

//_______________________________________________________________________
void TMVA::RuleFit::FillVisHistCut(const Rule* rule, std::vector<TH2F *> & hlist)
{
   // help routine to MakeVisHists() - fills for all variables
   Int_t nhists = hlist.size();
   Int_t nvar   = fMethodBase->GetNvar();
   if (nhists!=nvar) Log() << kFATAL << "BUG TRAP: number of hists is not equal to the number of variables!" << Endl;
   //
   std::vector<Int_t> vindex;
   TString hstr;
   // not a nice way to do a check...
   for (Int_t ih=0; ih<nhists; ih++) {
      hstr = hlist[ih]->GetTitle();
      for (Int_t iv=0; iv<nvar; iv++) {
         if (fMethodBase->GetInputTitle(iv) == hstr)
            vindex.push_back(iv);
      }
   }
   //
   for (Int_t iv=0; iv<nvar; iv++) {
      if (rule) {
         if (rule->ContainsVariable(vindex[iv])) {
            FillCut(hlist[iv],rule,vindex[iv]);
         }
      } 
      else {
         FillLin(hlist[iv],vindex[iv]);
      }
   }
}
//_______________________________________________________________________
void TMVA::RuleFit::FillVisHistCorr(const Rule * rule, std::vector<TH2F *> & hlist)
{
   // help routine to MakeVisHists() - fills for all correlation plots
   if (rule==0) return;
   Double_t ruleimp  = rule->GetImportance();
   if (!(ruleimp>0)) return;
   if (ruleimp<fRuleEnsemble.GetImportanceCut()) return;
   //
   Int_t nhists = hlist.size();
   Int_t nvar   = fMethodBase->GetNvar();
   Int_t ncorr  = (nvar*(nvar+1)/2)-nvar;
   if (nhists!=ncorr) Log() << kERROR << "BUG TRAP: number of corr hists is not correct! ncorr = "
                            << ncorr << " nvar = " << nvar << " nhists = " << nhists << Endl;
   //
   std::vector< std::pair<Int_t,Int_t> > vindex;
   TString hstr, var1, var2;
   Int_t iv1=0,iv2=0;
   // not a nice way to do a check...
   for (Int_t ih=0; ih<nhists; ih++) {
      hstr = hlist[ih]->GetName();
      if (GetCorrVars( hstr, var1, var2 )) {
         iv1 = fMethodBase->DataInfo().FindVarIndex( var1 );
         iv2 = fMethodBase->DataInfo().FindVarIndex( var2 );
         vindex.push_back( std::pair<Int_t,Int_t>(iv2,iv1) ); // pair X, Y
      } 
      else {
         Log() << kERROR << "BUG TRAP: should not be here - failed getting var1 and var2" << Endl;
      }
   }
   //
   for (Int_t ih=0; ih<nhists; ih++) {
      if ( (rule->ContainsVariable(vindex[ih].first)) ||
           (rule->ContainsVariable(vindex[ih].second)) ) {
         FillCorr(hlist[ih],rule,vindex[ih].first,vindex[ih].second);
      }
   }
}
//_______________________________________________________________________
Bool_t TMVA::RuleFit::GetCorrVars(TString & title, TString & var1, TString & var2)
{
   // get first and second variables from title
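   // the expected name format is "scat_<var1>_vs_<var2>" with an optional
   // "_RF2D" suffix, e.g. a (hypothetical) "scat_var1_vs_var2_RF2D"
   // yields var1 and var2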
   var1="";
   var2="";
   if(!title.BeginsWith("scat_")) return kFALSE;

   TString titleCopy = title(5,title.Length());
   if(titleCopy.Index("_RF2D")>=0) titleCopy.Remove(titleCopy.Index("_RF2D"));

   Int_t splitPos = titleCopy.Index("_vs_");
   if(splitPos>=0) { // there is a _vs_ in the string
      var1 = titleCopy(0,splitPos);
      var2 = titleCopy(splitPos+4, titleCopy.Length());
      return kTRUE;
   } 
   else {
      var1 = titleCopy;
      return kFALSE;
   }
}
//_______________________________________________________________________
void TMVA::RuleFit::MakeVisHists()
{
   // this will create histograms visualizing the rule ensemble

   const TString directories[5] = { "InputVariables_Id",
                                    "InputVariables_Deco",
                                    "InputVariables_PCA",
                                    "InputVariables_Gauss",
                                    "InputVariables_Gauss_Deco" };

   const TString corrDirName = "CorrelationPlots";   
   
   TDirectory* rootDir   = Factory::RootBaseDir();
   TDirectory* varDir    = 0;
   TDirectory* corrDir   = 0;

   TDirectory* methodDir = fMethodBase->BaseDir();
   TString varDirName;
   //
   Bool_t done=(rootDir==0);
   Int_t type=0;
   if (done) {
      Log() << kWARNING << "No basedir - BUG??" << Endl;
      return;
   }
   while (!done) {
      varDir = (TDirectory*)rootDir->Get( directories[type] );
      type++;
      done = ((varDir!=0) || (type>4));
   }
   if (varDir==0) {
      Log() << kWARNING << "No input variable directory found - BUG?" << Endl;
      return;
   }
   corrDir = (TDirectory*)varDir->Get( corrDirName );
   if (corrDir==0) {
      Log() << kWARNING << "No correlation directory found" << Endl;
      Log() << kWARNING << "Check for other warnings related to correlation histograms" << Endl;
      return;
   }
   if (methodDir==0) {
      Log() << kWARNING << "No rulefit method directory found - BUG?" << Endl;
      return;
   }

   varDirName = varDir->GetName();
   varDir->cd();

   // how many plots are in the var directory?
   Int_t noPlots = ((varDir->GetListOfKeys())->GetEntries()) / 2;
   Log() << kDEBUG << "Got number of plots = " << noPlots << Endl;
 
   // loop over all objects in directory
   std::vector<TH2F *> h1Vector;
   std::vector<TH2F *> h2CorrVector;
   TIter next(varDir->GetListOfKeys());
   TKey *key;
   while ((key = (TKey*)next())) {
      // make sure, that we only look at histograms
      TClass *cl = gROOT->GetClass(key->GetClassName());
      if (!cl->InheritsFrom(TH1F::Class())) continue;
      TH1F *sig = (TH1F*)key->ReadObj();
      TString hname= sig->GetName();
      Log() << kDEBUG << "Got histogram : " << hname << Endl;

      // check for all signal histograms
      if (hname.Contains("__S")){ // found a new signal plot
         TString htitle = sig->GetTitle();
         htitle.ReplaceAll("signal","");
         TString newname = hname;
         newname.ReplaceAll("__Signal","__RF");
         newname.ReplaceAll("__S","__RF");

         methodDir->cd();
         TH2F *newhist = new TH2F(newname,htitle,sig->GetNbinsX(),sig->GetXaxis()->GetXmin(),sig->GetXaxis()->GetXmax(),
                                  1,sig->GetYaxis()->GetXmin(),sig->GetYaxis()->GetXmax());
         varDir->cd();
         h1Vector.push_back( newhist );
      }
   }
   //
   corrDir->cd();
   TString var1,var2;
   TIter nextCorr(corrDir->GetListOfKeys());
   while ((key = (TKey*)nextCorr())) {
      // make sure, that we only look at histograms
      TClass *cl = gROOT->GetClass(key->GetClassName());
      if (!cl->InheritsFrom(TH2F::Class())) continue;
      TH2F *sig = (TH2F*)key->ReadObj();
      TString hname= sig->GetName();

      // check for all signal histograms
      if ((hname.Contains("scat_")) && (hname.Contains("_Signal"))) {
         Log() << kDEBUG << "Got histogram (2D) : " << hname << Endl;
         TString htitle = sig->GetTitle();
         htitle.ReplaceAll("(Signal)","");
         TString newname = hname;
         newname.ReplaceAll("_Signal","_RF2D");

         methodDir->cd();
         const Int_t rebin=2;
         TH2F *newhist = new TH2F(newname,htitle,
                                  sig->GetNbinsX()/rebin,sig->GetXaxis()->GetXmin(),sig->GetXaxis()->GetXmax(),
                                  sig->GetNbinsY()/rebin,sig->GetYaxis()->GetXmin(),sig->GetYaxis()->GetXmax());
         if (GetCorrVars( newname, var1, var2 )) {
            Int_t iv1 = fMethodBase->DataInfo().FindVarIndex(var1);
            Int_t iv2 = fMethodBase->DataInfo().FindVarIndex(var2);
            if (iv1<0) {
               sig->GetYaxis()->SetTitle(var1);
            } 
            else {
               sig->GetYaxis()->SetTitle(fMethodBase->GetInputTitle(iv1));
            }
            if (iv2<0) {
               sig->GetXaxis()->SetTitle(var2);
            } 
            else {
               sig->GetXaxis()->SetTitle(fMethodBase->GetInputTitle(iv2));
            }
         }
         corrDir->cd();
         h2CorrVector.push_back( newhist );
      }
   }


   varDir->cd();
   // fill rules
   UInt_t nrules = fRuleEnsemble.GetNRules();
   const Rule *rule;
   for (UInt_t i=0; i<nrules; i++) {
      rule = fRuleEnsemble.GetRulesConst(i);
      FillVisHistCut(rule, h1Vector);
   }
   // fill linear terms and normalise hists
   FillVisHistCut(0, h1Vector);
   NormVisHists(h1Vector);
 
   //
   corrDir->cd();
   // fill rules
   for (UInt_t i=0; i<nrules; i++) {
      rule = fRuleEnsemble.GetRulesConst(i);
      FillVisHistCorr(rule, h2CorrVector);
   }
   NormVisHists(h2CorrVector);

   // write histograms to file   
   methodDir->cd();
   for (UInt_t i=0; i<h1Vector.size();     i++) h1Vector[i]->Write();
   for (UInt_t i=0; i<h2CorrVector.size(); i++) h2CorrVector[i]->Write();
}

//_______________________________________________________________________
void TMVA::RuleFit::MakeDebugHists()
{
   // this will create histograms intended for debugging and for the curious user

   TDirectory* methodDir = fMethodBase->BaseDir();
   if (methodDir==0) {
      Log() << kWARNING << "<MakeDebugHists> No rulefit method directory found - bug?" << Endl;
      return;
   }
   //
   methodDir->cd();
   std::vector<Double_t> distances;
   std::vector<Double_t> fncuts;
   std::vector<Double_t> fnvars;
   const Rule *ruleA;
   const Rule *ruleB;
   Double_t dABmin=1000000.0;
   Double_t dABmax=-1.0;
   UInt_t nrules = fRuleEnsemble.GetNRules();
   for (UInt_t i=0; i<nrules; i++) {
      ruleA = fRuleEnsemble.GetRulesConst(i);
      for (UInt_t j=i+1; j<nrules; j++) {
         ruleB = fRuleEnsemble.GetRulesConst(j);
         Double_t dAB = ruleA->RuleDist( *ruleB, kTRUE );
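         // a negative return value apparently flags rule pairs for which the
         // distance is undefined; only defined distances enter the statistics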
         if (dAB>-0.5) {
            UInt_t nc = ruleA->GetNcuts();
            UInt_t nv = ruleA->GetNumVarsUsed();
            distances.push_back(dAB);
            fncuts.push_back(static_cast<Double_t>(nc));
            fnvars.push_back(static_cast<Double_t>(nv));
            if (dAB<dABmin) dABmin=dAB;
            if (dAB>dABmax) dABmax=dAB;
         }
      }
   }
   //
   TH1F *histDist = new TH1F("RuleDist","Rule distances",100,dABmin,dABmax);
   TTree *distNtuple = new TTree("RuleDistNtuple","RuleDist ntuple");
   Double_t ntDist;
   Double_t ntNcuts;
   Double_t ntNvars;
   distNtuple->Branch("dist", &ntDist,  "dist/D");
   distNtuple->Branch("ncuts",&ntNcuts, "ncuts/D");
   distNtuple->Branch("nvars",&ntNvars, "nvars/D");
   //
   for (UInt_t i=0; i<distances.size(); i++) {
      histDist->Fill(distances[i]);
      ntDist  = distances[i];
      ntNcuts = fncuts[i];
      ntNvars = fnvars[i];
      distNtuple->Fill();
   }
   distNtuple->Write();
}