// standard constructor
TMVA::MethodKNN::MethodKNN(const TString& jobName,
                           const TString& methodTitle,
                           DataSetInfo& theData,
                           const TString& theOption,
                           TDirectory* theTargetDir)
   : TMVA::MethodBase(jobName, Types::kKNN, methodTitle, theData, theOption, theTargetDir)
{
   // ...
}
// destructor
TMVA::MethodKNN::~MethodKNN()
{
   if (fModule) delete fModule;
}
// MethodKNN options
void TMVA::MethodKNN::DeclareOptions()
{
   DeclareOptionRef(fnkNN         = 20,     "nkNN",         "Number of k-nearest neighbors");
   DeclareOptionRef(fBalanceDepth = 6,      "BalanceDepth", "Binary tree balance depth");
   DeclareOptionRef(fScaleFrac    = 0.80,   "ScaleFrac",    "Fraction of events used to compute variable width");
   DeclareOptionRef(fSigmaFact    = 1.0,    "SigmaFact",    "Scale factor for sigma in Gaussian kernel");
   DeclareOptionRef(fKernel       = "Gaus", "Kernel",       "Use polynomial (=Poln) or Gaussian (=Gaus) kernel");
   DeclareOptionRef(fTrim         = kFALSE, "Trim",         "Use equal number of signal and background events");
   DeclareOptionRef(fUseKernel    = kFALSE, "UseKernel",    "Use polynomial kernel weight");
   DeclareOptionRef(fUseWeight    = kTRUE,  "UseWeight",    "Use weight to count kNN events");
   DeclareOptionRef(fUseLDA       = kFALSE, "UseLDA",       "Use local linear discriminant - experimental feature");
}

// options that are used ONLY for the READER to ensure backward compatibility
void TMVA::MethodKNN::DeclareCompatibilityOptions()
{
   // ...
   DeclareOptionRef(fTreeOptDepth = 6, "TreeOptDepth", "Binary tree optimisation depth");
}
// process the options specified by the user
void TMVA::MethodKNN::ProcessOptions()
{
   if (!(fnkNN > 0)) {
      // ...
      Log() << kWARNING << "kNN must be a positive integer: set kNN = " << fnkNN << Endl;
   }
   if (fScaleFrac < 0.0) {
      // ...
      Log() << kWARNING << "ScaleFrac cannot be negative: set ScaleFrac = " << fScaleFrac << Endl;
   }
   if (fScaleFrac > 1.0) {
      // ...
   }
   if (!(fBalanceDepth > 0)) {
      // ...
      Log() << kWARNING << "Optimize must be a positive integer: set Optimize = " << fBalanceDepth << Endl;
   }

   Log() << kVERBOSE
         << "  kNN       = " << fnkNN         << "\n"
         << "  UseKernel = " << fUseKernel    << "\n"
         << "  SigmaFact = " << fSigmaFact    << "\n"
         << "  ScaleFrac = " << fScaleFrac    << "\n"
         << "  Kernel    = " << fKernel       << "\n"
         << "  Trim      = " << fTrim         << "\n"
         << "  Optimize  = " << fBalanceDepth << Endl;
}
// create kNN
void TMVA::MethodKNN::MakeKNN()
{
   if (fScaleFrac > 0.0) {
      // ... compute the widths used to scale the input variables
   }

   Log() << kINFO << "Creating kd-tree with " << fEvent.size() << " events" << Endl;

   for (kNN::EventVec::const_iterator event = fEvent.begin(); event != fEvent.end(); ++event) {
      fModule->Add(*event);
   }

   // balance the binary tree
   fModule->Fill(static_cast<UInt_t>(fBalanceDepth),
                 static_cast<UInt_t>(100.0*fScaleFrac),
                 /* ... */);
}
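// The ScaleFrac option can be pictured with a hypothetical helper (not part
// of ModulekNN): sort one variable's values and take the span of the central
// ScaleFrac*100% of events as that variable's width, so every variable
// contributes on a comparable scale to the Euclidean distance.
#include <algorithm>
#include <vector>

double TruncatedWidthSketch(std::vector<double> values, double frac)
{
   if (values.size() < 2) return 0.0;
   std::sort(values.begin(), values.end());
   // number of events to discard on each tail
   const std::size_t skip =
      static_cast<std::size_t>(0.5*(1.0 - frac)*values.size());
   // span of the central frac*100% of events
   return values[values.size() - 1 - skip] - values[skip];
}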
// kNN training
void TMVA::MethodKNN::Train()
{
   if (IsNormalised()) {
      Log() << kINFO << "Input events are normalized - setting ScaleFrac to 0" << Endl;
      // ...
   }

   if (!fEvent.empty()) {
      Log() << kINFO << "Erasing " << fEvent.size() << " previously stored events" << Endl;
      // ...
   }
   if (GetNVariables() < 1)
      Log() << kFATAL << "MethodKNN::Train() - mismatched or wrong number of event variables" << Endl;
   // ...
   Log() << kINFO << "Reading " << GetNEvents() << " events" << Endl;

   for (UInt_t ievt = 0; ievt < GetNEvents(); ++ievt) {
      // read the training event
      const Event* evt_ = GetEvent(ievt);
      // ...
      if (IgnoreEventsWithNegWeightsInTraining() && weight <= 0) continue;
      // ...
      for (UInt_t ivar = 0; ivar < evt_->GetNVariables(); ++ivar) vvec[ivar] = evt_->GetValue(ivar);
      // ...
      if (DataInfo().IsSignal(evt_)) {
         fSumOfWeightsS += weight;
         // ...
      }
      else {
         fSumOfWeightsB += weight;
         // ...
      }
      // ...
      kNN::Event event_knn(vvec, weight, event_type);
      // ...
      fEvent.push_back(event_knn);
   }

   Log() << kINFO
         << "Number of signal events " << fSumOfWeightsS << Endl
         << "Number of background events " << fSumOfWeightsB << Endl;
   // ...
}
// compute classifier response
Double_t TMVA::MethodKNN::GetMvaValue(Double_t* err, Double_t* errUpper)
{
   // cannot determine error
   NoErrorCalc(err, errUpper);
   // ...
   const Event *ev = GetEvent();
   const Int_t nvar = GetNVariables();
   // ...
   // copy the test event into a kNN::Event
   for (Int_t ivar = 0; ivar < nvar; ++ivar) {
      // ...
   }
   // ...
   // search for knn + 2 nearest neighbors
   fModule->Find(event_knn, knn + 2);

   const kNN::List &rlist = fModule->GetkNNList();
   if (rlist.size() != knn + 2) {
      // ...
   }
   // ...
   Bool_t use_gaus = false, use_poln = false;
   if      (fKernel == "Gaus") use_gaus = true;
   else if (fKernel == "Poln") use_poln = true;
   // ...
   if (!(kradius > 0.0)) {
      // ...
   }
   // ...
   std::vector<Double_t> rms_vec;
   // ...
   if (rms_vec.empty() || rms_vec.size() != event_knn.GetNVar()) {
      // ...
   }
   // ...
   Double_t weight_all = 0, weight_sig = 0, weight_bac = 0;

   for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit) {
      // ...
      if (lit->second < 0.0) {
         Log() << kFATAL << "A neighbor has negative distance to query event" << Endl;
      }
      else if (!(lit->second > 0.0)) {
         Log() << kVERBOSE << "A neighbor has zero distance to query event" << Endl;
      }
      // ...
      if (fUseWeight) weight_all += evweight;   // count all events
      // ...
      if (fUseWeight) weight_sig += evweight;   // count signal events
      // ...
      if (fUseWeight) weight_bac += evweight;   // count background events
      // ...
      Log() << kFATAL << "Unknown type for training event" << Endl;
      // ...
      // use only the first knn events
      if (count_all >= knn) {
         // ...
      }
   }

   // check that the number of events and the weight sum are positive
   if (!(count_all > 0)) {
      Log() << kFATAL << "Size of kNN result list is not positive" << Endl;
      // ...
   }
   // ...
   if (count_all < knn) {
      Log() << kDEBUG << "count_all and kNN have different size: " << count_all << " < " << knn << Endl;
      // ...
   }
   // ...
   if (!(weight_all > 0.0)) {
      Log() << kFATAL << "kNN result total weight is not positive" << Endl;
      // ...
   }

   return weight_sig/weight_all;
}
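// In formula form, the returned discriminant is the weighted signal fraction
// among the k nearest neighbors of test event x (w_n is the event weight,
// optionally multiplied by a kernel weight when UseKernel is set):
//
//   D(x) = sum_{n in kNN(x), signal} w_n / sum_{n in kNN(x)} w_n
//
// so D(x) -> 1 in signal-dominated regions and D(x) -> 0 in
// background-dominated ones.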
// return vector of averages for target values of k-nearest neighbors
const std::vector<Float_t>& TMVA::MethodKNN::GetRegressionValues()
{
   if (fRegressionReturnVal == 0) {
      fRegressionReturnVal = new std::vector<Float_t>;
   }
   else {
      fRegressionReturnVal->clear();
   }
   // ...
   const Event *evt = GetEvent();
   const Int_t nvar = GetNVariables();
   // ...
   std::vector<float> reg_vec;
   // ...
   for (Int_t ivar = 0; ivar < nvar; ++ivar) {
      // ...
   }
   // ...
   fModule->Find(event_knn, knn + 2);

   const kNN::List &rlist = fModule->GetkNNList();
   if (rlist.size() != knn + 2) {
      // ...
      return *fRegressionReturnVal;
   }
   // ...
   for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit) {
      // ...
      if (reg_vec.empty()) {
         // ...
      }
      // accumulate the (optionally weighted) target values of this neighbor
      for (UInt_t ivar = 0; ivar < tvec.size(); ++ivar) {
         if (fUseWeight) reg_vec[ivar] += tvec[ivar]*weight;
         else            reg_vec[ivar] += tvec[ivar];
      }
      // ...
      if (fUseWeight) weight_all += weight;
      // ...
      // use only the first knn events
      if (count_all == knn) {
         // ...
      }
   }

   if (!(weight_all > 0.0)) {
      Log() << kFATAL << "Total weight sum is not positive: " << weight_all << Endl;
      // ...
      return *fRegressionReturnVal;
   }

   for (UInt_t ivar = 0; ivar < reg_vec.size(); ++ivar) {
      reg_vec[ivar] /= weight_all;
   }

   // copy the result
   fRegressionReturnVal->insert(fRegressionReturnVal->begin(), reg_vec.begin(), reg_vec.end());

   return *fRegressionReturnVal;
}
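// The regression output is therefore the weighted mean of each target over
// the k nearest neighbors:
//
//   t_hat_j(x) = sum_{n in kNN(x)} w_n * t_{n,j} / sum_{n in kNN(x)} w_n
//
// with w_n = 1 for all neighbors when UseWeight is off.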
// write weights to XML
void TMVA::MethodKNN::AddWeightsXMLTo(void* parent) const
{
   // ...
   if (fEvent.size() > 0) gTools().AddAttr(wght, "NVar", fEvent.begin()->GetNVar());
   if (fEvent.size() > 0) gTools().AddAttr(wght, "NTgt", fEvent.begin()->GetNTgt());

   for (kNN::EventVec::const_iterator event = fEvent.begin(); event != fEvent.end(); ++event) {
      // ...
      std::stringstream s("");
      // ...
      // write the variable values, space-separated, in scientific notation
      for (UInt_t ivar = 0; ivar < event->GetNVar(); ++ivar) {
         if (ivar > 0) s << " ";
         s << std::scientific << event->GetVar(ivar);
      }
      // ...
      // append the target values
      for (UInt_t itgt = 0; itgt < event->GetNTgt(); ++itgt) {
         s << " " << std::scientific << event->GetTgt(itgt);
      }
      // ...
   }
}
// read weights from XML
void TMVA::MethodKNN::ReadWeightsFromXML(void* wghtnode)
{
   // ...
   UInt_t nvar = 0, ntgt = 0;
   // ...
   std::stringstream s( gTools().GetContent(ch) );
   // ...
   for (UInt_t ivar = 0; ivar < nvar; ivar++)
      // ... read each variable value
   for (UInt_t itgt = 0; itgt < ntgt; itgt++)
      // ... read each target value
   // ...
   kNN::Event event_knn(vvec, evtWeight, evtType, tvec);
   fEvent.push_back(event_knn);
   // ...
}
// read the weights (text format)
void TMVA::MethodKNN::ReadWeightsFromStream(std::istream& is)
{
   Log() << kINFO << "Starting ReadWeightsFromStream(std::istream& is) function..." << Endl;

   if (!fEvent.empty()) {
      Log() << kINFO << "Erasing " << fEvent.size() << " previously stored events" << Endl;
      // ...
   }
   // ...
   std::getline(is, line);

   // skip empty lines and comments
   if (line.empty() || line.find("#") != std::string::npos) {
      // ...
   }
   // ...
   // count the comma-separated fields
   std::string::size_type pos = 0;
   while ( (pos = line.find(',', pos)) != std::string::npos ) { count++; pos++; }
   // ...
   if (count < 3 || nvar != count - 2) {
      // ... malformed line
   }
   // ...
   // split the line at commas and convert the fields
   std::string::size_type prev = 0;

   for (std::string::size_type ipos = 0; ipos < line.size(); ++ipos) {
      if (line[ipos] != ',' && ipos + 1 != line.size()) {
         // ...
      }
      // ...
      if (!(ipos > prev)) {
         // ...
      }
      // ...
      std::string vstring = line.substr(prev, ipos - prev);
      if (ipos + 1 == line.size()) {
         vstring = line.substr(prev, ipos - prev + 1);
      }
      // ...
      if (vstring.empty()) {
         // ...
      }
      // ...
      else if (vcount == 1) {
         type = std::atoi(vstring.c_str());
      }
      else if (vcount == 2) {
         weight = std::atof(vstring.c_str());
      }
      else if (vcount - 3 < vvec.size()) {
         vvec[vcount - 3] = std::atof(vstring.c_str());
      }
      // ...
   }
   // ...
   fEvent.push_back(kNN::Event(vvec, weight, type));
   // ...
   Log() << kINFO << "Read " << fEvent.size() << " events from text file" << Endl;
}
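// From the parsing above, each data line of the text format appears to hold
// comma-separated fields: a leading field (field 0, handled in elided code),
// the event type (field 1), the event weight (field 2), and one field per
// input variable. An illustrative line for two variables (format inferred,
// not taken from the source):
//
//   0, 1, 1.0, 3.2e-01, -1.7e+00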
// save weights to ROOT file
void TMVA::MethodKNN::WriteWeightsToStream(TFile& rf) const
{
   Log() << kINFO << "Starting WriteWeightsToStream(TFile &rf) function..." << Endl;

   if (fEvent.empty()) {
      // ... nothing to write
   }
   // ...
   tree->Branch("event", "TMVA::kNN::Event", &event);
   // ...
   for (kNN::EventVec::const_iterator it = fEvent.begin(); it != fEvent.end(); ++it) {
      // ...
      size += tree->Fill();
   }
   // ...
   Log() << kINFO << "Wrote " << size << " MB and " << fEvent.size()
         << " events to ROOT file" << Endl;
   // ...
}
// read weights from ROOT file
void TMVA::MethodKNN::ReadWeightsFromStream(TFile& rf)
{
   Log() << kINFO << "Starting ReadWeightsFromStream(TFile &rf) function..." << Endl;

   if (!fEvent.empty()) {
      Log() << kINFO << "Erasing " << fEvent.size() << " previously stored events" << Endl;
      // ...
   }
   // ...
   for (Int_t i = 0; i < nevent; ++i) {
      // ...
      fEvent.push_back(*event);
   }
   // ...
   Log() << kINFO << "Read " << size << " MB and " << fEvent.size()
         << " events from ROOT file" << Endl;
}
// write specific classifier response
void TMVA::MethodKNN::MakeClassSpecific(std::ostream& fout, const TString& className) const
{
   // ...
   fout << "   // not implemented for class: \"" << className << "\"" << std::endl;
   fout << "};" << std::endl;
}
// get help message text
void TMVA::MethodKNN::GetHelpMessage() const
{
   // ...
   Log() << "The k-nearest neighbor (k-NN) algorithm is a multi-dimensional classification" << Endl
         << "and regression algorithm. Similarly to other TMVA algorithms, k-NN uses a set of" << Endl
         << "training events for which a classification category/regression target is known." << Endl
         << "The k-NN method compares a test event to all training events using a distance" << Endl
         << "function, which is a Euclidean distance in a space defined by the input variables." << Endl
         << "The k-NN method, as implemented in TMVA, uses a kd-tree algorithm to perform a" << Endl
         << "quick search for the k events with the shortest distance to the test event. The" << Endl
         << "method returns a fraction of signal events among the k neighbors. It is recommended" << Endl
         << "that a histogram which stores the k-NN decision variable is binned with k+1 bins" << Endl
         << "between 0 and 1." << Endl;

   Log() << gTools().Color("bold") << "--- Performance tuning via configuration options: "
         << gTools().Color("reset") << Endl;

   Log() << "The k-NN method estimates a density of signal and background events in a" << Endl
         << "neighborhood around the test event. The method assumes that the density of the" << Endl
         << "signal and background events is uniform and constant within the neighborhood." << Endl
         << "k is an adjustable parameter that determines the average size of the" << Endl
         << "neighborhood. Small k values (less than 10) are sensitive to statistical" << Endl
         << "fluctuations, and large values (greater than 100) might not sufficiently capture" << Endl
         << "local differences between events in the training set. The evaluation time of the" << Endl
         << "k-NN method also increases with larger values of k." << Endl;

   Log() << "The k-NN method assigns equal weight to all input variables. Different scales" << Endl
         << "among the input variables are compensated for using the ScaleFrac parameter: the" << Endl
         << "input variables are scaled so that the widths of the central ScaleFrac*100% of" << Endl
         << "events are equal among all the input variables." << Endl;

   Log() << gTools().Color("bold") << "--- Additional configuration options: "
         << gTools().Color("reset") << Endl;

   Log() << "The method includes an option to use a Gaussian kernel to smooth out the k-NN" << Endl
         << "response. The kernel re-weights events using a distance to the test event." << Endl;
}
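// For completeness, a minimal sketch of evaluating a trained k-NN classifier
// through the standard TMVA::Reader interface. The variable names and the
// weight-file path are illustrative assumptions, not taken from this file.
#include "TMVA/Reader.h"

double EvaluateKNNSketch(float v1, float v2)
{
   static TMVA::Reader reader("!Color:Silent");
   static float var1 = 0, var2 = 0;
   static bool booked = false;
   if (!booked) {
      reader.AddVariable("var1", &var1);   // must match the training variables
      reader.AddVariable("var2", &var2);
      reader.BookMVA("KNN", "dataset/weights/TMVAClassification_KNN.weights.xml");
      booked = true;
   }
   var1 = v1;
   var2 = v2;
   return reader.EvaluateMVA("KNN");   // signal fraction among the k neighbors
}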
// polynomial kernel
Double_t TMVA::MethodKNN::PolnKernel(Double_t value) const
{
   const Double_t avalue = std::abs(value);

   if (!(avalue < 1.0)) {
      return 0.0;
   }

   const Double_t prod = 1.0 - avalue * avalue * avalue;

   return (prod * prod * prod);
}
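// This is the tricube kernel familiar from local regression:
//
//   k(u) = (1 - |u|^3)^3   for |u| < 1,   k(u) = 0 otherwise,
//
// so neighbor weights fall smoothly to zero at the kernel radius.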
// Gaussian kernel
Double_t TMVA::MethodKNN::GausKernel(const kNN::Event &event_knn,
                                     const kNN::Event &event,
                                     const std::vector<Double_t> &svec) const
{
   if (event_knn.GetNVar() != event.GetNVar() || event_knn.GetNVar() != svec.size()) {
      Log() << kFATAL << "Mismatched vectors in Gaussian kernel function" << Endl;
      // ...
   }

   // compute the Gaussian sum
   double sum_exp = 0.0;

   for (unsigned int ivar = 0; ivar < event_knn.GetNVar(); ++ivar) {

      const Double_t diff_ = event.GetVar(ivar) - event_knn.GetVar(ivar);
      const Double_t sigm_ = svec[ivar];
      if (!(sigm_ > 0.0)) {
         // ...
      }

      sum_exp += diff_*diff_/(2.0*sigm_*sigm_);
   }
   // ...
   return std::exp(-sum_exp);
}
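// In formula form, with sigma_i taken from svec (the per-variable widths of
// the neighbor list; the SigmaFact option rescales these widths):
//
//   k(x, y) = exp( - sum_i (x_i - y_i)^2 / (2 sigma_i^2) )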
// get polynomial kernel radius
Double_t TMVA::MethodKNN::getKernelRadius(const kNN::List &rlist) const
{
   // ...
   // the radius is the largest distance among the first knn neighbors
   for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit) {
      // ...
      if (!(lit->second > 0.0)) continue;
      // ...
      if (kradius < lit->second || kradius < 0.0) kradius = lit->second;
      // ...
      if (kcount >= knn) break;
   }
   // ...
}
// get the RMS of the k-nearest neighbors for each variable
const std::vector<Double_t> TMVA::MethodKNN::getRMS(const kNN::List &rlist, const kNN::Event &event_knn) const
{
   std::vector<Double_t> rvec;
   // ...
   for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit) {
      // ...
      if (!(lit->second > 0.0)) continue;
      // ...
      const kNN::Event &event_ = node_->GetEvent();
      // ...
      if (rvec.empty()) {
         rvec.insert(rvec.end(), event_.GetNVar(), 0.0);
      }
      else if (rvec.size() != event_.GetNVar()) {
         Log() << kFATAL << "Wrong number of variables, should never happen!" << Endl;
         // ...
      }
      // ...
      // accumulate the squared deviations from the query event
      for (unsigned int ivar = 0; ivar < event_.GetNVar(); ++ivar) {
         // ...
         rvec[ivar] += diff_*diff_;
      }
      // ...
      if (kcount >= knn) break;
   }
   // ...
   for (unsigned int ivar = 0; ivar < rvec.size(); ++ivar) {
      if (!(rvec[ivar] > 0.0)) {
         // ...
      }
      // ...
   }
   // ...
}
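// Each entry of rvec accumulates sum_n (x_{n,i} - q_i)^2 over the k neighbors
// of query event q; the elided lines presumably normalize by the neighbor
// count and take the square root, yielding the per-variable widths passed to
// GausKernel() as svec.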
// get the local LDA value (UseLDA option)
double TMVA::MethodKNN::getLDAValue(const kNN::List &rlist, const kNN::Event &event_knn)
{
   // ...
   // sort the neighbors into signal and background samples
   for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit) {
      // ...
      sig_vec.push_back(tvec);
      // ...
      bac_vec.push_back(tvec);
      // ...
      Log() << kFATAL << "Unknown type for training event" << Endl;
      // ...
   }

   fLDA.Initialize(sig_vec, bac_vec);

   return fLDA.GetProb(event_knn.GetVars(), 1);
}