Log() << kWARNING << "kNN must be a positive integer: set kNN = " << fnkNN << Endl;
Log() << kWARNING << "ScaleFrac can not be negative: set ScaleFrac = " << fScaleFrac << Endl;
Log() << kWARNING << "Optimize must be a positive integer: set Optimize = " << fBalanceDepth << Endl;
<< " kNN = \n" << fnkNN
<< " Trim = \n" << fTrim
Log() << kFATAL << "ModulekNN is not created" << Endl;
Log() << kINFO << "Creating kd-tree with " << fEvent.size() << " events" << Endl;
for (kNN::EventVec::const_iterator event = fEvent.begin(); event != fEvent.end(); ++event) {
Log() << kHEADER << "<Train> start..." << Endl;
Log() << kINFO << "Input events are normalized - setting ScaleFrac to 0" << Endl;
Log() << kINFO << "Erasing " << fEvent.size() << " previously stored events" << Endl;
Log() << kFATAL << "MethodKNN::Train() - mismatched or wrong number of event variables" << Endl;
kNN::Event event_knn(vvec, weight, event_type);
fEvent.push_back(event_knn);
for (Int_t ivar = 0; ivar < nvar; ++ivar) {
fModule->Find(event_knn, knn + 2);
if (rlist.size() != knn + 2) {
Log() << kFATAL << "kNN result list is empty" << Endl;
Bool_t use_gaus = false, use_poln = false;
if (fKernel == "Gaus") use_gaus = true;
else if (fKernel == "Poln") use_poln = true;
if (!(kradius > 0.0)) {
Log() << kFATAL << "kNN radius is not positive" << Endl;
std::vector<Double_t> rms_vec;
if (rms_vec.empty() || rms_vec.size() != event_knn.GetNVar()) {
Log() << kFATAL << "Failed to compute RMS vector" << Endl;
Double_t weight_all = 0, weight_sig = 0;
for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit) {
if (lit->second < 0.0) {
Log() << kFATAL << "A neighbor has negative distance to query event" << Endl;
else if (!(lit->second > 0.0)) {
Log() << kVERBOSE << "A neighbor has zero distance to query event" << Endl;
if (node.GetEvent().GetType() == 1) {
else if (node.GetEvent().GetType() == 2) {
Log() << kFATAL << "Unknown type for training event" << Endl;
if (count_all >= knn) {
if (!(count_all > 0)) {
Log() << kFATAL << "Size kNN result list is not positive" << Endl;
if (count_all < knn) {
Log() << kDEBUG << "count_all and kNN have different size: " << count_all << " < " << knn << Endl;
if (!(weight_all > 0.0)) {
Log() << kFATAL << "kNN result total weight is not positive" << Endl;
return weight_sig/weight_all;
std::vector<float> reg_vec;
for (Int_t ivar = 0; ivar < nvar; ++ivar) {
fModule->Find(event_knn, knn + 2);
if (rlist.size() != knn + 2) {
Log() << kFATAL << "kNN result list is empty" << Endl;
for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit) {
if (reg_vec.empty()) {
for (UInt_t ivar = 0; ivar < tvec.size(); ++ivar) {
if (fUseWeight) reg_vec[ivar] += tvec[ivar]*weight;
else reg_vec[ivar] += tvec[ivar];
if (count_all == knn) {
if (!(weight_all > 0.0)) {
Log() << kFATAL << "Total weight sum is not positive: " << weight_all << Endl;
for (UInt_t ivar = 0; ivar < reg_vec.size(); ++ivar) {
reg_vec[ivar] /= weight_all;
for (kNN::EventVec::const_iterator event = fEvent.begin(); event != fEvent.end(); ++event) {
std::stringstream s("");
for (UInt_t ivar = 0; ivar < event->GetNVar(); ++ivar) {
if (ivar > 0) s << " ";
s << std::scientific << event->GetVar(ivar);
for (UInt_t itgt = 0; itgt < event->GetNTgt(); ++itgt) {
s << " " << std::scientific << event->GetTgt(itgt);
UInt_t nvar = 0, ntgt = 0;
std::stringstream s( gTools().GetContent(ch) );
for (UInt_t ivar = 0; ivar < nvar; ivar++)
for (UInt_t itgt = 0; itgt < ntgt; itgt++)
kNN::Event event_knn(vvec, evtWeight, evtType, tvec);
fEvent.push_back(event_knn);
Log() << kINFO << "Starting ReadWeightsFromStream(std::istream& is) function..." << Endl;
Log() << kINFO << "Erasing " << fEvent.size() << " previously stored events" << Endl;
std::getline(is, line);
if (line.empty() || line.find("#") != std::string::npos) {
std::string::size_type pos = 0;
while ((pos = line.find(',', pos)) != std::string::npos) { count++; pos++; }
if (count < 3 || nvar != count - 2) {
Log() << kFATAL << "Missing comma delimiter(s)" << Endl;
std::string::size_type prev = 0;
for (std::string::size_type ipos = 0; ipos < line.size(); ++ipos) {
if (line[ipos] != ',' && ipos + 1 != line.size()) {
if (!(ipos > prev)) {
Log() << kFATAL << "Wrong substring limits" << Endl;
std::string vstring = line.substr(prev, ipos - prev);
if (ipos + 1 == line.size()) {
vstring = line.substr(prev, ipos - prev + 1);
if (vstring.empty()) {
Log() << kFATAL << "Failed to parse string" << Endl;
else if (vcount == 1) {
type = std::atoi(vstring.c_str());
else if (vcount == 2) {
weight = std::atof(vstring.c_str());
else if (vcount - 3 < vvec.size()) {
vvec[vcount - 3] = std::atof(vstring.c_str());
Log() << kFATAL << "Wrong variable count" << Endl;
Log() << kINFO << "Read " << fEvent.size() << " events from text file" << Endl;
Log() << kINFO << "Starting WriteWeightsToStream(TFile &rf) function..." << Endl;
Log() << kWARNING << "MethodKNN contains no events " << Endl;
tree->Branch("event", "TMVA::kNN::Event", &event);
for (kNN::EventVec::const_iterator it = fEvent.begin(); it != fEvent.end(); ++it) {
Log() << kINFO << "Wrote " << size << "MB and " << fEvent.size() << " events to ROOT file" << Endl;
Log() << kINFO << "Starting ReadWeightsFromStream(TFile &rf) function..." << Endl;
Log() << kINFO << "Erasing " << fEvent.size() << " previously stored events" << Endl;
Log() << kFATAL << "Failed to find knn tree" << Endl;
for (Int_t i = 0; i < nevent; ++i) {
Log() << kINFO << "Read " << size << "MB and " << fEvent.size() << " events from ROOT file" << Endl;
fout << " // not implemented for class: \"" << className << "\"" << std::endl;
fout << "};" << std::endl;
Log() << "The k-nearest neighbor (k-NN) algorithm is a multi-dimensional classification" << Endl
      << "and regression algorithm. Similarly to other TMVA algorithms, k-NN uses a set of" << Endl
      << "training events for which a classification category/regression target is known. " << Endl
      << "The k-NN method compares a test event to all training events using a distance " << Endl
      << "function, which is an Euclidean distance in a space defined by the input variables. " << Endl
      << "The k-NN method, as implemented in TMVA, uses a kd-tree algorithm to perform a" << Endl
      << "quick search for the k events with shortest distance to the test event. The method" << Endl
      << "returns a fraction of signal events among the k neighbors. It is recommended" << Endl
      << "that a histogram which stores the k-NN decision variable is binned with k+1 bins" << Endl
      << "between 0 and 1." << Endl;
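// Illustrative sketch only (hypothetical helper, not part of MethodKNN or the
// TMVA API): a brute-force version of the response described above. The
// kd-tree search is replaced by a full sort over the training sample; the
// returned value is the weighted signal fraction among the k nearest
// neighbors in Euclidean distance, i.e. a number between 0 and 1.
#include <algorithm>
#include <cstddef>
#include <utility>
#include <vector>

struct TrainingPoint {
   std::vector<double> vars;   // input variables of one training event
   double weight;              // event weight
   bool isSignal;              // true for signal, false for background
};

double knnResponse(const std::vector<double> &query,
                   const std::vector<TrainingPoint> &train, std::size_t k)
{
   // squared Euclidean distance of every training event to the query event
   std::vector<std::pair<double, const TrainingPoint *> > dist;
   dist.reserve(train.size());
   for (const TrainingPoint &p : train) {
      double d2 = 0.0;
      for (std::size_t i = 0; i < query.size(); ++i) {
         const double diff = p.vars[i] - query[i];
         d2 += diff * diff;
      }
      dist.push_back(std::make_pair(d2, &p));
   }
   // keep only the k nearest neighbors
   const std::size_t kk = std::min(k, dist.size());
   std::partial_sort(dist.begin(), dist.begin() + kk, dist.end());
   double wAll = 0.0, wSig = 0.0;
   for (std::size_t i = 0; i < kk; ++i) {
      wAll += dist[i].second->weight;
      if (dist[i].second->isSignal) wSig += dist[i].second->weight;
   }
   return wAll > 0.0 ? wSig / wAll : 0.5;
}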
Log() << gTools().Color("bold") << "--- Performance tuning via configuration options: "
Log() << "The k-NN method estimates a density of signal and background events in a " << Endl
      << "neighborhood around the test event. The method assumes that the density of the " << Endl
      << "signal and background events is uniform and constant within the neighborhood. " << Endl
      << "k is an adjustable parameter and it determines an average size of the " << Endl
      << "neighborhood. Small k values (less than 10) are sensitive to statistical " << Endl
      << "fluctuations and large (greater than 100) values might not sufficiently capture " << Endl
      << "local differences between events in the training set. The speed of the k-NN" << Endl
      << "method also increases with larger values of k. " << Endl;
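// Hedged usage sketch: a typical booking call that sets k explicitly. This
// assumes the standard TMVA::Factory / DataLoader interface; the option names
// (nkNN, ScaleFrac, UseKernel, UseWeight, Trim) are the ones declared by
// MethodKNN::DeclareOptions, but check the current option reference before
// relying on specific defaults.
#include "TMVA/DataLoader.h"
#include "TMVA/Factory.h"
#include "TMVA/Types.h"

void bookKNN(TMVA::Factory &factory, TMVA::DataLoader *loader)
{
   // k around 20 is a common starting point: very small k is statistically
   // noisy, very large k washes out local structure and is slower.
   factory.BookMethod(loader, TMVA::Types::kKNN, "KNN",
                      "nkNN=20:ScaleFrac=0.8:UseKernel=F:UseWeight=T:Trim=F");
}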
Log() << "The k-NN method assigns equal weight to all input variables. Different scales " << Endl
      << "among the input variables are compensated using the ScaleFrac parameter: the input " << Endl
      << "variables are scaled so that the widths for central ScaleFrac*100% events are " << Endl
      << "equal among all the input variables." << Endl;
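// Illustrative sketch of the ScaleFrac idea (hypothetical helper, not the
// TMVA implementation): for each input variable, take the width covered by
// the central scaleFrac*100% of the training values and use its inverse as a
// per-variable scale, so all variables contribute comparably to the distance.
#include <algorithm>
#include <cstddef>
#include <vector>

std::vector<double> scaleFactors(std::vector<std::vector<double> > columns,
                                 double scaleFrac)
{
   std::vector<double> scale;
   for (std::vector<double> &col : columns) {
      if (col.empty()) { scale.push_back(1.0); continue; }
      std::sort(col.begin(), col.end());
      // indices bracketing the central scaleFrac fraction of the sorted values
      const std::size_t n  = col.size();
      const std::size_t lo = static_cast<std::size_t>(0.5 * (1.0 - scaleFrac) * n);
      const std::size_t hi = n - 1 - lo;
      const double width = col[hi] - col[lo];
      scale.push_back(width > 0.0 ? 1.0 / width : 1.0);
   }
   return scale;
}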
Log() << gTools().Color("bold") << "--- Additional configuration options: "
Log() << "The method includes an option to use a Gaussian kernel to smooth out the k-NN" << Endl
      << "response. The kernel re-weights events using a distance to the test event." << Endl;
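// Sketch of the Gaussian re-weighting described above (hypothetical helper;
// the method's own implementation is GausKernel further down). Each neighbor
// y is weighted relative to the test event x by
//    w(x, y) = exp( - sum_i (x_i - y_i)^2 / (2 * sigma_i^2) ),
// where sigma_i is a per-variable width of the neighborhood (in the method,
// |SigmaFact| times the RMS of the k nearest neighbors), so distant
// neighbors contribute less to the response.
#include <cmath>
#include <cstddef>
#include <vector>

double gausWeight(const std::vector<double> &query,
                  const std::vector<double> &neighbor,
                  const std::vector<double> &sigma)
{
   double sumExp = 0.0;
   for (std::size_t i = 0; i < query.size(); ++i) {
      const double diff = neighbor[i] - query[i];
      if (sigma[i] > 0.0) sumExp += diff * diff / (2.0 * sigma[i] * sigma[i]);
   }
   return std::exp(-sumExp);
}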
if (!(avalue < 1.0)) {
const Double_t prod = 1.0 - avalue * avalue * avalue;
return (prod * prod * prod);
Double_t TMVA::MethodKNN::GausKernel(const kNN::Event &event_knn,
                                     const kNN::Event &event,
                                     const std::vector<Double_t> &svec) const
Log() << kFATAL << "Mismatched vectors in Gaussian kernel function" << Endl;
double sum_exp = 0.0;
for (unsigned int ivar = 0; ivar < event_knn.GetNVar(); ++ivar) {
const Double_t diff_ = event.GetVar(ivar) - event_knn.GetVar(ivar);
if (!(sigm_ > 0.0)) {
Log() << kFATAL << "Bad sigma value = " << sigm_ << Endl;
sum_exp += diff_*diff_/(2.0*sigm_*sigm_);
return std::exp(-sum_exp);
for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit)
if (!(lit->second > 0.0)) continue;
if (kradius < lit->second || kradius < 0.0) kradius = lit->second;
if (kcount >= knn) break;
std::vector<Double_t> rvec;
for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit)
if (!(lit->second > 0.0)) continue;
rvec.insert(rvec.end(), event_.GetNVar(), 0.0);
else if (rvec.size() != event_.GetNVar()) {
Log() << kFATAL << "Wrong number of variables, should never happen!" << Endl;
for (unsigned int ivar = 0; ivar < event_.GetNVar(); ++ivar) {
rvec[ivar] += diff_*diff_;
if (kcount >= knn) break;
Log() << kFATAL << "Bad event kcount = " << kcount << Endl;
for (unsigned int ivar = 0; ivar < rvec.size(); ++ivar) {
if (!(rvec[ivar] > 0.0)) {
Log() << kFATAL << "Bad RMS value = " << rvec[ivar] << Endl;
rvec[ivar] = std::abs(fSigmaFact)*std::sqrt(rvec[ivar]/kcount);
for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit) {
if (node.GetEvent().GetType() == 1) {
sig_vec.push_back(tvec);
else if (node.GetEvent().GetType() == 2) {
bac_vec.push_back(tvec);
Log() << kFATAL << "Unknown type for training event" << Endl;
fLDA.Initialize(sig_vec, bac_vec);