   fVRangeMode(kAdaptive),
   fKernelEstimator(kBox),
   fInitializedVolumeEle(0),

                          const TString& theWeightFile) :
   fVRangeMode(kAdaptive),
   fKernelEstimator(kBox),
   fInitializedVolumeEle(0),

   fVRangeMode           = kAdaptive;
   fKernelEstimator      = kBox;
   fMaxVIterations       = 150;
   fInitialScale         = 0.99;
   fkNNMin               = Int_t(fNEventsMin);
   fkNNMax               = Int_t(fNEventsMax);
   fInitializedVolumeEle = kFALSE;

   SetSignalReferenceCut( 0.0 );

   if (fDelta) delete fDelta;
   if (fShift) delete fShift;
   if (NULL != fBinaryTree) delete fBinaryTree;

   DeclareOptionRef(fVolumeRange="Adaptive", "VolumeRangeMode", "Method to determine volume size");
   AddPreDefVal(TString("Unscaled"));
   AddPreDefVal(TString("MinMax"));
   AddPreDefVal(TString("Adaptive"));

   DeclareOptionRef(fKernelString="Box", "KernelEstimator", "Kernel estimation function");
   AddPreDefVal(TString("Sphere"));
   AddPreDefVal(TString("Teepee"));
   AddPreDefVal(TString("Gauss"));
   AddPreDefVal(TString("Sinc3"));
   AddPreDefVal(TString("Sinc5"));
   AddPreDefVal(TString("Sinc7"));
   AddPreDefVal(TString("Sinc9"));
   AddPreDefVal(TString("Sinc11"));
   AddPreDefVal(TString("Lanczos2"));
   AddPreDefVal(TString("Lanczos3"));
   AddPreDefVal(TString("Lanczos5"));
   AddPreDefVal(TString("Lanczos8"));

   DeclareOptionRef(fDeltaFrac     , "DeltaFrac",      "nEventsMin/Max for minmax and rms volume range");
   DeclareOptionRef(fNEventsMin    , "NEventsMin",     "nEventsMin for adaptive volume range");
   DeclareOptionRef(fNEventsMax    , "NEventsMax",     "nEventsMax for adaptive volume range");
   DeclareOptionRef(fMaxVIterations, "MaxVIterations", "MaxVIterations for adaptive volume range");
   DeclareOptionRef(fInitialScale  , "InitialScale",   "InitialScale for adaptive volume range");
   DeclareOptionRef(fGaussSigma    , "GaussSigma",     "Width (wrt volume size) of Gaussian kernel estimator");
   DeclareOptionRef(fNormTree      , "NormTree",       "Normalize binary search tree");
   if (IgnoreEventsWithNegWeightsInTraining()) {
      Log() << kFATAL << "Mechanism to ignore events with negative weights in training not yet available for method: "
            << GetMethodTypeName()
            << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."

   fGaussSigmaNorm = fGaussSigma;

   if      (fVolumeRange == "MinMax"   ) fVRangeMode = kMinMax;
   else if (fVolumeRange == "RMS"      ) fVRangeMode = kRMS;
   else if (fVolumeRange == "Adaptive" ) fVRangeMode = kAdaptive;
   else if (fVolumeRange == "Unscaled" ) fVRangeMode = kUnscaled;
   else if (fVolumeRange == "kNN"      ) fVRangeMode = kkNN;
      Log() << kFATAL << "VolumeRangeMode parameter '" << fVolumeRange << "' unknown" << Endl;

   if      (fKernelString == "Box"      ) fKernelEstimator = kBox;
   else if (fKernelString == "Sphere"   ) fKernelEstimator = kSphere;
   else if (fKernelString == "Teepee"   ) fKernelEstimator = kTeepee;
   else if (fKernelString == "Gauss"    ) fKernelEstimator = kGauss;
   else if (fKernelString == "Sinc3"    ) fKernelEstimator = kSinc3;
   else if (fKernelString == "Sinc5"    ) fKernelEstimator = kSinc5;
   else if (fKernelString == "Sinc7"    ) fKernelEstimator = kSinc7;
   else if (fKernelString == "Sinc9"    ) fKernelEstimator = kSinc9;
   else if (fKernelString == "Sinc11"   ) fKernelEstimator = kSinc11;
   else if (fKernelString == "Lanczos2" ) fKernelEstimator = kLanczos2;
   else if (fKernelString == "Lanczos3" ) fKernelEstimator = kLanczos3;
   else if (fKernelString == "Lanczos5" ) fKernelEstimator = kLanczos5;
   else if (fKernelString == "Lanczos8" ) fKernelEstimator = kLanczos8;
   else if (fKernelString == "Trim"     ) fKernelEstimator = kTrim;
      Log() << kFATAL << "KernelEstimator parameter '" << fKernelString << "' unknown" << Endl;

   Log() << kVERBOSE << "interpreted option string: vRangeMethod: '"
         << (const char*)((fVRangeMode == kMinMax)   ? "MinMax"   :
                          (fVRangeMode == kUnscaled) ? "Unscaled" :
                          (fVRangeMode == kRMS     ) ? "RMS"      : "Adaptive") << "'" << Endl;
   if (fVRangeMode == kMinMax || fVRangeMode == kRMS)
      Log() << kVERBOSE << "deltaFrac: " << fDeltaFrac << Endl;
      Log() << kVERBOSE << "nEventsMin/Max, maxVIterations, initialScale: "
            << fNEventsMin << " " << fNEventsMax
            << " " << fMaxVIterations << " " << fInitialScale << Endl;
   Log() << kVERBOSE << "KernelEstimator = " << fKernelString << Endl;

   if (IsNormalised()) Log() << kFATAL << "\"Normalise\" option cannot be used with PDERS; "
                             << "please remove the option from the configuration string, or "
                             << "use \"!Normalise\""

   fInitializedVolumeEle = kTRUE;

   if (fInitializedVolumeEle == kFALSE) {
      fInitializedVolumeEle = kTRUE;
      assert( fBinaryTree );

   NoErrorCalc(err, errUpper);

   return this->CRScalc( *GetEvent() );

   if (fRegressionReturnVal == 0) fRegressionReturnVal = new std::vector<Float_t>;
   fRegressionReturnVal->clear();

   if (fInitializedVolumeEle == kFALSE) {
      fInitializedVolumeEle = kTRUE;
      assert( fBinaryTree );

   const Event* ev = GetEvent();
   this->RRScalc( *ev, fRegressionReturnVal );

   for (std::vector<Float_t>::iterator it = fRegressionReturnVal->begin(); it != fRegressionReturnVal->end(); ++it ) {

   const Event* evT2 = GetTransformationHandler().InverseTransform( evT );
   fRegressionReturnVal->clear();

   fRegressionReturnVal->push_back(evT2->GetTarget(ivar));

   return (*fRegressionReturnVal);

   if (fVRangeMode == kAdaptive || fVRangeMode == kRMS || fVRangeMode == kkNN ) {
      fBinaryTree->CalcStatistics();
      for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
         if (!DoRegression()){
            fAverageRMS.push_back( (rmsS + rmsB)*0.5 );
            Float_t rms = fBinaryTree->RMS( ivar );
            fAverageRMS.push_back( rms );

   if (NULL != fBinaryTree) delete fBinaryTree;
   fBinaryTree->SetNormalize( kTRUE );
   fBinaryTree->Fill( GetEventCollection(type) );
   fBinaryTree->NormalizeTree();

   if (!DoRegression()) {
      Log() << kVERBOSE << "Signal and background scales: " << fScaleS << " " << fScaleB << Endl;

   Log() << kFATAL << "GetNvar() == 0" << Endl;

   fkNNMin = Int_t(fNEventsMin);
   fkNNMax = Int_t(fNEventsMax);

   if (fDelta) delete fDelta;
   if (fShift) delete fShift;
   fDelta = new std::vector<Float_t>( GetNvar() );
   fShift = new std::vector<Float_t>( GetNvar() );

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      switch (fVRangeMode) {
         if (fAverageRMS.size() != GetNvar())
            Log() << kFATAL << "<SetVolumeElement> RMS not computed: " << fAverageRMS.size() << Endl;
         (*fDelta)[ivar] = fAverageRMS[ivar]*fDeltaFrac;
         Log() << kVERBOSE << "delta of var[" << (*fInputVars)[ivar]
               << "\t]: " << fAverageRMS[ivar]
               << "\t | comp with |max - min|: " << (GetXmax( ivar ) - GetXmin( ivar ))
         (*fDelta)[ivar] = (GetXmax( ivar ) - GetXmin( ivar ))*fDeltaFrac;
         (*fDelta)[ivar] = fDeltaFrac;
         Log() << kFATAL << "<SetVolumeElement> unknown range-set mode: "
               << fVRangeMode << Endl;
      (*fShift)[ivar] = 0.5;
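
   // How fDelta/fShift are used downstream (see CRScalc/RRScalc below): for each variable the
   // search box around a test value x is
   //    lower = x - delta*(1 - shift),   upper = x + delta*shift,
   // so the default shift = 0.5 centres the box on x with full width delta. Illustrative
   // numbers (not from the source): RMS = 2.0 and DeltaFrac = 3.0 give delta = 6.0, i.e. the
   // box spans x-3 to x+3, or 1.5 times the RMS on either side of the test value.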
   return ThisPDERS()->GetVolumeContentForRoot( scale );

   v.ScaleInterval( scale );
   Double_t count = GetBinaryTree()->SearchVolume( &v );

                                std::vector<const BinarySearchTreeNode*>& events,

#ifdef TMVA_MethodPDERS__countByHand__Debug__
   count = fBinaryTree->SearchVolume( volume );
   Int_t iS = 0, iB = 0;
   for (UInt_t ievt_=0; ievt_<Data()->GetNTrainingEvents(); ievt_++) {
      const Event * ev = GetTrainingEvent(ievt_);
      for (Int_t ivar=0; ivar<nvar; ivar++) {
         inV = (x > (*volume->Lower)[ivar] && x <= (*volume->Upper)[ivar]);
   Log() << kVERBOSE << "debug: my test: " << in << Endl;
   Log() << kVERBOSE << "debug: binTree: " << count << Endl << Endl;

   if (fVRangeMode == kRMS || fVRangeMode == kMinMax || fVRangeMode == kUnscaled) {

      std::vector<Double_t> *lb = new std::vector<Double_t>( GetNvar() );
      for (UInt_t ivar=0; ivar<GetNvar(); ivar++) (*lb)[ivar] = e.GetValue(ivar);
      std::vector<Double_t> *ub = new std::vector<Double_t>( *lb );
      for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
         (*lb)[ivar] -= (*fDelta)[ivar]*(1.0 - (*fShift)[ivar]);
         (*ub)[ivar] += (*fDelta)[ivar]*(*fShift)[ivar];

      fBinaryTree->SearchVolume( svolume, &events );

   else if (fVRangeMode == kAdaptive) {

      if (MethodPDERS_UseFindRoot) {

         fHelpVolume = volume;
         RootFinder rootFinder( this, 0.01, 50, 200, 10 );
         Double_t scale = rootFinder.Root( (fNEventsMin + fNEventsMax)/2.0 );
         fBinaryTree->SearchVolume( volume, &events );

         count = fBinaryTree->SearchVolume( volume );

         while (nEventsO < fNEventsMin) {
            count = fBinaryTree->SearchVolume( volume );
            if (i_ > 50) Log() << kWARNING << "warning in event: " << e
                               << ": adaptive volume pre-adjustment reached "
                               << ">50 iterations in while loop (" << i_ << ")" << Endl;

         Float_t nEventsE    = 0.5*(fNEventsMin + fNEventsMax);
         Float_t scaleN      = fInitialScale;
         Float_t nEventsBest = nEventsN;

         for (Int_t ic=1; ic<fMaxVIterations; ic++) {
            if (nEventsN < fNEventsMin || nEventsN > fNEventsMax) {
               v->ScaleInterval( scale );
               nEventsN = fBinaryTree->SearchVolume( v );
               if (nEventsN > 1 && nEventsN - nEventsO != 0)
                  if (scaleN - scaleO != 0)
                     scale += (scaleN - scaleO)/(nEventsN - nEventsO)*(nEventsE - nEventsN);

                (nEventsN >= fNEventsMin || nEventsBest < nEventsN)) {
               nEventsBest = nEventsN;

         nEventsN = nEventsBest;

         if (nEventsN < fNEventsMin-1 || nEventsN > fNEventsMax+1)
            Log() << kWARNING << "warning in event " << e
                  << ": adaptive volume adjustment reached "
                  << "max. #iterations (" << fMaxVIterations << ")"
                  << " [ nEvents: " << nEventsN << " " << fNEventsMin << " " << fNEventsMax << " ]"

         fBinaryTree->SearchVolume( volume, &events );
   }
   else if (fVRangeMode == kkNN) {

      Int_t kNNcount = fBinaryTree->SearchVolumeWithMaxLimit( &v, &events, fkNNMax+1 );

      while ( !(kNNcount >= fkNNMin && kNNcount <= fkNNMax) ) {
         if (kNNcount < fkNNMin) {
         else if (kNNcount > fkNNMax) {
         kNNcount = fBinaryTree->SearchVolumeWithMaxLimit( &v, &events, fkNNMax+1 );
         if (t_times == fMaxVIterations) {
            Log() << kWARNING << "warning in event " << e
                  << ": kNN volume adjustment reached "
                  << "max. #iterations (" << fMaxVIterations << ")"
                  << " [ kNN: " << fkNNMin << " " << fkNNMax << " ]" << Endl;

      for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
         dim_normalization[ivar] = 1.0 / ((*v.fUpper)[ivar] - (*v.fLower)[ivar]);

      std::vector<const BinarySearchTreeNode*> tempVector;

      if (kNNcount >= fkNNMin) {
         std::vector<Double_t> *distances = new std::vector<Double_t>( kNNcount );
         (*distances)[j] = GetNormalizedDistance( e, *events[j], dim_normalization );
         std::vector<Double_t>::iterator wsk = distances->begin();
         for (Int_t j=0; j<fkNNMin-1; ++j) ++wsk;
         std::nth_element( distances->begin(), wsk, distances->end() );
         Double_t dist = GetNormalizedDistance( e, *events[j], dim_normalization );
         if (dist <= (*distances)[fkNNMin-1])
            tempVector.push_back( events[j] );
         fMax_distance = (*distances)[fkNNMin-1];
      delete[] dim_normalization;

   Log() << kFATAL << "<GetSample> unknown RangeMode: " << fVRangeMode << Endl;
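
   // Note on the adaptive branch above: IGetVolumeContentForRoot(s) counts the training events
   // inside the candidate volume rescaled by s, so the RootFinder (Brent's method) solves
   //    N(s) = (fNEventsMin + fNEventsMax)/2
   // for the scale s. The fallback loop (MethodPDERS_UseFindRoot == false) pursues the same
   // target with a secant-style update,
   //    scale += (scaleN - scaleO)/(nEventsN - nEventsO) * (nEventsE - nEventsN),
   // i.e. it extrapolates linearly from the last two (scale, count) pairs towards nEventsE.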
   std::vector<const BinarySearchTreeNode*> events;

   std::vector<Double_t> *lb = new std::vector<Double_t>( GetNvar() );
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) (*lb)[ivar] = e.GetValue(ivar);
   std::vector<Double_t> *ub = new std::vector<Double_t>( *lb );
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      (*lb)[ivar] -= (*fDelta)[ivar]*(1.0 - (*fShift)[ivar]);
      (*ub)[ivar] += (*fDelta)[ivar]*(*fShift)[ivar];

   GetSample( e, events, volume );
   Double_t count = CKernelEstimate( e, events, *volume );

   std::vector<const BinarySearchTreeNode*> events;

   std::vector<Double_t> *lb = new std::vector<Double_t>( GetNvar() );
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) (*lb)[ivar] = e.GetValue(ivar);
   std::vector<Double_t> *ub = new std::vector<Double_t>( *lb );
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      (*lb)[ivar] -= (*fDelta)[ivar]*(1.0 - (*fShift)[ivar]);
      (*ub)[ivar] += (*fDelta)[ivar]*(*fShift)[ivar];

   GetSample( e, events, volume );
   RKernelEstimate( e, events, *volume, count );

                                std::vector<const BinarySearchTreeNode*>& events, Volume& v )

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++)
      dim_normalization[ivar] = 2 / ((*v.fUpper)[ivar] - (*v.fLower)[ivar]);

   for (std::vector<const BinarySearchTreeNode*>::iterator iev = events.begin(); iev != events.end(); ++iev) {

      Double_t normalized_distance = GetNormalizedDistance( event, *(*iev), dim_normalization );

      if (normalized_distance > 1 && fKernelEstimator != kBox) continue;

      if ( (*iev)->GetClass()==fSignalClass )
         pdfSumS += ApplyKernelFunction( normalized_distance ) * (*iev)->GetWeight();
         pdfSumB += ApplyKernelFunction( normalized_distance ) * (*iev)->GetWeight();

   pdfSumS = KernelNormalization( pdfSumS < 0. ? 0. : pdfSumS );
   pdfSumB = KernelNormalization( pdfSumB < 0. ? 0. : pdfSumB );

   delete[] dim_normalization;

   if (pdfSumS < 1e-20 && pdfSumB < 1e-20) return 0.5;
   if (pdfSumB < 1e-20) return 1.0;
   if (pdfSumS < 1e-20) return 0.0;

   Float_t r = pdfSumB*fScaleB/(pdfSumS*fScaleS);
   return 1.0/(r + 1.0);
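
   // Equivalent closed form of the return value above: with s = pdfSumS*fScaleS and
   // b = pdfSumB*fScaleB, the ratio is r = b/s and 1/(r + 1) = s/(s + b), i.e. the
   // kernel-weighted signal fraction in the sampled volume (0.5 is returned when both
   // sums are numerically empty).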
                                std::vector<const BinarySearchTreeNode*>& events, Volume& v,
                                std::vector<Float_t>* pdfSum )

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++)
      dim_normalization[ivar] = 2 / ((*v.fUpper)[ivar] - (*v.fLower)[ivar]);

   for (Int_t ivar = 0; ivar < fNRegOut; ivar++)
      pdfSum->push_back( 0 );

   for (std::vector<const BinarySearchTreeNode*>::iterator iev = events.begin(); iev != events.end(); ++iev) {

      Double_t normalized_distance = GetNormalizedDistance( event, *(*iev), dim_normalization );

      if (normalized_distance > 1 && fKernelEstimator != kBox) continue;

      for (Int_t ivar = 0; ivar < fNRegOut; ivar++) {
         pdfSum->at(ivar) += ApplyKernelFunction( normalized_distance ) * (*iev)->GetWeight() * (*iev)->GetTargets()[ivar];
         pdfDiv           += ApplyKernelFunction( normalized_distance ) * (*iev)->GetWeight();

   delete[] dim_normalization;

   for (Int_t ivar = 0; ivar < fNRegOut; ivar++)
      pdfSum->at(ivar) /= pdfDiv;
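
   // The regression estimate above is a kernel-weighted (Nadaraya-Watson style) mean of the
   // neighbours' target values:
   //    t_hat[ivar] = sum_j K(d_j) * w_j * t_j[ivar]  /  sum_j K(d_j) * w_j,
   // with K the chosen kernel, d_j the normalized distance and w_j the event weight.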
   switch (fKernelEstimator) {
      return (1 - normalized_distance);
      Double_t side_crossings = 2 + ((int) fKernelEstimator) - ((int) kSinc3);
      return NormSinc( side_crossings * normalized_distance );
      return LanczosFilter( 2, normalized_distance );
      return LanczosFilter( 3, normalized_distance );
      return LanczosFilter( 5, normalized_distance );
      return LanczosFilter( 8, normalized_distance );
      Double_t x = normalized_distance / fMax_distance;
      Log() << kFATAL << "Kernel estimation function unsupported. Enumerator is " << fKernelEstimator << Endl;
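
   // The Sinc* and Lanczos* cases above delegate to NormSinc and LanczosFilter (defined further
   // below), both built from the normalized sinc function sinc(x) = sin(pi*x)/(pi*x) with
   // sinc(0) = 1; the |x| ~ 0 guards below handle that removable singularity. A minimal
   // standalone sketch of such a kernel (illustrative, not the class implementation):
   //
   //    #include "TMath.h"
   //    inline Double_t SincKernel(Double_t x) {
   //       if (TMath::Abs(x) < 1e-10) return 1.0;   // limit at x = 0
   //       const Double_t pix = TMath::Pi() * x;
   //       return TMath::Sin(pix) / pix;            // normalized sinc
   //    }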
   if (ret != 0.0) return ret*pdf;

   switch (fKernelEstimator) {
      Log() << kFATAL << "Kernel estimation function unsupported. Enumerator is " << fKernelEstimator << Endl;

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {

   if (x < 10e-10 && x > -10e-10) {

   if (x < 10e-10 && x > -10e-10) {

   if (d < 1e-10) return 1;

   Float_t err = f*countB*countB*sumW2S + f*countS*countS*sumW2B;

   if (err < 1e-10) return 1;

   fBinaryTree->AddXMLTo(wght);
   Log() << kFATAL << "Signal and background binary search tree not available" << Endl;

   if (NULL != fBinaryTree) delete fBinaryTree;
   Log() << kFATAL << "Could not create BinarySearchTree from XML" << Endl;
   Log() << kFATAL << "Could not create BinarySearchTree from XML" << Endl;
   fBinaryTree->SetPeriode( GetNvar() );
   fBinaryTree->CalcStatistics();
   fBinaryTree->CountNodes();
   Log() << kINFO << "signal and background scales: " << fScaleS << " " << fScaleB << Endl;
   fInitializedVolumeEle = kTRUE;

   if (NULL != fBinaryTree) delete fBinaryTree;
   istr >> *fBinaryTree;
   fBinaryTree->SetPeriode( GetNvar() );
   fBinaryTree->CalcStatistics();
   fBinaryTree->CountNodes();
   Log() << kINFO << "signal and background scales: " << fScaleS << " " << fScaleB << Endl;
   fInitializedVolumeEle = kTRUE;

   return GetMethodPDERSThreadLocal();

   GetMethodPDERSThreadLocal() = this;

   fout << "   // not implemented for class: \"" << className << "\"" << std::endl;
   fout << "};" << std::endl;

   Log() << "PDERS is a generalization of the projective likelihood classifier " << Endl;
   Log() << "to N dimensions, where N is the number of input variables used." << Endl;
   Log() << "In its adaptive form it is mostly equivalent to k-Nearest-Neighbor" << Endl;
   Log() << "(k-NN) methods. If the multidimensional PDF for signal and background" << Endl;
   Log() << "were known, this classifier would exploit the full information" << Endl;
   Log() << "contained in the input variables, and would hence be optimal. In " << Endl;
   Log() << "practice however, huge training samples are necessary to sufficiently " << Endl;
   Log() << "populate the multidimensional phase space. " << Endl;

   Log() << "The simplest implementation of PDERS counts the number of signal" << Endl;
   Log() << "and background events in the vicinity of a test event, and returns" << Endl;
   Log() << "a weight according to the majority species of the neighboring events." << Endl;
   Log() << "A more involved version of PDERS (selected by the option \"KernelEstimator\")" << Endl;
   Log() << "uses Kernel estimation methods to approximate the shape of the PDF." << Endl;

   Log() << "PDERS can be very powerful in case of strongly non-linear problems, " << Endl;
   Log() << "e.g., distinct islands of signal and background regions. Because of " << Endl;
   Log() << "the exponential growth of the phase space, it is important to restrict" << Endl;
   Log() << "the number of input variables (dimension) to the strictly necessary." << Endl;

   Log() << "Note that PDERS is a slowly responding classifier. Moreover, the necessity" << Endl;
   Log() << "to store the entire binary tree in memory, to avoid accessing virtual " << Endl;
   Log() << "memory, limits the number of training events that can effectively be " << Endl;
   Log() << "used to model the multidimensional PDF." << Endl;

   Log() << "If the PDERS response is found too slow when using the adaptive volume " << Endl;
   Log() << "size (option \"VolumeRangeMode=Adaptive\"), it might be found beneficial" << Endl;
   Log() << "to reduce the number of events required in the volume, and/or to enlarge" << Endl;
   Log() << "the allowed range (\"NEventsMin/Max\"). PDERS is relatively insensitive" << Endl;
   Log() << "to the width (\"GaussSigma\") of the Gaussian kernel (if used)." << Endl;
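
   // As an illustration of the tuning advice printed above (hypothetical values, not a
   // recommendation from this file), a faster adaptive configuration could relax the
   // per-volume event requirement and widen the allowed window, e.g.
   //
   //    "VolumeRangeMode=Adaptive:KernelEstimator=Gauss:NEventsMin=40:NEventsMax=100:MaxVIterations=50"
   //
   // Fewer required events per volume and a wider [NEventsMin, NEventsMax] window reduce the
   // number of volume iterations needed per test event.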