Logo ROOT  
Reference Guide
 
Loading...
Searching...
No Matches
MnSeedGenerator.cxx
Go to the documentation of this file.
1// @(#)root/minuit2:$Id$
2// Authors: M. Winkler, F. James, L. Moneta, A. Zsenei 2003-2005
3
4/**********************************************************************
5 * *
6 * Copyright (c) 2005 LCG ROOT Math team, CERN/PH-SFT *
7 * *
8 **********************************************************************/
9
11#include "Minuit2/MinimumSeed.h"
12#include "Minuit2/MnFcn.h"
19#include "Minuit2/MnMatrix.h"
26#include "Minuit2/MnStrategy.h"
27#include "Minuit2/MnHesse.h"
33#include "Minuit2/MnPrint.h"
34
35#include <cmath>
36
37namespace ROOT {
38
39namespace Minuit2 {
40
// Builds the initial MinimumSeed for a minimization: evaluates the FCN and its
// gradient at the user's starting point, constructs a first error (covariance)
// estimate, and optionally refines it (negative-G2 line search, or a full
// MnHesse computation at strategy 2).
//
// NOTE(review): this Doxygen-extracted listing is missing original source
// lines 41, 56, 63 and 87 — presumably the signature prefix
// ("MinimumSeed MnSeedGenerator::"), the declarations of the internal
// parameter vector `x`, the symmetric seed matrix `mat`, and the
// NegativeG2LineSearch object `ng2ls` used below. Confirm against the
// original file before relying on this listing.
42operator()(const MnFcn &fcn, const GradientCalculator &gc, const MnUserParameterState &st, const MnStrategy &stra) const
43{
44
45 MnPrint print("MnSeedGenerator");
46
47 // find seed (initial minimization point) using the calculated gradient
48 const unsigned int n = st.VariableParameters();
49 const MnMachinePrecision &prec = st.Precision();
50
51 print.Info("Computing seed using NumericalGradient calculator");
52
53 print.Debug(n, "free parameters, FCN pointer", &fcn);
54
55 // initial starting values
// copy the internal (transformed) parameter values into the seed point
57 for (unsigned int i = 0; i < n; i++)
58 x(i) = st.IntParameters()[i];
// evaluate the objective function once at the starting point
59 double fcnmin = fcn(x);
60
61 MinimumParameters pa(x, fcnmin);
// gradient (and possibly G2, the diagonal 2nd derivatives) at the seed point
62 FunctionGradient dgrad = gc(pa);
// dcovar is the "covariance quality" flag stored in MinimumError:
// 1. = crude diagonal guess, 0. = taken from an existing covariance
64 double dcovar = 1.;
65 if (st.HasCovariance()) {
// re-use the user-provided covariance; fall back per-diagonal to 1/G2
// (or 1.0) when an entry is not larger than machine precision
66 for (unsigned int i = 0; i < n; i++) {
67 mat(i, i) = st.IntCovariance()(i, i) > prec.Eps() ? st.IntCovariance()(i, i)
68 : dgrad.G2()(i) > prec.Eps() ? 1. / dgrad.G2()(i)
69 : 1.0;
70 for (unsigned int j = i + 1; j < n; j++)
71 mat(i, j) = st.IntCovariance()(i, j);
72 }
73 dcovar = 0.;
74 } else {
// no prior covariance: seed with a diagonal matrix of 1/G2
75 for (unsigned int i = 0; i < n; i++)
76 // if G2 is small better using an arbitrary value (e.g. 1)
77 mat(i, i) = dgrad.G2()(i) > prec.Eps() ? 1. / dgrad.G2()(i) : 1.0;
78 }
79 MinimumError err(mat, dcovar);
80
// estimated distance to minimum for the initial state
81 double edm = VariableMetricEDMEstimator().Estimate(dgrad, err);
82 MinimumState state(pa, err, dgrad, edm, fcn.NumOfCalls());
83
84 print.Info("Initial state:", MnPrint::Oneline(state));
85
// a negative second derivative makes the seed matrix non-posdef; only
// attempt the fix-up line search when we built the covariance ourselves
86 if (!st.HasCovariance()) {
88 if (ng2ls.HasNegativeG2(dgrad, prec)) {
89 print.Debug("Negative G2 Found", "\n point:", x, "\n grad :", dgrad.Grad(), "\n g2 :", dgrad.G2());
90
91 state = ng2ls(fcn, state, gc, prec);
92
93 print.Info("Negative G2 found - new state:", state);
94 }
95 }
96
// highest strategy: pay for a full second-derivative (Hesse) computation
// to seed with a proper covariance matrix instead of the diagonal guess
97 if (stra.Strategy() == 2 && !st.HasCovariance()) {
98 // calculate full 2nd derivative
99
100 print.Debug("calling MnHesse");
101
102 MinimumState tmp = MnHesse(stra)(fcn, state, st.Trafo());
103
104 print.Info("run Hesse - Initial seeding state:", tmp);
105
106 return MinimumSeed(tmp, st.Trafo());
107 }
108
109 print.Info("Initial state ",state);
110
111 return MinimumSeed(state, st.Trafo());
112}
113
// Overload used with an analytical (external) gradient calculator: same seed
// construction as the numerical overload, but G2 may come from a
// user-supplied Hessian. Falls back to the numerical-gradient overload when
// the calculator cannot provide G2 at all.
//
// NOTE(review): this Doxygen-extracted listing is missing original source
// lines 114, 144, 153, 158, 165 and 217 — presumably the signature's first
// line ("MinimumSeed MnSeedGenerator::operator()(const MnFcn &fcn,
// const AnalyticalGradientCalculator &gc,") and the declarations of `x`,
// `mat`, `hmat`, `g2` and `ng2ls`. Confirm against the original file.
115 const MnUserParameterState &st, const MnStrategy &stra) const
116{
117 MnPrint print("MnSeedGenerator");
118
119 // check gradient (slow: will require more function evaluations)
120 //if (gc.CheckGradient()) {
121 // //CheckGradient(st,trado,stra,grd)
122 //}
123
// calculator cannot provide second derivatives: delegate to the
// numerical-gradient overload with a Numerical2PGradientCalculator
124 if (!gc.CanComputeG2()) {
125 print.Info("Using analytical (external) gradient calculator but cannot compute G2 - use then numerical gradient for G2");
126 Numerical2PGradientCalculator ngc(fcn, st.Trafo(), stra);
127 return this->operator()(fcn, ngc, st, stra);
128 }
129
130
131
132 if (gc.CanComputeHessian())
133 print.Info("Computing seed using analytical (external) gradients and Hessian calculator");
134 else
135 print.Info("Computing seed using analytical (external) gradients and G2 calculator");
136
137
138
139 // find seed (initial point for minimization) using analytical gradient
140 unsigned int n = st.VariableParameters();
141 const MnMachinePrecision &prec = st.Precision();
142
143 // initial starting values
// copy the internal (transformed) parameter values into the seed point
145 for (unsigned int i = 0; i < n; i++)
146 x(i) = st.IntParameters()[i];
147 double fcnmin = fcn(x);
148 MinimumParameters pa(x, fcnmin);
149
150 // compute function gradient
151 FunctionGradient grad = gc(pa);
// covariance-quality flag: set to 0. when re-using an existing
// covariance, 1. when seeding with a crude diagonal guess (see below)
152 double dcovar = 0;
154 // if we can compute Hessian compute it and use it
155 bool computedHessian = false;
156 if (!grad.HasG2()) {
// we already returned above when G2 is unavailable, so a gradient
// without G2 implies the calculator must offer a full Hessian
157 assert(gc.CanComputeHessian());
159 bool ret = gc.Hessian(pa, hmat);
160 if (!ret) {
161 print.Error("Cannot compute G2 and Hessian");
// NOTE(review): assert(true) is a no-op — presumably this was meant
// to be assert(false) (or assert(ret)) to abort on failure; as
// written the error is logged and execution continues. Confirm.
162 assert(true);
163 }
164 // update gradient using G2 from Hessian calculation
// take the Hessian diagonal as the G2 vector
166 for (unsigned int i = 0; i < n; i++)
167 g2(i) = hmat(i,i);
168 grad = FunctionGradient(grad.Grad(),g2);
169
170 print.Debug("Computed analytical G2",g2);
171
172 // when Hessian has been computed invert to get covariance
173 // we prefer not using full Hessian in strategy 1 since we need to be sure that
174 // is pos-defined. Uncomment following line if want to have seed with the full Hessian
175 //computedHessian = true;
// dead branch by default: computedHessian is still false here unless
// the line above is uncommented
176 if (computedHessian) {
177 mat = MinimumError::InvertMatrix(hmat);
178 print.Info("Use full Hessian as seed");
179 print.Debug("computed Hessian",hmat);
180 print.Debug("computed Error matrix (H^-1)",mat);
181 }
182 }
183 // do this only when we have not computed the Hessian or always ?
184 if (!computedHessian) {
185 // check if minimum state has covariance - if not use computed G2
186 // maybe this should be an option; sometimes it is not good to re-use an existing covariance
187 if (st.HasCovariance()) {
188 print.Info("Using existing covariance matrix");
// re-use the user-provided covariance; fall back per-diagonal to
// 1/G2 (or 1.0) when an entry is not larger than machine precision
189 for (unsigned int i = 0; i < n; i++) {
190 mat(i, i) = st.IntCovariance()(i, i) > prec.Eps() ? st.IntCovariance()(i, i)
191 : grad.G2()(i) > prec.Eps() ? 1. / grad.G2()(i)
192 : 1.0;
193 for (unsigned int j = i + 1; j < n; j++)
194 mat(i, j) = st.IntCovariance()(i, j);
195 }
196 dcovar = 0.;
197 } else {
// no prior covariance: seed with a diagonal matrix of 1/G2
198 for (unsigned int i = 0; i < n; i++) {
199 // if G2 is very small, better using an arbitrary value (e.g. 1.)
200 mat(i, i) = grad.G2()(i) > prec.Eps() ? 1. / grad.G2()(i) : 1.0;
201 }
202 dcovar = 1.;
203 }
204 } else {
205 print.Info("Computing seed using full Hessian");
206 }
207
208 MinimumError err(mat, dcovar);
// estimated distance to minimum for the initial state
209 double edm = VariableMetricEDMEstimator().Estimate(grad, err);
210
211 if (!grad.HasG2()) {
212 print.Error("Cannot compute seed because G2 is not computed");
213 }
214 MinimumState state(pa, err, grad, edm, fcn.NumOfCalls());
215
// fix up non-posdef seeds caused by negative second derivatives, but only
// when the covariance was built here rather than supplied by the user
216 if (!st.HasCovariance()) {
218 if (ng2ls.HasNegativeG2(grad, prec)) {
219 // do a negative line search - can use current gradient calculator
220 // Numerical2PGradientCalculator ngc(fcn, st.Trafo(), stra);
221 state = ng2ls(fcn, state, gc, prec);
222 }
223 }
224
225 // compute Hessian above will not have posdef check as it is done if we call MnHesse
226 if (stra.Strategy() == 2 && !st.HasCovariance() && !computedHessian) {
227 // can calculate full 2nd derivative
228 MinimumState tmpState = MnHesse(stra)(fcn, state, st.Trafo());
229 print.Info("Compute full Hessian: Initial seeding state is ",tmpState);
230 return MinimumSeed(tmpState, st.Trafo());
231 }
232
233 print.Info("Initial seeding state ",state);
234
235 return MinimumSeed(state, st.Trafo());
236}
// Disabled (never-compiled) sketch of a user-gradient validation routine:
// it would compare a user-supplied gradient against one refined by
// HessianGradientCalculator and reject it when any component differs by more
// than the estimated uncertainty.
//
// NOTE(review): as written this would not compile even if the #if 0 were
// removed — `fcn`, `n` and `print` are used but not declared or passed in,
// and `return good` on the last line is missing its semicolon. Keep inside
// #if 0 (or finish it) — do not enable as-is.
237#if 0
238bool CheckGradient(MinimumState & st, MnUserTransformation & trafo, MnStrategy & stra)
239{
240
241 const MinimumParameters & pa = st.Parameters();
242 const FunctionGradient & grd = st.FunctionGradient();
243
244 // I think one should use Numerical2PGradientCalculator
245 // since step sizes and G2 of initial gradient are wrong
246 InitialGradientCalculator igc(fcn, trafo, stra);
247 FunctionGradient tmp = igc(pa);
248 // should also use G2 from grd (in case Analyticalgradient can compute Hessian ?)
249 FunctionGradient dgrad(grd.Grad(), tmp.G2(), tmp.Gstep());
250
251 // do check computing gradient with HessianGradientCalculator which refines the gradient given an initial one
252 bool good = true;
253 HessianGradientCalculator hgc(fcn, trafo, MnStrategy(2));
254 std::pair<FunctionGradient, MnAlgebraicVector> hgrd = hgc.DeltaGradient(pa, dgrad);
// flag any component whose discrepancy exceeds the refined-gradient error
255 for (unsigned int i = 0; i < n; i++) {
256 if (std::fabs(hgrd.first.Grad()(i) - grd.Grad()(i)) > hgrd.second(i)) {
257 int externalParameterIndex = trafo.ExtOfInt(i);
258 const char *parameter_name = trafo.Name(externalParameterIndex);
259 print.Warn("Gradient discrepancy of external Parameter too large:"
260 "parameter_name =",
261 parameter_name, "externalParameterIndex =", externalParameterIndex, "internal =", i);
262 good = false;
263 }
264 }
265 if (!good) {
266 print.Error("Minuit does not accept user specified Gradient. To force acceptance, override 'virtual bool "
267 "CheckGradient() const' of FCNGradientBase.h in the derived class.");
268
269 assert(good);
270 }
// NOTE(review): missing trailing semicolon on the next line
271 return good
272}
273#endif
274
275} // namespace Minuit2
276
277} // namespace ROOT
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void gc
const MnAlgebraicVector & Grad() const
const MnAlgebraicVector & G2() const
interface class for gradient calculators
HessianGradientCalculator: class to calculate Gradient for Hessian.
Class to calculate an initial estimate of the gradient.
Class describing a symmetric matrix of size n.
Definition LASymMatrix.h:45
MinimumError keeps the inv. 2nd derivative (inv. Hessian) used for calculating the Parameter step size.
static MnAlgebraicSymMatrix InvertMatrix(const MnAlgebraicSymMatrix &matrix, int &ifail)
MinimumState keeps the information (position, Gradient, 2nd deriv, etc) after one minimization step (...
const MinimumParameters & Parameters() const
Wrapper class to FCNBase interface used internally by Minuit.
Definition MnFcn.h:30
unsigned int NumOfCalls() const
Definition MnFcn.h:39
API class for calculating the numerical covariance matrix (== 2x Inverse Hessian == 2x Inverse 2nd derivative).
Definition MnHesse.h:41
Sets the relative floating point (double) arithmetic precision.
double Eps() const
eps returns the smallest possible number so that 1.+eps > 1.
void Debug(const Ts &... args)
Definition MnPrint.h:147
void Error(const Ts &... args)
Definition MnPrint.h:129
void Info(const Ts &... args)
Definition MnPrint.h:141
MinimumSeed operator()(const MnFcn &, const GradientCalculator &, const MnUserParameterState &, const MnStrategy &) const override
API class for defining four levels of strategies: low (0), medium (1), high (2), very high (>=3); act...
Definition MnStrategy.h:27
unsigned int Strategy() const
Definition MnStrategy.h:38
class which holds the external user and/or internal Minuit representation of the parameters and error...
const MnMachinePrecision & Precision() const
const std::vector< double > & IntParameters() const
const MnUserTransformation & Trafo() const
const MnUserCovariance & IntCovariance() const
class dealing with the transformation between user specified parameters (external) and internal param...
unsigned int ExtOfInt(unsigned int internal) const
const char * Name(unsigned int) const
In case that one of the components of the second derivative g2 calculated by the numerical Gradient c...
bool HasNegativeG2(const FunctionGradient &, const MnMachinePrecision &) const
class performing the numerical gradient calculation
double Estimate(const FunctionGradient &, const MinimumError &) const
Double_t x[n]
Definition legend1.C:17
const Int_t n
Definition legend1.C:16
tbb::task_arena is an alias of tbb::interface7::task_arena, which doesn't allow to forward declare tb...