MnSeedGenerator.cxx
// @(#)root/minuit2:$Id$
// Authors: M. Winkler, F. James, L. Moneta, A. Zsenei   2003-2005

/**********************************************************************
 *                                                                    *
 * Copyright (c) 2005 LCG ROOT Math team,  CERN/PH-SFT                *
 *                                                                    *
 **********************************************************************/
#include "Minuit2/MnSeedGenerator.h"
#include "Minuit2/MinimumSeed.h"
#include "Minuit2/MnFcn.h"
#include "Minuit2/GradientCalculator.h"
#include "Minuit2/FunctionGradient.h"
#include "Minuit2/MinimumParameters.h"
#include "Minuit2/MinimumState.h"
#include "Minuit2/MinimumError.h"
#include "Minuit2/MnMatrix.h"
#include "Minuit2/MnMachinePrecision.h"
#include "Minuit2/MnUserParameterState.h"
#include "Minuit2/MnUserTransformation.h"
#include "Minuit2/MnStrategy.h"
#include "Minuit2/MnHesse.h"
#include "Minuit2/VariableMetricEDMEstimator.h"
#include "Minuit2/NegativeG2LineSearch.h"
#include "Minuit2/AnalyticalGradientCalculator.h"
#include "Minuit2/Numerical2PGradientCalculator.h"
#include "Minuit2/HessianGradientCalculator.h"
#include "Minuit2/InitialGradientCalculator.h"
#include "Minuit2/MnPrint.h"

#include "Math/Util.h"

#include <cmath>
namespace ROOT {

namespace Minuit2 {

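// Compute the minimum seed (starting point, gradient, and a first estimate of
// the error matrix and EDM) with a generic gradient calculator. Analytical
// (external) gradient calculators are dispatched to the dedicated path below.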
MinimumSeed MnSeedGenerator::operator()(const MnFcn &fcn, const GradientCalculator &gc,
                                        const MnUserParameterState &st, const MnStrategy &stra) const
{
   if (auto *agc = dynamic_cast<AnalyticalGradientCalculator const *>(&gc)) {
      return CallWithAnalyticalGradientCalculator(fcn, *agc, st, stra);
   }

   MnPrint print("MnSeedGenerator");

   // find seed (initial minimization point) using the calculated gradient
   const unsigned int n = st.VariableParameters();
   const MnMachinePrecision &prec = st.Precision();

   print.Info("Computing seed using NumericalGradient calculator");

   print.Debug(n, "free parameters, FCN pointer", &fcn);

   // initial starting values
   MnAlgebraicVector x(n);
   for (unsigned int i = 0; i < n; i++)
      x(i) = st.IntParameters()[i];

   // We want to time everything that involves function evaluations. The
   // MnSeedGenerator with a numerical gradient does only one function and one
   // gradient evaluation by default, and that is what is timed here. If G2 is
   // negative, we also have to run a NegativeG2LineSearch later, but that is
   // timed separately inside the line search.
   auto timingScope = std::make_unique<ROOT::Math::Util::TimingScope>([&print](std::string const &s) { print.Info(s); },
                                                                      "Evaluated function and gradient in");
   MinimumParameters pa(x, MnFcnCaller{fcn}(x));
   FunctionGradient dgrad = gc(pa);
   timingScope.reset();

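   // Seed error matrix: reuse the user-provided covariance when available,
   // otherwise take a diagonal matrix with 1/G2 (or 1 when G2 is unusable).
   // dcovar records how approximate the matrix is (0 = given, 1 = rough guess).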
   MnAlgebraicSymMatrix mat(n);
   double dcovar = 1.;
   if (st.HasCovariance()) {
      for (unsigned int i = 0; i < n; i++) {
         mat(i, i) = st.IntCovariance()(i, i) > prec.Eps() ? st.IntCovariance()(i, i)
                     : dgrad.G2()(i) > prec.Eps()          ? 1. / dgrad.G2()(i)
                                                           : 1.0;
         for (unsigned int j = i + 1; j < n; j++)
            mat(i, j) = st.IntCovariance()(i, j);
      }
      dcovar = 0.;
   } else {
      for (unsigned int i = 0; i < n; i++)
         // if G2 is small, better to use an arbitrary value (e.g. 1)
         mat(i, i) = dgrad.G2()(i) > prec.Eps() ? 1. / dgrad.G2()(i) : 1.0;
   }
   MinimumError err(mat, dcovar);

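   // The EDM (estimated distance to minimum) predicts the expected drop of the
   // function value from the current gradient and error matrix.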
   double edm = VariableMetricEDMEstimator().Estimate(dgrad, err);
   MinimumState state(pa, err, dgrad, edm, fcn.NumOfCalls());

   print.Info("Initial state:", MnPrint::Oneline(state));

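   // A negative numerical G2 means the quadratic model is invalid along that
   // direction; NegativeG2LineSearch moves the point until the second
   // derivatives become positive.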
   if (!st.HasCovariance()) {
      NegativeG2LineSearch ng2ls;
      if (ng2ls.HasNegativeG2(dgrad, prec)) {
         print.Debug("Negative G2 Found", "\n  point:", x, "\n  grad :", dgrad.Grad(), "\n  g2   :", dgrad.G2());

         state = ng2ls(fcn, state, gc, prec);

         print.Info("Negative G2 found - new state:", state);
      }
   }

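   // With strategy 2 and no user covariance, invest extra function calls in a
   // full Hessian (MnHesse) to start from a better error matrix.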
   if (stra.Strategy() == 2 && !st.HasCovariance()) {
      // calculate full 2nd derivative
      print.Debug("calling MnHesse");

      MinimumState tmp = MnHesse(stra)(fcn, state, st.Trafo());

      print.Info("run Hesse - Initial seeding state:", tmp);

      return MinimumSeed(tmp, st.Trafo());
   }

   print.Info("Initial state:", state);

   return MinimumSeed(state, st.Trafo());
}

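// Compute the minimum seed using an analytical (external) gradient calculator,
// taking G2 (or the full Hessian) from it when available and falling back to
// the numerical-gradient path otherwise.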
MinimumSeed MnSeedGenerator::CallWithAnalyticalGradientCalculator(const MnFcn &fcn,
                                                                  const AnalyticalGradientCalculator &gc,
                                                                  const MnUserParameterState &st, const MnStrategy &stra) const
{
   MnPrint print("MnSeedGenerator");

   // check gradient (slow: will require more function evaluations)
   //if (gc.CheckGradient()) {
   //   //CheckGradient(st,trafo,stra,grd)
   //}

   if (!gc.CanComputeG2()) {
      print.Info("Using analytical (external) gradient calculator, but it cannot compute G2 - using the numerical gradient for G2");
      Numerical2PGradientCalculator ngc(fcn, st.Trafo(), stra);
      return this->operator()(fcn, ngc, st, stra);
   }

   if (gc.CanComputeHessian())
      print.Info("Computing seed using analytical (external) gradients and Hessian calculator");
   else
      print.Info("Computing seed using analytical (external) gradients and G2 calculator");

   // find seed (initial point for minimization) using the analytical gradient
   unsigned int n = st.VariableParameters();
   const MnMachinePrecision &prec = st.Precision();

   // initial starting values
   MnAlgebraicVector x(st.IntParameters());
   double fcnmin = MnFcnCaller{fcn}(x);
   MinimumParameters pa(x, fcnmin);

   // compute the function gradient
   FunctionGradient grad = gc(pa);
   double dcovar = 0;
   MnAlgebraicSymMatrix mat(n);
   // if we can compute the Hessian, compute it and use it
   bool computedHessian = false;
   if (!grad.HasG2()) {
      assert(gc.CanComputeHessian());
      MnAlgebraicSymMatrix hmat(n);
      bool ret = gc.Hessian(pa, hmat);
      if (!ret) {
         print.Error("Cannot compute G2 and Hessian");
         assert(false); // computing the Hessian is required on this path
      }
      // update the gradient using G2 from the Hessian calculation
      MnAlgebraicVector g2(n);
      for (unsigned int i = 0; i < n; i++)
         g2(i) = hmat(i, i);
      grad = FunctionGradient(grad.Grad(), g2);

      print.Debug("Computed analytical G2", g2);

      // when the Hessian has been computed, invert it to get the covariance.
      // We prefer not to use the full Hessian in strategy 1, since we would need
      // to be sure that it is positive-definite. Uncomment the following line to
      // seed with the full Hessian:
      //computedHessian = true;
      if (computedHessian) {
         int ifail = 0;
         mat = MinimumError::InvertMatrix(hmat, ifail);
         print.Info("Use full Hessian as seed");
         print.Debug("computed Hessian", hmat);
         print.Debug("computed Error matrix (H^-1)", mat);
      }
   }
   // do this only when we have not computed the Hessian, or always?
   if (!computedHessian) {
      // check if the minimum state has a covariance - if not, use the computed G2.
      // Maybe this should be an option; sometimes it is not good to re-use an
      // existing covariance.
      if (st.HasCovariance()) {
         print.Info("Using existing covariance matrix");
         for (unsigned int i = 0; i < n; i++) {
            mat(i, i) = st.IntCovariance()(i, i) > prec.Eps() ? st.IntCovariance()(i, i)
                        : grad.G2()(i) > prec.Eps()           ? 1. / grad.G2()(i)
                                                              : 1.0;
            for (unsigned int j = i + 1; j < n; j++)
               mat(i, j) = st.IntCovariance()(i, j);
         }
         dcovar = 0.;
      } else {
         for (unsigned int i = 0; i < n; i++) {
            // if G2 is very small, better to use an arbitrary value (e.g. 1.)
            mat(i, i) = grad.G2()(i) > prec.Eps() ? 1. / grad.G2()(i) : 1.0;
         }
         dcovar = 1.;
      }
   } else {
      print.Info("Computing seed using full Hessian");
   }

   MinimumError err(mat, dcovar);
   double edm = VariableMetricEDMEstimator().Estimate(grad, err);

   if (!grad.HasG2()) {
      print.Error("Cannot compute seed because G2 is not computed");
   }
   MinimumState state(pa, err, grad, edm, fcn.NumOfCalls());

   if (!st.HasCovariance()) {
      NegativeG2LineSearch ng2ls;
      if (ng2ls.HasNegativeG2(grad, prec)) {
         // do a negative line search - can use the current gradient calculator
         // Numerical2PGradientCalculator ngc(fcn, st.Trafo(), stra);
         state = ng2ls(fcn, state, gc, prec);
      }
   }

   // the Hessian computed above is not checked for positive-definiteness, as it
   // would be if we called MnHesse
   if (stra.Strategy() == 2 && !st.HasCovariance() && !computedHessian) {
      // can calculate full 2nd derivative
      MinimumState tmpState = MnHesse(stra)(fcn, state, st.Trafo());
      print.Info("Compute full Hessian: Initial seeding state is", tmpState);
      return MinimumSeed(tmpState, st.Trafo());
   }

   print.Info("Initial seeding state:", state);

   return MinimumSeed(state, st.Trafo());
}
#if 0
// NOTE: disabled legacy check of a user-provided gradient. The function
// signature and the two calculator constructions below are reconstructions
// (the original lines were lost); as written, the block also relies on fcn, n,
// and print from an older context. Kept only for reference.
bool MnSeedGenerator::CheckGradient(MinimumState &st, MnUserTransformation &trafo, MnStrategy &stra) const
{

   const MinimumParameters &pa = st.Parameters();
   const FunctionGradient &grd = st.FunctionGradient();

   // I think one should use the Numerical2PGradientCalculator,
   // since the step sizes and G2 of the initial gradient are wrong
   InitialGradientCalculator igc(fcn, trafo, stra); // reconstructed
   FunctionGradient tmp = igc(pa);
   // should also use G2 from grd (in case AnalyticalGradient can compute the Hessian?)
   FunctionGradient dgrad(grd.Grad(), tmp.G2(), tmp.Gstep());

   // check by computing the gradient with the HessianGradientCalculator,
   // which refines a gradient given an initial one
   bool good = true;
   HessianGradientCalculator hgc(fcn, trafo, MnStrategy(2)); // reconstructed
   std::pair<FunctionGradient, MnAlgebraicVector> hgrd = hgc.DeltaGradient(pa, dgrad);
   for (unsigned int i = 0; i < n; i++) {
      if (std::fabs(hgrd.first.Grad()(i) - grd.Grad()(i)) > hgrd.second(i)) {
         int externalParameterIndex = trafo.ExtOfInt(i);
         const char *parameter_name = trafo.Name(externalParameterIndex);
         print.Warn("Gradient discrepancy of external Parameter too large:"
                    " parameter_name =",
                    parameter_name, "externalParameterIndex =", externalParameterIndex, "internal =", i);
         good = false;
      }
   }
   if (!good) {
      print.Error("Minuit does not accept user specified Gradient. To force acceptance, override 'virtual bool "
                  "CheckGradient() const' of FCNGradientBase.h in the derived class.");

      assert(good);
   }
   return good;
}
#endif

} // namespace Minuit2

} // namespace ROOT