MnSeedGenerator.cxx
// @(#)root/minuit2:$Id$
// Authors: M. Winkler, F. James, L. Moneta, A. Zsenei   2003-2005

/**********************************************************************
 *                                                                    *
 * Copyright (c) 2005 LCG ROOT Math team,  CERN/PH-SFT                *
 *                                                                    *
 **********************************************************************/
#include "Minuit2/MnSeedGenerator.h"
#include "Minuit2/MinimumSeed.h"
#include "Minuit2/MnFcn.h"
#include "Minuit2/GradientCalculator.h"
#include "Minuit2/InitialGradientCalculator.h"
#include "Minuit2/MnUserTransformation.h"
#include "Minuit2/MinimumParameters.h"
#include "Minuit2/FunctionGradient.h"
#include "Minuit2/MnMatrix.h"
#include "Minuit2/MinimumError.h"
#include "Minuit2/MinimumState.h"
#include "Minuit2/MnMachinePrecision.h"
#include "Minuit2/MnUserParameterState.h"
#include "Minuit2/MnStrategy.h"
#include "Minuit2/MnHesse.h"
#include "Minuit2/VariableMetricEDMEstimator.h"
#include "Minuit2/NegativeG2LineSearch.h"
#include "Minuit2/AnalyticalGradientCalculator.h"
#include "Minuit2/Numerical2PGradientCalculator.h"
#include "Minuit2/HessianGradientCalculator.h"
#include "Minuit2/MnPrint.h"

#include "Math/Util.h"

#include <cmath>

namespace ROOT {

namespace Minuit2 {

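// Compute the initial seed for the minimization - starting point, gradient,
// error-matrix estimate, and EDM - using a generic (typically numerical)
// gradient calculator.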
MinimumSeed MnSeedGenerator::operator()(const MnFcn &fcn, const GradientCalculator &gc, const MnUserParameterState &st,
                                        const MnStrategy &stra) const
{
   if (auto *agc = dynamic_cast<AnalyticalGradientCalculator const *>(&gc)) {
      return CallWithAnalyticalGradientCalculator(fcn, *agc, st, stra);
   }

   MnPrint print("MnSeedGenerator");

   // find seed (initial minimization point) using the calculated gradient
   const unsigned int n = st.VariableParameters();
   const MnMachinePrecision &prec = st.Precision();

   print.Info("Computing seed using NumericalGradient calculator");

   print.Debug(n, "free parameters, FCN pointer", &fcn);

   // initial starting values
   MnAlgebraicVector x(n);
   for (unsigned int i = 0; i < n; i++)
      x(i) = st.IntParameters()[i];

   // We want to time everything with function evaluations. The MnSeedGenerator
   // with numeric gradient only does one function and one gradient evaluation
   // by default, and we're timing it here. If the G2 is negative, we also have
   // to run a NegativeG2LineSearch later, but this is timed separately inside
   // the line search.
   auto timingScope = std::make_unique<ROOT::Math::Util::TimingScope>([&print](std::string const &s) { print.Info(s); },
                                                                      "Evaluated function and gradient in");
   MinimumParameters pa(x, MnFcnCaller{fcn}(x));
   FunctionGradient dgrad = gc(pa);
   timingScope.reset();

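   // Build the seed error matrix: reuse the user-supplied covariance when
   // available (dcovar = 0 marks it as accurate), otherwise use a diagonal
   // approximation 1/G2 from the second derivatives (dcovar = 1 marks it
   // as a rough estimate).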
   MnAlgebraicSymMatrix mat(n);
   double dcovar = 1.;
   if (st.HasCovariance()) {
      for (unsigned int i = 0; i < n; i++) {
         mat(i, i) = st.IntCovariance()(i, i) > prec.Eps() ? st.IntCovariance()(i, i)
                     : dgrad.G2()(i) > prec.Eps()          ? 1. / dgrad.G2()(i)
                                                           : 1.0;
         for (unsigned int j = i + 1; j < n; j++)
            mat(i, j) = st.IntCovariance()(i, j);
      }
      dcovar = 0.;
   } else {
      for (unsigned int i = 0; i < n; i++)
         // if G2 is small, better to use an arbitrary value (e.g. 1)
         mat(i, i) = dgrad.G2()(i) > prec.Eps() ? 1. / dgrad.G2()(i) : 1.0;
   }
   MinimumError err(mat, dcovar);

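   // Estimated distance to minimum (EDM) of the seed state, computed from the
   // gradient and the error-matrix estimate (proportional to g^T V g).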
   double edm = VariableMetricEDMEstimator().Estimate(dgrad, err);
   MinimumState state(pa, err, dgrad, edm, fcn.NumOfCalls());

   print.Info("Initial state:", MnPrint::Oneline(state));

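   // A negative G2 component means the quadratic model is invalid along that
   // direction; NegativeG2LineSearch moves the point until all G2 are positive.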
   if (!st.HasCovariance()) {
      NegativeG2LineSearch ng2ls;
      if (ng2ls.HasNegativeG2(dgrad, prec)) {
         print.Debug("Negative G2 Found", "\n  point:", x, "\n  grad :", dgrad.Grad(), "\n  g2   :", dgrad.G2());

         state = ng2ls(fcn, state, gc, prec);

         print.Info("Negative G2 found - new state:", state);
      }
   }

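   // At the highest strategy level, invest the extra function calls to compute
   // the full Hessian with MnHesse and seed from it directly.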
   if (stra.Strategy() == 2 && !st.HasCovariance()) {
      // calculate full 2nd derivative

      print.Debug("calling MnHesse");

      MinimumState tmp = MnHesse(stra)(fcn, state, st.Trafo());

      print.Info("run Hesse - Initial seeding state:", tmp);

      return MinimumSeed(tmp, st.Trafo());
   }

   print.Info("Initial state ", state);

   return MinimumSeed(state, st.Trafo());
}

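// Variant of the seed computation for analytical (user-provided) gradient
// calculators, which may additionally supply G2 or the full Hessian.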
MinimumSeed MnSeedGenerator::CallWithAnalyticalGradientCalculator(const MnFcn &fcn, const AnalyticalGradientCalculator &gc,
                                                                  const MnUserParameterState &st, const MnStrategy &stra) const
{
   MnPrint print("MnSeedGenerator");

   // check gradient (slow: will require more function evaluations)
   //if (gc.CheckGradient()) {
   //   //CheckGradient(st,trado,stra,grd)
   //}

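   // If the external calculator provides only first derivatives, fall back to
   // the fully numerical seed path, which obtains G2 by finite differences.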
   if (!gc.CanComputeG2()) {
      print.Info("Using analytical (external) gradient calculator but cannot compute G2 - using numerical gradient for G2 instead");
      Numerical2PGradientCalculator ngc(fcn, st.Trafo(), stra);
      return this->operator()(fcn, ngc, st, stra);
   }

   if (gc.CanComputeHessian())
      print.Info("Computing seed using analytical (external) gradients and Hessian calculator");
   else
      print.Info("Computing seed using analytical (external) gradients and G2 calculator");

   // find seed (initial point for minimization) using the analytical gradient
   unsigned int n = st.VariableParameters();
   const MnMachinePrecision &prec = st.Precision();

   // initial starting values
   MnAlgebraicVector x(st.IntParameters());
   double fcnmin = MnFcnCaller{fcn}(x);
   MinimumParameters pa(x, fcnmin);

   // compute function gradient
   FunctionGradient grad = gc(pa);
   double dcovar = 0;
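   // G2 here denotes the vector of pure second derivatives d2f/dx2; if the
   // calculator cannot provide it directly, take it from the Hessian diagonal.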
   MnAlgebraicSymMatrix mat(n);
   // if we can compute the Hessian, compute it and use it
   bool computedHessian = false;
   if (!grad.HasG2()) {
      assert(gc.CanComputeHessian());
      MnAlgebraicSymMatrix hmat(n);
      bool ret = gc.Hessian(pa, hmat);
      if (!ret) {
         print.Error("Cannot compute G2 and Hessian");
         assert(false);
      }
      // update gradient using the G2 from the Hessian diagonal
      MnAlgebraicVector g2(n);
      for (unsigned int i = 0; i < n; i++)
         g2(i) = hmat(i, i);
      grad = FunctionGradient(grad.Grad(), g2);

      print.Debug("Computed analytical G2", g2);

      // when the Hessian has been computed, invert it to get the covariance.
      // We prefer not to use the full Hessian in strategy 1, since we would need
      // to be sure it is positive-definite. Uncomment the following line to seed
      // with the full Hessian:
      //computedHessian = true;
      if (computedHessian) {
         int ifail = 0;
         mat = MinimumError::InvertMatrix(hmat, ifail);
         print.Info("Use full Hessian as seed");
         print.Debug("computed Hessian", hmat);
         print.Debug("computed Error matrix (H^-1)", mat);
      }
   }
   // do this only when we have not computed the Hessian, or always?
   if (!computedHessian) {
      // check if the minimum state has a covariance - if not, use the computed G2.
      // Maybe this should be an option; sometimes it is not good to re-use an existing covariance
      if (st.HasCovariance()) {
         print.Info("Using existing covariance matrix");
         for (unsigned int i = 0; i < n; i++) {
            mat(i, i) = st.IntCovariance()(i, i) > prec.Eps() ? st.IntCovariance()(i, i)
                        : grad.G2()(i) > prec.Eps()           ? 1. / grad.G2()(i)
                                                              : 1.0;
            for (unsigned int j = i + 1; j < n; j++)
               mat(i, j) = st.IntCovariance()(i, j);
         }
         dcovar = 0.;
      } else {
         for (unsigned int i = 0; i < n; i++) {
            // if G2 is very small, better to use an arbitrary value (e.g. 1.)
            mat(i, i) = grad.G2()(i) > prec.Eps() ? 1. / grad.G2()(i) : 1.0;
         }
         dcovar = 1.;
      }
   } else {
      print.Info("Computing seed using full Hessian");
   }

   MinimumError err(mat, dcovar);
   double edm = VariableMetricEDMEstimator().Estimate(grad, err);

   if (!grad.HasG2()) {
      print.Error("Cannot compute seed because G2 is not computed");
   }
   MinimumState state(pa, err, grad, edm, fcn.NumOfCalls());

   if (!st.HasCovariance()) {
      NegativeG2LineSearch ng2ls;
      if (ng2ls.HasNegativeG2(grad, prec)) {
         // do a negative line search - can use the current gradient calculator
         // Numerical2PGradientCalculator ngc(fcn, st.Trafo(), stra);
         state = ng2ls(fcn, state, gc, prec);
      }
   }

   // the Hessian computed above is not checked for positive-definiteness,
   // as it would be if we called MnHesse
   if (stra.Strategy() == 2 && !st.HasCovariance() && !computedHessian) {
      // can calculate the full 2nd derivative
      MinimumState tmpState = MnHesse(stra)(fcn, state, st.Trafo());
      print.Info("Compute full Hessian: Initial seeding state is ", tmpState);
      return MinimumSeed(tmpState, st.Trafo());
   }

   print.Info("Initial seeding state ", state);

   return MinimumSeed(state, st.Trafo());
}
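
// Disabled legacy check of a user-supplied gradient against a numerical
// estimate; kept for reference (see the commented-out call in
// CallWithAnalyticalGradientCalculator above).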
#if 0
bool MnSeedGenerator::CheckGradient(MinimumState &st, MnUserTransformation &trafo, MnStrategy &stra) const
{
   const MinimumParameters &pa = st.Parameters();
   const FunctionGradient &grd = st.FunctionGradient();

   // I think one should use Numerical2PGradientCalculator
   // since step sizes and G2 of initial gradient are wrong
   InitialGradientCalculator igc(fcn, trafo, stra);
   FunctionGradient tmp = igc(pa);
   // should also use G2 from grd (in case AnalyticalGradient can compute the Hessian?)
   FunctionGradient dgrad(grd.Grad(), tmp.G2(), tmp.Gstep());

   // check the gradient with the HessianGradientCalculator, which refines a given initial gradient
   bool good = true;
   HessianGradientCalculator hgc(fcn, trafo, MnStrategy(2));
   std::pair<FunctionGradient, MnAlgebraicVector> hgrd = hgc.DeltaGradient(pa, dgrad);
   for (unsigned int i = 0; i < n; i++) {
      if (std::fabs(hgrd.first.Grad()(i) - grd.Grad()(i)) > hgrd.second(i)) {
         int externalParameterIndex = trafo.ExtOfInt(i);
         const char *parameter_name = trafo.Name(externalParameterIndex);
         print.Warn("Gradient discrepancy of external Parameter too large:"
                    " parameter_name =",
                    parameter_name, "externalParameterIndex =", externalParameterIndex, "internal =", i);
         good = false;
      }
   }
   if (!good) {
      print.Error("Minuit does not accept user specified Gradient. To force acceptance, override 'virtual bool "
                  "CheckGradient() const' of FCNGradientBase.h in the derived class.");

      assert(good);
   }
   return good;
}
#endif

} // namespace Minuit2

} // namespace ROOT
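
// ------------------------------------------------------------------
// Usage sketch (not part of the original file): MnSeedGenerator is invoked
// internally by Minuit2's minimizers, so user code normally reaches it through
// a high-level interface such as MnMigrad. The FCN below ("MyFcn") and its
// starting values are illustrative assumptions only; strategy 2 triggers the
// full-Hessian seed computed above.
//
//   #include "Minuit2/FCNBase.h"
//   #include "Minuit2/MnMigrad.h"
//   #include "Minuit2/MnUserParameters.h"
//   #include "Minuit2/FunctionMinimum.h"
//
//   // hypothetical chi-square-like objective with minimum at (1, 0)
//   struct MyFcn : ROOT::Minuit2::FCNBase {
//      double operator()(const std::vector<double> &p) const override
//      {
//         return (p[0] - 1.) * (p[0] - 1.) + p[1] * p[1];
//      }
//      double Up() const override { return 1.; } // error definition (chi-square)
//   };
//
//   MyFcn fcn;
//   ROOT::Minuit2::MnUserParameters upar;
//   upar.Add("x", 0., 0.1);
//   upar.Add("y", 0., 0.1);
//   ROOT::Minuit2::MnMigrad migrad(fcn, upar, 2); // strategy 2
//   ROOT::Minuit2::FunctionMinimum min = migrad(); // seeding happens here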