ROOT Reference Guide
VariableMetricBuilder.cxx
// @(#)root/minuit2:$Id$
// Authors: M. Winkler, F. James, L. Moneta, A. Zsenei   2003-2005

/**********************************************************************
 *                                                                    *
 * Copyright (c) 2005 LCG ROOT Math team, CERN/PH-SFT                 *
 *                                                                    *
 **********************************************************************/

#include "Minuit2/VariableMetricBuilder.h"
#include "Minuit2/GradientCalculator.h"
#include "Minuit2/MinimumState.h"
#include "Minuit2/MinimumError.h"
#include "Minuit2/FunctionGradient.h"
#include "Minuit2/FunctionMinimum.h"
#include "Minuit2/MnLineSearch.h"
#include "Minuit2/MinimumSeed.h"
#include "Minuit2/MnFcn.h"
#include "Minuit2/MnMachinePrecision.h"
#include "Minuit2/MnPosDef.h"
#include "Minuit2/MnStrategy.h"
#include "Minuit2/MnHesse.h"
#include "Minuit2/MnPrint.h"

#include "Math/Util.h"

#include <cmath>
#include <cassert>

namespace ROOT {

namespace Minuit2 {

void VariableMetricBuilder::AddResult(std::vector<MinimumState> &result, const MinimumState &state) const
{
   // // if (!store) store = StorageLevel();
   // // store |= (result.size() == 0);
   // if (store)
   result.push_back(state);
   // else {
   //    result.back() = state;
   // }
   if (TraceIter())
      TraceIteration(result.size() - 1, result.back());
   else {
      MnPrint print("VariableMetricBuilder", PrintLevel());
      print.Info(MnPrint::Oneline(result.back(), result.size() - 1));
   }
}

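// Top-level driver: run the core variable-metric iteration (the second Minimum overload
// further below); for strategy >= 2, or strategy 1 with Dcovar > 0.05, verify the result
// with MnHesse, and if the EDM after Hesse is still above the (rescaled) tolerance,
// repeat the search once with a 30% larger function-call budget.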
FunctionMinimum VariableMetricBuilder::Minimum(const MnFcn &fcn, const GradientCalculator &gc, const MinimumSeed &seed,
                                               const MnStrategy &strategy, unsigned int maxfcn, double edmval) const
{
   MnPrint print("VariableMetricBuilder", PrintLevel());

   // top-level function to find the minimum from a given initial seed;
   // iterate on the minimum search in case the first attempt is not successful

   // to be consistent with F77 Minuit:
   // in Minuit2 the edm is correct and is ~ a factor of 2 smaller than in F77 Minuit.
   // There is also a check for convergence (if edm < 0.1 * edmval) for exiting the loop.
   // LM: change the factor to 2E-3 to be consistent with F77 Minuit
   edmval *= 0.002;

   // set the global print level to the local one so that all calls to MN_INFO_MSG can be controlled in the same way;
   // at exit of this function the BuilderPrintLevelConf object is destructed and the
   // previous level is automatically restored

   // double edm = Estimator().Estimate(seed.Gradient(), seed.Error());
   double edm = seed.State().Edm();

   FunctionMinimum min(seed, fcn.Up());

   if (seed.Parameters().Vec().size() == 0) {
      print.Warn("No free parameters.");
      return min;
   }

   if (!seed.IsValid()) {
      print.Error("Minimum seed invalid.");
      return min;
   }

   if (edm < 0.) {
      print.Error("Initial matrix not pos.def.");

      // assert(!seed.Error().IsPosDef());
      return min;
   }

   std::vector<MinimumState> result;
   result.reserve(StorageLevel() > 0 ? 10 : 2);

   // do the actual iterations
   print.Info("Start iterating until Edm is <", edmval, "with call limit =", maxfcn);

   // print the elapsed time after returning
   ROOT::Math::Util::TimingScope timingScope([&print](std::string const &s) { print.Info(s); }, "Stop iterating after");

   AddResult(result, seed.State());

   // try first with maxfcn_eff = maxfcn; the limit is increased by 30% if a second pass is needed
   int maxfcn_eff = maxfcn;
   int ipass = 0;
   bool iterate = false;

   do {

      iterate = false;

      print.Debug(ipass > 0 ? "Continue" : "Start", "iterating...");

      min = Minimum(fcn, gc, seed, result, maxfcn_eff, edmval);

      // if the maximum number of function calls is reached, exit
      if (min.HasReachedCallLimit()) {
         print.Warn("FunctionMinimum is invalid, reached function call limit");
         return min;
      }

      // the second time, check the validity of the function Minimum
      if (ipass > 0) {
         if (!min.IsValid()) {
            print.Warn("FunctionMinimum is invalid after second try");
            return min;
         }
      }

      // resulting edm of the minimization
      edm = result.back().Edm();
      // need to correct again for Dcovar: edm *= (1. + 3. * e.Dcovar()) ???

      if ((strategy.Strategy() >= 2) || (strategy.Strategy() == 1 && min.Error().Dcovar() > 0.05)) {

         print.Debug("MnMigrad will verify convergence and Error matrix; dcov =", min.Error().Dcovar());

         MnStrategy strat(strategy);
         strat.SetHessianForcePosDef(1); // no matter what strategy is used, force the result positive-definite if required
         MinimumState st = MnHesse(strat)(fcn, min.State(), min.Seed().Trafo(), maxfcn);

         print.Info("After Hessian");

         AddResult(result, st);

         if (!st.IsValid()) {
            print.Warn("Invalid Hessian - exit the minimization");
            break;
         }

         // check the new edm
         edm = st.Edm();

         print.Debug("New Edm", edm, "Requested", edmval);

         if (edm > edmval) {
            // be careful with machine precision and avoid a too-small edm
            double machineLimit = std::fabs(seed.Precision().Eps2() * result.back().Fval());
            if (edm >= machineLimit) {
               iterate = true;

               print.Info("Tolerance not sufficient, continue minimization; "
                          "Edm",
                          edm, "Required", edmval);
            } else {
               print.Warn("Reached machine accuracy limit; Edm", edm, "is smaller than machine limit", machineLimit,
                          "while", edmval, "was requested");
            }
         }
      }

      // end of the loop on iterations
      // ? need a maximum here (or is the maximum number of function calls enough ?)
      // continue the iteration (re-calculate the function Minimum if the edm IS NOT sufficient)
      // no need to check that the Hesse calculation is done (if it is not done, the edm is OK anyway)
      // count the passes in order to exit the second time the function Minimum is invalid
      // increase maxfcn by 30% for doing some more tests
      if (ipass == 0)
         maxfcn_eff = int(maxfcn * 1.3);
      ipass++;
   } while (iterate);

   // Add the latest state (the one from the Hessian calculation)
   const MinimumState &latest = result.back();

   // check the edm (add a factor of 10 in tolerance)
   if (edm > 10 * edmval) {
      min.Add(latest, FunctionMinimum::MnAboveMaxEdm);
      print.Warn("No convergence; Edm", edm, "is above tolerance", 10 * edmval);
   } else if (latest.Error().HasReachedCallLimit()) {
      // communicate to the user that the call limit was reached in MnHesse
      min.Add(latest, FunctionMinimum::MnReachedCallLimit);
   } else if (latest.Error().IsAvailable()) {
      // check if the minimum had an edm above the maximum before
      if (min.IsAboveMaxEdm())
         print.Info("Edm has been re-computed after Hesse; Edm", edm, "is now within tolerance");
      min.Add(latest);
   }

   print.Debug("Minimum found", min);

   return min;
}

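// Outline of one pass of the loop in the overload below (the standard variable-metric scheme,
// as described in the comments of this file):
//   Newton step    s = -V * g            (V = current estimate of the inverse Hessian)
//   line search    find lambda that approximately minimizes f(x + lambda * s)  (MnLineSearch)
//   error update   V is updated with the Davidon formula (ErrorUpdator())
//   convergence    the estimated distance to minimum (EDM) is computed from g and V by
//                  Estimator() and inflated by (1 + 3 * Dcovar), which vanishes as V converges.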
FunctionMinimum VariableMetricBuilder::Minimum(const MnFcn &fcn, const GradientCalculator &gc, const MinimumSeed &seed,
                                               std::vector<MinimumState> &result, unsigned int maxfcn,
                                               double edmval) const
{
   // function performing the minimum search using the Variable Metric algorithm (MIGRAD):
   // perform first a line search in the -Vg direction and then update V using the Davidon formula
   // (Davidon error updator); stop when the reached edm is less than the required one (edmval)

   // after the modification allowing iteration on this function, so that it can be called many times,
   // the seed is used here only to get the precision and to construct the returned FunctionMinimum object

   MnPrint print("VariableMetricBuilder", PrintLevel());

   const MnMachinePrecision &prec = seed.Precision();

   // result.push_back(MinimumState(seed.Parameters(), seed.Error(), seed.Gradient(), edm, fcn.NumOfCalls()));
   const MinimumState &initialState = result.back();

   double edm = initialState.Edm();

   print.Debug("Initial State:", "\n Parameter:", initialState.Vec(), "\n Gradient:", initialState.Gradient().Vec(),
               "\n InvHessian:", initialState.Error().InvHessian(), "\n Edm:", initialState.Edm());

   // iterate until the edm is small enough or the maximum number of iterations is reached
   edm *= (1. + 3. * initialState.Error().Dcovar());
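   // The (1 + 3 * Dcovar) factor above (also applied after every update inside the loop below):
   // Dcovar estimates the relative change of the covariance (inverse-Hessian) matrix in the last
   // update, so the EDM is inflated while the error matrix is still unreliable and the loop does
   // not stop too early.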
   MnLineSearch lsearch;
   MnAlgebraicVector step(initialState.Gradient().Vec().size());
   // keep also the previous step
   MnAlgebraicVector prevStep(initialState.Gradient().Vec().size());

   MinimumState s0 = result.back();

   do {

      // MinimumState s0 = result.back();

      step = -1. * s0.Error().InvHessian() * s0.Gradient().Vec();

      print.Debug("Iteration", result.size(), "Fval", s0.Fval(), "numOfCall", fcn.NumOfCalls(),
                  "\n Internal parameters", s0.Vec(), "\n Newton step", step);

      // check that the derivatives are not all zero
      if (inner_product(s0.Gradient().Vec(), s0.Gradient().Vec()) <= 0) {
         print.Debug("all derivatives are zero - return current status");
         break;
      }

      // gdel = s^T * g = -g^T V g (since s = -V g, with V the inverse-Hessian estimate), so it must be negative
      double gdel = inner_product(step, s0.Gradient().Grad());

      if (gdel > 0.) {
         print.Warn("Matrix not pos.def, gdel =", gdel, "> 0");

         MnPosDef psdf;
         s0 = psdf(s0, prec);
         step = -1. * s0.Error().InvHessian() * s0.Gradient().Vec();
         // #ifdef DEBUG
         //    std::cout << "After MnPosdef - Error " << s0.Error().InvHessian() << " Gradient "
         //              << s0.Gradient().Vec() << " step " << step << std::endl;
         // #endif
         gdel = inner_product(step, s0.Gradient().Grad());

         print.Warn("gdel =", gdel);

         if (gdel > 0.) {
            AddResult(result, s0);

            return FunctionMinimum(seed, result, fcn.Up());
         }
      }

      auto pp = lsearch(fcn, s0.Parameters(), step, gdel, prec);

      // <= is needed for the case 0 <= 0
      if (std::fabs(pp.Y() - s0.Fval()) <= std::fabs(s0.Fval()) * prec.Eps()) {

         print.Warn("No improvement in line search");

         // no improvement: exit (is it really needed LM ? in vers. 1.22 an alternative was tried)
         // add a new state when only fcn changes
         if (result.size() <= 1)
            AddResult(result, MinimumState(s0.Parameters(), s0.Error(), s0.Gradient(), s0.Edm(), fcn.NumOfCalls()));
         else
            // no need to re-store the state
            AddResult(result, MinimumState(pp.Y(), s0.Edm(), fcn.NumOfCalls()));

         break;
      }

      print.Debug("Result after line search :", "\n x =", pp.X(), "\n Old Fval =", s0.Fval(),
                  "\n New Fval =", pp.Y(), "\n NFcalls =", fcn.NumOfCalls());

      MinimumParameters p(s0.Vec() + pp.X() * step, pp.Y());

      FunctionGradient g = gc(p, s0.Gradient());

      edm = Estimator().Estimate(g, s0.Error());

      if (std::isnan(edm)) {
         print.Warn("Edm is NaN; stop iterations");
         AddResult(result, s0);
         return FunctionMinimum(seed, result, fcn.Up());
      }

      if (edm < 0.) {
         print.Warn("Matrix not pos.def., try to make pos.def.");

         MnPosDef psdf;
         s0 = psdf(s0, prec);
         edm = Estimator().Estimate(g, s0.Error());
         if (edm < 0.) {
            print.Warn("Matrix still not pos.def.; stop iterations");

            AddResult(result, s0);

            return FunctionMinimum(seed, result, fcn.Up());
         }
      }
      MinimumError e = ErrorUpdator().Update(s0, p, g);

      // avoid printing the Hessian, since that would invert the matrix
      print.Debug("Updated new point:", "\n Parameter:", p.Vec(), "\n Gradient:", g.Vec(),
                  "\n InvHessian:", e.Matrix(), "\n Edm:", edm);

      // update the state
      s0 = MinimumState(p, e, g, edm, fcn.NumOfCalls());
      if (StorageLevel() || result.size() <= 1)
         AddResult(result, s0);
      else
         // use a reduced state for the non-final iterations
         AddResult(result, MinimumState(p.Fval(), edm, fcn.NumOfCalls()));

      // correct the edm
      edm *= (1. + 3. * e.Dcovar());

      print.Debug("Dcovar =", e.Dcovar(), "\tCorrected edm =", edm);

   } while (edm > edmval && fcn.NumOfCalls() < maxfcn); // end of the iteration loop

   // save the last result in case there are no complete final states:
   // when the result is filled above with reduced storage, the stored states are not valid
   // (they do not have parameter values and errors);
   // the assignment below fills the last element with a valid state
   if (!result.back().IsValid())
      result.back() = s0;

   if (fcn.NumOfCalls() >= maxfcn) {
      print.Warn("Call limit exceeded");
      return FunctionMinimum(seed, result, fcn.Up(), FunctionMinimum::MnReachedCallLimit);
   }

   if (edm > edmval) {
      if (edm < 10 * edmval) {
         print.Info("Edm is close to limit - return current minimum");
         return FunctionMinimum(seed, result, fcn.Up());
      } else if (edm < std::fabs(prec.Eps2() * result.back().Fval())) {
         print.Warn("Edm is limited by Machine accuracy - return current minimum");
         return FunctionMinimum(seed, result, fcn.Up());
      } else {
         print.Warn("Iterations finish without convergence; Edm", edm, "Requested", edmval);

         return FunctionMinimum(seed, result, fcn.Up(), FunctionMinimum::MnAboveMaxEdm);
      }
   }
   // std::cout << "result.back().Error().Dcovar() = " << result.back().Error().Dcovar() << std::endl;

   print.Debug("Exiting successfully;", "Ncalls", fcn.NumOfCalls(), "FCN", result.back().Fval(), "Edm", edm,
               "Requested", edmval);

   return FunctionMinimum(seed, result, fcn.Up());
}

} // namespace Minuit2

} // namespace ROOT
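The builder above is not normally called directly. Below is a minimal, hypothetical sketch of the usual entry point: MnMigrad owns a VariableMetricMinimizer whose builder is this VariableMetricBuilder, so invoking the migrad functor ends up in the Minimum() methods of this file. The Quadratic FCN, the parameter names, and the starting values are purely illustrative.

// Hypothetical example (not part of VariableMetricBuilder.cxx): minimize a simple
// quadratic with MIGRAD; MnMigrad drives the VariableMetricBuilder defined above.
#include "Minuit2/FCNBase.h"
#include "Minuit2/MnMigrad.h"
#include "Minuit2/MnUserParameters.h"
#include "Minuit2/FunctionMinimum.h"
#include <iostream>
#include <vector>

class Quadratic : public ROOT::Minuit2::FCNBase {
public:
   // f(x, y) = (x - 1)^2 + 4 * (y + 2)^2, minimum at (1, -2)
   double operator()(const std::vector<double> &par) const override
   {
      const double dx = par[0] - 1.0;
      const double dy = par[1] + 2.0;
      return dx * dx + 4.0 * dy * dy;
   }
   // error definition (1.0 corresponds to a 1-sigma contour for a chi-square-like FCN)
   double Up() const override { return 1.0; }
};

int main()
{
   Quadratic fcn;
   ROOT::Minuit2::MnUserParameters upar;
   upar.Add("x", 0.0, 0.1); // name, starting value, initial step (error)
   upar.Add("y", 0.0, 0.1);

   // strategy 1 (default); strategy >= 2 always triggers the MnHesse verification seen above
   ROOT::Minuit2::MnMigrad migrad(fcn, upar, 1);
   ROOT::Minuit2::FunctionMinimum min = migrad(); // default call limit and tolerance

   std::cout << "valid minimum: " << min.IsValid() << "\n"
             << "x = " << min.UserState().Value("x") << ", y = " << min.UserState().Value("y") << std::endl;
   return 0;
}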