MinimumState ComputeNumerical(const MnFcn &, const MinimumState &, const MnUserTransformation &,
                              unsigned int maxcalls, const MnStrategy &strat);

MinimumState ComputeAnalytical(const FCNBase &, const MinimumState &, const MnUserTransformation &);
if (fcn.HasGradient()) {
   ... state.Edm(), static_cast<int>(state.NFcn())}, state.Trafo());
   ... static_cast<int>(state.NFcn())},

if (st.Gradient().IsAnalytical()) {

if (mfcn.Fcn().HasGradient() && mfcn.Fcn().HasHessian()) {
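The two helpers declared at the top of the excerpt are selected depending on what the user's FCN provides. The fragment below is a minimal sketch of such a dispatch, assembled only from the calls visible in this excerpt; the function name ComputeHessianSketch and the exact arrangement are assumptions, not the verbatim code of MnHesse.cxx.

// Illustrative dispatch only: take the analytical path when the user FCN supplies
// both gradient and Hessian, otherwise fall back to the finite-difference
// computation, forwarding the call budget and the strategy settings.
MinimumState ComputeHessianSketch(const MnFcn &mfcn, const MinimumState &st,
                                  const MnUserTransformation &trafo,
                                  unsigned int maxcalls, const MnStrategy &strat)
{
   if (mfcn.Fcn().HasGradient() && mfcn.Fcn().HasHessian())
      return ComputeAnalytical(mfcn.Fcn(), st, trafo);
   return ComputeNumerical(mfcn, st, trafo, maxcalls, strat);
}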
unsigned int n = st.Parameters().Vec().size();
const MnMachinePrecision &prec = trafo.Precision();

std::unique_ptr<AnalyticalGradientCalculator> hc;
hc = std::make_unique<ExternalInternalGradientCalculator>(fcn, trafo);
hc = std::make_unique<AnalyticalGradientCalculator>(fcn, trafo);

print.Error("Error computing analytical Hessian. MnHesse fails and will return a null matrix");
for (unsigned int i = 0; i < n; i++)

FunctionGradient gr(st.Gradient().Grad(), g2);

print.Debug("Original error matrix", vhmat);
print.Debug("PosDef error matrix", vhmat);
print.Warn("Matrix inversion fails; will return diagonal matrix");

for (unsigned int j = 0; j < n; j++) {
VariableMetricEDMEstimator estim;

if (tmpErr.IsMadePosDef()) {
   double edm = estim.Estimate(gr, err);
   return MinimumState(st.Parameters(), err, gr, edm, st.NFcn());

MinimumError err(vhmat, 0.);
double edm = estim.Estimate(gr, err);

print.Debug("Hessian is ACCURATE. New state:", "\n First derivative:", st.Gradient().Grad(),
            "\n Covariance matrix:", vhmat, "\n Edm:", edm);

return MinimumState(st.Parameters(), err, gr, edm, st.NFcn());
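For orientation, the number produced by VariableMetricEDMEstimator::Estimate is, up to Minuit2's internal conventions, half of the gradient contracted with the covariance (inverse-Hessian) matrix. The helper below is an illustrative re-computation of that quantity, not the estimator's own code; the name EdmSketch is made up for this example.

#include "Minuit2/FunctionGradient.h"
#include "Minuit2/MinimumError.h"

// edm ~ 0.5 * g^T * V * g, with g the gradient and V the covariance matrix.
double EdmSketch(const ROOT::Minuit2::FunctionGradient &gr, const ROOT::Minuit2::MinimumError &err)
{
   const unsigned int n = gr.Grad().size();
   double edm = 0.;
   for (unsigned int i = 0; i < n; i++)
      for (unsigned int j = 0; j < n; j++)
         edm += 0.5 * gr.Grad()(i) * err.InvHessian()(i, j) * gr.Grad()(j);
   return edm;
}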
MnPrint print("MnHesse");

const MnMachinePrecision &prec = trafo.Precision();

unsigned int n = st.Parameters().Vec().size();
if (st.Gradient().IsAnalytical()) {
   print.Info("Using analytical gradient but a numerical Hessian calculator - it could be not optimal");
   FunctionGradient ... (st.Parameters());
   print.Warn("Analytical calculator ", g2);

print.Debug("Gradient is", st.Gradient().IsAnalytical() ? "analytical" : "numerical", "\n point:", x,
            "\n fcn :", amin, "\n grad :", grd, "\n step :", gst, "\n g2 :", g2);
for (unsigned int i = 0; i < n; i++) {
   double d = std::fabs(gst(i));

   print.Debug("Derivative parameter", i, "d =", d, "dmin =", dmin);

   print.Debug("cycle", icyc, "mul", multpy, "\tsag =", sag, "d =", d);
if (trafo.Parameter(i).HasLimits()) {

print.Warn("2nd derivative zero for parameter", trafo.Name(trafo.ExtOfInt(i)),
           "; MnHesse fails and will return diagonal matrix");

for (unsigned int j = 0; j < n; j++) {

d = std::sqrt(2. * aimsag / std::fabs(g2(i)));
if (trafo.Parameter(i).HasLimits())
   d = std::min(0.5, d);
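The step update just above, d = sqrt(2 * aimsag / |g2(i)|), follows from the symmetric finite-difference scheme used for the diagonal second derivatives: the "sagitta" 0.5 * (f(x+d) + f(x-d) - 2*f(x)) is approximately 0.5 * f''(x) * d*d, so choosing d so that the sagitta equals the numerically safe target aimsag gives exactly that formula. The standalone helper below illustrates the scheme; the function and variable names are illustrative, not those of MnHesse.cxx.

#include <cmath>
#include <functional>

// Illustrative central-difference estimates for a 1-D slice of the FCN, plus the
// step that would make the sagitta equal to the target aimsag.
double HesseStepSketch(const std::function<double(double)> &f, double x, double d,
                       double aimsag, double &g1, double &g2)
{
   const double fs1 = f(x + d);
   const double fs2 = f(x - d);
   const double amin = f(x);
   const double sag = 0.5 * (fs1 + fs2 - 2. * amin); // ~ 0.5 * f''(x) * d^2
   g1 = (fs1 - fs2) / (2. * d);                      // central first derivative
   g2 = 2. * sag / (d * d);                          // central second derivative
   return std::sqrt(2. * aimsag / std::fabs(g2));    // step giving sagitta == aimsag
}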
print.Debug("g1 =", grd(i), "g2 =", g2(i), "step =", gst(i), "d =", d,
            "diffd =", std::fabs(d - dlast) / d, "diffg2 =", std::fabs(g2(i) - g2bfor) / g2(i));

if (std::fabs((d - dlast) / d) < strat.HessianStepTolerance())

print.Warn("Maximum number of allowed function calls exhausted; will return diagonal matrix");
for (unsigned int j = 0; j < n; j++) {

... st.Edm(), mfcn.NumOfCalls());

print.Debug("Second derivatives", g2);

if (strat.Strategy() > 0) {
   FunctionGradient gr = hgc(st.Parameters(), FunctionGradient(grd, g2, gst));
print.Debug("Original error matrix", vhmat);

if (strat.HessianForcePosDef()) {

print.Debug("PosDef error matrix", vhmat);

print.Warn("Matrix inversion fails; will return diagonal matrix");

for (unsigned int j = 0; j < n; j++) {

VariableMetricEDMEstimator estim;

if (tmpErr.IsMadePosDef()) {
   double edm = estim.Estimate(gr, err);
   return MinimumState(st.Parameters(), err, gr, edm, mfcn.NumOfCalls());

MinimumError err(vhmat, 0.);
double edm = estim.Estimate(gr, err);

print.Debug("Hessian is ACCURATE. New state:", "\n First derivative:", grd, "\n Second derivative:", g2,
            "\n Gradient step:", gst, "\n Covariance matrix:", vhmat, "\n Edm:", edm);

return MinimumState(st.Parameters(), err, gr, edm, mfcn.NumOfCalls());
Types and functions referenced in the listing:

FCNBase: interface (abstract class) defining the function to be minimized, which has to be implemented by the user.
FunctionMinimum: class holding the full result of the minimization; both internal and external (MnUserParameterState) representations of the parameters at the minimum are available.
LASymMatrix: class describing a symmetric matrix of size n.
MinimumState: keeps the information (position, gradient, 2nd derivatives, etc.) after one minimization step.
MnFcn: wrapper class to the FCNBase interface used internally by Minuit.
MnHesse::operator()(const FCNBase &, const MnUserParameterState &, unsigned int maxcalls = 0) const: FCN + MnUserParameterState.
MnUserParameterState: class which holds the external user and/or internal Minuit representation of the parameters and errors.
MnUserParameterState members used above: unsigned int NFcn() const, unsigned int VariableParameters() const, const std::vector<double> &IntParameters() const, const MnUserTransformation &Trafo() const.
Numerical2PGradientCalculator: class performing the numerical gradient calculation.
int Invert(LASymMatrix &): in-place matrix inversion, returning an integer status code.
MnAlgebraicVector: alias for LAVector.
MnAlgebraicSymMatrix: alias for LASymMatrix.
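As a usage reference for the operator() overload listed above, here is a minimal, self-contained sketch of running MIGRAD and then recomputing the covariance matrix with MnHesse. The FCN class, its quadratic, and the parameter names are illustrative; the Minuit2 calls used here are taken from the public Minuit2 interface.

#include "Minuit2/FCNBase.h"
#include "Minuit2/FunctionMinimum.h"
#include "Minuit2/MnHesse.h"
#include "Minuit2/MnMigrad.h"
#include "Minuit2/MnUserParameterState.h"
#include "Minuit2/MnUserParameters.h"
#include <vector>

using namespace ROOT::Minuit2;

// Toy chi2-like FCN: a simple quadratic with its minimum at (1, -0.5).
class QuadraticFCN : public FCNBase {
public:
   double operator()(const std::vector<double> &par) const override
   {
      const double dx = par[0] - 1.;
      const double dy = par[1] + 0.5;
      return dx * dx + 2. * dy * dy;
   }
   double Up() const override { return 1.; } // 1-sigma error definition
};

int main()
{
   QuadraticFCN fcn;
   MnUserParameters upar;
   upar.Add("a", 0., 0.1);
   upar.Add("b", 0., 0.1);

   MnMigrad migrad(fcn, upar);
   FunctionMinimum min = migrad(); // minimize first

   // Recompute the full, symmetric error matrix at the minimum.
   MnHesse hesse;
   MnUserParameterState state = hesse(fcn, min.UserState(), /*maxcalls=*/0);
   return state.HasCovariance() ? 0 : 1;
}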