Logo ROOT  
Reference Guide
 
Loading...
Searching...
No Matches
LikelihoodGradientJob.cxx
Go to the documentation of this file.
1/*
2 * Project: RooFit
3 * Authors:
4 * PB, Patrick Bos, Netherlands eScience Center, p.bos@esciencecenter.nl
5 *
6 * Copyright (c) 2021, CERN
7 *
8 * Redistribution and use in source and binary forms,
9 * with or without modification, are permitted according to the terms
10 * listed in LICENSE (http://roofit.sourceforge.net/license.txt)
11 */
12
14
18#include "RooMsgService.h"
19#include "RooMinimizer.h"
20
21#include "Minuit2/MnStrategy.h"
22
23namespace RooFit {
24namespace TestStatistics {
25
/// Construct a job that computes the likelihood gradient in parallel worker
/// processes, one task per gradient component.
///
/// \param likelihood           Shared likelihood whose gradient is calculated.
/// \param calculation_is_clean Shared flags tracking whether cached results are up to date.
/// \param N_dim                Number of parameters; sets the gradient size and the task count.
/// \param minimizer            Non-owning pointer to the driving RooMinimizer.
26LikelihoodGradientJob::LikelihoodGradientJob(std::shared_ptr<RooAbsL> likelihood,
27 std::shared_ptr<WrapperCalculationCleanFlags> calculation_is_clean,
28 std::size_t N_dim, RooMinimizer *minimizer)
29 : LikelihoodGradientWrapper(std::move(likelihood), std::move(calculation_is_clean), N_dim, minimizer), grad_(N_dim)
30{
31 // Note to future maintainers: take care when storing the minimizer_fcn pointer. The
32 // RooAbsMinimizerFcn subclasses may get cloned inside MINUIT, which means the pointer
33 // should also somehow be updated in this class.
// One parallel task per gradient component/dimension.
34 N_tasks_ = N_dim;
// Pre-reserve so later assignment of Minuit-internal parameter values does not reallocate.
35 minuit_internal_x_.reserve(N_dim);
36}
37
// Copy constructor (its signature line, Doxygen line 38, is elided in this view).
// Copies the gradient state, the numerical derivator and the cached
// Minuit-internal parameter values; needed because MINUIT may clone the
// minimizer function (see note in the main constructor).
39 : MultiProcess::Job(other), LikelihoodGradientWrapper(other), grad_(other.grad_), gradf_(other.gradf_),
40 N_tasks_(other.N_tasks_), minuit_internal_x_(other.minuit_internal_x_)
41{
42}
43
// clone() (signature line elided in this view): polymorphic copy, returning a
// heap-allocated duplicate via the copy constructor. Caller takes ownership.
45{
46 return new LikelihoodGradientJob(*this);
47}
48
// One-argument synchronizeParameterSettings overload (first line and body line,
// Doxygen lines 49 and 52, are elided in this view).
// NOTE(review): the elided body presumably forwards to the two-argument overload
// using the minimizer's function — confirm against the upstream source.
50 const std::vector<ROOT::Fit::ParameterSettings> &parameter_settings)
51{
53}
54
// Two-argument synchronizeParameterSettings (first line, Doxygen line 55, elided
// in this view): seeds grad_ with initial derivative elements derived from the
// parameter settings, via the numerical derivator.
56 ROOT::Math::IMultiGenFunction *function, const std::vector<ROOT::Fit::ParameterSettings> &parameter_settings)
57{
58 gradf_.SetInitialGradient(function, parameter_settings, grad_);
59}
60
// synchronizeWithMinimizer (signature line elided in this view): copy the
// relevant minimizer options — strategy and error definition — into the
// derivator's settings.
62{
63 setStrategy(options.Strategy());
64 setErrorLevel(options.ErrorDef());
65}
66
// setStrategy (signature line elided in this view): translate an integer Minuit
// strategy level into concrete derivator settings via MnStrategy.
68{
// Negative strategy values are invalid; MnStrategy takes an unsigned level.
69 assert(istrat >= 0);
70 ROOT::Minuit2::MnStrategy strategy(static_cast<unsigned int>(istrat));
71
// NOTE(review): Doxygen lines 72-73 are elided here; presumably they forward
// strategy.GradientStepTolerance() / strategy.GradientTolerance() to
// setStepTolerance / setGradTolerance — confirm against the upstream source.
74 setNCycles(strategy.GradientNCycles());
75}
76
/// Forward the gradient step tolerance to the numerical derivator.
77void LikelihoodGradientJob::setStepTolerance(double step_tolerance) const
78{
79 gradf_.SetStepTolerance(step_tolerance);
80}
81
/// Forward the gradient tolerance to the numerical derivator.
82void LikelihoodGradientJob::setGradTolerance(double grad_tolerance) const
83{
84 gradf_.SetGradTolerance(grad_tolerance);
85}
86
/// Set the number of gradient-calculation cycles on the numerical derivator.
87void LikelihoodGradientJob::setNCycles(unsigned int ncycles) const
88{
89 gradf_.SetNCycles(ncycles);
90}
91
/// Forward the minimizer's error definition (ErrorDef/UP) to the numerical derivator.
92void LikelihoodGradientJob::setErrorLevel(double error_level) const
93{
94 gradf_.SetErrorLevel(error_level);
95}
96
97///////////////////////////////////////////////////////////////////////////////
98/// Job overrides:
99
// evaluate_task (signature line elided in this view): executed on a worker
// process; computes the single gradient component identified by `task`.
101{
102 run_derivator(task);
103}
104
105// SYNCHRONIZATION FROM WORKERS TO MASTER
106
// send_back_task_result_from_worker (signature line elided in this view):
// package this job's id, the task index and the computed gradient element into
// a POD task_result_t and ship it to the master over the ZeroMQ messenger.
108{
109 task_result_t task_result{id_, task, grad_[task]};
110 zmq::message_t message(sizeof(task_result_t));
// task_result_t is sent as raw bytes, so it must remain trivially copyable.
111 memcpy(message.data(), &task_result, sizeof(task_result_t));
112 get_manager()->messenger().send_from_worker_to_master(std::move(message));
113}
114
// receive_task_result_on_master (signature line elided in this view): decode a
// worker's task_result_t message, store the gradient element at its component
// index, and report whether all outstanding tasks have been collected.
116{
117 auto result = message.data<task_result_t>();
118 grad_[result->task_id] = result->grad;
// NOTE(review): Doxygen line 119 is elided here; presumably it decrements
// N_tasks_at_workers_ — confirm against the upstream source.
120 bool job_completed = (N_tasks_at_workers_ == 0);
121 return job_completed;
122}
123
124// END SYNCHRONIZATION FROM WORKERS TO MASTER
125
126// SYNCHRONIZATION FROM MASTER TO WORKERS (STATE)
127
// Master-side state publication (signature line elided in this view): broadcast
// the current gradient seed values and Minuit-internal parameter values to all
// workers, tagged with a freshly incremented state id so workers can detect
// stale task messages.
129{
130 // TODO optimization: only send changed parameters (now sending all)
131 zmq::message_t gradient_message(grad_.begin(), grad_.end());
132 zmq::message_t minuit_internal_x_message(minuit_internal_x_.begin(), minuit_internal_x_.end());
// Bump the state id before publishing so tasks created afterwards reference the new state.
133 ++state_id_;
134 get_manager()->messenger().publish_from_master_to_workers(id_, state_id_, std::move(gradient_message),
135 std::move(minuit_internal_x_message));
136}
137
// Worker-side state reception (signature line elided in this view): read the
// two-part multipart message published by the master — first the gradient seed
// elements, then the Minuit-internal parameter values — and copy both into this
// worker's local buffers. Assumes grad_ and minuit_internal_x_ already have the
// right sizes, since std::copy writes into the existing storage.
139{
140 bool more;
141
// NOTE(review): Doxygen line 142 is elided here; presumably it receives the
// state id preceding the payload parts — confirm against the upstream source.
143
// Part 1: gradient seed elements; `more` must signal a following message part.
144 auto gradient_message = get_manager()->messenger().receive_from_master_on_worker<zmq::message_t>(&more);
145 assert(more);
146 auto gradient_message_begin = gradient_message.data<ROOT::Minuit2::DerivatorElement>();
147 auto gradient_message_end =
148 gradient_message_begin + gradient_message.size() / sizeof(ROOT::Minuit2::DerivatorElement);
149 std::copy(gradient_message_begin, gradient_message_end, grad_.begin());
150
// Part 2: Minuit-internal parameter values; must be the final message part.
151 auto minuit_internal_x_message = get_manager()->messenger().receive_from_master_on_worker<zmq::message_t>(&more);
152 assert(!more);
153 auto minuit_internal_x_message_begin = minuit_internal_x_message.data<double>();
154 auto minuit_internal_x_message_end =
155 minuit_internal_x_message_begin + minuit_internal_x_message.size() / sizeof(double);
156 std::copy(minuit_internal_x_message_begin, minuit_internal_x_message_end, minuit_internal_x_.begin());
157
// NOTE(review): Doxygen lines 158-159 are elided here — confirm against the
// upstream source what bookkeeping (e.g. storing the new state id) they perform.
160}
161
162// END SYNCHRONIZATION FROM MASTER TO WORKERS (STATE)
163
164///////////////////////////////////////////////////////////////////////////////
165/// Calculation stuff (mostly duplicates of RooGradMinimizerFcn code):
166
167void LikelihoodGradientJob::run_derivator(unsigned int i_component) const
168{
169 // Calculate the derivative etc for these parameters
170 grad_[i_component] = gradf_.FastPartialDerivative(
171 minimizer_->getMultiGenFcn(), minimizer_->fitter()->Config().ParamsSettings(), i_component, grad_[i_component]);
172}
173
// Gradient calculation driver (signature line elided in this view; presumably
// calculate_all or similar — confirm): on the master, enqueue one task per
// gradient component, wait for all worker results, then mark the cached
// gradient as clean. No-op on non-master processes.
175{
176 if (get_manager()->process_manager().is_master()) {
// NOTE(review): Doxygen line 177 is elided here; presumably it publishes the
// updated state to the workers before queueing — confirm against upstream.
178
179 // master fills queue with tasks
180 for (std::size_t ix = 0; ix < N_tasks_; ++ix) {
181 MultiProcess::JobTask job_task{id_, state_id_, ix};
182 get_manager()->queue().add(job_task);
183 }
// NOTE(review): Doxygen line 184 is elided; presumably it resets
// N_tasks_at_workers_ to N_tasks_ — confirm against upstream.
185 // wait for task results back from workers to master (put into _grad)
// NOTE(review): Doxygen line 186 is elided; presumably gather_worker_results().
187
188 calculation_is_clean_->gradient = true;
189 }
190}
191
// fillGradient (signature line elided in this view): on the master, recompute
// the gradient if the cache is dirty, then copy the derivative of each
// component from grad_ into the caller-provided array. No-op on workers.
193{
194 if (get_manager()->process_manager().is_master()) {
195 if (!calculation_is_clean_->gradient) {
// NOTE(review): Doxygen line 196 is elided here; presumably it triggers the
// parallel gradient recalculation — confirm against the upstream source.
197 }
198
199 // put the results from _grad into *grad
200 for (Int_t ix = 0; ix < minimizer_->getNPar(); ++ix) {
201 grad[ix] = grad_[ix].derivative;
202 }
203 }
204}
205
/// Like fillGradient, but seeds the calculation with the previous gradient,
/// second derivatives and step sizes, and writes the updated second-derivative
/// and step-size values back into the caller's arrays. Master process only.
///
/// \param grad           [out] derivative per parameter
/// \param previous_grad  [in]  previous first derivatives used as seeds
/// \param previous_g2    [in/out] previous second derivatives; updated on return
/// \param previous_gstep [in/out] previous step sizes; updated on return
206void LikelihoodGradientJob::fillGradientWithPrevResult(double *grad, double *previous_grad, double *previous_g2,
207 double *previous_gstep)
208{
209 if (get_manager()->process_manager().is_master()) {
// Seed each DerivatorElement with the previous iteration's results.
210 for (std::size_t i_component = 0; i_component < N_tasks_; ++i_component) {
211 grad_[i_component] = {previous_grad[i_component], previous_g2[i_component], previous_gstep[i_component]};
212 }
213
214 if (!calculation_is_clean_->gradient) {
// NOTE(review): Doxygen line 215 is elided here; presumably it triggers the
// parallel gradient recalculation — confirm against the upstream source.
216 }
217
218 // put the results from _grad into *grad
219 for (Int_t ix = 0; ix < minimizer_->getNPar(); ++ix) {
220 grad[ix] = grad_[ix].derivative;
221 previous_g2[ix] = grad_[ix].second_derivative;
222 previous_gstep[ix] = grad_[ix].step_size;
223 }
224 }
225}
226
/// Store Minuit's internal-coordinate parameter values. These are the values
/// later shipped to the workers during state publication, so that all processes
/// differentiate at the same point.
227void LikelihoodGradientJob::updateMinuitInternalParameterValues(const std::vector<double> &minuit_internal_x)
228{
229 minuit_internal_x_ = minuit_internal_x;
230}
231
// usesMinuitInternalValues (signature line elided in this view): always true —
// this class wants Minuit to hand it internal-coordinate parameter values
// rather than externally transformed ones.
233{
234 return true;
235}
236
237} // namespace TestStatistics
238} // namespace RooFit
double
const std::vector< ROOT::Fit::ParameterSettings > & ParamsSettings() const
get the vector of parameter settings (const method)
Definition FitConfig.h:86
const FitConfig & Config() const
access to the fit configuration (const method)
Definition Fitter.h:412
Documentation for the abstract class IBaseFunctionMultiDim.
Definition IFunction.h:62
double ErrorDef() const
error definition
API class for defining three levels of strategies: low (0), medium (1), high (>=2); acts on: Migrad (...
Definition MnStrategy.h:27
double GradientStepTolerance() const
Definition MnStrategy.h:41
double GradientTolerance() const
Definition MnStrategy.h:42
unsigned int GradientNCycles() const
Definition MnStrategy.h:40
void SetupDifferentiate(const ROOT::Math::IBaseFunctionMultiDim *function, const double *cx, const std::vector< ROOT::Fit::ParameterSettings > &parameters)
This function sets internal state based on input parameters.
void SetNCycles(unsigned int value)
void SetInitialGradient(const ROOT::Math::IBaseFunctionMultiDim *function, const std::vector< ROOT::Fit::ParameterSettings > &parameters, std::vector< DerivatorElement > &gradient)
This function was not implemented as in Minuit2.
DerivatorElement FastPartialDerivative(const ROOT::Math::IBaseFunctionMultiDim *function, const std::vector< ROOT::Fit::ParameterSettings > &parameters, unsigned int i_component, const DerivatorElement &previous)
std::size_t id_
Definition Job.h:45
std::size_t state_id_
Definition Job.h:46
JobManager * get_manager()
Get JobManager instance; create and activate if necessary.
Definition Job.cxx:116
void gather_worker_results()
Wait for all tasks to be retrieved for the current Job.
Definition Job.cxx:130
value_t receive_from_master_on_worker(bool *more=nullptr)
Definition Messenger.h:176
void publish_from_master_to_workers(T &&item)
specialization that sends the final message
Definition Messenger.h:150
void add(JobTask job_task)
Enqueue a task.
Definition Queue.cxx:61
bool usesMinuitInternalValues() override
Implement usesMinuitInternalValues to return true when you want Minuit to send this class Minuit-inte...
void update_state() override
Virtual function to update any necessary state on workers.
std::vector< ROOT::Minuit2::DerivatorElement > grad_
void fillGradientWithPrevResult(double *grad, double *previous_grad, double *previous_g2, double *previous_gstep) override
void updateMinuitInternalParameterValues(const std::vector< double > &minuit_internal_x) override
Minuit passes in parameter values that may not conform to RooFit internal standards (like applying ra...
void synchronizeParameterSettings(ROOT::Math::IMultiGenFunction *function, const std::vector< ROOT::Fit::ParameterSettings > &parameter_settings) override
void run_derivator(unsigned int i_component) const
Calculation stuff (mostly duplicates of RooGradMinimizerFcn code):
void send_back_task_result_from_worker(std::size_t task) override
void synchronizeWithMinimizer(const ROOT::Math::MinimizerOptions &options) override
Synchronize minimizer settings with calculators in child classes.
LikelihoodGradientJob(std::shared_ptr< RooAbsL > likelihood, std::shared_ptr< WrapperCalculationCleanFlags > calculation_is_clean, std::size_t N_dim, RooMinimizer *minimizer)
void setStepTolerance(double step_tolerance) const
void setGradTolerance(double grad_tolerance) const
void evaluate_task(std::size_t task) override
Job overrides:
bool receive_task_result_on_master(const zmq::message_t &message) override
LikelihoodGradientJob * clone() const override
Virtual base class for implementation of likelihood gradient calculation strategies.
std::shared_ptr< WrapperCalculationCleanFlags > calculation_is_clean_
virtual void synchronizeParameterSettings(const std::vector< ROOT::Fit::ParameterSettings > &parameter_settings)
RooMinimizer is a wrapper class around ROOT::Fit::Fitter that provides a seamless interface between th...
ROOT::Math::IMultiGenFunction * getMultiGenFcn() const
ROOT::Fit::Fitter * fitter()
Return underlying ROOT fitter object.
Int_t getNPar() const
std::size_t State
Definition types.h:23
The namespace RooFit contains mostly switches that change the behaviour of functions of PDFs (or othe...
Definition Common.h:18
combined job_object, state and task identifier type
Definition types.h:25