Logo ROOT  
Reference Guide
 
Loading...
Searching...
No Matches
LikelihoodGradientJob.cxx
Go to the documentation of this file.
1/*
2 * Project: RooFit
3 * Authors:
4 * PB, Patrick Bos, Netherlands eScience Center, p.bos@esciencecenter.nl
5 *
6 * Copyright (c) 2021, CERN
7 *
8 * Redistribution and use in source and binary forms,
9 * with or without modification, are permitted according to the terms
10 * listed in LICENSE (http://roofit.sourceforge.net/license.txt)
11 */
12
14
20#include "RooMsgService.h"
21#include "RooMinimizer.h"
22
23#include "Minuit2/MnStrategy.h"
24
25namespace RooFit {
26namespace TestStatistics {
27
// Constructor: forwards the likelihood, clean-calculation flags, dimension,
// minimizer and shared offset to the base LikelihoodGradientWrapper, and sizes
// the gradient store (grad_) and task count (N_tasks_) to one entry per
// parameter dimension. minuit_internal_x_ is pre-reserved to avoid
// reallocation during parameter updates.
// NOTE(review): this listing is a Doxygen source-view extraction; original
// line 37 (between the reserve call and the closing brace) was dropped because
// it contained hyperlinked tokens — recover it from the upstream ROOT sources.
28LikelihoodGradientJob::LikelihoodGradientJob(std::shared_ptr<RooAbsL> likelihood,
 29 std::shared_ptr<WrapperCalculationCleanFlags> calculation_is_clean,
 30 std::size_t N_dim, RooMinimizer *minimizer, SharedOffset offset)
 31 : LikelihoodGradientWrapper(std::move(likelihood), std::move(calculation_is_clean), N_dim, minimizer,
 32 std::move(offset)),
 33 grad_(N_dim),
 34 N_tasks_(N_dim)
 35{
 36 minuit_internal_x_.reserve(N_dim);
 38}
39
41 const std::vector<ROOT::Fit::ParameterSettings> &parameter_settings)
42{
44}
45
47 ROOT::Math::IMultiGenFunction *function, const std::vector<ROOT::Fit::ParameterSettings> &parameter_settings)
48{
49 gradf_.SetInitialGradient(function, parameter_settings, grad_);
50}
51
53{
54 setStrategy(options.Strategy());
55 setErrorLevel(options.ErrorDef());
56}
57
59{
60 assert(istrat >= 0);
61 ROOT::Minuit2::MnStrategy strategy(static_cast<unsigned int>(istrat));
62
65 setNCycles(strategy.GradientNCycles());
66}
67
68void LikelihoodGradientJob::setStepTolerance(double step_tolerance) const
69{
70 gradf_.SetStepTolerance(step_tolerance);
71}
72
73void LikelihoodGradientJob::setGradTolerance(double grad_tolerance) const
74{
75 gradf_.SetGradTolerance(grad_tolerance);
76}
77
78void LikelihoodGradientJob::setNCycles(unsigned int ncycles) const
79{
80 gradf_.SetNCycles(ncycles);
81}
82
83void LikelihoodGradientJob::setErrorLevel(double error_level) const
84{
85 gradf_.SetErrorLevel(error_level);
86}
87
88///////////////////////////////////////////////////////////////////////////////
89/// Job overrides:
90
92{
93 run_derivator(task);
94}
95
96// SYNCHRONIZATION FROM WORKERS TO MASTER
97
99{
100 task_result_t task_result{id_, task, grad_[task]};
101 zmq::message_t message(sizeof(task_result_t));
102 memcpy(message.data(), &task_result, sizeof(task_result_t));
103 get_manager()->messenger().send_from_worker_to_master(std::move(message));
104}
105
107{
108 auto result = message.data<task_result_t>();
109 grad_[result->task_id] = result->grad;
111 bool job_completed = (N_tasks_at_workers_ == 0);
112 return job_completed;
113}
114
115// END SYNCHRONIZATION FROM WORKERS TO MASTER
116
117// SYNCHRONIZATION FROM MASTER TO WORKERS (STATE)
118
// MASTER -> WORKERS state publication: packs the current gradient elements and
// Minuit-internal parameter values into ZMQ messages, snapshots maxFCN and the
// fcn offset from the minimizer, bumps state_id_, then publishes to workers —
// one branch additionally ships the shared offsets, the other does not.
// NOTE(review): this extracted listing dropped original lines 119 (the
// function signature), 128 (the branch condition choosing whether offsets are
// sent), and 130/133/135 (the publish_from_master_to_workers(...) call openers
// — see tooltip index L286-288). Recover them from upstream ROOT sources.
120{
 121 // TODO optimization: only send changed parameters (now sending all)
 122 zmq::message_t gradient_message(grad_.begin(), grad_.end());
 123 zmq::message_t minuit_internal_x_message(minuit_internal_x_.begin(), minuit_internal_x_.end());
 124 double maxFCN = minimizer_->maxFCN();
 125 double fcnOffset = minimizer_->fcnOffset();
 126 ++state_id_;
 127
 // NOTE(review): missing line 128 here — the condition guarding the
 // offsets-included publish branch.
 129 zmq::message_t offsets_message(shared_offset_.offsets().begin(), shared_offset_.offsets().end());
 // NOTE(review): missing line 130 — opener of the publish call whose argument
 // list continues below.
 131 id_, state_id_, isCalculating_, maxFCN, fcnOffset, std::move(gradient_message),
 132 std::move(minuit_internal_x_message), std::move(offsets_message));
 134 } else {
 // NOTE(review): missing line 135 — opener of the offsets-free publish call.
 136 std::move(gradient_message),
 137 std::move(minuit_internal_x_message));
 138 }
 139}
140
// NOTE(review): the extracted listing dropped original lines 141 (the
// signature — presumably a helper that publishes only the isCalculating_ flag
// to workers, to judge from its position between the state-publish and
// state-receive functions) and 144 (a publish call). Only the state-id bump
// survives; recover the rest from upstream ROOT sources.
142{
 143 ++state_id_;
 145}
146
// WORKER side: receive the state the master published — a flag (stripped
// line), maxFCN, fcn offset, the gradient elements, the Minuit-internal
// parameter values, and (optionally, when more message parts follow) the
// Kahan-sum offsets — then re-synchronize parameter settings, which calls the
// likelihood and therefore must run after the offsets are updated.
// NOTE(review): extracted listing dropped original lines 147 (signature; the
// member index L295 shows `void update_state() override`), 151/153 (the first
// receive-and-assign, plausibly of isCalculating_), and 190 (the opener of
// the synchronizeParameterSettings(...) call whose second argument is visible
// at original line 191). Recover from upstream ROOT sources.
148{
 149 bool more;
 150
 // NOTE(review): missing line 151 — the first receive_from_master_on_worker
 // call whose multipart flag the following assert checks.
 152 assert(more);
 154
 155 if (more) {
 156 auto maxFCN = get_manager()->messenger().receive_from_master_on_worker<double>(&more);
 157 minimizer_->maxFCN() = maxFCN;
 158 assert(more);
 159
 160 auto fcnOffset = get_manager()->messenger().receive_from_master_on_worker<double>(&more);
 161 minimizer_->fcnOffset() = fcnOffset;
 162 assert(more);
 163
 164 auto gradient_message = get_manager()->messenger().receive_from_master_on_worker<zmq::message_t>(&more);
 165 assert(more);
 166 auto gradient_message_begin = gradient_message.data<ROOT::Minuit2::DerivatorElement>();
 167 auto gradient_message_end =
 168 gradient_message_begin + gradient_message.size() / sizeof(ROOT::Minuit2::DerivatorElement);
 169 std::copy(gradient_message_begin, gradient_message_end, grad_.begin());
 170
 171 auto minuit_internal_x_message = get_manager()->messenger().receive_from_master_on_worker<zmq::message_t>(&more);
 172 auto minuit_internal_x_message_begin = minuit_internal_x_message.data<double>();
 173 auto minuit_internal_x_message_end =
 174 minuit_internal_x_message_begin + minuit_internal_x_message.size() / sizeof(double);
 175 std::copy(minuit_internal_x_message_begin, minuit_internal_x_message_end, minuit_internal_x_.begin());
 176
 177 if (more) {
 178 // offsets also incoming
 179 auto offsets_message = get_manager()->messenger().receive_from_master_on_worker<zmq::message_t>(&more);
 180 assert(!more);
 181 auto offsets_message_begin = offsets_message.data<ROOT::Math::KahanSum<double>>();
 182 std::size_t N_offsets = offsets_message.size() / sizeof(ROOT::Math::KahanSum<double>);
 183 shared_offset_.offsets().reserve(N_offsets);
 184 auto offsets_message_end = offsets_message_begin + N_offsets;
 185 std::copy(offsets_message_begin, offsets_message_end, shared_offset_.offsets().begin());
 186 }
 187
 188 // note: the next call must stay after the (possible) update of the offset, because it
 189 // calls the likelihood function, so the offset must be correct at this point
 // NOTE(review): missing line 190 — opener of the synchronizeParameterSettings
 // call; its trailing argument is the line below.
 191 minimizer_->fitter()->Config().ParamsSettings());
 192 }
 193}
194
195// END SYNCHRONIZATION FROM MASTER TO WORKERS (STATE)
196
197///////////////////////////////////////////////////////////////////////////////
198/// Calculation stuff (mostly duplicates of RooGradMinimizerFcn code):
199
200void LikelihoodGradientJob::run_derivator(unsigned int i_component) const
201{
202 // Calculate the derivative etc for these parameters
203 grad_[i_component] = gradf_.FastPartialDerivative(
204 minimizer_->getMultiGenFcn(), minimizer_->fitter()->Config().ParamsSettings(), i_component, grad_[i_component]);
205}
206
// Master-side driver for a full gradient evaluation: marks the calculation as
// in progress, enqueues one task per gradient component, waits for the workers'
// results to land in grad_, then marks the gradient clean and the calculation
// finished.
// NOTE(review): extracted listing dropped original lines 207 (signature),
// 211, 218, 220 (by position, the worker-state publish and the
// gather_worker_results() wait — see tooltip index L278-280) and 224. Recover
// from upstream ROOT sources.
208{
 209 if (get_manager()->process_manager().is_master()) {
 210 isCalculating_ = true;
 212
 213 // master fills queue with tasks
 214 for (std::size_t ix = 0; ix < N_tasks_; ++ix) {
 215 MultiProcess::JobTask job_task{id_, state_id_, ix};
 216 get_manager()->queue()->add(job_task);
 217 }
 219 // wait for task results back from workers to master (put into _grad)
 // NOTE(review): missing line 220 here — presumably gather_worker_results().
 221
 222 calculation_is_clean_->gradient = true;
 223 isCalculating_ = false;
 225 }
 226}
227
// Master-side: ensure the cached gradient is up to date (recompute when the
// clean flag is unset), then copy the derivative of each parameter into
// Minuit's output array *grad.
// NOTE(review): extracted listing dropped original lines 228 (the signature,
// presumably `fillGradient(double *grad)` from the base wrapper interface) and
// 232 (the call inside the dirty-gradient branch that triggers the
// recomputation). Recover from upstream ROOT sources.
229{
 230 if (get_manager()->process_manager().is_master()) {
 231 if (!calculation_is_clean_->gradient) {
 // NOTE(review): missing line 232 — the gradient recomputation call.
 233 }
 234
 235 // put the results from _grad into *grad
 236 for (Int_t ix = 0; ix < minimizer_->getNPar(); ++ix) {
 237 grad[ix] = grad_[ix].derivative;
 238 }
 239 }
 240}
241
// Master-side: seed grad_ from Minuit's previous gradient/second-derivative/
// step-size arrays, recompute when the gradient is flagged dirty, then copy
// the updated derivative, g2 and step size for each parameter back out into
// Minuit's arrays.
// NOTE(review): extracted listing dropped original lines 251-252 and 254-256
// (the recomputation call and surrounding lines inside the dirty-gradient
// branch), which is why the brace structure below looks unbalanced. Recover
// from upstream ROOT sources.
242void LikelihoodGradientJob::fillGradientWithPrevResult(double *grad, double *previous_grad, double *previous_g2,
 243 double *previous_gstep)
 244{
 245 if (get_manager()->process_manager().is_master()) {
 246 for (std::size_t i_component = 0; i_component < N_tasks_; ++i_component) {
 247 grad_[i_component] = {previous_grad[i_component], previous_g2[i_component], previous_gstep[i_component]};
 248 }
 249
 250 if (!calculation_is_clean_->gradient) {
 // NOTE(review): missing lines 251-252 — the gradient recomputation call.
 253 }
 // NOTE(review): missing lines 254-256 here.
 257 }
 258 }
 259
 260 // put the results from _grad into *grad
 261 for (Int_t ix = 0; ix < minimizer_->getNPar(); ++ix) {
 262 grad[ix] = grad_[ix].derivative;
 263 previous_g2[ix] = grad_[ix].second_derivative;
 264 previous_gstep[ix] = grad_[ix].step_size;
 265 }
 266 }
 267}
268
269void LikelihoodGradientJob::updateMinuitInternalParameterValues(const std::vector<double> &minuit_internal_x)
270{
271 minuit_internal_x_ = minuit_internal_x;
272}
273
275{
276 return true;
277}
278
279} // namespace TestStatistics
280} // namespace RooFit
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t Int_t Int_t Window_t TString Int_t GCValues_t GetPrimarySelectionOwner GetDisplay GetScreen GetColormap GetNativeEvent const char const char dpyName wid window const char font_name cursor keysym reg const char only_if_exist regb h Point_t winding char text const char depth char const char Int_t count const char ColorStruct_t color const char Pixmap_t Pixmap_t PictureAttributes_t attr const char char ret_data h unsigned char height h offset
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t result
Documentation for the abstract class IBaseFunctionMultiDim.
Definition IFunction.h:61
The Kahan summation is a compensated summation algorithm, which significantly reduces numerical error...
Definition Util.h:122
double ErrorDef() const
error definition
API class for defining four levels of strategies: low (0), medium (1), high (2), very high (>=3); act...
Definition MnStrategy.h:27
double GradientStepTolerance() const
Definition MnStrategy.h:41
double GradientTolerance() const
Definition MnStrategy.h:42
unsigned int GradientNCycles() const
Definition MnStrategy.h:40
void SetInitialGradient(const ROOT::Math::IBaseFunctionMultiDim *function, std::span< const ROOT::Fit::ParameterSettings > parameters, std::vector< DerivatorElement > &gradient)
This function was not implemented as in Minuit2.
void SetNCycles(unsigned int value)
DerivatorElement FastPartialDerivative(const ROOT::Math::IBaseFunctionMultiDim *function, std::span< const ROOT::Fit::ParameterSettings > parameters, unsigned int i_component, const DerivatorElement &previous)
void SetupDifferentiate(const ROOT::Math::IBaseFunctionMultiDim *function, const double *cx, std::span< const ROOT::Fit::ParameterSettings > parameters)
This function sets internal state based on input parameters.
static bool getTimingAnalysis()
Definition Config.cxx:87
std::size_t id_
Definition Job.h:45
std::size_t state_id_
Definition Job.h:46
JobManager * get_manager()
Get JobManager instance; create and activate if necessary.
Definition Job.cxx:112
void gather_worker_results()
Wait for all tasks to be retrieved for the current Job.
Definition Job.cxx:126
value_t receive_from_master_on_worker(bool *more=nullptr)
Definition Messenger.h:176
void send_from_worker_to_master(T &&item)
specialization that sends the final message
Definition Messenger.h:192
void publish_from_master_to_workers(T &&item)
specialization that sends the final message
Definition Messenger.h:150
static void start_timer(std::string section_name)
static void end_timer(std::string section_name)
virtual void add(JobTask job_task)=0
Enqueue a task.
bool usesMinuitInternalValues() override
Implement usesMinuitInternalValues to return true when you want Minuit to send this class Minuit-inte...
void update_state() override
Virtual function to update any necessary state on workers.
std::vector< ROOT::Minuit2::DerivatorElement > grad_
void fillGradientWithPrevResult(double *grad, double *previous_grad, double *previous_g2, double *previous_gstep) override
void updateMinuitInternalParameterValues(const std::vector< double > &minuit_internal_x) override
Minuit passes in parameter values that may not conform to RooFit internal standards (like applying ra...
void synchronizeParameterSettings(ROOT::Math::IMultiGenFunction *function, const std::vector< ROOT::Fit::ParameterSettings > &parameter_settings) override
void run_derivator(unsigned int i_component) const
Calculation stuff (mostly duplicates of RooGradMinimizerFcn code):
void send_back_task_result_from_worker(std::size_t task) override
void synchronizeWithMinimizer(const ROOT::Math::MinimizerOptions &options) override
Synchronize minimizer settings with calculators in child classes.
LikelihoodGradientJob(std::shared_ptr< RooAbsL > likelihood, std::shared_ptr< WrapperCalculationCleanFlags > calculation_is_clean, std::size_t N_dim, RooMinimizer *minimizer, SharedOffset offset)
void setStepTolerance(double step_tolerance) const
void setGradTolerance(double grad_tolerance) const
void evaluate_task(std::size_t task) override
Job overrides:
bool receive_task_result_on_master(const zmq::message_t &message) override
Virtual base class for implementation of likelihood gradient calculation strategies.
std::shared_ptr< WrapperCalculationCleanFlags > calculation_is_clean_
virtual void synchronizeParameterSettings(const std::vector< ROOT::Fit::ParameterSettings > &parameter_settings)
Wrapper class around ROOT::Math::Minimizer that provides a seamless interface between the minimizer f...
auto fitter()
Return underlying ROOT fitter object.
double & fcnOffset() const
double & maxFCN()
ROOT::Math::IMultiGenFunction * getMultiGenFcn() const
int getNPar() const
OffsetVec & offsets()
std::size_t State
Definition types.h:23
The namespace RooFit contains mostly switches that change the behaviour of functions of PDFs (or othe...
Definition JSONIO.h:26
combined job_object, state and task identifier type
Definition types.h:25