namespace TestStatistics {
// ...
      std::shared_ptr<WrapperCalculationCleanFlags> calculation_is_clean,
// ...
      const std::vector<ROOT::Fit::ParameterSettings> &parameter_settings)
// ...
   return job_completed;
// ...
   zmq::message_t gradient_message(grad_.begin(), grad_.end());
// ...
      std::move(minuit_internal_x_message), std::move(offsets_message));
// ...
      std::move(gradient_message), std::move(minuit_internal_x_message));
// ...
   auto gradient_message_end = /* ... */;
   std::copy(gradient_message_begin, gradient_message_end, grad_.begin());
// ...
   auto minuit_internal_x_message_begin = minuit_internal_x_message.data<double>();
   auto minuit_internal_x_message_end =
      minuit_internal_x_message_begin + minuit_internal_x_message.size() / sizeof(double);
   std::copy(minuit_internal_x_message_begin, minuit_internal_x_message_end, minuit_internal_x_.begin());
// ...
   auto offsets_message_end = offsets_message_begin + N_offsets;
// ...
   if (get_manager()->process_manager().is_master()) {
// ...
      for (std::size_t ix = 0; ix < N_tasks_; ++ix) {
// ...
   if (get_manager()->process_manager().is_master()) {
// ...
         grad[ix] = grad_[ix].derivative;
// ...
                                                          double *previous_gstep)
// ...
   if (get_manager()->process_manager().is_master()) {
      for (std::size_t i_component = 0; i_component < N_tasks_; ++i_component) {
         grad_[i_component] = {previous_grad[i_component], previous_g2[i_component], previous_gstep[i_component]};
// ...
         grad[ix] = grad_[ix].derivative;
         previous_g2[ix] = grad_[ix].second_derivative;
         previous_gstep[ix] = grad_[ix].step_size;
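The excerpts above (from the LikelihoodGradientJob source listing) serialize the partial-derivative results into a zmq message, read them back as raw doubles on the other side, and copy the first-derivative components into Minuit's output array. The following standalone sketch mirrors that pattern with illustrative stand-ins: the Element struct only borrows the field names (derivative, second_derivative, step_size) seen in the excerpt, and Buffer/pack/unpack are hypothetical helpers, not ROOT or cppzmq API.

#include <cstddef>
#include <cstring>
#include <iostream>
#include <vector>

// Illustrative stand-in for a derivator result element (field names taken from the excerpt).
struct Element {
   double derivative;
   double second_derivative;
   double step_size;
};

// Hypothetical stand-in for a zmq::message_t-like byte buffer.
using Buffer = std::vector<char>;

// Sender side: pack a vector of trivially copyable elements into a byte buffer.
Buffer pack(const std::vector<Element> &grad)
{
   Buffer msg(grad.size() * sizeof(Element));
   std::memcpy(msg.data(), grad.data(), msg.size());
   return msg;
}

// Receiver side: recover the element count from size() / sizeof(Element),
// mirroring message.size() / sizeof(double) in the excerpt.
std::vector<Element> unpack(const Buffer &msg)
{
   std::vector<Element> grad(msg.size() / sizeof(Element));
   std::memcpy(grad.data(), msg.data(), msg.size());
   return grad;
}

int main()
{
   std::vector<Element> grad_ = {{0.5, 0.01, 1e-3}, {-1.2, 0.02, 2e-3}};
   std::vector<Element> received = unpack(pack(grad_));

   // Copy only the first derivatives into a plain output array, as the fillGradient excerpt does.
   std::vector<double> grad(received.size());
   for (std::size_t ix = 0; ix < received.size(); ++ix)
      grad[ix] = received[ix].derivative;

   std::cout << grad[0] << " " << grad[1] << "\n"; // prints: 0.5 -1.2
}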
const std::vector< ROOT::Fit::ParameterSettings > & ParamsSettings() const
get the vector of parameter settings (const method)
const FitConfig & Config() const
access to the fit configuration (const method)
Documentation for the abstract class IBaseFunctionMultiDim.
The Kahan summation is a compensated summation algorithm, which significantly reduces numerical error...
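As a concrete illustration of the compensated summation described here, the snippet below keeps a running compensation term that feeds the low-order bits lost in each addition back into the next one. It is a generic sketch of the algorithm, not ROOT's ROOT::Math::KahanSum implementation.

#include <cstdio>
#include <vector>

// Generic Kahan (compensated) summation: c carries the rounding error lost in each addition.
double kahan_sum(const std::vector<double> &values)
{
   double sum = 0.0;
   double c = 0.0; // running compensation for lost low-order bits
   for (double v : values) {
      double y = v - c;   // apply the correction carried over from the previous step
      double t = sum + y; // low-order bits of y may be lost in this addition...
      c = (t - sum) - y;  // ...but they are recovered here and re-applied next iteration
      sum = t;
   }
   return sum;
}

int main()
{
   std::vector<double> values(10000000, 0.1);
   double naive = 0.0;
   for (double v : values)
      naive += v;
   // The compensated result stays much closer to 10^6 than the naive accumulation.
   std::printf("naive: %.10f  Kahan: %.10f\n", naive, kahan_sum(values));
}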
int Strategy() const
strategy
double ErrorDef() const
error definition
API class for defining four levels of strategies: low (0), medium (1), high (2), very high (>=3); act...
double GradientStepTolerance() const
double GradientTolerance() const
unsigned int GradientNCycles() const
void SetGradTolerance(double value)
void SetErrorLevel(double value)
void SetupDifferentiate(const ROOT::Math::IBaseFunctionMultiDim *function, const double *cx, const std::vector< ROOT::Fit::ParameterSettings > &parameters)
This function sets internal state based on input parameters.
void SetNCycles(unsigned int value)
void SetInitialGradient(const ROOT::Math::IBaseFunctionMultiDim *function, const std::vector< ROOT::Fit::ParameterSettings > &parameters, std::vector< DerivatorElement > &gradient)
This function was not implemented as in Minuit2.
DerivatorElement FastPartialDerivative(const ROOT::Math::IBaseFunctionMultiDim *function, const std::vector< ROOT::Fit::ParameterSettings > &parameters, unsigned int i_component, const DerivatorElement &previous)
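To make the role of a per-component partial derivative concrete, here is a minimal, self-contained central-difference sketch that returns a first derivative, a second-derivative estimate, and the step used. The Derivative struct, partial_derivative function, and fixed step choice are illustrative assumptions; the actual Minuit2 derivator adapts its step sizes and tolerances iteratively.

#include <cmath>
#include <cstdio>
#include <functional>
#include <vector>

// Illustrative result of differentiating f along one component.
struct Derivative {
   double first;
   double second;
   double step;
};

// Central-difference partial derivative of f with respect to component i, at point x.
Derivative partial_derivative(const std::function<double(const std::vector<double> &)> &f,
                              std::vector<double> x, unsigned int i, double step)
{
   const double f0 = f(x);
   x[i] += step;
   const double f_plus = f(x);
   x[i] -= 2 * step;
   const double f_minus = f(x);
   return {(f_plus - f_minus) / (2 * step),             // first derivative (central difference)
           (f_plus - 2 * f0 + f_minus) / (step * step), // second derivative estimate
           step};
}

int main()
{
   auto rosenbrock = [](const std::vector<double> &p) {
      return 100 * std::pow(p[1] - p[0] * p[0], 2) + std::pow(1 - p[0], 2);
   };
   Derivative d = partial_derivative(rosenbrock, {0.5, 0.5}, 0, 1e-5);
   std::printf("df/dx0 = %g, d2f/dx0^2 = %g, step = %g\n", d.first, d.second, d.step);
}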
void SetStepTolerance(double value)
static bool getTimingAnalysis()
Messenger & messenger() const
JobManager * get_manager()
Get JobManager instance; create and activate if necessary.
void gather_worker_results()
Wait for all tasks to be retrieved for the current Job.
value_t receive_from_master_on_worker(bool *more=nullptr)
void send_from_worker_to_master(T &&item)
specialization that sends the final message
void publish_from_master_to_workers(T &&item)
specialization that sends the final message
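The "specialization that sends the final message" entries suggest the usual variadic multipart-send pattern: every part except the last is sent with a "more parts follow" flag, and the single-argument base case sends the terminating part. The sketch below mimics that pattern with a hypothetical send_part transport standing in for the Messenger/ZeroMQ machinery; it is not the actual Messenger code.

#include <iostream>
#include <string>
#include <utility>

// Hypothetical transport call: 'more' plays the role of a ZMQ_SNDMORE-style flag.
template <typename T>
void send_part(T &&item, bool more)
{
   std::cout << item << (more ? " [more]" : " [final]") << "\n";
}

// Base case: sends the final message of a multipart transmission.
template <typename T>
void publish_from_master_to_workers(T &&item)
{
   send_part(std::forward<T>(item), /*more=*/false);
}

// General case: send this part flagged as "more", then recurse on the remaining parts.
template <typename T, typename... Ts>
void publish_from_master_to_workers(T &&item, Ts &&...rest)
{
   send_part(std::forward<T>(item), /*more=*/true);
   publish_from_master_to_workers(std::forward<Ts>(rest)...);
}

int main()
{
   publish_from_master_to_workers(std::string("state_id"), 42, 3.14);
}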
static void start_timer(std::string section_name)
static void end_timer(std::string section_name)
virtual void add(JobTask job_task)=0
Enqueue a task.
ROOT::Minuit2::NumericalDerivator gradf_
bool usesMinuitInternalValues() override
Implement usesMinuitInternalValues to return true when you want Minuit to send this class Minuit-inte...
void update_state() override
Virtual function to update any necessary state on workers.
void update_workers_state_isCalculating()
std::vector< ROOT::Minuit2::DerivatorElement > grad_
void fillGradientWithPrevResult(double *grad, double *previous_grad, double *previous_g2, double *previous_gstep) override
void updateMinuitInternalParameterValues(const std::vector< double > &minuit_internal_x) override
Minuit passes in parameter values that may not conform to RooFit internal standards (like applying ra...
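The "Minuit-internal" values mentioned here come from Minuit's internal parameterization: for a parameter bounded in [a, b], Minuit minimizes an unbounded internal variable and maps it onto the allowed range. The standalone sketch below shows the transformation pair documented in the MINUIT manual; it only illustrates why values handed over by Minuit may need translation before RooFit can use them, and is not the code of updateMinuitInternalParameterValues.

#include <cmath>
#include <cstdio>

// MINUIT's documented mapping for a parameter bounded in [a, b]:
// external = a + (b - a) / 2 * (sin(internal) + 1)
double minuit_internal_to_external(double internal, double a, double b)
{
   return a + (b - a) / 2.0 * (std::sin(internal) + 1.0);
}

// Inverse mapping, used when translating external (range-respecting) values back to Minuit.
double minuit_external_to_internal(double external, double a, double b)
{
   return std::asin(2.0 * (external - a) / (b - a) - 1.0);
}

int main()
{
   const double a = -10.0, b = 10.0;
   const double internal = 1.3;
   const double external = minuit_internal_to_external(internal, a, b);
   std::printf("internal %.4f -> external %.4f -> internal %.4f\n",
               internal, external, minuit_external_to_internal(external, a, b));
}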
void synchronizeParameterSettings(ROOT::Math::IMultiGenFunction *function, const std::vector< ROOT::Fit::ParameterSettings > &parameter_settings) override
void run_derivator(unsigned int i_component) const
Calculation stuff (mostly duplicates of RooGradMinimizerFcn code):
std::size_t N_tasks_at_workers_
SharedOffset::OffsetVec offsets_previous_
void setNCycles(unsigned int ncycles) const
void send_back_task_result_from_worker(std::size_t task) override
void synchronizeWithMinimizer(const ROOT::Math::MinimizerOptions &options) override
Synchronize minimizer settings with calculators in child classes.
void fillGradient(double *grad) override
LikelihoodGradientJob(std::shared_ptr< RooAbsL > likelihood, std::shared_ptr< WrapperCalculationCleanFlags > calculation_is_clean, std::size_t N_dim, RooMinimizer *minimizer, SharedOffset offset)
void setStepTolerance(double step_tolerance) const
void setGradTolerance(double grad_tolerance) const
void setStrategy(int istrat)
void update_workers_state()
void evaluate_task(std::size_t task) override
Job overrides:
bool receive_task_result_on_master(const zmq::message_t &message) override
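The boolean return value, together with the "return job_completed;" line in the excerpt above, suggests simple master-side bookkeeping: each incoming worker result is stored and the call reports whether all tasks have been retrieved. The class and member names below are illustrative stand-ins, not the actual LikelihoodGradientJob members.

#include <cstddef>
#include <cstdio>
#include <vector>

// Illustrative master-side bookkeeping for partial gradient results.
class GradientResultCollector {
public:
   explicit GradientResultCollector(std::size_t n_tasks) : results_(n_tasks), outstanding_(n_tasks) {}

   // Store one worker result; return true once every task result has arrived.
   bool receive_task_result(std::size_t task, double derivative)
   {
      results_[task] = derivative;
      --outstanding_;
      bool job_completed = (outstanding_ == 0);
      return job_completed;
   }

private:
   std::vector<double> results_;
   std::size_t outstanding_;
};

int main()
{
   GradientResultCollector collector(3);
   std::printf("%d\n", collector.receive_task_result(0, 0.1));  // 0: still waiting
   std::printf("%d\n", collector.receive_task_result(2, -0.4)); // 0: still waiting
   std::printf("%d\n", collector.receive_task_result(1, 1.7));  // 1: all results in
}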
std::vector< double > minuit_internal_x_
void setErrorLevel(double error_level) const
Virtual base class for implementation of likelihood gradient calculation strategies.
std::shared_ptr< WrapperCalculationCleanFlags > calculation_is_clean_
RooMinimizer * minimizer_
virtual void synchronizeParameterSettings(const std::vector< ROOT::Fit::ParameterSettings > &parameter_settings)
SharedOffset shared_offset_
Wrapper class around ROOT::Fit::Fitter that provides a seamless interface between the minimizer functi...
double & fcnOffset() const
ROOT::Math::IMultiGenFunction * getMultiGenFcn() const
ROOT::Fit::Fitter * fitter()
Return underlying ROOT fitter object.
The namespace RooFit contains mostly switches that change the behaviour of functions of PDFs (or othe...
combined job_object, state and task identifier type
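A "combined job_object, state and task identifier type" can be pictured as a small aggregate carrying the three identifiers described above. The struct and field names in this sketch are illustrative, not the actual definition of the type.

#include <cstddef>
#include <cstdio>

// Illustrative aggregate: which Job the task belongs to, which published state it was
// created under, and which task index within that job it represents.
struct JobTask {
   std::size_t job_id;
   std::size_t state_id;
   std::size_t task_id;
};

int main()
{
   JobTask t{/*job_id=*/1, /*state_id=*/7, /*task_id=*/3};
   std::printf("job %zu, state %zu, task %zu\n", t.job_id, t.state_id, t.task_id);
}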