npstat is hosted by Hepforge, IPPP Durham
NPStat  5.10.0
MinuitPLogliOSDE1DFcn.hh
Go to the documentation of this file.
1 #ifndef NPSI_MINUITPLOGLIOSDE1DFCN_HH_
2 #define NPSI_MINUITPLOGLIOSDE1DFCN_HH_
3 
4 /*!
5 // \file MinuitPLogliOSDE1DFcn.hh
6 //
7 // \brief Minuit function to minimize for pseudo-log-likelihood OSDE
8 //
9 // Author: I. Volobouev
10 //
11 // March 2023
12 */
13 
#include <cassert>
#include <vector>

#include "Minuit2/FCNGradientBase.h"

#include "npstat/stat/PLogliOSDE1D.hh"
19 namespace npsi {
20  /**
21  // Target minimization function adapter class for running PLogli OSDE
22  // fits by Minuit2. Note that PLogliOSDE1D object is not copied,
23  // only a reference is held, with the corresponding consequences
24  // for the object lifetimes.
25  */
26  class MinuitPLogliOSDE1DFcn : public ROOT::Minuit2::FCNGradientBase
27  {
28  public:
29  inline MinuitPLogliOSDE1DFcn(
30  const npstat::PLogliOSDE1D& i_osde,
31  const unsigned nCoeffsExpected,
32  const double up=0.05)
33  : osde_(i_osde),
34  lastGradient_(nCoeffsExpected),
35  lastResult_(0.0),
36  up_(up),
37  nCoeffs_(nCoeffsExpected),
38  first_(true)
39  {
40  assert(nCoeffs_);
41  }
42 
43  inline virtual ~MinuitPLogliOSDE1DFcn() {}
44 
45  inline const npstat::PLogliOSDE1D& getOsde() const {return osde_;}
46 
47  // Make Minuit look for a better minimum by reducing
48  // the recommended "Up" value by an order of magnitude
49  inline double Up() const {return up_;}
50 
51  // Do not waste time checking gradient calculations. They are correct.
52  inline bool CheckGradient() const {return false;}
53 
54  inline virtual double operator()(const std::vector<double>& x) const
55  {
56  calculateMemoized(x);
57  return lastResult_;
58  }
59 
60  inline std::vector<double> Gradient(const std::vector<double>& x) const
61  {
62  // Returning std::vector<double> on the stack is
63  // a pretty inefficient thing to do. However,
64  // this is how Minuit2 interface is designed.
65  calculateMemoized(x);
66  return lastGradient_;
67  }
68 
69  private:
71 
72  // Minuit2 has separate interfaces for calculating the function
73  // value and the gradient. I have yet to see a non-trivial realistic
74  // problem in which the function value can not be easily obtained
75  // along with the gradient. It would be much better to have them
76  // calculated together. We will alleviate this design problem
77  // by memoizing the argument / result pairs so that we do not
78  // have to repeat the calculation when the gradient is wanted
79  // at the same point where the function was just evaluated.
80  inline void calculateMemoized(const std::vector<double>& x) const
81  {
82  if (x != lastX_ || first_)
83  {
84  assert(x.size() == nCoeffs_);
85  lastResult_ = -osde_.pseudoLogli(
86  &x[0], nCoeffs_, &lastGradient_[0]);
87  lastX_ = x;
88  for (unsigned i=0; i<nCoeffs_; ++i)
89  lastGradient_[i] *= -1.0;
90  first_ = false;
91  }
92  }
93 
94  const npstat::PLogliOSDE1D& osde_;
95  mutable std::vector<double> lastX_;
96  mutable std::vector<double> lastGradient_;
97  mutable double lastResult_;
98  double up_;
99  unsigned nCoeffs_;
100  mutable bool first_;
101  };
102 }
103 
#endif // NPSI_MINUITPLOGLIOSDE1DFCN_HH_
OSDE based on pseudo-log-likelihood.
Definition: MinuitPLogliOSDE1DFcn.hh:27
Definition: PLogliOSDE1D.hh:24
Definition: fitCompositeJohnson.hh:16