optimizer.h
// -*- lsst-c++ -*-
/*
 * LSST Data Management System
 * Copyright 2008-2013 LSST Corporation.
 *
 * This product includes software developed by the
 * LSST Project (http://www.lsst.org/).
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the LSST License Statement and
 * the GNU General Public License along with this program. If not,
 * see <http://www.lsstcorp.org/LegalNotices/>.
 */

#ifndef LSST_MEAS_MODELFIT_optimizer_h_INCLUDED
#define LSST_MEAS_MODELFIT_optimizer_h_INCLUDED

#include "ndarray.h"

#include "lsst/base.h"              // PTR() macro
#include "lsst/pex/config.h"        // LSST_CONTROL_FIELD() macro
#include "lsst/afw/table/Schema.h"
#include "lsst/afw/table/Catalog.h"
#include "lsst/meas/modelfit/common.h"  // Scalar, Vector, and Matrix typedefs

namespace lsst { namespace meas { namespace modelfit {

class Likelihood;
class Prior;
class Optimizer;

/// Base class for objective functions for Optimizer.
class OptimizerObjective {
public:

    int const dataSize;       ///< number of data points
    int const parameterSize;  ///< number of model parameters

    /// Return a concrete Objective object built from a Likelihood and Prior.
    static PTR(OptimizerObjective) makeFromLikelihood(
        PTR(Likelihood) likelihood,
        PTR(Prior) prior = PTR(Prior)()
    );

    /// Base class constructor; must be called by all subclasses.
    OptimizerObjective(int dataSize_, int parameterSize_) :
        dataSize(dataSize_), parameterSize(parameterSize_)
    {}

    /// Evaluate the Objective on a 1-d grid.
    void fillObjectiveValueGrid(
        ndarray::Array<Scalar const,2,1> const & parameters,
        ndarray::Array<Scalar,1,1> const & output
    ) const;

    /// Evaluate the residuals of the model for a given parameter vector.
    virtual void computeResiduals(
        ndarray::Array<Scalar const,1,1> const & parameters,
        ndarray::Array<Scalar,1,1> const & residuals
    ) const = 0;

    /// Evaluate analytic derivatives of the model, or signal that they are not
    /// available by returning false.
    virtual bool differentiateResiduals(
        ndarray::Array<Scalar const,1,1> const & parameters,
        ndarray::Array<Scalar,2,-2> const & derivatives
    ) const {
        return false;
    }

    /// Return true if the Objective has a Bayesian prior as well as a likelihood.
    virtual bool hasPrior() const { return false; }

    /// Compute the value of the Bayesian prior for the given parameter vector.
    virtual Scalar computePrior(ndarray::Array<Scalar const,1,1> const & parameters) const { return 1.0; }

    /// Compute the first and second derivatives of the Bayesian prior with respect to the parameters.
    virtual void differentiatePrior(
        ndarray::Array<Scalar const,1,1> const & parameters,
        ndarray::Array<Scalar,1,1> const & gradient,
        ndarray::Array<Scalar,2,1> const & hessian
    ) const {
        gradient.deep() = 0.0;
        hessian.deep() = 0.0;
    }

    virtual ~OptimizerObjective() {}
};
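
/*
 *  Example (an illustrative sketch, not part of the original header): a minimal
 *  concrete Objective that fits a straight line y = a + b*x to data by least
 *  squares.  The class name and data members are hypothetical; only
 *  computeResiduals() must be overridden, while the derivative and prior hooks
 *  may keep their default implementations.
 *
 *  @code
 *  class LineObjective : public OptimizerObjective {
 *  public:
 *      LineObjective(
 *          ndarray::Array<Scalar const,1,1> const & x,
 *          ndarray::Array<Scalar const,1,1> const & y
 *      ) : OptimizerObjective(x.getSize<0>(), 2), _x(x), _y(y) {}
 *
 *      virtual void computeResiduals(
 *          ndarray::Array<Scalar const,1,1> const & parameters,
 *          ndarray::Array<Scalar,1,1> const & residuals
 *      ) const {
 *          // residuals = data - model, evaluated point by point
 *          for (int i = 0; i < dataSize; ++i) {
 *              residuals[i] = _y[i] - (parameters[0] + parameters[1]*_x[i]);
 *          }
 *      }
 *
 *  private:
 *      ndarray::Array<Scalar const,1,1> _x, _y;
 *  };
 *  @endcode
 */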

/// Configuration object for Optimizer.
class OptimizerControl {
public:
    LSST_CONTROL_FIELD(
        noSR1Term, bool,
        "If true, ignore the SR1 update term in the Hessian, resulting in a Levenberg-Marquardt-like method"
    );

    LSST_CONTROL_FIELD(
        skipSR1UpdateThreshold, double,
        "Skip the SR1 update if |v.s| / (|v||s|) is less than this threshold"
    );

    LSST_CONTROL_FIELD(
        minTrustRadiusThreshold, double,
        "If the trust radius falls below this threshold, consider the algorithm converged"
    );

    LSST_CONTROL_FIELD(
        gradientThreshold, double,
        "If the maximum of the gradient falls below this threshold, consider the algorithm converged"
    );

    LSST_CONTROL_FIELD(
        numDiffRelStep, double,
        "relative step size used for numerical derivatives (added to other steps)"
    );

    LSST_CONTROL_FIELD(
        numDiffAbsStep, double,
        "absolute step size used for numerical derivatives (added to other steps)"
    );

    LSST_CONTROL_FIELD(
        numDiffTrustRadiusStep, double,
        "step size (in units of trust radius) used for numerical derivatives (added to relative step)"
    );

    LSST_CONTROL_FIELD(
        stepAcceptThreshold, double,
        "steps with reduction ratio greater than this are accepted"
    );

    LSST_CONTROL_FIELD(
        trustRegionInitialSize, double,
        "the initial trust region will be set to this value"
    );

    LSST_CONTROL_FIELD(
        trustRegionGrowReductionRatio, double,
        "steps with reduction ratio greater than this may increase the trust radius"
    );

    LSST_CONTROL_FIELD(
        trustRegionGrowStepFraction, double,
        "steps with length this fraction of the trust radius may increase the trust radius"
    );

    LSST_CONTROL_FIELD(
        trustRegionGrowFactor, double,
        "when increasing the trust region size, multiply the radius by this factor"
    );

    LSST_CONTROL_FIELD(
        trustRegionShrinkReductionRatio, double,
        "steps with reduction ratio less than this will decrease the trust radius"
    );

    LSST_CONTROL_FIELD(
        trustRegionShrinkFactor, double,
        "when reducing the trust region size, multiply the radius by this factor"
    );

    LSST_CONTROL_FIELD(
        trustRegionSolverTolerance, double,
        "value passed as the tolerance to solveTrustRegion"
    );

    LSST_CONTROL_FIELD(
        maxInnerIterations, int,
        "maximum number of iterations (i.e. function evaluations and trust region subproblems) per step"
    );

    LSST_CONTROL_FIELD(
        maxOuterIterations, int,
        "maximum number of steps"
    );

    LSST_CONTROL_FIELD(
        doSaveIterations, bool,
        "whether to save all iterations for debugging purposes"
    );

    OptimizerControl() :
        noSR1Term(false), skipSR1UpdateThreshold(1E-8),
        minTrustRadiusThreshold(1E-5),
        gradientThreshold(1E-5),
        numDiffRelStep(0.0), numDiffAbsStep(0.0), numDiffTrustRadiusStep(0.1),
        stepAcceptThreshold(0.0),
        trustRegionInitialSize(1.0),
        trustRegionGrowReductionRatio(0.75),
        trustRegionGrowStepFraction(0.8),
        trustRegionGrowFactor(2.0),
        trustRegionShrinkReductionRatio(0.25),
        trustRegionShrinkFactor(1.0/3.0),
        trustRegionSolverTolerance(1E-8),
        maxInnerIterations(20),
        maxOuterIterations(500),
        doSaveIterations(false)
    {}
};
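
/*
 *  Example (illustrative, not part of the original header): adjusting a few
 *  control fields before constructing an Optimizer.  The values shown are
 *  arbitrary; each field declared above is a public data member created by
 *  LSST_CONTROL_FIELD.
 *
 *  @code
 *  OptimizerControl ctrl;
 *  ctrl.gradientThreshold = 1E-7;        // demand a smaller gradient at convergence
 *  ctrl.minTrustRadiusThreshold = 1E-7;  // ...or a smaller final trust radius
 *  ctrl.maxOuterIterations = 1000;       // allow more steps before giving up
 *  @endcode
 */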

/// Records the intermediate states of an Optimizer in an afw::table catalog.
class OptimizerHistoryRecorder {
public:

    OptimizerHistoryRecorder(
        afw::table::Schema & schema,
        PTR(Model) model,
        bool doRecordDerivatives
    );

    explicit OptimizerHistoryRecorder(afw::table::Schema const & schema);

    void apply(
        int outerIterCount,
        int innerIterCount,
        afw::table::BaseCatalog & history,
        Optimizer const & optimizer
    ) const;

    void unpackDerivatives(
        ndarray::Array<Scalar const,1,1> const & nested,
        Vector & gradient,
        Matrix & hessian
    ) const;

    void unpackDerivatives(
        afw::table::BaseRecord const & record,
        Vector & gradient,
        Matrix & hessian
    ) const;

    void unpackDerivatives(
        ndarray::Array<Scalar const,1,1> const & nested,
        ndarray::Array<Scalar,1,1> const & gradient,
        ndarray::Array<Scalar,2,2> const & hessian
    ) const;

    void unpackDerivatives(
        afw::table::BaseRecord const & record,
        ndarray::Array<Scalar,1,1> const & gradient,
        ndarray::Array<Scalar,2,2> const & hessian
    ) const;

    void fillObjectiveModelGrid(
        afw::table::BaseRecord const & record,
        ndarray::Array<Scalar const,2,1> const & parameters,
        ndarray::Array<Scalar,1,1> const & output
    ) const;

};
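
/*
 *  Example (an illustrative sketch, not part of the original header): recording
 *  the optimizer's progress in an afw::table catalog.  The `model`, `objective`,
 *  starting `parameters`, and `ctrl` objects are assumed to exist already, and
 *  the recorder constructor is assumed to add its fields to the supplied schema.
 *
 *  @code
 *  afw::table::Schema schema;
 *  OptimizerHistoryRecorder recorder(schema, model, true);
 *  afw::table::BaseCatalog history(schema);
 *  Optimizer optimizer(objective, parameters, ctrl);
 *  optimizer.run(recorder, history);  // run while appending history records
 *  @endcode
 */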

/// A numerical optimizer customized for least-squares problems with Bayesian priors.
class Optimizer {
public:

    typedef OptimizerObjective Objective;
    typedef OptimizerControl Control;
    typedef OptimizerHistoryRecorder HistoryRecorder;

    enum StateFlags {
        CONVERGED_GRADZERO = 0x0001,
        CONVERGED_TR_SMALL = 0x0002,
        CONVERGED = CONVERGED_GRADZERO | CONVERGED_TR_SMALL,
        FAILED_MAX_INNER_ITERATIONS = 0x0010,
        FAILED_MAX_OUTER_ITERATIONS = 0x0020,
        FAILED_MAX_ITERATIONS = 0x0030,
        FAILED_EXCEPTION = 0x0040,
        FAILED_NAN = 0x0080,
        FAILED = FAILED_MAX_INNER_ITERATIONS | FAILED_MAX_OUTER_ITERATIONS | FAILED_EXCEPTION | FAILED_NAN,
        STATUS_STEP_REJECTED = 0x0100,
        STATUS_STEP_ACCEPTED = 0x0200,
        STATUS_STEP = STATUS_STEP_REJECTED | STATUS_STEP_ACCEPTED,
        STATUS_TR_UNCHANGED = 0x1000,
        STATUS_TR_DECREASED = 0x2000,
        STATUS_TR_INCREASED = 0x4000,
        STATUS_TR = STATUS_TR_UNCHANGED | STATUS_TR_DECREASED | STATUS_TR_INCREASED,
        STATUS = STATUS_STEP | STATUS_TR,
    };

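    /*
     *  Note (not part of the original header): the state word returned by
     *  getState() is a bitwise OR of the flags above, so callers should test it
     *  with masks rather than equality, e.g.:
     *
     *  @code
     *  optimizer.run();
     *  int state = optimizer.getState();
     *  if (state & Optimizer::FAILED) {
     *      // inspect the individual FAILED_* bits to diagnose the failure
     *  } else if (state & Optimizer::CONVERGED_TR_SMALL) {
     *      // converged because the trust radius fell below minTrustRadiusThreshold
     *  }
     *  @endcode
     */
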
    Optimizer(
        PTR(Objective const) objective,
        ndarray::Array<Scalar const,1,1> const & parameters,
        Control const & ctrl
    );

    PTR(Objective const) getObjective() const { return _objective; }

    Control const & getControl() const { return _ctrl; }

    bool step() { return _stepImpl(0); }

    bool step(HistoryRecorder const & recorder, afw::table::BaseCatalog & history) {
        return _stepImpl(0, &recorder, &history);
    }

    int run() { return _runImpl(); }

    int run(HistoryRecorder const & recorder, afw::table::BaseCatalog & history) {
        return _runImpl(&recorder, &history);
    }

    int getState() const { return _state; }

    Scalar getObjectiveValue() const { return _current.objectiveValue; }

    ndarray::Array<Scalar const,1,1> getParameters() const { return _current.parameters; }

    ndarray::Array<Scalar const,1,1> getResiduals() const { return _current.residuals; }

    ndarray::Array<Scalar const,1,1> getGradient() const { return _gradient; }

    ndarray::Array<Scalar const,2,2> getHessian() const { return _hessian; }

    void removeSR1Term();

private:

    struct IterationData {
        Scalar objectiveValue;
        Scalar priorValue;
        ndarray::Array<Scalar,1,1> parameters;
        ndarray::Array<Scalar,1,1> residuals;

        IterationData(int dataSize, int parameterSize);

        void swap(IterationData & other);
    };

    bool _stepImpl(
        int outerIterCount,
        HistoryRecorder const * recorder=NULL,
        afw::table::BaseCatalog * history=NULL
    );

    int _runImpl(HistoryRecorder const * recorder=NULL, afw::table::BaseCatalog * history=NULL);

    void _computeDerivatives();

    int _state;
    PTR(Objective const) _objective;
    Control _ctrl;
    double _trustRadius;
    IterationData _current;
    IterationData _next;
    ndarray::Array<Scalar,1,1> _step;
    ndarray::Array<Scalar,1,1> _gradient;
    ndarray::Array<Scalar,2,2> _hessian;
    ndarray::Array<Scalar,2,-2> _residualDerivative;
    Matrix _sr1b;
    Vector _sr1v;
    Vector _sr1jtr;
};
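
/*
 *  Example (an illustrative sketch, not part of the original header): a complete
 *  fit driven through the public interface above.  The `likelihood` and `prior`
 *  objects and the choice of starting point are assumptions of the sketch.
 *
 *  @code
 *  OptimizerControl ctrl;
 *  PTR(OptimizerObjective) objective =
 *      OptimizerObjective::makeFromLikelihood(likelihood, prior);
 *  ndarray::Array<Scalar,1,1> start = ndarray::allocate(objective->parameterSize);
 *  start.deep() = 0.0;  // problem-specific initial parameter values
 *  Optimizer optimizer(objective, start, ctrl);
 *  optimizer.run();
 *  if (optimizer.getState() & Optimizer::CONVERGED) {
 *      ndarray::Array<Scalar const,1,1> best = optimizer.getParameters();
 *      Scalar value = optimizer.getObjectiveValue();
 *  }
 *  @endcode
 */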

/// Solve a symmetric quadratic matrix equation with a ball constraint.
void solveTrustRegion(
    ndarray::Array<Scalar,1,1> const & x,
    ndarray::Array<Scalar const,2,1> const & F, ndarray::Array<Scalar const,1,1> const & g,
    double r, double tolerance
);
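
/*
 *  Note (not part of the original header): this is the standard trust-region
 *  subproblem.  Reading F as the symmetric Hessian approximation and g as the
 *  gradient, solveTrustRegion fills x with an approximate solution of
 *
 *      minimize   0.5 * x^T F x + g^T x
 *      subject to |x| <= r
 *
 *  to the given tolerance; the Optimizer uses it (with
 *  OptimizerControl::trustRegionSolverTolerance as the tolerance) to compute
 *  each candidate step within the current trust radius.
 */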

}}} // namespace lsst::meas::modelfit

#endif // !LSST_MEAS_MODELFIT_optimizer_h_INCLUDED