LSST Data Management Base Package
optimizer.h
// -*- lsst-c++ -*-
/*
 * LSST Data Management System
 * Copyright 2008-2013 LSST Corporation.
 *
 * This product includes software developed by the
 * LSST Project (http://www.lsst.org/).
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the LSST License Statement and
 * the GNU General Public License along with this program. If not,
 * see <http://www.lsstcorp.org/LegalNotices/>.
 */

#ifndef LSST_MEAS_MODELFIT_optimizer_h_INCLUDED
#define LSST_MEAS_MODELFIT_optimizer_h_INCLUDED

#include "ndarray.h"

#include "lsst/base.h"
#include "lsst/pex/config.h"
#include "lsst/afw/table/Schema.h"
#include "lsst/afw/table/BaseRecord.h"
#include "lsst/meas/modelfit/common.h"

namespace lsst { namespace meas { namespace modelfit {

class Likelihood;
class Prior;
class Optimizer;
40
45public:
46
47 int const dataSize;
48 int const parameterSize;
49
62 );
63
67 OptimizerObjective(int dataSize_, int parameterSize_) :
68 dataSize(dataSize_), parameterSize(parameterSize_)
69 {}
70
91 ndarray::Array<Scalar const,2,1> const & parameters,
92 ndarray::Array<Scalar,1,1> const & output
93 ) const;
94
103 virtual void computeResiduals(
104 ndarray::Array<Scalar const,1,1> const & parameters,
105 ndarray::Array<Scalar,1,1> const & residuals
106 ) const = 0;
107
122 ndarray::Array<Scalar const,1,1> const & parameters,
123 ndarray::Array<Scalar,2,-2> const & derivatives
124 ) const {
125 return false;
126 }
127
128
134 virtual bool hasPrior() const { return false; }
135
142 virtual Scalar computePrior(ndarray::Array<Scalar const,1,1> const & parameters) const { return 1.0; }
143
158 virtual void differentiatePrior(
159 ndarray::Array<Scalar const,1,1> const & parameters,
160 ndarray::Array<Scalar,1,1> const & gradient,
161 ndarray::Array<Scalar,2,1> const & hessian
162 ) const {
163 gradient.deep() = 0.0;
164 hessian.deep() = 0.0;
165 }
166
168};
169
/// Configuration object for Optimizer.
class OptimizerControl {
public:

    LSST_CONTROL_FIELD(
        noSR1Term, bool,
        "If true, ignore the SR1 update term in the Hessian, resulting in a Levenberg-Marquardt-like method"
    );

    LSST_CONTROL_FIELD(
        skipSR1UpdateThreshold, double,
        "Skip the SR1 update if |v.s| / (|v||s|) is less than this threshold"
    );

    LSST_CONTROL_FIELD(
        minTrustRadiusThreshold, double,
        "If the trust radius falls below this threshold, consider the algorithm converged"
    );

    LSST_CONTROL_FIELD(
        gradientThreshold, double,
        "If the maximum of the gradient falls below this threshold, consider the algorithm converged"
    );

    LSST_CONTROL_FIELD(
        numDiffRelStep, double,
        "relative step size used for numerical derivatives (added to other steps)"
    );

    LSST_CONTROL_FIELD(
        numDiffAbsStep, double,
        "absolute step size used for numerical derivatives (added to other steps)"
    );

    LSST_CONTROL_FIELD(
        numDiffTrustRadiusStep, double,
        "step size (in units of trust radius) used for numerical derivatives (added to relative step)"
    );

    LSST_CONTROL_FIELD(
        stepAcceptThreshold, double,
        "steps with reduction ratio greater than this are accepted"
    );

    LSST_CONTROL_FIELD(
        trustRegionInitialSize, double,
        "the initial trust region will be set to this value"
    );

    LSST_CONTROL_FIELD(
        trustRegionGrowReductionRatio, double,
        "steps with reduction ratio greater than this may increase the trust radius"
    );

    LSST_CONTROL_FIELD(
        trustRegionGrowStepFraction, double,
        "steps with length this fraction of the trust radius may increase the trust radius"
    );

    LSST_CONTROL_FIELD(
        trustRegionGrowFactor, double,
        "when increasing the trust region size, multiply the radius by this factor"
    );

    LSST_CONTROL_FIELD(
        trustRegionShrinkReductionRatio, double,
        "steps with reduction ratio less than this will decrease the trust radius"
    );

    LSST_CONTROL_FIELD(
        trustRegionShrinkFactor, double,
        "when reducing the trust region size, multiply the radius by this factor"
    );

    LSST_CONTROL_FIELD(
        trustRegionSolverTolerance, double,
        "value passed as the tolerance to solveTrustRegion"
    );

    LSST_CONTROL_FIELD(
        maxInnerIterations, int,
        "maximum number of iterations (i.e. function evaluations and trust region subproblems) per step"
    );

    LSST_CONTROL_FIELD(
        maxOuterIterations, int,
        "maximum number of steps"
    );

    LSST_CONTROL_FIELD(
        doSaveIterations, bool,
        "whether to save all iterations for debugging purposes"
    );

};

class OptimizerHistoryRecorder {
public:

    OptimizerHistoryRecorder(
        afw::table::Schema & schema,
        std::shared_ptr<Model> model,
        bool doRecordDerivatives
    );

    OptimizerHistoryRecorder(afw::table::Schema const & schema);

    void apply(
        int outerIterCount,
        int innerIterCount,
        afw::table::BaseCatalog & history,
        Optimizer const & optimizer
    ) const;

    void unpackDerivatives(
        ndarray::Array<Scalar const,1,1> const & nested,
        Vector & gradient,
        Matrix & hessian
    ) const;

    void unpackDerivatives(
        afw::table::BaseRecord const & record,
        Vector & gradient,
        Matrix & hessian
    ) const;

    void unpackDerivatives(
        ndarray::Array<Scalar const,1,1> const & nested,
        ndarray::Array<Scalar,1,1> const & gradient,
        ndarray::Array<Scalar,2,2> const & hessian
    ) const;

    void unpackDerivatives(
        afw::table::BaseRecord const & record,
        ndarray::Array<Scalar,1,1> const & gradient,
        ndarray::Array<Scalar,2,2> const & hessian
    ) const;

    void fillObjectiveModelGrid(
        afw::table::BaseRecord const & record,
        ndarray::Array<Scalar const,2,1> const & parameters,
        ndarray::Array<Scalar,1,1> const & output
    ) const;

};

/// A numerical optimizer customized for least-squares problems with Bayesian priors.
class Optimizer {
public:

    typedef OptimizerObjective Objective;
    typedef OptimizerControl Control;
    typedef OptimizerHistoryRecorder HistoryRecorder;

    Optimizer(
        std::shared_ptr<Objective const> objective,
        ndarray::Array<Scalar const,1,1> const & parameters,
        Control const & ctrl
    );

    std::shared_ptr<Objective const> getObjective() const { return _objective; }

    Control const & getControl() const { return _ctrl; }

    bool step() { return _stepImpl(0); }

    bool step(HistoryRecorder const & recorder, afw::table::BaseCatalog & history) {
        return _stepImpl(0, &recorder, &history);
    }

    int run() { return _runImpl(); }

    int run(HistoryRecorder const & recorder, afw::table::BaseCatalog & history) {
        return _runImpl(&recorder, &history);
    }

    int getState() const { return _state; }

    Scalar getObjectiveValue() const { return _current.objectiveValue; }

    ndarray::Array<Scalar const,1,1> getParameters() const { return _current.parameters; }

    ndarray::Array<Scalar const,1,1> getResiduals() const { return _current.residuals; }

    ndarray::Array<Scalar const,1,1> getGradient() const { return _gradient; }

    ndarray::Array<Scalar const,2,2> getHessian() const { return _hessian; }

    /// Remove the symmetric-rank-1 secant term from the Hessian, making it just (J^T J).
    void removeSR1Term();

private:

    struct IterationData {
        Scalar objectiveValue;
        Scalar priorValue;
        ndarray::Array<Scalar,1,1> parameters;
        ndarray::Array<Scalar,1,1> residuals;

        IterationData(int dataSize, int parameterSize);

        void swap(IterationData & other);
    };

    bool _stepImpl(
        int outerIterCount,
        HistoryRecorder const * recorder=NULL,
        afw::table::BaseCatalog * history=NULL
    );

    int _runImpl(HistoryRecorder const * recorder=NULL, afw::table::BaseCatalog * history=NULL);

    void _computeDerivatives();

    int _state;
    std::shared_ptr<Objective const> _objective;
    Control _ctrl;
    double _trustRadius;
    IterationData _current;
    IterationData _next;
    ndarray::Array<Scalar,1,1> _step;
    ndarray::Array<Scalar,1,1> _gradient;
    ndarray::Array<Scalar,2,2> _hessian;
    ndarray::Array<Scalar,2,-2> _residualDerivative;
    Matrix _sr1b;
    Vector _sr1v;
    Vector _sr1jtr;
};

/// Solve a symmetric quadratic matrix equation with a ball constraint.
void solveTrustRegion(
    ndarray::Array<Scalar,1,1> const & x,
    ndarray::Array<Scalar const,2,1> const & F,
    ndarray::Array<Scalar const,1,1> const & g,
    double r,
    double tolerance
);

}}} // namespace lsst::meas::modelfit

#endif // !LSST_MEAS_MODELFIT_optimizer_h_INCLUDED
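
Usage sketch (illustrative, not part of optimizer.h): the snippet below shows how the declarations above are meant to fit together, using a hypothetical ConstantObjective that fits a single constant to a small data vector and a hypothetical exampleFit driver. It assumes a default-constructed OptimizerControl carries reasonable settings; in real pipelines an Objective is more typically obtained from OptimizerObjective::makeFromLikelihood.

// Illustrative sketch only -- ConstantObjective and exampleFit are hypothetical.
#include <memory>

#include "ndarray.h"
#include "lsst/meas/modelfit/optimizer.h"

namespace mf = lsst::meas::modelfit;

// Toy objective: the model is a single constant c, so residuals[i] = data[i] - c.
class ConstantObjective : public mf::OptimizerObjective {
public:
    explicit ConstantObjective(ndarray::Array<mf::Scalar const,1,1> const & data) :
        mf::OptimizerObjective(static_cast<int>(data.getSize<0>()), 1),  // dataSize, parameterSize
        _data(data)
    {}

    void computeResiduals(
        ndarray::Array<mf::Scalar const,1,1> const & parameters,
        ndarray::Array<mf::Scalar,1,1> const & residuals
    ) const override {
        for (int i = 0; i < dataSize; ++i) {
            residuals[i] = _data[i] - parameters[0];
        }
    }
    // differentiateResiduals is not overridden, so the Optimizer falls back to
    // numerical derivatives controlled by the numDiff* fields of OptimizerControl.

private:
    ndarray::Array<mf::Scalar const,1,1> _data;
};

mf::Scalar exampleFit() {
    ndarray::Array<mf::Scalar,1,1> data = ndarray::allocate(3);
    data[0] = 1.0; data[1] = 2.0; data[2] = 3.0;

    auto objective = std::make_shared<ConstantObjective>(data);

    ndarray::Array<mf::Scalar,1,1> initial = ndarray::allocate(1);
    initial[0] = 0.0;  // starting guess for the constant

    // Assumes the (elided) OptimizerControl constructor supplies sensible defaults;
    // here we only cap the number of outer steps.
    mf::OptimizerControl ctrl;
    ctrl.maxOuterIterations = 100;

    mf::Optimizer optimizer(objective, initial, ctrl);
    optimizer.run();  // iterate until convergence or failure; inspect getState() afterwards

    return optimizer.getParameters()[0];  // least-squares solution: the mean of data, ~2.0
}

For diagnostics, the run(recorder, history) and step(recorder, history) overloads accept an OptimizerHistoryRecorder together with an afw::table::BaseCatalog, so that each iteration of the fit can be recorded and inspected later.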