LSST Applications g070148d5b3+33e5256705,g0d53e28543+25c8b88941,g0da5cf3356+2dd1178308,g1081da9e2a+62d12e78cb,g17e5ecfddb+7e422d6136,g1c76d35bf8+ede3a706f7,g295839609d+225697d880,g2e2c1a68ba+cc1f6f037e,g2ffcdf413f+853cd4dcde,g38293774b4+62d12e78cb,g3b44f30a73+d953f1ac34,g48ccf36440+885b902d19,g4b2f1765b6+7dedbde6d2,g5320a0a9f6+0c5d6105b6,g56b687f8c9+ede3a706f7,g5c4744a4d9+ef6ac23297,g5ffd174ac0+0c5d6105b6,g6075d09f38+66af417445,g667d525e37+2ced63db88,g670421136f+2ced63db88,g71f27ac40c+2ced63db88,g774830318a+463cbe8d1f,g7876bc68e5+1d137996f1,g7985c39107+62d12e78cb,g7fdac2220c+0fd8241c05,g96f01af41f+368e6903a7,g9ca82378b8+2ced63db88,g9d27549199+ef6ac23297,gabe93b2c52+e3573e3735,gb065e2a02a+3dfbe639da,gbc3249ced9+0c5d6105b6,gbec6a3398f+0c5d6105b6,gc9534b9d65+35b9f25267,gd01420fc67+0c5d6105b6,geee7ff78d7+a14128c129,gf63283c776+ede3a706f7,gfed783d017+0c5d6105b6,w.2022.47
LSST Data Management Base Package
Loading...
Searching...
No Matches
optimizer.cc
Go to the documentation of this file.
1// -*- lsst-c++ -*-
2/*
3 * LSST Data Management System
4 * Copyright 2008-2013 LSST Corporation.
5 *
6 * This product includes software developed by the
7 * LSST Project (http://www.lsst.org/).
8 *
9 * This program is free software: you can redistribute it and/or modify
10 * it under the terms of the GNU General Public License as published by
11 * the Free Software Foundation, either version 3 of the License, or
12 * (at your option) any later version.
13 *
14 * This program is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 * GNU General Public License for more details.
18 *
19 * You should have received a copy of the LSST License Statement and
20 * the GNU General Public License along with this program. If not,
21 * see <http://www.lsstcorp.org/LegalNotices/>.
22 */
23
#include "pybind11/pybind11.h"

#include "ndarray/pybind11.h"

// NOTE(review): the extraction dropped the project includes at this point
// (line-number gap in the generated listing). Restored the headers this file
// plainly requires — LSST_DECLARE_CONTROL_FIELD comes from pex/config's
// python.h, and all wrapped types come from optimizer.h. Confirm against the
// repository before merging.
#include "lsst/pex/config/python.h"  // for LSST_DECLARE_CONTROL_FIELD
#include "lsst/meas/modelfit/optimizer.h"
36namespace py = pybind11;
37using namespace pybind11::literals;
38
39namespace lsst {
40namespace meas {
41namespace modelfit {
42namespace {
43
44using PyOptimizerObjective = py::class_<OptimizerObjective, std::shared_ptr<OptimizerObjective>>;
45using PyOptimizerControl = py::class_<OptimizerControl, std::shared_ptr<OptimizerControl>>;
46using PyOptimizerHistoryRecorder =
47 py::class_<OptimizerHistoryRecorder, std::shared_ptr<OptimizerHistoryRecorder>>;
48using PyOptimizer = py::class_<Optimizer, std::shared_ptr<Optimizer>>;
49
50static PyOptimizerObjective declareOptimizerObjective(py::module &mod) {
51 PyOptimizerObjective cls(mod, "OptimizerObjective");
52 // Class is abstract, so no constructor.
53 cls.def_readonly("dataSize", &OptimizerObjective::dataSize);
54 cls.def_readonly("parameterSize", &OptimizerObjective::parameterSize);
55 cls.def_static("makeFromLikelihood", &OptimizerObjective::makeFromLikelihood, "likelihood"_a,
56 "prior"_a = nullptr);
57 // class is abstract and not subclassable in Python, so we don't wrap the ctor
58 cls.def("fillObjectiveValueGrid", &OptimizerObjective::fillObjectiveValueGrid, "parameters"_a,
59 "output"_a);
60 cls.def("computeResiduals", &OptimizerObjective::computeResiduals, "parameters"_a, "residuals"_a);
61 cls.def("differentiateResiduals", &OptimizerObjective::differentiateResiduals, "parameters"_a,
62 "derivatives"_a);
63 cls.def("hasPrior", &OptimizerObjective::hasPrior);
64 cls.def("computePrior", &OptimizerObjective::computePrior, "parameters"_a);
65 cls.def("differentiatePrior", &OptimizerObjective::differentiatePrior, "parameters"_a, "gradient"_a,
66 "hessian"_a);
67 return cls;
68}
69
70static PyOptimizerControl declareOptimizerControl(py::module &mod) {
71 PyOptimizerControl cls(mod, "OptimizerControl");
72 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, noSR1Term);
73 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, skipSR1UpdateThreshold);
74 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, minTrustRadiusThreshold);
75 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, gradientThreshold);
76 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffRelStep);
77 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffAbsStep);
78 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffTrustRadiusStep);
79 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, stepAcceptThreshold);
80 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionInitialSize);
81 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowReductionRatio);
82 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowStepFraction);
83 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowFactor);
84 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionShrinkReductionRatio);
85 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionShrinkFactor);
86 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionSolverTolerance);
87 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, maxInnerIterations);
88 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, maxOuterIterations);
89 LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, doSaveIterations);
90 cls.def(py::init<>());
91 return cls;
92}
93
94static PyOptimizerHistoryRecorder declareOptimizerHistoryRecorder(py::module &mod) {
95 PyOptimizerHistoryRecorder cls(mod, "OptimizerHistoryRecorder");
96 cls.def(py::init<afw::table::Schema &, std::shared_ptr<Model>, bool>(), "schema"_a, "model"_a,
97 "doRecordDerivatives"_a);
98 cls.def(py::init<afw::table::Schema const &>(), "schema"_a);
99 cls.def("apply", &OptimizerHistoryRecorder::apply, "outerIterCount"_a, "innerIterCount"_a, "history"_a,
100 "optimizer"_a);
101 cls.def("unpackDerivatives",
102 (void (OptimizerHistoryRecorder::*)(ndarray::Array<Scalar const, 1, 1> const &,
103 ndarray::Array<Scalar, 1, 1> const &,
104 ndarray::Array<Scalar, 2, 2> const &) const) &
106 "nested"_a, "gradient"_a, "hessian"_a);
107 cls.def("unpackDerivatives", (void (OptimizerHistoryRecorder::*)(
108 afw::table::BaseRecord const &, ndarray::Array<Scalar, 1, 1> const &,
109 ndarray::Array<Scalar, 2, 2> const &) const) &
111 "record"_a, "gradient"_a, "hessian"_a);
112 // Other unpackDerivatives overloads do the same thing but with Eigen types,
113 // which makes them redundant in Python where it's all just NumPy.
114 cls.def("fillObjectiveModelGrid", &OptimizerHistoryRecorder::fillObjectiveModelGrid, "record"_a,
115 "parameters"_a, "output"_a);
116 cls.def_readonly("outer", &OptimizerHistoryRecorder::outer);
117 cls.def_readonly("inner", &OptimizerHistoryRecorder::inner);
118 cls.def_readonly("state", &OptimizerHistoryRecorder::state);
119 cls.def_readonly("objective", &OptimizerHistoryRecorder::objective);
120 cls.def_readonly("prior", &OptimizerHistoryRecorder::prior);
121 cls.def_readonly("trust", &OptimizerHistoryRecorder::trust);
122 cls.def_readonly("parameters", &OptimizerHistoryRecorder::parameters);
123 cls.def_readonly("derivatives", &OptimizerHistoryRecorder::derivatives);
124 return cls;
125}
126
127static PyOptimizer declareOptimizer(py::module &mod) {
128 PyOptimizer cls(mod, "Optimizer");
129 // StateFlags enum is used as bitflag, so we wrap values as int class attributes.
130 cls.attr("CONVERGED_GRADZERO") = py::cast(int(Optimizer::CONVERGED_GRADZERO));
131 cls.attr("CONVERGED_TR_SMALL") = py::cast(int(Optimizer::CONVERGED_TR_SMALL));
132 cls.attr("CONVERGED") = py::cast(int(Optimizer::CONVERGED));
133 cls.attr("FAILED_MAX_INNER_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_INNER_ITERATIONS));
134 cls.attr("FAILED_MAX_OUTER_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_OUTER_ITERATIONS));
135 cls.attr("FAILED_MAX_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_ITERATIONS));
136 cls.attr("FAILED_EXCEPTION") = py::cast(int(Optimizer::FAILED_EXCEPTION));
137 cls.attr("FAILED_NAN") = py::cast(int(Optimizer::FAILED_NAN));
138 cls.attr("FAILED") = py::cast(int(Optimizer::FAILED));
139 cls.attr("STATUS_STEP_REJECTED") = py::cast(int(Optimizer::STATUS_STEP_REJECTED));
140 cls.attr("STATUS_STEP_ACCEPTED") = py::cast(int(Optimizer::STATUS_STEP_ACCEPTED));
141 cls.attr("STATUS_STEP") = py::cast(int(Optimizer::STATUS_STEP));
142 cls.attr("STATUS_TR_UNCHANGED") = py::cast(int(Optimizer::STATUS_TR_UNCHANGED));
143 cls.attr("STATUS_TR_DECREASED") = py::cast(int(Optimizer::STATUS_TR_DECREASED));
144 cls.attr("STATUS_TR_INCREASED") = py::cast(int(Optimizer::STATUS_TR_INCREASED));
145 cls.attr("STATUS_TR") = py::cast(int(Optimizer::STATUS_TR));
146 cls.attr("STATUS") = py::cast(int(Optimizer::STATUS));
147 cls.def(py::init<std::shared_ptr<Optimizer::Objective const>, ndarray::Array<Scalar const, 1, 1> const &,
149 "objective"_a, "parameters"_a, "ctrl"_a);
150 cls.def("getObjective", &Optimizer::getObjective);
151 cls.def("getControl", &Optimizer::getControl, py::return_value_policy::copy);
152 cls.def("step", (bool (Optimizer::*)()) & Optimizer::step);
153 cls.def("step", (bool (Optimizer::*)(Optimizer::HistoryRecorder const &, afw::table::BaseCatalog &)) &
155 "recorder"_a, "history"_a);
156 cls.def("run", (int (Optimizer::*)()) & Optimizer::run);
157 cls.def("run", (int (Optimizer::*)(Optimizer::HistoryRecorder const &, afw::table::BaseCatalog &)) &
159 "recorder"_a, "history"_a);
160 cls.def("getState", &Optimizer::getState);
161 cls.def("getObjectiveValue", &Optimizer::getObjectiveValue);
162 cls.def("getParameters", &Optimizer::getParameters);
163 cls.def("getResiduals", &Optimizer::getResiduals);
164 cls.def("getGradient", &Optimizer::getGradient);
165 cls.def("getHessian", &Optimizer::getHessian);
166 cls.def("removeSR1Term", &Optimizer::removeSR1Term);
167 return cls;
168}
169
170PYBIND11_MODULE(optimizer, mod) {
171 py::module::import("lsst.meas.modelfit.model");
172 py::module::import("lsst.meas.modelfit.likelihood");
173 py::module::import("lsst.meas.modelfit.priors");
174
175 auto clsObjective = declareOptimizerObjective(mod);
176 auto clsControl = declareOptimizerControl(mod);
177 auto clsHistoryRecorder = declareOptimizerHistoryRecorder(mod);
178 auto cls = declareOptimizer(mod);
179 cls.attr("Objective") = clsObjective;
180 cls.attr("Control") = clsControl;
181 cls.attr("HistoryRecorder") = clsHistoryRecorder;
182
183 mod.def("solveTrustRegion", &solveTrustRegion, "x"_a, "F"_a, "g"_a, "r"_a, "tolerance"_a);
184}
185
186}
187}
188}
189} // namespace lsst::meas::modelfit::anonymous
void fillObjectiveModelGrid(afw::table::BaseRecord const &record, ndarray::Array< Scalar const, 2, 1 > const &parameters, ndarray::Array< Scalar, 1, 1 > const &output) const
void apply(int outerIterCount, int innerIterCount, afw::table::BaseCatalog &history, Optimizer const &optimizer) const
void unpackDerivatives(ndarray::Array< Scalar const, 1, 1 > const &nested, Vector &gradient, Matrix &hessian) const
Scalar getObjectiveValue() const
Definition: optimizer.h:450
void removeSR1Term()
Remove the symmetric-rank-1 secant term from the Hessian, making it just (J^T J)
ndarray::Array< Scalar const, 2, 2 > getHessian() const
Definition: optimizer.h:458
ndarray::Array< Scalar const, 1, 1 > getResiduals() const
Definition: optimizer.h:454
ndarray::Array< Scalar const, 1, 1 > getParameters() const
Definition: optimizer.h:452
ndarray::Array< Scalar const, 1, 1 > getGradient() const
Definition: optimizer.h:456
OptimizerHistoryRecorder HistoryRecorder
Definition: optimizer.h:404
std::shared_ptr< Objective const > getObjective() const
Definition: optimizer.h:432
Control const & getControl() const
Definition: optimizer.h:434
virtual void differentiatePrior(ndarray::Array< Scalar const, 1, 1 > const &parameters, ndarray::Array< Scalar, 1, 1 > const &gradient, ndarray::Array< Scalar, 2, 1 > const &hessian) const
Compute the first and second derivatives of the Bayesian prior with respect to the parameters.
Definition: optimizer.h:158
virtual void computeResiduals(ndarray::Array< Scalar const, 1, 1 > const &parameters, ndarray::Array< Scalar, 1, 1 > const &residuals) const =0
Evaluate the residuals of the model for a given parameter vector.
virtual Scalar computePrior(ndarray::Array< Scalar const, 1, 1 > const &parameters) const
Compute the value of the Bayesian prior for the given parameter vector.
Definition: optimizer.h:142
virtual bool hasPrior() const
Return true if the Objective has a Bayesian prior as well as a likelihood.
Definition: optimizer.h:134
void fillObjectiveValueGrid(ndarray::Array< Scalar const, 2, 1 > const &parameters, ndarray::Array< Scalar, 1, 1 > const &output) const
Evaluate the Objective on a 1-d grid.
static std::shared_ptr< OptimizerObjective > makeFromLikelihood(std::shared_ptr< Likelihood > likelihood, std::shared_ptr< Prior > prior=std::shared_ptr< Prior >())
Return a concrete Objective object built from a Likelihood and Prior.
virtual bool differentiateResiduals(ndarray::Array< Scalar const, 1, 1 > const &parameters, ndarray::Array< Scalar, 2,-2 > const &derivatives) const
Evaluate analytic derivatives of the model or signal that they are not available.
Definition: optimizer.h:121
PYBIND11_MODULE(_cameraGeom, mod)
Definition: _cameraGeom.cc:38
CatalogT< BaseRecord > BaseCatalog
Definition: fwd.h:72
void solveTrustRegion(ndarray::Array< Scalar, 1, 1 > const &x, ndarray::Array< Scalar const, 2, 1 > const &F, ndarray::Array< Scalar const, 1, 1 > const &g, double r, double tolerance)
Solve a symmetric quadratic matrix equation with a ball constraint.
#define LSST_DECLARE_CONTROL_FIELD(WRAPPER, CLASS, NAME)
Macro used to wrap fields declared by LSST_CONTROL_FIELD using Pybind11.
Definition: python.h:50