optimizer.cc
// -*- lsst-c++ -*-
/*
 * LSST Data Management System
 * Copyright 2008-2013 LSST Corporation.
 *
 * This product includes software developed by the
 * LSST Project (http://www.lsst.org/).
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the LSST License Statement and
 * the GNU General Public License along with this program. If not,
 * see <http://www.lsstcorp.org/LegalNotices/>.
 */

#include "pybind11/pybind11.h"

#include "ndarray/pybind11.h"

#include "lsst/pex/config/python.h"

#include "lsst/meas/modelfit/optimizer.h"
#include "lsst/meas/modelfit/Likelihood.h"
#include "lsst/meas/modelfit/Prior.h"
#include "lsst/afw/table/Catalog.h"

namespace py = pybind11;
using namespace pybind11::literals;

namespace lsst {
namespace meas {
namespace modelfit {
namespace {

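// Aliases for the pybind11 wrapper classes; all use std::shared_ptr holders to match the
// shared-pointer ownership used by the C++ API.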
using PyOptimizerObjective = py::class_<OptimizerObjective, std::shared_ptr<OptimizerObjective>>;
using PyOptimizerControl = py::class_<OptimizerControl, std::shared_ptr<OptimizerControl>>;
using PyOptimizerHistoryRecorder =
        py::class_<OptimizerHistoryRecorder, std::shared_ptr<OptimizerHistoryRecorder>>;
using PyOptimizer = py::class_<Optimizer, std::shared_ptr<Optimizer>>;

static PyOptimizerObjective declareOptimizerObjective(py::module &mod) {
    PyOptimizerObjective cls(mod, "OptimizerObjective");
    // Class is abstract and not subclassable in Python, so we do not wrap a constructor.
    cls.def_readonly("dataSize", &OptimizerObjective::dataSize);
    cls.def_readonly("parameterSize", &OptimizerObjective::parameterSize);
    cls.def_static("makeFromLikelihood", &OptimizerObjective::makeFromLikelihood, "likelihood"_a,
                   "prior"_a = nullptr);
    cls.def("fillObjectiveValueGrid", &OptimizerObjective::fillObjectiveValueGrid, "parameters"_a,
            "output"_a);
    cls.def("computeResiduals", &OptimizerObjective::computeResiduals, "parameters"_a, "residuals"_a);
    cls.def("differentiateResiduals", &OptimizerObjective::differentiateResiduals, "parameters"_a,
            "derivatives"_a);
    cls.def("hasPrior", &OptimizerObjective::hasPrior);
    cls.def("computePrior", &OptimizerObjective::computePrior, "parameters"_a);
    cls.def("differentiatePrior", &OptimizerObjective::differentiatePrior, "parameters"_a, "gradient"_a,
            "hessian"_a);
    return cls;
}

static PyOptimizerControl declareOptimizerControl(py::module &mod) {
    PyOptimizerControl cls(mod, "OptimizerControl");
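    // LSST_DECLARE_CONTROL_FIELD (from lsst/pex/config/python.h) wraps each field that the C++
    // control struct declares with LSST_CONTROL_FIELD.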
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, noSR1Term);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, skipSR1UpdateThreshold);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, minTrustRadiusThreshold);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, gradientThreshold);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffRelStep);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffAbsStep);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffTrustRadiusStep);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, stepAcceptThreshold);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionInitialSize);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowReductionRatio);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowStepFraction);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowFactor);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionShrinkReductionRatio);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionShrinkFactor);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionSolverTolerance);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, maxInnerIterations);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, maxOuterIterations);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, doSaveIterations);
    cls.def(py::init<>());
    return cls;
}

static PyOptimizerHistoryRecorder declareOptimizerHistoryRecorder(py::module &mod) {
    PyOptimizerHistoryRecorder cls(mod, "OptimizerHistoryRecorder");
    cls.def(py::init<afw::table::Schema &, std::shared_ptr<Model>, bool>(), "schema"_a, "model"_a,
            "doRecordDerivatives"_a);
    cls.def(py::init<afw::table::Schema const &>(), "schema"_a);
    cls.def("apply", &OptimizerHistoryRecorder::apply, "outerIterCount"_a, "innerIterCount"_a, "history"_a,
            "optimizer"_a);
    cls.def("unpackDerivatives",
            (void (OptimizerHistoryRecorder::*)(ndarray::Array<Scalar const, 1, 1> const &,
                                                ndarray::Array<Scalar, 1, 1> const &,
                                                ndarray::Array<Scalar, 2, 2> const &) const) &
                    OptimizerHistoryRecorder::unpackDerivatives,
            "nested"_a, "gradient"_a, "hessian"_a);
    cls.def("unpackDerivatives",
            (void (OptimizerHistoryRecorder::*)(afw::table::BaseRecord const &,
                                                ndarray::Array<Scalar, 1, 1> const &,
                                                ndarray::Array<Scalar, 2, 2> const &) const) &
                    OptimizerHistoryRecorder::unpackDerivatives,
            "record"_a, "gradient"_a, "hessian"_a);
    // Other unpackDerivatives overloads do the same thing but with Eigen types,
    // which makes them redundant in Python where it's all just NumPy.
    cls.def("fillObjectiveModelGrid", &OptimizerHistoryRecorder::fillObjectiveModelGrid, "record"_a,
            "parameters"_a, "output"_a);
    cls.def_readonly("outer", &OptimizerHistoryRecorder::outer);
    cls.def_readonly("inner", &OptimizerHistoryRecorder::inner);
    cls.def_readonly("state", &OptimizerHistoryRecorder::state);
    cls.def_readonly("objective", &OptimizerHistoryRecorder::objective);
    cls.def_readonly("prior", &OptimizerHistoryRecorder::prior);
    cls.def_readonly("trust", &OptimizerHistoryRecorder::trust);
    cls.def_readonly("parameters", &OptimizerHistoryRecorder::parameters);
    cls.def_readonly("derivatives", &OptimizerHistoryRecorder::derivatives);
    return cls;
}

static PyOptimizer declareOptimizer(py::module &mod) {
    PyOptimizer cls(mod, "Optimizer");
    // The StateFlags enum is used as a set of bit flags, so we wrap its values as int class attributes.
    cls.attr("CONVERGED_GRADZERO") = py::cast(int(Optimizer::CONVERGED_GRADZERO));
    cls.attr("CONVERGED_TR_SMALL") = py::cast(int(Optimizer::CONVERGED_TR_SMALL));
    cls.attr("CONVERGED") = py::cast(int(Optimizer::CONVERGED));
    cls.attr("FAILED_MAX_INNER_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_INNER_ITERATIONS));
    cls.attr("FAILED_MAX_OUTER_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_OUTER_ITERATIONS));
    cls.attr("FAILED_MAX_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_ITERATIONS));
    cls.attr("FAILED_EXCEPTION") = py::cast(int(Optimizer::FAILED_EXCEPTION));
    cls.attr("FAILED_NAN") = py::cast(int(Optimizer::FAILED_NAN));
    cls.attr("FAILED") = py::cast(int(Optimizer::FAILED));
    cls.attr("STATUS_STEP_REJECTED") = py::cast(int(Optimizer::STATUS_STEP_REJECTED));
    cls.attr("STATUS_STEP_ACCEPTED") = py::cast(int(Optimizer::STATUS_STEP_ACCEPTED));
    cls.attr("STATUS_STEP") = py::cast(int(Optimizer::STATUS_STEP));
    cls.attr("STATUS_TR_UNCHANGED") = py::cast(int(Optimizer::STATUS_TR_UNCHANGED));
    cls.attr("STATUS_TR_DECREASED") = py::cast(int(Optimizer::STATUS_TR_DECREASED));
    cls.attr("STATUS_TR_INCREASED") = py::cast(int(Optimizer::STATUS_TR_INCREASED));
    cls.attr("STATUS_TR") = py::cast(int(Optimizer::STATUS_TR));
    cls.attr("STATUS") = py::cast(int(Optimizer::STATUS));
    cls.def(py::init<std::shared_ptr<Optimizer::Objective const>, ndarray::Array<Scalar const, 1, 1> const &,
                     Optimizer::Control const &>(),
            "objective"_a, "parameters"_a, "ctrl"_a);
    cls.def("getObjective", &Optimizer::getObjective);
    cls.def("getControl", &Optimizer::getControl, py::return_value_policy::copy);
    cls.def("step", (bool (Optimizer::*)()) & Optimizer::step);
    cls.def("step", (bool (Optimizer::*)(Optimizer::HistoryRecorder const &, afw::table::BaseCatalog &)) &
                            Optimizer::step,
            "recorder"_a, "history"_a);
    cls.def("run", (int (Optimizer::*)()) & Optimizer::run);
    cls.def("run", (int (Optimizer::*)(Optimizer::HistoryRecorder const &, afw::table::BaseCatalog &)) &
                           Optimizer::run,
            "recorder"_a, "history"_a);
    cls.def("getState", &Optimizer::getState);
    cls.def("getObjectiveValue", &Optimizer::getObjectiveValue);
    cls.def("getParameters", &Optimizer::getParameters);
    cls.def("getResiduals", &Optimizer::getResiduals);
    cls.def("getGradient", &Optimizer::getGradient);
    cls.def("getHessian", &Optimizer::getHessian);
    cls.def("removeSR1Term", &Optimizer::removeSR1Term);
    return cls;
}

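// Rough sketch of how the wrapped classes fit together from Python (illustrative only; `likelihood`,
// `prior`, and `parameters` stand in for objects built elsewhere):
//
//     objective = Optimizer.Objective.makeFromLikelihood(likelihood, prior)
//     optimizer = Optimizer(objective, parameters, Optimizer.Control())
//     optimizer.run()
//     best = optimizer.getParameters()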
PYBIND11_MODULE(optimizer, mod) {
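    // Importing the modules that wrap Model, Likelihood, and Prior ensures those types are
    // registered with pybind11 before they appear in signatures defined here.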
    py::module::import("lsst.meas.modelfit.model");
    py::module::import("lsst.meas.modelfit.likelihood");
    py::module::import("lsst.meas.modelfit.priors");

    auto clsObjective = declareOptimizerObjective(mod);
    auto clsControl = declareOptimizerControl(mod);
    auto clsHistoryRecorder = declareOptimizerHistoryRecorder(mod);
    auto cls = declareOptimizer(mod);
    cls.attr("Objective") = clsObjective;
    cls.attr("Control") = clsControl;
    cls.attr("HistoryRecorder") = clsHistoryRecorder;

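    // solveTrustRegion solves a symmetric quadratic matrix equation with a ball constraint
    // (the trust-region subproblem used by the optimizer).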
    mod.def("solveTrustRegion", &solveTrustRegion, "x"_a, "F"_a, "g"_a, "r"_a, "tolerance"_a);
}

}  // namespace
}  // namespace modelfit
}  // namespace meas
}  // namespace lsst