optimizer.cc
// -*- lsst-c++ -*-
/*
 * LSST Data Management System
 * Copyright 2008-2013 LSST Corporation.
 *
 * This product includes software developed by the
 * LSST Project (http://www.lsst.org/).
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the LSST License Statement and
 * the GNU General Public License along with this program. If not,
 * see <http://www.lsstcorp.org/LegalNotices/>.
 */

#include "pybind11/pybind11.h"

#include "ndarray/pybind11.h"

#include "lsst/pex/config/python.h"

// meas_modelfit headers declaring the wrapped types (Optimizer, OptimizerControl,
// Model, Likelihood, Prior).
#include "lsst/meas/modelfit/optimizer.h"
#include "lsst/meas/modelfit/Model.h"
#include "lsst/meas/modelfit/Likelihood.h"
#include "lsst/meas/modelfit/Prior.h"

#include "lsst/afw/table/Catalog.h"

namespace py = pybind11;
using namespace pybind11::literals;

namespace lsst {
namespace meas {
namespace modelfit {
namespace {

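// Aliases for the pybind11 wrapper types; each uses std::shared_ptr as its holder so
// Python objects share ownership with C++ code.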
using PyOptimizerObjective = py::class_<OptimizerObjective, std::shared_ptr<OptimizerObjective>>;
using PyOptimizerControl = py::class_<OptimizerControl, std::shared_ptr<OptimizerControl>>;
using PyOptimizerHistoryRecorder =
        py::class_<OptimizerHistoryRecorder, std::shared_ptr<OptimizerHistoryRecorder>>;
using PyOptimizer = py::class_<Optimizer, std::shared_ptr<Optimizer>>;

static PyOptimizerObjective declareOptimizerObjective(py::module &mod) {
    PyOptimizerObjective cls(mod, "OptimizerObjective");
    // Class is abstract and not subclassable in Python, so we don't wrap a constructor.
    cls.def_readonly("dataSize", &OptimizerObjective::dataSize);
    cls.def_readonly("parameterSize", &OptimizerObjective::parameterSize);
    cls.def_static("makeFromLikelihood", &OptimizerObjective::makeFromLikelihood, "likelihood"_a,
                   "prior"_a = nullptr);
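    // Python code therefore typically obtains instances through the static factory wrapped
    // above, e.g. OptimizerObjective.makeFromLikelihood(likelihood, prior=None).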
    cls.def("fillObjectiveValueGrid", &OptimizerObjective::fillObjectiveValueGrid, "parameters"_a,
            "output"_a);
    cls.def("computeResiduals", &OptimizerObjective::computeResiduals, "parameters"_a, "residuals"_a);
    cls.def("differentiateResiduals", &OptimizerObjective::differentiateResiduals, "parameters"_a,
            "derivatives"_a);
    cls.def("hasPrior", &OptimizerObjective::hasPrior);
    cls.def("computePrior", &OptimizerObjective::computePrior, "parameters"_a);
    cls.def("differentiatePrior", &OptimizerObjective::differentiatePrior, "parameters"_a, "gradient"_a,
            "hessian"_a);
    return cls;
}

static PyOptimizerControl declareOptimizerControl(py::module &mod) {
    PyOptimizerControl cls(mod, "OptimizerControl");
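    // LSST_DECLARE_CONTROL_FIELD (from lsst/pex/config/python.h) wraps a field declared
    // with LSST_CONTROL_FIELD on the C++ control struct, exposing it to Python.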
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, noSR1Term);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, skipSR1UpdateThreshold);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, minTrustRadiusThreshold);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, gradientThreshold);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffRelStep);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffAbsStep);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffTrustRadiusStep);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, stepAcceptThreshold);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionInitialSize);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowReductionRatio);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowStepFraction);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowFactor);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionShrinkReductionRatio);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionShrinkFactor);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionSolverTolerance);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, maxInnerIterations);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, maxOuterIterations);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, doSaveIterations);
    cls.def(py::init<>());
    return cls;
}

static PyOptimizerHistoryRecorder declareOptimizerHistoryRecorder(py::module &mod) {
    PyOptimizerHistoryRecorder cls(mod, "OptimizerHistoryRecorder");
    cls.def(py::init<afw::table::Schema &, std::shared_ptr<Model>, bool>(), "schema"_a, "model"_a,
            "doRecordDerivatives"_a);
    cls.def(py::init<afw::table::Schema const &>(), "schema"_a);
    cls.def("apply", &OptimizerHistoryRecorder::apply, "outerIterCount"_a, "innerIterCount"_a, "history"_a,
            "optimizer"_a);
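    // The explicit member-function-pointer casts below select which unpackDerivatives
    // overload each binding wraps.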
    cls.def("unpackDerivatives",
            (void (OptimizerHistoryRecorder::*)(ndarray::Array<Scalar const, 1, 1> const &,
                                                ndarray::Array<Scalar, 1, 1> const &,
                                                ndarray::Array<Scalar, 2, 2> const &) const) &
                    OptimizerHistoryRecorder::unpackDerivatives,
            "nested"_a, "gradient"_a, "hessian"_a);
    cls.def("unpackDerivatives",
            (void (OptimizerHistoryRecorder::*)(afw::table::BaseRecord const &,
                                                ndarray::Array<Scalar, 1, 1> const &,
                                                ndarray::Array<Scalar, 2, 2> const &) const) &
                    OptimizerHistoryRecorder::unpackDerivatives,
            "record"_a, "gradient"_a, "hessian"_a);
    // Other unpackDerivatives overloads do the same thing but with Eigen types,
    // which makes them redundant in Python where it's all just NumPy.
    cls.def("fillObjectiveModelGrid", &OptimizerHistoryRecorder::fillObjectiveModelGrid, "record"_a,
            "parameters"_a, "output"_a);
    cls.def_readonly("outer", &OptimizerHistoryRecorder::outer);
    cls.def_readonly("inner", &OptimizerHistoryRecorder::inner);
    cls.def_readonly("state", &OptimizerHistoryRecorder::state);
    cls.def_readonly("objective", &OptimizerHistoryRecorder::objective);
    cls.def_readonly("prior", &OptimizerHistoryRecorder::prior);
    cls.def_readonly("trust", &OptimizerHistoryRecorder::trust);
    cls.def_readonly("parameters", &OptimizerHistoryRecorder::parameters);
    cls.def_readonly("derivatives", &OptimizerHistoryRecorder::derivatives);
    return cls;
}

static PyOptimizer declareOptimizer(py::module &mod) {
    PyOptimizer cls(mod, "Optimizer");
    // The StateFlags enum is used as a bitflag, so we wrap its values as int class attributes.
    cls.attr("CONVERGED_GRADZERO") = py::cast(int(Optimizer::CONVERGED_GRADZERO));
    cls.attr("CONVERGED_TR_SMALL") = py::cast(int(Optimizer::CONVERGED_TR_SMALL));
    cls.attr("CONVERGED") = py::cast(int(Optimizer::CONVERGED));
    cls.attr("FAILED_MAX_INNER_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_INNER_ITERATIONS));
    cls.attr("FAILED_MAX_OUTER_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_OUTER_ITERATIONS));
    cls.attr("FAILED_MAX_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_ITERATIONS));
    cls.attr("FAILED_EXCEPTION") = py::cast(int(Optimizer::FAILED_EXCEPTION));
    cls.attr("FAILED_NAN") = py::cast(int(Optimizer::FAILED_NAN));
    cls.attr("FAILED") = py::cast(int(Optimizer::FAILED));
    cls.attr("STATUS_STEP_REJECTED") = py::cast(int(Optimizer::STATUS_STEP_REJECTED));
    cls.attr("STATUS_STEP_ACCEPTED") = py::cast(int(Optimizer::STATUS_STEP_ACCEPTED));
    cls.attr("STATUS_STEP") = py::cast(int(Optimizer::STATUS_STEP));
    cls.attr("STATUS_TR_UNCHANGED") = py::cast(int(Optimizer::STATUS_TR_UNCHANGED));
    cls.attr("STATUS_TR_DECREASED") = py::cast(int(Optimizer::STATUS_TR_DECREASED));
    cls.attr("STATUS_TR_INCREASED") = py::cast(int(Optimizer::STATUS_TR_INCREASED));
    cls.attr("STATUS_TR") = py::cast(int(Optimizer::STATUS_TR));
    cls.attr("STATUS") = py::cast(int(Optimizer::STATUS));
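    // On the Python side these combine with bitwise operators; for example, a check such as
    // (opt.getState() & Optimizer.CONVERGED) != 0 tests whether the fit converged.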
    cls.def(py::init<std::shared_ptr<Optimizer::Objective const>, ndarray::Array<Scalar const, 1, 1> const &,
                     Optimizer::Control const &>(),
            "objective"_a, "parameters"_a, "ctrl"_a);
    cls.def("getObjective", &Optimizer::getObjective);
    cls.def("getControl", &Optimizer::getControl, py::return_value_policy::copy);
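    // The overloads of step() and run() that take a HistoryRecorder and an afw BaseCatalog
    // record the optimizer's trajectory into the catalog as it iterates.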
    cls.def("step", (bool (Optimizer::*)()) & Optimizer::step);
    cls.def("step", (bool (Optimizer::*)(Optimizer::HistoryRecorder const &, afw::table::BaseCatalog &)) &
                            Optimizer::step,
            "recorder"_a, "history"_a);
    cls.def("run", (int (Optimizer::*)()) & Optimizer::run);
    cls.def("run", (int (Optimizer::*)(Optimizer::HistoryRecorder const &, afw::table::BaseCatalog &)) &
                           Optimizer::run,
            "recorder"_a, "history"_a);
    cls.def("getState", &Optimizer::getState);
    cls.def("getObjectiveValue", &Optimizer::getObjectiveValue);
    cls.def("getParameters", &Optimizer::getParameters);
    cls.def("getResiduals", &Optimizer::getResiduals);
    cls.def("getGradient", &Optimizer::getGradient);
    cls.def("getHessian", &Optimizer::getHessian);
    cls.def("removeSR1Term", &Optimizer::removeSR1Term);
    return cls;
}

PYBIND11_MODULE(optimizer, mod) {
    py::module::import("lsst.meas.modelfit.model");
    py::module::import("lsst.meas.modelfit.likelihood");
    py::module::import("lsst.meas.modelfit.priors");
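    // Importing these modules first ensures the wrappers for the Model, Likelihood, and
    // Prior types are registered before the signatures below refer to them.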

    auto clsObjective = declareOptimizerObjective(mod);
    auto clsControl = declareOptimizerControl(mod);
    auto clsHistoryRecorder = declareOptimizerHistoryRecorder(mod);
    auto cls = declareOptimizer(mod);
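    // Attach the helper classes as attributes of Optimizer so Python can spell them
    // Optimizer.Objective, Optimizer.Control, and Optimizer.HistoryRecorder, mirroring
    // the nested typedefs used in the C++ signatures above.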
    cls.attr("Objective") = clsObjective;
    cls.attr("Control") = clsControl;
    cls.attr("HistoryRecorder") = clsHistoryRecorder;

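    // solveTrustRegion solves a symmetric quadratic matrix equation with a ball constraint.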
    mod.def("solveTrustRegion", &solveTrustRegion, "x"_a, "F"_a, "g"_a, "r"_a, "tolerance"_a);
}

}  // namespace
}  // namespace modelfit
}  // namespace meas
}  // namespace lsst