LSST Data Management Base Package
optimizer.cc
// -*- lsst-c++ -*-
/*
 * LSST Data Management System
 * Copyright 2008-2013 LSST Corporation.
 *
 * This product includes software developed by the
 * LSST Project (http://www.lsst.org/).
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the LSST License Statement and
 * the GNU General Public License along with this program. If not,
 * see <http://www.lsstcorp.org/LegalNotices/>.
 */

#include "pybind11/pybind11.h"

#include "ndarray/pybind11.h"

#include "lsst/pex/config/python.h"

#include "lsst/meas/modelfit/optimizer.h"
#include "lsst/meas/modelfit/Model.h"
#include "lsst/meas/modelfit/Likelihood.h"
#include "lsst/meas/modelfit/Prior.h"
#include "lsst/afw/table/Catalog.h"

namespace py = pybind11;
using namespace pybind11::literals;

namespace lsst {
namespace meas {
namespace modelfit {
namespace {

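// Convenience aliases for the pybind11 wrapper classes; each class is held by
// std::shared_ptr so instances can be shared between C++ and Python without copying.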
using PyOptimizerObjective = py::class_<OptimizerObjective, std::shared_ptr<OptimizerObjective>>;
using PyOptimizerControl = py::class_<OptimizerControl, std::shared_ptr<OptimizerControl>>;
using PyOptimizerHistoryRecorder =
        py::class_<OptimizerHistoryRecorder, std::shared_ptr<OptimizerHistoryRecorder>>;
using PyOptimizer = py::class_<Optimizer, std::shared_ptr<Optimizer>>;

static PyOptimizerObjective declareOptimizerObjective(py::module &mod) {
    PyOptimizerObjective cls(mod, "OptimizerObjective");
    // Class is abstract and not subclassable in Python, so we do not wrap the constructor.
    cls.def_readonly("dataSize", &OptimizerObjective::dataSize);
    cls.def_readonly("parameterSize", &OptimizerObjective::parameterSize);
    cls.def_static("makeFromLikelihood", &OptimizerObjective::makeFromLikelihood, "likelihood"_a,
                   "prior"_a = nullptr);
    cls.def("fillObjectiveValueGrid", &OptimizerObjective::fillObjectiveValueGrid, "parameters"_a,
            "output"_a);
    cls.def("computeResiduals", &OptimizerObjective::computeResiduals, "parameters"_a, "residuals"_a);
    cls.def("differentiateResiduals", &OptimizerObjective::differentiateResiduals, "parameters"_a,
            "derivatives"_a);
    cls.def("hasPrior", &OptimizerObjective::hasPrior);
    cls.def("computePrior", &OptimizerObjective::computePrior, "parameters"_a);
    cls.def("differentiatePrior", &OptimizerObjective::differentiatePrior, "parameters"_a, "gradient"_a,
            "hessian"_a);
    return cls;
}

static PyOptimizerControl declareOptimizerControl(py::module &mod) {
    PyOptimizerControl cls(mod, "OptimizerControl");
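    // LSST_DECLARE_CONTROL_FIELD (from lsst/pex/config/python.h) wraps a field
    // declared with LSST_CONTROL_FIELD so it is accessible from Python.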
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, noSR1Term);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, skipSR1UpdateThreshold);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, minTrustRadiusThreshold);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, gradientThreshold);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffRelStep);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffAbsStep);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffTrustRadiusStep);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, stepAcceptThreshold);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionInitialSize);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowReductionRatio);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowStepFraction);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowFactor);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionShrinkReductionRatio);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionShrinkFactor);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionSolverTolerance);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, maxInnerIterations);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, maxOuterIterations);
    LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, doSaveIterations);
    cls.def(py::init<>());
    return cls;
}

static PyOptimizerHistoryRecorder declareOptimizerHistoryRecorder(py::module &mod) {
    PyOptimizerHistoryRecorder cls(mod, "OptimizerHistoryRecorder");
    cls.def(py::init<afw::table::Schema &, std::shared_ptr<Model>, bool>(), "schema"_a, "model"_a,
            "doRecordDerivatives"_a);
    cls.def(py::init<afw::table::Schema const &>(), "schema"_a);
    cls.def("apply", &OptimizerHistoryRecorder::apply, "outerIterCount"_a, "innerIterCount"_a, "history"_a,
            "optimizer"_a);
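    // The casts to member-function-pointer types below pick out specific
    // unpackDerivatives overloads for pybind11 to wrap.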
    cls.def("unpackDerivatives",
            (void (OptimizerHistoryRecorder::*)(ndarray::Array<Scalar const, 1, 1> const &,
                                                ndarray::Array<Scalar, 1, 1> const &,
                                                ndarray::Array<Scalar, 2, 2> const &) const) &
                    OptimizerHistoryRecorder::unpackDerivatives,
            "nested"_a, "gradient"_a, "hessian"_a);
    cls.def("unpackDerivatives",
            (void (OptimizerHistoryRecorder::*)(afw::table::BaseRecord const &,
                                                ndarray::Array<Scalar, 1, 1> const &,
                                                ndarray::Array<Scalar, 2, 2> const &) const) &
                    OptimizerHistoryRecorder::unpackDerivatives,
            "record"_a, "gradient"_a, "hessian"_a);
    // The other unpackDerivatives overloads do the same thing but with Eigen types,
    // which makes them redundant in Python where it's all just NumPy.
    cls.def("fillObjectiveModelGrid", &OptimizerHistoryRecorder::fillObjectiveModelGrid, "record"_a,
            "parameters"_a, "output"_a);
    cls.def_readonly("outer", &OptimizerHistoryRecorder::outer);
    cls.def_readonly("inner", &OptimizerHistoryRecorder::inner);
    cls.def_readonly("state", &OptimizerHistoryRecorder::state);
    cls.def_readonly("objective", &OptimizerHistoryRecorder::objective);
    cls.def_readonly("prior", &OptimizerHistoryRecorder::prior);
    cls.def_readonly("trust", &OptimizerHistoryRecorder::trust);
    cls.def_readonly("parameters", &OptimizerHistoryRecorder::parameters);
    cls.def_readonly("derivatives", &OptimizerHistoryRecorder::derivatives);
    return cls;
}

static PyOptimizer declareOptimizer(py::module &mod) {
    PyOptimizer cls(mod, "Optimizer");
    // The StateFlags enum is used as a bitflag, so we wrap its values as int class attributes.
    cls.attr("CONVERGED_GRADZERO") = py::cast(int(Optimizer::CONVERGED_GRADZERO));
    cls.attr("CONVERGED_TR_SMALL") = py::cast(int(Optimizer::CONVERGED_TR_SMALL));
    cls.attr("CONVERGED") = py::cast(int(Optimizer::CONVERGED));
    cls.attr("FAILED_MAX_INNER_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_INNER_ITERATIONS));
    cls.attr("FAILED_MAX_OUTER_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_OUTER_ITERATIONS));
    cls.attr("FAILED_MAX_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_ITERATIONS));
    cls.attr("FAILED_EXCEPTION") = py::cast(int(Optimizer::FAILED_EXCEPTION));
    cls.attr("FAILED_NAN") = py::cast(int(Optimizer::FAILED_NAN));
    cls.attr("FAILED") = py::cast(int(Optimizer::FAILED));
    cls.attr("STATUS_STEP_REJECTED") = py::cast(int(Optimizer::STATUS_STEP_REJECTED));
    cls.attr("STATUS_STEP_ACCEPTED") = py::cast(int(Optimizer::STATUS_STEP_ACCEPTED));
    cls.attr("STATUS_STEP") = py::cast(int(Optimizer::STATUS_STEP));
    cls.attr("STATUS_TR_UNCHANGED") = py::cast(int(Optimizer::STATUS_TR_UNCHANGED));
    cls.attr("STATUS_TR_DECREASED") = py::cast(int(Optimizer::STATUS_TR_DECREASED));
    cls.attr("STATUS_TR_INCREASED") = py::cast(int(Optimizer::STATUS_TR_INCREASED));
    cls.attr("STATUS_TR") = py::cast(int(Optimizer::STATUS_TR));
    cls.attr("STATUS") = py::cast(int(Optimizer::STATUS));
    cls.def(py::init<std::shared_ptr<Optimizer::Objective const>, ndarray::Array<Scalar const, 1, 1> const &,
                     Optimizer::Control const &>(),
            "objective"_a, "parameters"_a, "ctrl"_a);
    cls.def("getObjective", &Optimizer::getObjective);
    cls.def("getControl", &Optimizer::getControl, py::return_value_policy::copy);
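    // step() and run() each have a second overload that records per-iteration
    // state into an afw::table catalog via a HistoryRecorder; the casts below
    // select each signature explicitly.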
    cls.def("step", (bool (Optimizer::*)()) & Optimizer::step);
    cls.def("step", (bool (Optimizer::*)(Optimizer::HistoryRecorder const &, afw::table::BaseCatalog &)) &
                            Optimizer::step,
            "recorder"_a, "history"_a);
    cls.def("run", (int (Optimizer::*)()) & Optimizer::run);
    cls.def("run", (int (Optimizer::*)(Optimizer::HistoryRecorder const &, afw::table::BaseCatalog &)) &
                           Optimizer::run,
            "recorder"_a, "history"_a);
    cls.def("getState", &Optimizer::getState);
    cls.def("getObjectiveValue", &Optimizer::getObjectiveValue);
    cls.def("getParameters", &Optimizer::getParameters);
    cls.def("getResiduals", &Optimizer::getResiduals);
    cls.def("getGradient", &Optimizer::getGradient);
    cls.def("getHessian", &Optimizer::getHessian);
    cls.def("removeSR1Term", &Optimizer::removeSR1Term);
    return cls;
}
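
/*
 * Rough sketch of how these bindings can be used from Python (assumes a
 * Likelihood instance `likelihood`, an optional Prior `prior`, and a NumPy
 * array `initialParameters` have been built elsewhere; those names are
 * illustrative, not part of this module):
 *
 *     from lsst.meas.modelfit import optimizer
 *
 *     objective = optimizer.Optimizer.Objective.makeFromLikelihood(likelihood, prior)
 *     ctrl = optimizer.Optimizer.Control()
 *     opt = optimizer.Optimizer(objective, initialParameters, ctrl)
 *     state = opt.run()
 *     if state & optimizer.Optimizer.CONVERGED:
 *         best = opt.getParameters()
 */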

PYBIND11_MODULE(optimizer, mod) {
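    // Import the modules that wrap Model, Likelihood, and Prior first so that
    // their pybind11 types are registered before being used in the signatures below.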
    py::module::import("lsst.meas.modelfit.model");
    py::module::import("lsst.meas.modelfit.likelihood");
    py::module::import("lsst.meas.modelfit.priors");

    auto clsObjective = declareOptimizerObjective(mod);
    auto clsControl = declareOptimizerControl(mod);
    auto clsHistoryRecorder = declareOptimizerHistoryRecorder(mod);
    auto cls = declareOptimizer(mod);
    cls.attr("Objective") = clsObjective;
    cls.attr("Control") = clsControl;
    cls.attr("HistoryRecorder") = clsHistoryRecorder;

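    // solveTrustRegion solves a symmetric quadratic matrix equation with a ball
    // constraint, i.e. the trust-region subproblem used by the optimizer.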
    mod.def("solveTrustRegion", &solveTrustRegion, "x"_a, "F"_a, "g"_a, "r"_a, "tolerance"_a);
}

}
}
}
}  // namespace lsst::meas::modelfit::<anonymous>