LSSTApplications  19.0.0-14-gb0260a2+72efe9b372,20.0.0+7927753e06,20.0.0+8829bf0056,20.0.0+995114c5d2,20.0.0+b6f4b2abd1,20.0.0+bddc4f4cbe,20.0.0-1-g253301a+8829bf0056,20.0.0-1-g2b7511a+0d71a2d77f,20.0.0-1-g5b95a8c+7461dd0434,20.0.0-12-g321c96ea+23efe4bbff,20.0.0-16-gfab17e72e+fdf35455f6,20.0.0-2-g0070d88+ba3ffc8f0b,20.0.0-2-g4dae9ad+ee58a624b3,20.0.0-2-g61b8584+5d3db074ba,20.0.0-2-gb780d76+d529cf1a41,20.0.0-2-ged6426c+226a441f5f,20.0.0-2-gf072044+8829bf0056,20.0.0-2-gf1f7952+ee58a624b3,20.0.0-20-geae50cf+e37fec0aee,20.0.0-25-g3dcad98+544a109665,20.0.0-25-g5eafb0f+ee58a624b3,20.0.0-27-g64178ef+f1f297b00a,20.0.0-3-g4cc78c6+e0676b0dc8,20.0.0-3-g8f21e14+4fd2c12c9a,20.0.0-3-gbd60e8c+187b78b4b8,20.0.0-3-gbecbe05+48431fa087,20.0.0-38-ge4adf513+a12e1f8e37,20.0.0-4-g97dc21a+544a109665,20.0.0-4-gb4befbc+087873070b,20.0.0-4-gf910f65+5d3db074ba,20.0.0-5-gdfe0fee+199202a608,20.0.0-5-gfbfe500+d529cf1a41,20.0.0-6-g64f541c+d529cf1a41,20.0.0-6-g9a5b7a1+a1cd37312e,20.0.0-68-ga3f3dda+5fca18c6a4,20.0.0-9-g4aef684+e18322736b,w.2020.45
LSSTDataManagementBasePackage
optimizer.cc
Go to the documentation of this file.
1 // -*- lsst-c++ -*-
2 /*
3  * LSST Data Management System
4  * Copyright 2008-2013 LSST Corporation.
5  *
6  * This product includes software developed by the
7  * LSST Project (http://www.lsst.org/).
8  *
9  * This program is free software: you can redistribute it and/or modify
10  * it under the terms of the GNU General Public License as published by
11  * the Free Software Foundation, either version 3 of the License, or
12  * (at your option) any later version.
13  *
14  * This program is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17  * GNU General Public License for more details.
18  *
19  * You should have received a copy of the LSST License Statement and
20  * the GNU General Public License along with this program. If not,
21  * see <http://www.lsstcorp.org/LegalNotices/>.
22  */
23 
#include "pybind11/pybind11.h"

#include "ndarray/pybind11.h"

#include "lsst/pex/config/python.h"

#include "lsst/meas/modelfit/optimizer.h"
#include "lsst/meas/modelfit/Model.h"
#include "lsst/meas/modelfit/Likelihood.h"
#include "lsst/meas/modelfit/Prior.h"
#include "lsst/afw/table/Catalog.h"
35 
36 namespace py = pybind11;
37 using namespace pybind11::literals;
38 
39 namespace lsst {
40 namespace meas {
41 namespace modelfit {
42 namespace {
43 
44 using PyOptimizerObjective = py::class_<OptimizerObjective, std::shared_ptr<OptimizerObjective>>;
45 using PyOptimizerControl = py::class_<OptimizerControl, std::shared_ptr<OptimizerControl>>;
46 using PyOptimizerHistoryRecorder =
47  py::class_<OptimizerHistoryRecorder, std::shared_ptr<OptimizerHistoryRecorder>>;
48 using PyOptimizer = py::class_<Optimizer, std::shared_ptr<Optimizer>>;
49 
50 static PyOptimizerObjective declareOptimizerObjective(py::module &mod) {
51  PyOptimizerObjective cls(mod, "OptimizerObjective");
52  // Class is abstract, so no constructor.
53  cls.def_readonly("dataSize", &OptimizerObjective::dataSize);
54  cls.def_readonly("parameterSize", &OptimizerObjective::parameterSize);
55  cls.def_static("makeFromLikelihood", &OptimizerObjective::makeFromLikelihood, "likelihood"_a,
56  "prior"_a = nullptr);
57  // class is abstract and not subclassable in Python, so we don't wrap the ctor
58  cls.def("fillObjectiveValueGrid", &OptimizerObjective::fillObjectiveValueGrid, "parameters"_a,
59  "output"_a);
60  cls.def("computeResiduals", &OptimizerObjective::computeResiduals, "parameters"_a, "residuals"_a);
61  cls.def("differentiateResiduals", &OptimizerObjective::differentiateResiduals, "parameters"_a,
62  "derivatives"_a);
63  cls.def("hasPrior", &OptimizerObjective::hasPrior);
64  cls.def("computePrior", &OptimizerObjective::computePrior, "parameters"_a);
65  cls.def("differentiatePrior", &OptimizerObjective::differentiatePrior, "parameters"_a, "gradient"_a,
66  "hessian"_a);
67  return cls;
68 }
69 
70 static PyOptimizerControl declareOptimizerControl(py::module &mod) {
71  PyOptimizerControl cls(mod, "OptimizerControl");
72  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, noSR1Term);
73  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, skipSR1UpdateThreshold);
74  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, minTrustRadiusThreshold);
75  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, gradientThreshold);
76  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffRelStep);
77  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffAbsStep);
78  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, numDiffTrustRadiusStep);
79  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, stepAcceptThreshold);
80  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionInitialSize);
81  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowReductionRatio);
82  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowStepFraction);
83  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionGrowFactor);
84  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionShrinkReductionRatio);
85  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionShrinkFactor);
86  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, trustRegionSolverTolerance);
87  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, maxInnerIterations);
88  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, maxOuterIterations);
89  LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, doSaveIterations);
90  cls.def(py::init<>());
91  return cls;
92 }
93 
94 static PyOptimizerHistoryRecorder declareOptimizerHistoryRecorder(py::module &mod) {
95  PyOptimizerHistoryRecorder cls(mod, "OptimizerHistoryRecorder");
96  cls.def(py::init<afw::table::Schema &, std::shared_ptr<Model>, bool>(), "schema"_a, "model"_a,
97  "doRecordDerivatives"_a);
98  cls.def(py::init<afw::table::Schema const &>(), "schema"_a);
99  cls.def("apply", &OptimizerHistoryRecorder::apply, "outerIterCount"_a, "innerIterCount"_a, "history"_a,
100  "optimizer"_a);
101  cls.def("unpackDerivatives",
102  (void (OptimizerHistoryRecorder::*)(ndarray::Array<Scalar const, 1, 1> const &,
103  ndarray::Array<Scalar, 1, 1> const &,
104  ndarray::Array<Scalar, 2, 2> const &) const) &
105  OptimizerHistoryRecorder::unpackDerivatives,
106  "nested"_a, "gradient"_a, "hessian"_a);
107  cls.def("unpackDerivatives", (void (OptimizerHistoryRecorder::*)(
108  afw::table::BaseRecord const &, ndarray::Array<Scalar, 1, 1> const &,
109  ndarray::Array<Scalar, 2, 2> const &) const) &
110  OptimizerHistoryRecorder::unpackDerivatives,
111  "record"_a, "gradient"_a, "hessian"_a);
112  // Other unpackDerivatives overloads do the same thing but with Eigen types,
113  // which makes them redundant in Python where it's all just NumPy.
114  cls.def("fillObjectiveModelGrid", &OptimizerHistoryRecorder::fillObjectiveModelGrid, "record"_a,
115  "parameters"_a, "output"_a);
116  cls.def_readonly("outer", &OptimizerHistoryRecorder::outer);
117  cls.def_readonly("inner", &OptimizerHistoryRecorder::inner);
118  cls.def_readonly("state", &OptimizerHistoryRecorder::state);
119  cls.def_readonly("objective", &OptimizerHistoryRecorder::objective);
120  cls.def_readonly("prior", &OptimizerHistoryRecorder::prior);
121  cls.def_readonly("trust", &OptimizerHistoryRecorder::trust);
122  cls.def_readonly("parameters", &OptimizerHistoryRecorder::parameters);
123  cls.def_readonly("derivatives", &OptimizerHistoryRecorder::derivatives);
124  return cls;
125 }
126 
127 static PyOptimizer declareOptimizer(py::module &mod) {
128  PyOptimizer cls(mod, "Optimizer");
129  // StateFlags enum is used as bitflag, so we wrap values as int class attributes.
130  cls.attr("CONVERGED_GRADZERO") = py::cast(int(Optimizer::CONVERGED_GRADZERO));
131  cls.attr("CONVERGED_TR_SMALL") = py::cast(int(Optimizer::CONVERGED_TR_SMALL));
132  cls.attr("CONVERGED") = py::cast(int(Optimizer::CONVERGED));
133  cls.attr("FAILED_MAX_INNER_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_INNER_ITERATIONS));
134  cls.attr("FAILED_MAX_OUTER_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_OUTER_ITERATIONS));
135  cls.attr("FAILED_MAX_ITERATIONS") = py::cast(int(Optimizer::FAILED_MAX_ITERATIONS));
136  cls.attr("FAILED_EXCEPTION") = py::cast(int(Optimizer::FAILED_EXCEPTION));
137  cls.attr("FAILED_NAN") = py::cast(int(Optimizer::FAILED_NAN));
138  cls.attr("FAILED") = py::cast(int(Optimizer::FAILED));
139  cls.attr("STATUS_STEP_REJECTED") = py::cast(int(Optimizer::STATUS_STEP_REJECTED));
140  cls.attr("STATUS_STEP_ACCEPTED") = py::cast(int(Optimizer::STATUS_STEP_ACCEPTED));
141  cls.attr("STATUS_STEP") = py::cast(int(Optimizer::STATUS_STEP));
142  cls.attr("STATUS_TR_UNCHANGED") = py::cast(int(Optimizer::STATUS_TR_UNCHANGED));
143  cls.attr("STATUS_TR_DECREASED") = py::cast(int(Optimizer::STATUS_TR_DECREASED));
144  cls.attr("STATUS_TR_INCREASED") = py::cast(int(Optimizer::STATUS_TR_INCREASED));
145  cls.attr("STATUS_TR") = py::cast(int(Optimizer::STATUS_TR));
146  cls.attr("STATUS") = py::cast(int(Optimizer::STATUS));
147  cls.def(py::init<std::shared_ptr<Optimizer::Objective const>, ndarray::Array<Scalar const, 1, 1> const &,
149  "objective"_a, "parameters"_a, "ctrl"_a);
150  cls.def("getObjective", &Optimizer::getObjective);
151  cls.def("getControl", &Optimizer::getControl, py::return_value_policy::copy);
152  cls.def("step", (bool (Optimizer::*)()) & Optimizer::step);
153  cls.def("step", (bool (Optimizer::*)(Optimizer::HistoryRecorder const &, afw::table::BaseCatalog &)) &
155  "recorder"_a, "history"_a);
156  cls.def("run", (int (Optimizer::*)()) & Optimizer::run);
157  cls.def("run", (int (Optimizer::*)(Optimizer::HistoryRecorder const &, afw::table::BaseCatalog &)) &
159  "recorder"_a, "history"_a);
160  cls.def("getState", &Optimizer::getState);
161  cls.def("getObjectiveValue", &Optimizer::getObjectiveValue);
162  cls.def("getParameters", &Optimizer::getParameters);
163  cls.def("getResiduals", &Optimizer::getResiduals);
164  cls.def("getGradient", &Optimizer::getGradient);
165  cls.def("getHessian", &Optimizer::getHessian);
166  cls.def("removeSR1Term", &Optimizer::removeSR1Term);
167  return cls;
168 }
169 
170 PYBIND11_MODULE(optimizer, mod) {
171  py::module::import("lsst.meas.modelfit.model");
172  py::module::import("lsst.meas.modelfit.likelihood");
173  py::module::import("lsst.meas.modelfit.priors");
174 
175  auto clsObjective = declareOptimizerObjective(mod);
176  auto clsControl = declareOptimizerControl(mod);
177  auto clsHistoryRecorder = declareOptimizerHistoryRecorder(mod);
178  auto cls = declareOptimizer(mod);
179  cls.attr("Objective") = clsObjective;
180  cls.attr("Control") = clsControl;
181  cls.attr("HistoryRecorder") = clsHistoryRecorder;
182 
183  mod.def("solveTrustRegion", &solveTrustRegion, "x"_a, "F"_a, "g"_a, "r"_a, "tolerance"_a);
184 }
185 
186 }
187 }
188 }
189 } // namespace lsst::meas::modelfit::anonymous
Prior.h
std::shared_ptr
STL class.
Likelihood.h
lsst::afw::geom.transform.transformContinued.cls
cls
Definition: transformContinued.py:33
lsst.pipe.tasks.assembleCoadd.run
def run(self, skyInfo, tempExpRefList, imageScalerList, weightList, altMaskList=None, mask=None, supplementaryData=None)
Definition: assembleCoadd.py:720
step
int const step
Definition: BoundedField.cc:102
python.h
lsst::meas::modelfit::solveTrustRegion
void solveTrustRegion(ndarray::Array< Scalar, 1, 1 > const &x, ndarray::Array< Scalar const, 2, 1 > const &F, ndarray::Array< Scalar const, 1, 1 > const &g, double r, double tolerance)
Solve a symmetric quadratic matrix equation with a ball constraint.
Model.h
lsst
A base class for image defects.
Definition: imageAlgorithm.dox:1
lsst::afw::table::BaseCatalog
CatalogT< BaseRecord > BaseCatalog
Definition: fwd.h:71
optimizer.h
pybind11
Definition: _GenericMap.cc:40
Catalog.h
lsst::utils.tests.init
def init()
Definition: tests.py:59
LSST_DECLARE_CONTROL_FIELD
#define LSST_DECLARE_CONTROL_FIELD(WRAPPER, CLASS, NAME)
Macro used to wrap fields declared by LSST_CONTROL_FIELD using Pybind11.
Definition: python.h:50
lsst::ip::diffim.Control
Control
Definition: __init__.py:49
lsst::meas::modelfit.psf.psfContinued.module
module
Definition: psfContinued.py:42
lsst::afw::cameraGeom::PYBIND11_MODULE
PYBIND11_MODULE(camera, mod)
Definition: camera.cc:133