#include "pybind11/pybind11.h"
#include "ndarray/pybind11.h"

#include "lsst/meas/modelfit/optimizer.h"
namespace py = pybind11;
using namespace pybind11::literals;

using PyOptimizerObjective = py::class_<OptimizerObjective, std::shared_ptr<OptimizerObjective>>;
using PyOptimizerControl = py::class_<OptimizerControl, std::shared_ptr<OptimizerControl>>;
using PyOptimizerHistoryRecorder =
        py::class_<OptimizerHistoryRecorder, std::shared_ptr<OptimizerHistoryRecorder>>;
using PyOptimizer = py::class_<Optimizer, std::shared_ptr<Optimizer>>;
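
// Each alias is a pybind11 wrapper whose holder type is std::shared_ptr, so instances passed
// across the Python/C++ boundary share ownership with any C++ code holding the same shared_ptr.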

static PyOptimizerObjective declareOptimizerObjective(py::module &mod) {
    PyOptimizerObjective cls(mod, "OptimizerObjective");
    // ...
    return cls;
}

static PyOptimizerControl declareOptimizerControl(py::module &mod) {
    PyOptimizerControl cls(mod, "OptimizerControl");
    // ...
    cls.def(py::init<>());
    // ...
    return cls;
}
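
// The control-field wrappers elided above are conventionally generated with the
// LSST_DECLARE_CONTROL_FIELD(WRAPPER, CLASS, NAME) macro, which wraps fields declared by
// LSST_CONTROL_FIELD for pybind11.  A minimal sketch, assuming OptimizerControl has a field
// named maxOuterIterations (the field name is illustrative, not taken from this excerpt):
//
//     LSST_DECLARE_CONTROL_FIELD(cls, OptimizerControl, maxOuterIterations);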

static PyOptimizerHistoryRecorder declareOptimizerHistoryRecorder(py::module &mod) {
    PyOptimizerHistoryRecorder cls(mod, "OptimizerHistoryRecorder");
    // ...
            "doRecordDerivatives"_a);
    cls.def(py::init<afw::table::Schema const &>(), "schema"_a);
    // ...
    cls.def("unpackDerivatives",
            (void (OptimizerHistoryRecorder::*)(ndarray::Array<Scalar const, 1, 1> const &,
                                                ndarray::Array<Scalar, 1, 1> const &,
                                                ndarray::Array<Scalar, 2, 2> const &) const) &
                    OptimizerHistoryRecorder::unpackDerivatives,
            "nested"_a, "gradient"_a, "hessian"_a);
    cls.def("unpackDerivatives",
            (void (OptimizerHistoryRecorder::*)(afw::table::BaseRecord const &,
                                                ndarray::Array<Scalar, 1, 1> const &,
                                                ndarray::Array<Scalar, 2, 2> const &) const) &
                    OptimizerHistoryRecorder::unpackDerivatives,
            "record"_a, "gradient"_a, "hessian"_a);
    // ...
            "parameters"_a, "output"_a);
    return cls;
}

static PyOptimizer declareOptimizer(py::module &mod) {
    PyOptimizer cls(mod, "Optimizer");
    // ...
            "objective"_a, "parameters"_a, "ctrl"_a);
    // ...
            "recorder"_a, "history"_a);
    // ...
            "recorder"_a, "history"_a);
    // ...
    return cls;
}

PYBIND11_MODULE(/* ... */, mod) {
    py::module::import("lsst.meas.modelfit.model");
    py::module::import("lsst.meas.modelfit.likelihood");
    py::module::import("lsst.meas.modelfit.priors");

    auto clsObjective = declareOptimizerObjective(mod);
    auto clsControl = declareOptimizerControl(mod);
    auto clsHistoryRecorder = declareOptimizerHistoryRecorder(mod);
    auto cls = declareOptimizer(mod);
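    // Attaching the helper wrappers as attributes of the Optimizer wrapper exposes them in
    // Python as nested names (Optimizer.Objective, Optimizer.Control, Optimizer.HistoryRecorder),
    // mirroring the nested typedefs on the C++ class.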
    cls.attr("Objective") = clsObjective;
    cls.attr("Control") = clsControl;
    cls.attr("HistoryRecorder") = clsHistoryRecorder;

    // solveTrustRegion solves a symmetric quadratic matrix equation subject to a ball constraint.
    mod.def("solveTrustRegion", &solveTrustRegion, "x"_a, "F"_a, "g"_a, "r"_a, "tolerance"_a);
}