LeastSquares.cc
// -*- LSST-C++ -*-

/*
 * LSST Data Management System
 * Copyright 2008, 2009, 2010, 2011 LSST Corporation.
 *
 * This product includes software developed by the
 * LSST Project (http://www.lsst.org/).
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the LSST License Statement and
 * the GNU General Public License along with this program. If not,
 * see <http://www.lsstcorp.org/LegalNotices/>.
 */

#include "Eigen/Eigenvalues"
#include "Eigen/SVD"
#include "Eigen/Cholesky"
#include "boost/format.hpp"
#include <memory>

#include "lsst/afw/math/LeastSquares.h"
#include "lsst/pex/exceptions.h"
#include "lsst/log/Log.h"

namespace {
LOG_LOGGER _log = LOG_GET("lsst.afw.math.LeastSquares");
}

namespace lsst {
namespace afw {
namespace math {

class LeastSquares::Impl {
public:
    // Bit flags recording which intermediate products have been computed.
    // The exact constants are not shown in this listing and are assumed here;
    // any set of distinct bits works.
    enum StateFlags {
        LOWER_FISHER_MATRIX = 0x001,
        FULL_FISHER_MATRIX = 0x002,
        RHS_VECTOR = 0x004,
        SOLUTION_ARRAY = 0x008,
        COVARIANCE_ARRAY = 0x010,
        DIAGNOSTIC_ARRAY = 0x020,
        DESIGN_AND_DATA = 0x040
    };

    int state;
    int dimension;
    int rank;
    Factorization factorization;
    Factorization whichDiagnostic;
    double threshold;

    Eigen::MatrixXd design;
    Eigen::VectorXd data;
    Eigen::MatrixXd fisher;
    Eigen::VectorXd rhs;

    ndarray::Array<double, 1, 1> solution;
    ndarray::Array<double, 2, 2> covariance;
    ndarray::Array<double, 1, 1> diagnostic;

    template <typename D>
    void setRank(Eigen::MatrixBase<D> const& values) {
        double cond = threshold * values[0];
        if (cond <= 0.0) {
            rank = 0;
        } else {
            for (rank = dimension; (rank > 1) && (values[rank - 1] < cond); --rank)
                ;
        }
    }

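    // Worked example for setRank() above (illustrative values, not from the source):
    // with dimension = 3, threshold = machine epsilon, and values = (4.0, 1.0, 1e-18)
    // in decreasing order, cond ~= 8.9e-16; values[2] falls below cond while values[1]
    // does not, so the loop stops with rank = 2.
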
    void ensure(int desired) {
        if (desired & FULL_FISHER_MATRIX) desired |= LOWER_FISHER_MATRIX;
        int toAdd = ~state & desired;
        if (toAdd & LOWER_FISHER_MATRIX) {
            assert(state & DESIGN_AND_DATA);
            fisher = Eigen::MatrixXd::Zero(design.cols(), design.cols());
            fisher.selfadjointView<Eigen::Lower>().rankUpdate(design.adjoint());
        }
        if (toAdd & FULL_FISHER_MATRIX) {
            fisher.triangularView<Eigen::StrictlyUpper>() = fisher.adjoint();
        }
        if (toAdd & RHS_VECTOR) {
            assert(state & DESIGN_AND_DATA);
            rhs = design.adjoint() * data;
        }
        if (toAdd & SOLUTION_ARRAY) {
            if (solution.isEmpty()) solution = ndarray::allocate(dimension);
            updateSolution();
        }
        if (toAdd & COVARIANCE_ARRAY) {
            if (covariance.isEmpty()) covariance = ndarray::allocate(dimension, dimension);
            updateCovariance();
        }
        if (toAdd & DIAGNOSTIC_ARRAY) {
            if (diagnostic.isEmpty()) diagnostic = ndarray::allocate(dimension);
            updateDiagnostic();
        }
        state |= toAdd;
    }
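
    // For example, getSolution() leads to ensure(SOLUTION_ARRAY): the array is
    // allocated on first use and filled by updateSolution(), and the bit stays set
    // so later calls reuse the cached result until setThreshold() clears it.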

    virtual void factor() = 0;

    virtual void updateRank() = 0;

    virtual void updateSolution() = 0;
    virtual void updateCovariance() = 0;

    virtual void updateDiagnostic() = 0;

    explicit Impl(int dimension_, Factorization factorization_,
                  double threshold_ = std::numeric_limits<double>::epsilon())
            : state(0),
              dimension(dimension_),
              rank(dimension_),
              factorization(factorization_),
              whichDiagnostic(factorization_),
              threshold(threshold_) {}

    virtual ~Impl() = default;
};

namespace {

class EigensystemSolver : public LeastSquares::Impl {
public:
    explicit EigensystemSolver(int dimension)
            : Impl(dimension, LeastSquares::NORMAL_EIGENSYSTEM), _eig(dimension), _svd(), _tmp(dimension) {}

    void factor() override {
        ensure(LOWER_FISHER_MATRIX | RHS_VECTOR);
        _eig.compute(fisher);
        if (_eig.info() == Eigen::Success) {
            setRank(_eig.eigenvalues().reverse());
            LOGL_DEBUG(_log, "SelfAdjointEigenSolver succeeded: dimension=%d, rank=%d", dimension, rank);
        } else {
            // Note that the fallback uses an SVD of the Fisher matrix to compute the eigensystem,
            // because the two are the same for a symmetric matrix; this is very different from doing
            // a direct SVD of the design matrix.
            ensure(FULL_FISHER_MATRIX);
            _svd.compute(fisher, Eigen::ComputeFullU);  // Matrix is symmetric, so V == U == eigenvectors.
            setRank(_svd.singularValues());
            LOGL_DEBUG(_log,
                       "SelfAdjointEigenSolver failed; falling back to equivalent SVD: dimension=%d, rank=%d",
                       dimension, rank);
        }
    }

    void updateRank() override {
        if (_eig.info() == Eigen::Success) {
            setRank(_eig.eigenvalues().reverse());
        } else {
            setRank(_svd.singularValues());
        }
    }

    void updateDiagnostic() override {
        if (whichDiagnostic == LeastSquares::NORMAL_CHOLESKY) {
            throw LSST_EXCEPT(
                    pex::exceptions::LogicError,
                    "Cannot compute NORMAL_CHOLESKY diagnostic from NORMAL_EIGENSYSTEM factorization.");
        }
        if (_eig.info() == Eigen::Success) {
            ndarray::asEigenMatrix(diagnostic) = _eig.eigenvalues().reverse();
        } else {
            ndarray::asEigenMatrix(diagnostic) = _svd.singularValues();
        }
        if (whichDiagnostic == LeastSquares::DIRECT_SVD) {
            ndarray::asEigenArray(diagnostic) = ndarray::asEigenArray(diagnostic).sqrt();
        }
    }

    void updateSolution() override {
        if (rank == 0) {
            ndarray::asEigenMatrix(solution).setZero();
            return;
        }
        if (_eig.info() == Eigen::Success) {
            _tmp.head(rank) = _eig.eigenvectors().rightCols(rank).adjoint() * rhs;
            _tmp.head(rank).array() /= _eig.eigenvalues().tail(rank).array();
            ndarray::asEigenMatrix(solution) = _eig.eigenvectors().rightCols(rank) * _tmp.head(rank);
        } else {
            _tmp.head(rank) = _svd.matrixU().leftCols(rank).adjoint() * rhs;
            _tmp.head(rank).array() /= _svd.singularValues().head(rank).array();
            ndarray::asEigenMatrix(solution) = _svd.matrixU().leftCols(rank) * _tmp.head(rank);
        }
    }

    void updateCovariance() override {
        if (rank == 0) {
            ndarray::asEigenMatrix(covariance).setZero();
            return;
        }
        if (_eig.info() == Eigen::Success) {
            ndarray::asEigenMatrix(covariance) =
                    _eig.eigenvectors().rightCols(rank) *
                    _eig.eigenvalues().tail(rank).array().inverse().matrix().asDiagonal() *
                    _eig.eigenvectors().rightCols(rank).adjoint();
        } else {
            ndarray::asEigenMatrix(covariance) =
                    _svd.matrixU().leftCols(rank) *
                    _svd.singularValues().head(rank).array().inverse().matrix().asDiagonal() *
                    _svd.matrixU().leftCols(rank).adjoint();
        }
    }

private:
    Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> _eig;
    Eigen::JacobiSVD<Eigen::MatrixXd> _svd;  // only used if the eigendecomposition fails; should be very rare
    Eigen::VectorXd _tmp;
};
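
// In matrix terms, EigensystemSolver applies the rank-truncated pseudo-inverse of the
// Fisher matrix F = A^T A = V diag(lambda) V^T: keeping only the `rank` largest
// eigenvalues (or, in the SVD fallback, singular values), the solution and covariance are
//
//     x   = V_r diag(1/lambda_r) V_r^T (A^T d)
//     Cov = V_r diag(1/lambda_r) V_r^T
//
// which is what the two branches of updateSolution() and updateCovariance() above compute.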

class CholeskySolver : public LeastSquares::Impl {
public:
    explicit CholeskySolver(int dimension)
            : Impl(dimension, LeastSquares::NORMAL_CHOLESKY, 0.0), _ldlt(dimension) {}

    void factor() override {
        ensure(LOWER_FISHER_MATRIX | RHS_VECTOR);
        _ldlt.compute(fisher);
    }

    void updateRank() override {}

    void updateDiagnostic() override {
        if (whichDiagnostic != LeastSquares::NORMAL_CHOLESKY) {
            throw LSST_EXCEPT(
                    pex::exceptions::LogicError,
                    "Can only compute NORMAL_CHOLESKY diagnostic from NORMAL_CHOLESKY factorization.");
        }
        ndarray::asEigenMatrix(diagnostic) = _ldlt.vectorD();
    }

    void updateSolution() override { ndarray::asEigenMatrix(solution) = _ldlt.solve(rhs); }

    void updateCovariance() override {
        auto cov = ndarray::asEigenMatrix(covariance);
        cov.setIdentity();
        cov = _ldlt.solve(cov);
    }

private:
    Eigen::LDLT<Eigen::MatrixXd> _ldlt;
};
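
// CholeskySolver solves the normal equations directly: F = A^T A is factored as L D L^T,
// the solution comes from a single solve against the RHS vector b = A^T d, and the
// covariance is obtained by solving F C = I.  There is no rank truncation, which is why
// the threshold passed to Impl is 0.0 and updateRank() is a no-op.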

class SvdSolver : public LeastSquares::Impl {
public:
    explicit SvdSolver(int dimension)
            : Impl(dimension, LeastSquares::DIRECT_SVD), _svd(), _tmp(dimension) {}

    void factor() override {
        if (!(state & DESIGN_AND_DATA)) {
            throw LSST_EXCEPT(pex::exceptions::InvalidParameterError,
                              "Cannot initialize DIRECT_SVD solver with normal equations.");
        }
        _svd.compute(design, Eigen::ComputeThinU | Eigen::ComputeThinV);
        setRank(_svd.singularValues());
        LOGL_DEBUG(_log, "Using direct SVD method; dimension=%d, rank=%d", dimension, rank);
    }

    void updateRank() override { setRank(_svd.singularValues()); }

    void updateDiagnostic() override {
        switch (whichDiagnostic) {
            case LeastSquares::NORMAL_EIGENSYSTEM:
                ndarray::asEigenArray(diagnostic) = _svd.singularValues().array().square();
                break;
            case LeastSquares::NORMAL_CHOLESKY:
                throw LSST_EXCEPT(
                        pex::exceptions::LogicError,
                        "Cannot compute NORMAL_CHOLESKY diagnostic from DIRECT_SVD factorization.");
            case LeastSquares::DIRECT_SVD:
                ndarray::asEigenMatrix(diagnostic) = _svd.singularValues();
                break;
        }
    }

    void updateSolution() override {
        if (rank == 0) {
            ndarray::asEigenMatrix(solution).setZero();
            return;
        }
        _tmp.head(rank) = _svd.matrixU().leftCols(rank).adjoint() * data;
        _tmp.head(rank).array() /= _svd.singularValues().head(rank).array();
        ndarray::asEigenMatrix(solution) = _svd.matrixV().leftCols(rank) * _tmp.head(rank);
    }

    void updateCovariance() override {
        if (rank == 0) {
            ndarray::asEigenMatrix(covariance).setZero();
            return;
        }
        ndarray::asEigenMatrix(covariance) =
                _svd.matrixV().leftCols(rank) *
                _svd.singularValues().head(rank).array().inverse().square().matrix().asDiagonal() *
                _svd.matrixV().leftCols(rank).adjoint();
    }

private:
    Eigen::JacobiSVD<Eigen::MatrixXd> _svd;
    Eigen::VectorXd _tmp;
};
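
// SvdSolver factors the design matrix itself, A = U S V^T (thin SVD), so the truncated
// solution and covariance are
//
//     x   = V_r diag(1/s_r) U_r^T d
//     Cov = V_r diag(1/s_r^2) V_r^T
//
// Working directly on A avoids forming A^T A, whose condition number is the square of
// that of A, at the cost of a more expensive factorization.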

} // namespace

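// Illustrative usage sketch (not part of this file).  The fromDesignMatrix factory is
// declared in the public header rather than here, so the exact call below is an
// assumption about that interface:
//
//     Eigen::MatrixXd design(nData, nParams);  // one row per data point
//     Eigen::VectorXd data(nData);
//     // ... fill design and data ...
//     LeastSquares lstsq = LeastSquares::fromDesignMatrix(design, data,
//                                                         LeastSquares::NORMAL_EIGENSYSTEM);
//     ndarray::Array<double const, 1, 1> x = lstsq.getSolution();
//     ndarray::Array<double const, 2, 2> cov = lstsq.getCovariance();
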
void LeastSquares::setThreshold(double threshold) {
    _impl->threshold = threshold;
    _impl->state &= ~Impl::SOLUTION_ARRAY;
    _impl->state &= ~Impl::COVARIANCE_ARRAY;
    _impl->updateRank();
}

double LeastSquares::getThreshold() const { return _impl->threshold; }

ndarray::Array<double const, 1, 1> LeastSquares::getSolution() {
    _impl->ensure(Impl::SOLUTION_ARRAY);
    return _impl->solution;
}

ndarray::Array<double const, 2, 2> LeastSquares::getCovariance() {
    _impl->ensure(Impl::COVARIANCE_ARRAY);
    return _impl->covariance;
}

ndarray::Array<double const, 2, 2> LeastSquares::getFisherMatrix() {
    _impl->ensure(Impl::LOWER_FISHER_MATRIX | Impl::FULL_FISHER_MATRIX);
    // Wrap the Eigen::MatrixXd in an ndarray::Array, using _impl as the reference-counted owner.
    // Doesn't matter if we swap strides, because it's symmetric.
    return ndarray::external(_impl->fisher.data(), ndarray::makeVector(_impl->dimension, _impl->dimension),
                             ndarray::makeVector(_impl->dimension, 1), _impl);
}

ndarray::Array<double const, 1, 1> LeastSquares::getDiagnostic(Factorization factorization) {
    if (_impl->whichDiagnostic != factorization) {
        _impl->state &= ~Impl::DIAGNOSTIC_ARRAY;
        _impl->whichDiagnostic = factorization;
    }
    _impl->ensure(Impl::DIAGNOSTIC_ARRAY);
    return _impl->diagnostic;
}
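
// Illustrative use of the diagnostic (an assumption, not shown in this file): for the
// NORMAL_EIGENSYSTEM and DIRECT_SVD factorizations the entries come back in decreasing
// order, so the ratio of the first to the last element estimates the condition number:
//
//     auto diag = lstsq.getDiagnostic(LeastSquares::DIRECT_SVD);
//     double condition = diag[0] / diag[lstsq.getDimension() - 1];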

int LeastSquares::getDimension() const { return _impl->dimension; }

int LeastSquares::getRank() const { return _impl->rank; }

LeastSquares::Factorization LeastSquares::getFactorization() const { return _impl->factorization; }

LeastSquares::LeastSquares(Factorization factorization, int dimension) {
    switch (factorization) {
        case NORMAL_EIGENSYSTEM:
            _impl = std::make_shared<EigensystemSolver>(dimension);
            break;
        case NORMAL_CHOLESKY:
            _impl = std::make_shared<CholeskySolver>(dimension);
            break;
        case DIRECT_SVD:
            _impl = std::make_shared<SvdSolver>(dimension);
            break;
    }
}

LeastSquares::LeastSquares(LeastSquares const&) = default;
LeastSquares::LeastSquares(LeastSquares&&) = default;
LeastSquares& LeastSquares::operator=(LeastSquares const&) = default;
LeastSquares& LeastSquares::operator=(LeastSquares&&) = default;

LeastSquares::~LeastSquares() = default;

Eigen::MatrixXd& LeastSquares::_getDesignMatrix() { return _impl->design; }
Eigen::VectorXd& LeastSquares::_getDataVector() { return _impl->data; }

Eigen::MatrixXd& LeastSquares::_getFisherMatrix() { return _impl->fisher; }
Eigen::VectorXd& LeastSquares::_getRhsVector() { return _impl->rhs; }

void LeastSquares::_factor(bool haveNormalEquations) {
    if (haveNormalEquations) {
        if (_getFisherMatrix().rows() != _impl->dimension) {
            throw LSST_EXCEPT(pex::exceptions::InvalidParameterError,
                              (boost::format("Number of rows of Fisher matrix (%d) does not match"
                                             " dimension of LeastSquares solver.") %
                               _getFisherMatrix().rows() % _impl->dimension)
                                      .str());
        }
        if (_getFisherMatrix().cols() != _impl->dimension) {
            throw LSST_EXCEPT(pex::exceptions::InvalidParameterError,
                              (boost::format("Number of columns of Fisher matrix (%d) does not match"
                                             " dimension of LeastSquares solver.") %
                               _getFisherMatrix().cols() % _impl->dimension)
                                      .str());
        }
        if (_getRhsVector().size() != _impl->dimension) {
            throw LSST_EXCEPT(pex::exceptions::InvalidParameterError,
                              (boost::format("Number of elements in RHS vector (%d) does not match"
                                             " dimension of LeastSquares solver.") %
                               _getRhsVector().size() % _impl->dimension)
                                      .str());
        }
        _impl->state |= Impl::LOWER_FISHER_MATRIX | Impl::FULL_FISHER_MATRIX | Impl::RHS_VECTOR;
    } else {
        if (_getDesignMatrix().cols() != _impl->dimension) {
            throw LSST_EXCEPT(
                    pex::exceptions::InvalidParameterError,
                    "Number of columns of design matrix does not match dimension of LeastSquares solver.");
        }
        if (_getDesignMatrix().rows() != _getDataVector().size()) {
            throw LSST_EXCEPT(pex::exceptions::InvalidParameterError,
                              (boost::format("Number of rows of design matrix (%d) does not match number of "
                                             "data points (%d)") %
                               _getDesignMatrix().rows() % _getDataVector().size())
                                      .str());
        }
        if (_getDesignMatrix().cols() > _getDataVector().size()) {
            throw LSST_EXCEPT(
                    pex::exceptions::InvalidParameterError,
                    (boost::format("Number of columns of design matrix (%d) must not be larger than the "
                                   "number of data points (%d)") %
                     _getDesignMatrix().cols() % _getDataVector().size())
                            .str());
        }
        _impl->state |= Impl::DESIGN_AND_DATA;
    }
    _impl->factor();
}
} // namespace math
} // namespace afw
} // namespace lsst