LeastSquares.cc
// -*- LSST-C++ -*-

/*
 * LSST Data Management System
 * Copyright 2008, 2009, 2010, 2011 LSST Corporation.
 *
 * This product includes software developed by the
 * LSST Project (http://www.lsst.org/).
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the LSST License Statement and
 * the GNU General Public License along with this program. If not,
 * see <http://www.lsstcorp.org/LegalNotices/>.
 */

#include "Eigen/Eigenvalues"
#include "Eigen/SVD"
#include "Eigen/Cholesky"
#include "boost/format.hpp"
#include <memory>

#include "lsst/afw/math/LeastSquares.h"
#include "lsst/pex/exceptions.h"
#include "lsst/log/Log.h"

namespace {
LOG_LOGGER _log = LOG_GET("afw.math.LeastSquares");
}

namespace lsst { namespace afw { namespace math {

class LeastSquares::Impl {
public:

    enum StateFlags {
        LOWER_FISHER_MATRIX = 0x001,
        FULL_FISHER_MATRIX = 0x002,
        RHS_VECTOR = 0x004,
        SOLUTION_ARRAY = 0x008,
        COVARIANCE_ARRAY = 0x010,
        DIAGNOSTIC_ARRAY = 0x020,
        DESIGN_AND_DATA = 0x040
    };

    int state;
    int dimension;
    int rank;
    Factorization factorization;
    Factorization whichDiagnostic;
    double threshold;

    Eigen::MatrixXd design;
    Eigen::VectorXd data;
    Eigen::MatrixXd fisher;
    Eigen::VectorXd rhs;

    ndarray::Array<double,1,1> solution;
    ndarray::Array<double,2,2> covariance;
    ndarray::Array<double,1,1> diagnostic;

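    // setRank() expects 'values' sorted in decreasing order (eigenvalues are reversed before
    // being passed in; singular values already come out largest-first).  Entries smaller than
    // threshold*values[0] are truncated.  For example, with the default threshold (machine
    // epsilon, about 2.2e-16) and values = [4.0, 1.0, 1e-20], the cutoff is about 8.9e-16, so
    // the last entry is dropped and rank becomes 2.  Rank is set to 0 only when
    // threshold*values[0] is nonpositive.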
    template <typename D>
    void setRank(Eigen::MatrixBase<D> const & values) {
        double cond = threshold * values[0];
        if (cond <= 0.0) {
            rank = 0;
        } else {
            for (rank = dimension; (rank > 1) && (values[rank-1] < cond); --rank);
        }
    }

    void ensure(int desired) {
        if (state & FULL_FISHER_MATRIX) state |= LOWER_FISHER_MATRIX;
        if (desired & FULL_FISHER_MATRIX) desired |= LOWER_FISHER_MATRIX;
        int toAdd = ~state & desired;
        if (toAdd & LOWER_FISHER_MATRIX) {
            assert(state & DESIGN_AND_DATA);
            fisher = Eigen::MatrixXd::Zero(design.cols(), design.cols());
            fisher.selfadjointView<Eigen::Lower>().rankUpdate(design.adjoint());
        }
        if (toAdd & FULL_FISHER_MATRIX) {
            fisher.triangularView<Eigen::StrictlyUpper>() = fisher.adjoint();
        }
        if (toAdd & RHS_VECTOR) {
            assert(state & DESIGN_AND_DATA);
            rhs = design.adjoint() * data;
        }
        if (toAdd & SOLUTION_ARRAY) {
            if (solution.isEmpty()) solution = ndarray::allocate(dimension);
            updateSolution();
        }
        if (toAdd & COVARIANCE_ARRAY) {
            if (covariance.isEmpty()) covariance = ndarray::allocate(dimension, dimension);
            updateCovariance();
        }
        if (toAdd & DIAGNOSTIC_ARRAY) {
            if (diagnostic.isEmpty()) diagnostic = ndarray::allocate(dimension);
            updateDiagnostic();
        }
        state |= toAdd;
    }

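    // The pure-virtual hooks below are implemented by each factorization-specific solver in
    // the anonymous namespace that follows.  ensure() invokes them lazily, so a product such
    // as the covariance is only computed the first time it is requested (and recomputed after
    // setThreshold() clears the corresponding state bits).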
    virtual void factor() = 0;

    virtual void updateRank() = 0;

    virtual void updateSolution() = 0;
    virtual void updateCovariance() = 0;

    virtual void updateDiagnostic() = 0;

    Impl(int dimension_, double threshold_=std::numeric_limits<double>::epsilon()) :
        state(0), dimension(dimension_), rank(dimension_), threshold(threshold_)
    {}

    virtual ~Impl() {}
};

namespace {

class EigensystemSolver : public LeastSquares::Impl {
public:

    explicit EigensystemSolver(int dimension) :
        Impl(dimension), _eig(dimension), _svd(), _tmp(dimension)
    {}

    virtual void factor() {
        ensure(LOWER_FISHER_MATRIX | RHS_VECTOR);
        _eig.compute(fisher);
        if (_eig.info() == Eigen::Success) {
            setRank(_eig.eigenvalues().reverse());
            LOGL_DEBUG(_log, "SelfAdjointEigenSolver succeeded: dimension=%d, rank=%d", dimension, rank);
        } else {
            // Note that the fallback is using SVD of the Fisher to compute the Eigensystem, because those
            // are the same for a symmetric matrix; this is very different from doing a direct SVD of
            // the design matrix.
            ensure(FULL_FISHER_MATRIX);
            _svd.compute(fisher, Eigen::ComputeFullU); // Matrix is symmetric, so V == U == eigenvectors
            setRank(_svd.singularValues());
            LOGL_DEBUG(_log,
                "SelfAdjointEigenSolver failed; falling back to equivalent SVD: dimension=%d, rank=%d",
                dimension, rank
            );
        }
    }

    virtual void updateRank() {
        if (_eig.info() == Eigen::Success) {
            setRank(_eig.eigenvalues().reverse());
        } else {
            setRank(_svd.singularValues());
        }
    }

    virtual void updateDiagnostic() {
        if (whichDiagnostic == LeastSquares::NORMAL_CHOLESKY) {
            throw LSST_EXCEPT(
                pex::exceptions::LogicError,
                "Cannot compute NORMAL_CHOLESKY diagnostic from NORMAL_EIGENSYSTEM factorization."
            );
        }
        if (_eig.info() == Eigen::Success) {
            diagnostic.asEigen() = _eig.eigenvalues().reverse();
        } else {
            diagnostic.asEigen() = _svd.singularValues();
        }
        if (whichDiagnostic == LeastSquares::DIRECT_SVD) {
            diagnostic.asEigen<Eigen::ArrayXpr>() = diagnostic.asEigen<Eigen::ArrayXpr>().sqrt();
        }
    }

    virtual void updateSolution() {
        if (rank == 0) {
            solution.asEigen().setZero();
            return;
        }
        if (_eig.info() == Eigen::Success) {
            _tmp.head(rank) = _eig.eigenvectors().rightCols(rank).adjoint() * rhs;
            _tmp.head(rank).array() /= _eig.eigenvalues().tail(rank).array();
            solution.asEigen() = _eig.eigenvectors().rightCols(rank) * _tmp.head(rank);
        } else {
            _tmp.head(rank) = _svd.matrixU().leftCols(rank).adjoint() * rhs;
            _tmp.head(rank).array() /= _svd.singularValues().head(rank).array();
            solution.asEigen() = _svd.matrixU().leftCols(rank) * _tmp.head(rank);
        }
    }

    virtual void updateCovariance() {
        if (rank == 0) {
            covariance.asEigen().setZero();
            return;
        }
        if (_eig.info() == Eigen::Success) {
            covariance.asEigen() =
                _eig.eigenvectors().rightCols(rank)
                * _eig.eigenvalues().tail(rank).array().inverse().matrix().asDiagonal()
                * _eig.eigenvectors().rightCols(rank).adjoint();
        } else {
            covariance.asEigen() =
                _svd.matrixU().leftCols(rank)
                * _svd.singularValues().head(rank).array().inverse().matrix().asDiagonal()
                * _svd.matrixU().leftCols(rank).adjoint();
        }
    }

private:
    Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> _eig;
    Eigen::JacobiSVD<Eigen::MatrixXd> _svd; // only used if Eigendecomposition fails, should be very rare
    Eigen::VectorXd _tmp;
};

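// CholeskySolver factors the Fisher matrix with Eigen's LDLT.  It is constructed with
// threshold = 0.0 and its updateRank() is a no-op: LDLT is not rank-revealing, so this solver
// always reports full rank and is intended for well-conditioned problems.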
class CholeskySolver : public LeastSquares::Impl {
public:

    explicit CholeskySolver(int dimension) : Impl(dimension, 0.0), _ldlt(dimension) {}

    virtual void factor() {
        ensure(LOWER_FISHER_MATRIX | RHS_VECTOR);
        _ldlt.compute(fisher);
    }

    virtual void updateRank() {}

    virtual void updateDiagnostic() {
        if (whichDiagnostic != LeastSquares::NORMAL_CHOLESKY) {
            throw LSST_EXCEPT(
                pex::exceptions::LogicError,
                "Can only compute NORMAL_CHOLESKY diagnostic from NORMAL_CHOLESKY factorization."
            );
        }
        diagnostic.asEigen() = _ldlt.vectorD();
    }

    virtual void updateSolution() { solution.asEigen() = _ldlt.solve(rhs); }

    virtual void updateCovariance() {
        ndarray::EigenView<double,2,2> cov(covariance);
        cov.setIdentity();
        cov = _ldlt.solve(cov);
    }

private:
    Eigen::LDLT<Eigen::MatrixXd> _ldlt;
};

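// SvdSolver works directly on the design matrix rather than on the normal equations, so it
// avoids squaring the condition number of the problem.  It therefore requires the design
// matrix and data vector, and cannot be initialized from a Fisher matrix and RHS vector, as
// the check in factor() below enforces.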
class SvdSolver : public LeastSquares::Impl {
public:

    explicit SvdSolver(int dimension) : Impl(dimension), _svd(), _tmp(dimension) {}

    virtual void factor() {
        if (!(state & DESIGN_AND_DATA)) {
            throw LSST_EXCEPT(
                pex::exceptions::InvalidParameterError,
                "Cannot initialize DIRECT_SVD solver with normal equations."
            );
        }
        _svd.compute(design, Eigen::ComputeThinU | Eigen::ComputeThinV);
        setRank(_svd.singularValues());
        LOGL_DEBUG(_log, "Using direct SVD method; dimension=%d, rank=%d", dimension, rank);
    }

    virtual void updateRank() { setRank(_svd.singularValues()); }

    virtual void updateDiagnostic() {
        switch (whichDiagnostic) {
        case LeastSquares::NORMAL_EIGENSYSTEM:
            diagnostic.asEigen<Eigen::ArrayXpr>() = _svd.singularValues().array().square();
            break;
        case LeastSquares::NORMAL_CHOLESKY:
            throw LSST_EXCEPT(
                pex::exceptions::LogicError,
                "Can only compute NORMAL_CHOLESKY diagnostic from NORMAL_CHOLESKY factorization."
            );
        case LeastSquares::DIRECT_SVD:
            diagnostic.asEigen() = _svd.singularValues();
            break;
        }
    }

    virtual void updateSolution() {
        if (rank == 0) {
            solution.asEigen().setZero();
            return;
        }
        _tmp.head(rank) = _svd.matrixU().leftCols(rank).adjoint() * data;
        _tmp.head(rank).array() /= _svd.singularValues().head(rank).array();
        solution.asEigen() = _svd.matrixV().leftCols(rank) * _tmp.head(rank);
    }

    virtual void updateCovariance() {
        if (rank == 0) {
            covariance.asEigen().setZero();
            return;
        }
        covariance.asEigen() =
            _svd.matrixV().leftCols(rank)
            * _svd.singularValues().head(rank).array().inverse().square().matrix().asDiagonal()
            * _svd.matrixV().leftCols(rank).adjoint();
    }

private:
    Eigen::JacobiSVD<Eigen::MatrixXd> _svd;
    Eigen::VectorXd _tmp;
};

} // anonymous

void LeastSquares::setThreshold(double threshold) {
    _impl->threshold = threshold;
    _impl->state &= ~Impl::SOLUTION_ARRAY;
    _impl->state &= ~Impl::COVARIANCE_ARRAY;
    _impl->updateRank();
}

double LeastSquares::getThreshold() const { return _impl->threshold; }

ndarray::Array<double const,1,1> LeastSquares::getSolution() {
    _impl->ensure(Impl::SOLUTION_ARRAY);
    return _impl->solution;
}

ndarray::Array<double const,2,2> LeastSquares::getCovariance() {
    _impl->ensure(Impl::COVARIANCE_ARRAY);
    return _impl->covariance;
}

ndarray::Array<double const,2,2> LeastSquares::getFisherMatrix() {
    _impl->ensure(Impl::LOWER_FISHER_MATRIX | Impl::FULL_FISHER_MATRIX);
    // Wrap the Eigen::MatrixXd in an ndarray::Array, using _impl as the reference-counted owner.
    // Doesn't matter if we swap strides, because it's symmetric.
    return ndarray::external(
        _impl->fisher.data(),
        ndarray::makeVector(_impl->dimension, _impl->dimension),
        ndarray::makeVector(_impl->dimension, 1),
        _impl
    );
}

ndarray::Array<double const,1,1> LeastSquares::getDiagnostic(Factorization factorization) {
    if (_impl->whichDiagnostic != factorization) {
        _impl->state &= ~Impl::DIAGNOSTIC_ARRAY;
        _impl->whichDiagnostic = factorization;
    }
    _impl->ensure(Impl::DIAGNOSTIC_ARRAY);
    return _impl->diagnostic;
}

int LeastSquares::getDimension() const { return _impl->dimension; }

int LeastSquares::getRank() const { return _impl->rank; }

LeastSquares::Factorization LeastSquares::getFactorization() const { return _impl->factorization; }

LeastSquares::LeastSquares(Factorization factorization, int dimension) {
    switch (factorization) {
    case NORMAL_EIGENSYSTEM:
        _impl = std::make_shared<EigensystemSolver>(dimension);
        break;
    case NORMAL_CHOLESKY:
        _impl = std::make_shared<CholeskySolver>(dimension);
        break;
    case DIRECT_SVD:
        _impl = std::make_shared<SvdSolver>(dimension);
        break;
    }
    _impl->factorization = factorization;
}

Eigen::MatrixXd & LeastSquares::_getDesignMatrix() { return _impl->design; }
Eigen::VectorXd & LeastSquares::_getDataVector() { return _impl->data; }

Eigen::MatrixXd & LeastSquares::_getFisherMatrix() { return _impl->fisher; }
Eigen::VectorXd & LeastSquares::_getRhsVector() { return _impl->rhs; }

void LeastSquares::_factor(bool haveNormalEquations) {
    if (haveNormalEquations) {
        if (_getFisherMatrix().rows() != _impl->dimension) {
            throw LSST_EXCEPT(
                pex::exceptions::InvalidParameterError,
                (boost::format("Number of rows of Fisher matrix (%d) does not match"
                               " dimension of LeastSquares solver (%d).")
                 % _getFisherMatrix().rows() % _impl->dimension).str()
            );
        }
        if (_getFisherMatrix().cols() != _impl->dimension) {
            throw LSST_EXCEPT(
                pex::exceptions::InvalidParameterError,
                (boost::format("Number of columns of Fisher matrix (%d) does not match"
                               " dimension of LeastSquares solver (%d).")
                 % _getFisherMatrix().cols() % _impl->dimension).str()
            );
        }
        if (_getRhsVector().size() != _impl->dimension) {
            throw LSST_EXCEPT(
                pex::exceptions::InvalidParameterError,
                (boost::format("Number of elements in RHS vector (%d) does not match"
                               " dimension of LeastSquares solver (%d).")
                 % _getRhsVector().size() % _impl->dimension).str()
            );
        }
        _impl->state = Impl::LOWER_FISHER_MATRIX | Impl::RHS_VECTOR;
    } else {
        if (_getDesignMatrix().cols() != _impl->dimension) {
            throw LSST_EXCEPT(
                pex::exceptions::InvalidParameterError,
                "Number of columns of design matrix does not match dimension of LeastSquares solver."
            );
        }
        if (_getDesignMatrix().rows() != _getDataVector().size()) {
            throw LSST_EXCEPT(
                pex::exceptions::InvalidParameterError,
                (boost::format("Number of rows of design matrix (%d) does not match number of "
                               "data points (%d)") % _getDesignMatrix().rows() % _getDataVector().size()
                ).str()
            );
        }
        if (_getDesignMatrix().cols() > _getDataVector().size()) {
            throw LSST_EXCEPT(
                pex::exceptions::InvalidParameterError,
                (boost::format("Number of columns of design matrix (%d) must not be greater than "
                               "number of data points (%d)") % _getDesignMatrix().cols() % _getDataVector().size()
                ).str()
            );
        }
        _impl->state = Impl::DESIGN_AND_DATA;
    }
    _impl->factor();
}

}}} // namespace lsst::afw::math
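
// Usage sketch (not part of this file): a minimal example of driving this class through its
// public interface, assuming the factory method fromDesignMatrix() declared in LeastSquares.h
// and the NORMAL_EIGENSYSTEM / DIRECT_SVD enumerators shown above.
//
//     #include "lsst/afw/math/LeastSquares.h"
//
//     using lsst::afw::math::LeastSquares;
//
//     ndarray::Array<double,2,2> design = ndarray::allocate(100, 3);  // 100 data points, 3 parameters
//     ndarray::Array<double,1,1> data = ndarray::allocate(100);
//     // ... fill design and data ...
//     LeastSquares lsq = LeastSquares::fromDesignMatrix(design, data, LeastSquares::DIRECT_SVD);
//     lsq.setThreshold(1E-10);                // truncate singular values below 1e-10 * largest
//     auto solution = lsq.getSolution();      // ndarray::Array<double const,1,1>
//     auto covariance = lsq.getCovariance();  // ndarray::Array<double const,2,2>
//     int rank = lsq.getRank();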