GaussianProcessTimer::GaussianProcessTimer() {
    _interpolationCount = 0;
    // ...
}

void GaussianProcessTimer::reset() {
    _interpolationCount = 0;
    // ...
}

void GaussianProcessTimer::start() {
    // ...
}

void GaussianProcessTimer::addToEigen() {
    // ...
    _eigenTime += after - _before;
    // ...
}

void GaussianProcessTimer::addToVariance() {
    // ...
    _varianceTime += after - _before;
    // ...
}

void GaussianProcessTimer::addToSearch() {
    // ...
    _searchTime += after - _before;
    // ...
}

void GaussianProcessTimer::addToIteration() {
    // ...
    _iterationTime += after - _before;
    // ...
}

void GaussianProcessTimer::addToTotal(int i) {
    // ...
    _totalTime += after - _beginning;
    _interpolationCount += i;
}

void GaussianProcessTimer::display() {
    std::cout << "\nSearch time " << _searchTime << "\n";
    std::cout << "Eigen time " << _eigenTime << "\n";
    std::cout << "Variance time " << _varianceTime << "\n";
    std::cout << "Iteration time " << _iterationTime << "\n";
    std::cout << "Total time " << _totalTime << "\n";
    std::cout << "Number of interpolations " << _interpolationCount << "\n";
}

// KdTree<T> keeps the data points in _data (an _npts x _dimensions array) and the tree
// topology in _tree (an _npts x 4 int array).  From the code below, the four columns appear
// to be indexed by DIMENSION (the dimension a node splits on), LT (daughter holding points
// with a smaller value in that dimension), GEQ (daughter holding points with a
// greater-or-equal value), and PARENT.
template <typename T>
void KdTree<T>::Initialize(ndarray::Array<T, 2, 2> const &dt) {
    _npts = dt.template getSize<0>();
    _dimensions = dt.template getSize<1>();

    _inn = allocate(ndarray::makeVector(_npts));
    // ...
    _data = allocate(ndarray::makeVector(_room, _dimensions));
    // ...
    _tree = allocate(ndarray::makeVector(_room, 4));

    for (i = 0; i < _npts; i++) {
        // ... (presumably fills _inn with the indices 0 .. _npts-1)
    }

    _organize(_inn, _npts, -1, -1);
    // ...
}

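// findNeighbors(neighdex, dd, v, n_nn): writes the indices of the n_nn data points nearest
// to the query point v into neighdex, and the corresponding distances into dd.  The search
// seeds the candidate list with the leaf returned by _findNode(v) and then lets
// _lookForNeighbors() walk the rest of the tree, pruning branches that cannot contain a
// closer point.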
template <typename T>
void KdTree<T>::findNeighbors(ndarray::Array<int, 1, 1> neighdex, ndarray::Array<double, 1, 1> dd,
                              ndarray::Array<const T, 1, 1> const &v, int n_nn) const {
    if (n_nn > _npts) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for more neighbors than kd tree contains\n");
    }
    if (n_nn <= 0) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for zero or a negative number of neighbors\n");
    }
    if (neighdex.getNumElements() != static_cast<ndarray::Size>(n_nn)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Size of neighdex does not equal n_nn in KdTree.findNeighbors\n");
    }
    if (dd.getNumElements() != static_cast<ndarray::Size>(n_nn)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Size of dd does not equal n_nn in KdTree.findNeighbors\n");
    }
    // ...
    _neighborCandidates = allocate(ndarray::makeVector(n_nn));
    _neighborDistances = allocate(ndarray::makeVector(n_nn));
    _neighborsWanted = n_nn;
    // ...
    for (i = 0; i < n_nn; i++) _neighborDistances[i] = -1.0;

    start = _findNode(v);

    _neighborDistances[0] = _distance(v, _data[start]);
    _neighborCandidates[0] = start;
    // ...
    // search the daughters and the parent of the starting node
    for (i = 1; i < 4; i++) {
        if (_tree[start][i] >= 0) {
            _lookForNeighbors(v, _tree[start][i], start);
        }
    }

    for (i = 0; i < n_nn; i++) {
        neighdex[i] = _neighborCandidates[i];
        dd[i] = _neighborDistances[i];
    }
}

template <typename T>
T KdTree<T>::getData(int ipt, int idim) const {
    if (ipt >= _npts || ipt < 0) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for point that does not exist in KdTree\n");
    }
    if (idim >= _dimensions || idim < 0) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "KdTree does not have points of that many dimensions\n");
    }

    return _data[ipt][idim];
}

template <typename T>
ndarray::Array<const T, 1, 1> KdTree<T>::getData(int ipt) const {
    if (ipt >= _npts || ipt < 0) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for point that does not exist in KdTree\n");
    }
    // ...
}

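// addPoint(v): appends a new point to the tree.  It walks from _masterParent down to the
// leaf whose splitting rule the new point satisfies, hangs the point off that leaf, and
// grows the _data/_tree storage when _npts reaches _room.  The _walkUpTree() call at the
// end re-checks the tree invariants for the newly added leaf.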
template <typename T>
void KdTree<T>::addPoint(ndarray::Array<const T, 1, 1> const &v) {
    if (v.getNumElements() != static_cast<ndarray::Size>(_dimensions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You are trying to add a point of the incorrect dimensionality to KdTree\n");
    }

    int i, j, node, dim, dir;

    // ... (node = the leaf under which v belongs)
    dim = _tree[node][DIMENSION] + 1;
    if (dim == _dimensions) dim = 0;

    if (_npts == _room) {
        // grow the storage arrays, preserving their current contents
        ndarray::Array<T, 2, 2> dbuff = allocate(ndarray::makeVector(_npts, _dimensions));
        ndarray::Array<int, 2, 2> tbuff = allocate(ndarray::makeVector(_npts, 4));

        dbuff.deep() = _data;
        tbuff.deep() = _tree;
        // ...
        _tree = allocate(ndarray::makeVector(_room, 4));
        _data = allocate(ndarray::makeVector(_room, _dimensions));

        for (i = 0; i < _npts; i++) {
            for (j = 0; j < _dimensions; j++) _data[i][j] = dbuff[i][j];
            for (j = 0; j < 4; j++) _tree[i][j] = tbuff[i][j];
        }
    }

    _tree[_npts][DIMENSION] = dim;
    _tree[_npts][PARENT] = node;
    i = _tree[node][DIMENSION];

    if (_data[node][i] > v[i]) {
        if (_tree[node][LT] >= 0) {
            throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                              "Trying to add to KdTree in a node that is already occupied\n");
        }
        _tree[node][LT] = _npts;
    } else {
        if (_tree[node][GEQ] >= 0) {
            throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                              "Trying to add to KdTree in a node that is already occupied\n");
        }
        _tree[node][GEQ] = _npts;
    }

    _tree[_npts][LT] = -1;
    _tree[_npts][GEQ] = -1;
    for (i = 0; i < _dimensions; i++) {
        _data[_npts][i] = v[i];
    }
    // ...
    i = _walkUpTree(_tree[_npts - 1][PARENT], dir, _npts - 1);
    if (i != _masterParent) {
        // ...
    }
}

template <typename T>
// (short member function omitted in this listing)

template <typename T>
void KdTree<T>::getTreeNode(ndarray::Array<int, 1, 1> const &v, int dex) const {
    if (dex < 0 || dex >= _npts) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for tree information on point that does not exist\n");
    }
    if (v.getNumElements() != 4) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Need to pass a 4-element ndarray into KdTree.getTreeNode()\n");
    }

    v[0] = _tree[dex][DIMENSION];
    v[1] = _tree[dex][LT];
    v[2] = _tree[dex][GEQ];
    v[3] = _tree[dex][PARENT];
}

template <typename T>
int KdTree<T>::_testTree() const {
    // sanity check on the tree topology
    if (_npts == 1 && _tree[0][PARENT] < 0 && _masterParent == 0) {
        // ... (a single-point tree is trivially consistent)
    }
    // ...
    for (i = 0; i < _npts; i++) isparent.push_back(0);

    // exactly one node may be parentless (the master parent)
    for (i = 0; i < _npts; i++) {
        if (_tree[i][PARENT] < 0) j++;
    }
    if (j != 1) {
        std::cout << "_tree FAILURE " << j << " _masterParents\n";
        // ...
    }

    // no node may be the parent of more than two daughters
    for (i = 0; i < _npts; i++) {
        if (_tree[i][PARENT] >= 0) {
            isparent[_tree[i][PARENT]]++;
        }
    }
    for (i = 0; i < _npts; i++) {
        if (isparent[i] > 2) {
            std::cout << "_tree FAILURE " << i << " is parent to " << isparent[i] << "\n";
            // ...
        }
    }

    // every point must satisfy the splitting rules of all of its ancestors
    for (i = 0; i < _npts; i++) {
        if (_tree[i][PARENT] >= 0) {
            if (_tree[_tree[i][PARENT]][LT] == i)
                // ... (j records whether i is the LT or the GEQ daughter)
            output = _walkUpTree(_tree[i][PARENT], j, i);

            if (output != _masterParent) {
                // ...
            }
        }
    }

    if (output != _masterParent)
        // ...
}

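// _organize(use, ct, parent, dir): recursively builds the tree from the ct point indices
// listed in 'use'.  It computes the variance of the data along each dimension, splits on the
// dimension with the largest variance, sorts the points along that dimension, and makes the
// median element the daughter node; the two halves of the sorted list are then passed back
// into _organize().  (When only one point remains, it simply becomes a leaf below 'parent'.)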
template <typename T>
void KdTree<T>::_organize(ndarray::Array<int, 1, 1> const &use, int ct, int parent, int dir) {
    int i, j, k, l, idim, daughter;
    T mean, var, varbest;

    // choose the dimension with the largest variance as the splitting dimension
    for (i = 0; i < _dimensions; i++) {
        // ...
        for (j = 0; j < ct; j++) {
            mean += _data[use[j]][i];
            var += _data[use[j]][i] * _data[use[j]][i];
        }
        mean = mean / double(ct);
        var = var / double(ct) - mean * mean;
        if (i == 0 || var > varbest || (var == varbest && parent >= 0 && i > _tree[parent][DIMENSION])) {
            // ... (remember this dimension as idim and its variance as varbest)
        }
    }

    // sort the points on their value in the chosen dimension idim
    toSortElement.push_back(_data[use[0]][idim]);
    // ...
    for (i = 1; i < ct; i++) {
        toSortElement[0] = _data[use[i]][idim];
        toSortElement[1] = T(use[i]);
        // ...
    }
    // ...

    // nudge the split point off of any run of equal values
    while (k > 0 && toSort[k][0] == toSort[k - 1][0]) k--;
    while (l < ct - 1 && toSort[l][0] == toSort[ct / 2][0]) l++;
    if ((ct / 2 - k) < (l - ct / 2) || l == ct - 1)
        // ...

    for (i = 0; i < ct; i++) {
        use[i] = int(toSort[i][1]);
    }
    // ...

    // the median element becomes the daughter node; recurse on the two halves
    if (parent >= 0) _tree[parent][dir] = daughter;
    _tree[daughter][DIMENSION] = idim;
    _tree[daughter][PARENT] = parent;
    // ...
    _organize(use[ndarray::view(j + 1, use.getSize<0>())], ct - j - 1, daughter, GEQ);
    // ...
    _tree[daughter][GEQ] = -1;
    // ...
    _organize(use, j, daughter, LT);
    // ...
    _tree[daughter][LT] = -1;
    // ...

    // the single-point case: the remaining point simply becomes a leaf below parent
    if (parent >= 0) _tree[parent][dir] = daughter;
    idim = _tree[parent][DIMENSION] + 1;
    if (idim >= _dimensions) idim = 0;
    _tree[daughter][DIMENSION] = idim;
    _tree[daughter][LT] = -1;
    _tree[daughter][GEQ] = -1;
    _tree[daughter][PARENT] = parent;
    // ...

    _masterParent = daughter;
}

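// _findNode(v): descends the tree from _masterParent, moving to the LT or GEQ daughter
// according to how v compares with each node in that node's splitting dimension, and returns
// the node where v would be attached.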
template <typename T>
int KdTree<T>::_findNode(ndarray::Array<const T, 1, 1> const &v) const {
    int consider, next, dim;

    dim = _tree[_masterParent][DIMENSION];

    if (v[dim] < _data[_masterParent][dim])
        consider = _tree[_masterParent][LT];
    else
        consider = _tree[_masterParent][GEQ];

    // ... (keep descending until there is no daughter to move into)
    dim = _tree[consider][DIMENSION];
    if (v[dim] < _data[consider][dim])
        next = _tree[consider][LT];
    else
        next = _tree[consider][GEQ];
    // ...
}

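// _lookForNeighbors(v, consider, from): recursive part of the neighbor search.  The node
// 'consider' is tested against the current candidate list (kept sorted by distance in
// _neighborDistances/_neighborCandidates), and the search then continues into the daughters
// and the parent, skipping the node it just came 'from'.  A branch is only entered when the
// distance from v to the splitting plane is no larger than the current worst candidate
// distance, or when fewer than _neighborsWanted candidates have been found.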
template <typename T>
void KdTree<T>::_lookForNeighbors(ndarray::Array<const T, 1, 1> const &v, int consider, int from) const {
    // ...
    dd = _distance(v, _data[consider]);

    if (_neighborsFound < _neighborsWanted || dd < _neighborDistances[_neighborsWanted - 1]) {
        // insert 'consider' into the sorted candidate list
        for (j = 0; j < _neighborsFound && _neighborDistances[j] < dd; j++)
            ;
        for (i = _neighborsWanted - 1; i > j; i--) {
            _neighborDistances[i] = _neighborDistances[i - 1];
            _neighborCandidates[i] = _neighborCandidates[i - 1];
        }
        _neighborDistances[j] = dd;
        _neighborCandidates[j] = consider;

        if (_neighborsFound < _neighborsWanted) _neighborsFound++;
    }

    // ... (we descended into this node: try both daughters)
    i = _tree[consider][DIMENSION];
    dd = v[i] - _data[consider][i];
    if ((dd <= _neighborDistances[_neighborsFound - 1] || _neighborsFound < _neighborsWanted) &&
        _tree[consider][LT] >= 0) {
        _lookForNeighbors(v, _tree[consider][LT], consider);
    }

    dd = _data[consider][i] - v[i];
    if ((dd <= _neighborDistances[_neighborsFound - 1] || _neighborsFound < _neighborsWanted) &&
        _tree[consider][GEQ] >= 0) {
        _lookForNeighbors(v, _tree[consider][GEQ], consider);
    }

    // ... (we came up from a daughter: try the other daughter and then the parent)
    if (_tree[consider][LT] == from) {
        // ... (going is set to GEQ, or to LT in the opposite case)
    }
    j = _tree[consider][going];
    // ...
    i = _tree[consider][DIMENSION];
    // ...
    dd = v[i] - _data[consider][i];
    // ...
    dd = _data[consider][i] - v[i];

    if (dd <= _neighborDistances[_neighborsFound - 1] || _neighborsFound < _neighborsWanted) {
        _lookForNeighbors(v, j, consider);
    }
    // ...
    if (_tree[consider][PARENT] >= 0) {
        _lookForNeighbors(v, _tree[consider][PARENT], consider);
    }
}

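// _walkUpTree(target, dir, root): consistency check used by addPoint() and the tree test.
// It climbs from 'target' toward _masterParent, verifying that the data point 'root'
// respects the splitting rule of every ancestor (value greater than or equal to the
// ancestor's for a GEQ daughter, smaller for an LT daughter), and returns _masterParent
// when everything is consistent.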
template <typename T>
int KdTree<T>::_walkUpTree(int target, int dir, int root) const {
    // ...
    // (if root came down the GEQ branch, its value must be >= the target's)
    if (_data[root][_tree[target][DIMENSION]] >= _data[target][_tree[target][DIMENSION]]) {
        // ...
    }
    // (if root came down the LT branch, its value must be < the target's)
    if (_data[root][_tree[target][DIMENSION]] < _data[target][_tree[target][DIMENSION]]) {
        // ...
    }
    // ...
    // keep walking up until the master parent is reached
    output = output * _walkUpTree(_tree[target][PARENT], i, root);
    // ...
}

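// removePoint(target): deletes a point from the tree.  The code below distinguishes a leaf
// (no daughters), a node with one subtree (the subtree is spliced into the target's place),
// and a node with two subtrees (one subtree is promoted and the other is re-inserted node by
// node).  Afterwards every stored index greater than 'target' is decremented so that the
// point arrays stay contiguous.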
template <typename T>
void KdTree<T>::removePoint(int target) {
    if (target < 0 || target >= _npts) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You are trying to remove a point that doesn't exist from KdTree\n");
    }
    if (_npts == 1) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "There is only one point left in this KdTree. You cannot call removePoint\n");
    }

    int nl, nr, i, j, k, side;
    // ...

    // count the points in each of the target's subtrees
    if (_tree[target][LT] >= 0) {
        _count(_tree[target][LT], &nl);
    }
    if (_tree[target][GEQ] >= 0) {
        _count(_tree[target][GEQ], &nr);
    }

    if (nl == 0 && nr == 0) {
        // the target is a leaf; just blank out its parent's pointer to it
        k = _tree[target][PARENT];
        if (_tree[k][LT] == target)
            _tree[k][LT] = -1;
        else if (_tree[k][GEQ] == target)
            _tree[k][GEQ] = -1;
        // ...
    } else if ((nl == 0 && nr > 0) || (nr == 0 && nl > 0)) {
        // the target has exactly one subtree; splice that subtree into the target's place
        // ...
        if (_tree[k][LT] == target) {
            _tree[k][LT] = _tree[target][side];
            _tree[_tree[k][LT]][PARENT] = k;
        } else {
            _tree[k][GEQ] = _tree[target][side];
            _tree[_tree[k][GEQ]][PARENT] = k;
        }
        // ... (if the target was the master parent, its subtree takes over)
        _masterParent = _tree[target][side];
        // ...
    } else {
        // the target has two subtrees: one is promoted into the target's place...
        // ...
        _masterParent = _tree[target][side];
        _tree[_masterParent][PARENT] = -1;
        // ...
        if (_tree[k][LT] == target) {
            _tree[k][LT] = _tree[target][side];
            _tree[_tree[k][LT]][PARENT] = k;
        } else {
            _tree[k][GEQ] = _tree[target][side];
            _tree[_tree[k][GEQ]][PARENT] = k;
        }
        // ...and the other subtree is re-inserted node by node
        root = _tree[target][3 - side];
        // ...
    }

    // shift the remaining points down so the arrays stay contiguous
    for (i = target + 1; i < _npts; i++) {
        for (j = 0; j < 4; j++) _tree[i - 1][j] = _tree[i][j];
        for (j = 0; j < _dimensions; j++) _data[i - 1][j] = _data[i][j];
    }

    // decrement every stored index that pointed past the removed point
    for (i = 0; i < _npts; i++) {
        for (j = 1; j < 4; j++) {
            if (_tree[i][j] > target) _tree[i][j]--;
        }
    }
    if (_masterParent > target) _masterParent--;
    // ...
}

template <typename T>
void KdTree<T>::_count(int where, int *ct) const {
    // increment *ct for this node, then recurse into both daughters
    // ...
    if (_tree[where][LT] >= 0) {
        _count(_tree[where][LT], ct);
    }
    if (_tree[where][GEQ] >= 0) {
        _count(_tree[where][GEQ], ct);
    }
}

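// _reassign(target) re-inserts an orphaned node by walking down from _masterParent until it
// finds an empty daughter slot, and _descend(root) recursively visits a subtree (daughters
// first), presumably handing each node back to _reassign() so that the subtree detached by
// removePoint() gets rebuilt.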
template <typename T>
void KdTree<T>::_reassign(int target) {
    // ...
    where = _masterParent;
    if (_data[target][_tree[where][DIMENSION]] < _data[where][_tree[where][DIMENSION]])
        dir = LT;
    else
        dir = GEQ;

    k = _tree[where][dir];
    // ... (keep descending until an empty daughter slot is found)
    if (_data[target][_tree[where][DIMENSION]] < _data[where][_tree[where][DIMENSION]])
        dir = LT;
    else
        dir = GEQ;
    k = _tree[where][dir];
    // ...

    _tree[where][dir] = target;
    // ...
    _tree[target][DIMENSION] = _tree[where][DIMENSION] + 1;
    if (_tree[target][DIMENSION] == _dimensions) _tree[target][DIMENSION] = 0;
}

template <typename T>
void KdTree<T>::_descend(int root) {
    if (_tree[root][LT] >= 0) _descend(_tree[root][LT]);
    if (_tree[root][GEQ] >= 0) _descend(_tree[root][GEQ]);
    // ...
}

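// _distance(p1, p2): Euclidean distance between two points; the loop below accumulates the
// squared differences, and the (omitted) return presumably takes the square root of the sum.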
template <typename T>
double KdTree<T>::_distance(ndarray::Array<const T, 1, 1> const &p1,
                            ndarray::Array<const T, 1, 1> const &p2) const {
    // ...
    dd = p1.template getSize<0>();

    for (i = 0; i < dd; i++) ans += (p1[i] - p2[i]) * (p1[i] - p2[i]);
    // ...
}

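// GaussianProcess<T> constructors.  Each variant stores the covariogram, copies the function
// values into _function (an _npts x _nFunctions array), and loads the data points into the
// KD tree.  The variants that also take mn/mx arrays first rescale every coordinate to
// (value - min)/(max - min) so that all dimensions carry comparable weight in the
// nearest-neighbor search; _useMaxMin records that this normalization is in effect.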
template <typename T>
GaussianProcess<T>::GaussianProcess(/* dataIn, ff, covarIn */) {
    // ...
    _covariogram = covarIn;

    _npts = dataIn.template getSize<0>();
    _dimensions = dataIn.template getSize<1>();
    // ...
    _function = allocate(ndarray::makeVector(_npts, 1));

    if (ff.getNumElements() != static_cast<ndarray::Size>(_npts)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You did not pass in the same number of data points as function values\n");
    }

    for (i = 0; i < _npts; i++) _function[i][0] = ff[i];

    _krigingParameter = T(1.0);
    // ...
    _kdTree.Initialize(dataIn);

    _npts = _kdTree.getNPoints();
    // ...
}

template <typename T>
GaussianProcess<T>::GaussianProcess(/* dataIn, mn, */ ndarray::Array<T, 1, 1> const &mx,
                                    ndarray::Array<T, 1, 1> const &ff, /* covarIn */) {
    ndarray::Array<T, 2, 2> normalizedData;

    _covariogram = covarIn;

    _npts = dataIn.template getSize<0>();
    _dimensions = dataIn.template getSize<1>();
    // ...
    if (ff.getNumElements() != static_cast<ndarray::Size>(_npts)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You did not pass in the same number of data points as function values\n");
    }

    if (mn.getNumElements() != static_cast<ndarray::Size>(_dimensions) ||
        mx.getNumElements() != static_cast<ndarray::Size>(_dimensions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your min/max values have different dimensionality than your data points\n");
    }

    _krigingParameter = T(1.0);
    // ...
    _krigingParameter = T(1.0);

    _max = allocate(ndarray::makeVector(_dimensions));
    _min = allocate(ndarray::makeVector(_dimensions));
    // ... (copy mn and mx into _min and _max)
    normalizedData = allocate(ndarray::makeVector(_npts, _dimensions));
    for (i = 0; i < _npts; i++) {
        for (j = 0; j < _dimensions; j++) {
            normalizedData[i][j] = (dataIn[i][j] - _min[j]) / (_max[j] - _min[j]);
        }
    }

    _kdTree.Initialize(normalizedData);

    _npts = _kdTree.getNPoints();
    // ...
    _function = allocate(ndarray::makeVector(_npts, 1));
    for (i = 0; i < _npts; i++) _function[i][0] = ff[i];
}

template <typename T>
GaussianProcess<T>::GaussianProcess(/* dataIn, ff, covarIn */) {
    // ...
    _covariogram = covarIn;

    _npts = dataIn.template getSize<0>();
    _dimensions = dataIn.template getSize<1>();
    // ...
    if (ff.template getSize<0>() != static_cast<ndarray::Size>(_npts)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You did not pass in the same number of data points as function values\n");
    }

    _nFunctions = ff.template getSize<1>();
    _function = allocate(ndarray::makeVector(_npts, _nFunctions));
    _function.deep() = ff;

    _krigingParameter = T(1.0);
    // ...
    _kdTree.Initialize(dataIn);

    _npts = _kdTree.getNPoints();
    // ...
}

template <typename T>
GaussianProcess<T>::GaussianProcess(/* dataIn, mn, */ ndarray::Array<T, 1, 1> const &mx,
                                    ndarray::Array<T, 2, 2> const &ff, /* covarIn */) {
    ndarray::Array<T, 2, 2> normalizedData;

    _covariogram = covarIn;

    _npts = dataIn.template getSize<0>();
    _dimensions = dataIn.template getSize<1>();
    // ...
    if (ff.template getSize<0>() != static_cast<ndarray::Size>(_npts)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You did not pass in the same number of data points as function values\n");
    }

    if (mn.getNumElements() != static_cast<ndarray::Size>(_dimensions) ||
        mx.getNumElements() != static_cast<ndarray::Size>(_dimensions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your min/max values have different dimensionality than your data points\n");
    }

    _krigingParameter = T(1.0);
    // ...
    _krigingParameter = T(1.0);

    _max = allocate(ndarray::makeVector(_dimensions));
    _min = allocate(ndarray::makeVector(_dimensions));
    // ... (copy mn and mx into _min and _max)
    normalizedData = allocate(ndarray::makeVector(_npts, _dimensions));
    for (i = 0; i < _npts; i++) {
        for (j = 0; j < _dimensions; j++) {
            normalizedData[i][j] = (dataIn[i][j] - _min[j]) / (_max[j] - _min[j]);
        }
    }

    _kdTree.Initialize(normalizedData);
    _npts = _kdTree.getNPoints();
    _nFunctions = ff.template getSize<1>();
    _function = allocate(ndarray::makeVector(_npts, _nFunctions));
    _function.deep() = ff;
}

template <typename T>
// (short member function omitted in this listing)

template <typename T>
// (short member function omitted in this listing)

template <typename T>
void GaussianProcess<T>::getData(/* pts, fn, */ ndarray::Array<int, 1, 1> indices) const {
    if (_nFunctions != 1) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your function value array does not have enough room for all of the functions "
                          "in your GaussianProcess.\n");
    }
    if (pts.template getSize<1>() != static_cast<ndarray::Size>(_dimensions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your pts array is constructed for points of the wrong dimensionality.\n");
    }
    if (pts.template getSize<0>() != indices.getNumElements()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You did not put enough room in your pts array to fit all the points "
                          "you asked for in your indices array.\n");
    }
    if (fn.template getSize<0>() != indices.getNumElements()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You did not provide enough room in your function value array "
                          "for all of the points you requested in your indices array.\n");
    }

    for (ndarray::Size i = 0; i < indices.template getSize<0>(); i++) {
        pts[i] = _kdTree.getData(indices[i]);
        fn[i] = _function[indices[i]][0];
        if (_useMaxMin == 1) {
            // undo the min/max normalization before handing the point back
            for (int j = 0; j < _dimensions; j++) {
                pts[i][j] *= (_max[j] - _min[j]);
                pts[i][j] += _min[j];
            }
        }
    }
}

template <typename T>
void GaussianProcess<T>::getData(/* pts, fn, */ ndarray::Array<int, 1, 1> indices) const {
    if (fn.template getSize<1>() != static_cast<ndarray::Size>(_nFunctions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your function value array does not have enough room for all of the functions "
                          "in your GaussianProcess.\n");
    }
    if (pts.template getSize<1>() != static_cast<ndarray::Size>(_dimensions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your pts array is constructed for points of the wrong dimensionality.\n");
    }
    if (pts.template getSize<0>() != indices.getNumElements()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You did not put enough room in your pts array to fit all the points "
                          "you asked for in your indices array.\n");
    }
    if (fn.template getSize<0>() != indices.getNumElements()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You did not provide enough room in your function value array "
                          "for all of the points you requested in your indices array.\n");
    }

    for (ndarray::Size i = 0; i < indices.template getSize<0>(); i++) {
        pts[i] = _kdTree.getData(indices[i]);
        for (int j = 0; j < _nFunctions; j++) {
            fn[i][j] = _function[indices[i]][j];
        }
        if (_useMaxMin == 1) {
            for (int j = 0; j < _dimensions; j++) {
                pts[i][j] *= (_max[j] - _min[j]);
                pts[i][j] += _min[j];
            }
        }
    }
}

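// interpolate(variance, vin, numberOfNeighbors): kriging estimate of the function at vin
// using only its numberOfNeighbors nearest data points.  With the neighbor covariance matrix
// C_ij = k(x_i, x_j) + lambda*delta_ij, the query covariances k_i = k(vin, x_i), and the mean
// fbar of the neighbors' function values, the code below solves C*xx = (f - fbar) with an
// LDLT factorization and returns
//     mu       = fbar + sum_i k_i * xx_i
//     variance = k(vin, vin) + lambda - sum_i k_i * [C^{-1} k]_i
// which is standard Gaussian-process regression restricted to the local neighborhood.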
template <typename T>
T GaussianProcess<T>::interpolate(/* variance, vin, */ int numberOfNeighbors) const {
    if (_nFunctions > 1) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You need to call the version of GaussianProcess.interpolate() "
                          "that accepts mu and variance arrays (which it populates with results). "
                          "You are interpolating more than one function.");
    }
    if (numberOfNeighbors <= 0) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for zero or negative number of neighbors\n");
    }
    if (numberOfNeighbors > _kdTree.getNPoints()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for more neighbors than you have data points\n");
    }
    if (variance.getNumElements() != static_cast<ndarray::Size>(_nFunctions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your variance array is the incorrect size for the number "
                          "of functions you are trying to interpolate\n");
    }
    if (vin.getNumElements() != static_cast<ndarray::Size>(_dimensions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You are interpolating at a point with different dimensionality than you data\n");
    }
    // ...
    ndarray::Array<T, 1, 1> covarianceTestPoint;
    ndarray::Array<int, 1, 1> neighbors;
    ndarray::Array<double, 1, 1> neighborDistances, vv;

    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> covariance, bb, xx;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;
    // ...
    bb.resize(numberOfNeighbors, 1);
    xx.resize(numberOfNeighbors, 1);
    covariance.resize(numberOfNeighbors, numberOfNeighbors);
    covarianceTestPoint = allocate(ndarray::makeVector(numberOfNeighbors));
    neighbors = allocate(ndarray::makeVector(numberOfNeighbors));
    neighborDistances = allocate(ndarray::makeVector(numberOfNeighbors));

    vv = allocate(ndarray::makeVector(_dimensions));
    if (_useMaxMin == 1) {
        // rescale the query point the same way the data were rescaled
        // ...
        for (i = 0; i < _dimensions; i++) vv[i] = (vin[i] - _min[i]) / (_max[i] - _min[i]);
    }
    // ...

    _kdTree.findNeighbors(neighbors, neighborDistances, vv, numberOfNeighbors);

    _timer.addToSearch();

    // fbar is the mean of the function over the neighbors
    for (i = 0; i < numberOfNeighbors; i++) fbar += _function[neighbors[i]][0];
    fbar = fbar / double(numberOfNeighbors);

    // build the neighbor covariance matrix and the query covariance vector
    for (i = 0; i < numberOfNeighbors; i++) {
        covarianceTestPoint[i] = (*_covariogram)(vv, _kdTree.getData(neighbors[i]));
        covariance(i, i) =
            (*_covariogram)(_kdTree.getData(neighbors[i]), _kdTree.getData(neighbors[i])) + _lambda;
        for (j = i + 1; j < numberOfNeighbors; j++) {
            covariance(i, j) = (*_covariogram)(_kdTree.getData(neighbors[i]), _kdTree.getData(neighbors[j]));
            // ...
        }
    }

    _timer.addToIteration();

    // solve covariance * xx = (f - fbar) with an LDLT factorization
    // ...
    for (i = 0; i < numberOfNeighbors; i++) bb(i, 0) = _function[neighbors[i]][0] - fbar;
    xx = ldlt.solve(bb);
    _timer.addToEigen();

    // ...
    for (i = 0; i < numberOfNeighbors; i++) {
        mu += covarianceTestPoint[i] * xx(i, 0);
    }

    _timer.addToIteration();

    variance(0) = (*_covariogram)(vv, vv) + _lambda;

    for (i = 0; i < numberOfNeighbors; i++) bb(i) = covarianceTestPoint[i];

    xx = ldlt.solve(bb);

    for (i = 0; i < numberOfNeighbors; i++) {
        variance(0) -= covarianceTestPoint[i] * xx(i, 0);
    }
    // ...
    _timer.addToVariance();
    _timer.addToTotal(1);
    // ...
}

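// The array version of interpolate() repeats the calculation above for each of the
// _nFunctions function values.  Because the covariance matrix depends only on the point
// positions, a single LDLT factorization is reused for every function, and only one variance
// is actually computed (stored in variance[0]); the covariance does not depend on the
// function values, so the same variance applies to every function.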
template <typename T>
void GaussianProcess<T>::interpolate(/* mu, variance, */ ndarray::Array<T, 1, 1> const &vin,
                                     int numberOfNeighbors) const {
    if (numberOfNeighbors <= 0) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for zero or negative number of neighbors\n");
    }
    if (numberOfNeighbors > _kdTree.getNPoints()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for more neighbors than you have data points\n");
    }
    if (vin.getNumElements() != static_cast<ndarray::Size>(_dimensions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You are interpolating at a point with different dimensionality than you data\n");
    }
    if (mu.getNumElements() != static_cast<ndarray::Size>(_nFunctions) ||
        variance.getNumElements() != static_cast<ndarray::Size>(_nFunctions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your mu and/or var arrays are improperly sized for the number of functions "
                          "you are interpolating\n");
    }
    // ...
    ndarray::Array<T, 1, 1> covarianceTestPoint;
    ndarray::Array<int, 1, 1> neighbors;
    ndarray::Array<double, 1, 1> neighborDistances, vv;

    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> covariance, bb, xx;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;
    // ...
    bb.resize(numberOfNeighbors, 1);
    xx.resize(numberOfNeighbors, 1);
    covariance.resize(numberOfNeighbors, numberOfNeighbors);
    covarianceTestPoint = allocate(ndarray::makeVector(numberOfNeighbors));
    neighbors = allocate(ndarray::makeVector(numberOfNeighbors));
    neighborDistances = allocate(ndarray::makeVector(numberOfNeighbors));

    vv = allocate(ndarray::makeVector(_dimensions));

    if (_useMaxMin == 1) {
        // rescale the query point the same way the data were rescaled
        // ...
        for (i = 0; i < _dimensions; i++) vv[i] = (vin[i] - _min[i]) / (_max[i] - _min[i]);
    }
    // ...

    _kdTree.findNeighbors(neighbors, neighborDistances, vv, numberOfNeighbors);

    _timer.addToSearch();

    for (i = 0; i < numberOfNeighbors; i++) {
        covarianceTestPoint[i] = (*_covariogram)(vv, _kdTree.getData(neighbors[i]));
        covariance(i, i) =
            (*_covariogram)(_kdTree.getData(neighbors[i]), _kdTree.getData(neighbors[i])) + _lambda;
        for (j = i + 1; j < numberOfNeighbors; j++) {
            covariance(i, j) = (*_covariogram)(_kdTree.getData(neighbors[i]), _kdTree.getData(neighbors[j]));
            // ...
        }
    }

    _timer.addToIteration();
    // ...
    // one LDLT factorization is shared by all of the functions
    for (ii = 0; ii < _nFunctions; ii++) {
        // ...
        for (i = 0; i < numberOfNeighbors; i++) fbar += _function[neighbors[i]][ii];
        fbar = fbar / double(numberOfNeighbors);

        for (i = 0; i < numberOfNeighbors; i++) bb(i, 0) = _function[neighbors[i]][ii] - fbar;
        xx = ldlt.solve(bb);
        // ...
        for (i = 0; i < numberOfNeighbors; i++) {
            mu[ii] += covarianceTestPoint[i] * xx(i, 0);
        }
    }

    _timer.addToEigen();

    variance[0] = (*_covariogram)(vv, vv) + _lambda;

    for (i = 0; i < numberOfNeighbors; i++) bb(i) = covarianceTestPoint[i];

    xx = ldlt.solve(bb);

    for (i = 0; i < numberOfNeighbors; i++) {
        variance[0] -= covarianceTestPoint[i] * xx(i, 0);
    }
    // ...
    _timer.addToVariance();
    _timer.addToTotal(1);
}

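// selfInterpolate(): the same kriging calculation, but the query point is one of the data
// points themselves (index dex).  The neighbor search asks for numberOfNeighbors + 1 points
// and discards the first one, which must be the point itself, so the interpolation uses only
// the genuinely distinct neighbors.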
template <typename T>
T GaussianProcess<T>::selfInterpolate(/* variance, dex, */ int numberOfNeighbors) const {
    if (_nFunctions > 1) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You need to call the version of GaussianProcess.selfInterpolate() "
                          "that accepts mu and variance arrays (which it populates with results). "
                          "You are interpolating more than one function.");
    }
    if (variance.getNumElements() != static_cast<ndarray::Size>(_nFunctions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your variance array is the incorrect size for the number "
                          "of functions you are trying to interpolate\n");
    }
    if (numberOfNeighbors <= 0) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for zero or negative number of neighbors\n");
    }
    if (numberOfNeighbors + 1 > _kdTree.getNPoints()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for more neighbors than you have data points\n");
    }
    if (dex < 0 || dex >= _kdTree.getNPoints()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked to self interpolate on a point that does not exist\n");
    }
    // ...
    ndarray::Array<T, 1, 1> covarianceTestPoint;
    ndarray::Array<int, 1, 1> selfNeighbors;
    ndarray::Array<double, 1, 1> selfDistances;
    ndarray::Array<int, 1, 1> neighbors;
    ndarray::Array<double, 1, 1> neighborDistances;

    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> covariance, bb, xx;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;
    // ...
    bb.resize(numberOfNeighbors, 1);
    xx.resize(numberOfNeighbors, 1);
    covariance.resize(numberOfNeighbors, numberOfNeighbors);
    covarianceTestPoint = allocate(ndarray::makeVector(numberOfNeighbors));
    neighbors = allocate(ndarray::makeVector(numberOfNeighbors));
    neighborDistances = allocate(ndarray::makeVector(numberOfNeighbors));

    selfNeighbors = allocate(ndarray::makeVector(numberOfNeighbors + 1));
    selfDistances = allocate(ndarray::makeVector(numberOfNeighbors + 1));

    // ask for one extra neighbor, because the nearest neighbor will be the point itself
    _kdTree.findNeighbors(selfNeighbors, selfDistances, _kdTree.getData(dex), numberOfNeighbors + 1);

    _timer.addToSearch();

    if (selfNeighbors[0] != dex) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Nearest neighbor search in selfInterpolate did not find self\n");
    }
    // ...
    // drop the point itself from the neighbor list
    for (i = 0; i < numberOfNeighbors; i++) {
        neighbors[i] = selfNeighbors[i + 1];
        neighborDistances[i] = selfDistances[i + 1];
    }

    for (i = 0; i < numberOfNeighbors; i++) fbar += _function[neighbors[i]][0];
    fbar = fbar / double(numberOfNeighbors);

    for (i = 0; i < numberOfNeighbors; i++) {
        covarianceTestPoint[i] = (*_covariogram)(_kdTree.getData(dex), _kdTree.getData(neighbors[i]));
        covariance(i, i) =
            (*_covariogram)(_kdTree.getData(neighbors[i]), _kdTree.getData(neighbors[i])) + _lambda;
        for (j = i + 1; j < numberOfNeighbors; j++) {
            covariance(i, j) = (*_covariogram)(_kdTree.getData(neighbors[i]), _kdTree.getData(neighbors[j]));
            // ...
        }
    }
    _timer.addToIteration();
    // ...
    for (i = 0; i < numberOfNeighbors; i++) bb(i, 0) = _function[neighbors[i]][0] - fbar;
    xx = ldlt.solve(bb);
    _timer.addToEigen();
    // ...
    for (i = 0; i < numberOfNeighbors; i++) {
        mu += covarianceTestPoint[i] * xx(i, 0);
    }

    variance(0) = (*_covariogram)(_kdTree.getData(dex), _kdTree.getData(dex)) + _lambda;

    for (i = 0; i < numberOfNeighbors; i++) bb(i) = covarianceTestPoint[i];

    xx = ldlt.solve(bb);

    for (i = 0; i < numberOfNeighbors; i++) {
        variance(0) -= covarianceTestPoint[i] * xx(i, 0);
    }
    // ...
    _timer.addToVariance();
    _timer.addToTotal(1);
    // ...
}

template <typename T>
void GaussianProcess<T>::selfInterpolate(/* mu, variance, */ int dex, int numberOfNeighbors) const {
    if (mu.getNumElements() != static_cast<ndarray::Size>(_nFunctions) ||
        variance.getNumElements() != static_cast<ndarray::Size>(_nFunctions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your mu and/or var arrays are improperly sized for the number of functions "
                          "you are interpolating\n");
    }
    if (numberOfNeighbors <= 0) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for zero or negative number of neighbors\n");
    }
    if (numberOfNeighbors + 1 > _kdTree.getNPoints()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for more neighbors than you have data points\n");
    }
    if (dex < 0 || dex >= _kdTree.getNPoints()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked to self interpolate on a point that does not exist\n");
    }
    // ...
    ndarray::Array<T, 1, 1> covarianceTestPoint;
    ndarray::Array<int, 1, 1> selfNeighbors;
    ndarray::Array<double, 1, 1> selfDistances;
    ndarray::Array<int, 1, 1> neighbors;
    ndarray::Array<double, 1, 1> neighborDistances;

    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> covariance, bb, xx;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;
    // ...
    bb.resize(numberOfNeighbors, 1);
    xx.resize(numberOfNeighbors, 1);
    covariance.resize(numberOfNeighbors, numberOfNeighbors);
    covarianceTestPoint = allocate(ndarray::makeVector(numberOfNeighbors));
    neighbors = allocate(ndarray::makeVector(numberOfNeighbors));
    neighborDistances = allocate(ndarray::makeVector(numberOfNeighbors));

    selfNeighbors = allocate(ndarray::makeVector(numberOfNeighbors + 1));
    selfDistances = allocate(ndarray::makeVector(numberOfNeighbors + 1));

    // ask for one extra neighbor, because the nearest neighbor will be the point itself
    _kdTree.findNeighbors(selfNeighbors, selfDistances, _kdTree.getData(dex), numberOfNeighbors + 1);

    _timer.addToSearch();

    if (selfNeighbors[0] != dex) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Nearest neighbor search in selfInterpolate did not find self\n");
    }
    // ...
    // drop the point itself from the neighbor list
    for (i = 0; i < numberOfNeighbors; i++) {
        neighbors[i] = selfNeighbors[i + 1];
        neighborDistances[i] = selfDistances[i + 1];
    }

    for (i = 0; i < numberOfNeighbors; i++) {
        covarianceTestPoint[i] = (*_covariogram)(_kdTree.getData(dex), _kdTree.getData(neighbors[i]));
        covariance(i, i) =
            (*_covariogram)(_kdTree.getData(neighbors[i]), _kdTree.getData(neighbors[i])) + _lambda;
        for (j = i + 1; j < numberOfNeighbors; j++) {
            covariance(i, j) = (*_covariogram)(_kdTree.getData(neighbors[i]), _kdTree.getData(neighbors[j]));
            // ...
        }
    }
    _timer.addToIteration();
    // ...
    for (ii = 0; ii < _nFunctions; ii++) {
        // ...
        for (i = 0; i < numberOfNeighbors; i++) fbar += _function[neighbors[i]][ii];
        fbar = fbar / double(numberOfNeighbors);

        for (i = 0; i < numberOfNeighbors; i++) bb(i, 0) = _function[neighbors[i]][ii] - fbar;
        xx = ldlt.solve(bb);
        // ...
        for (i = 0; i < numberOfNeighbors; i++) {
            mu[ii] += covarianceTestPoint[i] * xx(i, 0);
        }
    }
    _timer.addToEigen();

    variance[0] = (*_covariogram)(_kdTree.getData(dex), _kdTree.getData(dex)) + _lambda;

    for (i = 0; i < numberOfNeighbors; i++) bb(i) = covarianceTestPoint[i];

    xx = ldlt.solve(bb);

    for (i = 0; i < numberOfNeighbors; i++) {
        variance[0] -= covarianceTestPoint[i] * xx(i, 0);
    }
    // ...
    _timer.addToVariance();
    _timer.addToTotal(1);
}

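// batchInterpolate(): interpolates at many query points at once using *all* of the data
// points rather than a nearest-neighbor subset.  The full _npts x _npts covariance matrix is
// built and factorized once with a single LDLT decomposition, the solve against (f - fbar)
// is shared by every query, and the per-query work reduces to evaluating the covariogram
// between the query point and each data point.  Variances cost one extra linear solve per
// query.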
template <typename T>
void GaussianProcess<T>::batchInterpolate(/* mu, variance, */ ndarray::Array<T, 2, 2> const &queries) const {
    ndarray::Size nQueries = queries.template getSize<0>();

    if (_nFunctions != 1) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your mu and variance arrays do not have room for all of the functions "
                          "as you are trying to interpolate\n");
    }
    if (mu.getNumElements() != nQueries || variance.getNumElements() != nQueries) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your mu and variance arrays do not have room for all of the points "
                          "at which you are trying to interpolate your function.\n");
    }
    if (queries.template getSize<1>() != static_cast<ndarray::Size>(_dimensions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "The points you passed to batchInterpolate are of the wrong "
                          "dimensionality for your Gaussian Process\n");
    }
    // ...
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> batchCovariance, batchbb, batchxx;
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> queryCovariance;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;

    ndarray::Array<T, 1, 1> v1;
    // ...
    v1 = allocate(ndarray::makeVector(_dimensions));
    batchbb.resize(_npts, 1);
    batchxx.resize(_npts, 1);
    batchCovariance.resize(_npts, _npts);
    queryCovariance.resize(_npts, 1);

    // build the full data-data covariance matrix once
    for (i = 0; i < _npts; i++) {
        batchCovariance(i, i) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(i)) + _lambda;
        for (j = i + 1; j < _npts; j++) {
            batchCovariance(i, j) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(j));
            batchCovariance(j, i) = batchCovariance(i, j);
        }
    }
    _timer.addToIteration();

    ldlt.compute(batchCovariance);

    // ...
    for (i = 0; i < _npts; i++) {
        fbar += _function[i][0];
    }
    fbar = fbar / T(_npts);

    for (i = 0; i < _npts; i++) {
        batchbb(i, 0) = _function[i][0] - fbar;
    }
    batchxx = ldlt.solve(batchbb);
    _timer.addToEigen();

    // interpolated means
    for (ndarray::Size ii = 0; ii < nQueries; ii++) {
        for (i = 0; i < _dimensions; i++) v1[i] = queries[ii][i];
        if (_useMaxMin == 1) {
            for (i = 0; i < _dimensions; i++) v1[i] = (v1[i] - _min[i]) / (_max[i] - _min[i]);
        }
        // ...
        for (i = 0; i < _npts; i++) {
            mu(ii) += batchxx(i) * (*_covariogram)(v1, _kdTree.getData(i));
        }
    }
    _timer.addToIteration();

    // variances: one additional solve per query point
    for (ndarray::Size ii = 0; ii < nQueries; ii++) {
        for (i = 0; i < _dimensions; i++) v1[i] = queries[ii][i];
        if (_useMaxMin == 1) {
            for (i = 0; i < _dimensions; i++) v1[i] = (v1[i] - _min[i]) / (_max[i] - _min[i]);
        }
        // ...
        for (i = 0; i < _npts; i++) {
            batchbb(i, 0) = (*_covariogram)(v1, _kdTree.getData(i));
            queryCovariance(i, 0) = batchbb(i, 0);
        }
        batchxx = ldlt.solve(batchbb);

        variance[ii] = (*_covariogram)(v1, v1) + _lambda;

        for (i = 0; i < _npts; i++) {
            variance[ii] -= queryCovariance(i, 0) * batchxx(i);
        }
    }
    // ...
    _timer.addToVariance();
    _timer.addToTotal(nQueries);
}

template <typename T>
void GaussianProcess<T>::batchInterpolate(/* mu, variance, */ ndarray::Array<T, 2, 2> const &queries) const {
    ndarray::Size nQueries = queries.template getSize<0>();

    if (mu.template getSize<0>() != nQueries || variance.template getSize<0>() != nQueries) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your output arrays do not have room for all of the points at which "
                          "you are interpolating your functions.\n");
    }
    if (mu.template getSize<1>() != static_cast<ndarray::Size>(_nFunctions) ||
        variance.template getSize<1>() != static_cast<ndarray::Size>(_nFunctions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your output arrays do not have room for all of the functions you are "
                          /* ... */);
    }
    if (queries.template getSize<1>() != static_cast<ndarray::Size>(_dimensions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "The points at which you are interpolating your functions have the "
                          "wrong dimensionality.\n");
    }
    // ...
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> batchCovariance, batchbb, batchxx;
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> queryCovariance;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;

    ndarray::Array<T, 1, 1> v1;
    // ...
    v1 = allocate(ndarray::makeVector(_dimensions));
    batchbb.resize(_npts, 1);
    batchxx.resize(_npts, 1);
    batchCovariance.resize(_npts, _npts);
    queryCovariance.resize(_npts, 1);

    for (i = 0; i < _npts; i++) {
        batchCovariance(i, i) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(i)) + _lambda;
        for (j = i + 1; j < _npts; j++) {
            batchCovariance(i, j) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(j));
            batchCovariance(j, i) = batchCovariance(i, j);
        }
    }

    _timer.addToIteration();

    ldlt.compute(batchCovariance);

    _timer.addToEigen();

    for (ifn = 0; ifn < _nFunctions; ifn++) {
        // ...
        for (i = 0; i < _npts; i++) {
            fbar += _function[i][ifn];
        }
        fbar = fbar / T(_npts);
        _timer.addToIteration();

        for (i = 0; i < _npts; i++) {
            batchbb(i, 0) = _function[i][ifn] - fbar;
        }
        batchxx = ldlt.solve(batchbb);
        _timer.addToEigen();

        for (ndarray::Size ii = 0; ii < nQueries; ii++) {
            for (i = 0; i < _dimensions; i++) v1[i] = queries[ii][i];
            if (_useMaxMin == 1) {
                for (i = 0; i < _dimensions; i++) v1[i] = (v1[i] - _min[i]) / (_max[i] - _min[i]);
            }
            // ...
            for (i = 0; i < _npts; i++) {
                mu[ii][ifn] += batchxx(i) * (*_covariogram)(v1, _kdTree.getData(i));
            }
        }
    }

    _timer.addToIteration();
    for (ndarray::Size ii = 0; ii < nQueries; ii++) {
        for (i = 0; i < _dimensions; i++) v1[i] = queries[ii][i];
        if (_useMaxMin == 1) {
            for (i = 0; i < _dimensions; i++) v1[i] = (v1[i] - _min[i]) / (_max[i] - _min[i]);
        }
        // ...
        for (i = 0; i < _npts; i++) {
            batchbb(i, 0) = (*_covariogram)(v1, _kdTree.getData(i));
            queryCovariance(i, 0) = batchbb(i, 0);
        }
        batchxx = ldlt.solve(batchbb);

        variance[ii][0] = (*_covariogram)(v1, v1) + _lambda;

        for (i = 0; i < _npts; i++) {
            variance[ii][0] -= queryCovariance(i, 0) * batchxx(i);
        }
        // ...
    }

    _timer.addToVariance();
    _timer.addToTotal(nQueries);
}

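// The batchInterpolate() overloads below perform the same calculation without the variance
// pass, so each query costs only a covariogram evaluation against every data point instead
// of an additional linear solve.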
template <typename T>
void GaussianProcess<T>::batchInterpolate(/* mu, */ ndarray::Array<T, 2, 2> const &queries) const {
    ndarray::Size nQueries = queries.template getSize<0>();

    if (_nFunctions != 1) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your output array does not have enough room for all of the functions "
                          "you are trying to interpolate.\n");
    }
    if (queries.template getSize<1>() != static_cast<ndarray::Size>(_dimensions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "The points at which you are trying to interpolate your function are "
                          "of the wrong dimensionality.\n");
    }
    if (mu.getNumElements() != nQueries) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your output array does not have enough room for all of the points "
                          "at which you are trying to interpolate your function.\n");
    }
    // ...
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> batchCovariance, batchbb, batchxx;
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> queryCovariance;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;

    ndarray::Array<T, 1, 1> v1;
    // ...
    v1 = allocate(ndarray::makeVector(_dimensions));

    batchbb.resize(_npts, 1);
    batchxx.resize(_npts, 1);
    batchCovariance.resize(_npts, _npts);
    queryCovariance.resize(_npts, 1);

    for (i = 0; i < _npts; i++) {
        batchCovariance(i, i) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(i)) + _lambda;
        for (j = i + 1; j < _npts; j++) {
            batchCovariance(i, j) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(j));
            batchCovariance(j, i) = batchCovariance(i, j);
        }
    }
    _timer.addToIteration();

    ldlt.compute(batchCovariance);

    // ...
    for (i = 0; i < _npts; i++) {
        fbar += _function[i][0];
    }
    fbar = fbar / T(_npts);

    for (i = 0; i < _npts; i++) {
        batchbb(i, 0) = _function[i][0] - fbar;
    }
    batchxx = ldlt.solve(batchbb);
    _timer.addToEigen();

    for (ndarray::Size ii = 0; ii < nQueries; ii++) {
        for (i = 0; i < _dimensions; i++) v1[i] = queries[ii][i];
        if (_useMaxMin == 1) {
            for (i = 0; i < _dimensions; i++) v1[i] = (v1[i] - _min[i]) / (_max[i] - _min[i]);
        }
        // ...
        for (i = 0; i < _npts; i++) {
            mu(ii) += batchxx(i) * (*_covariogram)(v1, _kdTree.getData(i));
        }
    }
    _timer.addToIteration();
    _timer.addToTotal(nQueries);
}

template <typename T>
void GaussianProcess<T>::batchInterpolate(/* mu, */ ndarray::Array<T, 2, 2> const &queries) const {
    ndarray::Size nQueries = queries.template getSize<0>();

    if (mu.template getSize<0>() != nQueries) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your output array does not have enough room for all of the points "
                          "at which you want to interpolate your functions.\n");
    }
    if (mu.template getSize<1>() != static_cast<ndarray::Size>(_nFunctions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Your output array does not have enough room for all of the functions "
                          "you are trying to interpolate.\n");
    }
    if (queries.template getSize<1>() != static_cast<ndarray::Size>(_dimensions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "The points at which you are interpolating your functions do not "
                          "have the correct dimensionality.\n");
    }
    // ...
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> batchCovariance, batchbb, batchxx;
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> queryCovariance;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;

    ndarray::Array<T, 1, 1> v1;
    // ...
    v1 = allocate(ndarray::makeVector(_dimensions));

    batchbb.resize(_npts, 1);
    batchxx.resize(_npts, 1);
    batchCovariance.resize(_npts, _npts);
    queryCovariance.resize(_npts, 1);

    for (i = 0; i < _npts; i++) {
        batchCovariance(i, i) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(i)) + _lambda;
        for (j = i + 1; j < _npts; j++) {
            batchCovariance(i, j) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(j));
            batchCovariance(j, i) = batchCovariance(i, j);
        }
    }
    // ...
    _timer.addToIteration();

    ldlt.compute(batchCovariance);

    _timer.addToEigen();

    for (ifn = 0; ifn < _nFunctions; ifn++) {
        // ...
        for (i = 0; i < _npts; i++) {
            fbar += _function[i][ifn];
        }
        fbar = fbar / T(_npts);

        _timer.addToIteration();

        for (i = 0; i < _npts; i++) {
            batchbb(i, 0) = _function[i][ifn] - fbar;
        }
        batchxx = ldlt.solve(batchbb);
        _timer.addToEigen();

        for (ndarray::Size ii = 0; ii < nQueries; ii++) {
            for (i = 0; i < _dimensions; i++) v1[i] = queries[ii][i];
            if (_useMaxMin == 1) {
                for (i = 0; i < _dimensions; i++) v1[i] = (v1[i] - _min[i]) / (_max[i] - _min[i]);
            }
            // ...
            for (i = 0; i < _npts; i++) {
                mu[ii][ifn] += batchxx(i) * (*_covariogram)(v1, _kdTree.getData(i));
            }
        }
    }
    // ...
    _timer.addToTotal(nQueries);
}

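// GaussianProcess<T>::addPoint(): adds a new data point together with its function value(s).
// The point is re-normalized with the stored _min/_max when normalization is in use, the
// _function array is grown in step with the tree's storage when it is full, and the point is
// then handed to KdTree::addPoint().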
template <typename T>
void GaussianProcess<T>::addPoint(/* vin, f */) {
    if (_nFunctions != 1) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You are calling the wrong addPoint; you need a vector of functions\n");
    }
    if (vin.getNumElements() != static_cast<ndarray::Size>(_dimensions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You are trying to add a point of the wrong dimensionality to "
                          "your GaussianProcess.\n");
    }

    ndarray::Array<T, 1, 1> v;
    v = allocate(ndarray::makeVector(_dimensions));

    for (i = 0; i < _dimensions; i++) {
        v[i] = vin[i];
        if (_useMaxMin == 1) {
            v[i] = (v[i] - _min[i]) / (_max[i] - _min[i]);
        }
    }

    if (_npts == _room) {
        // grow the function-value storage alongside the tree's storage
        ndarray::Array<T, 2, 2> buff;
        buff = allocate(ndarray::makeVector(_npts, _nFunctions));
        buff.deep() = _function;
        // ...
        _function = allocate(ndarray::makeVector(_room, _nFunctions));
        for (i = 0; i < _npts; i++) {
            for (j = 0; j < _nFunctions; j++) {
                _function[i][j] = buff[i][j];
            }
        }
    }
    _function[_npts][0] = f;

    _kdTree.addPoint(v);
    _npts = _kdTree.getNPoints();
}

template <typename T>
void GaussianProcess<T>::addPoint(/* vin, f */) {
    if (vin.getNumElements() != static_cast<ndarray::Size>(_dimensions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You are trying to add a point of the wrong dimensionality to "
                          "your GaussianProcess.\n");
    }
    if (f.template getSize<0>() != static_cast<ndarray::Size>(_nFunctions)) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You are not adding the correct number of function values to "
                          "your GaussianProcess.\n");
    }
    // ...
    ndarray::Array<T, 1, 1> v;
    v = allocate(ndarray::makeVector(_dimensions));

    for (i = 0; i < _dimensions; i++) {
        v[i] = vin[i];
        if (_useMaxMin == 1) {
            v[i] = (v[i] - _min[i]) / (_max[i] - _min[i]);
        }
    }

    if (_npts == _room) {
        ndarray::Array<T, 2, 2> buff;
        buff = allocate(ndarray::makeVector(_npts, _nFunctions));
        buff.deep() = _function;
        // ...
        _function = allocate(ndarray::makeVector(_room, _nFunctions));
        for (i = 0; i < _npts; i++) {
            for (j = 0; j < _nFunctions; j++) {
                _function[i][j] = buff[i][j];
            }
        }
    }
    for (i = 0; i < _nFunctions; i++) _function[_npts][i] = f[i];

    _kdTree.addPoint(v);
    _npts = _kdTree.getNPoints();
}

template <typename T>
void GaussianProcess<T>::removePoint(int dex) {
    // ... (the point is removed from the KD tree, and the stored function values are shifted down)
    for (i = dex; i < _npts; i++) {
        for (j = 0; j < _nFunctions; j++) {
            _function[i][j] = _function[i + 1][j];
        }
    }
    _npts = _kdTree.getNPoints();
}

template <typename T>
void GaussianProcess<T>::setKrigingParameter(T kk) {
    _krigingParameter = kk;
}

template <typename T>
void GaussianProcess<T>::setCovariogram(/* covar */) {
    _covariogram = covar;
}

template <typename T>
// (short member function omitted in this listing)

template <typename T>
// (short member function omitted in this listing)

template <typename T>
// (short member function omitted in this listing)

template <typename T>
T Covariogram<T>::operator()(ndarray::Array<const T, 1, 1> const &p1,
                             ndarray::Array<const T, 1, 1> const &p2) const {
    // the base-class operator is a placeholder; derived covariograms override it
    std::cout << "by the way, you are calling the wrong operator\n";
    // ...
}

template <typename T>
// (short member function omitted in this listing)

template <typename T>
// (short member function omitted in this listing)

template <typename T>
void SquaredExpCovariogram<T>::setEllSquared(double ellSquared) {
    _ellSquared = ellSquared;
}

template <typename T>
T SquaredExpCovariogram<T>::operator()(ndarray::Array<const T, 1, 1> const &p1,
                                       ndarray::Array<const T, 1, 1> const &p2) const {
    // ...
    for (ndarray::Size i = 0; i < p1.template getSize<0>(); i++) {
        d += (p1[i] - p2[i]) * (p1[i] - p2[i]);
    }
    d = d / _ellSquared;
    return T(exp(-0.5 * d));
}

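// i.e. the squared-exponential (Gaussian) covariogram
//     k(p1, p2) = exp(-|p1 - p2|^2 / (2 * _ellSquared)),
// so _ellSquared is the square of the characteristic correlation length.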
template <typename T>
// (short member function omitted in this listing)

template <typename T>
// (short member function omitted in this listing)

template <typename T>
T NeuralNetCovariogram<T>::operator()(ndarray::Array<const T, 1, 1> const &p1,
                                      ndarray::Array<const T, 1, 1> const &p2) const {
    // ...
    double num, denom1, denom2, arg;
    // ...
    dim = p1.template getSize<0>();

    num = 2.0 * _sigma0;
    denom1 = 1.0 + 2.0 * _sigma0;
    denom2 = 1.0 + 2.0 * _sigma0;

    for (i = 0; i < dim; i++) {
        num += 2.0 * p1[i] * p2[i] * _sigma1;
        denom1 += 2.0 * p1[i] * p1[i] * _sigma1;
        denom2 += 2.0 * p2[i] * p2[i] * _sigma1;
    }

    arg = num / ::sqrt(denom1 * denom2);
    return T(2.0 * (::asin(arg)) / 3.141592654);
}

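// i.e. the "neural network" covariogram (cf. Rasmussen & Williams, Gaussian Processes for
// Machine Learning, ch. 4): with s(a, b) = 2 * (_sigma0 + _sigma1 * a.b),
//     k(p1, p2) = (2 / pi) * asin( s(p1, p2) / sqrt((1 + s(p1, p1)) * (1 + s(p2, p2))) ),
// the covariance of a network with one hidden layer of infinitely many units.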
template <typename T>
// (setter omitted in this listing -- presumably NeuralNetCovariogram<T>::setSigma0())

template <typename T>
// (setter omitted in this listing -- presumably NeuralNetCovariogram<T>::setSigma1())

#define INSTANTIATEGP(T)                      \
    template class KdTree<T>;                 \
    template class GaussianProcess<T>;        \
    template class Covariogram<T>;            \
    template class SquaredExpCovariogram<T>;  \
    template class NeuralNetCovariogram<T>;
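// The INSTANTIATEGP macro is presumably invoked at the end of the file for each supported
// numeric type (e.g. INSTANTIATEGP(double);) to emit the explicit template instantiations.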