GaussianProcess.cc

// -*- LSST-C++ -*-

/*
 * LSST Data Management System
 * Copyright 2008, 2009, 2010 LSST Corporation.
 *
 * This product includes software developed by the
 * LSST Project (http://www.lsst.org/).
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the LSST License Statement and
 * the GNU General Public License along with this program. If not,
 * see <http://www.lsstcorp.org/LegalNotices/>.
 */

#include <iostream>
#include <cmath>

#include "lsst/afw/math/GaussianProcess.h"

using namespace std;

namespace lsst {
namespace afw {
namespace math {

GaussianProcessTimer::GaussianProcessTimer()
{
    _interpolationCount = 0;
    _iterationTime = 0.0;
    _eigenTime = 0.0;
    _searchTime = 0.0;
    _varianceTime = 0.0;
    _totalTime = 0.0;
}

void GaussianProcessTimer::reset()
{
    _interpolationCount = 0;
    _iterationTime = 0.0;
    _eigenTime = 0.0;
    _searchTime = 0.0;
    _varianceTime = 0.0;
    _totalTime = 0.0;
}

void GaussianProcessTimer::start()
{
    // DateTime::now().get() returns the current date as a double in days;
    // multiplying by 24*60*60 converts it to seconds
    _before = double(lsst::daf::base::DateTime::now().get()*24*60*60);
    _beginning = _before;
}

void GaussianProcessTimer::addToEigen()
{
    double after;
    after = double(lsst::daf::base::DateTime::now().get()*24*60*60);
    _eigenTime += after - _before;
    _before = after;
}

void GaussianProcessTimer::addToVariance()
{
    double after;
    after = double(lsst::daf::base::DateTime::now().get()*24*60*60);
    _varianceTime += after - _before;
    _before = after;
}

void GaussianProcessTimer::addToSearch()
{
    double after;
    after = double(lsst::daf::base::DateTime::now().get()*24*60*60);
    _searchTime += after - _before;
    _before = after;
}

void GaussianProcessTimer::addToIteration()
{
    double after;
    after = double(lsst::daf::base::DateTime::now().get()*24*60*60);
    _iterationTime += after - _before;
    _before = after;
}

void GaussianProcessTimer::addToTotal(int i)
{
    double after;
    after = double(lsst::daf::base::DateTime::now().get()*24*60*60);
    _totalTime += after - _beginning;
    _interpolationCount += i;
}

void GaussianProcessTimer::display()
{
    std::cout << "\nSearch time " << _searchTime << "\n";
    std::cout << "Eigen time " << _eigenTime << "\n";
    std::cout << "Variance time " << _varianceTime << "\n";
    std::cout << "Iteration time " << _iterationTime << "\n";
    std::cout << "Total time " << _totalTime << "\n";
    std::cout << "Number of interpolations " << _interpolationCount << "\n";
}

template <typename T>
void KdTree<T>::Initialize(ndarray::Array<T, 2, 2> const &dt)
{
    int i;

    _pts = dt.template getSize<0>();
    _dimensions = dt.template getSize<1>();

    // a buffer to use when first building the tree
    _inn = allocate(ndarray::makeVector(_pts));

    _roomStep = 5000;
    _room = _pts;

    _data = allocate(ndarray::makeVector(_room, _dimensions));

    _data.deep() = dt;

    _tree = allocate(ndarray::makeVector(_room, 4));

    for (i = 0; i < _pts; i++) {
        _inn[i] = i;
    }

    _organize(_inn, _pts, -1, -1);

    i = _testTree();
    if (i == 0) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Failed to properly initialize KdTree\n");
    }
}
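
// A minimal usage sketch (illustrative comment only, not part of the original
// file): building a KdTree from a 2-D ndarray of points and asking for the two
// nearest neighbors of a query point. The names (pts, query, neighborIndices,
// neighborDistances) are hypothetical, and a default-constructed KdTree is
// assumed.
//
//     ndarray::Array<double, 2, 2> pts = ndarray::allocate(ndarray::makeVector(100, 3));
//     ndarray::Array<double, 1, 1> query = ndarray::allocate(ndarray::makeVector(3));
//     // ... fill pts with 100 three-dimensional points and query with a test point ...
//
//     lsst::afw::math::KdTree<double> tree;
//     tree.Initialize(pts);
//
//     ndarray::Array<int, 1, 1> neighborIndices = ndarray::allocate(ndarray::makeVector(2));
//     ndarray::Array<double, 1, 1> neighborDistances = ndarray::allocate(ndarray::makeVector(2));
//     tree.findNeighbors(neighborIndices, neighborDistances, query, 2);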

template <typename T>
void KdTree<T>::findNeighbors(ndarray::Array<int, 1, 1> neighdex,
                              ndarray::Array<double, 1, 1> dd,
                              ndarray::Array<const T, 1, 1> const &v,
                              int n_nn) const
{
    int i, start;

    _neighborCandidates = allocate(ndarray::makeVector(n_nn));
    _neighborDistances = allocate(ndarray::makeVector(n_nn));
    _neighborsFound = 0;
    _neighborsWanted = n_nn;

    for (i = 0; i < n_nn; i++) _neighborDistances[i] = -1.0;

    start = _findNode(v);

    _neighborDistances[0] = _distance(v, _data[start]);
    _neighborCandidates[0] = start;
    _neighborsFound = 1;

    for (i = 1; i < 4; i++) {
        if (_tree[start][i] >= 0) {
            _lookForNeighbors(v, _tree[start][i], start);
        }
    }

    for (i = 0; i < n_nn; i++) {
        neighdex[i] = _neighborCandidates[i];
        dd[i] = _neighborDistances[i];
    }
}

template <typename T>
T KdTree<T>::getData(int ipt, int idim) const
{
    return _data[ipt][idim];
}

template <typename T>
ndarray::Array<T, 1, 1> KdTree<T>::getData(int ipt) const
{
    return _data[ipt];
}

template <typename T>
void KdTree<T>::addPoint(ndarray::Array<const T, 1, 1> const &v)
{
    int i, j, node, dim, dir;

    node = _findNode(v);
    dim = _tree[node][DIMENSION] + 1;
    if (dim == _dimensions) dim = 0;

    if (_pts == _room) {

        ndarray::Array<T, 2, 2> dbuff = allocate(ndarray::makeVector(_pts, _dimensions));
        ndarray::Array<int, 2, 2> tbuff = allocate(ndarray::makeVector(_pts, 4));

        dbuff.deep() = _data;
        tbuff.deep() = _tree;

        _room += _roomStep;

        _tree = allocate(ndarray::makeVector(_room, 4));
        _data = allocate(ndarray::makeVector(_room, _dimensions));

        for (i = 0; i < _pts; i++) {
            for (j = 0; j < _dimensions; j++) _data[i][j] = dbuff[i][j];
            for (j = 0; j < 4; j++) _tree[i][j] = tbuff[i][j];
        }
    }

    _tree[_pts][DIMENSION] = dim;
    _tree[_pts][PARENT] = node;
    i = _tree[node][DIMENSION];

    if (_data[node][i] > v[i]) {
        if (_tree[node][LT] >= 0) {
            throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                              "Trying to add to KdTree in a node that is already occupied\n");
        }
        _tree[node][LT] = _pts;
        dir = LT;
    }
    else {
        if (_tree[node][GEQ] >= 0) {
            throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                              "Trying to add to KdTree in a node that is already occupied\n");
        }
        _tree[node][GEQ] = _pts;
        dir = GEQ;
    }
    _tree[_pts][LT] = -1;
    _tree[_pts][GEQ] = -1;
    for (i = 0; i < _dimensions; i++) {
        _data[_pts][i] = v[i];
    }

    _pts++;

    i = _walkUpTree(_tree[_pts - 1][PARENT], dir, _pts - 1);
    if (i != _masterParent) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Adding to KdTree failed\n");
    }
}

template <typename T>
int KdTree<T>::getPoints() const
{
    return _pts;
}

template <typename T>
void KdTree<T>::getTreeNode(ndarray::Array<int, 1, 1> const &v,
                            int dex) const
{
    v[0] = _tree[dex][DIMENSION];
    v[1] = _tree[dex][LT];
    v[2] = _tree[dex][GEQ];
    v[3] = _tree[dex][PARENT];
}

template <typename T>
int KdTree<T>::_testTree() const
{
    int i, j, output;
    std::vector<int> isparent;

    output = 1;

    for (i = 0; i < _pts; i++) isparent.push_back(0);

    j = 0;
    for (i = 0; i < _pts; i++) {
        if (_tree[i][PARENT] < 0) j++;
    }
    if (j != 1) {
        std::cout << "_tree FAILURE " << j << " _masterParents\n";
        return 0;
    }

    for (i = 0; i < _pts; i++) {
        if (_tree[i][PARENT] >= 0) {
            isparent[_tree[i][PARENT]]++;
        }
    }

    for (i = 0; i < _pts; i++) {
        if (isparent[i] > 2) {
            std::cout << "_tree FAILURE " << i << " is parent to " << isparent[i] << "\n";
            return 0;
        }
    }

    for (i = 0; i < _pts; i++) {
        if (_tree[i][PARENT] >= 0) {
            if (_tree[_tree[i][PARENT]][LT] == i) j = LT;
            else j = GEQ;
            output = _walkUpTree(_tree[i][PARENT], j, i);

            if (output != _masterParent) {
                return 0;
            }
        }
    }

    if (output != _masterParent) return 0;
    else return 1;
}

template <typename T>
void KdTree<T>::_organize(ndarray::Array<int, 1, 1> const &use,
                          int ct,
                          int parent,
                          int dir)
{
    int i, j, k, l, idim, daughter;
    T mean, var, varbest;

    std::vector<std::vector<T> > toSort;
    std::vector<T> toSortElement;

    if (ct > 1) {
        // below is code to choose the dimension on which the available points
        // have the greatest variance. This will be the dimension on which
        // the daughter node splits the data
        idim = 0;
        varbest = -1.0;
        for (i = 0; i < _dimensions; i++) {
            mean = 0.0;
            var = 0.0;
            for (j = 0; j < ct; j++) {
                mean += _data[use[j]][i];
                var += _data[use[j]][i]*_data[use[j]][i];
            }
            mean = mean/double(ct);
            var = var/double(ct) - mean*mean;
            if (i == 0 || var > varbest ||
                (var == varbest && parent >= 0 && i > _tree[parent][DIMENSION])) {
                idim = i;
                varbest = var;
            }
        }  // for (i = 0; i < _dimensions; i++)

        // we need to sort the available data points according to their idim-th element,
        // but we need to keep track of their original indices, so we can refer back to
        // their positions in _data. Therefore, the code constructs a 2-dimensional
        // std::vector. The first dimension contains the element to be sorted.
        // The second dimension contains the original index information.
        // After sorting, the (rearranged) index information will be stored back in the
        // ndarray use[]

        toSortElement.push_back(_data[use[0]][idim]);
        toSortElement.push_back(T(use[0]));
        toSort.push_back(toSortElement);
        for (i = 1; i < ct; i++) {
            toSortElement[0] = _data[use[i]][idim];
            toSortElement[1] = T(use[i]);
            toSort.push_back(toSortElement);
        }

        std::stable_sort(toSort.begin(), toSort.end());

        k = ct/2;
        l = ct/2;
        while (k > 0 && toSort[k][0] == toSort[k - 1][0]) k--;

        while (l < ct - 1 && toSort[l][0] == toSort[ct/2][0]) l++;

        if ((ct/2 - k) < (l - ct/2) || l == ct - 1) j = k;
        else j = l;

        for (i = 0; i < ct; i++) {
            use[i] = int(toSort[i][1]);
        }
        daughter = use[j];

        if (parent >= 0) _tree[parent][dir] = daughter;
        _tree[daughter][DIMENSION] = idim;
        _tree[daughter][PARENT] = parent;

        if (j < ct - 1) {
            _organize(use[ndarray::view(j + 1, use.getSize<0>())], ct - j - 1, daughter, GEQ);
        }
        else _tree[daughter][GEQ] = -1;

        if (j > 0) {
            _organize(use, j, daughter, LT);
        }
        else _tree[daughter][LT] = -1;

    }  // if (ct > 1)
    else {
        daughter = use[0];

        if (parent >= 0) _tree[parent][dir] = daughter;

        idim = _tree[parent][DIMENSION] + 1;

        if (idim >= _dimensions) idim = 0;

        _tree[daughter][DIMENSION] = idim;
        _tree[daughter][LT] = -1;
        _tree[daughter][GEQ] = -1;
        _tree[daughter][PARENT] = parent;
    }

    if (parent == -1) {
        _masterParent = daughter;
    }
}
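
// A worked example of the split rule above (illustrative): five 1-D points with
// values {3, 1, 2, 2, 5} sort stably to {1, 2, 2, 3, 5}. The midpoint ct/2 = 2
// lands on the second 2; k backs up to the first 2 (index 1) and l advances to
// index 3, the first value past the tied run. Here (ct/2 - k) == (l - ct/2), so
// j = l = 3 and the point with value 3 becomes the node: {1, 2, 2} descend the
// LT branch and {5} descends GEQ. Walking k and l off the run of tied values
// guarantees that no point equal to the node's value ever lands on the LT side,
// which is the invariant _walkUpTree checks.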

template <typename T>
int KdTree<T>::_findNode(ndarray::Array<const T, 1, 1> const &v) const
{
    int consider, next, dim;

    dim = _tree[_masterParent][DIMENSION];

    if (v[dim] < _data[_masterParent][dim]) consider = _tree[_masterParent][LT];
    else consider = _tree[_masterParent][GEQ];

    next = consider;

    while (next >= 0) {

        consider = next;

        dim = _tree[consider][DIMENSION];
        if (v[dim] < _data[consider][dim]) next = _tree[consider][LT];
        else next = _tree[consider][GEQ];
    }

    return consider;
}

template <typename T>
void KdTree<T>::_lookForNeighbors(ndarray::Array<const T, 1, 1> const &v,
                                  int consider,
                                  int from) const
{
    int i, j, going;
    double dd;

    dd = _distance(v, _data[consider]);

    if (_neighborsFound < _neighborsWanted || dd < _neighborDistances[_neighborsWanted - 1]) {

        // find the insertion index j that keeps _neighborDistances sorted
        for (j = 0; j < _neighborsFound && _neighborDistances[j] < dd; j++);

        for (i = _neighborsWanted - 1; i > j; i--) {
            _neighborDistances[i] = _neighborDistances[i - 1];
            _neighborCandidates[i] = _neighborCandidates[i - 1];
        }

        _neighborDistances[j] = dd;
        _neighborCandidates[j] = consider;

        if (_neighborsFound < _neighborsWanted) _neighborsFound++;
    }

    if (_tree[consider][PARENT] == from) {
        // you came here from the parent

        i = _tree[consider][DIMENSION];
        dd = v[i] - _data[consider][i];
        if ((dd <= _neighborDistances[_neighborsFound - 1] || _neighborsFound < _neighborsWanted)
            && _tree[consider][LT] >= 0) {

            _lookForNeighbors(v, _tree[consider][LT], consider);
        }

        dd = _data[consider][i] - v[i];
        if ((dd <= _neighborDistances[_neighborsFound - 1] || _neighborsFound < _neighborsWanted)
            && _tree[consider][GEQ] >= 0) {

            _lookForNeighbors(v, _tree[consider][GEQ], consider);
        }
    }
    else {
        // you came here from one of the branches

        // descend the other branch
        if (_tree[consider][LT] == from) {
            going = GEQ;
        }
        else {
            going = LT;
        }

        j = _tree[consider][going];

        if (j >= 0) {
            i = _tree[consider][DIMENSION];

            if (going == LT) dd = v[i] - _data[consider][i];
            else dd = _data[consider][i] - v[i];

            if (dd <= _neighborDistances[_neighborsFound - 1] || _neighborsFound < _neighborsWanted) {
                _lookForNeighbors(v, j, consider);
            }
        }

        // ascend to the parent
        if (_tree[consider][PARENT] >= 0) {
            _lookForNeighbors(v, _tree[consider][PARENT], consider);
        }
    }
}

template <typename T>
int KdTree<T>::_walkUpTree(int target,
                           int dir,
                           int root) const
{
    // target is the node that you are examining now
    // dir is where you came from
    // root is the ultimate point from which you started

    int i, output;

    output = 1;

    if (dir == LT) {
        if (_data[root][_tree[target][DIMENSION]] >= _data[target][_tree[target][DIMENSION]]) {
            return 0;
        }
    }
    else {
        if (_data[root][_tree[target][DIMENSION]] < _data[target][_tree[target][DIMENSION]]) {
            return 0;
        }
    }

    if (_tree[target][PARENT] >= 0) {
        if (_tree[_tree[target][PARENT]][LT] == target) i = LT;
        else i = GEQ;
        output = output*_walkUpTree(_tree[target][PARENT], i, root);
    }
    else {
        output = output*target;
        // so that the walk returns _masterParent,
        // confirming that everything is connected to _masterParent
    }
    return output;
}

template <typename T>
void KdTree<T>::removePoint(int target)
{
    int nl, nr, i, j, k, side;
    int root;

    nl = 0;
    nr = 0;
    if (_tree[target][LT] >= 0) {
        nl++;
        _count(_tree[target][LT], &nl);
    }

    if (_tree[target][GEQ] >= 0) {
        nr++;
        _count(_tree[target][GEQ], &nr);
    }

    if (nl == 0 && nr == 0) {

        k = _tree[target][PARENT];

        if (_tree[k][LT] == target) _tree[k][LT] = -1;
        else if (_tree[k][GEQ] == target) _tree[k][GEQ] = -1;

    }  // if target is terminal
    else if ((nl == 0 && nr > 0) || (nr == 0 && nl > 0)) {

        if (nl == 0) side = GEQ;
        else side = LT;

        k = _tree[target][PARENT];
        if (k >= 0) {

            if (_tree[k][LT] == target) {
                _tree[k][LT] = _tree[target][side];
                _tree[_tree[k][LT]][PARENT] = k;
            }
            else {
                _tree[k][GEQ] = _tree[target][side];
                _tree[_tree[k][GEQ]][PARENT] = k;
            }
        }
        else {
            _masterParent = _tree[target][side];
            _tree[_tree[target][side]][PARENT] = -1;
        }
    }  // if only one side is populated
    else {

        if (nl > nr) side = LT;
        else side = GEQ;

        k = _tree[target][PARENT];
        if (k < 0) {
            _masterParent = _tree[target][side];
            _tree[_masterParent][PARENT] = -1;
        }
        else {
            if (_tree[k][LT] == target) {
                _tree[k][LT] = _tree[target][side];
                _tree[_tree[k][LT]][PARENT] = k;
            }
            else {
                _tree[k][GEQ] = _tree[target][side];
                _tree[_tree[k][GEQ]][PARENT] = k;
            }
        }

        // 3 - side maps LT to GEQ and vice versa (LT + GEQ == 3),
        // so root is the less populated branch, which gets re-inserted below
        root = _tree[target][3 - side];

        _descend(root);
    }  // if both sides are populated

    if (target < _pts - 1) {
        for (i = target + 1; i < _pts; i++) {
            for (j = 0; j < 4; j++) _tree[i - 1][j] = _tree[i][j];

            for (j = 0; j < _dimensions; j++) _data[i - 1][j] = _data[i][j];
        }

        for (i = 0; i < _pts; i++) {
            for (j = 1; j < 4; j++) {
                if (_tree[i][j] > target) _tree[i][j]--;
            }
        }

        if (_masterParent > target) _masterParent--;
    }
    _pts--;

    i = _testTree();
    if (i == 0) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Subtracting from KdTree failed\n");
    }
}

template <typename T>
void KdTree<T>::_count(int where, int *ct) const
{
    // count the number of nodes on a given branch (the descendants of `where`)

    if (_tree[where][LT] >= 0) {
        ct[0]++;
        _count(_tree[where][LT], ct);
    }
    if (_tree[where][GEQ] >= 0) {
        ct[0]++;
        _count(_tree[where][GEQ], ct);
    }
}

template <typename T>
void KdTree<T>::_reassign(int target)
{
    int where, dir, k;

    where = _masterParent;
    if (_data[target][_tree[where][DIMENSION]] < _data[where][_tree[where][DIMENSION]]) dir = LT;
    else dir = GEQ;

    k = _tree[where][dir];
    while (k >= 0) {
        where = k;
        if (_data[target][_tree[where][DIMENSION]] < _data[where][_tree[where][DIMENSION]]) dir = LT;
        else dir = GEQ;
        k = _tree[where][dir];
    }

    _tree[where][dir] = target;
    _tree[target][PARENT] = where;
    _tree[target][LT] = -1;
    _tree[target][GEQ] = -1;
    _tree[target][DIMENSION] = _tree[where][DIMENSION] + 1;

    if (_tree[target][DIMENSION] == _dimensions) _tree[target][DIMENSION] = 0;
}

template <typename T>
void KdTree<T>::_descend(int root)
{
    if (_tree[root][LT] >= 0) _descend(_tree[root][LT]);
    if (_tree[root][GEQ] >= 0) _descend(_tree[root][GEQ]);

    _reassign(root);
}

template <typename T>
double KdTree<T>::_distance(ndarray::Array<const T, 1, 1> const &p1,
                            ndarray::Array<const T, 1, 1> const &p2) const
{
    int i, dd;
    double ans;
    ans = 0.0;
    dd = p1.template getSize<0>();

    for (i = 0; i < dd; i++) ans += (p1[i] - p2[i])*(p1[i] - p2[i]);

    return ::sqrt(ans);
}

template <typename T>
GaussianProcess<T>::GaussianProcess(ndarray::Array<T, 2, 2> const &dataIn,
                                    ndarray::Array<T, 1, 1> const &ff,
                                    boost::shared_ptr<Covariogram<T> > const &covarIn)
{
    int i;

    _covariogram = covarIn;

    _pts = dataIn.template getSize<0>();
    _dimensions = dataIn.template getSize<1>();

    _room = _pts;
    _roomStep = 5000;

    _nFunctions = 1;
    _function = allocate(ndarray::makeVector(_pts, 1));

    for (i = 0; i < _pts; i++) _function[i][0] = ff[i];

    _krigingParameter = T(1.0);
    _lambda = T(1.0e-5);

    _useMaxMin = 0;

    _kdTree.Initialize(dataIn);

    _pts = _kdTree.getPoints();
}
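
// A minimal construction sketch (illustrative; the names data, values, and
// covar are hypothetical): one function value per data point, with a
// squared-exponential covariogram.
//
//     ndarray::Array<double, 2, 2> data = ndarray::allocate(ndarray::makeVector(100, 2));
//     ndarray::Array<double, 1, 1> values = ndarray::allocate(ndarray::makeVector(100));
//     // ... fill data and values ...
//
//     boost::shared_ptr<lsst::afw::math::SquaredExpCovariogram<double> >
//         covar(new lsst::afw::math::SquaredExpCovariogram<double>());
//     lsst::afw::math::GaussianProcess<double> gp(data, values, covar);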

template <typename T>
GaussianProcess<T>::GaussianProcess(ndarray::Array<T, 2, 2> const &dataIn,
                                    ndarray::Array<T, 1, 1> const &mn,
                                    ndarray::Array<T, 1, 1> const &mx,
                                    ndarray::Array<T, 1, 1> const &ff,
                                    boost::shared_ptr<Covariogram<T> > const &covarIn)
{
    int i, j;
    ndarray::Array<T, 2, 2> normalizedData;

    _covariogram = covarIn;

    _pts = dataIn.template getSize<0>();
    _dimensions = dataIn.template getSize<1>();
    _room = _pts;
    _roomStep = 5000;

    _krigingParameter = T(1.0);
    _lambda = T(1.0e-5);

    _max = allocate(ndarray::makeVector(_dimensions));
    _min = allocate(ndarray::makeVector(_dimensions));
    _max.deep() = mx;
    _min.deep() = mn;
    _useMaxMin = 1;
    normalizedData = allocate(ndarray::makeVector(_pts, _dimensions));
    for (i = 0; i < _pts; i++) {
        for (j = 0; j < _dimensions; j++) {
            normalizedData[i][j] = (dataIn[i][j] - _min[j])/(_max[j] - _min[j]);
            // note the normalization by _max - _min in each dimension
        }
    }

    _kdTree.Initialize(normalizedData);

    _pts = _kdTree.getPoints();
    _nFunctions = 1;
    _function = allocate(ndarray::makeVector(_pts, 1));
    for (i = 0; i < _pts; i++) _function[i][0] = ff[i];
}
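
// Note on this constructor (illustrative): each coordinate is mapped to
// (x - min)/(max - min) before it is stored in the k-d tree, so nearest-neighbor
// distances are measured in the normalized space. The interpolation methods
// below apply the same scaling to query points whenever _useMaxMin == 1, so
// callers always pass points in the original, unscaled coordinates.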

template <typename T>
GaussianProcess<T>::GaussianProcess(ndarray::Array<T, 2, 2> const &dataIn,
                                    ndarray::Array<T, 2, 2> const &ff,
                                    boost::shared_ptr<Covariogram<T> > const &covarIn)
{
    _covariogram = covarIn;

    _pts = dataIn.template getSize<0>();
    _dimensions = dataIn.template getSize<1>();

    _room = _pts;
    _roomStep = 5000;

    _nFunctions = ff.template getSize<1>();
    _function = allocate(ndarray::makeVector(_pts, _nFunctions));
    _function.deep() = ff;

    _krigingParameter = T(1.0);
    _lambda = T(1.0e-5);

    _useMaxMin = 0;

    _kdTree.Initialize(dataIn);

    _pts = _kdTree.getPoints();
}

template <typename T>
GaussianProcess<T>::GaussianProcess(ndarray::Array<T, 2, 2> const &dataIn,
                                    ndarray::Array<T, 1, 1> const &mn,
                                    ndarray::Array<T, 1, 1> const &mx,
                                    ndarray::Array<T, 2, 2> const &ff,
                                    boost::shared_ptr<Covariogram<T> > const &covarIn)
{
    int i, j;
    ndarray::Array<T, 2, 2> normalizedData;

    _covariogram = covarIn;

    _pts = dataIn.template getSize<0>();
    _dimensions = dataIn.template getSize<1>();

    _room = _pts;
    _roomStep = 5000;

    _krigingParameter = T(1.0);
    _lambda = T(1.0e-5);

    _max = allocate(ndarray::makeVector(_dimensions));
    _min = allocate(ndarray::makeVector(_dimensions));
    _max.deep() = mx;
    _min.deep() = mn;
    _useMaxMin = 1;
    normalizedData = allocate(ndarray::makeVector(_pts, _dimensions));
    for (i = 0; i < _pts; i++) {
        for (j = 0; j < _dimensions; j++) {
            normalizedData[i][j] = (dataIn[i][j] - _min[j])/(_max[j] - _min[j]);
            // note the normalization by _max - _min in each dimension
        }
    }

    _kdTree.Initialize(normalizedData);
    _pts = _kdTree.getPoints();
    _nFunctions = ff.template getSize<1>();
    _function = allocate(ndarray::makeVector(_pts, _nFunctions));
    _function.deep() = ff;
}

template <typename T>
T GaussianProcess<T>::interpolate(ndarray::Array<T, 1, 1> variance,
                                  ndarray::Array<T, 1, 1> const &vin,
                                  int numberOfNeighbors) const
{
    if (numberOfNeighbors <= 0) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for zero or negative number of neighbors\n");
    }

    if (numberOfNeighbors > _kdTree.getPoints()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for more neighbors than you have data points\n");
    }

    int i, j;
    T fbar, mu;

    ndarray::Array<T, 1, 1> covarianceTestPoint;
    ndarray::Array<int, 1, 1> neighbors;
    ndarray::Array<double, 1, 1> neighborDistances, vv;

    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> covariance, bb, xx;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;

    _timer.start();

    bb.resize(numberOfNeighbors, 1);
    xx.resize(numberOfNeighbors, 1);

    covariance.resize(numberOfNeighbors, numberOfNeighbors);

    covarianceTestPoint = allocate(ndarray::makeVector(numberOfNeighbors));

    neighbors = allocate(ndarray::makeVector(numberOfNeighbors));

    neighborDistances = allocate(ndarray::makeVector(numberOfNeighbors));

    vv = allocate(ndarray::makeVector(_dimensions));
    if (_useMaxMin == 1) {
        // if you constructed this Gaussian process with minimum and maximum
        // values for the dimensions of your parameter space,
        // the point you are interpolating must be scaled to match the data so
        // that the selected nearest neighbors are appropriate

        for (i = 0; i < _dimensions; i++) vv[i] = (vin[i] - _min[i])/(_max[i] - _min[i]);
    }
    else {
        vv = vin;
    }

    _kdTree.findNeighbors(neighbors, neighborDistances, vv, numberOfNeighbors);

    _timer.addToSearch();

    fbar = 0.0;
    for (i = 0; i < numberOfNeighbors; i++) fbar += _function[neighbors[i]][0];
    fbar = fbar/double(numberOfNeighbors);

    for (i = 0; i < numberOfNeighbors; i++) {
        covarianceTestPoint[i] = (*_covariogram)(vv, _kdTree.getData(neighbors[i]));

        covariance(i, i) = (*_covariogram)(_kdTree.getData(neighbors[i]), _kdTree.getData(neighbors[i]))
                           + _lambda;

        for (j = i + 1; j < numberOfNeighbors; j++) {
            covariance(i, j) = (*_covariogram)(_kdTree.getData(neighbors[i]),
                                               _kdTree.getData(neighbors[j]));
            covariance(j, i) = covariance(i, j);
        }
    }

    _timer.addToIteration();

    // use Eigen's LDLT solver in place of matrix inversion (for speed purposes)
    ldlt.compute(covariance);

    for (i = 0; i < numberOfNeighbors; i++) bb(i, 0) = _function[neighbors[i]][0] - fbar;

    xx = ldlt.solve(bb);
    _timer.addToEigen();

    mu = fbar;

    for (i = 0; i < numberOfNeighbors; i++) {
        mu += covarianceTestPoint[i]*xx(i, 0);
    }

    _timer.addToIteration();

    variance(0) = (*_covariogram)(vv, vv) + _lambda;

    for (i = 0; i < numberOfNeighbors; i++) bb(i) = covarianceTestPoint[i];

    xx = ldlt.solve(bb);

    for (i = 0; i < numberOfNeighbors; i++) {
        variance(0) -= covarianceTestPoint[i]*xx(i, 0);
    }

    variance(0) = variance(0)*_krigingParameter;

    _timer.addToVariance();
    _timer.addToTotal(1);

    return mu;
}
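
// A minimal call sketch (illustrative), continuing the construction example
// above; gp and query are hypothetical:
//
//     ndarray::Array<double, 1, 1> variance = ndarray::allocate(ndarray::makeVector(1));
//     double mu = gp.interpolate(variance, query, 10);  // kriging estimate from 10 neighbors
//     // mu is the interpolated function value; variance[0] its kriging variance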

template <typename T>
void GaussianProcess<T>::interpolate(ndarray::Array<T, 1, 1> mu,
                                     ndarray::Array<T, 1, 1> variance,
                                     ndarray::Array<T, 1, 1> const &vin,
                                     int numberOfNeighbors) const
{
    if (numberOfNeighbors <= 0) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for zero or negative number of neighbors\n");
    }

    if (numberOfNeighbors > _kdTree.getPoints()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for more neighbors than you have data points\n");
    }

    int i, j, ii;
    T fbar;

    ndarray::Array<T, 1, 1> covarianceTestPoint;
    ndarray::Array<int, 1, 1> neighbors;
    ndarray::Array<double, 1, 1> neighborDistances, vv;

    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> covariance, bb, xx;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;

    _timer.start();

    bb.resize(numberOfNeighbors, 1);
    xx.resize(numberOfNeighbors, 1);
    covariance.resize(numberOfNeighbors, numberOfNeighbors);
    covarianceTestPoint = allocate(ndarray::makeVector(numberOfNeighbors));
    neighbors = allocate(ndarray::makeVector(numberOfNeighbors));
    neighborDistances = allocate(ndarray::makeVector(numberOfNeighbors));

    vv = allocate(ndarray::makeVector(_dimensions));

    if (_useMaxMin == 1) {
        // if you constructed this Gaussian process with minimum and maximum
        // values for the dimensions of your parameter space,
        // the point you are interpolating must be scaled to match the data so
        // that the selected nearest neighbors are appropriate

        for (i = 0; i < _dimensions; i++) vv[i] = (vin[i] - _min[i])/(_max[i] - _min[i]);
    }
    else {
        vv = vin;
    }

    _kdTree.findNeighbors(neighbors, neighborDistances, vv, numberOfNeighbors);

    _timer.addToSearch();

    for (i = 0; i < numberOfNeighbors; i++) {
        covarianceTestPoint[i] = (*_covariogram)(vv, _kdTree.getData(neighbors[i]));

        covariance(i, i) = (*_covariogram)(_kdTree.getData(neighbors[i]), _kdTree.getData(neighbors[i]))
                           + _lambda;

        for (j = i + 1; j < numberOfNeighbors; j++) {
            covariance(i, j) = (*_covariogram)(_kdTree.getData(neighbors[i]),
                                               _kdTree.getData(neighbors[j]));
            covariance(j, i) = covariance(i, j);
        }
    }

    _timer.addToIteration();

    // use Eigen's LDLT solver in place of matrix inversion (for speed purposes)
    ldlt.compute(covariance);

    for (ii = 0; ii < _nFunctions; ii++) {

        fbar = 0.0;
        for (i = 0; i < numberOfNeighbors; i++) fbar += _function[neighbors[i]][ii];
        fbar = fbar/double(numberOfNeighbors);

        for (i = 0; i < numberOfNeighbors; i++) bb(i, 0) = _function[neighbors[i]][ii] - fbar;
        xx = ldlt.solve(bb);

        mu[ii] = fbar;

        for (i = 0; i < numberOfNeighbors; i++) {
            mu[ii] += covarianceTestPoint[i]*xx(i, 0);
        }

    }  // ii = 0 through _nFunctions

    _timer.addToEigen();

    variance[0] = (*_covariogram)(vv, vv) + _lambda;

    for (i = 0; i < numberOfNeighbors; i++) bb(i) = covarianceTestPoint[i];

    xx = ldlt.solve(bb);

    for (i = 0; i < numberOfNeighbors; i++) {
        variance[0] -= covarianceTestPoint[i]*xx(i, 0);
    }
    variance[0] = variance[0]*_krigingParameter;

    for (i = 1; i < _nFunctions; i++) variance[i] = variance[0];

    _timer.addToVariance();
    _timer.addToTotal(1);
}

template <typename T>
T GaussianProcess<T>::selfInterpolate(ndarray::Array<T, 1, 1> variance,
                                      int dex,
                                      int numberOfNeighbors) const
{
    if (numberOfNeighbors <= 0) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for zero or negative number of neighbors\n");
    }

    // numberOfNeighbors + 1 points are searched for below (the point itself is
    // found first and then discarded), so the guard matches the multi-function
    // selfInterpolate
    if (numberOfNeighbors + 1 > _kdTree.getPoints()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for more neighbors than you have data points\n");
    }

    int i, j;
    T fbar, mu;

    ndarray::Array<T, 1, 1> covarianceTestPoint;
    ndarray::Array<int, 1, 1> selfNeighbors;
    ndarray::Array<double, 1, 1> selfDistances;
    ndarray::Array<int, 1, 1> neighbors;
    ndarray::Array<double, 1, 1> neighborDistances;

    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> covariance, bb, xx;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;

    _timer.start();

    bb.resize(numberOfNeighbors, 1);
    xx.resize(numberOfNeighbors, 1);
    covariance.resize(numberOfNeighbors, numberOfNeighbors);
    covarianceTestPoint = allocate(ndarray::makeVector(numberOfNeighbors));
    neighbors = allocate(ndarray::makeVector(numberOfNeighbors));
    neighborDistances = allocate(ndarray::makeVector(numberOfNeighbors));

    selfNeighbors = allocate(ndarray::makeVector(numberOfNeighbors + 1));
    selfDistances = allocate(ndarray::makeVector(numberOfNeighbors + 1));

    // we don't use _useMaxMin because the data has already been normalized

    _kdTree.findNeighbors(selfNeighbors, selfDistances, _kdTree.getData(dex),
                          numberOfNeighbors + 1);

    _timer.addToSearch();

    if (selfNeighbors[0] != dex) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Nearest neighbor search in selfInterpolate did not find self\n");
    }

    // selfNeighbors[0] will be the point itself (it is its own nearest neighbor).
    // We discard that for the interpolation calculation.
    //
    // If you do not wish to do this, simply call the usual ::interpolate() method instead of
    // ::selfInterpolate()
    for (i = 0; i < numberOfNeighbors; i++) {
        neighbors[i] = selfNeighbors[i + 1];
        neighborDistances[i] = selfDistances[i + 1];
    }

    fbar = 0.0;
    for (i = 0; i < numberOfNeighbors; i++) fbar += _function[neighbors[i]][0];
    fbar = fbar/double(numberOfNeighbors);

    for (i = 0; i < numberOfNeighbors; i++) {
        covarianceTestPoint[i] = (*_covariogram)(_kdTree.getData(dex),
                                                 _kdTree.getData(neighbors[i]));

        covariance(i, i) = (*_covariogram)(_kdTree.getData(neighbors[i]),
                                           _kdTree.getData(neighbors[i])) + _lambda;

        for (j = i + 1; j < numberOfNeighbors; j++) {
            covariance(i, j) = (*_covariogram)(_kdTree.getData(neighbors[i]),
                                               _kdTree.getData(neighbors[j]));
            covariance(j, i) = covariance(i, j);
        }
    }
    _timer.addToIteration();

    // use Eigen's LDLT solver in place of matrix inversion (for speed purposes)
    ldlt.compute(covariance);

    for (i = 0; i < numberOfNeighbors; i++) bb(i, 0) = _function[neighbors[i]][0] - fbar;
    xx = ldlt.solve(bb);
    _timer.addToEigen();

    mu = fbar;

    for (i = 0; i < numberOfNeighbors; i++) {
        mu += covarianceTestPoint[i]*xx(i, 0);
    }

    variance(0) = (*_covariogram)(_kdTree.getData(dex), _kdTree.getData(dex)) + _lambda;

    for (i = 0; i < numberOfNeighbors; i++) bb(i) = covarianceTestPoint[i];

    xx = ldlt.solve(bb);

    for (i = 0; i < numberOfNeighbors; i++) {
        variance(0) -= covarianceTestPoint[i]*xx(i, 0);
    }

    variance(0) = variance(0)*_krigingParameter;
    _timer.addToVariance();
    _timer.addToTotal(1);

    return mu;
}
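
// A minimal call sketch (illustrative; gp is hypothetical): selfInterpolate is
// a leave-one-out interpolation, predicting data point 5 from its 10 nearest
// neighbors while excluding point 5 itself. Comparing mu against the stored
// function value is a simple cross-validation of the covariogram and its
// hyperparameters.
//
//     ndarray::Array<double, 1, 1> variance = ndarray::allocate(ndarray::makeVector(1));
//     double mu = gp.selfInterpolate(variance, 5, 10);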

template <typename T>
void GaussianProcess<T>::selfInterpolate(ndarray::Array<T, 1, 1> mu,
                                         ndarray::Array<T, 1, 1> variance,
                                         int dex,
                                         int numberOfNeighbors) const
{
    if (numberOfNeighbors <= 0) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for zero or negative number of neighbors\n");
    }

    if (numberOfNeighbors + 1 > _kdTree.getPoints()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked for more neighbors than you have data points\n");
    }

    if (dex < 0 || dex >= _kdTree.getPoints()) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Asked to self interpolate on a point that does not exist\n");
    }

    int i, j, ii;
    T fbar;

    ndarray::Array<T, 1, 1> covarianceTestPoint;
    ndarray::Array<int, 1, 1> selfNeighbors;
    ndarray::Array<double, 1, 1> selfDistances;
    ndarray::Array<int, 1, 1> neighbors;
    ndarray::Array<double, 1, 1> neighborDistances;

    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> covariance, bb, xx;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;

    _timer.start();

    bb.resize(numberOfNeighbors, 1);
    xx.resize(numberOfNeighbors, 1);
    covariance.resize(numberOfNeighbors, numberOfNeighbors);
    covarianceTestPoint = allocate(ndarray::makeVector(numberOfNeighbors));
    neighbors = allocate(ndarray::makeVector(numberOfNeighbors));
    neighborDistances = allocate(ndarray::makeVector(numberOfNeighbors));

    selfNeighbors = allocate(ndarray::makeVector(numberOfNeighbors + 1));
    selfDistances = allocate(ndarray::makeVector(numberOfNeighbors + 1));

    // we don't use _useMaxMin because the data has already been normalized

    _kdTree.findNeighbors(selfNeighbors, selfDistances, _kdTree.getData(dex),
                          numberOfNeighbors + 1);

    _timer.addToSearch();

    if (selfNeighbors[0] != dex) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "Nearest neighbor search in selfInterpolate did not find self\n");
    }

    // selfNeighbors[0] will be the point itself (it is its own nearest neighbor).
    // We discard that for the interpolation calculation.
    //
    // If you do not wish to do this, simply call the usual ::interpolate() method instead of
    // ::selfInterpolate()
    for (i = 0; i < numberOfNeighbors; i++) {
        neighbors[i] = selfNeighbors[i + 1];
        neighborDistances[i] = selfDistances[i + 1];
    }

    for (i = 0; i < numberOfNeighbors; i++) {
        covarianceTestPoint[i] = (*_covariogram)(_kdTree.getData(dex), _kdTree.getData(neighbors[i]));

        covariance(i, i) = (*_covariogram)(_kdTree.getData(neighbors[i]), _kdTree.getData(neighbors[i]))
                           + _lambda;

        for (j = i + 1; j < numberOfNeighbors; j++) {
            covariance(i, j) = (*_covariogram)(_kdTree.getData(neighbors[i]),
                                               _kdTree.getData(neighbors[j]));
            covariance(j, i) = covariance(i, j);
        }
    }
    _timer.addToIteration();

    // use Eigen's LDLT solver in place of matrix inversion (for speed purposes)
    ldlt.compute(covariance);

    for (ii = 0; ii < _nFunctions; ii++) {

        fbar = 0.0;
        for (i = 0; i < numberOfNeighbors; i++) fbar += _function[neighbors[i]][ii];
        fbar = fbar/double(numberOfNeighbors);

        for (i = 0; i < numberOfNeighbors; i++) bb(i, 0) = _function[neighbors[i]][ii] - fbar;
        xx = ldlt.solve(bb);

        mu[ii] = fbar;

        for (i = 0; i < numberOfNeighbors; i++) {
            mu[ii] += covarianceTestPoint[i]*xx(i, 0);
        }
    }  // ii = 0 through _nFunctions

    _timer.addToEigen();

    variance[0] = (*_covariogram)(_kdTree.getData(dex), _kdTree.getData(dex)) + _lambda;

    for (i = 0; i < numberOfNeighbors; i++) bb(i) = covarianceTestPoint[i];

    xx = ldlt.solve(bb);

    for (i = 0; i < numberOfNeighbors; i++) {
        variance[0] -= covarianceTestPoint[i]*xx(i, 0);
    }

    variance[0] = variance[0]*_krigingParameter;

    for (i = 1; i < _nFunctions; i++) variance[i] = variance[0];

    _timer.addToVariance();
    _timer.addToTotal(1);
}

template <typename T>
void GaussianProcess<T>::batchInterpolate(ndarray::Array<T, 1, 1> mu,
                                          ndarray::Array<T, 1, 1> variance,
                                          ndarray::Array<T, 2, 2> const &queries) const
{
    int i, j, ii, nQueries;

    T fbar;
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> batchCovariance, batchbb, batchxx;
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> queryCovariance;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;

    ndarray::Array<T, 1, 1> v1;

    _timer.start();

    nQueries = queries.template getSize<0>();

    v1 = allocate(ndarray::makeVector(_dimensions));
    batchbb.resize(_pts, 1);
    batchxx.resize(_pts, 1);
    batchCovariance.resize(_pts, _pts);
    queryCovariance.resize(_pts, 1);

    for (i = 0; i < _pts; i++) {

        batchCovariance(i, i) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(i)) + _lambda;
        for (j = i + 1; j < _pts; j++) {
            batchCovariance(i, j) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(j));
            batchCovariance(j, i) = batchCovariance(i, j);
        }
    }
    _timer.addToIteration();

    ldlt.compute(batchCovariance);

    fbar = 0.0;
    for (i = 0; i < _pts; i++) {
        fbar += _function[i][0];
    }
    fbar = fbar/T(_pts);

    for (i = 0; i < _pts; i++) {
        batchbb(i, 0) = _function[i][0] - fbar;
    }
    batchxx = ldlt.solve(batchbb);
    _timer.addToEigen();

    for (ii = 0; ii < nQueries; ii++) {
        for (i = 0; i < _dimensions; i++) v1[i] = queries[ii][i];
        if (_useMaxMin == 1) {
            for (i = 0; i < _dimensions; i++) v1[i] = (v1[i] - _min[i])/(_max[i] - _min[i]);
        }
        mu(ii) = fbar;
        for (i = 0; i < _pts; i++) {
            mu(ii) += batchxx(i)*(*_covariogram)(v1, _kdTree.getData(i));
        }
    }
    _timer.addToIteration();

    for (ii = 0; ii < nQueries; ii++) {
        for (i = 0; i < _dimensions; i++) v1[i] = queries[ii][i];
        if (_useMaxMin == 1) {
            for (i = 0; i < _dimensions; i++) v1[i] = (v1[i] - _min[i])/(_max[i] - _min[i]);
        }

        for (i = 0; i < _pts; i++) {
            batchbb(i, 0) = (*_covariogram)(v1, _kdTree.getData(i));
            queryCovariance(i, 0) = batchbb(i, 0);
        }
        batchxx = ldlt.solve(batchbb);

        variance[ii] = (*_covariogram)(v1, v1) + _lambda;

        for (i = 0; i < _pts; i++) {
            variance[ii] -= queryCovariance(i, 0)*batchxx(i);
        }

        variance[ii] = variance[ii]*_krigingParameter;
    }

    _timer.addToVariance();
    _timer.addToTotal(nQueries);
}
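
// A minimal call sketch (illustrative; gp and queries are hypothetical, with
// one query point per row of queries). Unlike interpolate(), batchInterpolate
// builds the full _pts-by-_pts covariance matrix once and reuses its LDLT
// factorization for every query, so it pays off when there are many queries
// relative to the size of the data set.
//
//     int nQueries = queries.getSize<0>();
//     ndarray::Array<double, 1, 1> mu = ndarray::allocate(ndarray::makeVector(nQueries));
//     ndarray::Array<double, 1, 1> variance = ndarray::allocate(ndarray::makeVector(nQueries));
//     gp.batchInterpolate(mu, variance, queries);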

template <typename T>
void GaussianProcess<T>::batchInterpolate(ndarray::Array<T, 2, 2> mu,
                                          ndarray::Array<T, 2, 2> variance,
                                          ndarray::Array<T, 2, 2> const &queries) const
{
    int i, j, ii, nQueries, ifn;

    T fbar;
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> batchCovariance, batchbb, batchxx;
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> queryCovariance;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;

    ndarray::Array<T, 1, 1> v1;

    _timer.start();

    nQueries = queries.template getSize<0>();

    v1 = allocate(ndarray::makeVector(_dimensions));
    batchbb.resize(_pts, 1);
    batchxx.resize(_pts, 1);
    batchCovariance.resize(_pts, _pts);
    queryCovariance.resize(_pts, 1);

    for (i = 0; i < _pts; i++) {
        batchCovariance(i, i) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(i)) + _lambda;
        for (j = i + 1; j < _pts; j++) {
            batchCovariance(i, j) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(j));
            batchCovariance(j, i) = batchCovariance(i, j);
        }
    }

    _timer.addToIteration();

    ldlt.compute(batchCovariance);

    _timer.addToEigen();

    for (ifn = 0; ifn < _nFunctions; ifn++) {

        fbar = 0.0;
        for (i = 0; i < _pts; i++) {
            fbar += _function[i][ifn];
        }
        fbar = fbar/T(_pts);
        _timer.addToIteration();

        for (i = 0; i < _pts; i++) {
            batchbb(i, 0) = _function[i][ifn] - fbar;
        }
        batchxx = ldlt.solve(batchbb);
        _timer.addToEigen();

        for (ii = 0; ii < nQueries; ii++) {
            for (i = 0; i < _dimensions; i++) v1[i] = queries[ii][i];
            if (_useMaxMin == 1) {
                for (i = 0; i < _dimensions; i++) v1[i] = (v1[i] - _min[i])/(_max[i] - _min[i]);
            }
            mu[ii][ifn] = fbar;
            for (i = 0; i < _pts; i++) {
                mu[ii][ifn] += batchxx(i)*(*_covariogram)(v1, _kdTree.getData(i));
            }
        }

    }  // ifn = 0 to _nFunctions

    _timer.addToIteration();
    for (ii = 0; ii < nQueries; ii++) {
        for (i = 0; i < _dimensions; i++) v1[i] = queries[ii][i];
        if (_useMaxMin == 1) {
            for (i = 0; i < _dimensions; i++) v1[i] = (v1[i] - _min[i])/(_max[i] - _min[i]);
        }

        for (i = 0; i < _pts; i++) {
            batchbb(i, 0) = (*_covariogram)(v1, _kdTree.getData(i));
            queryCovariance(i, 0) = batchbb(i, 0);
        }
        batchxx = ldlt.solve(batchbb);

        variance[ii][0] = (*_covariogram)(v1, v1) + _lambda;

        for (i = 0; i < _pts; i++) {
            variance[ii][0] -= queryCovariance(i, 0)*batchxx(i);
        }

        variance[ii][0] = variance[ii][0]*_krigingParameter;
        for (i = 1; i < _nFunctions; i++) variance[ii][i] = variance[ii][0];
    }
    _timer.addToVariance();
    _timer.addToTotal(nQueries);
}

template <typename T>
void GaussianProcess<T>::batchInterpolate(ndarray::Array<T, 1, 1> mu,
                                          ndarray::Array<T, 2, 2> const &queries) const
{
    int i, j, ii, nQueries;

    T fbar;
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> batchCovariance, batchbb, batchxx;
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> queryCovariance;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;

    ndarray::Array<T, 1, 1> v1;

    _timer.start();

    nQueries = queries.template getSize<0>();

    v1 = allocate(ndarray::makeVector(_dimensions));

    batchbb.resize(_pts, 1);
    batchxx.resize(_pts, 1);
    batchCovariance.resize(_pts, _pts);
    queryCovariance.resize(_pts, 1);

    for (i = 0; i < _pts; i++) {
        batchCovariance(i, i) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(i)) + _lambda;
        for (j = i + 1; j < _pts; j++) {
            batchCovariance(i, j) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(j));
            batchCovariance(j, i) = batchCovariance(i, j);
        }
    }
    _timer.addToIteration();

    ldlt.compute(batchCovariance);

    fbar = 0.0;
    for (i = 0; i < _pts; i++) {
        fbar += _function[i][0];
    }
    fbar = fbar/T(_pts);

    for (i = 0; i < _pts; i++) {
        batchbb(i, 0) = _function[i][0] - fbar;
    }
    batchxx = ldlt.solve(batchbb);
    _timer.addToEigen();

    for (ii = 0; ii < nQueries; ii++) {

        for (i = 0; i < _dimensions; i++) v1[i] = queries[ii][i];
        if (_useMaxMin == 1) {
            for (i = 0; i < _dimensions; i++) v1[i] = (v1[i] - _min[i])/(_max[i] - _min[i]);
        }

        mu(ii) = fbar;
        for (i = 0; i < _pts; i++) {
            mu(ii) += batchxx(i)*(*_covariogram)(v1, _kdTree.getData(i));
        }
    }
    _timer.addToIteration();
    _timer.addToTotal(nQueries);
}

template <typename T>
void GaussianProcess<T>::batchInterpolate(ndarray::Array<T, 2, 2> mu,
                                          ndarray::Array<T, 2, 2> const &queries) const
{
    int i, j, ii, nQueries, ifn;

    T fbar;
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> batchCovariance, batchbb, batchxx;
    Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> queryCovariance;
    Eigen::LDLT<Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> > ldlt;

    ndarray::Array<T, 1, 1> v1;

    _timer.start();

    nQueries = queries.template getSize<0>();

    v1 = allocate(ndarray::makeVector(_dimensions));

    batchbb.resize(_pts, 1);
    batchxx.resize(_pts, 1);
    batchCovariance.resize(_pts, _pts);
    queryCovariance.resize(_pts, 1);

    for (i = 0; i < _pts; i++) {
        batchCovariance(i, i) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(i)) + _lambda;
        for (j = i + 1; j < _pts; j++) {
            batchCovariance(i, j) = (*_covariogram)(_kdTree.getData(i), _kdTree.getData(j));
            batchCovariance(j, i) = batchCovariance(i, j);
        }
    }

    _timer.addToIteration();

    ldlt.compute(batchCovariance);

    _timer.addToEigen();

    for (ifn = 0; ifn < _nFunctions; ifn++) {

        fbar = 0.0;
        for (i = 0; i < _pts; i++) {
            fbar += _function[i][ifn];
        }
        fbar = fbar/T(_pts);

        _timer.addToIteration();

        for (i = 0; i < _pts; i++) {
            batchbb(i, 0) = _function[i][ifn] - fbar;
        }
        batchxx = ldlt.solve(batchbb);
        _timer.addToEigen();

        for (ii = 0; ii < nQueries; ii++) {
            for (i = 0; i < _dimensions; i++) v1[i] = queries[ii][i];
            if (_useMaxMin == 1) {
                for (i = 0; i < _dimensions; i++) v1[i] = (v1[i] - _min[i])/(_max[i] - _min[i]);
            }

            mu[ii][ifn] = fbar;
            for (i = 0; i < _pts; i++) {
                mu[ii][ifn] += batchxx(i)*(*_covariogram)(v1, _kdTree.getData(i));
            }
        }

    }  // ifn = 0 through _nFunctions

    _timer.addToTotal(nQueries);
}

template <typename T>
void GaussianProcess<T>::addPoint(ndarray::Array<T, 1, 1> const &vin, T f)
{
    int i, j;

    if (_nFunctions != 1) {
        throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                          "You are calling the wrong addPoint; you need a vector of functions\n");
    }

    ndarray::Array<T, 1, 1> v = allocate(ndarray::makeVector(_dimensions));

    for (i = 0; i < _dimensions; i++) {
        v[i] = vin[i];
        if (_useMaxMin == 1) {
            v[i] = (v[i] - _min[i])/(_max[i] - _min[i]);
        }
    }

    if (_pts == _room) {
        ndarray::Array<T, 2, 2> buff = allocate(ndarray::makeVector(_pts, _nFunctions));
        buff.deep() = _function;

        _room += _roomStep;
        _function = allocate(ndarray::makeVector(_room, _nFunctions));
        for (i = 0; i < _pts; i++) {
            for (j = 0; j < _nFunctions; j++) {
                _function[i][j] = buff[i][j];
            }
        }
    }

    _function[_pts][0] = f;

    _kdTree.addPoint(v);
    _pts = _kdTree.getPoints();
}
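
// A minimal call sketch (illustrative; gp and newPoint are hypothetical). The
// point is given in the original, unscaled coordinates; addPoint applies the
// same min/max normalization as the constructor before inserting it into the
// k-d tree.
//
//     gp.addPoint(newPoint, 1.5);  // append newPoint with function value 1.5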

template <typename T>
void GaussianProcess<T>::addPoint(ndarray::Array<T, 1, 1> const &vin,
                                  ndarray::Array<T, 1, 1> const &f)
{
    int i, j;

    ndarray::Array<T, 1, 1> v = allocate(ndarray::makeVector(_dimensions));

    for (i = 0; i < _dimensions; i++) {
        v[i] = vin[i];
        if (_useMaxMin == 1) {
            v[i] = (v[i] - _min[i])/(_max[i] - _min[i]);
        }
    }

    if (_pts == _room) {
        ndarray::Array<T, 2, 2> buff = allocate(ndarray::makeVector(_pts, _nFunctions));
        buff.deep() = _function;

        _room += _roomStep;
        _function = allocate(ndarray::makeVector(_room, _nFunctions));
        for (i = 0; i < _pts; i++) {
            for (j = 0; j < _nFunctions; j++) {
                _function[i][j] = buff[i][j];
            }
        }
    }
    for (i = 0; i < _nFunctions; i++) _function[_pts][i] = f[i];

    _kdTree.addPoint(v);
    _pts = _kdTree.getPoints();
}

template <typename T>
void GaussianProcess<T>::removePoint(int dex)
{
    int i, j;

    _kdTree.removePoint(dex);

    // shift the function values after dex down by one row; the loop runs only
    // to the old _pts - 1 so that it never reads past the end of _function
    for (i = dex; i < _pts - 1; i++) {
        for (j = 0; j < _nFunctions; j++) {
            _function[i][j] = _function[i + 1][j];
        }
    }
    _pts = _kdTree.getPoints();
}

template <typename T>
void GaussianProcess<T>::setKrigingParameter(T kk)
{
    _krigingParameter = kk;
}

template <typename T>
void GaussianProcess<T>::setCovariogram(boost::shared_ptr<Covariogram<T> > const &covar)
{
    _covariogram = covar;
}

template <typename T>
void GaussianProcess<T>::setLambda(T lambda)
{
    _lambda = lambda;
}

template <typename T>
GaussianProcessTimer& GaussianProcess<T>::getTimer() const
{
    return _timer;
}
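
// A minimal call sketch (illustrative; gp is hypothetical). The timer
// accumulates across calls, so a typical pattern is reset, interpolate many
// times, then display:
//
//     gp.getTimer().reset();
//     // ... many calls to gp.interpolate(...) ...
//     gp.getTimer().display();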

template <typename T>
Covariogram<T>::~Covariogram() {}

template <typename T>
T Covariogram<T>::operator()(ndarray::Array<const T, 1, 1> const &p1,
                             ndarray::Array<const T, 1, 1> const &p2) const
{
    // the base-class operator() should never be called;
    // use one of the derived covariograms instead
    throw LSST_EXCEPT(lsst::pex::exceptions::RuntimeError,
                      "Covariogram base-class operator() called; use a derived covariogram\n");
    return T(1.0);
}

template <typename T>
SquaredExpCovariogram<T>::~SquaredExpCovariogram() {}

template <typename T>
SquaredExpCovariogram<T>::SquaredExpCovariogram()
{
    _ellSquared = 1.0;
}

template <typename T>
void SquaredExpCovariogram<T>::setEllSquared(double ellSquared)
{
    _ellSquared = ellSquared;
}

template <typename T>
T SquaredExpCovariogram<T>::operator()(ndarray::Array<const T, 1, 1> const &p1,
                                       ndarray::Array<const T, 1, 1> const &p2) const
{
    int i;
    T d;
    d = 0.0;
    for (i = 0; i < p1.template getSize<0>(); i++) {
        d += (p1[i] - p2[i])*(p1[i] - p2[i]);
    }

    d = d/_ellSquared;
    return T(exp(-0.5*d));
}
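
// The value computed above is the squared-exponential (Gaussian) kernel
//
//     C(p1, p2) = exp(-|p1 - p2|^2 / (2*_ellSquared)),
//
// so _ellSquared is the squared correlation length: points much closer than
// sqrt(_ellSquared) are strongly correlated; points much farther apart are
// nearly independent.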

template <typename T>
NeuralNetCovariogram<T>::~NeuralNetCovariogram() {}

template <typename T>
NeuralNetCovariogram<T>::NeuralNetCovariogram()
{
    _sigma0 = 1.0;
    _sigma1 = 1.0;
}

template <typename T>
T NeuralNetCovariogram<T>::operator()(ndarray::Array<const T, 1, 1> const &p1,
                                      ndarray::Array<const T, 1, 1> const &p2) const
{
    int i, dim;
    double num, denom1, denom2, arg;

    dim = p1.template getSize<0>();

    num = 2.0*_sigma0;
    denom1 = 1.0 + 2.0*_sigma0;
    denom2 = 1.0 + 2.0*_sigma0;

    for (i = 0; i < dim; i++) {
        num += 2.0*p1[i]*p2[i]*_sigma1;
        denom1 += 2.0*p1[i]*p1[i]*_sigma1;
        denom2 += 2.0*p2[i]*p2[i]*_sigma1;
    }

    arg = num/::sqrt(denom1*denom2);
    return T(2.0*(::asin(arg))/3.141592654);
}
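
// The value computed above is the covariance function of a neural network with
// a single hidden layer of infinitely many units (see Rasmussen & Williams,
// "Gaussian Processes for Machine Learning", 2006, ch. 4):
//
//     C(p1, p2) = (2/pi) * asin( 2*(sigma0 + sigma1*(p1 . p2)) /
//                 sqrt((1 + 2*(sigma0 + sigma1*|p1|^2)) * (1 + 2*(sigma0 + sigma1*|p2|^2))) )
//
// where _sigma0 plays the role of a bias variance and _sigma1 a weight variance.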

template <typename T>
void NeuralNetCovariogram<T>::setSigma0(double sigma0)
{
    _sigma0 = sigma0;
}

template <typename T>
void NeuralNetCovariogram<T>::setSigma1(double sigma1)
{
    _sigma1 = sigma1;
}

}}}  // namespace lsst::afw::math

#define gpn lsst::afw::math

#define INSTANTIATEGP(T) \
    template class gpn::KdTree<T>; \
    template class gpn::GaussianProcess<T>; \
    template class gpn::Covariogram<T>; \
    template class gpn::SquaredExpCovariogram<T>; \
    template class gpn::NeuralNetCovariogram<T>;

INSTANTIATEGP(double);