LSSTApplications  16.0-10-g0ee56ad+5,16.0-11-ga33d1f2+5,16.0-12-g3ef5c14+3,16.0-12-g71e5ef5+18,16.0-12-gbdf3636+3,16.0-13-g118c103+3,16.0-13-g8f68b0a+3,16.0-15-gbf5c1cb+4,16.0-16-gfd17674+3,16.0-17-g7c01f5c+3,16.0-18-g0a50484+1,16.0-20-ga20f992+8,16.0-21-g0e05fd4+6,16.0-21-g15e2d33+4,16.0-22-g62d8060+4,16.0-22-g847a80f+4,16.0-25-gf00d9b8+1,16.0-28-g3990c221+4,16.0-3-gf928089+3,16.0-32-g88a4f23+5,16.0-34-gd7987ad+3,16.0-37-gc7333cb+2,16.0-4-g10fc685+2,16.0-4-g18f3627+26,16.0-4-g5f3a788+26,16.0-5-gaf5c3d7+4,16.0-5-gcc1f4bb+1,16.0-6-g3b92700+4,16.0-6-g4412fcd+3,16.0-6-g7235603+4,16.0-69-g2562ce1b+2,16.0-8-g14ebd58+4,16.0-8-g2df868b+1,16.0-8-g4cec79c+6,16.0-8-gadf6c7a+1,16.0-8-gfc7ad86,16.0-82-g59ec2a54a+1,16.0-9-g5400cdc+2,16.0-9-ge6233d7+5,master-g2880f2d8cf+3,v17.0.rc1
LSSTDataManagementBasePackage
baseContinued.py
Go to the documentation of this file.
1 #
2 # LSST Data Management System
3 # Copyright 2017 LSST/AURA.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 import numpy as np
24 
25 from lsst.utils import continueClass, TemplateMeta
26 from .base import BaseRecord, BaseCatalog
27 from ..schema import Key
28 
29 __all__ = ["Catalog"]
30 
31 
@continueClass  # noqa: F811
class BaseRecord:

    def extract(self, *patterns, **kwds):
        """Extract a dictionary of {<name>: <field-value>} in which the field
        names match the given shell-style glob pattern(s).

        Any number of glob patterns may be passed; the result will be the
        union of the results of each glob considered separately.

        Parameters
        ----------
        patterns : `str`
            One or more shell-style glob patterns matched against field names.

        items : `dict`
            The result of a call to self.schema.extract(); this will be used
            instead of doing any new matching, and allows the pattern matching
            to be reused to extract values from multiple records.  This
            keyword is incompatible with any positional arguments and the
            regex, sub, and ordered keyword arguments.

        split : `bool`
            If True, fields with named subfields (e.g. points) will be split
            into separate items in the dict; instead of {"point":
            lsst.geom.Point2I(2,3)}, for instance, you'd get {"point.x":
            2, "point.y": 3}. Default is False.

        regex : `str` or `re` pattern object
            A regular expression to be used in addition to any glob patterns
            passed as positional arguments.  Note that this will be compared
            with re.match, not re.search.

        sub : `str`
            A replacement string (see `re.MatchObject.expand`) used to set the
            dictionary keys of any fields matched by regex.

        ordered : `bool`
            If `True`, a `collections.OrderedDict` will be returned instead of
            a standard dict, with the order corresponding to the definition
            order of the `Schema`. Default is `False`.
        """
        results = kwds.pop("items", None)
        split = kwds.pop("split", False)
        if results is None:
            results = self.schema.extract(*patterns, **kwds).copy()
        elif kwds:
            raise ValueError(
                "Unrecognized keyword arguments for extract: %s" % ", ".join(kwds.keys()))
        # Snapshot the items up front: the loop below may add and delete
        # entries while iterating.
        for name, schemaItem in list(results.items()):
            key = schemaItem.key
            if split and key.HAS_NAMED_SUBFIELDS:
                # Replace the composite entry with one entry per subfield.
                for subname, subkey in zip(key.subfields, key.subkeys):
                    results["%s.%s" % (name, subname)] = self.get(subkey)
                del results[name]
            else:
                results[name] = self.get(key)
        return results

    def __repr__(self):
        return "{}\n{}".format(type(self), self)
92 
93 
class Catalog(metaclass=TemplateMeta):
    """Pure-Python mixin shared by all Catalog template instantiations.

    Concrete (C++-backed) catalog classes are attached to this ABC via
    ``Catalog.register`` (see `lsst.utils.TemplateMeta`); the methods here
    layer Python conveniences (column caching, fancy indexing, Astropy
    conversion, attribute forwarding) on top of the underscore-prefixed
    C++ primitives (``_getColumnView``, ``_getitem_``, ``_append``, ...).
    """

    def getColumnView(self):
        """Return a column view of the catalog, caching it in ``_columns``."""
        self._columns = self._getColumnView()
        return self._columns

    def __getColumns(self):
        # Lazily build the column view on first access and cache it; every
        # mutating method below resets _columns to None to invalidate it.
        if not hasattr(self, "_columns") or self._columns is None:
            self._columns = self._getColumnView()
        return self._columns
    columns = property(__getColumns, doc="a column view of the catalog")

    def __getitem__(self, key):
        """Return the record at index key if key is an integer,
        return a column if key is a string field name or Key,
        or return a subset of the catalog if key is a slice
        or boolean NumPy array.
        """
        if type(key) is slice:
            (start, stop, step) = (key.start, key.stop, key.step)
            if step is None:
                step = 1
            if start is None:
                start = 0
            if stop is None:
                stop = len(self)
            return self.subset(start, stop, step)
        elif isinstance(key, np.ndarray):
            if key.dtype == bool:
                return self.subset(key)
            raise RuntimeError("Unsupported array type for indexing non-contiguous Catalog: %s" %
                               (key.dtype,))
        elif isinstance(key, (Key, str)):
            if not self.isContiguous():
                if isinstance(key, str):
                    key = self.schema[key].asKey()
                array = self._getitem_(key)
                # This array doesn't share memory with the Catalog, so don't let it be modified by
                # the user who thinks that the Catalog itself is being modified.
                # Just be aware that this array can only be passed down to C++ as an ndarray::Array<T const>
                # instead of an ordinary ndarray::Array<T>. If pybind isn't letting it down into C++,
                # you may have left off the 'const' in the definition.
                array.flags.writeable = False
                return array
            return self.columns[key]
        else:
            return self._getitem_(key)

    def __setitem__(self, key, value):
        """If ``key`` is an integer, set ``catalog[key]`` to ``value``.
        Otherwise select column ``key`` and set it to ``value``.
        """
        self._columns = None
        if isinstance(key, (Key, str)):
            self.columns[key] = value
        else:
            return self.set(key, value)

    def __delitem__(self, key):
        """Remove the record at index ``key``, or the records in slice ``key``."""
        self._columns = None
        if isinstance(key, slice):
            self._delslice_(key)
        else:
            self._delitem_(key)

    def append(self, record):
        """Append ``record`` to the end of the catalog."""
        self._columns = None
        self._append(record)

    def insert(self, key, value):
        """Insert record ``value`` at index ``key``."""
        self._columns = None
        self._insert(key, value)

    def clear(self):
        """Remove all records from the catalog."""
        self._columns = None
        self._clear()

    def addNew(self):
        """Add a new (empty) record to the catalog and return it."""
        self._columns = None
        return self._addNew()

    def cast(self, type_, deep=False):
        """Return a copy of the catalog with the given type.

        Parameters
        ----------
        type_ :
            The target catalog type.
        deep : `bool`, optional
            If `True`, clone the table and deep-copy all records.
        """
        if deep:
            table = self.table.clone()
            table.preallocate(len(self))
        else:
            table = self.table
        copy = type_(table)
        copy.extend(self, deep=deep)
        return copy

    def copy(self, deep=False):
        """Copy a catalog (default is not a deep copy)."""
        return self.cast(type(self), deep)

    def extend(self, iterable, deep=False, mapper=None):
        """Append all records in the given iterable to the catalog.

        Parameters
        ----------
        iterable :
            Any Python iterable containing records.
        deep : `bool`, optional
            If `True`, the records will be deep-copied; ignored if
            ``mapper`` is not `None` (that always implies `True`).
        mapper : `lsst.afw.table.SchemaMapper`, optional
            Used to translate records.
        """
        self._columns = None
        # We can't use isinstance here, because the SchemaMapper symbol isn't available
        # when this code is part of a subclass of Catalog in another package.
        if type(deep).__name__ == "SchemaMapper":
            mapper = deep
            deep = None
        if isinstance(iterable, type(self)):
            if mapper is not None:
                self._extend(iterable, mapper)
            else:
                self._extend(iterable, deep)
        else:
            for record in iterable:
                if mapper is not None:
                    self._append(self.table.copyRecord(record, mapper))
                elif deep:
                    self._append(self.table.copyRecord(record))
                else:
                    self._append(record)

    def __reduce__(self):
        # Pickle support: round-trip the catalog through its FITS form.
        import lsst.afw.fits
        return lsst.afw.fits.reduceToFits(self)

    def asAstropy(self, cls=None, copy=False, unviewable="copy"):
        """Return an astropy.table.Table (or subclass thereof) view into this catalog.

        Parameters
        ----------
        cls :
            Table subclass to use; `None` implies `astropy.table.Table`
            itself.  Use `astropy.table.QTable` to get Quantity columns.
        copy : `bool`, optional
            If `True` copy data from the LSST catalog to the astropy
            table.  Not copying is usually faster, but can keep memory
            from being freed if columns are later removed from the
            Astropy view.
        unviewable : `str`, optional
            One of the following options (ignored if ``copy=True``),
            indicating how to handle field types (string and Flag) for
            which views cannot be constructed:

            - 'copy' (default): copy only the unviewable fields.
            - 'raise': raise ValueError if unviewable fields are present.
            - 'skip': do not include unviewable fields in the Astropy Table.

        Returns
        -------
        table : `astropy.table.Table`
            Astropy view into the catalog.

        Raises
        ------
        ValueError
            Raised if ``unviewable`` is not a known value, or if it is
            'raise' and an unviewable field is present without ``copy``.
        """
        import astropy.table
        if cls is None:
            cls = astropy.table.Table
        if unviewable not in ("copy", "raise", "skip"):
            raise ValueError(
                "'unviewable'=%r must be one of 'copy', 'raise', or 'skip'" % (unviewable,))
        ps = self.getMetadata()
        meta = ps.toOrderedDict() if ps is not None else None
        columns = []
        items = self.schema.extract("*", ordered=True)
        for name, item in items.items():
            key = item.key
            unit = item.field.getUnits() or None  # use None instead of "" when empty
            if key.getTypeString() == "String":
                if not copy:
                    if unviewable == "raise":
                        raise ValueError("Cannot extract string "
                                         "unless copy=True or unviewable='copy' or 'skip'.")
                    elif unviewable == "skip":
                        continue
                # No zero-copy view exists for strings: build a copy row by row.
                data = np.zeros(
                    len(self), dtype=np.dtype((str, key.getSize())))
                for i, record in enumerate(self):
                    data[i] = record.get(key)
            elif key.getTypeString() == "Flag":
                if not copy:
                    if unviewable == "raise":
                        raise ValueError("Cannot extract packed bit columns "
                                         "unless copy=True or unviewable='copy' or 'skip'.")
                    elif unviewable == "skip":
                        continue
                # Flags are packed bits; unpacking necessarily copies.
                data = self.columns.get_bool_array(key)
            elif key.getTypeString() == "Angle":
                data = self.columns.get(key)
                unit = "radian"
                if copy:
                    data = data.copy()
            elif "Array" in key.getTypeString() and key.isVariableLength():
                # Can't get columns for variable-length array fields.
                if unviewable == "raise":
                    raise ValueError("Cannot extract variable-length array fields unless unviewable='skip'.")
                elif unviewable == "skip" or unviewable == "copy":
                    continue
            else:
                data = self.columns.get(key)
                if copy:
                    data = data.copy()
            columns.append(
                astropy.table.Column(
                    data,
                    name=name,
                    unit=unit,
                    description=item.field.getDoc()
                )
            )
        return cls(columns, meta=meta, copy=False)

    def __dir__(self):
        """
        This custom dir is necessary due to the custom getattr below.
        Without it, not all of the methods available are returned with dir.
        See DM-7199
        """
        def recursive_get_class_dir(cls):
            """
            Return a set containing the names of all methods
            for a given class *and* all of its subclasses.
            """
            result = set()
            if cls.__bases__:
                for subcls in cls.__bases__:
                    result |= recursive_get_class_dir(subcls)
            result |= set(cls.__dict__.keys())
            return result
        return sorted(set(dir(self.columns)) | set(dir(self.table)) |
                      recursive_get_class_dir(type(self)) | set(self.__dict__.keys()))

    def __getattr__(self, name):
        # Catalog forwards unknown method calls to its table and column view
        # for convenience.  (Feature requested by RHL; complaints about magic
        # should be directed to him.)
        if name == "_columns":
            # Guard: self.columns below reads self._columns, which re-enters
            # __getattr__ before the attribute is first set.  Initialize it
            # here to break the recursion.
            self._columns = None
            return None
        try:
            return getattr(self.table, name)
        except AttributeError:
            return getattr(self.columns, name)

    def __str__(self):
        if self.isContiguous():
            return str(self.asAstropy())
        else:
            # Astropy conversion requires contiguous columns; fall back to a
            # short summary instead.
            fields = ' '.join(x.field.getName() for x in self.schema)
            string = "Non-contiguous afw.Catalog of %d rows.\ncolumns: %s" % (len(self), fields)
            return string

    def __repr__(self):
        return "%s\n%s" % (type(self), self)
344 
345 
# Attach the C++ BaseCatalog as the "Base" instantiation of the Catalog ABC
# (registration mechanism provided by lsst.utils.TemplateMeta).
Catalog.register("Base", BaseCatalog)
daf::base::PropertySet * set
Definition: fits.cc:832
def asAstropy(self, cls=None, copy=False, unviewable="copy")
Return an astropy.table.Table (or subclass thereof) view into this catalog.
table::Key< int > type
Definition: Detector.cc:164
def extend(self, iterable, deep=False, mapper=None)
def get(cls, key, default=None)
Definition: wrappers.py:477
daf::base::PropertyList * list
Definition: fits.cc:833