import numpy as np

import astropy.table

from lsst.utils import continueClass, TemplateMeta
from ._table import BaseRecord, BaseCatalog
from ._schema import Key


@continueClass
class BaseRecord:

    def extract(self, *patterns, **kwargs):
        """Extract a dictionary of {<name>: <field-value>} in which the field
        names match the given shell-style glob pattern(s).

        Any number of glob patterns may be passed; the result will be the
        union of the results of each glob considered separately.

        Parameters
        ----------
        items : `dict`
            The result of a call to self.schema.extract(); this will be used
            instead of doing any new matching, and allows the pattern matching
            to be reused to extract values from multiple records.  This
            keyword is incompatible with any positional arguments and the
            regex, sub, and ordered keyword arguments.
        regex : `str` or `re` pattern object
            A regular expression to be used in addition to any glob patterns
            passed as positional arguments.  Note that this will be compared
            with re.match, not re.search.
        sub : `str`
            A replacement string (see `re.MatchObject.expand`) used to set the
            dictionary keys of any fields matched by regex.
        ordered : `bool`
            If `True`, a `collections.OrderedDict` will be returned instead of
            a standard dict, with the order corresponding to the definition
            order of the `Schema`.  Default is `False`.
        """
        d = kwargs.pop("items", None)
        if d is None:
            d = self.schema.extract(*patterns, **kwargs).copy()
        elif kwargs:
            kwargsStr = ", ".join(kwargs.keys())
            raise ValueError(f"Unrecognized keyword arguments for extract: {kwargsStr}")
        return {name: self.get(schemaItem.key) for name, schemaItem in d.items()}
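
    # Illustrative usage sketch (the field names "base_flux" and "base_fluxErr"
    # are assumed examples, not part of this module):
    #
    #     values = record.extract("base_*")
    #     # -> {"base_flux": 1.25, "base_fluxErr": 0.02}
    #     values = record.extract(regex=r"base_(.+)", sub=r"\1")
    #     # -> keys renamed to "flux" and "fluxErr"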

    def __repr__(self):
        return f"{type(self)}\n{self}"


class Catalog(metaclass=TemplateMeta):

    def __getColumns(self):
        if not hasattr(self, "_columns") or self._columns is None:
            self._columns = self._getColumnView()
        return self._columns

    columns = property(__getColumns, doc="a column view of the catalog")

    def __getitem__(self, key):
        """Return the record at index ``key`` if ``key`` is an integer,
        return a column if ``key`` is a string field name or `Key`,
        or return a subset of the catalog if ``key`` is a slice
        or boolean NumPy array.
        """
        if type(key) is slice:
            (start, stop, step) = (key.start, key.stop, key.step)
            if step is None:
                step = 1
            if start is None:
                start = 0
            if stop is None:
                stop = len(self)
            return self.subset(start, stop, step)
        elif isinstance(key, np.ndarray):
            if key.dtype == bool:
                return self.subset(key)
            raise RuntimeError(f"Unsupported array type for indexing non-contiguous Catalog: {key.dtype}")
        elif isinstance(key, str):
            key = self.schema.find(key).key
            return self._getitem_(key)
        elif isinstance(key, Key):
            return self._getitem_(key)
        else:
            # Integer index: return the record at that position.
            return self._getitem_(key)
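
    # Illustrative indexing sketch (the field name "base_flux" is an assumed
    # example):
    #
    #     rec = catalog[3]                                  # single record
    #     flux = catalog["base_flux"]                       # column as a NumPy array
    #     head = catalog[:10]                               # slice -> subset catalog
    #     bright = catalog[catalog["base_flux"] > 100.0]    # boolean-array subset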

    def __setitem__(self, key, value):
        """If ``key`` is an integer, set ``catalog[key]`` to ``value``.
        Otherwise select column ``key`` and set it to ``value``.
        """
        self._columns = None
        if isinstance(key, str):
            key = self.schema[key].asKey()
        if isinstance(key, Key):
            if isinstance(key, Key["Flag"]):
                self._set_flag(key, value)
            else:
                self.columns[key] = value
        else:
            return self.set(key, value)
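
    # Illustrative sketch (assumed field names):
    #
    #     catalog["base_flux"] = np.zeros(len(catalog))                # set a column
    #     catalog["base_flag"] = np.ones(len(catalog), dtype=bool)     # Flag column
    #     catalog[0] = otherRecord                                     # replace a record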

    def __delitem__(self, key):
        self._columns = None
        if isinstance(key, slice):
            self._delslice_(key)
        else:
            self._delitem_(key)

    def insert(self, key, value):
        self._columns = None
        self._insert(key, value)

    def addNew(self):
        self._columns = None
        return self._addNew()
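
    # Illustrative sketch: appending and filling a new record (the field name
    # "id" is an assumed example):
    #
    #     rec = catalog.addNew()   # append an empty record and return it
    #     rec["id"] = 42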

    def cast(self, type_, deep=False):
        """Return a copy of the catalog with the given type.

        Parameters
        ----------
        type_ :
            Type of catalog to return.
        deep : `bool`, optional
            If `True`, clone the table and deep copy all records.

        Returns
        -------
        copy :
            Copy of catalog with the requested type.
        """
        if deep:
            table = self.table.clone()
            table.preallocate(len(self))
        else:
            table = self.table
        copy = type_(table)
        copy.extend(self, deep=deep)
        return copy
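
    # Illustrative sketch: a deep, type-preserving copy via cast (this is
    # effectively what ``copy(deep=True)`` below provides):
    #
    #     duplicate = catalog.cast(type(catalog), deep=True)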

    def copy(self, deep=False):
        """Copy a catalog (default is not a deep copy).
        """
        return self.cast(type(self), deep)

    def extend(self, iterable, deep=False, mapper=None):
        """Append all records in the given iterable to the catalog.

        Parameters
        ----------
        iterable :
            Any Python iterable containing records.
        deep : `bool`, optional
            If `True`, the records will be deep-copied; ignored if
            mapper is not `None` (that always implies `True`).
        mapper : `lsst.afw.table.schemaMapper.SchemaMapper`, optional
            Used to translate records.
        """
        self._columns = None
        # Support the older positional form extend(iterable, mapper), in which
        # a SchemaMapper was passed in place of ``deep``.
        if type(deep).__name__ == "SchemaMapper":
            mapper = deep
            deep = None
        if isinstance(iterable, type(self)):
            if mapper is not None:
                self._extend(iterable, mapper)
            else:
                self._extend(iterable, deep)
        else:
            for record in iterable:
                if mapper is not None:
                    self._append(self.table.copyRecord(record, mapper))
                else:
                    self._append(self.table.copyRecord(record))
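
    # Illustrative sketch: ``inputCatalog`` and ``mapper`` (a SchemaMapper from
    # inputCatalog.schema to catalog.schema) are assumed to exist:
    #
    #     catalog.extend(inputCatalog, mapper=mapper)   # translate while copying
    #     catalog.extend(inputCatalog, deep=True)       # plain deep copy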

    def __reduce__(self):
        import lsst.afw.fits
        return lsst.afw.fits.reduceToFits(self)
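
    # Because __reduce__ serializes through FITS, catalogs can be pickled, e.g.:
    #
    #     import pickle
    #     restored = pickle.loads(pickle.dumps(catalog))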

    def asAstropy(self, cls=None, copy=False, unviewable="copy"):
        """Return an astropy.table.Table (or subclass thereof) view into this catalog.

        Parameters
        ----------
        cls :
            Table subclass to use; `None` implies `astropy.table.Table`
            itself.  Use `astropy.table.QTable` to get Quantity columns.
        copy : `bool`, optional
            If `True`, copy data from the LSST catalog to the astropy
            table.  Not copying is usually faster, but can keep memory
            from being freed if columns are later removed from the
            Astropy view.
        unviewable : `str`, optional
            One of the following options (which is ignored if
            ``copy=True``), indicating how to handle field types (`str`
            and `Flag`) for which views cannot be constructed:

            - 'copy' (default): copy only the unviewable fields.
            - 'raise': raise ValueError if unviewable fields are present.
            - 'skip': do not include unviewable fields in the Astropy Table.

        Returns
        -------
        cls : `astropy.table.Table`
            Astropy view into the catalog.

        Raises
        ------
        ValueError
            Raised if the `unviewable` option is not a known value, or
            if the option is 'raise' and an uncopyable field is found.
        """
        if cls is None:
            cls = astropy.table.Table
        if unviewable not in ("copy", "raise", "skip"):
            raise ValueError(
                f"'unviewable'={unviewable!r} must be one of 'copy', 'raise', or 'skip'")
        ps = self.getMetadata()
        meta = ps.toOrderedDict() if ps is not None else None
        columns = []
        items = self.schema.extract("*", ordered=True)
        for name, item in items.items():
            key = item.key
            unit = item.field.getUnits() or None
            if key.getTypeString() == "String":
                if not copy:
                    if unviewable == "raise":
                        raise ValueError("Cannot extract string "
                                         "unless copy=True or unviewable='copy' or 'skip'.")
                    elif unviewable == "skip":
                        continue
                data = np.zeros(
                    len(self), dtype=np.dtype((str, key.getSize())))
                for i, record in enumerate(self):
                    data[i] = record.get(key)
            elif key.getTypeString() == "Flag":
                if not copy:
                    if unviewable == "raise":
                        raise ValueError("Cannot extract packed bit columns "
                                         "unless copy=True or unviewable='copy' or 'skip'.")
                    elif unviewable == "skip":
                        continue
                data = self.columns.get_bool_array(key)
            elif key.getTypeString() == "Angle":
                data = self.columns[key]
                unit = "radian"
                if copy:
                    data = data.copy()
            elif "Array" in key.getTypeString() and key.isVariableLength():
                # Views cannot be made into variable-length array fields.
                if unviewable == "raise":
                    raise ValueError("Cannot extract variable-length array fields unless unviewable='skip'.")
                elif unviewable == "skip" or unviewable == "copy":
                    continue
            else:
                data = self.columns[key]
                if copy:
                    data = data.copy()
            columns.append(
                astropy.table.Column(
                    data,
                    name=name,
                    unit=unit,
                    description=item.field.getDoc()
                )
            )
        return cls(columns, meta=meta, copy=False)
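
    # Illustrative sketch:
    #
    #     tbl = catalog.asAstropy()                                      # view where possible
    #     qtbl = catalog.asAstropy(cls=astropy.table.QTable, copy=True)  # Quantity columns
    #     tbl.write("catalog.ecsv")                                      # any astropy I/O then works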

    def __dir__(self):
        """
        This custom dir is necessary due to the custom getattr below.
        Without it, not all of the methods available are returned with dir.
        """

        def recursive_get_class_dir(cls):
            """
            Return a set containing the names of all methods
            for a given class *and* all of its base classes.
            """
            result = set()
            for subcls in cls.__bases__:
                result |= recursive_get_class_dir(subcls)
            result |= set(cls.__dict__.keys())
            return result

        return sorted(set(dir(self.columns)) | set(dir(self.table))
                      | recursive_get_class_dir(type(self)) | set(self.__dict__.keys()))

    def __getattr__(self, name):
        # Forward unknown attribute lookups to the table and then to the
        # column view, for convenience.
        if name == "_columns":
            self._columns = None
            return self._columns
        try:
            return getattr(self.table, name)
        except AttributeError:
            return getattr(self.columns, name)
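
    # Illustrative sketch of the forwarding: methods defined on the underlying
    # table (e.g. getSchema) or on the column view can be called directly on
    # the catalog:
    #
    #     schema = catalog.getSchema()   # resolved via catalog.table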

    def __str__(self):
        if self.isContiguous():
            return str(self.asAstropy())
        else:
            fields = ' '.join(x.field.getName() for x in self.schema)
            return f"Non-contiguous afw.Catalog of {len(self)} rows.\ncolumns: {fields}"

    def __repr__(self):
        return "%s\n%s" % (type(self), self)

    def extract(self, *patterns, **kwds):
        """Extract a dictionary of {<name>: <column-array>} in which the field
        names match the given shell-style glob pattern(s).

        Any number of glob patterns may be passed (including none); the result
        will be the union of the results of each glob considered separately.

        Note that extract("*", copy=True) provides an easy way to transform a
        catalog into a set of writeable contiguous NumPy arrays.

        This routine unpacks `Flag` columns into full boolean arrays.  String
        fields are silently ignored.

        Parameters
        ----------
        patterns : Array of `str`
            List of glob patterns to use to select field names.
        kwds : `dict`
            Dictionary of additional keyword arguments.  May contain:

            ``items`` : `dict`
                The result of a call to self.schema.extract(); this will be
                used instead of doing any new matching, and allows the pattern
                matching to be reused to extract values from multiple records.
                This keyword is incompatible with any positional arguments and
                the regex, sub, and ordered keyword arguments.
            ``where`` : array index expression
                Any expression that can be passed as indices to a NumPy array,
                including slices, boolean arrays, and index arrays, that will
                be used to index each column array.  This is applied before
                arrays are copied when copy is True, so if the indexing results
                in an implicit copy no unnecessary second copy is performed.
            ``copy`` : `bool`
                If True, the returned arrays will be contiguous copies rather
                than strided views into the catalog.  This ensures that the
                lifetime of the returned arrays is not tied to the lifetime of
                the catalog, and it also may improve performance if the arrays
                are used repeatedly.  Default is False.  Copies are always made
                if the catalog is noncontiguous, but if ``copy=False`` these
                are set as read-only to ensure code does not assume they are
                views that could modify the original catalog.
            ``regex`` : `str` or `re` pattern
                A regular expression to be used in addition to any glob
                patterns passed as positional arguments.  Note that this will
                be compared with re.match, not re.search.
            ``sub`` : `str`
                A replacement string (see re.MatchObject.expand) used to set
                the dictionary keys of any fields matched by regex.
            ``ordered`` : `bool`
                If True, a collections.OrderedDict will be returned instead of
                a standard dict, with the order corresponding to the definition
                order of the Schema.  Default is False.

        Returns
        -------
        d : `dict`
            Dictionary of extracted name-column array sets.

        Raises
        ------
        ValueError
            Raised if a list of ``items`` is supplied with additional keywords.
        """
        copy = kwds.pop("copy", False)
        where = kwds.pop("where", None)
        d = kwds.pop("items", None)
        # If ``items`` is given, the schema matching has already been done and
        # no further keywords are allowed; otherwise run schema.extract here.
        if d is None:
            d = self.schema.extract(*patterns, **kwds).copy()
        elif kwds:
            raise ValueError(
                "kwd 'items' was specified, which is not compatible with additional keywords")

        def processArray(a):
            if where is not None:
                a = a[where]
            if copy:
                a = np.ascontiguousarray(a)
            return a

        # Iterate over a list because entries may be deleted inside the loop.
        for name, schemaItem in list(d.items()):
            key = schemaItem.key
            if key.getTypeString() == "String":
                del d[name]
            else:
                d[name] = processArray(self[schemaItem.key])
        return d
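
    # Illustrative sketch (field names and the boolean selection are assumed
    # examples):
    #
    #     arrays = catalog.extract("base_*", copy=True)   # contiguous, writeable copies
    #     good = catalog.extract("*_flux", where=catalog["detect_isPrimary"])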


Catalog.register("Base", BaseCatalog)