import numpy as np

from lsst.utils import continueClass, TemplateMeta

from ._table import BaseRecord, BaseCatalog
from ._schema import Key
35 """Extract a dictionary of {<name>: <field-value>} in which the field
36 names match the given shell-style glob pattern(s).
38 Any number of glob patterns may be passed; the result will be the union
39 of all the result of each glob considered separately.
44 The result of a call to self.schema.extract(); this will be used
45 instead of doing any new matching, and allows the pattern matching
46 to be reused to extract values
from multiple records. This
47 keyword
is incompatible
with any position arguments
and the regex,
48 sub,
and ordered keyword arguments.
49 regex : `str`
or `re` pattern object
50 A regular expression to be used
in addition to any glob patterns
51 passed
as positional arguments. Note that this will be compared
52 with re.match,
not re.search.
54 A replacement string (see `re.MatchObject.expand`) used to set the
55 dictionary keys of any fields matched by regex.
57 If `
True`, a `collections.OrderedDict` will be returned instead of
58 a standard dict,
with the order corresponding to the definition
59 order of the `Schema`. Default
is `
False`.
61 d = kwargs.pop("items",
None)
63 d = self.schema.
extract(*patterns, **kwargs).copy()
65 kwargsStr =
", ".join(kwargs.keys())
66 raise ValueError(f
"Unrecognized keyword arguments for extract: {kwargsStr}")
67 return {name: self.get(schemaItem.key)
for name, schemaItem
in d.items()}
70 return f
"{type(self)}\n{self}"
76 self.
_columns = self._getColumnView()
79 def __getColumns(self):
80 if not hasattr(self,
"_columns")
or self.
_columns is None:
81 self.
_columns = self._getColumnView()
83 columns = property(__getColumns, doc=
"a column view of the catalog")
85 def __getitem__(self, key):
86 """Return the record at index key if key is an integer,
87 return a column
if `key`
is a string field name
or Key,
88 or return a subset of the catalog
if key
is a slice
89 or boolean NumPy array.
91 if type(key)
is slice:
92 (start, stop, step) = (key.start, key.stop, key.step)
99 return self.subset(start, stop, step)
100 elif isinstance(key, np.ndarray):
101 if key.dtype == bool:
102 return self.subset(key)
103 raise RuntimeError(f
"Unsupported array type for indexing non-contiguous Catalog: {key.dtype}")
104 elif isinstance(key, Key)
or isinstance(key, str):
105 if not self.isContiguous():
106 if isinstance(key, str):
107 key = self.schema[key].asKey()
108 array = self._getitem_(key)
114 array.flags.writeable =
False
118 return self._getitem_(key)
120 def __setitem__(self, key, value):
121 """If ``key`` is an integer, set ``catalog[key]`` to
122 ``value``. Otherwise select column ``key`` and set it to
126 if isinstance(key, str):
127 key = self.schema[key].asKey()
128 if isinstance(key, Key):
129 if isinstance(key, Key[
"Flag"]):
130 self._set_flag(key, value)
134 return self.set(key, value)
138 if isinstance(key, slice):
147 def insert(self, key, value):
149 self._insert(key, value)
157 return self._addNew()
159 def cast(self, type_, deep=False):
160 """Return a copy of the catalog with the given type.
165 Type of catalog to return.
166 deep : `bool`, optional
167 If `
True`, clone the table
and deep copy all records.
172 Copy of catalog
with the requested type.
175 table = self.table.
clone()
176 table.preallocate(len(self))
180 copy.extend(self, deep=deep)
183 def copy(self, deep=False):
185 Copy a catalog (default is not a deep copy).
189 def extend(self, iterable, deep=False, mapper=None):
190 """Append all records in the given iterable to the catalog.
195 Any Python iterable containing records.
196 deep : `bool`, optional
197 If `True`, the records will be deep-copied; ignored
if
198 mapper
is not `
None` (that always implies `
True`).
199 mapper : `lsst.afw.table.schemaMapper.SchemaMapper`, optional
200 Used to translate records.
205 if type(deep).__name__ ==
"SchemaMapper":
208 if isinstance(iterable,
type(self)):
209 if mapper
is not None:
210 self._extend(iterable, mapper)
212 self._extend(iterable, deep)
214 for record
in iterable:
215 if mapper
is not None:
216 self._append(self.table.copyRecord(record, mapper))
218 self._append(self.table.copyRecord(record))
224 return lsst.afw.fits.reduceToFits(self)
226 def asAstropy(self, cls=None, copy=False, unviewable="copy"):
227 """Return an astropy.table.Table (or subclass thereof) view into this catalog.
232 Table subclass to use; `None` implies `astropy.table.Table`
233 itself. Use `astropy.table.QTable` to get Quantity columns.
234 copy : bool, optional
235 If `
True`, copy data
from the LSST catalog to the astropy
236 table. Not copying
is usually faster, but can keep memory
237 from being freed
if columns are later removed
from the
239 unviewable : `str`, optional
240 One of the following options (which
is ignored
if
241 copy=`
True` ), indicating how to handle field types (`str`
242 and `Flag`)
for which views cannot be constructed:
244 -
'copy' (default): copy only the unviewable fields.
245 -
'raise':
raise ValueError
if unviewable fields are present.
246 -
'skip': do
not include unviewable fields
in the Astropy Table.
250 cls : `astropy.table.Table`
251 Astropy view into the catalog.
256 Raised
if the `unviewable` option
is not a known value,
or
257 if the option
is 'raise' and an uncopyable field
is found.
262 cls = astropy.table.Table
263 if unviewable
not in (
"copy",
"raise",
"skip"):
265 f
"'unviewable'={unviewable!r} must be one of 'copy', 'raise', or 'skip'")
266 ps = self.getMetadata()
267 meta = ps.toOrderedDict()
if ps
is not None else None
269 items = self.schema.extract(
"*", ordered=
True)
270 for name, item
in items.items():
272 unit = item.field.getUnits()
or None
273 if key.getTypeString() ==
"String":
275 if unviewable ==
"raise":
276 raise ValueError(
"Cannot extract string "
277 "unless copy=True or unviewable='copy' or 'skip'.")
278 elif unviewable ==
"skip":
281 len(self), dtype=np.dtype((str, key.getSize())))
282 for i, record
in enumerate(self):
283 data[i] = record.get(key)
284 elif key.getTypeString() ==
"Flag":
286 if unviewable ==
"raise":
287 raise ValueError(
"Cannot extract packed bit columns "
288 "unless copy=True or unviewable='copy' or 'skip'.")
289 elif unviewable ==
"skip":
291 data = self.
columns.get_bool_array(key)
292 elif key.getTypeString() ==
"Angle":
297 elif "Array" in key.getTypeString()
and key.isVariableLength():
299 if unviewable ==
"raise":
300 raise ValueError(
"Cannot extract variable-length array fields unless unviewable='skip'.")
301 elif unviewable ==
"skip" or unviewable ==
"copy":
308 astropy.table.Column(
312 description=item.field.getDoc()
315 return cls(columns, meta=meta, copy=
False)
319 This custom dir is necessary due to the custom getattr below.
320 Without it,
not all of the methods available are returned
with dir.
323 def recursive_get_class_dir(cls):
325 Return a set containing the names of all methods
326 for a given
class *
and* all of its subclasses.
330 for subcls
in cls.__bases__:
331 result |= recursive_get_class_dir(subcls)
332 result |=
set(cls.__dict__.keys())
335 | recursive_get_class_dir(
type(self)) |
set(self.__dict__.keys()))
341 if name ==
"_columns":
345 return getattr(self.table, name)
346 except AttributeError:
347 return getattr(self.
columns, name)
350 if self.isContiguous():
353 fields =
' '.join(x.field.getName()
for x
in self.schema)
354 return f
"Non-contiguous afw.Catalog of {len(self)} rows.\ncolumns: {fields}"
357 return "%s\n%s" % (
type(self), self)
# Register the concrete C++ BaseCatalog as the "Base" instantiation of the
# Catalog ABC/template.  (Stray extraction artifacts that followed this call
# — duplicated method signatures and a C++ docstring fragment — removed.)
Catalog.register("Base", BaseCatalog)