from lsst.utils import continueClass, TemplateMeta
from ._table import BaseRecord, BaseCatalog
from ._schema import Key
def extract(self, *patterns, **kwargs):
    """Extract a dictionary of {<name>: <field-value>} in which the field
    names match the given shell-style glob pattern(s).

    Any number of glob patterns may be passed; the result will be the union
    of all the result of each glob considered separately.

    Parameters
    ----------
    patterns : `str`
        Glob patterns used to select field names.
    kwargs : `dict`
        Additional keyword arguments.  May contain:

        ``items``
            The result of a call to self.schema.extract(); this will be used
            instead of doing any new matching, and allows the pattern matching
            to be reused to extract values from multiple records. This
            keyword is incompatible with any position arguments and the regex,
            sub, and ordered keyword arguments.
        ``split`` : `bool`
            If `True`, fields with named subfields (e.g. points) will be split
            into separate items in the dict; instead of {"point":
            lsst.geom.Point2I(2,3)}, for instance, you'd get {"point.x":
            2, "point.y": 3}. Default is `False`.
        ``regex`` : `str` or `re` pattern object
            A regular expression to be used in addition to any glob patterns
            passed as positional arguments. Note that this will be compared
            with re.match, not re.search.
        ``sub`` : `str`
            A replacement string (see `re.MatchObject.expand`) used to set the
            dictionary keys of any fields matched by regex.
        ``ordered`` : `bool`
            If `True`, a `collections.OrderedDict` will be returned instead of
            a standard dict, with the order corresponding to the definition
            order of the `Schema`. Default is `False`.

    Returns
    -------
    d : `dict`
        Mapping from matched field name to the field's value in this record.
    """
    d = kwargs.pop("items", None)
    split = kwargs.pop("split", False)
    if d is None:
        # No precomputed items: match the patterns against our schema.
        d = self.schema.extract(*patterns, **kwargs).copy()
    elif kwargs:
        # items= already encodes the matching; no other keywords make sense.
        kwargsStr = ", ".join(kwargs.keys())
        raise ValueError(f"Unrecognized keyword arguments for extract: {kwargsStr}")
    # Use list() because the loop may add/delete entries of d.
    for name, schemaItem in list(d.items()):
        key = schemaItem.key
        if split and key.HAS_NAMED_SUBFIELDS:
            # Replace the compound entry with one entry per named subfield.
            for subname, subkey in zip(key.subfields, key.subkeys):
                d[f"{name}.{subname}"] = self.get(subkey)
            del d[name]
        else:
            d[name] = self.get(schemaItem.key)
    return d
def __repr__(self):
    """Return the record's type header followed by its str() rendering."""
    return f"{type(self)}\n{self}"
def getColumnView(self):
    """Return a column view of this catalog's records, refreshing and
    caching it on ``self._columns``."""
    self._columns = self._getColumnView()
    return self._columns

def __getColumns(self):
    # Lazily create and cache the column view on first access.
    if not hasattr(self, "_columns") or self._columns is None:
        self._columns = self._getColumnView()
    return self._columns

columns = property(__getColumns, doc="a column view of the catalog")
def __getitem__(self, key):
    """Return the record at index ``key`` if ``key`` is an integer,
    return a column if ``key`` is a string field name or Key,
    or return a subset of the catalog if ``key`` is a slice
    or boolean NumPy array.
    """
    if type(key) is slice:
        # Normalize the slice: subset() needs concrete bounds and step.
        (start, stop, step) = (key.start, key.stop, key.step)
        if step is None:
            step = 1
        if start is None:
            start = 0
        if stop is None:
            stop = len(self)
        return self.subset(start, stop, step)
    elif isinstance(key, np.ndarray):
        if key.dtype == bool:
            return self.subset(key)
        raise RuntimeError(f"Unsupported array type for indexing non-contiguous Catalog: {key.dtype}")
    elif isinstance(key, Key) or isinstance(key, str):
        if not self.isContiguous():
            if isinstance(key, str):
                key = self.schema[key].asKey()
            array = self._getitem_(key)
            # Freeze the returned array so a caller does not mistake it for
            # a live view and expect writes to propagate to the catalog.
            array.flags.writeable = False
            return array
        return self.columns[key]
    else:
        # Integer index: return the single record.
        return self._getitem_(key)
def __setitem__(self, key, value):
    """If ``key`` is an integer, set ``catalog[key]`` to
    ``value``. Otherwise select column ``key`` and set it to
    ``value``.
    """
    # Any mutation may invalidate the cached column view.
    self._columns = None
    if isinstance(key, str):
        key = self.schema[key].asKey()
    if isinstance(key, Key):
        if isinstance(key, Key["Flag"]):
            # Flag columns are bit-packed and need the dedicated setter.
            self._set_flag(key, value)
        else:
            self.columns[key] = value
    else:
        return self.set(key, value)
def __delitem__(self, key):
    """Delete the record at integer index ``key``, or the records selected
    by a slice."""
    self._columns = None  # invalidate the cached column view
    if isinstance(key, slice):
        self._delslice_(key)
    else:
        self._delitem_(key)

def insert(self, key, value):
    """Insert record ``value`` before index ``key``, invalidating the
    cached column view."""
    self._columns = None
    self._insert(key, value)

def addNew(self):
    """Append a new empty record to the catalog and return it."""
    self._columns = None
    return self._addNew()
def cast(self, type_, deep=False):
    """Return a copy of the catalog with the given type.

    Parameters
    ----------
    type_ :
        Type of catalog to return.
    deep : `bool`, optional
        If `True`, clone the table and deep copy all records.

    Returns
    -------
    result :
        Copy of catalog with the requested type.
    """
    if deep:
        table = self.table.clone()
        # Reserve space up front so the extend below does not reallocate.
        table.preallocate(len(self))
    else:
        table = self.table
    # Local renamed from `copy` to avoid shadowing the builtin and the
    # sibling Catalog.copy method.
    result = type_(table)
    result.extend(self, deep=deep)
    return result
200 Copy a catalog (default is not a deep copy).
def extend(self, iterable, deep=False, mapper=None):
    """Append all records in the given iterable to the catalog.

    Parameters
    ----------
    iterable :
        Any Python iterable containing records.
    deep : `bool`, optional
        If `True`, the records will be deep-copied; ignored if
        mapper is not `None` (that always implies `True`).
    mapper : `lsst.afw.table.schemaMapper.SchemaMapper`, optional
        Used to translate records.
    """
    # Any mutation invalidates the cached column view.
    self._columns = None
    # Backward-compatible call style: a SchemaMapper may be passed
    # positionally where ``deep`` goes.  Checked by class name rather than
    # isinstance — NOTE(review): presumably to avoid importing SchemaMapper
    # here; confirm against the rest of the package.
    if type(deep).__name__ == "SchemaMapper":
        mapper = deep
        deep = None
    if isinstance(iterable, type(self)):
        # Same catalog type: use the fast bulk C++ path.
        if mapper is not None:
            self._extend(iterable, mapper)
        else:
            self._extend(iterable, deep)
    else:
        for record in iterable:
            if mapper is not None:
                self._append(self.table.copyRecord(record, mapper))
            else:
                self._append(self.table.copyRecord(record))
239 return lsst.afw.fits.reduceToFits(self)
def asAstropy(self, cls=None, copy=False, unviewable="copy"):
    """Return an astropy.table.Table (or subclass thereof) view into this catalog.

    Parameters
    ----------
    cls :
        Table subclass to use; `None` implies `astropy.table.Table`
        itself.  Use `astropy.table.QTable` to get Quantity columns.
    copy : bool, optional
        If `True`, copy data from the LSST catalog to the astropy
        table.  Not copying is usually faster, but can keep memory
        from being freed if columns are later removed from the
        Astropy table.
    unviewable : `str`, optional
        One of the following options (which is ignored if
        copy=`True` ), indicating how to handle field types (`str`
        and `Flag`) for which views cannot be constructed:
        - 'copy' (default): copy only the unviewable fields.
        - 'raise': raise ValueError if unviewable fields are present.
        - 'skip': do not include unviewable fields in the Astropy Table.

    Returns
    -------
    cls : `astropy.table.Table`
        Astropy view into the catalog.

    Raises
    ------
    ValueError
        Raised if the `unviewable` option is not a known value, or
        if the option is 'raise' and an uncopyable field is found.
    """
    if cls is None:
        cls = astropy.table.Table
    if unviewable not in ("copy", "raise", "skip"):
        raise ValueError(
            f"'unviewable'={unviewable!r} must be one of 'copy', 'raise', or 'skip'")
    ps = self.getMetadata()
    meta = ps.toOrderedDict() if ps is not None else None
    columns = []
    items = self.schema.extract("*", ordered=True)
    for name, item in items.items():
        key = item.key
        unit = item.field.getUnits() or None
        if key.getTypeString() == "String":
            # String fields cannot be viewed; honor the unviewable policy.
            if not copy:
                if unviewable == "raise":
                    raise ValueError("Cannot extract string "
                                     "unless copy=True or unviewable='copy' or 'skip'.")
                elif unviewable == "skip":
                    continue
            data = np.zeros(
                len(self), dtype=np.dtype((str, key.getSize())))
            for i, record in enumerate(self):
                data[i] = record.get(key)
        elif key.getTypeString() == "Flag":
            # Flags are bit-packed, so a view is likewise impossible.
            if not copy:
                if unviewable == "raise":
                    raise ValueError("Cannot extract packed bit columns "
                                     "unless copy=True or unviewable='copy' or 'skip'.")
                elif unviewable == "skip":
                    continue
            data = self.columns.get_bool_array(key)
        elif key.getTypeString() == "Angle":
            data = self.columns.get(key)
            unit = "radian"
            if copy:
                data = data.copy()
        elif "Array" in key.getTypeString() and key.isVariableLength():
            # Variable-length arrays have no fixed-shape column representation.
            if unviewable == "raise":
                raise ValueError(
                    "Cannot extract variable-length array fields unless unviewable='skip'.")
            elif unviewable == "skip" or unviewable == "copy":
                continue
        else:
            data = self.columns.get(key)
            if copy:
                data = data.copy()
        columns.append(
            astropy.table.Column(
                data,
                name=name,
                unit=unit,
                description=item.field.getDoc()
            ))
    return cls(columns, meta=meta, copy=False)
def __dir__(self):
    """
    This custom dir is necessary due to the custom getattr below.
    Without it, not all of the methods available are returned with dir.
    """
    def recursive_get_class_dir(cls):
        """
        Return a set containing the names of all methods
        for a given class *and* all of its subclasses.
        """
        result = set()
        if cls.__bases__:
            for subcls in cls.__bases__:
                result |= recursive_get_class_dir(subcls)
        result |= set(cls.__dict__.keys())
        return result
    # Merge names forwarded from the column view and table (see __getattr__)
    # with those defined on the class hierarchy and the instance itself.
    return sorted(set(dir(self.columns)) | set(dir(self.table))
                  | recursive_get_class_dir(type(self)) | set(self.__dict__.keys()))
def __getattr__(self, name):
    # Unknown attributes are forwarded to the table first, then the column
    # view, so table/column methods appear directly on the catalog.
    if name == "_columns":
        # Guard: the cache attribute itself must never recurse through the
        # forwarding below. NOTE(review): reconstructed branch — confirm the
        # original initialized the cache to None here.
        self._columns = None
        return self._columns
    try:
        return getattr(self.table, name)
    except AttributeError:
        return getattr(self.columns, name)
def __str__(self):
    """Render as an astropy table when contiguous; otherwise summarize."""
    if self.isContiguous():
        return str(self.asAstropy())
    else:
        # Non-contiguous catalogs cannot provide column views, so just list
        # the row count and field names.
        fields = ' '.join(x.field.getName() for x in self.schema)
        return f"Non-contiguous afw.Catalog of {len(self)} rows.\ncolumns: {fields}"
def __repr__(self):
    """Return the catalog's type header followed by its str() rendering."""
    # f-string instead of %-formatting for consistency with
    # BaseRecord.__repr__ in this module; output is identical.
    return f"{type(self)}\n{self}"
# Register the concrete BaseCatalog as the "Base" specialization of the
# Catalog ABC (TemplateMeta registration).
Catalog.register("Base", BaseCatalog)