import numpy as np

from lsst.utils import continueClass, TemplateMeta

from ._table import BaseRecord, BaseCatalog
from ._schema import Key
def extract(self, *patterns, **kwargs):
    """Extract a dictionary of {<name>: <field-value>} in which the field names
    match the given shell-style glob pattern(s).

    Any number of glob patterns may be passed; the result will be the union of all
    the result of each glob considered separately.

    Parameters
    ----------
    items : `dict`, optional
        The result of a call to self.schema.extract(); this will be used
        instead of doing any new matching, and allows the pattern matching
        to be reused to extract values from multiple records.  This
        keyword is incompatible with any position arguments and the regex,
        sub, and ordered keyword arguments.
    split : `bool`, optional
        If `True`, fields with named subfields (e.g. points) will be split
        into separate items in the dict; instead of {"point":
        lsst.geom.Point2I(2,3)}, for instance, you'd get {"point.x":
        2, "point.y": 3}. Default is `False`.
    regex : `str` or `re` pattern object
        A regular expression to be used in addition to any glob patterns
        passed as positional arguments.  Note that this will be compared
        with re.match, not re.search.
    sub : `str`, optional
        A replacement string (see `re.MatchObject.expand`) used to set the
        dictionary keys of any fields matched by regex.
    ordered : `bool`, optional
        If `True`, a `collections.OrderedDict` will be returned instead of
        a standard dict, with the order corresponding to the definition
        order of the `Schema`. Default is `False`.
    """
    d = kwargs.pop("items", None)
    split = kwargs.pop("split", False)
    if d is None:
        # No precomputed mapping supplied: do the pattern matching now.
        d = self.schema.extract(*patterns, **kwargs).copy()
    elif kwargs:
        # 'items' is incompatible with every other keyword argument.
        kwargsStr = ", ".join(kwargs.keys())
        raise ValueError(f"Unrecognized keyword arguments for extract: {kwargsStr}")
    # Iterate over a snapshot of the items so entries can be added/removed
    # from d while looping.
    for name, schemaItem in list(d.items()):
        key = schemaItem.key
        if split and key.HAS_NAMED_SUBFIELDS:
            # Replace the compound entry with one entry per named subfield,
            # e.g. "point" -> "point.x", "point.y".
            for subname, subkey in zip(key.subfields, key.subkeys):
                d[f"{name}.{subname}"] = self.get(subkey)
            del d[name]
        else:
            d[name] = self.get(schemaItem.key)
    return d
def __repr__(self):
    # NOTE(review): method name reconstructed — the def line was lost in
    # extraction; presumably BaseRecord.__repr__ given the "85" source line
    # number.  Confirm against the original file.
    """Return the record's type followed by its string form."""
    return f"{type(self)}\n{self}"
def getColumnView(self):
    """Return a column view of the catalog, caching it on ``self._columns``."""
    # Garbled original read "self._columns_columns" — extraction duplicated
    # the attribute name; the cache attribute is _columns (see __getColumns).
    self._columns = self._getColumnView()
    return self._columns

def __getColumns(self):
    # Lazily build the column view the first time it is requested (or after
    # a mutation resets _columns to None).
    if not hasattr(self, "_columns") or self._columns is None:
        self._columns = self._getColumnView()
    return self._columns

columns = property(__getColumns, doc="a column view of the catalog")
101 """Return the record at index key if key is an integer,
102 return a column if `key` is a string field name or Key,
103 or return a subset of the catalog if key is a slice
104 or boolean NumPy array.
106 if type(key)
is slice:
107 (start, stop, step) = (key.start, key.stop, key.step)
114 return self.subset(start, stop, step)
115 elif isinstance(key, np.ndarray):
116 if key.dtype == bool:
117 return self.subset(key)
118 raise RuntimeError(f
"Unsupported array type for indexing non-contiguous Catalog: {key.dtype}")
119 elif isinstance(key, Key)
or isinstance(key, str):
120 if not self.isContiguous():
121 if isinstance(key, str):
122 key = self.schema[key].asKey()
123 array = self._getitem_(key)
129 array.flags.writeable =
False
131 return self.
columnscolumns[key]
133 return self._getitem_(key)
def __setitem__(self, key, value):
    """If ``key`` is an integer, set ``catalog[key]`` to
    ``value``. Otherwise select column ``key`` and set it to
    ``value``.
    """
    # Any write may invalidate the cached column view.
    # (Reconstructed reset — this line was lost in extraction; TODO confirm.)
    self._columns = None
    if isinstance(key, str):
        key = self.schema[key].asKey()
    if isinstance(key, Key):
        if isinstance(key, Key["Flag"]):
            # Flag columns are bit-packed and need the dedicated setter.
            self._set_flag(key, value)
        else:
            self.columns[key] = value
    else:
        # Integer index: replace the record.
        return self.set(key, value)
153 if isinstance(key, slice):
164 self._insert(key, value)
172 return self._addNew()
def cast(self, type_, deep=False):
    """Return a copy of the catalog with the given type.

    Parameters
    ----------
    type_ :
        Type of catalog to return.
    deep : `bool`, optional
        If `True`, clone the table and deep copy all records.

    Returns
    -------
    copy :
        Copy of catalog with the requested type.
    """
    if deep:
        # Deep copy: clone the table and reserve room for every record so
        # the extend below is contiguous.
        table = self.table.clone()
        table.preallocate(len(self))
    else:
        # Shallow copy shares the table (reconstructed else-branch — lost
        # in extraction; TODO confirm).
        table = self.table
    copy = type_(table)
    copy.extend(self, deep=deep)
    return copy
def copy(self, deep=False):
    """Copy a catalog (default is not a deep copy)."""
    # Garbled original read "self.castcast" — extraction duplicated the
    # method name; this simply delegates to cast() with the same type.
    return self.cast(type(self), deep)
def extend(self, iterable, deep=False, mapper=None):
    """Append all records in the given iterable to the catalog.

    Parameters
    ----------
    iterable :
        Any Python iterable containing records.
    deep : `bool`, optional
        If `True`, the records will be deep-copied; ignored if
        mapper is not `None` (that always implies `True`).
    mapper : `lsst.afw.table.schemaMapper.SchemaMapper`, optional
        Used to translate records.
    """
    # Mutation invalidates the cached column view (reconstructed — lost in
    # extraction; TODO confirm).
    self._columns = None
    # A SchemaMapper may have been passed as the second positional argument;
    # checked by type name rather than isinstance, presumably to avoid an
    # import cycle — confirm against the original file.
    if type(deep).__name__ == "SchemaMapper":
        mapper = deep
        deep = None
    if isinstance(iterable, type(self)):
        # Same catalog type: use the fast C++ bulk-extend path.
        if mapper is not None:
            self._extend(iterable, mapper)
        else:
            self._extend(iterable, deep)
    else:
        # Generic iterable: append record by record.
        for record in iterable:
            if mapper is not None:
                self._append(self.table.copyRecord(record, mapper))
            elif deep:
                self._append(self.table.copyRecord(record))
            else:
                self._append(record)
239 return lsst.afw.fits.reduceToFits(self)
def asAstropy(self, cls=None, copy=False, unviewable="copy"):
    """Return an astropy.table.Table (or subclass thereof) view into this catalog.

    Parameters
    ----------
    cls :
        Table subclass to use; `None` implies `astropy.table.Table`
        itself.  Use `astropy.table.QTable` to get Quantity columns.
    copy : bool, optional
        If `True`, copy data from the LSST catalog to the astropy
        table.  Not copying is usually faster, but can keep memory
        from being freed if columns are later removed from the
        Astropy view.
    unviewable : `str`, optional
        One of the following options (which is ignored if
        copy=`True` ), indicating how to handle field types (`str`
        and `Flag`) for which views cannot be constructed:

        - 'copy' (default): copy only the unviewable fields.
        - 'raise': raise ValueError if unviewable fields are present.
        - 'skip': do not include unviewable fields in the Astropy Table.

    Returns
    -------
    cls : `astropy.table.Table`
        Astropy view into the catalog.

    Raises
    ------
    ValueError
        Raised if the `unviewable` option is not a known value, or
        if the option is 'raise' and an uncopyable field is found.
    """
    import astropy.table
    if cls is None:
        cls = astropy.table.Table
    if unviewable not in ("copy", "raise", "skip"):
        raise ValueError(
            f"'unviewable'={unviewable!r} must be one of 'copy', 'raise', or 'skip'")
    ps = self.getMetadata()
    meta = ps.toOrderedDict() if ps is not None else None
    columns = []
    items = self.schema.extract("*", ordered=True)
    for name, item in items.items():
        key = item.key
        unit = item.field.getUnits() or None  # use None instead of ""
        if key.getTypeString() == "String":
            # String columns cannot be viewed; the "if not copy" guard is
            # reconstructed (lost in extraction) — TODO confirm.
            if not copy:
                if unviewable == "raise":
                    raise ValueError("Cannot extract string "
                                     "unless copy=True or unviewable='copy' or 'skip'.")
                elif unviewable == "skip":
                    continue
            data = np.zeros(
                len(self), dtype=np.dtype((str, key.getSize())))
            for i, record in enumerate(self):
                data[i] = record.get(key)
        elif key.getTypeString() == "Flag":
            # Packed bit columns cannot be viewed either.
            if not copy:
                if unviewable == "raise":
                    raise ValueError("Cannot extract packed bit columns "
                                     "unless copy=True or unviewable='copy' or 'skip'.")
                elif unviewable == "skip":
                    continue
            data = self.columns.get_bool_array(key)
        elif key.getTypeString() == "Angle":
            # Angles are stored in radians (reconstructed branch — TODO
            # confirm against the original file).
            data = self.columns.get(key)
            unit = "radian"
            if copy:
                data = data.copy()
        elif "Array" in key.getTypeString() and key.isVariableLength():
            # Can't get columns for variable-length array fields.
            if unviewable == "raise":
                raise ValueError(
                    "Cannot extract variable-length array fields unless unviewable='skip'.")
            elif unviewable == "skip" or unviewable == "copy":
                continue
        else:
            data = self.columns.get(key)
            if copy:
                data = data.copy()
        columns.append(
            astropy.table.Column(
                data,
                name=name,
                unit=unit,
                description=item.field.getDoc()
            )
        )
    return cls(columns, meta=meta, copy=False)
def __dir__(self):
    """
    This custom dir is necessary due to the custom getattr below.
    Without it, not all of the methods available are returned with dir.
    """
    def recursive_get_class_dir(cls):
        """
        Return a set containing the names of all methods
        for a given class *and* all of its subclasses.
        """
        result = set()
        for subcls in cls.__bases__:
            result |= recursive_get_class_dir(subcls)
        result |= set(cls.__dict__.keys())
        return result
    # Union of: column-view attributes, table attributes, everything defined
    # on this class and its bases, and instance attributes.
    return sorted(set(dir(self.columns)) | set(dir(self.table))
                  | recursive_get_class_dir(type(self)) | set(self.__dict__.keys()))
def __getattr__(self, name):
    """Forward unknown attribute lookups to the table, then the column view."""
    if name == "_columns":
        # Lazily initialize the column-view cache slot the first time it is
        # touched (reconstructed branch body — the original lines were lost
        # in extraction; TODO confirm against the original file).
        self._columns = None
        return None
    try:
        return getattr(self.table, name)
    except AttributeError:
        # Garbled original read "self.columnscolumns"; fall back to the
        # column view for array-like attributes.
        return getattr(self.columns, name)
def __str__(self):
    """Return a human-readable rendering of the catalog."""
    if self.isContiguous():
        # Contiguous catalogs can be rendered via the astropy view
        # (reconstructed branch body — lost in extraction; TODO confirm).
        return str(self.asAstropy())
    else:
        fields = ' '.join(x.field.getName() for x in self.schema)
        return f"Non-contiguous afw.Catalog of {len(self)} rows.\ncolumns: {fields}"
def __repr__(self):
    # NOTE(review): method name reconstructed — the def line was lost in
    # extraction; presumably Catalog.__repr__.  Confirm against the
    # original file.
    """Return the catalog's type followed by its string form."""
    return "%s\n%s" % (type(self), self)
# Register the concrete BaseCatalog as the "Base" specialization of the
# Catalog template (TemplateMeta registration — presumably; the class
# statement is outside this view, confirm against the original file).
Catalog.register("Base", BaseCatalog)
# NOTE(review): The lines below are extraction residue — a duplicated index
# of method signatures plus C++ Doxygen fragments (daf::base) that are not
# valid Python.  Commented out to keep the file importable; confirm against
# the original source and delete.
# def extract(self, *patterns, **kwargs)
# def __getattr__(self, name)
# def cast(self, type_, deep=False)
# def copy(self, deep=False)
# def extend(self, iterable, deep=False, mapper=None)
# def __delitem__(self, key)
# def asAstropy(self, cls=None, copy=False, unviewable="copy")
# def __setitem__(self, key, value)
# def __getitem__(self, key)
# def insert(self, key, value)
# daf::base::PropertyList * list
# daf::base::PropertySet * set