from lsst.utils import continueClass, TemplateMeta
from .base import BaseRecord, BaseCatalog
from ..schema import Key
def extract(self, *patterns, **kwds):
    """Extract a dictionary of {<name>: <field-value>} in which the field
    names match the given shell-style glob pattern(s).

    Any number of glob patterns may be passed; the result will be the union
    of all the result of each glob considered separately.

    Parameters
    ----------
    items : `dict`, optional
        The result of a call to self.schema.extract(); this will be used
        instead of doing any new matching, and allows the pattern matching
        to be reused to extract values from multiple records.  This
        keyword is incompatible with any position arguments and the regex,
        sub, and ordered keyword arguments.
    split : `bool`, optional
        If True, fields with named subfields (e.g. points) will be split
        into separate items in the dict; instead of {"point":
        lsst.geom.Point2I(2,3)}, for instance, you'd get {"point.x":
        2, "point.y": 3}. Default is False.
    regex : `str` or `re` pattern object
        A regular expression to be used in addition to any glob patterns
        passed as positional arguments.  Note that this will be compared
        with re.match, not re.search.
    sub : `str`
        A replacement string (see `re.MatchObject.expand`) used to set the
        dictionary keys of any fields matched by regex.
    ordered : `bool`, optional
        If `True`, a `collections.OrderedDict` will be returned instead of
        a standard dict, with the order corresponding to the definition
        order of the `Schema`. `Default is False`.
    """
    d = kwds.pop("items", None)
    split = kwds.pop("split", False)
    if d is None:
        # No precomputed match dict supplied: run the pattern matching now.
        d = self.schema.extract(*patterns, **kwds).copy()
    elif kwds:
        # 'items' already carries match results, so no other matching
        # keywords may be combined with it.
        raise ValueError(
            "Unrecognized keyword arguments for extract: %s" %
            ", ".join(kwds.keys()))
    # Iterate over a snapshot because splitting adds and deletes entries.
    for name, schemaItem in list(d.items()):
        key = schemaItem.key
        if split and key.HAS_NAMED_SUBFIELDS:
            for subname, subkey in zip(key.subfields, key.subkeys):
                d["%s.%s" % (name, subname)] = self.get(subkey)
            del d[name]
        else:
            d[name] = self.get(schemaItem.key)
    return d
def __repr__(self):
    """Return the record's type followed by its full string form."""
    return "%s\n%s" % (type(self), str(self))
def getColumnView(self):
    """Return a column view of the catalog, caching it on the instance."""
    self._columns = self._getColumnView()
    return self._columns

def __getColumns(self):
    # Lazily build and cache the column view on first access.
    if not hasattr(self, "_columns") or self._columns is None:
        self._columns = self._getColumnView()
    return self._columns

columns = property(__getColumns, doc="a column view of the catalog")
def __getitem__(self, key):
    """Return the record at index key if key is an integer,
    return a column if key is a string field name or Key,
    or return a subset of the catalog if key is a slice
    or boolean NumPy array.
    """
    if type(key) is slice:
        (start, stop, step) = (key.start, key.stop, key.step)
        # Normalize open slice bounds before delegating to subset().
        if step is None:
            step = 1
        if start is None:
            start = 0
        if stop is None:
            stop = len(self)
        return self.subset(start, stop, step)
    elif isinstance(key, np.ndarray):
        if key.dtype == bool:
            return self.subset(key)
        raise RuntimeError(
            "Unsupported array type for indexing non-contiguous Catalog: %s" %
            (key.dtype,))
    elif isinstance(key, Key) or isinstance(key, str):
        if not self.isContiguous():
            if isinstance(key, str):
                key = self.schema[key].asKey()
            array = self._getitem_(key)
            # This array does not share memory with the catalog, so make it
            # read-only to keep callers from thinking that mutating it
            # mutates the catalog.
            array.flags.writeable = False
            return array
        return self._getitem_(key)
    else:
        # Integer (and other scalar) indexing: return a single record.
        return self._getitem_(key)
def __setitem__(self, key, value):
    """
    If ``key`` is an integer, set ``catalog[key]`` to ``value``. Otherwise select column ``key``
    and set it to ``value``.
    """
    # Invalidate the cached column view: the data is about to change.
    self._columns = None
    if isinstance(key, Key) or isinstance(key, str):
        self._setitem_(key, value)
    else:
        return self.set(key, value)
def __delitem__(self, key):
    """Delete the record(s) at ``key`` (an integer index or a slice)."""
    # Invalidate the cached column view: the catalog length changes.
    self._columns = None
    if isinstance(key, slice):
        self._delslice_(key)
    else:
        self._delitem_(key)
def insert(self, key, value):
    """Insert record ``value`` before index ``key``."""
    # Invalidate the cached column view: the catalog length changes.
    self._columns = None
    self._insert(key, value)
def addNew(self):
    """Append a new blank record to the catalog and return it."""
    # Invalidate the cached column view: the catalog length changes.
    self._columns = None
    return self._addNew()
def cast(self, type_, deep=False):
    """Return a copy of the catalog with the given type, optionally
    cloning the table and deep-copying all records if deep==True.
    """
    if deep:
        # Clone the table and reserve room so the extend below does not
        # need to reallocate.
        table = self.table.clone()
        table.preallocate(len(self))
    else:
        table = self.table
    copy = type_(table)
    copy.extend(self, deep=deep)
    return copy
def copy(self, deep=False):
    """
    Copy a catalog (default is not a deep copy).
    """
    return self.cast(type(self), deep)

def extend(self, iterable, deep=False, mapper=None):
    """Append all records in the given iterable to the catalog.

    Arguments:
      iterable ------ any Python iterable containing records
      deep ---------- if True, the records will be deep-copied; ignored
                      if mapper is not None (that always implies True).
      mapper -------- a SchemaMapper object used to translate records
    """
    # Invalidate the cached column view: the catalog length changes.
    self._columns = None
    # Accept a SchemaMapper passed positionally in the 'deep' slot.  We
    # compare by type name rather than isinstance because the SchemaMapper
    # class may not be importable here.
    if type(deep).__name__ == "SchemaMapper":
        mapper = deep
        deep = None
    if isinstance(iterable, type(self)):
        # Same catalog type: use the fast C++ bulk extend.
        if mapper is not None:
            self._extend(iterable, mapper)
        else:
            self._extend(iterable, deep)
    else:
        for record in iterable:
            if mapper is not None:
                self._append(self.table.copyRecord(record, mapper))
            elif deep:
                self._append(self.table.copyRecord(record))
            else:
                self._append(record)
def __reduce__(self):
    """Support pickling via the catalog's FITS serialization."""
    # NOTE(review): assumes lsst.afw.fits is imported at module level
    # (outside this chunk) — verify against the file header.
    return lsst.afw.fits.reduceToFits(self)
def asAstropy(self, cls=None, copy=False, unviewable="copy"):
    """!Return an astropy.table.Table (or subclass thereof) view into this catalog.

    @param[in] cls        Table subclass to use; None implies astropy.table.Table itself.
                          Use astropy.table.QTable to get Quantity columns.

    @param[in] copy       Whether to copy data from the LSST catalog to the astropy table.
                          Not copying is usually faster, but can keep memory from being
                          freed if columns are later removed from the Astropy view.

    @param[in] unviewable One of the following options, indicating how to handle field types
                          (string and Flag) for which views cannot be constructed:
                          - 'copy' (default): copy only the unviewable fields.
                          - 'raise': raise ValueError if unviewable fields are present.
                          - 'skip': do not include unviewable fields in the Astropy Table.
                          This option is ignored if copy=True.
    """
    # Local import: astropy is only needed when this view is requested.
    import astropy.table
    if cls is None:
        cls = astropy.table.Table
    if unviewable not in ("copy", "raise", "skip"):
        raise ValueError(
            "'unviewable'=%r must be one of 'copy', 'raise', or 'skip'" % (unviewable,))
    ps = self.getMetadata()
    meta = ps.toOrderedDict() if ps is not None else None
    columns = []
    items = self.schema.extract("*", ordered=True)
    for name, item in items.items():
        key = item.key
        unit = item.field.getUnits() or None  # use None instead of "" for no unit
        if key.getTypeString() == "String":
            # String columns cannot be viewed; copy or handle per 'unviewable'.
            if not copy:
                if unviewable == "raise":
                    raise ValueError("Cannot extract string "
                                     "unless copy=True or unviewable='copy' or 'skip'.")
                elif unviewable == "skip":
                    continue
            data = np.zeros(
                len(self), dtype=np.dtype((str, key.getSize())))
            for i, record in enumerate(self):
                data[i] = record.get(key)
        elif key.getTypeString() == "Flag":
            # Flags are packed bits; they cannot be viewed either.
            if not copy:
                if unviewable == "raise":
                    raise ValueError("Cannot extract packed bit columns "
                                     "unless copy=True or unviewable='copy' or 'skip'.")
                elif unviewable == "skip":
                    continue
            data = self.columns.get_bool_array(key)
        elif key.getTypeString() == "Angle":
            data = self.columns.get(key)
            unit = "radian"
            if copy:
                data = data.copy()
        elif "Array" in key.getTypeString() and key.isVariableLength():
            # Variable-length arrays have no column representation at all.
            if unviewable == "raise":
                raise ValueError(
                    "Cannot extract variable-length array fields unless unviewable='skip'.")
            elif unviewable == "skip" or unviewable == "copy":
                continue
        else:
            data = self.columns.get(key)
            if copy:
                data = data.copy()
        columns.append(
            astropy.table.Column(
                data,
                name=name,
                unit=unit,
                description=item.field.getDoc()
            )
        )
    return cls(columns, meta=meta, copy=False)
def __dir__(self):
    """
    This custom dir is necessary due to the custom getattr below.
    Without it, not all of the methods available are returned with dir.
    """
    def recursive_get_class_dir(cls):
        """
        Return a set containing the names of all methods
        for a given class *and* all of its subclasses.
        """
        result = set()
        if cls.__bases__:
            for subcls in cls.__bases__:
                result |= recursive_get_class_dir(subcls)
        result |= set(cls.__dict__.keys())
        return result
    # Union of names from the column view, the table, this class hierarchy,
    # and the instance dict, matching what __getattr__ can resolve.
    return sorted(set(dir(self.columns)) |
                  set(dir(self.table)) |
                  recursive_get_class_dir(type(self)) |
                  set(self.__dict__.keys()))
def __getattr__(self, name):
    # Prevent infinite recursion: looking up "_columns" before it exists
    # must not itself trigger attribute forwarding.
    if name == "_columns":
        self._columns = None
        return self._columns
    # Forward unknown attributes to the table first, then to the column view.
    try:
        return getattr(self.table, name)
    except AttributeError:
        return getattr(self.columns, name)
def __str__(self):
    """Return the astropy view's string form when possible; otherwise a summary."""
    if self.isContiguous():
        return str(self.asAstropy())
    else:
        # Non-contiguous catalogs cannot build column views, so only
        # summarize the size and field names.
        fields = ' '.join(x.field.getName() for x in self.schema)
        string = "Non-contiguous afw.Catalog of %d rows.\ncolumns: %s" % (len(self), fields)
        return string
def __repr__(self):
    """Return the catalog's type followed by its string form."""
    return "%s\n%s" % (type(self), self)
# Register the base catalog implementation under the "Base" template key.
Catalog.register("Base", BaseCatalog)
# --- extraction residue: signature index from the original document, kept
# --- as comments for reference (not executable code) ---
# def extract(self, patterns, kwds)
# def copy(self, deep=False)
# daf::base::PropertySet * set
# def __setitem__(self, key, value)
# def __delitem__(self, key)
# def asAstropy(self, cls=None, copy=False, unviewable="copy")
#     Return an astropy.table.Table (or subclass thereof) view into this catalog.
# def insert(self, key, value)
# def __getattr__(self, name)
# def extend(self, iterable, deep=False, mapper=None)
# def cast(self, type_, deep=False)
# daf::base::PropertyList * list
# def __getitem__(self, key)