__all__ = ("SkyMapDimensionPacker",)

from lsst.daf.butler import DimensionPacker, ExpandedDataCoordinate, DimensionGraph, DataCoordinate
28 """A `DimensionPacker` for tract, patch and optionally abstract_filter,
33 fixed : `lsst.daf.butler.ExpandedDataCoordinate`
34 Expanded data ID that must include at least the skymap dimension.
35 dimensions : `lsst.daf.butler.DimensionGraph`
36 The dimensions of data IDs packed by this instance. Must include
37 skymap, tract, and patch, and may include abstract_filter.
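
    # A minimal usage sketch (an assumption for illustration, not part of this
    # module): the expanded skymap data ID and the dimensions to pack are
    # typically obtained from a butler registry, e.g.
    #
    #     fixed = registry.expandDataId(skymap="deepCoadd_skyMap")
    #     dims = registry.dimensions.extract(["tract", "patch", "abstract_filter"])
    #     packer = SkyMapDimensionPacker(fixed, dims)
    #     dataId = registry.expandDataId(skymap="deepCoadd_skyMap", tract=42, patch=7,
    #                                    abstract_filter="r")
    #     key = packer.pack(dataId)
    #
    # The skymap name "deepCoadd_skyMap" and the registry calls above are
    # hypothetical placeholders.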

    SUPPORTED_FILTERS = [None] + list("ugrizyUBGVRIZYJHK")
    """abstract_filter names supported by this packer.

    New filters should be added to the end of the list to maximize
    compatibility with existing IDs.
    """
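
    # The integer code for a filter is its index in SUPPORTED_FILTERS
    # (illustrative values, easy to verify by inspection):
    #
    #     SUPPORTED_FILTERS.index(None) == 0
    #     SUPPORTED_FILTERS.index("u") == 1
    #     SUPPORTED_FILTERS.index("r") == 3
    #
    # Because the code is a list index, appending new filters leaves every
    # previously issued packed ID unchanged, which is why additions must go
    # at the end of the list.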
49 """Return an integer that represents the abstract_filter with the given
55 raise NotImplementedError(f
"abstract_filter '{name}' not supported by this ID packer.")
59 """Return an abstract_filter name from its integer representation.

    @classmethod
    def configure(cls, dimensions):
        # Declare the skymap metadata fields this packer needs at construction.
        assert dimensions.given == ["skymap"]
        assert dimensions.required.issuperset(["tract", "patch"])
        metadata = {"skymap": ["tract_max", "patch_nx_max", "patch_ny_max"]}
        return metadata, {}

    def __init__(self, fixed: ExpandedDataCoordinate, dimensions: DimensionGraph):
        super().__init__(fixed, dimensions)
        record = fixed.records["skymap"]
        self._skyMapName = fixed["skymap"]
        self._patchMax = record.patch_nx_max * record.patch_ny_max
        self._tractPatchMax = self._patchMax * record.tract_max
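        # Illustrative sizes (hypothetical, not from any real skymap): a 10x10
        # patch grid gives _patchMax = 100, and tract_max = 200 then gives
        # _tractPatchMax = 20000, so every (tract, patch) pair maps to a unique
        # integer below 20000.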
82 if "abstract_filter" in dimensions:

    @property
    def maxBits(self) -> int:
        # Upper bound on packed IDs: all (tract, patch) combinations, times the
        # number of supported filters when abstract_filter is being packed.
        packedMax = self._tractPatchMax
        if self._filterMax is not None:
            packedMax *= self._filterMax
        return packedMax.bit_length()
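
    # Worked example for maxBits (hypothetical sizes, matching the comment in
    # __init__): _tractPatchMax = 20000 and 18 supported filters give
    # packedMax = 360000, and (360000).bit_length() == 19, so such packed IDs
    # fit comfortably in a 32-bit integer field.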

    def _pack(self, dataId: DataCoordinate) -> int:
        # Patch index varies fastest, then tract, then (optionally) filter.
        packed = dataId["patch"] + self._patchMax*dataId["tract"]
        if self._filterMax is not None:
            packed += self.getIntFromFilter(dataId["abstract_filter"])*self._tractPatchMax
        return packed

    def unpack(self, packedId: int) -> DataCoordinate:
        # Invert _pack: peel off the filter code (if any), then tract, then patch.
        d = {"skymap": self._skyMapName}
        if self._filterMax is not None:
            d["abstract_filter"] = self.getFilterNameFromInt(packedId // self._tractPatchMax)
            packedId %= self._tractPatchMax
        d["tract"] = packedId // self._patchMax
        d["patch"] = packedId % self._patchMax
        return DataCoordinate.standardize(d, graph=self.dimensions)
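
    # Continuing the hypothetical example, unpacking 64207 recovers the inputs:
    #
    #     64207 // 20000 = 3  -> abstract_filter "r";  64207 % 20000 = 4207
    #     4207 // 100    = 42 -> tract;                4207 % 100    = 7 -> patch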