# Public API of this module: only the packer class is exported.
__all__ = ("SkyMapDataIdPacker",)

from lsst.daf.butler import DataIdPacker, DataId
class SkyMapDataIdPacker(DataIdPacker):
    """A `DataIdPacker` for tract, patch and optionally abstract_filter, given
    a SkyMap.

    Parameters
    ----------
    dimensions : `DataIdPackerDimensions`
        Struct containing dimensions related to this `DataIdPacker`. Must
        have skymap as the only given dimension, tract, patch, and possibly
        abstract_filter as the covered dimensions, and all of these as required
        dimensions.
    skymap : `str`
        skymap name from `Registry`.
    tractMax : `int`
        Maximum (exclusive) tract index for this skymap.
    patchNxMax : `int`
        Maximum (exclusive) patch index in the x direction.
    patchNyMax : `int`
        Maximum (exclusive) patch index in the y direction.
    """

    SUPPORTED_FILTERS = [None] + list("ugrizyUBGVRIZYJHK")
    """abstract_filter names supported by this packer.

    New filters should be added to the end of the list to maximize
    compatibility with existing IDs.
    """

    @classmethod
    def getIntFromFilter(cls, name):
        """Return an integer that represents the abstract_filter with the given
        name.

        Raises
        ------
        NotImplementedError
            Raised if ``name`` is not in `SUPPORTED_FILTERS`.
        """
        try:
            # Position in SUPPORTED_FILTERS is the stable integer code; that is
            # why new filters must only ever be appended to the list.
            return cls.SUPPORTED_FILTERS.index(name)
        except ValueError:
            raise NotImplementedError(f"abstract_filter '{name}' not supported by this ID packer.")

    @classmethod
    def getFilterNameFromInt(cls, num):
        """Return an abstract_filter name from its integer representation.
        """
        return cls.SUPPORTED_FILTERS[num]

    @classmethod
    def getMaxIntForFilters(cls):
        """Return the (exclusive) maximum integer filter code for this packer.
        """
        return len(cls.SUPPORTED_FILTERS)

    @classmethod
    def configure(cls, dimensions):
        # Docstring inherited from DataIdPacker.configure.
        # This packer only works with skymap as the given dimension and
        # tract/patch (plus optionally abstract_filter) as required ones.
        assert dimensions.given == ["skymap"]
        assert dimensions.required.issuperset(["tract", "patch"])
        # Constructor keyword arguments tractMax/patchNxMax/patchNyMax are
        # pulled from the skymap dimension's metadata by the Registry.
        metadata = {"skymap": ["tract_max", "patch_nx_max", "patch_ny_max"]}
        kwds = {}
        return metadata, kwds

    def __init__(self, dimensions, skymap, tractMax, patchNxMax, patchNyMax):
        # NOTE(review): base-class init reconstructed — `unpack` reads
        # self.dimensions, which the DataIdPacker base is expected to set.
        super().__init__(dimensions)
        self._skyMapName = skymap
        # Patches are packed row-major within a tract: nx*ny slots per tract.
        self._patchMax = patchNxMax*patchNyMax
        self._tractPatchMax = self._patchMax*tractMax
        # Only reserve space for a filter code if this packer covers
        # abstract_filter; _filterMax is None otherwise.
        if "abstract_filter" in dimensions.required:
            self._filterMax = self.getMaxIntForFilters()
        else:
            self._filterMax = None

    @property
    def maxBits(self):
        # Docstring inherited from DataIdPacker.maxBits.
        packedMax = self._tractPatchMax
        if self._filterMax is not None:
            packedMax *= self._filterMax
        # Number of bits needed to represent any packed ID (exclusive max).
        return packedMax.bit_length()

    def _pack(self, dataId):
        # Docstring inherited from DataIdPacker.pack.
        # Mixed-radix encoding: patch is the least-significant digit, then
        # tract, then (optionally) the filter code.
        packed = dataId["patch"] + self._patchMax*dataId["tract"]
        if self._filterMax is not None:
            packed += self.getIntFromFilter(dataId["abstract_filter"])*self._tractPatchMax
        return packed

    def unpack(self, packedId):
        # Docstring inherited from DataIdPacker.unpack.
        # Invert _pack: peel off the filter digit (if any), then tract, then
        # patch, reattaching the skymap name this packer was built for.
        d = {"skymap": self._skyMapName}
        if self._filterMax is not None:
            d["abstract_filter"] = self.getFilterNameFromInt(packedId // self._tractPatchMax)
            packedId %= self._tractPatchMax
        d["tract"] = packedId // self._patchMax
        d["patch"] = packedId % self._patchMax
        return DataId(d, dimensions=self.dimensions.required)