packers.py
# This file is part of skymap.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

__all__ = ("SkyMapDimensionPacker",)

from lsst.daf.butler import DimensionPacker, ExpandedDataCoordinate, DimensionGraph, DataCoordinate


class SkyMapDimensionPacker(DimensionPacker):
    """A `DimensionPacker` for tract, patch, and optionally abstract_filter,
    given a SkyMap.

    Parameters
    ----------
    fixed : `lsst.daf.butler.ExpandedDataCoordinate`
        Expanded data ID that must include at least the skymap dimension.
    dimensions : `lsst.daf.butler.DimensionGraph`
        The dimensions of data IDs packed by this instance.  Must include
        skymap, tract, and patch, and may include abstract_filter.
    """

    SUPPORTED_FILTERS = [None] + list("ugrizyUBGVRIZYJHK")  # split string into single chars
    """abstract_filter names supported by this packer.

    New filters should be added to the end of the list to maximize
    compatibility with existing IDs.
    """

    @classmethod
    def getIntFromFilter(cls, name):
        """Return an integer that represents the abstract_filter with the
        given name.
        """
        try:
            return cls.SUPPORTED_FILTERS.index(name)
        except ValueError:
            raise NotImplementedError(f"abstract_filter '{name}' not supported by this ID packer.")

    @classmethod
    def getFilterNameFromInt(cls, num):
        """Return an abstract_filter name from its integer representation.
        """
        return cls.SUPPORTED_FILTERS[num]
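
    # Illustrative round trip for the two classmethods above.  The indices
    # follow the ordering of SUPPORTED_FILTERS, so these values are only
    # examples and would shift if that list ever changed:
    #
    #     SkyMapDimensionPacker.getIntFromFilter("r")    # -> 3
    #     SkyMapDimensionPacker.getFilterNameFromInt(3)  # -> "r"
    #     SkyMapDimensionPacker.getIntFromFilter(None)   # -> 0 (no filter)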

    @classmethod
    def getMaxIntForFilters(cls):
        """Return the number of supported filters, i.e. one greater than the
        largest integer used to represent an abstract_filter.
        """
        return len(cls.SUPPORTED_FILTERS)

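    # `configure` declares the skymap metadata fields this packer relies on
    # (tract_max, patch_nx_max, patch_ny_max) and returns no extra constructor
    # keyword arguments; `__init__` below reads those fields from the expanded
    # skymap record.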
    @classmethod
    def configure(cls, dimensions):
        # Docstring inherited from DimensionPacker.configure
        assert dimensions.given == ["skymap"]
        assert dimensions.required.issuperset(["tract", "patch"])
        metadata = {"skymap": ["tract_max", "patch_nx_max", "patch_ny_max"]}
        kwds = {}
        return metadata, kwds

    def __init__(self, fixed: ExpandedDataCoordinate, dimensions: DimensionGraph):
        super().__init__(fixed, dimensions)
        record = fixed.records["skymap"]
        self._skyMapName = record.name
        self._patchMax = record.patch_nx_max * record.patch_ny_max
        self._tractPatchMax = self._patchMax*record.tract_max
        if "abstract_filter" in dimensions:
            self._filterMax = self.getMaxIntForFilters()
        else:
            self._filterMax = None

    @property
    def maxBits(self) -> int:
        # Docstring inherited from DimensionPacker.maxBits
        packedMax = self._tractPatchMax
        if self._filterMax is not None:
            packedMax *= self._filterMax
        return packedMax.bit_length()

    def _pack(self, dataId: DataCoordinate) -> int:
        # Docstring inherited from DimensionPacker.pack
        packed = dataId["patch"] + self._patchMax*dataId["tract"]
        if self._filterMax is not None:
            packed += self.getIntFromFilter(dataId["abstract_filter"])*self._tractPatchMax
        return packed

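    # Worked example of the packing arithmetic in _pack, using illustrative
    # (assumed) skymap metadata: patch_nx_max = patch_ny_max = 9 and
    # tract_max = 10000, so _patchMax = 81 and _tractPatchMax = 810000.
    # For tract=42, patch=7, abstract_filter="r" (filter index 3):
    #
    #     packed = 7 + 81*42 + 3*810000 = 2433409
    #
    # `unpack` below inverts this by peeling off the filter, tract, and patch
    # with integer division and modulo.
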
    def unpack(self, packedId: int) -> DataCoordinate:
        # Docstring inherited from DimensionPacker.unpack
        d = {"skymap": self._skyMapName}
        if self._filterMax is not None:
            d["abstract_filter"] = self.getFilterNameFromInt(packedId // self._tractPatchMax)
            packedId %= self._tractPatchMax
        d["tract"] = packedId // self._patchMax
        d["patch"] = packedId % self._patchMax
        return DataCoordinate.standardize(d, graph=self.dimensions)
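

# Illustrative usage sketch (not part of this module).  The `fixed` and
# `dimensions` arguments would normally come from a Gen3 butler registry,
# which is assumed here rather than shown; `pack` is provided by the
# `DimensionPacker` base class and delegates to `_pack` above:
#
#     packer = SkyMapDimensionPacker(fixed, dimensions)
#     packedId = packer.pack(dataId)       # dataId: skymap, tract, patch,
#                                          # and optionally abstract_filter
#     roundTripped = packer.unpack(packedId)   # recovers the same values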