LSSTApplications  19.0.0-14-gb0260a2+72efe9b372,20.0.0+7927753e06,20.0.0+8829bf0056,20.0.0+995114c5d2,20.0.0+b6f4b2abd1,20.0.0+bddc4f4cbe,20.0.0-1-g253301a+8829bf0056,20.0.0-1-g2b7511a+0d71a2d77f,20.0.0-1-g5b95a8c+7461dd0434,20.0.0-12-g321c96ea+23efe4bbff,20.0.0-16-gfab17e72e+fdf35455f6,20.0.0-2-g0070d88+ba3ffc8f0b,20.0.0-2-g4dae9ad+ee58a624b3,20.0.0-2-g61b8584+5d3db074ba,20.0.0-2-gb780d76+d529cf1a41,20.0.0-2-ged6426c+226a441f5f,20.0.0-2-gf072044+8829bf0056,20.0.0-2-gf1f7952+ee58a624b3,20.0.0-20-geae50cf+e37fec0aee,20.0.0-25-g3dcad98+544a109665,20.0.0-25-g5eafb0f+ee58a624b3,20.0.0-27-g64178ef+f1f297b00a,20.0.0-3-g4cc78c6+e0676b0dc8,20.0.0-3-g8f21e14+4fd2c12c9a,20.0.0-3-gbd60e8c+187b78b4b8,20.0.0-3-gbecbe05+48431fa087,20.0.0-38-ge4adf513+a12e1f8e37,20.0.0-4-g97dc21a+544a109665,20.0.0-4-gb4befbc+087873070b,20.0.0-4-gf910f65+5d3db074ba,20.0.0-5-gdfe0fee+199202a608,20.0.0-5-gfbfe500+d529cf1a41,20.0.0-6-g64f541c+d529cf1a41,20.0.0-6-g9a5b7a1+a1cd37312e,20.0.0-68-ga3f3dda+5fca18c6a4,20.0.0-9-g4aef684+e18322736b,w.2020.45
LSSTDataManagementBasePackage
mapper.py
Go to the documentation of this file.
1 #!/usr/bin/env python
2 
3 #
4 # LSST Data Management System
5 # Copyright 2008, 2009, 2010 LSST Corporation.
6 #
7 # This product includes software developed by the
8 # LSST Project (http://www.lsst.org/).
9 #
10 # This program is free software: you can redistribute it and/or modify
11 # it under the terms of the GNU General Public License as published by
12 # the Free Software Foundation, either version 3 of the License, or
13 # (at your option) any later version.
14 #
15 # This program is distributed in the hope that it will be useful,
16 # but WITHOUT ANY WARRANTY; without even the implied warranty of
17 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 # GNU General Public License for more details.
19 #
20 # You should have received a copy of the LSST License Statement and
21 # the GNU General Public License along with this program. If not,
22 # see <http://www.lsstcorp.org/LegalNotices/>.
23 #
"""This module defines the Mapper base class."""

from . import Policy
28 
class Mapper:
    """Mapper is a base class for all mappers.

    Subclasses may define the following methods:

    map_{datasetType}(self, dataId, write)
        Map a dataset id for the given dataset type into a ButlerLocation.
        If write=True, this mapping is for an output dataset.

    query_{datasetType}(self, format, dataId)
        Return the possible values for the format fields that would produce
        datasets in combination with the provided partial dataId.

    std_{datasetType}(self, item, dataId)
        Standardize an object of the given data set type.

    Methods that must be overridden:

    keys(self)
        Return a list of the keys that can be used in data ids.

    Other public methods:

    __init__(self)

    getDatasetTypes(self)

    map(self, datasetType, dataId, write=False)

    queryMetadata(self, datasetType, format, dataId)

    canStandardize(self, datasetType)

    standardize(self, datasetType, item, dataId)

    validate(self, dataId)
    """

    @staticmethod
    def Mapper(cfg):
        """Instantiate a Mapper from a configuration.

        In some cases the cfg may have already been instantiated into a
        Mapper; this is allowed and the input variable is simply returned.

        :param cfg: the cfg for this mapper. It is recommended this be created
                    by calling Mapper.cfg()
        :return: a Mapper instance
        """
        if isinstance(cfg, Policy):
            # A Policy cfg names the concrete mapper class under 'cls';
            # instantiate it with the cfg itself as the sole argument.
            return cfg['cls'](cfg)
        return cfg

    def __new__(cls, *args, **kwargs):
        """Create a new Mapper, saving arguments for pickling.

        This is in __new__ instead of __init__ to save the user
        from having to save the arguments themselves (either explicitly,
        or by calling the super's __init__ with all their
        *args, **kwargs). The resulting pickling system (of __new__,
        __getstate__ and __setstate__) is similar to how __reduce__
        is usually used, except that we save the user from any
        responsibility (except when overriding __new__, but that
        is not common).
        """
        self = super().__new__(cls)
        self._arguments = (args, kwargs)
        return self

    def __init__(self, **kwargs):
        pass

    def __getstate__(self):
        # The constructor arguments captured in __new__ are sufficient to
        # recreate this object when unpickling (see __setstate__).
        return self._arguments

    def __setstate__(self, state):
        self._arguments = state
        args, kwargs = state
        # Re-run initialization with the original constructor arguments so
        # subclasses that do real work in __init__ are restored correctly.
        self.__init__(*args, **kwargs)

    def keys(self):
        """Return a list of the keys that can be used in data ids.

        Must be overridden by subclasses.
        """
        raise NotImplementedError("keys() unimplemented")

    def queryMetadata(self, datasetType, format, dataId):
        """Get possible values for keys given a partial data id.

        :param datasetType: the dataset type; its query_{datasetType} method
                            is looked up and invoked
        :param format: the format field or fields whose possible values are
                       requested
        :param dataId: a (possibly partial) data id constraining the query;
                       passed through self.validate() first
        :return: whatever the dataset type's query_ method returns
        """
        func = getattr(self, 'query_' + datasetType)
        return func(format, self.validate(dataId))

    def getDatasetTypes(self):
        """Return a list of the mappable dataset types."""
        # A dataset type is mappable iff a map_{datasetType} method exists.
        prefix = "map_"
        return [attr[len(prefix):] for attr in dir(self) if attr.startswith(prefix)]

    def map(self, datasetType, dataId, write=False):
        """Map a data id using the mapping method for its dataset type.

        Parameters
        ----------
        datasetType : string
            The datasetType to map
        dataId : DataId instance
            The dataId to use when mapping
        write : bool, optional
            Indicates if the map is being performed for a read operation
            (False) or a write operation (True)

        Returns
        -------
        ButlerLocation or a list of ButlerLocation
            The location(s) found for the map operation. If write is True, a
            list is returned. If write is False a single ButlerLocation is
            returned.

        Raises
        ------
        NoResults
            If no location was found for this map operation, the derived
            mapper class may raise a lsst.daf.persistence.NoResults exception.
            Butler catches this and will look in the next Repository if there
            is one.
        """
        func = getattr(self, 'map_' + datasetType)
        return func(self.validate(dataId), write)

    def canStandardize(self, datasetType):
        """Return true if this mapper can standardize an object of the given
        dataset type."""
        return hasattr(self, 'std_' + datasetType)

    def standardize(self, datasetType, item, dataId):
        """Standardize an object using the standardization method for its
        data set type, if it exists; otherwise return the item unchanged."""
        # Single lookup with a default instead of hasattr() + getattr().
        func = getattr(self, 'std_' + datasetType, None)
        if func is not None:
            return func(item, self.validate(dataId))
        return item

    def validate(self, dataId):
        """Validate a dataId's contents.

        If the dataId is valid, return it. If an invalid component can be
        transformed into a valid one, copy the dataId, fix the component, and
        return the copy. Otherwise, raise an exception. The base-class
        implementation accepts every dataId unchanged.
        """
        return dataId

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        Not implemented in the base mapper.
        """
        raise NotImplementedError("Base-class Mapper does not implement backups")

    def getRegistry(self):
        """Get the registry; the base class has none, so return None."""
        return None
lsst::daf::persistence.mapper.Mapper.map
def map(self, datasetType, dataId, write=False)
Definition: mapper.py:135
lsst::daf::persistence.mapper.Mapper.__getstate__
def __getstate__(self)
Definition: mapper.py:101
lsst::daf::persistence.mapper.Mapper.__new__
def __new__(cls, *args, **kwargs)
Definition: mapper.py:82
lsst::daf::persistence.mapper.Mapper.Mapper
def Mapper(cfg)
Definition: mapper.py:69
lsst::daf::persistence.mapper.Mapper.standardize
def standardize(self, datasetType, item, dataId)
Definition: mapper.py:171
lsst::daf::persistence.mapper.Mapper._arguments
_arguments
Definition: mapper.py:95
lsst::daf::persistence.mapper.Mapper
Definition: mapper.py:29
lsst::daf::persistence.mapper.Mapper.__init__
def __init__(self, **kwargs)
Definition: mapper.py:98
lsst::daf::persistence.mapper.Mapper.canStandardize
def canStandardize(self, datasetType)
Definition: mapper.py:165
lsst::daf::persistence.mapper.Mapper.getRegistry
def getRegistry(self)
Definition: mapper.py:196
lsst::daf::persistence.mapper.Mapper.getDatasetTypes
def getDatasetTypes(self)
Definition: mapper.py:126
lsst::daf::persistence.mapper.Mapper.validate
def validate(self, dataId)
Definition: mapper.py:180
lsst::daf::persistence.mapper.Mapper.backup
def backup(self, datasetType, dataId)
Definition: mapper.py:189
lsst::daf::persistence.mapper.Mapper.__setstate__
def __setstate__(self, state)
Definition: mapper.py:104
lsst::daf::persistence.mapper.Mapper.queryMetadata
def queryMetadata(self, datasetType, format, dataId)
Definition: mapper.py:112
lsst::daf::persistence.mapper.Mapper.keys
def keys(self)
Definition: mapper.py:109