#!/usr/bin/env python

#
# LSST Data Management System
# Copyright 2008, 2009, 2010 LSST Corporation.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
"""This module defines the Mapper base class."""

from builtins import object, super

from . import Policy


class Mapper(object):
    """Mapper is a base class for all mappers.

    Subclasses may define the following methods:

    map_{datasetType}(self, dataId, write)
        Map a dataset id for the given dataset type into a ButlerLocation.
        If write=True, this mapping is for an output dataset.

    query_{datasetType}(self, format, dataId)
        Return the possible values for the format fields that would
        produce datasets matching the provided partial dataId.

    std_{datasetType}(self, item)
        Standardize an object of the given dataset type.

    Methods that must be overridden:

    keys(self)
        Return a list of the keys that can be used in data ids.

    Other public methods:

    __init__(self)

    getDatasetTypes(self)

    map(self, datasetType, dataId, write=False)

    queryMetadata(self, datasetType, format, dataId)

    canStandardize(self, datasetType)

    standardize(self, datasetType, item, dataId)

    validate(self, dataId)

    An illustrative subclass sketch appears at the end of this module.
    """

    @staticmethod
    def Mapper(cfg):
        '''Instantiate a Mapper from a configuration.

        In some cases the cfg may already have been instantiated into a
        Mapper; this is allowed, and the input is simply returned.

        :param cfg: the cfg for this mapper. It is recommended this be
                    created by calling Mapper.cfg()
        :return: a Mapper instance
        '''
        if isinstance(cfg, Policy):
            return cfg['cls'](cfg)
        return cfg
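
    # Usage sketch (illustrative only; ``MyMapper`` is hypothetical, and
    # this assumes a Policy can be built from a plain dict): a Policy cfg
    # carries the concrete mapper class under its 'cls' key, so the
    # factory dispatches to it, while a cfg that is already a Mapper
    # instance is passed through unchanged.
    #
    #     cfg = Policy({'cls': MyMapper})
    #     mapper = Mapper.Mapper(cfg)   # equivalent to MyMapper(cfg)
    #     mapper = Mapper.Mapper(mapper)  # already a Mapper: returned as-is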

    def __new__(cls, *args, **kwargs):
        """Create a new Mapper, saving arguments for pickling.

        This is in __new__ instead of __init__ to save the user from
        having to save the arguments themselves (either explicitly, or by
        calling the super's __init__ with all their *args, **kwargs).
        The resulting pickling system (of __new__, __getstate__ and
        __setstate__) is similar to how __reduce__ is usually used,
        except that we save the user from any responsibility (except
        when overriding __new__, but that is not common).
        """
        self = super().__new__(cls)
        self._arguments = (args, kwargs)
        return self

    def __init__(self, **kwargs):
        pass

    def __getstate__(self):
        return self._arguments

    def __setstate__(self, state):
        self._arguments = state
        args, kwargs = state
        self.__init__(*args, **kwargs)
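
    # Pickling sketch (illustrative; ``MyMapper`` is hypothetical).
    # Because __new__ records the constructor arguments and __setstate__
    # replays them through __init__, a subclass needs no extra pickling
    # support of its own:
    #
    #     class MyMapper(Mapper):
    #         def __init__(self, root):
    #             self.root = root
    #
    #     import pickle
    #     m2 = pickle.loads(pickle.dumps(MyMapper("/data/repo")))
    #     assert m2.root == "/data/repo"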

    def keys(self):
        raise NotImplementedError("keys() unimplemented")

    def queryMetadata(self, datasetType, format, dataId):
        """Get possible values for keys given a partial data id.

        :param datasetType: the dataset type whose query_ method is used
        :param format: the key or keys whose possible values are wanted
        :param dataId: a partial data id constraining the query
        :return: the possible values for the requested keys
        """
        func = getattr(self, 'query_' + datasetType)
        return func(format, self.validate(dataId))

    def getDatasetTypes(self):
        """Return a list of the mappable dataset types."""
        # A comprehension over the map_* methods, equivalent to the
        # original loop but without shadowing the built-in ``list``.
        return [attr[4:] for attr in dir(self) if attr.startswith("map_")]

    def map(self, datasetType, dataId, write=False):
        """Map a data id using the mapping method for its dataset type.

        Parameters
        ----------
        datasetType : string
            The datasetType to map.
        dataId : DataId instance
            The dataId to use when mapping.
        write : bool, optional
            Indicates if the map is being performed for a read operation
            (False) or a write operation (True).

        Returns
        -------
        ButlerLocation or a list of ButlerLocation
            The location(s) found for the map operation. If write is True,
            a list is returned. If write is False, a single ButlerLocation
            is returned.

        Raises
        ------
        NoResults
            If no location was found for this map operation, the derived
            mapper class may raise a lsst.daf.persistence.NoResults
            exception. Butler catches this and will look in the next
            Repository if there is one.
        """
        func = getattr(self, 'map_' + datasetType)
        return func(self.validate(dataId), write)

    def canStandardize(self, datasetType):
        """Return True if this mapper can standardize an object of the
        given dataset type."""
        return hasattr(self, 'std_' + datasetType)

    def standardize(self, datasetType, item, dataId):
        """Standardize an object using the standardization method for its
        dataset type, if one exists; otherwise return the item unchanged."""
        if hasattr(self, 'std_' + datasetType):
            func = getattr(self, 'std_' + datasetType)
            return func(item, self.validate(dataId))
        return item

    def validate(self, dataId):
        """Validate a dataId's contents.

        If the dataId is valid, return it. If an invalid component can be
        transformed into a valid one, copy the dataId, fix the component,
        and return the copy. Otherwise, raise an exception.
        """
        return dataId

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        Not implemented in the base mapper.
        """
        raise NotImplementedError("Base-class Mapper does not implement backups")

    def getRegistry(self):
        """Get the registry."""
        return None
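

# ---------------------------------------------------------------------------
# Demonstration sketch (not part of the original module). ExampleMapper is a
# hypothetical subclass showing the map_/query_/std_ naming convention the
# class docstring describes; it returns plain strings where a real mapper
# would return ButlerLocation instances, and fixed tuples where a real
# mapper would consult a registry. Because of the relative import above, it
# runs only as a module inside the package (e.g.
# ``python -m lsst.daf.persistence.mapper``).
if __name__ == "__main__":

    class ExampleMapper(Mapper):
        def keys(self):
            return ["visit", "ccd"]

        def map_raw(self, dataId, write):
            # A real mapper would construct a ButlerLocation here.
            return "raw-%(visit)d-%(ccd)d.fits" % dataId

        def query_raw(self, format, dataId):
            # A real mapper would look up possible values in a registry.
            return [(1, 0), (1, 1)]

        def std_raw(self, item, dataId):
            # No-op standardization for the sketch.
            return item

    mapper = ExampleMapper()
    print(mapper.getDatasetTypes())                     # ['raw']
    print(mapper.map("raw", {"visit": 1, "ccd": 0}))    # raw-1-0.fits
    print(mapper.canStandardize("raw"))                 # True
    print(mapper.queryMetadata("raw", ("visit", "ccd"), {}))  # [(1, 0), (1, 1)]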