mapper.py
#!/usr/bin/env python

#
# LSST Data Management System
# Copyright 2008, 2009, 2010 LSST Corporation.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
"""This module defines the Mapper base class."""

from . import Policy
from .deprecation import deprecate_class


@deprecate_class
class Mapper:
    """Mapper is a base class for all mappers.

    Subclasses may define the following methods:

    map_{datasetType}(self, dataId, write)
        Map a dataset id for the given dataset type into a ButlerLocation.
        If write=True, this mapping is for an output dataset.

    query_{datasetType}(self, format, dataId)
        Return the possible values for the format fields that would produce
        datasets compatible with the provided partial dataId.

    std_{datasetType}(self, item)
        Standardize an object of the given dataset type.

    Methods that must be overridden:

    keys(self)
        Return a list of the keys that can be used in data ids.

    Other public methods:

    __init__(self)

    getDatasetTypes(self)

    map(self, datasetType, dataId, write=False)

    queryMetadata(self, datasetType, format, dataId)

    canStandardize(self, datasetType)

    standardize(self, datasetType, item, dataId)

    validate(self, dataId)

    A minimal illustrative subclass sketch appears after the class
    definition at the end of this file.
    """

    @staticmethod
    def Mapper(cfg):
        """Instantiate a Mapper from a configuration.

        In some cases the cfg may already have been instantiated into a
        Mapper; this is allowed, and the input is simply returned.

        :param cfg: the cfg for this mapper. It is recommended this be
                    created by calling Mapper.cfg()
        :return: a Mapper instance
        """
        if isinstance(cfg, Policy):
            return cfg['cls'](cfg)
        return cfg

    def __new__(cls, *args, **kwargs):
        """Create a new Mapper, saving arguments for pickling.

        This is in __new__ instead of __init__ to save the user
        from having to save the arguments themselves (either explicitly,
        or by calling the super's __init__ with all their
        *args, **kwargs). The resulting pickling system (of __new__,
        __getstate__ and __setstate__) is similar to how __reduce__
        is usually used, except that we save the user from any
        responsibility (except when overriding __new__, but that
        is not common).
        """
        self = super().__new__(cls)
        self._arguments = (args, kwargs)
        return self

    def __init__(self, **kwargs):
        pass

    def __getstate__(self):
        return self._arguments

    def __setstate__(self, state):
        self._arguments = state
        args, kwargs = state
        self.__init__(*args, **kwargs)

    def keys(self):
        raise NotImplementedError("keys() unimplemented")

    def queryMetadata(self, datasetType, format, dataId):
        """Get possible values for keys given a partial data id.

        :param datasetType: see documentation about the use of datasetType
        :param format: key or list of keys for which to return possible
                       values
        :param dataId: see documentation about the use of dataId
        :return: the possible values for the requested keys
        """
        func = getattr(self, 'query_' + datasetType)

        val = func(format, self.validate(dataId))
        return val

    def getDatasetTypes(self):
        """Return a list of the mappable dataset types."""

        datasetTypes = []
        for attr in dir(self):
            if attr.startswith("map_"):
                datasetTypes.append(attr[4:])
        return datasetTypes

    def map(self, datasetType, dataId, write=False):
        """Map a data id using the mapping method for its dataset type.

        Parameters
        ----------
        datasetType : string
            The datasetType to map
        dataId : DataId instance
            The dataId to use when mapping
        write : bool, optional
            Indicates if the map is being performed for a read operation
            (False) or a write operation (True)

        Returns
        -------
        ButlerLocation or a list of ButlerLocation
            The location(s) found for the map operation. If write is True, a
            list is returned. If write is False a single ButlerLocation is
            returned.

        Raises
        ------
        NoResults
            If no location was found for this map operation, the derived
            mapper class may raise a lsst.daf.persistence.NoResults
            exception. Butler catches this and will look in the next
            Repository if there is one.
        """
        func = getattr(self, 'map_' + datasetType)
        return func(self.validate(dataId), write)

    def canStandardize(self, datasetType):
        """Return true if this mapper can standardize an object of the given
        dataset type."""

        return hasattr(self, 'std_' + datasetType)

    def standardize(self, datasetType, item, dataId):
        """Standardize an object using the standardization method for its
        dataset type, if one exists."""

        if hasattr(self, 'std_' + datasetType):
            func = getattr(self, 'std_' + datasetType)
            return func(item, self.validate(dataId))
        return item

    def validate(self, dataId):
        """Validate a dataId's contents.

        If the dataId is valid, return it. If an invalid component can be
        transformed into a valid one, copy the dataId, fix the component, and
        return the copy. Otherwise, raise an exception."""

        return dataId

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        Not implemented in the base mapper.
        """
        raise NotImplementedError("Base-class Mapper does not implement backups")

    def getRegistry(self):
        """Get the registry."""
        return None
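

# ---------------------------------------------------------------------------
# Illustrative sketch only -- not part of the original module or of the LSST
# API.  It shows how the map_/query_/std_ naming convention described in the
# Mapper docstring is picked up by map(), queryMetadata(), canStandardize()
# and standardize() through getattr().  The dataset type "raw", the data id
# keys "visit"/"ccd", and the path template below are all hypothetical; a
# real mapper would return a lsst.daf.persistence.ButlerLocation from its
# map_ methods rather than a plain path string.

class _ExampleMapper(Mapper):
    """Minimal, hypothetical Mapper subclass used only for illustration."""

    def __init__(self, root="."):
        # The base __new__ has already stashed (args, kwargs) for pickling.
        self.root = root

    def keys(self):
        # Keys that may appear in data ids handled by this mapper.
        return ["visit", "ccd"]

    def map_raw(self, dataId, write):
        # map() dispatches here with a validated dataId and the write flag.
        # A real mapper would build a ButlerLocation; a plain path keeps the
        # sketch self-contained.
        return f"{self.root}/raw-{dataId['visit']}-{dataId['ccd']}.fits"

    def query_raw(self, format, dataId):
        # queryMetadata() dispatches here; a real mapper would consult a
        # registry.  A single hard-wired row stands in for one.
        known = [{"visit": 1, "ccd": 0}]
        return [tuple(row[key] for key in format) for row in known]

    def std_raw(self, item, dataId):
        # standardize() dispatches here; returning the item unchanged is
        # enough for the sketch.
        return item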
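

# Usage sketch for the example above -- again purely illustrative and not
# part of the original module.  Besides the getattr() dispatch, it walks
# through the __new__/__getstate__/__setstate__ pickling scheme: the
# constructor arguments captured in __new__ are what travels through the
# pickle, and __setstate__ re-runs __init__ with them on unpickling.
# Instantiating may emit a deprecation warning from @deprecate_class.

def _example_usage():
    import pickle

    mapper = _ExampleMapper(root="/tmp/repo")

    # Dataset types and standardization support are discovered from the
    # map_/std_ method-name prefixes.
    assert mapper.getDatasetTypes() == ["raw"]
    assert mapper.canStandardize("raw")

    location = mapper.map("raw", {"visit": 1, "ccd": 0})
    values = mapper.queryMetadata("raw", ["visit"], {"ccd": 0})

    # Pickle round-trip: the clone is rebuilt by calling __init__ with the
    # constructor arguments that __new__ saved in self._arguments.
    clone = pickle.loads(pickle.dumps(mapper))
    assert clone.root == mapper.root

    return location, values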