LSSTApplications  10.0-2-g4f67435,11.0.rc2+1,11.0.rc2+12,11.0.rc2+3,11.0.rc2+4,11.0.rc2+5,11.0.rc2+6,11.0.rc2+7,11.0.rc2+8
LSSTDataManagementBasePackage
mapper.py
Go to the documentation of this file.
#!/usr/bin/env python

#
# LSST Data Management System
# Copyright 2008, 2009, 2010 LSST Corporation.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#

"""This module defines the Mapper base class."""


class Mapper(object):
    """Mapper is a base class for all mappers.

    Subclasses may define the following methods:

    map_{datasetType}(self, dataId, write)
        Map a dataset id for the given dataset type into a ButlerLocation.
        If write=True, this mapping is for an output dataset.

    query_{datasetType}(self, key, format, dataId)
        Return the possible values for the format fields that would produce
        datasets at the granularity of key in combination with the provided
        partial dataId.

    std_{datasetType}(self, item)
        Standardize an object of the given data set type.

    Methods that must be overridden:

    keys(self)
        Return a list of the keys that can be used in data ids.

    Other public methods:

    __init__(self)

    getDatasetTypes(self)

    map(self, datasetType, dataId, write=False)

    queryMetadata(self, datasetType, key, format, dataId)

    canStandardize(self, datasetType)

    standardize(self, datasetType, item, dataId)

    validate(self, dataId)
    """

    def __new__(cls, *args, **kwargs):
        """Create a new Mapper, saving arguments for pickling.

        This is in __new__ instead of __init__ to save the user
        from having to save the arguments themselves (either explicitly,
        or by calling the super's __init__ with all their
        *args, **kwargs).  The resulting pickling system (of __new__,
        __getstate__ and __setstate__) is similar to how __reduce__
        is usually used, except that we save the user from any
        responsibility (except when overriding __new__, but that
        is not common).
        """
        self = super(Mapper, cls).__new__(cls)
        # Stash the constructor arguments; __getstate__ returns them and
        # __setstate__ replays them through __init__.
        self._arguments = (args, kwargs)
        return self

    def __init__(self):
        pass

    def __getstate__(self):
        # The saved constructor arguments are the complete pickled state.
        return self._arguments

    def __setstate__(self, state):
        """Restore from pickle by re-running __init__ with the saved args."""
        self._arguments = state
        args, kwargs = state
        self.__init__(*args, **kwargs)

    def keys(self):
        """Return a list of the keys that can be used in data ids.

        Must be overridden by subclasses.
        """
        raise NotImplementedError("keys() unimplemented")

    def queryMetadata(self, datasetType, key, format, dataId):
        """Return possible values for keys given a partial data id.

        Dispatches to the subclass-provided query_{datasetType} method;
        raises AttributeError if none exists.
        """
        func = getattr(self, 'query_' + datasetType)
        return func(key, format, self.validate(dataId))

    def getDatasetTypes(self):
        """Return a list of the mappable dataset types.

        A dataset type is defined by the presence of a map_{datasetType}
        attribute (normally a method) on this mapper.
        """
        prefix = "map_"
        return [attr[len(prefix):] for attr in dir(self)
                if attr.startswith(prefix)]

    def map(self, datasetType, dataId, write=False):
        """Map a data id using the mapping method for its dataset type.

        Dispatches to the subclass-provided map_{datasetType} method;
        raises AttributeError if none exists.
        """
        func = getattr(self, 'map_' + datasetType)
        return func(self.validate(dataId), write)

    def canStandardize(self, datasetType):
        """Return true if this mapper can standardize an object of the given
        dataset type (i.e. a std_{datasetType} attribute exists)."""
        return hasattr(self, 'std_' + datasetType)

    def standardize(self, datasetType, item, dataId):
        """Standardize an object using the standardization method for its data
        set type, if it exists; otherwise return the item unchanged."""
        # Single lookup instead of hasattr-then-getattr.
        func = getattr(self, 'std_' + datasetType, None)
        if func is not None:
            return func(item, self.validate(dataId))
        return item

    def validate(self, dataId):
        """Validate a dataId's contents.

        If the dataId is valid, return it. If an invalid component can be
        transformed into a valid one, copy the dataId, fix the component, and
        return the copy. Otherwise, raise an exception.

        The base class accepts everything and returns the dataId unchanged.
        """
        return dataId

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        Not implemented in the base mapper.
        """
        raise NotImplementedError("Base-class Mapper does not implement backups")