LSST Applications  21.0.0-172-gfb10e10a+18fedfabac,22.0.0+297cba6710,22.0.0+80564b0ff1,22.0.0+8d77f4f51a,22.0.0+a28f4c53b1,22.0.0+dcf3732eb2,22.0.1-1-g7d6de66+2a20fdde0d,22.0.1-1-g8e32f31+297cba6710,22.0.1-1-geca5380+7fa3b7d9b6,22.0.1-12-g44dc1dc+2a20fdde0d,22.0.1-15-g6a90155+515f58c32b,22.0.1-16-g9282f48+790f5f2caa,22.0.1-2-g92698f7+dcf3732eb2,22.0.1-2-ga9b0f51+7fa3b7d9b6,22.0.1-2-gd1925c9+bf4f0e694f,22.0.1-24-g1ad7a390+a9625a72a8,22.0.1-25-g5bf6245+3ad8ecd50b,22.0.1-25-gb120d7b+8b5510f75f,22.0.1-27-g97737f7+2a20fdde0d,22.0.1-32-gf62ce7b1+aa4237961e,22.0.1-4-g0b3f228+2a20fdde0d,22.0.1-4-g243d05b+871c1b8305,22.0.1-4-g3a563be+32dcf1063f,22.0.1-4-g44f2e3d+9e4ab0f4fa,22.0.1-42-gca6935d93+ba5e5ca3eb,22.0.1-5-g15c806e+85460ae5f3,22.0.1-5-g58711c4+611d128589,22.0.1-5-g75bb458+99c117b92f,22.0.1-6-g1c63a23+7fa3b7d9b6,22.0.1-6-g50866e6+84ff5a128b,22.0.1-6-g8d3140d+720564cf76,22.0.1-6-gd805d02+cc5644f571,22.0.1-8-ge5750ce+85460ae5f3,master-g6e05de7fdc+babf819c66,master-g99da0e417a+8d77f4f51a,w.2021.48
LSST Data Management Base Package
Public Member Functions | Public Attributes | List of all members
lsst.daf.persistence.repositoryMapper.RepositoryMapper Class Reference
Inheritance diagram for lsst.daf.persistence.repositoryMapper.RepositoryMapper:
lsst.daf.persistence.mapper.Mapper

Public Member Functions

def __init__ (self, storage, policy)
 
def __repr__ (self)
 
def map_cfg (self, dataId, write)
 
def map_repo (self, dataId, write)
 
def __new__ (cls, *args, **kwargs)
 
def __getstate__ (self)
 
def __setstate__ (self, state)
 
def keys (self)
 
def queryMetadata (self, datasetType, format, dataId)
 
def getDatasetTypes (self)
 
def map (self, datasetType, dataId, write=False)
 
def canStandardize (self, datasetType)
 
def standardize (self, datasetType, item, dataId)
 
def validate (self, dataId)
 
def backup (self, datasetType, dataId)
 
def getRegistry (self)
 

Public Attributes

 policy
 
 storage
 

Detailed Description

Base class for a mapper to find repository configurations within a butler repository.

.. warning::

        cfg is 'wet paint' and very likely to change. Use of it in production code other than via the 'old
        butler' API is strongly discouraged.

Definition at line 28 of file repositoryMapper.py.

Constructor & Destructor Documentation

◆ __init__()

def lsst.daf.persistence.repositoryMapper.RepositoryMapper.__init__ (   self,
  storage,
  policy 
)

Definition at line 37 of file repositoryMapper.py.

37  def __init__(self, storage, policy):
38  # todo I'm guessing the policy would probably want to come from the default in-package location, and
39  # then be overridden where desired by policy in repository root, and then
40  # have the cfg policy applied
41  self.policy = Policy(policy)
42  self.storage = storage
43 

Member Function Documentation

◆ __getstate__()

def lsst.daf.persistence.mapper.Mapper.__getstate__ (   self)
inherited

Definition at line 103 of file mapper.py.

103  def __getstate__(self):
104  return self._arguments
105 

◆ __new__()

def lsst.daf.persistence.mapper.Mapper.__new__ (   cls,
args,
**  kwargs 
)
inherited
Create a new Mapper, saving arguments for pickling.

This is in __new__ instead of __init__ to save the user
from having to save the arguments themselves (either explicitly,
or by calling the super's __init__ with all their
*args, **kwargs).  The resulting pickling system (of __new__,
__getstate__ and __setstate__) is similar to how __reduce__
is usually used, except that we save the user from any
responsibility (except when overriding __new__, but that
is not common).

Definition at line 84 of file mapper.py.

84  def __new__(cls, *args, **kwargs):
85  """Create a new Mapper, saving arguments for pickling.
86 
87  This is in __new__ instead of __init__ to save the user
88  from having to save the arguments themselves (either explicitly,
89  or by calling the super's __init__ with all their
90  *args, **kwargs). The resulting pickling system (of __new__,
91  __getstate__ and __setstate__) is similar to how __reduce__
92  is usually used, except that we save the user from any
93  responsibility (except when overriding __new__, but that
94  is not common).
95  """
96  self = super().__new__(cls)
97  self._arguments = (args, kwargs)
98  return self
99 

◆ __repr__()

def lsst.daf.persistence.repositoryMapper.RepositoryMapper.__repr__ (   self)

Definition at line 44 of file repositoryMapper.py.

44  def __repr__(self):
45  if 'policy' in self.__dict__ and 'storageCfg' in self.__dict__:
46  return 'RepositoryMapper(policy=%s, storageCfg=%s)' % (self.policy, self.storageCfg)
47  else:
48  return 'uninitialized RepositoryMapper'
49 

◆ __setstate__()

def lsst.daf.persistence.mapper.Mapper.__setstate__ (   self,
  state 
)
inherited

Definition at line 106 of file mapper.py.

106  def __setstate__(self, state):
107  self._arguments = state
108  args, kwargs = state
109  self.__init__(*args, **kwargs)
110 

◆ backup()

def lsst.daf.persistence.mapper.Mapper.backup (   self,
  datasetType,
  dataId 
)
inherited
Rename any existing object with the given type and dataId.

Not implemented in the base mapper.

Definition at line 191 of file mapper.py.

191  def backup(self, datasetType, dataId):
192  """Rename any existing object with the given type and dataId.
193 
194  Not implemented in the base mapper.
195  """
196  raise NotImplementedError("Base-class Mapper does not implement backups")
197 

◆ canStandardize()

def lsst.daf.persistence.mapper.Mapper.canStandardize (   self,
  datasetType 
)
inherited
Return true if this mapper can standardize an object of the given
dataset type.

Definition at line 167 of file mapper.py.

167  def canStandardize(self, datasetType):
168  """Return true if this mapper can standardize an object of the given
169  dataset type."""
170 
171  return hasattr(self, 'std_' + datasetType)
172 

◆ getDatasetTypes()

def lsst.daf.persistence.mapper.Mapper.getDatasetTypes (   self)
inherited
Return a list of the mappable dataset types.

Definition at line 128 of file mapper.py.

128  def getDatasetTypes(self):
129  """Return a list of the mappable dataset types."""
130 
131  list = []
132  for attr in dir(self):
133  if attr.startswith("map_"):
134  list.append(attr[4:])
135  return list
136 

◆ getRegistry()

def lsst.daf.persistence.mapper.Mapper.getRegistry (   self)
inherited
Get the registry

Definition at line 198 of file mapper.py.

198  def getRegistry(self):
199  """Get the registry"""
200  return None

◆ keys()

def lsst.daf.persistence.mapper.Mapper.keys (   self)
inherited

Reimplemented in lsst.pipe.tasks.mocks.simpleMapper.SimpleMapper.

Definition at line 111 of file mapper.py.

111  def keys(self):
112  raise NotImplementedError("keys() unimplemented")
113 

◆ map()

def lsst.daf.persistence.mapper.Mapper.map (   self,
  datasetType,
  dataId,
  write = False 
)
inherited
Map a data id using the mapping method for its dataset type.

Parameters
----------
datasetType : string
    The datasetType to map
dataId : DataId instance
    The dataId to use when mapping
write : bool, optional
    Indicates if the map is being performed for a read operation
    (False) or a write operation (True)

Returns
-------
ButlerLocation or a list of ButlerLocation
    The location(s) found for the map operation. If write is True, a
    list is returned. If write is False a single ButlerLocation is
    returned.

Raises
------
NoResults
    If no location was found for this map operation, the derived mapper
    class may raise a lsst.daf.persistence.NoResults exception. Butler
    catches this and will look in the next Repository if there is one.

Definition at line 137 of file mapper.py.

137  def map(self, datasetType, dataId, write=False):
138  """Map a data id using the mapping method for its dataset type.
139 
140  Parameters
141  ----------
142  datasetType : string
143  The datasetType to map
144  dataId : DataId instance
145  The dataId to use when mapping
146  write : bool, optional
147  Indicates if the map is being performed for a read operation
148  (False) or a write operation (True)
149 
150  Returns
151  -------
152  ButlerLocation or a list of ButlerLocation
153  The location(s) found for the map operation. If write is True, a
154  list is returned. If write is False a single ButlerLocation is
155  returned.
156 
157  Raises
158  ------
159  NoResults
160  If no location was found for this map operation, the derived mapper
161  class may raise a lsst.daf.persistence.NoResults exception. Butler
162  catches this and will look in the next Repository if there is one.
163  """
164  func = getattr(self, 'map_' + datasetType)
165  return func(self.validate(dataId), write)
166 

◆ map_cfg()

def lsst.daf.persistence.repositoryMapper.RepositoryMapper.map_cfg (   self,
  dataId,
  write 
)
Map a location for a cfg file.

:param dataId: keys & values to be applied to the template.
:param write: True if this map is being done to perform a write operation, else assumes read. Will
              verify location exists if write is True.
:return: a butlerLocation that describes the mapped location.

Definition at line 50 of file repositoryMapper.py.

50  def map_cfg(self, dataId, write):
51  """Map a location for a cfg file.
52 
53  :param dataId: keys & values to be applied to the template.
54  :param write: True if this map is being done to perform a write operation, else assumes read. Will
55  verify location exists if write is True.
56  :return: a butlerLocation that describes the mapped location.
57  """
58  # todo check: do we need keys to complete dataId? (search Registry)
59  template = self.policy['repositories.cfg.template']
60  location = template % dataId
61  if not write and not self.storage.exists(location):
62  return None
63  bl = ButlerLocation(
64  pythonType=self.policy['repositories.cfg.python'],
65  cppType=None,
66  storageName=self.policy['repositories.cfg.storage'],
67  locationList=(self.storage.locationWithRoot(location),),
68  dataId=dataId,
69  mapper=self)
70  return bl
71 

◆ map_repo()

def lsst.daf.persistence.repositoryMapper.RepositoryMapper.map_repo (   self,
  dataId,
  write 
)

Definition at line 72 of file repositoryMapper.py.

72  def map_repo(self, dataId, write):
73  if write:
74  return None
75 
76  # todo check: do we need keys to complete dataId? (search Registry)
77 
78  template = self.policy['repositories.repo.template']
79  location = template % dataId
80  if self.storage.exists(location):
81  bl = ButlerLocation(
82  pythonType=self.policy['repositories.repo.python'],
83  cppType=None,
84  storageName=None,
85  locationList=(location,),
86  dataId=dataId,
87  mapper=self)
88  return bl
89  return None

◆ queryMetadata()

def lsst.daf.persistence.mapper.Mapper.queryMetadata (   self,
  datasetType,
  format,
  dataId 
)
inherited
Get possible values for keys given a partial data id.

:param datasetType: see documentation about the use of datasetType
:param key: this is used as the 'level' parameter
:param format:
:param dataId: see documentation about the use of dataId
:return:

Definition at line 114 of file mapper.py.

114  def queryMetadata(self, datasetType, format, dataId):
115  """Get possible values for keys given a partial data id.
116 
117  :param datasetType: see documentation about the use of datasetType
118  :param key: this is used as the 'level' parameter
119  :param format:
120  :param dataId: see documentation about the use of dataId
121  :return:
122  """
123  func = getattr(self, 'query_' + datasetType)
124 
125  val = func(format, self.validate(dataId))
126  return val
127 

◆ standardize()

def lsst.daf.persistence.mapper.Mapper.standardize (   self,
  datasetType,
  item,
  dataId 
)
inherited
Standardize an object using the standardization method for its data
set type, if it exists.

Definition at line 173 of file mapper.py.

173  def standardize(self, datasetType, item, dataId):
174  """Standardize an object using the standardization method for its data
175  set type, if it exists."""
176 
177  if hasattr(self, 'std_' + datasetType):
178  func = getattr(self, 'std_' + datasetType)
179  return func(item, self.validate(dataId))
180  return item
181 

◆ validate()

def lsst.daf.persistence.mapper.Mapper.validate (   self,
  dataId 
)
inherited
Validate a dataId's contents.

If the dataId is valid, return it.  If an invalid component can be
transformed into a valid one, copy the dataId, fix the component, and
return the copy.  Otherwise, raise an exception.

Definition at line 182 of file mapper.py.

182  def validate(self, dataId):
183  """Validate a dataId's contents.
184 
185  If the dataId is valid, return it. If an invalid component can be
186  transformed into a valid one, copy the dataId, fix the component, and
187  return the copy. Otherwise, raise an exception."""
188 
189  return dataId
190 

Member Data Documentation

◆ policy

lsst.daf.persistence.repositoryMapper.RepositoryMapper.policy

Definition at line 41 of file repositoryMapper.py.

◆ storage

lsst.daf.persistence.repositoryMapper.RepositoryMapper.storage

Definition at line 42 of file repositoryMapper.py.


The documentation for this class was generated from the following file: