   26 """This module defines the Mapper base class.""" 
   30     """Mapper is a base class for all mappers. 
   32     Subclasses may define the following methods: 
   34     map_{datasetType}(self, dataId, write) 
   35         Map a dataset id for the given dataset type into a ButlerLocation. 
   36         If write=True, this mapping is for an output dataset. 
   38     query_{datasetType}(self, key, format, dataId) 
   39         Return the possible values for the format fields that would produce 
   40         datasets at the granularity of key in combination with the provided 
   43     std_{datasetType}(self, item) 
   44         Standardize an object of the given data set type. 
   46     Methods that must be overridden: 
   49         Return a list of the keys that can be used in data ids. 
   57     map(self, datasetType, dataId, write=False) 
   59     queryMetadata(self, datasetType, key, format, dataId) 
   61     canStandardize(self, datasetType) 
   63     standardize(self, datasetType, item, dataId) 
   65     validate(self, dataId) 
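
    # A runnable sketch of a minimal concrete mapper exercising these hooks
    # appears at the end of this listing.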

    @staticmethod
    def Mapper(cfg):
        """Instantiate a Mapper from a configuration.

        In some cases the cfg may already have been instantiated into a
        Mapper; this is allowed, and the input is simply returned.

        :param cfg: the cfg for this mapper. It is recommended this be
                    created by calling Mapper.cfg().
        :return: a Mapper instance
        """
        if isinstance(cfg, Policy):
            return cfg['cls'](cfg)
        return cfg
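
    # For example (a sketch only; the exact Policy contents are repository
    # specific, and MyMapper is a hypothetical concrete subclass):
    #
    #     cfg = Policy({'cls': MyMapper})   # assumed Policy contents
    #     mapper = Mapper.Mapper(cfg)       # returns MyMapper(cfg)
    #     mapper = Mapper.Mapper(mapper)    # already a Mapper: returned as-is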
 
    def __new__(cls, *args, **kwargs):
        """Create a new Mapper, saving arguments for pickling.

        This is in __new__ instead of __init__ to save the user
        from having to save the arguments themselves (either explicitly,
        or by calling the super's __init__ with all their
        *args, **kwargs).  The resulting pickling system (of __new__,
        __getstate__ and __setstate__) is similar to how __reduce__
        is usually used, except that we save the user from any
        responsibility (except when overriding __new__, but that
        is uncommon).
        """
        self = super().__new__(cls)
        # Record the constructor arguments so that pickling can re-create
        # the instance via __getstate__/__setstate__.
        self._arguments = (args, kwargs)
        return self

    def __init__(self, **kwargs):
        pass

    def __getstate__(self):
        return self._arguments

    def __setstate__(self, state):
        self._arguments = state
        args, kwargs = state
        self.__init__(*args, **kwargs)
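
    # A sketch of the resulting round trip (assuming a picklable concrete
    # subclass): pickle.dumps(mapper) stores only the (args, kwargs) captured
    # in __new__, and pickle.loads() re-creates the instance and re-runs
    # __init__(*args, **kwargs) via __setstate__, so subclasses get pickling
    # support without writing __reduce__ themselves.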
 
    def keys(self):
        raise NotImplementedError("keys() unimplemented")
 
    def queryMetadata(self, datasetType, format, dataId):
        """Get possible values for keys given a partial data id.

        :param datasetType: see documentation about the use of datasetType
        :param format: the key or keys to query; this is used as the 'level'
                       parameter
        :param dataId: see documentation about the use of dataId
        :return: the possible values for the requested keys
        """
        # Dispatch to the query_{datasetType} method defined by the subclass.
        func = getattr(self, 'query_' + datasetType)
        return func(format, self.validate(dataId))
 
    def getDatasetTypes(self):
        """Return a list of the mappable dataset types."""
        datasetTypes = []
        # Any map_{datasetType} method marks its suffix as a mappable type.
        for attr in dir(self):
            if attr.startswith("map_"):
                datasetTypes.append(attr[4:])
        return datasetTypes
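
    # For example, a subclass defining map_raw and map_calexp (hypothetical
    # dataset types) would yield ['calexp', 'raw'] here, since dir() returns
    # attribute names in sorted order.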
 
    def map(self, datasetType, dataId, write=False):
        """Map a data id using the mapping method for its dataset type.

        Parameters
        ----------
        datasetType : string
            The datasetType to map
        dataId : DataId instance
            The dataId to use when mapping
        write : bool, optional
            Indicates if the map is being performed for a read operation
            (False) or a write operation (True)

        Returns
        -------
        ButlerLocation or a list of ButlerLocation
            The location(s) found for the map operation. If write is True, a
            list is returned. If write is False a single ButlerLocation is
            returned.

        Raises
        ------
        NoResults
            If no location was found for this map operation, the derived
            mapper class may raise a lsst.daf.persistence.NoResults
            exception. Butler catches this and will look in the next
            Repository if there is one.
        """
        # Dispatch to the map_{datasetType} method defined by the subclass.
        func = getattr(self, 'map_' + datasetType)
        return func(self.validate(dataId), write)
 
    def canStandardize(self, datasetType):
        """Return true if this mapper can standardize an object of the given
        dataset type."""
        return hasattr(self, 'std_' + datasetType)
 
    def standardize(self, datasetType, item, dataId):
        """Standardize an object using the standardization method for its
        dataset type, if it exists."""
        if hasattr(self, 'std_' + datasetType):
            func = getattr(self, 'std_' + datasetType)
            return func(item, self.validate(dataId))
        return item
 
    def validate(self, dataId):
        """Validate a dataId's contents.

        If the dataId is valid, return it.  If an invalid component can be
        transformed into a valid one, copy the dataId, fix the component, and
        return the copy.  Otherwise, raise an exception."""
        # The base class accepts any dataId unchanged.
        return dataId

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        Not implemented in the base mapper.
        """
        raise NotImplementedError("Base-class Mapper does not implement backups")
 
    def getRegistry(self):
        """Get the registry"""
        return None
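
# A minimal sketch of the dispatch convention described in the class
# docstring (not part of mapper.py itself): the 'raw' dataset type, the key
# names, and the plain-string stand-in for ButlerLocation are assumptions
# for illustration only.
class _SketchMapper(Mapper):
    def keys(self):
        return ["visit", "ccd"]

    def map_raw(self, dataId, write):
        # A real mapper would build and return a ButlerLocation here.
        return "raw/v%(visit)d/c%(ccd)d.fits" % dataId

    def query_raw(self, format, dataId):
        # Possible values for the requested format fields.
        return [(1, 0), (1, 1)]

    def std_raw(self, item, dataId):
        # Trivial standardization: return the item unchanged.
        return item


if __name__ == "__main__":
    mapper = _SketchMapper()
    print(mapper.getDatasetTypes())                    # ['raw'], via map_ prefix
    print(mapper.map("raw", {"visit": 1, "ccd": 0}))   # dispatches to map_raw
    print(mapper.canStandardize("raw"))                # True, since std_raw exists
    print(mapper.queryMetadata("raw", ["visit", "ccd"], {}))  # via query_raw
    print(mapper.standardize("raw", "item", {"visit": 1, "ccd": 0}))  # via std_raw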
  