import os
import re
from collections import OrderedDict

from lsst.daf.base import PropertySet
from lsst.daf.persistence import ButlerLocation, NoResults
from lsst.utils import doImport
 
from lsst.afw.image import Exposure, MaskedImage, Image, DecoratedImage
 
__all__ = ["Mapping", "ImageMapping", "ExposureMapping", "CalibrationMapping", "DatasetMapping"]
 
class Mapping:
    """Mapping is a base class for all mappings.  Mappings are used by
    the Mapper to map (determine a path to some data given some
    identifiers) and standardize (convert data into some standard
    format or type) data, and to query the associated registry to see
    what data is available.

    Subclasses must specify self.storage or else override self.map().

    Public methods: lookup, have, need, getKeys, map

    Mappings are specified mainly by policy.  A Mapping policy should
    consist of:

    template (string): a Python string providing the filename for that
    particular dataset type based on some data identifiers.  In the
    case of redundancy in the path (e.g., file uniquely specified by
    the exposure number, but filter in the path), the
    redundant/dependent identifiers can be looked up in the registry.

    python (string): the Python type for the retrieved data (e.g.
    lsst.afw.image.ExposureF)

    persistable (string): the Persistable registration for the on-disk data

    storage (string, optional): Storage type for this dataset type (e.g.
    "FitsStorage")

    level (string, optional): the level in the camera hierarchy at which the
    data is stored (Amp, Ccd or skyTile), if relevant

    tables (string, optional): a whitespace-delimited list of tables in the
    registry that can be NATURAL JOIN-ed to look up additional
    information

    Parameters
    ----------
    datasetType : `str`
        Butler dataset type to be mapped.
    policy : `daf_persistence.Policy`
        Mapping Policy.
    registry : `lsst.obs.base.Registry`
        Registry for metadata lookups.
    rootStorage : Storage subclass instance
        Interface to persisted repository data.
    provided : `list` of `str`
        Keys provided by the mapper.
    """
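    # A minimal sketch of a policy fragment with the entries described above.
    # The dataset type, template, and data ID keys are hypothetical, not taken
    # from any real obs package:
    #
    #   calexp:
    #     template: "calexp/v%(visit)d-f%(filter)s.fits"
    #     python: "lsst.afw.image.ExposureF"
    #     persistable: "ExposureF"
    #     storage: "FitsStorage"
    #     level: "Ccd"
    #     tables: "raw raw_visit"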
    def __init__(self, datasetType, policy, registry, rootStorage, provided=None):
        if policy is None:
            raise RuntimeError("No policy provided for mapping")

        self.datasetType = datasetType
        self.registry = registry
        self.rootStorage = rootStorage
        self._template = policy['template']  # Template path
        self.python = policy['python']  # Python type of the dataset
        self.persistable = policy['persistable'] if 'persistable' in policy else None
        self.storage = policy['storage'] if 'storage' in policy else None
        self.range = None  # Validity range; set by CalibrationMapping
 
        # Build the dict of data ID keys and their Python types from the
        # %(key)<format> fields in the template.
        self.keyDict = dict([
            (k, _formatMap(v, k, datasetType))
            for k, v in
            re.findall(r'\%\((\w+)\).*?([diouxXeEfFgGcrs])', self._template)
        ])
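        # e.g. a hypothetical template "raw/v%(visit)d-f%(filter)s.fits"
        # yields keyDict == {'visit': int, 'filter': str}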
 
        if provided is not None:
            # Keys provided by the mapper itself need not appear in data IDs
            for p in provided:
                if p in self.keyDict:
                    del self.keyDict[p]
 
        if 'level' in policy:
            self.level = policy['level']
        if 'tables' in policy:
            self.tables = policy.asArray('tables')
        else:
            self.tables = None
 
        self.obsTimeName = policy['obsTimeName'] if 'obsTimeName' in policy else None
        self.recipe = policy['recipe'] if 'recipe' in policy else 'default'

    @property
    def template(self):
        if self._template:  # template must not be an empty string or None
            return self._template
        else:
            raise RuntimeError(f"Template is not defined for the {self.datasetType} dataset type, "
                               "it must be set before it can be used.")
 
    def keys(self):
        """Return the dict of keys and value types required for this mapping."""
        return self.keyDict
    def map(self, mapper, dataId, write=False):
        """Standard implementation of map function.

        Parameters
        ----------
        mapper : `lsst.daf.persistence.Mapper`
            Object to be mapped.
        dataId : `dict`
            Dataset identifier.
        write : `bool`
            True if this mapping is being done for a write operation.

        Returns
        -------
        lsst.daf.persistence.ButlerLocation
            Location of object that was mapped.
        """
        actualId = self.need(iter(self.keyDict.keys()), dataId)
        usedDataId = {key: actualId[key] for key in self.keyDict.keys()}
        path = mapper._mapActualToPath(self.template, actualId)
 
        if os.path.isabs(path):
            raise RuntimeError("Mapped path should not be absolute.")
 
        if not write:
            # Search for the path with and without compressed-file extensions,
            # since the dataset may have been gzip- or fpack-compressed on disk.
            for ext in (None, '.gz', '.fz'):
                if ext and path.endswith(ext):
                    continue  # path already ends with the extension
                extPath = path + ext if ext else path
                newPath = self.rootStorage.instanceSearch(extPath)
                if newPath:
                    path = newPath
                    break
 
        assert path, "Fully-qualified filename is empty."
        addFunc = "add_" + self.datasetType  # Name of method for additional data
        if hasattr(mapper, addFunc):
            addFunc = getattr(mapper, addFunc)
            additionalData = addFunc(self.datasetType, actualId)
            assert isinstance(additionalData, PropertySet), \
                "Bad type for returned data: %s" % (type(additionalData),)
 
        else:
            additionalData = None
        return ButlerLocation(pythonType=self.python, cppType=self.persistable, storageName=self.storage,
                              locationList=path, dataId=actualId.copy(), mapper=mapper,
                              storage=self.rootStorage, usedDataId=usedDataId, datasetType=self.datasetType,
                              additionalData=additionalData)
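    # Example (hypothetical values): with template "raw/v%(visit)d-f%(filter)s.fits",
    # map(mapper, {"visit": 1234, "filter": "g"}) returns a ButlerLocation whose
    # locationList points at "raw/v1234-fg.fits" (or a .gz/.fz variant found on read).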
 
    def lookup(self, properties, dataId):
        """Look up properties in a metadata registry given a partial dataId.

        Parameters
        ----------
        properties : `list` of `str`
            What to look up.
        dataId : `dict`
            Partial dataset identifier.

        Returns
        -------
        `list` of `tuple`
            Values of properties.
        """
        if self.registry is None:
            raise RuntimeError("No registry for lookup")
 
        skyMapKeys = ("tract", "patch")
 
        # Separate skymap keys from registry properties: skymap keys are not
        # in the registry, so their values must come from the data ID itself.
        removed = set()
        substitutions = OrderedDict()
        index = 0
        properties = list(properties)  # don't modify the original list
        for p in properties:
            if p in skyMapKeys:
                if p not in dataId:
                    raise RuntimeError(
                        "Cannot look up skymap key '%s'; it must be explicitly included in the data ID" % p
                    )
                # Use the value from the data ID directly
                substitutions[p] = dataId[p]
                removed.add(p)
            else:
                # Remember this property's position in the query results
                substitutions[p] = index
                index += 1
        properties = [p for p in properties if p not in removed]
 
        # Fast path: if only raw-visit metadata is needed, the query can be
        # done against the raw_visit table using the visit number alone.
        fastPath = True
        for p in properties:
            if p not in ('filter', 'expTime', 'taiObs'):
                fastPath = False
                break
 
        if fastPath and 'visit' in dataId and "raw" in self.tables:
            lookupDataId = {'visit': dataId['visit']}
            result = self.registry.lookup(properties, 'raw_visit', lookupDataId, template=self.template)
 
        else:
            where = []
            values = []
            if dataId is not None:
                for k, v in dataId.items():
                    if k in skyMapKeys or k == self.obsTimeName:
                        # Skymap keys were handled above; the observation time
                        # is handled as a validity-range constraint below.
                        continue
                    where.append((k, '?'))
                    values.append(v)
            lookupDataId = {k[0]: v for k, v in zip(where, values)}
            if self.range:
                # self.range is ('?', validStartName, validEndName): constrain
                # the observation time to lie within the validity range.
                lookupDataId[(self.range[1], self.range[2])] = dataId[self.obsTimeName]
            result = self.registry.lookup(properties, self.tables, lookupDataId, template=self.template)
 
        if not removed:
            return result
        # Re-insert the skymap values that were removed from the query, using
        # the positions recorded in substitutions.
        result = [tuple(v if k in removed else item[v] for k, v in substitutions.items())
                  for item in result]
        return result
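    # Example (hypothetical data ID): lookup(["filter", "tract"], {"visit": 1234,
    # "tract": 0}) queries the registry for "filter" only, then returns
    # [(filterValue, 0), ...] with the tract value re-inserted from the data ID.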
 
    def have(self, properties, dataId):
        """Returns whether the provided data identifier has all
        the properties in the provided list.

        Parameters
        ----------
        properties : `list` of `str`
            Properties required.
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `bool`
            True if all properties are present.
        """
        for prop in properties:
            if prop not in dataId:
                return False
        return True
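    # Example (hypothetical data ID): have(["visit", "filter"], {"visit": 1234})
    # returns False; adding "filter" to the data ID makes it True.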
 
    def need(self, properties, dataId):
        """Ensures all properties in the provided list are present in
        the data identifier, looking them up as needed.  This is only
        possible for the case where the data identifies a single
        exposure.

        Parameters
        ----------
        properties : `list` of `str`
            Properties required.
        dataId : `dict`
            Partial dataset identifier

        Returns
        -------
        `dict`
            Copy of dataset identifier with enhanced values.
        """
        newId = dataId.copy()
        newProps = []  # Properties not already present in the data ID
 
        for prop in properties:
            if prop not in newId:
                newProps.append(prop)
 
        if len(newProps) == 0:
            return newId
 
        lookups = self.lookup(newProps, newId)
 
        if len(lookups) != 1:
            raise NoResults("No unique lookup for %s from %s: %d matches" %
                            (newProps, newId, len(lookups)),
                            self.datasetType, newId)
        for i, prop in enumerate(newProps):
            newId[prop] = lookups[0][i]
        return newId
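    # Example (hypothetical data ID): need(["filter"], {"visit": 1234}) looks up
    # the filter for visit 1234 in the registry and returns something like
    # {"visit": 1234, "filter": "g"}.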
 

def _formatMap(ch, k, datasetType):
    """Convert a format character into a Python type."""
    if ch in "diouxX":
        return int
    elif ch in "eEfFgG":
        return float
    elif ch in "crs":
        return str
    else:
        raise RuntimeError("Unexpected format specifier %s"
                           " for field %s in template for dataset %s" %
                           (ch, k, datasetType))
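# For example, _formatMap('d', 'visit', 'raw') returns int, matching a template
# field "%(visit)d"; 's' fields map to str and 'f' fields to float.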
 

class ImageMapping(Mapping):
    """ImageMapping is a Mapping subclass for non-camera images.

    Parameters
    ----------
    datasetType : `str`
        Butler dataset type to be mapped.
    policy : `daf_persistence.Policy`
        Mapping Policy.
    registry : `lsst.obs.base.Registry`
        Registry for metadata lookups
    root : `str`
        Path of root directory
    """
    def __init__(self, datasetType, policy, registry, root, **kwargs):
        Mapping.__init__(self, datasetType, policy, registry, root, **kwargs)
        self.columns = policy.asArray('columns') if 'columns' in policy else None


class ExposureMapping(Mapping):
    """ExposureMapping is a Mapping subclass for normal exposures.

    Parameters
    ----------
    datasetType : `str`
        Butler dataset type to be mapped.
    policy : `daf_persistence.Policy`
        Mapping Policy.
    registry : `lsst.obs.base.Registry`
        Registry for metadata lookups
    root : `str`
        Path of root directory
    """
    def __init__(self, datasetType, policy, registry, root, **kwargs):
        Mapping.__init__(self, datasetType, policy, registry, root, **kwargs)
        self.columns = policy.asArray('columns') if 'columns' in policy else None

    def standardize(self, mapper, item, dataId):
        return mapper._standardizeExposure(self, item, dataId)
 

class CalibrationMapping(Mapping):
    """CalibrationMapping is a Mapping subclass for calibration-type products.

    The difference is that data properties in the query or template
    can be looked up using a reference Mapping in addition to this one.

    CalibrationMapping Policies can contain the following:

    reference (string, optional)
        a list of tables for finding missing dataset
        identifier components (including the observation time, if a validity range
        is required) in the exposure registry; note that the "tables" entry refers
        to the calibration registry

    refCols (string, optional)
        a list of dataset properties required from the
        reference tables for lookups in the calibration registry

    validRange (bool)
        true if the calibration dataset has a validity range
        specified by a column in the tables of the reference dataset in the
        exposure registry (taiObs) and two columns in the tables of this calibration
        dataset in the calibration registry (validStart, validEnd)

    obsTimeName (string, optional)
        the name of the column in the reference
        dataset tables containing the observation time (default "taiObs")

    validStartName (string, optional)
        the name of the column in the
        calibration dataset tables containing the start of the validity range
        (default "validStart")

    validEndName (string, optional)
        the name of the column in the
        calibration dataset tables containing the end of the validity range
        (default "validEnd")

    Parameters
    ----------
    datasetType : `str`
        Butler dataset type to be mapped.
    policy : `daf_persistence.Policy`
        Mapping Policy.
    registry : `lsst.obs.base.Registry`
        Registry for metadata lookups
    calibRegistry : `lsst.obs.base.Registry`
        Registry for calibration metadata lookups.
    calibRoot : `str`
        Path of calibration root directory.
    dataRoot : `str`
        Path of data root directory; used for outputs only.
    """
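    # A minimal sketch of a calibration policy fragment using the entries
    # described above (hypothetical dataset type, template, and tables):
    #
    #   flat:
    #     template: "flat/v%(visit)d-f%(filter)s.fits"
    #     python: "lsst.afw.image.ExposureF"
    #     persistable: "ExposureF"
    #     storage: "FitsStorage"
    #     tables: "flat"
    #     reference: "raw_visit"
    #     validRange: true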
    def __init__(self, datasetType, policy, registry, calibRegistry, calibRoot, dataRoot=None, **kwargs):
        Mapping.__init__(self, datasetType, policy, calibRegistry, calibRoot, **kwargs)
        self.reference = policy.asArray("reference") if "reference" in policy else None
        self.refCols = policy.asArray("refCols") if "refCols" in policy else None
        self.refRegistry = registry
        self.dataRoot = dataRoot
        if "validRange" in policy and policy["validRange"]:
            self.range = ("?", policy["validStartName"], policy["validEndName"])
 
        if "columns" in policy:
            self.columns = policy.asArray("columns")
        else:
            self.columns = None
        if "filter" in policy:
            self.setFilter = policy["filter"]
        else:
            self.setFilter = None
        if "metadataKey" in policy:
            self.metadataKeys = policy.asArray("metadataKey")
        else:
            self.metadataKeys = None
 
    def map(self, mapper, dataId, write=False):
        location = Mapping.map(self, mapper, dataId, write=write)
        # Write outputs to the data repository rather than the calibration
        # repository, if a data root was provided.
        if write and self.dataRoot:
            location.storage = self.dataRoot
        return location
 
    def lookup(self, properties, dataId):
        """Look up properties in a metadata registry given a partial dataId.

        Parameters
        ----------
        properties : `list` of `str`
            Properties to look up.
        dataId : `dict`
            Partial dataset identifier.

        Returns
        -------
        `list` of `tuple`
            Values of properties.
        """
        # Either look up the missing properties in the reference (exposure)
        # registry first, or look everything up in the calibration registry.
        newId = dataId.copy()
        if self.reference is not None:
            where = []
            values = []
            for k, v in dataId.items():
                if self.refCols and k not in self.refCols:
                    continue
                where.append(k)
                values.append(v)

            # Columns that we really need from the reference tables
            if self.columns is not None:
                columns = set(self.columns)
                for k in dataId.keys():
                    columns.discard(k)
            else:
                columns = set(properties)

            if not columns:
                # Nothing to look up in the reference registry
                return Mapping.lookup(self, properties, newId)
 
            lookupDataId = dict(zip(where, values))
            lookups = self.refRegistry.lookup(columns, self.reference, lookupDataId)
            if len(lookups) != 1:
                raise RuntimeError("No unique lookup for %s from %s: %d matches" %
                                   (columns, dataId, len(lookups)))
 
            if columns == set(properties):
                # We have everything we need from the reference registry
                return lookups
 
            for i, prop in enumerate(columns):
                newId[prop] = lookups[0][i]
 
        return Mapping.lookup(self, properties, newId)
 
    def standardize(self, mapper, item, dataId):
        """Default standardization function for calibration datasets.

        If the item is of a type that should be standardized, the base class
        ``standardizeExposure`` method is called, otherwise the item is returned
        unmodified.

        Parameters
        ----------
        mapper : `lsst.daf.persistence.Mapper`
            Mapper object to pass through.
        item : object
            Will be standardized if of type lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage
        dataId : `dict`
            Dataset identifier

        Returns
        -------
        `lsst.afw.image.Exposure` or item
            The standardized object.
        """
        if issubclass(doImport(self.python), (Exposure, MaskedImage, Image, DecoratedImage)):
            return mapper._standardizeExposure(self, item, dataId, filter=self.setFilter)
        return item
 

class DatasetMapping(Mapping):
    """DatasetMapping is a Mapping subclass for non-Exposure datasets that can
    be retrieved by the standard daf_persistence mechanism.

    The differences are that the Storage type must be specified and no
    Exposure standardization is performed.

    The "storage" entry in the Policy is mandatory; the "tables" entry is
    optional; no "level" entry is allowed.

    Parameters
    ----------
    datasetType : `str`
        Butler dataset type to be mapped.
    policy : `daf_persistence.Policy`
        Mapping Policy.
    registry : `lsst.obs.base.Registry`
        Registry for metadata lookups
    root : `str`
        Path of root directory
    """
    def __init__(self, datasetType, policy, registry, root, **kwargs):
        Mapping.__init__(self, datasetType, policy, registry, root, **kwargs)
        self.storage = policy["storage"]  # Storage type
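    # A minimal sketch of a policy fragment for a DatasetMapping (hypothetical
    # dataset type; note the mandatory "storage" entry and the absence of a
    # "level" entry):
    #
    #   processCcd_config:
    #     template: "config/processCcd-v%(visit)d.py"
    #     python: "lsst.pipe.tasks.processCcd.ProcessCcdConfig"
    #     persistable: "Config"
    #     storage: "ConfigStorage"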