from collections import OrderedDict
import os
import re

from lsst.afw.image import Exposure, MaskedImage, Image, DecoratedImage
# Public API of this module: the base Mapping plus its four subclasses.
__all__ = ["Mapping", "ImageMapping", "ExposureMapping",
           "CalibrationMapping", "DatasetMapping"]
# NOTE(review): garbled extraction of the `Mapping` base-class docstring and
# its `__init__`; many original statements are missing.  Recover the full text
# from version control before editing further.  The comments below record only
# what the visible fragments establish.
36 """Mapping is a base class for all mappings. Mappings are used by 37 the Mapper to map (determine a path to some data given some 38 identifiers) and standardize (convert data into some standard 39 format or type) data, and to query the associated registry to see 40 what data is available. 42 Subclasses must specify self.storage or else override self.map(). 44 Public methods: lookup, have, need, getKeys, map 46 Mappings are specified mainly by policy. A Mapping policy should 49 template (string): a Python string providing the filename for that 50 particular dataset type based on some data identifiers. In the 51 case of redundancy in the path (e.g., file uniquely specified by 52 the exposure number, but filter in the path), the 53 redundant/dependent identifiers can be looked up in the registry. 55 python (string): the Python type for the retrieved data (e.g. 56 lsst.afw.image.ExposureF) 58 persistable (string): the Persistable registration for the on-disk data 61 storage (string, optional): Storage type for this dataset type (e.g. 64 level (string, optional): the level in the camera hierarchy at which the 65 data is stored (Amp, Ccd or skyTile), if relevant 67 tables (string, optional): a whitespace-delimited list of tables in the 68 registry that can be NATURAL JOIN-ed to look up additional 74 Butler dataset type to be mapped. 75 policy : `daf_persistence.Policy` 77 registry : `lsst.obs.base.Registry` 78 Registry for metadata lookups. 79 rootStorage : Storage subclass instance 80 Interface to persisted repository data. 81 provided : `list` of `str` 82 Keys provided by the mapper. 85 def __init__(self, datasetType, policy, registry, rootStorage, provided=None):
# A missing policy is a hard error for any mapping.
88 raise RuntimeError(
"No policy provided for mapping")
# keyDict construction: each %(name)X field of the template is paired with the
# Python type implied by its printf-style conversion character, via _formatMap.
102 (k, _formatMap(v, k, datasetType))
104 re.findall(
r'\%\((\w+)\).*?([diouxXeEfFgGcrs])', self.
template)
# Keys supplied by the mapper itself ("provided") are removed from keyDict.
108 if provided
is not None:
# Optional policy entries; the assignments for 'level'/'tables' are lost here.
115 if 'level' in policy:
117 if 'tables' in policy:
# obsTimeName and recipe fall back to None and 'default' respectively when the
# policy does not supply them.
123 self.
obsTimeName = policy[
'obsTimeName']
if 'obsTimeName' in policy
else None 124 self.
recipe = policy[
'recipe']
if 'recipe' in policy
else 'default' 131 raise RuntimeError(
"Template is not defined for the {} dataset type, ".
format(self.
datasetType) +
132 "it must be set before it can be used.")
# NOTE(review): garbled extraction of `Mapping.getKeys` (docstring only
# survives) and `Mapping.map`; several statements are missing.  Recover the
# full methods from version control before editing.
135 """Return the dict of keys and value types required for this mapping.""" 138 def map(self, mapper, dataId, write=False):
139 """Standard implementation of map function. 143 mapper: `lsst.daf.persistence.Mapper` 150 lsst.daf.persistence.ButlerLocation 151 Location of object that was mapped. 154 usedDataId = {key: actualId[key]
# Restrict the data id to exactly the keys the template requires.
for key
in self.
keyDict.
keys()}
155 path = mapper._mapActualToPath(self.
template, actualId)
# Mapped paths must be relative (resolved against the repository root).
156 if os.path.isabs(path):
157 raise RuntimeError(
"Mapped path should not be absolute.")
# Probe compressed variants of the file in addition to the plain path.
164 for ext
in (
None,
'.gz',
'.fz'):
165 if ext
and path.endswith(ext):
167 extPath = path + ext
if ext
else path
172 assert path,
"Fully-qualified filename is empty." 175 if hasattr(mapper, addFunc):
176 addFunc = getattr(mapper, addFunc)
177 additionalData = addFunc(self.
datasetType, actualId)
# NOTE(review): the assertion message below is missing the `%` operator
# ("... %s" % (type(...),)) — a latent TypeError should the assert ever fire.
178 assert isinstance(additionalData, PropertySet), \
179 "Bad type for returned data: %s" (
type(additionalData),)
181 additionalData =
None 184 locationList=path, dataId=actualId.copy(), mapper=mapper,
186 additionalData=additionalData)
# NOTE(review): garbled extraction of `Mapping.lookup`; the substitution and
# query-building logic is only partially present.  Recover the full method from
# version control before editing.
189 """Look up properties for in a metadata registry given a partial 194 properties : `list` of `str` 202 Values of properties. 205 raise RuntimeError(
"No registry for lookup")
# Skymap keys cannot be looked up in the exposure registry; they must already
# be present in the data id (see the error message below).
207 skyMapKeys = (
"tract",
"patch")
219 substitutions = OrderedDict()
221 properties =
list(properties)
225 substitutions[p] = dataId[p]
229 "Cannot look up skymap key '%s'; it must be explicitly included in the data ID" % p
232 substitutions[p] = index
# Fast path: a visit-keyed lookup against the raw table when possible;
# 'filter'/'expTime'/'taiObs' disqualify it per the test below.
240 if p
not in (
'filter',
'expTime',
'taiObs'):
243 if fastPath
and 'visit' in dataId
and "raw" in self.
tables:
244 lookupDataId = {
'visit': dataId[
'visit']}
# Slow path: build a WHERE clause from every (key, value) pair in the data id.
247 if dataId
is not None:
248 for k, v
in dataId.items():
255 where.append((k,
'?'))
257 lookupDataId = {k[0]: v
for k, v
in zip(where, values)}
# Reassemble each registry row into the caller's requested property order.
266 result = [tuple(v
if k
in removed
else item[v]
for k, v
in substitutions.items())
def have(self, properties, dataId):
    """Returns whether the provided data identifier has all
    the properties in the provided list.

    Parameters
    ----------
    properties : `list` of `str`
        Properties required.
    dataId : `dict`
        Dataset identifier.

    Returns
    -------
    bool
        True if all properties are present.
    """
    # Any missing property makes the whole check fail; an empty property
    # list is trivially satisfied.
    for prop in properties:
        if prop not in dataId:
            return False
    return True
def need(self, properties, dataId):
    """Ensures all properties in the provided list are present in
    the data identifier, looking them up as needed. This is only
    possible for the case where the data identifies a single
    exposure.

    Parameters
    ----------
    properties : `list` of `str`
        Properties required.
    dataId : `dict`
        Partial dataset identifier

    Returns
    -------
    `dict`
        Copy of dataset identifier with enhanced values.

    Raises
    ------
    NoResults
        If the registry lookup does not yield exactly one match.
    """
    newId = dataId.copy()
    # Collect only the properties not already present; if there are none,
    # the registry lookup can be skipped entirely.
    newProps = []
    for prop in properties:
        if prop not in newId:
            newProps.append(prop)
    if len(newProps) == 0:
        return newId

    lookups = self.lookup(newProps, newId)
    if len(lookups) != 1:
        raise NoResults("No unique lookup for %s from %s: %d matches" %
                        (newProps, newId, len(lookups)),
                        self.datasetType, dataId)
    for i, prop in enumerate(newProps):
        newId[prop] = lookups[0][i]
    return newId
327 def _formatMap(ch, k, datasetType):
328 """Convert a format character into a Python type.""" 336 raise RuntimeError(
"Unexpected format specifier %s" 337 " for field %s in template for dataset %s" %
338 (ch, k, datasetType))
class ImageMapping(Mapping):
    """ImageMapping is a Mapping subclass for non-camera images.

    Parameters
    ----------
    datasetType : `str`
        Butler dataset type to be mapped.
    policy : `daf_persistence.Policy`
        Mapping Policy.
    registry : `lsst.obs.base.Registry`
        Registry for metadata lookups
    root : `str`
        Path of root directory
    """

    def __init__(self, datasetType, policy, registry, root, **kwargs):
        Mapping.__init__(self, datasetType, policy, registry, root, **kwargs)
        # Optional list of registry columns associated with this dataset.
        self.columns = policy.asArray('columns') if 'columns' in policy else None
class ExposureMapping(Mapping):
    """ExposureMapping is a Mapping subclass for normal exposures.

    Parameters
    ----------
    datasetType : `str`
        Butler dataset type to be mapped.
    policy : `daf_persistence.Policy`
        Mapping Policy.
    registry : `lsst.obs.base.Registry`
        Registry for metadata lookups
    root : `str`
        Path of root directory
    """

    def __init__(self, datasetType, policy, registry, root, **kwargs):
        Mapping.__init__(self, datasetType, policy, registry, root, **kwargs)
        # Optional list of registry columns associated with this dataset.
        self.columns = policy.asArray('columns') if 'columns' in policy else None

    def standardize(self, mapper, item, dataId):
        # Delegate exposure standardization to the mapper.
        return mapper._standardizeExposure(self, item, dataId)
# NOTE(review): garbled extraction of the `CalibrationMapping` docstring and
# its `__init__`; the bodies of the 'columns'/'filter'/'metadataKey' branches
# at the end are missing.  Recover the full text from version control before
# editing further.
385 """CalibrationMapping is a Mapping subclass for calibration-type products. 387 The difference is that data properties in the query or template 388 can be looked up using a reference Mapping in addition to this one. 390 CalibrationMapping Policies can contain the following: 392 reference (string, optional) 393 a list of tables for finding missing dataset 394 identifier components (including the observation time, if a validity range 395 is required) in the exposure registry; note that the "tables" entry refers 396 to the calibration registry 398 refCols (string, optional) 399 a list of dataset properties required from the 400 reference tables for lookups in the calibration registry 403 true if the calibration dataset has a validity range 404 specified by a column in the tables of the reference dataset in the 405 exposure registry) and two columns in the tables of this calibration 406 dataset in the calibration registry) 408 obsTimeName (string, optional) 409 the name of the column in the reference 410 dataset tables containing the observation time (default "taiObs") 412 validStartName (string, optional) 413 the name of the column in the 414 calibration dataset tables containing the start of the validity range 415 (default "validStart") 417 validEndName (string, optional) 418 the name of the column in the 419 calibration dataset tables containing the end of the validity range 425 Butler dataset type to be mapped. 426 policy : `daf_persistence.Policy` 428 registry : `lsst.obs.base.Registry` 429 Registry for metadata lookups 430 calibRegistry : `lsst.obs.base.Registry` 431 Registry for calibration metadata lookups. 433 Path of calibration root directory. 435 Path of data root directory; used for outputs only. 438 def __init__(self, datasetType, policy, registry, calibRegistry, calibRoot, dataRoot=None, **kwargs):
# Calibrations are resolved against the calibration registry/root, not the
# exposure registry passed in as `registry`.
439 Mapping.__init__(self, datasetType, policy, calibRegistry, calibRoot, **kwargs)
# Optional reference tables / columns for cross-registry lookups.
440 self.
reference = policy.asArray(
"reference")
if "reference" in policy
else None 441 self.
refCols = policy.asArray(
"refCols")
if "refCols" in policy
else None 444 if "validRange" in policy
and policy[
"validRange"]:
# Validity range: (placeholder, start-column, end-column) per the policy.
445 self.
range = (
"?", policy[
"validStartName"], policy[
"validEndName"])
# NOTE(review): the assignments guarded by the three conditions below were
# lost in extraction.
446 if "columns" in policy:
448 if "filter" in policy:
451 if "metadataKey" in policy:
# NOTE(review): `CalibrationMapping.map` delegates to the base implementation;
# the remainder of the method (presumably handling of `write`/`dataRoot` and
# the return of `location` — TODO confirm against version control) is missing
# from this extraction.
454 def map(self, mapper, dataId, write=False):
455 location = Mapping.map(self, mapper, dataId, write=write)
# NOTE(review): garbled extraction of `CalibrationMapping.lookup`; the logic
# that builds `where`/`values` and performs the reference-registry query is
# largely missing.  Recover the full method from version control before
# editing.
462 """Look up properties for in a metadata registry given a partial 467 properties : `list` of `str` 468 Properties to look up. 475 Values of properties. 481 newId = dataId.copy()
485 for k, v
in dataId.items():
494 for k
in dataId.keys():
497 columns =
set(properties)
# If nothing needs a reference lookup, defer to the base class directly.
501 return Mapping.lookup(self, properties, newId)
503 lookupDataId = dict(zip(where, values))
# A calibration must resolve to exactly one reference row.
505 if len(lookups) != 1:
506 raise RuntimeError(
"No unique lookup for %s from %s: %d matches" %
507 (columns, dataId, len(lookups)))
508 if columns ==
set(properties):
# Fold the looked-up reference values into the data id, then finish with the
# base-class lookup against the calibration registry.
511 for i, prop
in enumerate(columns):
512 newId[prop] = lookups[0][i]
513 return Mapping.lookup(self, properties, newId)
def standardize(self, mapper, item, dataId):
    """Default standardization function for calibration datasets.

    If the item is of a type that should be standardized, the base class
    ``standardizeExposure`` method is called, otherwise the item is returned
    unmodified.

    Parameters
    ----------
    mapper : `lsst.obs.base.Mapping`
        Mapping object to pass through.
    item : object
        Will be standardized if of type lsst.afw.image.Exposure,
        lsst.afw.image.DecoratedImage, lsst.afw.image.Image
        or lsst.afw.image.MaskedImage
    dataId : `dict`
        Dataset identifier.

    Returns
    -------
    `lsst.afw.image.Exposure` or item
        The standardized object.
    """
    # self.python names the retrieved type; only image-like types are
    # standardized, everything else passes through untouched.
    if issubclass(doImport(self.python), (Exposure, MaskedImage, Image, DecoratedImage)):
        return mapper._standardizeExposure(self, item, dataId, filter=self.setFilter)
    return item
class DatasetMapping(Mapping):
    """DatasetMapping is a Mapping subclass for non-Exposure datasets that can
    be retrieved by the standard daf_persistence mechanism.

    The differences are that the Storage type must be specified and no
    Exposure standardization is performed.

    The "storage" entry in the Policy is mandatory; the "tables" entry is
    optional; no "level" entry is allowed.

    Parameters
    ----------
    datasetType : `str`
        Butler dataset type to be mapped.
    policy : `daf_persistence.Policy`
        Mapping Policy.
    registry : `lsst.obs.base.Registry`
        Registry for metadata lookups
    root : `str`
        Path of root directory
    """

    def __init__(self, datasetType, policy, registry, root, **kwargs):
        Mapping.__init__(self, datasetType, policy, registry, root, **kwargs)
        # "storage" is mandatory for this mapping type (see class docstring),
        # so it is read unconditionally rather than defaulted.
        self.storage = policy["storage"]
def format(config, name=None, writeSourceLine=True, prefix="", verbose=False)
def __init__(self, datasetType, policy, registry, root, kwargs)
def __init__(self, datasetType, policy, registry, calibRegistry, calibRoot, dataRoot=None, kwargs)
daf::base::PropertySet * set
def __init__(self, datasetType, policy, registry, root, kwargs)
def standardize(self, mapper, item, dataId)
def map(self, mapper, dataId, write=False)
def have(self, properties, dataId)
def standardize(self, mapper, item, dataId)
def need(self, properties, dataId)
def lookup(self, properties, dataId)
def __init__(self, datasetType, policy, registry, root, kwargs)
def lookup(self, properties, dataId)
def __init__(self, datasetType, policy, registry, rootStorage, provided=None)
Backwards-compatibility support for depersisting the old Calib (FluxMag0/FluxMag0Err) objects...
daf::base::PropertyList * list
def map(self, mapper, dataId, write=False)