|
def __init__(self, inputPolicy=None, **kwargs)
def bypass_ccdExposureId(self, datasetType, pythonType, location, dataId)
def bypass_ccdExposureId_bits(self, datasetType, pythonType, location, dataId)
def validate(self, dataId)
def backup(self, datasetType, dataId)
def keys(self)
def getKeys(self, datasetType, level)
def getDefaultLevel(self)
def getDefaultSubLevel(self, level)
def getCameraName(cls)
def getPackageName(cls)
def getPackageDir(cls)
def map_camera(self, dataId, write=False)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def map_expIdInfo(self, dataId, write=False)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def std_bfKernel(self, item, dataId)
def std_raw(self, item, dataId)
def map_skypolicy(self, dataId)
def std_skypolicy(self, item, dataId)
def getRegistry(self)
def getImageCompressionSettings(self, datasetType, dataId)
def __new__(cls, *args, **kwargs)
def __getstate__(self)
def __setstate__(self, state)
def queryMetadata(self, datasetType, format, dataId)
def getDatasetTypes(self)
def map(self, datasetType, dataId, write=False)
def canStandardize(self, datasetType)
def standardize(self, datasetType, item, dataId)
|
Camera mapper for the Test camera.
Definition at line 34 of file testMapper.py.
def lsst.daf.persistence.mapper.Mapper.__new__(cls, *args, **kwargs)
inherited
Create a new Mapper, saving arguments for pickling.
This is in __new__ instead of __init__ to save the user
from having to save the arguments themselves (either explicitly,
or by calling the super's __init__ with all their
*args, **kwargs). The resulting pickling system (of __new__,
__getstate__ and __setstate__) is similar to how __reduce__
is usually used, except that we save the user from any
responsibility (except when overriding __new__, but that
is not common).
Definition at line 82 of file mapper.py.
82 def __new__(cls, *args, **kwargs):
83 """Create a new Mapper, saving arguments for pickling.
85 This is in __new__ instead of __init__ to save the user
86 from having to save the arguments themselves (either explicitly,
87 or by calling the super's __init__ with all their
88 *args,**kwargs. The resulting pickling system (of __new__,
89 __getstate__ and __setstate__ is similar to how __reduce__
90 is usually used, except that we save the user from any
91 responsibility (except when overriding __new__, but that
94 self = super().__new__(cls)
95 self._arguments = (args, kwargs)
def lsst.obs.base.cameraMapper.CameraMapper.backup(self, datasetType, dataId)
inherited
Rename any existing object with the given type and dataId.
The CameraMapper implementation saves objects in a sequence of e.g.:
- foo.fits
- foo.fits~1
- foo.fits~2
All of the backups will be placed in the output repo, however, and will
not be removed if they are found elsewhere in the _parent chain. This
means that the same file will be stored twice if the previous version
was found in an input repo.
Reimplemented from lsst.daf.persistence.mapper.Mapper.
Definition at line 591 of file cameraMapper.py.
591 def backup(self, datasetType, dataId):
592 """Rename any existing object with the given type and dataId.
594 The CameraMapper implementation saves objects in a sequence of e.g.:
600 All of the backups will be placed in the output repo, however, and will
601 not be removed if they are found elsewhere in the _parent chain. This
602 means that the same file will be stored twice if the previous version
603 was found in an input repo.
612 def firstElement(list):
613 """Get the first element in the list, or None if that can't be
616 return list[0] if list is not None and len(list) else None
619 newLocation = self.map(datasetType, dataId, write=True)
620 newPath = newLocation.getLocations()[0]
621 path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
622 path = firstElement(path)
624 while path is not None:
626 oldPaths.append((n, path))
627 path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
628 path = firstElement(path)
629 for n, oldPath in reversed(oldPaths):
630 self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
def lsst.daf.persistence.mapper.Mapper.map(self, datasetType, dataId, write=False)
inherited
Map a data id using the mapping method for its dataset type.
Parameters
----------
datasetType : string
The datasetType to map
dataId : DataId instance
The dataId to use when mapping
write : bool, optional
Indicates if the map is being performed for a read operation
(False) or a write operation (True)
Returns
-------
ButlerLocation or a list of ButlerLocation
The location(s) found for the map operation. If write is True, a
list is returned. If write is False a single ButlerLocation is
returned.
Raises
------
NoResults
    If no location was found for this map operation, the derived mapper
class may raise a lsst.daf.persistence.NoResults exception. Butler
catches this and will look in the next Repository if there is one.
Definition at line 135 of file mapper.py.
135 def map(self, datasetType, dataId, write=False):
136 """Map a data id using the mapping method for its dataset type.
141 The datasetType to map
142 dataId : DataId instance
143 The dataId to use when mapping
144 write : bool, optional
145 Indicates if the map is being performed for a read operation
146 (False) or a write operation (True)
150 ButlerLocation or a list of ButlerLocation
151 The location(s) found for the map operation. If write is True, a
152 list is returned. If write is False a single ButlerLocation is
158 If no locaiton was found for this map operation, the derived mapper
159 class may raise a lsst.daf.persistence.NoResults exception. Butler
160 catches this and will look in the next Repository if there is one.
162 func = getattr(self, 'map_' + datasetType)
163 return func(self.validate(dataId), write)
def lsst.obs.test.testMapper.TestMapper.validate(self, dataId)
Validate a dataId's contents.
If the dataId is valid, return it. If an invalid component can be
transformed into a valid one, copy the dataId, fix the component, and
return the copy. Otherwise, raise an exception.
Reimplemented from lsst.daf.persistence.mapper.Mapper.
Definition at line 107 of file testMapper.py.
108 visit = dataId.get("visit")
109 if visit is not None and not isinstance(visit, int):
110 dataId["visit"] = int(visit)