"""Shim classes that provide (limited) Gen2 Butler interfaces to Gen3
data repositories.

All of the classes here still operate on Gen3 data IDs - the shim layer
essentially assumes that Gen2 Tasks treat the data ID as an opaque blob,
which is usually (but not always) true.  When it isn't, the best approach
now is probably to use the GENERATION class attribute on the Butler classes
to special-case code for each generation.
"""

__all__ = ("ShimButler",
           "ShimButlerSubset",
           "ShimDataRef")
import lsst.afw.image

from lsst.daf.butler import StorageClassFactory
from lsst.daf.persistence import NoResults
class ShimButler:
    """A shim for a Gen2 `~lsst.daf.persistence.Butler` with a Gen3
    `~lsst.daf.butler.Butler` backend.

    Parameters
    ----------
    butler3 : `lsst.daf.butler.Butler`
        Generation 3 Butler instance.
    """

    # NOTE(review): the numeric value was not visible in the garbled
    # original; shim classes conventionally report 2.5 -- confirm upstream.
    GENERATION = 2.5
    """This is a Generation 2 shim for a Generation3 Butler.
    """

    def __init__(self, butler3):
        # NOTE(review): __init__ body not visible in the garbled original;
        # reconstructed from the ``self._butler3`` accesses elsewhere in
        # this class -- confirm upstream.
        self._butler3 = butler3

    def _makeDataId(self, dataId=None, **rest):
        """Construct a full data ID by merging the given argument with the
        given keyword arguments.

        Parameters
        ----------
        dataId : `dict` or `~lsst.daf.butler.DataId`, optional
            A (possibly partial) data ID to start from.
        **rest
            Additional key-value pairs to augment the given data ID.

        Returns
        -------
        fullDataId : `dict`
            Merged data ID; entries in ``rest`` override those in ``dataId``.
        """
        fullDataId = {}
        if dataId is not None:
            fullDataId.update(dataId)
        fullDataId.update(rest)
        return fullDataId
65 def _translateDatasetType(self, datasetType):
66 if "_" in datasetType:
67 if datasetType.endswith(
"_md"):
68 return f
"{datasetType[:-3]}.metadata" 69 for component
in StorageClassFactory().getStorageClass(
"Exposure").components:
70 suffix = f
"_{component}" 71 if datasetType.endswith(suffix):
72 return "{}.{}".
format(datasetType[:-len(suffix)], component)
76 """Check whether a datataset exists in the repository. 81 Name of the Gen2 dataset type. 82 dataId : `dict` or `~lsst.daf.butler.DataId`, optional 83 A Generation 3 data ID that identifies the dataset. 85 This option is provided for compatibility with 86 `lsst.daf.persistence.Butler`, but must be `False`. 88 Additional key-value pairs to augment the given data ID. 93 `True` if the dataset is present in the repository, `False` 97 raise NotImplementedError(
"ShimButler cannot implement datasetExists with 'write=True'")
107 def get(self, datasetType, dataId=None, immediate=True, **rest):
108 """Retrieve a dataset. 113 Name of the Gen2 dataset type. 114 dataId : `dict` or `~lsst.daf.butler.DataId`, optional 115 A Generation 3 data ID that identifies the dataset. 117 This option is provided for compatibility with 118 `lsst.daf.persistence.Butler`, but is ignored. 120 Additional key-value pairs to augment the given data ID. 129 `~lsst.daf.persistence.NoResults` 130 Raised if the dataset does not exist. 134 if datasetType.endswith(
"_sub"):
136 datasetType = datasetType[:-len(
"_sub")]
137 parameters = dict(bbox=fullDataId.pop(
"bbox"))
138 origin = fullDataId.pop(
"imageOrigin", lsst.afw.image.PARENT)
139 parameters[
"origin"] = origin
143 return self.
_butler3.
get(datasetType, fullDataId, parameters=parameters)
144 except (FileNotFoundError, LookupError)
as err:
145 raise NoResults(str(err), datasetType, fullDataId)
147 def put(self, obj, datasetType, dataId=None, doBackup=False, **rest):
155 Name of the Gen2 dataset type. 156 dataId : `dict` or `~lsst.daf.butler.DataId`, optional 157 A Generation 3 data ID that identifies the dataset. 159 This option is provided for compatibility with 160 `lsst.daf.persistence.Butler`, but must be `False`. 162 Additional key-value pairs to augment the given data ID. 165 raise NotImplementedError(
"ShimButler cannot implement put with 'doBackup=True'")
169 def dataRef(self, datasetType, level=None, dataId=None, **rest):
170 """Return a DataRef associated with the given dataset type and data ID. 175 Name of the dataset type. 176 dataId : `dict` or `~lsst.daf.butler.DataId`, optional 177 A Generation 3 data ID that identifies the dataset. 179 This option is provided for compatibility with 180 `lsst.daf.persistence.Butler`, but must be `None`. 182 Additional key-value pairs to augment the given data ID. 184 if level
is not None:
185 raise NotImplementedError(
"ShimButler cannot implement dataRef with 'level != None'")
187 if dataId
is not None:
188 fullDataId.update(dataId)
189 fullDataId.update(rest)
class ShimButlerSubset:
    """A shim for a Gen2 `~lsst.daf.persistence.ButlerSubset` with a Gen3
    `~lsst.daf.butler.Butler` backend.

    Parameters
    ----------
    butler : `ShimButler`
        Butler shim instance.
    datasetType : `str`
        Name of the dataset type.
    dataIds : iterable of `dict` or `~lsst.daf.butler.DataId`
        Generation 3 data IDs that define the data in this subset.
    """

    # NOTE(review): the numeric value was not visible in the garbled
    # original; shim classes conventionally report 2.5 -- confirm upstream.
    GENERATION = 2.5
    """This is a Generation 2 shim for a Generation3 Butler.
    """

    def __init__(self, butler, datasetType, dataIds):
        # NOTE(review): body not visible in the garbled original;
        # reconstructed from the attribute accesses in ShimDataRef and the
        # documented parameters -- confirm upstream.
        self.butler = butler
        self.datasetType = datasetType
        self.dataIds = list(dataIds)


class ShimDataRef:
    """A shim for a Gen2 `~lsst.daf.persistence.ButlerDataRef` with a Gen3
    `~lsst.daf.butler.Butler` backend.

    Parameters
    ----------
    butlerSubset : `ShimButlerSubset`
        ButlerSubset shim instance.  Sets the butler and default dataset
        type used by this reference.
    dataId : `dict` or `~lsst.daf.butler.DataId`
        Generation 3 data ID associated with this reference.
    """

    # NOTE(review): the numeric value was not visible in the garbled
    # original; shim classes conventionally report 2.5 -- confirm upstream.
    GENERATION = 2.5
    """This is a Generation 2 shim for a Generation3 Butler.
    """

    def __init__(self, butlerSubset, dataId):
        # NOTE(review): body not visible in the garbled original;
        # reconstructed from the self.butlerSubset / self.dataId accesses
        # in the methods below -- confirm upstream.
        self.butlerSubset = butlerSubset
        self.dataId = dataId

    def get(self, datasetType=None, **rest):
        """Retrieve a dataset.

        Parameters
        ----------
        datasetType : `str`, optional
            Name of the dataset type.  Defaults to the dataset type used to
            construct the `ShimButlerSubset`.
        **rest
            Additional arguments forwarded to `ShimButler.get`.

        Returns
        -------
        dataset
            Retrieved object.

        Raises
        ------
        `~lsst.daf.persistence.NoResults`
            Raised if the dataset does not exist.
        """
        if datasetType is None:
            datasetType = self.butlerSubset.datasetType
        return self.butlerSubset.butler.get(datasetType, self.dataId, **rest)
270 def put(self, obj, datasetType=None, doBackup=False, **rest):
277 datasetType : `str`, optional 278 Name of the dataset type. Defaults to the dataset type used to 279 construct the `ShimButlerSubset`. 281 This option is provided for compatibility with 282 `lsst.daf.persistence.ButlerDataRef`, but must be `False`. 284 Additional arguments forwarded to `ShimButler.put`. 286 if datasetType
is None:
288 self.
butlerSubset.butler.put(obj, datasetType, self.
dataId, doBackup=doBackup, **rest)
291 """Check whether a datataset exists in the repository. 295 datasetType : `str`, optional 296 Name of the dataset type. Defaults to the dataset type used to 297 construct the `ShimButlerSubset`. 299 This option is provided for compatibility with 300 `lsst.daf.persistence.ButlerDataRef`, but must be `False`. 302 Additional arguments forwarded to `ShimButler.datasetExists`. 307 `True` if the dataset is present in the repository, `False` 310 if datasetType
is None:
313 datasetType, self.
dataId, write=write, **rest)
316 """Return the (shim) Butler used by this DataRef. def format(config, name=None, writeSourceLine=True, prefix="", verbose=False)
def __init__(self, butler, datasetType, dataIds)
def __init__(self, butlerSubset, dataId)
def datasetExists(self, datasetType, dataId=None, write=False, rest)
def put(self, obj, datasetType=None, doBackup=False, rest)
def _makeDataId(self, dataId=None, rest)
def __init__(self, butler3)
def get(self, datasetType=None, rest)
def get(self, datasetType, dataId=None, immediate=True, rest)
def dataRef(self, datasetType, level=None, dataId=None, rest)
def _translateDatasetType(self, datasetType)
Backwards-compatibility support for depersisting the old Calib (FluxMag0/FluxMag0Err) objects...
def datasetExists(self, datasetType=None, write=False, rest)
def put(self, obj, datasetType, dataId=None, doBackup=False, rest)