22 __all__ = (
"FitsExposureFormatter",
"FitsImageFormatter",
"FitsMaskFormatter",
23 "FitsMaskedImageFormatter")
from astro_metadata_translator import fix_header

from lsst.afw.image import ExposureFitsReader, ImageFitsReader, MaskFitsReader, MaskedImageFitsReader
from lsst.daf.base import PropertySet
from lsst.daf.butler import Formatter
37 """Interface for reading and writing Exposures to and from FITS files.
39 This Formatter supports write recipes.
41 Each ``FitsExposureFormatter`` recipe for FITS compression should
42 define ``image``, ``mask`` and ``variance`` entries, each of which may
43 contain ``compression`` and ``scaling`` entries. Defaults will be
provided for any missing elements under ``compression`` and ``scaling``.
47 The allowed entries under ``compression`` are:
49 * ``algorithm`` (`str`): compression algorithm to use
50 * ``rows`` (`int`): number of rows per tile (0 = entire dimension)
51 * ``columns`` (`int`): number of columns per tile (0 = entire dimension)
52 * ``quantizeLevel`` (`float`): cfitsio quantization level
54 The allowed entries under ``scaling`` are:
56 * ``algorithm`` (`str`): scaling algorithm to use
57 * ``bitpix`` (`int`): bits per pixel (0,8,16,32,64,-32,-64)
58 * ``fuzz`` (`bool`): fuzz the values when quantising floating-point values?
59 * ``seed`` (`int`): seed for random number generator when fuzzing
* ``maskPlanes`` (`list` of `str`): mask planes to ignore when doing statistics
62 * ``quantizeLevel`` (`float`): divisor of the standard deviation for
64 * ``quantizePad`` (`float`): number of stdev to allow on the low side (for
65 ``STDEV_POSITIVE``/``NEGATIVE``)
66 * ``bscale`` (`float`): manually specified ``BSCALE``
67 (for ``MANUAL`` scaling)
* ``bzero`` (`float`): manually specified ``BZERO``
  (for ``MANUAL`` scaling)
71 A very simple example YAML recipe:
75 lsst.obs.base.fitsExposureFormatter.FitsExposureFormatter:
79 algorithm: GZIP_SHUFFLE
# File extensions this formatter is willing to read.
supportedExtensions = frozenset({".fits", ".fits.gz", ".fits.fz", ".fz", ".fit"})
# Only the compression "recipe" may be supplied as a write parameter.
supportedWriteParameters = frozenset({"recipe"})
88 _readerClass = ExposureFitsReader
# Empty mapping: every read parameter is supported by this formatter.
unsupportedParameters = {}
"""Support all parameters."""
95 """The metadata read from this file. It will be stripped as
96 components are extracted from it
97 (`lsst.daf.base.PropertyList`).
104 """Read all header metadata directly into a PropertyList.
108 metadata : `~lsst.daf.base.PropertyList`
116 """Remove metadata entries that are parsed into components.
118 This is only called when just the metadata is requested; stripping
119 entries there forces code that wants other components to ask for those
120 components directly rather than trying to extract them from the
121 metadata manually, which is fragile. This behavior is an intentional
126 metadata : `~lsst.daf.base.PropertyList`
127 Header metadata, to be modified in-place.
def readComponent(self, component=None, parameters=None):
    """Read a component held by the Exposure.

    Parameters
    ----------
    component : `str`, optional
        Component to read from the file.
    parameters : `dict`, optional
        If specified, a dictionary of slicing parameters that
        overrides those in ``fileDescriptor``.

    Returns
    -------
    obj : component-dependent
        In-memory component object.

    Raises
    ------
    KeyError
        Raised if the requested component cannot be handled.
    """
    # Map component name -> (reader method name, accepts slicing parameters?)
    componentMap = {
        'wcs': ('readWcs', False),
        'coaddInputs': ('readCoaddInputs', False),
        'psf': ('readPsf', False),
        'image': ('readImage', True),
        'mask': ('readMask', True),
        'variance': ('readVariance', True),
        'photoCalib': ('readPhotoCalib', False),
        'bbox': ('readBBox', True),
        'dimensions': ('readBBox', True),
        'xy0': ('readXY0', True),
        'filter': ('readFilter', False),
        'validPolygon': ('readValidPolygon', False),
        'apCorrMap': ('readApCorrMap', False),
        'visitInfo': ('readVisitInfo', False),
        'transmissionCurve': ('readTransmissionCurve', False),
        'detector': ('readDetector', False),
        'extras': ('readExtraComponents', False),
        'exposureInfo': ('readExposureInfo', False),
    }
    method, hasParams = componentMap.get(component, (None, False))

    if method:
        reader = self._readerClass(self.fileDescriptor.location.path)
        caller = getattr(reader, method, None)

        if caller:
            if parameters is None:
                parameters = self.fileDescriptor.parameters
            if parameters is None:
                parameters = {}
            self.fileDescriptor.storageClass.validateParameters(parameters)

            if hasParams and parameters:
                thisComponent = caller(**parameters)
            else:
                thisComponent = caller()
            # "dimensions" shares the BBox reader; extract just the extent.
            if component == "dimensions" and thisComponent is not None:
                thisComponent = thisComponent.getDimensions()
            return thisComponent
    else:
        raise KeyError(f"Unknown component requested: {component}")
def readFull(self, parameters=None):
    """Read the full Exposure object.

    Parameters
    ----------
    parameters : `dict`, optional
        If specified a dictionary of slicing parameters that overrides
        those in ``fileDescriptor``.

    Returns
    -------
    exposure : `~lsst.afw.image.Exposure`
        Complete in-memory exposure.
    """
    fileDescriptor = self.fileDescriptor
    if parameters is None:
        parameters = fileDescriptor.parameters
    if parameters is None:
        # No parameters supplied anywhere: validate and read an empty set.
        parameters = {}
    fileDescriptor.storageClass.validateParameters(parameters)
    reader = self._readerClass(fileDescriptor.location.path)
    return reader.read(**parameters)
def read(self, component=None):
    """Read data from a file.

    Parameters
    ----------
    component : `str`, optional
        Component to read from the file. Only used if the `StorageClass`
        for reading differed from the `StorageClass` used to write the
        file.

    Returns
    -------
    inMemoryDataset : `object`
        The requested data as a Python object. The type of object
        is controlled by the specific formatter.

    Raises
    ------
    ValueError
        Component requested but this file does not seem to be a concrete
        composite, or the storage classes are inconsistent and no
        component was requested.
    KeyError
        Raised when parameters passed with fileDescriptor are not
        supported.
    """
    fileDescriptor = self.fileDescriptor
    if fileDescriptor.readStorageClass != fileDescriptor.storageClass:
        if component == "metadata":
            # Metadata is special: strip entries that are parsed into
            # other components before handing it back.
            self.stripMetadata()
            return self.metadata
        elif component is not None:
            return self.readComponent(component)
        else:
            raise ValueError("Storage class inconsistency ({} vs {}) but no"
                             " component requested".format(fileDescriptor.readStorageClass.name,
                                                           fileDescriptor.storageClass.name))
    return self.readFull()
def write(self, inMemoryDataset):
    """Write a Python object to a file.

    Parameters
    ----------
    inMemoryDataset : `object`
        The Python object to store.

    Returns
    -------
    path : `str`
        The `URI` where the primary file is stored.
    """
    # Update the location with the formatter-preferred file extension.
    self.fileDescriptor.location.updateExtension(self.extension)
    outputPath = self.fileDescriptor.location.path

    # Check to see if a compression recipe was requested.
    recipeName = self.writeParameters.get("recipe")
    recipe = self.getImageCompressionSettings(recipeName)
    if recipe:
        # A PropertySet cannot be constructed from a hierarchical dict,
        # but an empty one can be updated from it.
        ps = PropertySet()
        ps.update(recipe)
        inMemoryDataset.writeFitsWithOptions(outputPath, options=ps)
    else:
        inMemoryDataset.writeFits(outputPath)
    return self.fileDescriptor.location.pathInStore
def getImageCompressionSettings(self, recipeName):
    """Retrieve the relevant compression settings for this recipe.

    Parameters
    ----------
    recipeName : `str`
        Label associated with the collection of compression parameters
        to select.

    Returns
    -------
    settings : `dict`
        The selected settings.

    Raises
    ------
    RuntimeError
        Raised if the named recipe is not present in ``writeRecipes``.
    """
    # If no recipe was requested and there is no default, return no
    # settings at all.
    if recipeName is None:
        if "default" not in self.writeRecipes:
            return {}
        recipeName = "default"

    if recipeName not in self.writeRecipes:
        raise RuntimeError(f"Unrecognized recipe option given for compression: {recipeName}")

    recipe = self.writeRecipes[recipeName]

    # Derive a per-dataset seed from the dataId; a stored seed of 0 in a
    # scaling entry means "fill in the dataId-derived seed here".
    seed = hash(tuple(self.dataId.items())) % 2**31
    for plane in ("image", "mask", "variance"):
        if plane in recipe and "scaling" in recipe[plane]:
            scaling = recipe[plane]["scaling"]
            if "seed" in scaling and scaling["seed"] == 0:
                scaling["seed"] = seed

    return recipe
@classmethod
def validateWriteRecipes(cls, recipes):
    """Validate supplied recipes for this formatter.

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `dict`
        Recipes to validate. Can be empty dict or `None`.

    Returns
    -------
    validated : `dict`
        Validated recipes. Returns what was given if there are no
        recipes listed.

    Raises
    ------
    RuntimeError
        Raised if validation fails.
    """
    # Schemas define the allowed keys and their default values; the type
    # of each default also fixes the expected type of a supplied entry.
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    if not recipes:
        # We cannot insist on recipes being specified.
        return recipes

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                f"Unrecognized entries when parsing image compression recipe {description}: "
                f"{unrecognized}")

    validated = {}
    for name in recipes:
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        validated[name] = {}
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              f"{name}->{plane}")

            np = {}  # validated settings for this plane
            validated[name][plane] = np
            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                np[settings] = {}
                if settings not in recipes[name][plane]:
                    # Nothing supplied: take every default from the schema.
                    for key in schema:
                        np[settings][key] = schema[key]
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), f"{name}->{plane}->{settings}")
                for key in schema:
                    # Coerce supplied values to the schema's type; fall
                    # back to the default when the key is absent.
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    np[settings][key] = value
    return validated
class FitsImageFormatter(FitsExposureFormatter):
    """Specialisation for `~lsst.afw.image.Image` reading.
    """

    # Use the plain-image reader instead of the Exposure reader.
    _readerClass = ImageFitsReader
class FitsMaskFormatter(FitsExposureFormatter):
    """Specialisation for `~lsst.afw.image.Mask` reading.
    """

    # Use the mask-plane reader instead of the Exposure reader.
    _readerClass = MaskFitsReader
class FitsMaskedImageFormatter(FitsExposureFormatter):
    """Specialisation for `~lsst.afw.image.MaskedImage` reading.
    """

    # Use the masked-image reader instead of the Exposure reader.
    _readerClass = MaskedImageFitsReader