__all__ = ("FitsExposureFormatter", "FitsImageFormatter", "FitsMaskFormatter",
           "FitsMaskedImageFormatter")

from astro_metadata_translator import fix_header

from lsst.daf.base import PropertySet
from lsst.daf.butler import Formatter
from lsst.afw.image import (ExposureFitsReader, ExposureInfo, ImageFitsReader, MaskFitsReader,
                            MaskedImageFitsReader)
38 """Interface for reading and writing Exposures to and from FITS files.
40 This Formatter supports write recipes.
42 Each ``FitsExposureFormatter`` recipe for FITS compression should
43 define ``image``, ``mask`` and ``variance`` entries, each of which may
44 contain ``compression`` and ``scaling`` entries. Defaults will be
45 provided for any missing elements under ``compression`` and

    The allowed entries under ``compression`` are:

    * ``algorithm`` (`str`): compression algorithm to use
    * ``rows`` (`int`): number of rows per tile (0 = entire dimension)
    * ``columns`` (`int`): number of columns per tile (0 = entire dimension)
    * ``quantizeLevel`` (`float`): cfitsio quantization level

    The allowed entries under ``scaling`` are:

    * ``algorithm`` (`str`): scaling algorithm to use
    * ``bitpix`` (`int`): bits per pixel (0, 8, 16, 32, 64, -32, -64)
    * ``fuzz`` (`bool`): fuzz the values when quantising floating-point values?
    * ``seed`` (`int`): seed for random number generator when fuzzing
    * ``maskPlanes`` (`list` of `str`): mask planes to ignore when doing
      statistics
    * ``quantizeLevel`` (`float`): divisor of the standard deviation for
      ``STDEV_*`` scaling
    * ``quantizePad`` (`float`): number of stdev to allow on the low side (for
      ``STDEV_POSITIVE``/``NEGATIVE``)
    * ``bscale`` (`float`): manually specified ``BSCALE``
      (for ``MANUAL`` scaling)
    * ``bzero`` (`float`): manually specified ``BZERO``
      (for ``MANUAL`` scaling)

    A very simple example YAML recipe:

    .. code-block:: yaml

        lsst.obs.base.fitsExposureFormatter.FitsExposureFormatter:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
    """
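
    # Purely illustrative sketch: a hypothetical recipe labelled "lossyBasic"
    # (a made-up name) combining the ``compression`` and ``scaling`` entries
    # documented above.  The particular values are examples, not
    # recommendations.
    #
    #     lsst.obs.base.fitsExposureFormatter.FitsExposureFormatter:
    #       lossyBasic:
    #         image:
    #           compression:
    #             algorithm: GZIP_SHUFFLE
    #             quantizeLevel: 10.0
    #           scaling:
    #             algorithm: STDEV_POSITIVE
    #             bitpix: 32
    #             maskPlanes: ["NO_DATA"]
    #             seed: 0
    #         mask:
    #           compression:
    #             algorithm: GZIP_SHUFFLE
    #         variance:
    #           compression:
    #             algorithm: GZIP_SHUFFLE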

    supportedExtensions = frozenset({".fits", ".fits.gz", ".fits.fz", ".fz", ".fit"})
    supportedWriteParameters = frozenset({"recipe"})
    _readerClass = ExposureFitsReader

    unsupportedParameters = {}
    """Support all parameters."""
96 """The metadata read from this file. It will be stripped as
97 components are extracted from it
98 (`lsst.daf.base.PropertyList`).
105 """Read all header metadata directly into a PropertyList.
109 metadata : `~lsst.daf.base.PropertyList`
117 """Remove metadata entries that are parsed into components.
119 This is only called when just the metadata is requested; stripping
120 entries there forces code that wants other components to ask for those
121 components directly rather than trying to extract them from the
122 metadata manually, which is fragile. This behavior is an intentional
127 metadata : `~lsst.daf.base.PropertyList`
128 Header metadata, to be modified in-place.
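
    # Sketch of the intended effect, assuming standard Gen3 Butler component
    # access (the dataset type and data ID below are hypothetical): requesting
    # the metadata component returns headers with component-backed entries
    # stripped, so other components must be requested directly, e.g.
    #
    #     md = butler.get("calexp.metadata", dataId)  # stripped headers
    #     wcs = butler.get("calexp.wcs", dataId)      # WCS read as a component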
146 """Read a component held by the Exposure.
150 component : `str`, optional
151 Component to read from the file.
152 parameters : `dict`, optional
153 If specified, a dictionary of slicing parameters that
154 overrides those in ``fileDescriptor``.
158 obj : component-dependent
159 In-memory component object.
164 Raised if the requested component cannot be handled.
        componentMap = {
            'wcs': ('readWcs', False, None),
            'coaddInputs': ('readCoaddInputs', False, None),
            'psf': ('readPsf', False, None),
            'image': ('readImage', True, None),
            'mask': ('readMask', True, None),
            'variance': ('readVariance', True, None),
            'photoCalib': ('readPhotoCalib', False, None),
            'bbox': ('readBBox', True, None),
            'dimensions': ('readBBox', True, None),
            'xy0': ('readXY0', True, None),
            'filter': ('readFilter', False, None),
            'filterLabel': ('readFilterLabel', False, None),
            'validPolygon': ('readValidPolygon', False, None),
            'apCorrMap': ('readApCorrMap', False, None),
            'visitInfo': ('readVisitInfo', False, None),
            'transmissionCurve': ('readTransmissionCurve', False, None),
            'detector': ('readDetector', False, None),
            'exposureInfo': ('readExposureInfo', False, None),
            'summaryStats': ('readComponent', False, ExposureInfo.KEY_SUMMARY_STATS),
        }
        method, hasParams, componentName = componentMap.get(component, (None, False, None))
        if method:
            reader = self._readerClass(self.fileDescriptor.location.path)
            caller = getattr(reader, method, None)
            if caller:
                if parameters is None:
                    parameters = self.fileDescriptor.parameters
                if parameters is None:
                    parameters = {}
                self.fileDescriptor.storageClass.validateParameters(parameters)

                if componentName is None:
                    if hasParams and parameters:
                        thisComponent = caller(**parameters)
                    else:
                        thisComponent = caller()
                else:
                    thisComponent = caller(componentName)
                if component == "dimensions" and thisComponent is not None:
                    thisComponent = thisComponent.getDimensions()
                return thisComponent
        else:
            raise KeyError(f"Unknown component requested: {component}")
220 """Read the full Exposure object.
224 parameters : `dict`, optional
225 If specified a dictionary of slicing parameters that overrides
226 those in ``fileDescriptor``.
230 exposure : `~lsst.afw.image.Exposure`
231 Complete in-memory exposure.
233 fileDescriptor = self.fileDescriptor
234 if parameters
is None:
235 parameters = fileDescriptor.parameters
236 if parameters
is None:
238 fileDescriptor.storageClass.validateParameters(parameters)
239 reader = self.
_readerClass_readerClass(fileDescriptor.location.path)
240 return reader.read(**parameters)
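
    # Sketch of a sub-image read (assumption: the afw FITS readers accept
    # ``bbox``/``origin`` slicing parameters, as the Exposure storage class
    # normally allows):
    #
    #     import lsst.geom
    #     bbox = lsst.geom.Box2I(lsst.geom.Point2I(0, 0), lsst.geom.Extent2I(100, 100))
    #     exposure = formatter.readFull(parameters={"bbox": bbox})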

    def read(self, component=None):
        """Read data from a file.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.

        Raises
        ------
        ValueError
            Component requested but this file does not seem to be a concrete
            composite.
        KeyError
            Raised when parameters passed with fileDescriptor are not
            supported.
        """
        fileDescriptor = self.fileDescriptor
        if fileDescriptor.readStorageClass != fileDescriptor.storageClass:
            if component == "metadata":
                # Return the header metadata, stripped of entries that are
                # available as separate components.
                metadata = self.metadata
                self.stripMetadata(metadata)
                return metadata
            elif component is not None:
                return self.readComponent(component)
            else:
                raise ValueError("Storage class inconsistency ({} vs {}) but no"
                                 " component requested".format(fileDescriptor.readStorageClass.name,
                                                               fileDescriptor.storageClass.name))
        return self.readFull()
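
    # Dispatch summary for read() (illustrative; assumes the butler has set
    # the read storage class appropriately for component access):
    #
    #     formatter.read()                      # full Exposure via readFull()
    #     formatter.read(component="metadata")  # stripped header PropertyList
    #     formatter.read(component="wcs")       # single component via readComponent()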
281 """Write a Python object to a file.
285 inMemoryDataset : `object`
286 The Python object to store.
289 self.fileDescriptor.location.updateExtension(self.
extensionextension)
290 outputPath = self.fileDescriptor.location.path
293 recipeName = self.writeParameters.get(
"recipe")
300 inMemoryDataset.writeFitsWithOptions(outputPath, options=ps)
302 inMemoryDataset.writeFits(outputPath)
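
    # Sketch of the options hierarchy handed to writeFitsWithOptions: the
    # validated recipe is converted to a PropertySet whose layout mirrors the
    # recipe itself (values shown here are illustrative):
    #
    #     image:    {compression: {algorithm: GZIP_SHUFFLE, ...}, scaling: {...}}
    #     mask:     {compression: {...}, scaling: {...}}
    #     variance: {compression: {...}, scaling: {...}}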
305 """Retrieve the relevant compression settings for this recipe.
310 Label associated with the collection of compression parameters
316 The selected settings.
321 if "default" not in self.writeRecipes:
323 recipeName =
"default"
325 if recipeName
not in self.writeRecipes:
326 raise RuntimeError(f
"Unrecognized recipe option given for compression: {recipeName}")
328 recipe = self.writeRecipes[recipeName]
331 seed = hash(tuple(self.dataId.
items())) % 2**31
332 for plane
in (
"image",
"mask",
"variance"):
333 if plane
in recipe
and "scaling" in recipe[plane]:
334 scaling = recipe[plane][
"scaling"]
335 if "seed" in scaling
and scaling[
"seed"] == 0:
336 scaling[
"seed"] = seed
342 """Validate supplied recipes for this formatter.
344 The recipes are supplemented with default values where appropriate.
346 TODO: replace this custom validation code with Cerberus (DM-11846)
351 Recipes to validate. Can be empty dict or `None`.
356 Validated recipes. Returns what was given if there are no
362 Raised if validation fails.
366 compressionSchema = {
370 "quantizeLevel": 0.0,
375 "maskPlanes": [
"NO_DATA"],
377 "quantizeLevel": 4.0,
388 def checkUnrecognized(entry, allowed, description):
389 """Check to see if the entry contains unrecognised keywords"""
390 unrecognized =
set(entry) -
set(allowed)
393 f
"Unrecognized entries when parsing image compression recipe {description}: "
398 checkUnrecognized(recipes[name], [
"image",
"mask",
"variance"], name)
400 for plane
in (
"image",
"mask",
"variance"):
401 checkUnrecognized(recipes[name][plane], [
"compression",
"scaling"],
405 validated[name][plane] = np
406 for settings, schema
in ((
"compression", compressionSchema),
407 (
"scaling", scalingSchema)):
409 if settings
not in recipes[name][plane]:
411 np[settings][key] = schema[key]
413 entry = recipes[name][plane][settings]
414 checkUnrecognized(entry, schema.keys(), f
"{name}->{plane}->{settings}")
416 value =
type(schema[key])(entry[key])
if key
in entry
else schema[key]
417 np[settings][key] = value
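
    # Illustrative validation behaviour (a sketch): a recipe that only sets
    # the compression algorithm for each plane,
    #
    #     {"default": {"image": {"compression": {"algorithm": "GZIP_SHUFFLE"}},
    #                  "mask": {"compression": {"algorithm": "GZIP_SHUFFLE"}},
    #                  "variance": {"compression": {"algorithm": "GZIP_SHUFFLE"}}}}
    #
    # is returned with full "compression" and "scaling" sections for every
    # plane, unspecified keys filled from the schema defaults; unrecognised
    # keys anywhere raise RuntimeError.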
422 """Specialisation for `~lsst.afw.image.Image` reading.
425 _readerClass = ImageFitsReader
429 """Specialisation for `~lsst.afw.image.Mask` reading.
432 _readerClass = MaskFitsReader
436 """Specialisation for `~lsst.afw.image.MaskedImage` reading.
439 _readerClass = MaskedImageFitsReader