26 import lsst.pipe.base.connectionTypes
as cT
32 from astro_metadata_translator
import merge_headers, ObservationGroup
33 from astro_metadata_translator.serialize
import dates_to_fits
38 """Parameters controlling the measurement of background statistics.
40 stat = pexConfig.Field(
43 doc=
"Statistic name to use to estimate background (from lsst.afw.math)",
45 clip = pexConfig.Field(
48 doc=
"Clipping threshold for background",
50 nIter = pexConfig.Field(
53 doc=
"Clipping iterations for background",
55 mask = pexConfig.ListField(
57 default=[
"DETECTED",
"BAD",
"NO_DATA"],
58 doc=
"Mask planes to reject",
63 """Measure statistics on the background
65 This can be useful for scaling the background, e.g., for flats and fringe frames.
67 ConfigClass = CalibStatsConfig
69 def run(self, exposureOrImage):
70 """Measure a particular statistic on an image (of some sort).
74 exposureOrImage : `lsst.afw.image.Exposure`, `lsst.afw.image.MaskedImage`, or `lsst.afw.image.Image`
75 Exposure or image to calculate statistics on.
80 Resulting statistic value.
83 afwImage.Mask.getPlaneBitMask(self.config.mask))
85 image = exposureOrImage.getMaskedImage()
88 image = exposureOrImage.getImage()
90 image = exposureOrImage
class CalibCombineConnections(pipeBase.PipelineTaskConnections,
                              dimensions=("instrument", "detector")):
    # NOTE(review): the dataset ``name=`` values and the ``multiple=``/
    # ``isCalibration=`` flags were lost in extraction; reconstructed —
    # confirm against upstream cp_pipe.
    inputExps = cT.Input(
        name="cpInputs",
        doc="Input pre-processed exposures to combine.",
        storageClass="Exposure",
        dimensions=("instrument", "detector", "exposure"),
        multiple=True,
    )
    inputScales = cT.Input(
        name="cpScales",
        doc="Input scale factors to use.",
        storageClass="StructuredDataDict",
        dimensions=("instrument", ),
        multiple=False,
    )
    outputData = cT.Output(
        name="cpProposal",
        doc="Output combined proposed calibration to be validated and certified..",
        storageClass="ExposureF",
        dimensions=("instrument", "detector"),
        isCalibration=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # The scale-factor input is only needed for list-based scaling;
        # drop it otherwise so the quantum graph does not require it.
        if config and config.exposureScaling != 'InputList':
            self.inputs.discard("inputScales")
class CalibCombineConfig(pipeBase.PipelineTaskConfig,
                         pipelineConnections=CalibCombineConnections):
    """Configuration for combining calib exposures."""
    # NOTE(review): dtype/default values were lost in extraction; reconstructed
    # from the visible docs — confirm against upstream cp_pipe.
    calibrationType = pexConfig.Field(
        dtype=str,
        default="calibration",
        doc="Name of calibration to be generated.",
    )
    exposureScaling = pexConfig.ChoiceField(
        dtype=str,
        allowed={
            "Unity": "Do not scale inputs. Scale factor is 1.0.",
            "ExposureTime": "Scale inputs by their exposure time.",
            "DarkTime": "Scale inputs by their dark time.",
            "MeanStats": "Scale inputs based on their mean values.",
            "InputList": "Scale inputs based on a list of values.",
        },
        default="Unity",
        doc="Scaling to be applied to each input exposure.",
    )
    scalingLevel = pexConfig.ChoiceField(
        dtype=str,
        allowed={
            "DETECTOR": "Scale by detector.",
            "AMP": "Scale by amplifier.",
        },
        default="DETECTOR",
        doc="Region to scale.",
    )
    maxVisitsToCalcErrorFromInputVariance = pexConfig.Field(
        dtype=int,
        default=5,
        doc="Maximum number of visits to estimate variance from input variance, not per-pixel spread",
    )
    doVignette = pexConfig.Field(
        dtype=bool,
        default=False,
        doc="Copy vignette polygon to output and censor vignetted pixels?"
    )
    mask = pexConfig.ListField(
        dtype=str,
        default=["SAT", "DETECTED", "INTRP"],
        doc="Mask planes to respect",
    )
    combine = pexConfig.Field(
        dtype=str,
        default='MEANCLIP',
        doc="Statistic name to use for combination (from lsst.afw.math)",
    )
    clip = pexConfig.Field(
        dtype=float,
        default=3.0,
        doc="Clipping threshold for combination",
    )
    nIter = pexConfig.Field(
        dtype=int,
        default=3,
        doc="Clipping iterations for combination",
    )
    stats = pexConfig.ConfigurableField(
        target=CalibStatsTask,
        doc="Background statistics configuration",
    )
class CalibCombineTask(pipeBase.PipelineTask,
                       pipeBase.CmdLineTask):
    """Task to combine calib exposures."""
    ConfigClass = CalibCombineConfig
    _DefaultName = 'cpCombine'

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Subtask used by the "MeanStats" exposure-scaling mode.
        self.makeSubtask("stats")
208 inputs = butlerQC.get(inputRefs)
210 dimensions = [exp.dataId.byName()
for exp
in inputRefs.inputExps]
211 inputs[
'inputDims'] = dimensions
213 outputs = self.
runrun(**inputs)
214 butlerQC.put(outputs, outputRefs)
216 def run(self, inputExps, inputScales=None, inputDims=None):
217 """Combine calib exposures for a single detector.
221 inputExps : `list` [`lsst.afw.image.Exposure`]
222 Input list of exposures to combine.
223 inputScales : `dict` [`dict` [`dict` [`float`]]], optional
224 Dictionary of scales, indexed by detector (`int`),
225 amplifier (`int`), and exposure (`int`). Used for
227 inputDims : `list` [`dict`]
228 List of dictionaries of input data dimensions/values.
229 Each list entry should contain:
232 exposure id value (`int`)
234 detector id value (`int`)
238 combinedExp : `lsst.afw.image.Exposure`
239 Final combined exposure generated from the inputs.
244 Raised if no input data is found. Also raised if
245 config.exposureScaling == InputList, and a necessary scale
250 afwImage.Mask.getPlaneBitMask(self.config.mask))
251 numExps = len(inputExps)
253 raise RuntimeError(
"No valid input data")
254 if numExps < self.config.maxVisitsToCalcErrorFromInputVariance:
255 stats.setCalcErrorFromInputVariance(
True)
259 detectorList = [exp.getDetector()
for exp
in inputExps]
260 if None in detectorList:
261 self.log.
warn(
"Not all input detectors defined.")
262 detectorIds = [det.getId()
if det
is not None else None for det
in detectorList]
263 detectorSerials = [det.getId()
if det
is not None else None for det
in detectorList]
264 numDetectorIds = len(
set(detectorIds))
265 numDetectorSerials = len(
set(detectorSerials))
266 numDetectors = len(
set([numDetectorIds, numDetectorSerials]))
267 if numDetectors != 1:
268 raise RuntimeError(
"Input data contains multiple detectors.")
270 inputDetector = inputExps[0].getDetector()
273 combined = afwImage.MaskedImageF(width, height)
278 if inputDims
is None:
279 inputDims = [dict()
for i
in inputExps]
281 for index, (exp, dims)
in enumerate(zip(inputExps, inputDims)):
284 self.log.
warn(
"Input %d is None (%s); unable to scale exp.", index, dims)
287 if self.config.exposureScaling ==
"ExposureTime":
288 scale = exp.getInfo().getVisitInfo().getExposureTime()
289 elif self.config.exposureScaling ==
"DarkTime":
290 scale = exp.getInfo().getVisitInfo().getDarkTime()
291 elif self.config.exposureScaling ==
"MeanStats":
292 scale = self.stats.
run(exp)
293 elif self.config.exposureScaling ==
"InputList":
294 visitId = dims.get(
'exposure',
None)
295 detectorId = dims.get(
'detector',
None)
296 if visitId
is None or detectorId
is None:
297 raise RuntimeError(f
"Could not identify scaling for input {index} ({dims})")
298 if detectorId
not in inputScales[
'expScale']:
299 raise RuntimeError(f
"Could not identify a scaling for input {index}"
300 f
" detector {detectorId}")
302 if self.config.scalingLevel ==
"DETECTOR":
303 if visitId
not in inputScales[
'expScale'][detectorId]:
304 raise RuntimeError(f
"Could not identify a scaling for input {index}"
305 f
"detector {detectorId} visit {visitId}")
306 scale = inputScales[
'expScale'][detectorId][visitId]
307 elif self.config.scalingLevel ==
'AMP':
308 scale = [inputScales[
'expScale'][detectorId][amp.getName()][visitId]
309 for amp
in exp.getDetector()]
311 raise RuntimeError(f
"Unknown scaling level: {self.config.scalingLevel}")
312 elif self.config.exposureScaling ==
'Unity':
315 raise RuntimeError(f
"Unknown scaling type: {self.config.exposureScaling}.")
317 expScales.append(scale)
318 self.log.
info(
"Scaling input %d by %s", index, scale)
321 self.
combinecombine(combined, inputExps, stats)
325 if self.config.doVignette:
326 polygon = inputExps[0].
getInfo().getValidPolygon()
328 doSetValue=
True, vignetteValue=0.0)
332 calibType=self.config.calibrationType, scales=expScales)
335 combinedExp.setDetector(inputDetector)
338 return pipeBase.Struct(
339 outputData=combinedExp,
343 """Get dimensions of the inputs.
347 expList : `list` [`lsst.afw.image.Exposure`]
348 Exps to check the sizes of.
352 width, height : `int`
353 Unique set of input dimensions.
355 dimList = [exp.getDimensions()
for exp
in expList
if exp
is not None]
356 return self.
getSizegetSize(dimList)
359 """Determine a consistent size, given a list of image sizes.
363 dimList : iterable of `tuple` (`int`, `int`)
369 If input dimensions are inconsistent.
373 width, height : `int`
376 dim =
set((w, h)
for w, h
in dimList)
378 raise RuntimeError(
"Inconsistent dimensions: %s" % dim)
382 """Apply scale to input exposure.
384 This implementation applies a flux scaling: the input exposure is
385 divided by the provided scale.
389 exposure : `lsst.afw.image.Exposure`
391 scale : `float` or `list` [`float`], optional
392 Constant scale to divide the exposure by.
394 if scale
is not None:
395 mi = exposure.getMaskedImage()
396 if isinstance(scale, list):
397 for amp, ampScale
in zip(exposure.getDetector(), scale):
398 ampIm = mi[amp.getBBox()]
404 """Combine multiple images.
408 target : `lsst.afw.image.Exposure`
409 Output exposure to construct.
410 expList : `list` [`lsst.afw.image.Exposure`]
411 Input exposures to combine.
412 stats : `lsst.afw.math.StatisticsControl`
413 Control explaining how to combine the input images.
415 images = [img.getMaskedImage()
for img
in expList
if img
is not None]
420 """Combine input headers to determine the set of common headers,
421 supplemented by calibration inputs.
425 expList : `list` of `lsst.afw.image.Exposure`
426 Input list of exposures to combine.
427 calib : `lsst.afw.image.Exposure`
428 Output calibration to construct headers for.
429 calibType: `str`, optional
430 OBSTYPE the output should claim.
431 scales: `list` of `float`, optional
432 Scale values applied to each input to record.
436 header : `lsst.daf.base.PropertyList`
440 header = calib.getMetadata()
441 header.set(
"OBSTYPE", calibType)
444 comments = {
"TIMESYS":
"Time scale for all dates",
445 "DATE-OBS":
"Start date of earliest input observation",
446 "MJD-OBS":
"[d] Start MJD of earliest input observation",
447 "DATE-END":
"End date of oldest input observation",
448 "MJD-END":
"[d] End MJD of oldest input observation",
449 "MJD-AVG":
"[d] MJD midpoint of all input observations",
450 "DATE-AVG":
"Midpoint date of all input observations"}
453 now = time.localtime()
454 calibDate = time.strftime(
"%Y-%m-%d", now)
455 calibTime = time.strftime(
"%X %Z", now)
456 header.set(
"CALIB_CREATE_DATE", calibDate)
457 header.set(
"CALIB_CREATE_TIME", calibTime)
460 inputHeaders = [exp.getMetadata()
for exp
in expList
if exp
is not None]
461 merged = merge_headers(inputHeaders, mode=
'drop')
462 for k, v
in merged.items():
464 md = expList[0].getMetadata()
465 comment = md.getComment(k)
if k
in md
else None
466 header.set(k, v, comment=comment)
469 visitInfoList = [exp.getInfo().getVisitInfo()
for exp
in expList
if exp
is not None]
470 for i, visit
in enumerate(visitInfoList):
473 header.set(
"CPP_INPUT_%d" % (i,), visit.getExposureId())
474 header.set(
"CPP_INPUT_DATE_%d" % (i,), str(visit.getDate()))
475 header.set(
"CPP_INPUT_EXPT_%d" % (i,), visit.getExposureTime())
476 if scales
is not None:
477 header.set(
"CPP_INPUT_SCALE_%d" % (i,), scales[i])
484 group = ObservationGroup(visitInfoList, pedantic=
False)
486 self.log.
warn(
"Exception making an obs group for headers. Continuing.")
488 dateCards = {
"DATE-OBS":
"{}T00:00:00.00".
format(calibDate)}
489 comments[
"DATE-OBS"] =
"Date of start of day of calibration midpoint"
491 oldest, newest = group.extremes()
492 dateCards = dates_to_fits(oldest.datetime_begin, newest.datetime_end)
494 for k, v
in dateCards.items():
495 header.set(k, v, comment=comments.get(k,
None))
500 """Interpolate over NANs in the combined image.
502 NANs can result from masked areas on the CCD. We don't want them getting
503 into our science images, so we replace them with the median of the image.
507 exp : `lsst.afw.image.Exposure`
508 Exp to check for NaNs.
510 array = exp.getImage().getArray()
511 bad = np.isnan(array)
513 median = np.median(array[np.logical_not(bad)])
514 count = np.sum(np.logical_not(bad))
517 self.log.
warn(
"Found %s NAN pixels", count)
class CalibCombineByFilterConnections(CalibCombineConnections,
                                      dimensions=("instrument", "detector", "physical_filter")):
    # Override the scale/output connections to carry the physical_filter dimension.
    inputScales = cT.Input(
        name="cpFilterScales",
        doc="Input scale factors to use.",
        storageClass="StructuredDataDict",
        dimensions=("instrument", "physical_filter"),
        multiple=False,
    )
    outputData = cT.Output(
        name="cpFilterProposal",
        doc="Output combined proposed calibration to be validated and certified.",
        storageClass="ExposureF",
        dimensions=("instrument", "detector", "physical_filter"),
        isCalibration=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # As in the base connections: scale factors are only needed for
        # list-based scaling.
        if config and config.exposureScaling != 'InputList':
            self.inputs.discard("inputScales")
class CalibCombineByFilterConfig(CalibCombineConfig,
                                 pipelineConnections=CalibCombineByFilterConnections):
    # NOTE(review): class body lost in extraction; appears to only rebind the
    # pipeline connections — confirm against upstream cp_pipe.
    pass
552 """Task to combine calib exposures."""
553 ConfigClass = CalibCombineByFilterConfig
554 _DefaultName =
'cpFilterCombine'
def VignetteExposure(exposure, polygon=None,
                     doUpdateMask=True, maskPlane="NO_DATA",
                     doSetValue=False, vignetteValue=0.0,
                     log=None):
    """Apply vignetted polygon to image pixels.

    Parameters
    ----------
    exposure : `lsst.afw.image.Exposure`
        Exposure to modify in place.
    doUpdateMask : `bool`, optional
        Update the exposure mask for vignetted area?
    maskPlane : `str`, optional
        Mask plane to assign.
    doSetValue : `bool`, optional
        Set image value for vignetted area?
    vignetteValue : `float`, optional
        Value to assign to vignetted pixels.
    log : `lsst.log.Log`, optional
        Logger to use; a default one is constructed if omitted.

    Raises
    ------
    RuntimeError
        Raised if no valid polygon exists.
    """
    polygon = polygon if polygon else exposure.getInfo().getValidPolygon()
    if not polygon:
        raise RuntimeError("Could not find valid polygon!")
    log = log if log else Log.getLogger(__name__.partition(".")[2])

    # Cheap pre-check: if every corner of the bounding box is inside the
    # polygon, nothing is vignetted and the per-pixel scan can be skipped.
    fullyIlluminated = True
    for corner in exposure.getBBox().getCorners():
        if not polygon.contains(Point2D(corner)):
            fullyIlluminated = False

    log.info("Exposure is fully illuminated? %s", fullyIlluminated)

    if not fullyIlluminated:
        # Build a boolean mask of pixels outside the polygon.
        mask = exposure.getMask()
        numPixels = mask.getBBox().getArea()

        xx, yy = np.meshgrid(np.arange(0, mask.getWidth(), dtype=int),
                             np.arange(0, mask.getHeight(), dtype=int))

        vignMask = np.array([not polygon.contains(Point2D(x, y)) for x, y in
                             zip(xx.reshape(numPixels), yy.reshape(numPixels))])
        vignMask = vignMask.reshape(mask.getHeight(), mask.getWidth())

        if doUpdateMask:
            bitMask = mask.getPlaneBitMask(maskPlane)
            maskArray = mask.getArray()
            maskArray[vignMask] |= bitMask
        if doSetValue:
            imageArray = exposure.getImage().getArray()
            imageArray[vignMask] = vignetteValue
        log.info("Exposure contains %d vignetted pixels.",
                 np.count_nonzero(vignMask))
Pass parameters to a Statistics object.
def __init__(self, *config=None)
def __init__(self, *config=None)
def applyScale(self, exposure, scale=None)
def runQuantum(self, butlerQC, inputRefs, outputRefs)
def __init__(self, **kwargs)
def run(self, inputExps, inputScales=None, inputDims=None)
def combine(self, target, expList, stats)
def getDimensions(self, expList)
def getSize(self, dimList)
def interpolateNans(self, exp)
def combineHeaders(self, expList, calib, calibType="CALIB", scales=None)
def run(self, exposureOrImage)
daf::base::PropertySet * set
Backwards-compatibility support for depersisting the old Calib (FluxMag0/FluxMag0Err) objects.
std::shared_ptr< Exposure< ImagePixelT, MaskPixelT, VariancePixelT > > makeExposure(MaskedImage< ImagePixelT, MaskPixelT, VariancePixelT > &mimage, std::shared_ptr< geom::SkyWcs const > wcs=std::shared_ptr< geom::SkyWcs const >())
A function to return an Exposure of the correct type (cf.
Statistics makeStatistics(lsst::afw::image::Image< Pixel > const &img, lsst::afw::image::Mask< image::MaskPixel > const &msk, int const flags, StatisticsControl const &sctrl=StatisticsControl())
Handle a watered-down front-end to the constructor (no variance)
Property stringToStatisticsProperty(std::string const property)
Conversion function to switch a string to a Property (see Statistics.h)
std::shared_ptr< lsst::afw::image::Image< PixelT > > statisticsStack(std::vector< std::shared_ptr< lsst::afw::image::Image< PixelT >>> &images, Property flags, StatisticsControl const &sctrl=StatisticsControl(), std::vector< lsst::afw::image::VariancePixel > const &wvector=std::vector< lsst::afw::image::VariancePixel >(0))
A function to compute some statistics of a stack of Images.
def VignetteExposure(exposure, polygon=None, doUpdateMask=True, maskPlane="NO_DATA", doSetValue=False, vignetteValue=0.0, log=None)
Point< double, 2 > Point2D
def format(config, name=None, writeSourceLine=True, prefix="", verbose=False)