cpCombine.py
# This file is part of cp_pipe.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import time

import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.pipe.base.connectionTypes as cT
import lsst.afw.math as afwMath
import lsst.afw.image as afwImage

from lsst.geom import Point2D
from lsst.log import Log
from astro_metadata_translator import merge_headers, ObservationGroup
from astro_metadata_translator.serialize import dates_to_fits


# CalibStatsConfig/CalibStatsTask from pipe_base/constructCalibs.py
class CalibStatsConfig(pexConfig.Config):
    """Parameters controlling the measurement of background statistics.
    """
    stat = pexConfig.Field(
        dtype=str,
        default='MEANCLIP',
        doc="Statistic name to use to estimate background (from lsst.afw.math)",
    )
    clip = pexConfig.Field(
        dtype=float,
        default=3.0,
        doc="Clipping threshold for background",
    )
    nIter = pexConfig.Field(
        dtype=int,
        default=3,
        doc="Clipping iterations for background",
    )
    mask = pexConfig.ListField(
        dtype=str,
        default=["DETECTED", "BAD", "NO_DATA"],
        doc="Mask planes to reject",
    )


class CalibStatsTask(pipeBase.Task):
    """Measure statistics on the background.

    This can be useful for scaling the background, e.g., for flats and fringe frames.
    """
    ConfigClass = CalibStatsConfig

    def run(self, exposureOrImage):
        """Measure a particular statistic on an image (of some sort).

        Parameters
        ----------
        exposureOrImage : `lsst.afw.image.Exposure`, `lsst.afw.image.MaskedImage`, or `lsst.afw.image.Image`
            Exposure or image to calculate statistics on.

        Returns
        -------
        results : `float`
            Resulting statistic value.
        """
        stats = afwMath.StatisticsControl(self.config.clip, self.config.nIter,
                                          afwImage.Mask.getPlaneBitMask(self.config.mask))
        try:
            image = exposureOrImage.getMaskedImage()
        except Exception:
            try:
                image = exposureOrImage.getImage()
            except Exception:
                image = exposureOrImage
        statType = afwMath.stringToStatisticsProperty(self.config.stat)
        return afwMath.makeStatistics(image, statType, stats).getValue()


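# Usage sketch (editorial, illustrative only; not part of the original module):
# CalibStatsTask can be run standalone to get a clipped background level for
# scaling, assuming an `lsst.afw.image.Exposure` named `exposure` is in hand:
#
#     statsTask = CalibStatsTask(config=CalibStatsConfig())
#     level = statsTask.run(exposure)  # float; MEANCLIP over unmasked pixels by default

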
class CalibCombineConnections(pipeBase.PipelineTaskConnections,
                              dimensions=("instrument", "detector")):
    inputExps = cT.Input(
        name="cpInputs",
        doc="Input pre-processed exposures to combine.",
        storageClass="Exposure",
        dimensions=("instrument", "detector", "exposure"),
        multiple=True,
    )
    inputScales = cT.Input(
        name="cpScales",
        doc="Input scale factors to use.",
        storageClass="StructuredDataDict",
        dimensions=("instrument", ),
        multiple=False,
    )

    outputData = cT.Output(
        name="cpProposal",
        doc="Output combined proposed calibration.",
        storageClass="ExposureF",
        dimensions=("instrument", "detector"),
        isCalibration=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        if config and config.exposureScaling != 'InputList':
            self.inputs.discard("inputScales")

        if config and len(config.calibrationDimensions) != 0:
            newDimensions = tuple(config.calibrationDimensions)
            newOutputData = cT.Output(
                name=self.outputData.name,
                doc=self.outputData.doc,
                storageClass=self.outputData.storageClass,
                dimensions=self.allConnections['outputData'].dimensions + newDimensions,
                isCalibration=True,
            )
            self.dimensions.update(config.calibrationDimensions)
            self.outputData = newOutputData

            if config.exposureScaling == 'InputList':
                newInputScales = cT.PrerequisiteInput(
                    name=self.inputScales.name,
                    doc=self.inputScales.doc,
                    storageClass=self.inputScales.storageClass,
                    dimensions=self.allConnections['inputScales'].dimensions + newDimensions
                )
                self.dimensions.update(config.calibrationDimensions)
                self.inputScales = newInputScales


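# Note (editorial, illustrative): when the pipeline config requests extra
# calibration dimensions, the connections above are rebuilt with those
# dimensions appended. For example, a hypothetical override such as
#
#     config.calibrationDimensions = ["physical_filter"]
#
# would make the quantum and the outputData connection depend on
# ("instrument", "detector", "physical_filter") instead of just
# ("instrument", "detector").

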
# CalibCombineConfig/CalibCombineTask from pipe_base/constructCalibs.py
class CalibCombineConfig(pipeBase.PipelineTaskConfig,
                         pipelineConnections=CalibCombineConnections):
    """Configuration for combining calib exposures.
    """
    calibrationType = pexConfig.Field(
        dtype=str,
        default="calibration",
        doc="Name of calibration to be generated.",
    )
    calibrationDimensions = pexConfig.ListField(
        dtype=str,
        default=[],
        doc="List of updated dimensions to append to output.",
    )

    exposureScaling = pexConfig.ChoiceField(
        dtype=str,
        allowed={
            "Unity": "Do not scale inputs. Scale factor is 1.0.",
            "ExposureTime": "Scale inputs by their exposure time.",
            "DarkTime": "Scale inputs by their dark time.",
            "MeanStats": "Scale inputs based on their mean values.",
            "InputList": "Scale inputs based on a list of values.",
        },
        default="Unity",
        doc="Scaling to be applied to each input exposure.",
    )
    scalingLevel = pexConfig.ChoiceField(
        dtype=str,
        allowed={
            "DETECTOR": "Scale by detector.",
            "AMP": "Scale by amplifier.",
        },
        default="DETECTOR",
        doc="Region to scale.",
    )
    maxVisitsToCalcErrorFromInputVariance = pexConfig.Field(
        dtype=int,
        default=5,
        doc="Maximum number of visits to estimate variance from input variance, not per-pixel spread",
    )

    doVignette = pexConfig.Field(
        dtype=bool,
        default=False,
        doc="Copy vignette polygon to output and censor vignetted pixels?",
    )

    mask = pexConfig.ListField(
        dtype=str,
        default=["SAT", "DETECTED", "INTRP"],
        doc="Mask planes to respect",
    )
    combine = pexConfig.Field(
        dtype=str,
        default='MEANCLIP',
        doc="Statistic name to use for combination (from lsst.afw.math)",
    )
    clip = pexConfig.Field(
        dtype=float,
        default=3.0,
        doc="Clipping threshold for combination",
    )
    nIter = pexConfig.Field(
        dtype=int,
        default=3,
        doc="Clipping iterations for combination",
    )
    stats = pexConfig.ConfigurableField(
        target=CalibStatsTask,
        doc="Background statistics configuration",
    )


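# Configuration sketch (editorial, illustrative; concrete overrides live in the
# pipeline definitions, not in this file): a dark combination would typically
# scale inputs by time, while a flat combination would scale by a measured
# level, e.g.
#
#     config.calibrationType = "dark"
#     config.exposureScaling = "ExposureTime"
#
# or
#
#     config.calibrationType = "flat"
#     config.exposureScaling = "MeanStats"

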
class CalibCombineTask(pipeBase.PipelineTask,
                       pipeBase.CmdLineTask):
    """Task to combine calib exposures."""
    ConfigClass = CalibCombineConfig
    _DefaultName = 'cpCombine'

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.makeSubtask("stats")

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        dimensions = [exp.dataId.byName() for exp in inputRefs.inputExps]
        inputs['inputDims'] = dimensions

        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def run(self, inputExps, inputScales=None, inputDims=None):
        """Combine calib exposures for a single detector.

        Parameters
        ----------
        inputExps : `list` [`lsst.afw.image.Exposure`]
            Input list of exposures to combine.
        inputScales : `dict` [`dict` [`dict` [`float`]]], optional
            Dictionary of scales, indexed by detector (`int`),
            amplifier (`str`), and exposure (`int`). Used for
            'InputList' scaling.
        inputDims : `list` [`dict`]
            List of dictionaries of input data dimensions/values.
            Each list entry should contain:

            ``"exposure"``
                exposure id value (`int`)
            ``"detector"``
                detector id value (`int`)

        Returns
        -------
        combinedExp : `lsst.afw.image.Exposure`
            Final combined exposure generated from the inputs.

        Raises
        ------
        RuntimeError
            Raised if no input data is found. Also raised if
            config.exposureScaling == InputList, and a necessary scale
            was not found.
        """
        width, height = self.getDimensions(inputExps)
        stats = afwMath.StatisticsControl(self.config.clip, self.config.nIter,
                                          afwImage.Mask.getPlaneBitMask(self.config.mask))
        numExps = len(inputExps)
        if numExps < 1:
            raise RuntimeError("No valid input data")
        if numExps < self.config.maxVisitsToCalcErrorFromInputVariance:
            stats.setCalcErrorFromInputVariance(True)

        # Create output exposure for combined data.
        combined = afwImage.MaskedImageF(width, height)
        combinedExp = afwImage.makeExposure(combined)

        # Apply scaling:
        expScales = []
        if inputDims is None:
            inputDims = [dict() for i in inputExps]

        for index, (exp, dims) in enumerate(zip(inputExps, inputDims)):
            scale = 1.0
            if exp is None:
                self.log.warn("Input %d is None (%s); unable to scale exp.", index, dims)
                continue

            if self.config.exposureScaling == "ExposureTime":
                scale = exp.getInfo().getVisitInfo().getExposureTime()
            elif self.config.exposureScaling == "DarkTime":
                scale = exp.getInfo().getVisitInfo().getDarkTime()
            elif self.config.exposureScaling == "MeanStats":
                scale = self.stats.run(exp)
            elif self.config.exposureScaling == "InputList":
                visitId = dims.get('exposure', None)
                detectorId = dims.get('detector', None)
                if visitId is None or detectorId is None:
                    raise RuntimeError(f"Could not identify scaling for input {index} ({dims})")
                if detectorId not in inputScales['expScale']:
                    raise RuntimeError(f"Could not identify a scaling for input {index}"
                                       f" detector {detectorId}")

                if self.config.scalingLevel == "DETECTOR":
                    if visitId not in inputScales['expScale'][detectorId]:
                        raise RuntimeError(f"Could not identify a scaling for input {index}"
                                           f" detector {detectorId} visit {visitId}")
                    scale = inputScales['expScale'][detectorId][visitId]
                elif self.config.scalingLevel == 'AMP':
                    scale = [inputScales['expScale'][detectorId][amp.getName()][visitId]
                             for amp in exp.getDetector()]
                else:
                    raise RuntimeError(f"Unknown scaling level: {self.config.scalingLevel}")
            elif self.config.exposureScaling == 'Unity':
                scale = 1.0
            else:
                raise RuntimeError(f"Unknown scaling type: {self.config.exposureScaling}.")

            expScales.append(scale)
            self.log.info("Scaling input %d by %s", index, scale)
            self.applyScale(exp, scale)

        self.combine(combined, inputExps, stats)

        self.interpolateNans(combined)

        if self.config.doVignette:
            polygon = inputExps[0].getInfo().getValidPolygon()
            VignetteExposure(combined, polygon=polygon, doUpdateMask=True,
                             doSetValue=True, vignetteValue=0.0)

        # Combine headers
        self.combineHeaders(inputExps, combinedExp,
                            calibType=self.config.calibrationType, scales=expScales)

        # Return
        return pipeBase.Struct(
            outputData=combinedExp,
        )

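    # Editorial note (illustrative, inferred from the lookups in run() above):
    # for 'InputList' scaling the inputScales dictionary is expected to look like
    #
    #     {"expScale": {detectorId: {exposureId: scale}}}             # scalingLevel == "DETECTOR"
    #     {"expScale": {detectorId: {ampName: {exposureId: scale}}}}  # scalingLevel == "AMP"
    #
    # with detectorId/exposureId integers and ampName the amplifier name string.
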
    def getDimensions(self, expList):
        """Get dimensions of the inputs.

        Parameters
        ----------
        expList : `list` [`lsst.afw.image.Exposure`]
            Exps to check the sizes of.

        Returns
        -------
        width, height : `int`
            Unique set of input dimensions.
        """
        dimList = [exp.getDimensions() for exp in expList if exp is not None]
        return self.getSize(dimList)

    def getSize(self, dimList):
        """Determine a consistent size, given a list of image sizes.

        Parameters
        ----------
        dimList : iterable of `tuple` (`int`, `int`)
            List of dimensions.

        Raises
        ------
        RuntimeError
            If input dimensions are inconsistent.

        Returns
        -------
        width, height : `int`
            Common dimensions.
        """
        dim = set((w, h) for w, h in dimList)
        if len(dim) != 1:
            raise RuntimeError("Inconsistent dimensions: %s" % dim)
        return dim.pop()

    def applyScale(self, exposure, scale=None):
        """Apply scale to input exposure.

        This implementation applies a flux scaling: the input exposure is
        divided by the provided scale.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure to scale.
        scale : `float` or `list` [`float`], optional
            Constant scale to divide the exposure by.
        """
        if scale is not None:
            mi = exposure.getMaskedImage()
            if isinstance(scale, list):
                for amp, ampScale in zip(exposure.getDetector(), scale):
                    ampIm = mi[amp.getBBox()]
                    ampIm /= ampScale
            else:
                mi /= scale

    def combine(self, target, expList, stats):
        """Combine multiple images.

        Parameters
        ----------
        target : `lsst.afw.image.MaskedImage`
            Output masked image to construct.
        expList : `list` [`lsst.afw.image.Exposure`]
            Input exposures to combine.
        stats : `lsst.afw.math.StatisticsControl`
            Control explaining how to combine the input images.
        """
        images = [img.getMaskedImage() for img in expList if img is not None]
        combineType = afwMath.stringToStatisticsProperty(self.config.combine)
        afwMath.statisticsStack(target, images, combineType, stats)

    def combineHeaders(self, expList, calib, calibType="CALIB", scales=None):
        """Combine input headers to determine the set of common headers,
        supplemented by calibration inputs.

        Parameters
        ----------
        expList : `list` of `lsst.afw.image.Exposure`
            Input list of exposures to combine.
        calib : `lsst.afw.image.Exposure`
            Output calibration to construct headers for.
        calibType : `str`, optional
            OBSTYPE the output should claim.
        scales : `list` of `float`, optional
            Scale values applied to each input to record.

        Returns
        -------
        header : `lsst.daf.base.PropertyList`
            Constructed header.
        """
        # Header
        header = calib.getMetadata()
        header.set("OBSTYPE", calibType)

        # Keywords we care about
        comments = {"TIMESYS": "Time scale for all dates",
                    "DATE-OBS": "Start date of earliest input observation",
                    "MJD-OBS": "[d] Start MJD of earliest input observation",
                    "DATE-END": "End date of latest input observation",
                    "MJD-END": "[d] End MJD of latest input observation",
                    "MJD-AVG": "[d] MJD midpoint of all input observations",
                    "DATE-AVG": "Midpoint date of all input observations"}

        # Creation date
        now = time.localtime()
        calibDate = time.strftime("%Y-%m-%d", now)
        calibTime = time.strftime("%X %Z", now)
        header.set("CALIB_CREATE_DATE", calibDate)
        header.set("CALIB_CREATE_TIME", calibTime)

        # Merge input headers
        inputHeaders = [exp.getMetadata() for exp in expList if exp is not None]
        merged = merge_headers(inputHeaders, mode='drop')
        for k, v in merged.items():
            if k not in header:
                md = expList[0].getMetadata()
                comment = md.getComment(k) if k in md else None
                header.set(k, v, comment=comment)

        # Construct list of visits
        visitInfoList = [exp.getInfo().getVisitInfo() for exp in expList if exp is not None]
        for i, visit in enumerate(visitInfoList):
            if visit is None:
                continue
            header.set("CPP_INPUT_%d" % (i,), visit.getExposureId())
            header.set("CPP_INPUT_DATE_%d" % (i,), str(visit.getDate()))
            header.set("CPP_INPUT_EXPT_%d" % (i,), visit.getExposureTime())
            if scales is not None:
                header.set("CPP_INPUT_SCALE_%d" % (i,), scales[i])

        # Not yet working: DM-22302
        # Create an observation group so we can add some standard headers
        # independent of the form in the input files.
        # Use try block in case we are dealing with unexpected data headers
        try:
            group = ObservationGroup(visitInfoList, pedantic=False)
        except Exception:
            self.log.warn("Exception making an obs group for headers. Continuing.")
            # Fall back to setting a DATE-OBS from the calibDate
            dateCards = {"DATE-OBS": "{}T00:00:00.00".format(calibDate)}
            comments["DATE-OBS"] = "Date of start of day of calibration midpoint"
        else:
            oldest, newest = group.extremes()
            dateCards = dates_to_fits(oldest.datetime_begin, newest.datetime_end)

        for k, v in dateCards.items():
            header.set(k, v, comment=comments.get(k, None))

        return header

    def interpolateNans(self, exp):
        """Interpolate over NANs in the combined image.

        NANs can result from masked areas on the CCD. We don't want them getting
        into our science images, so we replace them with the median of the image.

        Parameters
        ----------
        exp : `lsst.afw.image.Exposure`
            Exp to check for NaNs.
        """
        array = exp.getImage().getArray()
        bad = np.isnan(array)

        median = np.median(array[np.logical_not(bad)])
        count = np.sum(bad)
        array[bad] = median
        if count > 0:
            self.log.warn("Found %s NAN pixels", count)


def VignetteExposure(exposure, polygon=None,
                     doUpdateMask=True, maskPlane='BAD',
                     doSetValue=False, vignetteValue=0.0,
                     log=None):
    """Apply vignetted polygon to image pixels.

    Parameters
    ----------
    exposure : `lsst.afw.image.Exposure`
        Image to be updated.
    polygon : `lsst.afw.geom.Polygon`, optional
        Vignetting polygon to apply; taken from the exposure's valid
        polygon if not supplied.
    doUpdateMask : `bool`, optional
        Update the exposure mask for vignetted area?
    maskPlane : `str`, optional
        Mask plane to assign.
    doSetValue : `bool`, optional
        Set image value for vignetted area?
    vignetteValue : `float`, optional
        Value to assign.
    log : `lsst.log.Log`, optional
        Log to write to.

    Raises
    ------
    RuntimeError
        Raised if no valid polygon exists.
    """
    polygon = polygon if polygon else exposure.getInfo().getValidPolygon()
    if not polygon:
        raise RuntimeError("Could not find valid polygon!")
    log = log if log else Log.getLogger(__name__.partition(".")[2])

    fullyIlluminated = True
    for corner in exposure.getBBox().getCorners():
        if not polygon.contains(Point2D(corner)):
            fullyIlluminated = False

    log.info("Exposure is fully illuminated? %s", fullyIlluminated)

    if not fullyIlluminated:
        # Scan pixels.
        mask = exposure.getMask()
        numPixels = mask.getBBox().getArea()

        xx, yy = np.meshgrid(np.arange(0, mask.getWidth(), dtype=int),
                             np.arange(0, mask.getHeight(), dtype=int))

        vignMask = np.array([not polygon.contains(Point2D(x, y)) for x, y in
                             zip(xx.reshape(numPixels), yy.reshape(numPixels))])
        vignMask = vignMask.reshape(mask.getHeight(), mask.getWidth())

        if doUpdateMask:
            bitMask = mask.getPlaneBitMask(maskPlane)
            maskArray = mask.getArray()
            maskArray[vignMask] |= bitMask
        if doSetValue:
            imageArray = exposure.getImage().getArray()
            imageArray[vignMask] = vignetteValue
        log.info("Exposure contains %d vignetted pixels.",
                 np.count_nonzero(vignMask))
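

# Usage sketch (editorial, illustrative): this mirrors the call made in
# CalibCombineTask.run() when config.doVignette is set, where `combined` is the
# combined image and `polygon` the validPolygon of the first input exposure:
#
#     VignetteExposure(combined, polygon=polygon, doUpdateMask=True,
#                      doSetValue=True, vignetteValue=0.0)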