deblend.py
1 # This file is part of meas_deblender.
2 #
3 # Developed for the LSST Data Management System.
4 # This product includes software developed by the LSST Project
5 # (https://www.lsst.org).
6 # See the COPYRIGHT file at the top-level directory of this distribution
7 # for details of code ownership.
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the GNU General Public License
20 # along with this program. If not, see <https://www.gnu.org/licenses/>.
21 
22 import math
23 import numpy as np
24 import time
25 
26 import scarlet
27 
28 import lsst.log
29 import lsst.pex.config as pexConfig
30 import lsst.pipe.base as pipeBase
31 import lsst.afw.math as afwMath
32 import lsst.afw.geom as afwGeom
33 import lsst.afw.geom.ellipses as afwEll
34 import lsst.afw.image as afwImage
35 import lsst.afw.detection as afwDet
36 import lsst.afw.table as afwTable
37 
38 logger = lsst.log.Log.getLogger("meas.deblender.deblend")
39 
40 __all__ = ["SourceDeblendConfig", "SourceDeblendTask", "MultibandDeblendConfig", "MultibandDeblendTask"]
41 
42 
43 class SourceDeblendConfig(pexConfig.Config):
44 
45  edgeHandling = pexConfig.ChoiceField(
46  doc='What to do when a peak to be deblended is close to the edge of the image',
47  dtype=str, default='ramp',
48  allowed={
49  'clip': 'Clip the template at the edge AND the mirror of the edge.',
50  'ramp': 'Ramp down flux at the image edge by the PSF',
51  'noclip': 'Ignore the edge when building the symmetric template.',
52  }
53  )
54 
55  strayFluxToPointSources = pexConfig.ChoiceField(
56  doc='When the deblender should attribute stray flux to point sources',
57  dtype=str, default='necessary',
58  allowed={
59  'necessary': 'When there is not an extended object in the footprint',
60  'always': 'Always',
61  'never': ('Never; stray flux will not be attributed to any deblended child '
62  'if the deblender thinks all peaks look like point sources'),
63  }
64  )
65 
66  assignStrayFlux = pexConfig.Field(dtype=bool, default=True,
67  doc='Assign stray flux (not claimed by any child in the deblender) '
68  'to deblend children.')
69 
70  strayFluxRule = pexConfig.ChoiceField(
71  doc='How to split flux among peaks',
72  dtype=str, default='trim',
73  allowed={
74  'r-to-peak': '~ 1/(1+R^2) to the peak',
75  'r-to-footprint': ('~ 1/(1+R^2) to the closest pixel in the footprint. '
76  'CAUTION: this can be computationally expensive on large footprints!'),
77  'nearest-footprint': ('Assign 100% to the nearest footprint (using L-1 norm aka '
78  'Manhattan distance)'),
79  'trim': ('Shrink the parent footprint to pixels that are not assigned to children')
80  }
81  )
82 
83  clipStrayFluxFraction = pexConfig.Field(dtype=float, default=0.001,
84  doc=('When splitting stray flux, clip fractions below '
85  'this value to zero.'))
86  psfChisq1 = pexConfig.Field(dtype=float, default=1.5, optional=False,
87  doc=('Chi-squared per DOF cut for deciding a source is '
88  'a PSF during deblending (un-shifted PSF model)'))
89  psfChisq2 = pexConfig.Field(dtype=float, default=1.5, optional=False,
90  doc=('Chi-squared per DOF cut for deciding a source is '
91  'PSF during deblending (shifted PSF model)'))
92  psfChisq2b = pexConfig.Field(dtype=float, default=1.5, optional=False,
93  doc=('Chi-squared per DOF cut for deciding a source is '
94  'a PSF during deblending (shifted PSF model #2)'))
95  maxNumberOfPeaks = pexConfig.Field(dtype=int, default=0,
96  doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
97  " (<= 0: unlimited)"))
98  maxFootprintArea = pexConfig.Field(dtype=int, default=1000000,
99  doc=("Maximum area for footprints before they are ignored as large; "
100  "non-positive means no threshold applied"))
101  maxFootprintSize = pexConfig.Field(dtype=int, default=0,
102  doc=("Maximum linear dimension for footprints before they are ignored "
103  "as large; non-positive means no threshold applied"))
104  minFootprintAxisRatio = pexConfig.Field(dtype=float, default=0.0,
105  doc=("Minimum axis ratio for footprints before they are ignored "
106  "as large; non-positive means no threshold applied"))
107  notDeblendedMask = pexConfig.Field(dtype=str, default="NOT_DEBLENDED", optional=True,
108  doc="Mask name for footprints not deblended, or None")
109 
110  tinyFootprintSize = pexConfig.RangeField(dtype=int, default=2, min=2, inclusiveMin=True,
111  doc=('Footprints smaller in width or height than this value '
112  'will be ignored; minimum of 2 due to PSF gradient '
113  'calculation.'))
114 
115  propagateAllPeaks = pexConfig.Field(dtype=bool, default=False,
116  doc=('Guarantee that all peaks produce a child source.'))
117  catchFailures = pexConfig.Field(
118  dtype=bool, default=False,
119  doc=("If True, catch exceptions thrown by the deblender, log them, "
120  "and set a flag on the parent, instead of letting them propagate up"))
121  maskPlanes = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
122  doc="Mask planes to ignore when performing statistics")
123  maskLimits = pexConfig.DictField(
124  keytype=str,
125  itemtype=float,
126  default={},
127  doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
128  "Sources violating this limit will not be deblended."),
129  )
130  weightTemplates = pexConfig.Field(
131  dtype=bool, default=False,
132  doc=("If true, a least-squares fit of the templates will be done to the "
133  "full image. The templates will be re-weighted based on this fit."))
134  removeDegenerateTemplates = pexConfig.Field(dtype=bool, default=False,
135  doc=("Try to remove similar templates?"))
136  maxTempDotProd = pexConfig.Field(
137  dtype=float, default=0.5,
138  doc=("If the dot product between two templates is larger than this value, we consider them to be "
139  "describing the same object (i.e. they are degenerate). If one of the objects has been "
140  "labeled as a PSF it will be removed, otherwise the template with the lowest value will "
141  "be removed."))
142  medianSmoothTemplate = pexConfig.Field(dtype=bool, default=True,
143  doc="Apply a smoothing filter to all of the template images")
144 
145 
151 
152 
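# A minimal sketch of how SourceDeblendConfig is typically customized before
# the task is constructed; the override values below are illustrative only.
#
#     config = SourceDeblendConfig()
#     config.maxFootprintArea = 500000    # skip very large parent footprints
#     config.catchFailures = True         # flag failures instead of raising
#     deblendTask = SourceDeblendTask(schema=schema, config=config)
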
153 class SourceDeblendTask(pipeBase.Task):
154  """!
155  \anchor SourceDeblendTask_
156 
157  \brief Split blended sources into individual sources.
158 
159  This task has no return value; it only modifies the SourceCatalog in-place.
160  """
161  ConfigClass = SourceDeblendConfig
162  _DefaultName = "sourceDeblend"
163 
164  def __init__(self, schema, peakSchema=None, **kwargs):
165  """!
166  Create the task, adding necessary fields to the given schema.
167 
168  @param[in,out] schema Schema object for measurement fields; will be modified in-place.
169  @param[in] peakSchema Schema of Footprint Peaks that will be passed to the deblender.
170  Any fields beyond the PeakTable minimal schema will be transferred
171  to the main source Schema. If None, no fields will be transferred
172  from the Peaks.
173  @param[in] **kwargs Passed to Task.__init__.
174  """
175  pipeBase.Task.__init__(self, **kwargs)
176  self.schema = schema
177  self.toCopyFromParent = [item.key for item in self.schema
178  if item.field.getName().startswith("merge_footprint")]
179  peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
180  if peakSchema is None:
181  # In this case, the peakSchemaMapper will transfer nothing, but we'll still have one
182  # to simplify downstream code
183  self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
184  else:
185  self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
186  for item in peakSchema:
187  if item.key not in peakMinimalSchema:
188  self.peakSchemaMapper.addMapping(item.key, item.field)
189  # Because SchemaMapper makes a copy of the output schema you give its ctor, it isn't
190  # updating this Schema in place. That's probably a design flaw, but in the meantime,
191  # we'll keep that schema in sync with the peakSchemaMapper.getOutputSchema() manually,
192  # by adding the same fields to both.
193  schema.addField(item.field)
194  assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
195  self.addSchemaKeys(schema)
196 
197  def addSchemaKeys(self, schema):
198  self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
199  doc='Number of children this object has (defaults to 0)')
200  self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
201  doc='Deblender thought this source looked like a PSF')
202  self.psfCenterKey = afwTable.Point2DKey.addFields(schema, 'deblend_psfCenter',
203  'If deblended-as-psf, the PSF centroid', "pixel")
204  self.psfFluxKey = schema.addField('deblend_psf_instFlux', type='D',
205  doc='If deblended-as-psf, the instrumental PSF flux', units='count')
206  self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
207  doc='Source had too many peaks; '
208  'only the brightest were included')
209  self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
210  doc='Parent footprint covered too many pixels')
211  self.maskedKey = schema.addField('deblend_masked', type='Flag',
212  doc='Parent footprint was predominantly masked')
213 
214  if self.config.catchFailures:
215  self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
216  doc="Deblending failed on source")
217 
218  self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
219  doc="Deblender skipped this source")
220 
221  self.deblendRampedTemplateKey = schema.addField(
222  'deblend_rampedTemplate', type='Flag',
223  doc=('This source was near an image edge and the deblender used '
224  '"ramp" edge-handling.'))
225 
226  self.deblendPatchedTemplateKey = schema.addField(
227  'deblend_patchedTemplate', type='Flag',
228  doc=('This source was near an image edge and the deblender used '
229  '"patched" edge-handling.'))
230 
231  self.hasStrayFluxKey = schema.addField(
232  'deblend_hasStrayFlux', type='Flag',
233  doc=('This source was assigned some stray flux'))
234 
235  self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in (
236  self.nChildKey, self.psfKey, self.psfCenterKey, self.psfFluxKey,
237  self.tooManyPeaksKey, self.tooBigKey)))
238 
239  @pipeBase.timeMethod
240  def run(self, exposure, sources):
241  """!
242  Get the psf from the provided exposure and then run deblend().
243 
244  @param[in] exposure Exposure to process
245  @param[in,out] sources SourceCatalog containing sources detected on this exposure.
246 
247  @return None
248  """
249  psf = exposure.getPsf()
250  assert sources.getSchema() == self.schema
251  self.deblend(exposure, sources, psf)
252 
253  def _getPsfFwhm(self, psf, bbox):
254  # It should be easier to get a PSF's fwhm;
255  # https://dev.lsstcorp.org/trac/ticket/3030
256  return psf.computeShape().getDeterminantRadius() * 2.35
257 
258  @pipeBase.timeMethod
259  def deblend(self, exposure, srcs, psf):
260  """!
261  Deblend.
262 
263  @param[in] exposure Exposure to process
264  @param[in,out] srcs SourceCatalog containing sources detected on this exposure.
265  @param[in] psf PSF
266 
267  @return None
268  """
269  self.log.info("Deblending %d sources" % len(srcs))
270 
271  from lsst.meas.deblender.baseline import deblend
272 
273  # find the median stdev in the image...
274  mi = exposure.getMaskedImage()
275  statsCtrl = afwMath.StatisticsControl()
276  statsCtrl.setAndMask(mi.getMask().getPlaneBitMask(self.config.maskPlanes))
277  stats = afwMath.makeStatistics(mi.getVariance(), mi.getMask(), afwMath.MEDIAN, statsCtrl)
278  sigma1 = math.sqrt(stats.getValue(afwMath.MEDIAN))
279  self.log.trace('sigma1: %g', sigma1)
280 
281  n0 = len(srcs)
282  nparents = 0
283  for i, src in enumerate(srcs):
284  # t0 = time.clock()
285 
286  fp = src.getFootprint()
287  pks = fp.getPeaks()
288 
289  # Since we use the first peak for the parent object, we should propagate its flags
290  # to the parent source.
291  src.assign(pks[0], self.peakSchemaMapper)
292 
293  if len(pks) < 2:
294  continue
295 
296  if self.isLargeFootprint(fp):
297  src.set(self.tooBigKey, True)
298  self.skipParent(src, mi.getMask())
299  self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
300  continue
301  if self.isMasked(fp, exposure.getMaskedImage().getMask()):
302  src.set(self.maskedKey, True)
303  self.skipParent(src, mi.getMask())
304  self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
305  continue
306 
307  nparents += 1
308  bb = fp.getBBox()
309  psf_fwhm = self._getPsfFwhm(psf, bb)
310 
311  self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(pks))
312 
313  self.preSingleDeblendHook(exposure, srcs, i, fp, psf, psf_fwhm, sigma1)
314  npre = len(srcs)
315 
316  # This should really be set in deblend, but deblend doesn't have access to the src
317  src.set(self.tooManyPeaksKey, 0 < self.config.maxNumberOfPeaks < len(fp.getPeaks()))
318 
319  try:
320  res = deblend(
321  fp, mi, psf, psf_fwhm, sigma1=sigma1,
322  psfChisqCut1=self.config.psfChisq1,
323  psfChisqCut2=self.config.psfChisq2,
324  psfChisqCut2b=self.config.psfChisq2b,
325  maxNumberOfPeaks=self.config.maxNumberOfPeaks,
326  strayFluxToPointSources=self.config.strayFluxToPointSources,
327  assignStrayFlux=self.config.assignStrayFlux,
328  strayFluxAssignment=self.config.strayFluxRule,
329  rampFluxAtEdge=(self.config.edgeHandling == 'ramp'),
330  patchEdges=(self.config.edgeHandling == 'noclip'),
331  tinyFootprintSize=self.config.tinyFootprintSize,
332  clipStrayFluxFraction=self.config.clipStrayFluxFraction,
333  weightTemplates=self.config.weightTemplates,
334  removeDegenerateTemplates=self.config.removeDegenerateTemplates,
335  maxTempDotProd=self.config.maxTempDotProd,
336  medianSmoothTemplate=self.config.medianSmoothTemplate
337  )
338  if self.config.catchFailures:
339  src.set(self.deblendFailedKey, False)
340  except Exception as e:
341  if self.config.catchFailures:
342  self.log.warn("Unable to deblend source %d: %s" % (src.getId(), e))
343  src.set(self.deblendFailedKey, True)
344  import traceback
345  traceback.print_exc()
346  continue
347  else:
348  raise
349 
350  kids = []
351  nchild = 0
352  for j, peak in enumerate(res.deblendedParents[0].peaks):
353  heavy = peak.getFluxPortion()
354  if heavy is None or peak.skip:
355  src.set(self.deblendSkippedKey, True)
356  if not self.config.propagateAllPeaks:
357  # Don't care
358  continue
359  # We need to preserve the peak: make sure we have enough info to create a minimal
360  # child src
361  self.log.trace("Peak at (%i,%i) failed. Using minimal default info for child.",
362  pks[j].getIx(), pks[j].getIy())
363  if heavy is None:
364  # copy the full footprint and strip out extra peaks
365  foot = afwDet.Footprint(src.getFootprint())
366  peakList = foot.getPeaks()
367  peakList.clear()
368  peakList.append(peak.peak)
369  zeroMimg = afwImage.MaskedImageF(foot.getBBox())
370  heavy = afwDet.makeHeavyFootprint(foot, zeroMimg)
371  if peak.deblendedAsPsf:
372  if peak.psfFitFlux is None:
373  peak.psfFitFlux = 0.0
374  if peak.psfFitCenter is None:
375  peak.psfFitCenter = (peak.peak.getIx(), peak.peak.getIy())
376 
377  assert(len(heavy.getPeaks()) == 1)
378 
379  src.set(self.deblendSkippedKey, False)
380  child = srcs.addNew()
381  nchild += 1
382  for key in self.toCopyFromParent:
383  child.set(key, src.get(key))
384  child.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
385  child.setParent(src.getId())
386  child.setFootprint(heavy)
387  child.set(self.psfKey, peak.deblendedAsPsf)
388  child.set(self.hasStrayFluxKey, peak.strayFlux is not None)
389  if peak.deblendedAsPsf:
390  (cx, cy) = peak.psfFitCenter
391  child.set(self.psfCenterKey, afwGeom.Point2D(cx, cy))
392  child.set(self.psfFluxKey, peak.psfFitFlux)
393  child.set(self.deblendRampedTemplateKey, peak.hasRampedTemplate)
394  child.set(self.deblendPatchedTemplateKey, peak.patched)
395  kids.append(child)
396 
397  # Child footprints may extend beyond the full extent of their parent's which
398  # results in a failure of the replace-by-noise code to reinstate these pixels
399  # to their original values. The following updates the parent footprint
400  # in-place to ensure it contains the full union of itself and all of its
401  # children's footprints.
402  spans = src.getFootprint().spans
403  for child in kids:
404  spans = spans.union(child.getFootprint().spans)
405  src.getFootprint().setSpans(spans)
406 
407  src.set(self.nChildKey, nchild)
408 
409  self.postSingleDeblendHook(exposure, srcs, i, npre, kids, fp, psf, psf_fwhm, sigma1, res)
410  # print('Deblending parent id', src.getId(), 'took', time.clock() - t0)
411 
412  n1 = len(srcs)
413  self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
414  % (n0, nparents, n1-n0, n1))
415 
416  def preSingleDeblendHook(self, exposure, srcs, i, fp, psf, psf_fwhm, sigma1):
417  pass
418 
419  def postSingleDeblendHook(self, exposure, srcs, i, npre, kids, fp, psf, psf_fwhm, sigma1, res):
420  pass
421 
422  def isLargeFootprint(self, footprint):
423  """Returns whether a Footprint is large
424 
425  'Large' is defined by thresholds on the area, size and axis ratio.
426  These may be disabled independently by configuring them to be non-positive.
427 
428  This is principally intended to get rid of satellite streaks, which the
429  deblender or other downstream processing can have trouble dealing with
430  (e.g., multiple large HeavyFootprints can chew up memory).
431  """
432  if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
433  return True
434  if self.config.maxFootprintSize > 0:
435  bbox = footprint.getBBox()
436  if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
437  return True
438  if self.config.minFootprintAxisRatio > 0:
439  axes = afwEll.Axes(footprint.getShape())
440  if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
441  return True
442  return False
443 
444  def isMasked(self, footprint, mask):
445  """Returns whether the footprint violates the mask limits"""
446  size = float(footprint.getArea())
447  for maskName, limit in self.config.maskLimits.items():
448  maskVal = mask.getPlaneBitMask(maskName)
449  unmaskedSpan = footprint.spans.intersectNot(mask, maskVal) # spanset of unmasked pixels
450  if (size - unmaskedSpan.getArea())/size > limit:
451  return True
452  return False
453 
454  def skipParent(self, source, mask):
455  """Indicate that the parent source is not being deblended
456 
457  We set the appropriate flags and mask.
458 
459  @param source The source to flag as skipped
460  @param mask The mask to update
461  """
462  fp = source.getFootprint()
463  source.set(self.deblendSkippedKey, True)
464  source.set(self.nChildKey, len(fp.getPeaks())) # It would have this many if we deblended them all
465  if self.config.notDeblendedMask:
466  mask.addMaskPlane(self.config.notDeblendedMask)
467  fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))
468 
469 
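# A minimal usage sketch for SourceDeblendTask, assuming an exposure and a
# SourceCatalog of detections (here called ``exposure`` and ``sources``) built
# against the same schema; the setup shown is illustrative only.
#
#     schema = afwTable.SourceTable.makeMinimalSchema()
#     deblendTask = SourceDeblendTask(schema=schema)
#     # ... run detection so that ``sources`` uses this schema ...
#     deblendTask.run(exposure, sources)   # modifies ``sources`` in place
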
470 class MultibandDeblendConfig(pexConfig.Config):
471  """MultibandDeblendConfig
472 
473  Configuration for the multiband deblender.
474  The parameters are organized by the parameter types, which are
475  - Stopping Criteria: Used to determine if the fit has converged
476  - Position Fitting Criteria: Used to fit the positions of the peaks
477  - Constraints: Used to apply constraints to the peaks and their components
478  - Other: Parameters that don't fit into the above categories
479  """
480  # Stopping Criteria
481  maxIter = pexConfig.Field(dtype=int, default=200,
482  doc=("Maximum number of iterations to deblend a single parent"))
483  relativeError = pexConfig.Field(dtype=float, default=1e-3,
484  doc=("Relative error to use when determining stopping criteria"))
485 
486  # Blend Configuration options
487  minTranslation = pexConfig.Field(dtype=float, default=1e-3,
488  doc=("A peak must be updated by at least 'minTranslation' (pixels) "
489  "or no update is performed. "
490  "This field is ignored if fitPositions is False."))
491  refinementSkip = pexConfig.Field(dtype=int, default=10,
492  doc=("If fitPositions is True, the positions and box sizes are "
493  "updated every 'refinementSkip' iterations."))
494  translationMethod = pexConfig.Field(dtype=str, default="default",
495  doc=("Method to use for fitting translations. "
496  "Currently 'default' is the only available option, "
497  "which performs a linear fit, but it is possible that we "
498  "will use galsim or some other method as a future option."))
499  edgeFluxThresh = pexConfig.Field(dtype=float, default=1.0,
500  doc=("Boxes are resized when the flux at an edge is "
501  "> edgeFluxThresh * background RMS"))
502  exactLipschitz = pexConfig.Field(dtype=bool, default=False,
503  doc=("Calculate the exact Lipschitz constant in every step "
504  "(True) or only calculate the approximate "
505  "Lipschitz constant when there are significant changes in A,S "
506  "(False)"))
507  stepSlack = pexConfig.Field(dtype=float, default=0.2,
508  doc=("A fractional measure of how much a value (like the exactLipschitz) "
509  "can change before it needs to be recalculated. "
510  "This must be between 0 and 1."))
511 
512  # Constraints
513  constraints = pexConfig.Field(dtype=str, default="1,+,S,M",
514  doc=("List of constraints to use for each object "
515  "(order does not matter). "
516  "Current options are all used by default:\n"
517  "S: symmetry\n"
518  "M: monotonicity\n"
519  "1: normalized SED to unity\n"
520  "+: non-negative morphology"))
521  symmetryThresh = pexConfig.Field(dtype=float, default=1.0,
522  doc=("Strictness of symmetry, from "
523  "0 (no symmetry enforced) to "
524  "1 (perfect symmetry required). "
525  "If 'S' is not in `constraints`, this argument is ignored."))
526  l0Thresh = pexConfig.Field(dtype=float, default=np.nan,
527  doc=("L0 threshold. NaN results in no L0 penalty."))
528  l1Thresh = pexConfig.Field(dtype=float, default=np.nan,
529  doc=("L1 threshold. NaN results in no L1 penalty."))
530  tvxThresh = pexConfig.Field(dtype=float, default=np.nan,
531  doc=("Threshold for TV (total variation) constraint in the x-direction. "
532  "NaN results in no TVx penalty."))
533  tvyThresh = pexConfig.Field(dtype=float, default=np.nan,
534  doc=("Threshold for TV (total variation) constraint in the y-direction. "
535  "NaN results in no TVy penalty."))
536 
537  # Other scarlet parameters
538  useWeights = pexConfig.Field(dtype=bool, default=False, doc="Use inverse variance as deblender weights")
539  bgScale = pexConfig.Field(
540  dtype=float, default=0.5,
541  doc=("Fraction of background RMS level to use as a "
542  "cutoff for defining the background of the image. "
543  "This is used to initialize the model for each source "
544  "and to set the size of the bounding box for each source "
545  "every `refinementSkip` iterations."))
546  usePsfConvolution = pexConfig.Field(
547  dtype=bool, default=True,
548  doc=("Whether or not to convolve the morphology with the "
549  "PSF in each band or use the same morphology in all bands"))
550  saveTemplates = pexConfig.Field(
551  dtype=bool, default=True,
552  doc="Whether or not to save the SEDs and templates")
553  processSingles = pexConfig.Field(
554  dtype=bool, default=False,
555  doc="Whether or not to process isolated sources in the deblender")
556  badMask = pexConfig.Field(
557  dtype=str, default="BAD,CR,NO_DATA,SAT,SUSPECT",
558  doc="Comma-separated list of mask planes that mark bad pixels to ignore in the deblender")
559  # Old deblender parameters used in this implementation (some of which might be removed later)
560 
561  maxNumberOfPeaks = pexConfig.Field(
562  dtype=int, default=0,
563  doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
564  " (<= 0: unlimited)"))
565  maxFootprintArea = pexConfig.Field(
566  dtype=int, default=1000000,
567  doc=("Maximum area for footprints before they are ignored as large; "
568  "non-positive means no threshold applied"))
569  maxFootprintSize = pexConfig.Field(
570  dtype=int, default=0,
571  doc=("Maximum linear dimension for footprints before they are ignored "
572  "as large; non-positive means no threshold applied"))
573  minFootprintAxisRatio = pexConfig.Field(
574  dtype=float, default=0.0,
575  doc=("Minimum axis ratio for footprints before they are ignored "
576  "as large; non-positive means no threshold applied"))
577  notDeblendedMask = pexConfig.Field(
578  dtype=str, default="NOT_DEBLENDED", optional=True,
579  doc="Mask name for footprints not deblended, or None")
580 
581  tinyFootprintSize = pexConfig.RangeField(
582  dtype=int, default=2, min=2, inclusiveMin=True,
583  doc=('Footprints smaller in width or height than this value will '
584  'be ignored; minimum of 2 due to PSF gradient calculation.'))
585  catchFailures = pexConfig.Field(
586  dtype=bool, default=False,
587  doc=("If True, catch exceptions thrown by the deblender, log them, "
588  "and set a flag on the parent, instead of letting them propagate up"))
589  propagateAllPeaks = pexConfig.Field(dtype=bool, default=False,
590  doc=('Guarantee that all peaks produce a child source.'))
591  maskPlanes = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
592  doc="Mask planes to ignore when performing statistics")
593  maskLimits = pexConfig.DictField(
594  keytype=str,
595  itemtype=float,
596  default={},
597  doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
598  "Sources violating this limit will not be deblended."),
599  )
600 
601  edgeHandling = pexConfig.ChoiceField(
602  doc='What to do when a peak to be deblended is close to the edge of the image',
603  dtype=str, default='noclip',
604  allowed={
605  'clip': 'Clip the template at the edge AND the mirror of the edge.',
606  'ramp': 'Ramp down flux at the image edge by the PSF',
607  'noclip': 'Ignore the edge when building the symmetric template.',
608  }
609  )
610 
611  medianSmoothTemplate = pexConfig.Field(dtype=bool, default=False,
612  doc="Apply a smoothing filter to all of the template images")
613  medianFilterHalfsize = pexConfig.Field(dtype=float, default=2,
614  doc=('Half size of the median smoothing filter'))
615  clipFootprintToNonzero = pexConfig.Field(dtype=bool, default=False,
616  doc=("Clip non-zero spans in the footprints"))
617 
618  conserveFlux = pexConfig.Field(dtype=bool, default=True,
619  doc=("Reapportion flux to the footprints so that flux is conserved"))
620  weightTemplates = pexConfig.Field(
621  dtype=bool, default=False,
622  doc=("If true, a least-squares fit of the templates will be done to the "
623  "full image. The templates will be re-weighted based on this fit."))
624  strayFluxToPointSources = pexConfig.ChoiceField(
625  doc='When the deblender should attribute stray flux to point sources',
626  dtype=str, default='necessary',
627  allowed={
628  'necessary': 'When there is not an extended object in the footprint',
629  'always': 'Always',
630  'never': ('Never; stray flux will not be attributed to any deblended child '
631  'if the deblender thinks all peaks look like point sources'),
632  }
633  )
634 
635  assignStrayFlux = pexConfig.Field(dtype=bool, default=True,
636  doc='Assign stray flux (not claimed by any child in the deblender) '
637  'to deblend children.')
638 
639  strayFluxRule = pexConfig.ChoiceField(
640  doc='How to split flux among peaks',
641  dtype=str, default='trim',
642  allowed={
643  'r-to-peak': '~ 1/(1+R^2) to the peak',
644  'r-to-footprint': ('~ 1/(1+R^2) to the closest pixel in the footprint. '
645  'CAUTION: this can be computationally expensive on large footprints!'),
646  'nearest-footprint': ('Assign 100% to the nearest footprint (using L-1 norm aka '
647  'Manhattan distance)'),
648  'trim': ('Shrink the parent footprint to pixels that are not assigned to children')
649  }
650  )
651 
652  clipStrayFluxFraction = pexConfig.Field(dtype=float, default=0.001,
653  doc=('When splitting stray flux, clip fractions below '
654  'this value to zero.'))
655  getTemplateSum = pexConfig.Field(dtype=bool, default=False,
656  doc=("As part of the flux calculation, the sum of the templates is "
657  "calculated. If 'getTemplateSum==True' then the sum of the "
658  "templates is stored in the result (a 'PerFootprint')."))
659 
660 
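# The ``constraints`` string is split on commas and each character is mapped
# to a scarlet constraint class in MultibandDeblendTask.__init__ below.  A
# minimal sketch of a non-default configuration; values are illustrative only.
#
#     config = MultibandDeblendConfig()
#     config.constraints = "M,+"     # monotonicity and non-negativity only
#     config.maxIter = 300
#     deblendTask = MultibandDeblendTask(schema=schema, config=config)
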
661 class MultibandDeblendTask(pipeBase.Task):
662  """MultibandDeblendTask
663 
664  Split blended sources into individual sources.
665 
666  This task has no return value; it only modifies the SourceCatalog in-place.
667  """
668  ConfigClass = MultibandDeblendConfig
669  _DefaultName = "multibandDeblend"
670 
671  def __init__(self, schema, peakSchema=None, **kwargs):
672  """Create the task, adding necessary fields to the given schema.
673 
674  Parameters
675  ----------
676  schema: `lsst.afw.table.schema.schema.Schema`
677  Schema object for measurement fields; will be modified in-place.
678  peakSchema: `lsst.afw.table.schema.schema.Schema`
679  Schema of Footprint Peaks that will be passed to the deblender.
680  Any fields beyond the PeakTable minimal schema will be transferred
681  to the main source Schema. If None, no fields will be transferred
682  from the Peaks.
683  filters: list of str
684  Names of the filters used for the exposures. This is needed to store the SED as a field
685  **kwargs
686  Passed to Task.__init__.
687  """
688  from lsst.meas.deblender import plugins
689 
690  pipeBase.Task.__init__(self, **kwargs)
691  if not self.config.conserveFlux and not self.config.saveTemplates:
692  raise ValueError("Either `conserveFlux` or `saveTemplates` must be True")
693 
694  peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
695  if peakSchema is None:
696  # In this case, the peakSchemaMapper will transfer nothing, but we'll still have one
697  # to simplify downstream code
698  self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
699  else:
700  self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
701  for item in peakSchema:
702  if item.key not in peakMinimalSchema:
703  self.peakSchemaMapper.addMapping(item.key, item.field)
704  # Because SchemaMapper makes a copy of the output schema you give its ctor, it isn't
705  # updating this Schema in place. That's probably a design flaw, but in the meantime,
706  # we'll keep that schema in sync with the peakSchemaMapper.getOutputSchema() manually,
707  # by adding the same fields to both.
708  schema.addField(item.field)
709  assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
710  self._addSchemaKeys(schema)
711  self.schema = schema
712 
713  # Create the plugins for multiband deblending using the Config options
714 
715  # Basic deblender configuration
716  config = scarlet.config.Config(
717  center_min_dist=self.config.minTranslation,
718  edge_flux_thresh=self.config.edgeFluxThresh,
719  exact_lipschitz=self.config.exactLipschitz,
720  refine_skip=self.config.refinementSkip,
721  slack=self.config.stepSlack,
722  )
723  if self.config.translationMethod != "default":
724  err = "Currently the only supported translationMethod is 'default', you entered '{0}'"
725  raise NotImplementedError(err.format(self.config.translationMethod))
726 
727  # If the default constraints are not used, set the constraints for
728  # all of the sources
729  constraints = None
730  _constraints = self.config.constraints.split(",")
731  if (sorted(_constraints) != ['+', '1', 'M', 'S'] or
732  ~np.isnan(self.config.l0Thresh) or
733  ~np.isnan(self.config.l1Thresh)):
734  constraintDict = {
735  "+": scarlet.constraint.PositivityConstraint,
736  "1": scarlet.constraint.SimpleConstraint,
737  "M": scarlet.constraint.DirectMonotonicityConstraint(use_nearest=False),
738  "S": scarlet.constraint.DirectSymmetryConstraint(sigma=self.config.symmetryThresh)
739  }
740  for c in _constraints:
741  if constraints is None:
742  constraints = [constraintDict[c]]
743  else:
744  constraints += [constraintDict[c]]
745  if constraints is None:
746  constraints = scarlet.constraint.MinimalConstraint()
747  if ~np.isnan(self.config.l0Thresh):
748  constraints += [scarlet.constraint.L0Constraint(self.config.l0Thresh)]
749  if ~np.isnan(self.config.l1Thresh):
750  constraints += [scarlet.constraint.L1Constraint(self.config.l1Thresh)]
751  if ~np.isnan(self.config.tvxThresh):
752  constraints += [scarlet.constraint.TVxConstraint(self.config.tvxThresh)]
753  if ~np.isnan(self.config.tvyThresh):
754  constraints += [scarlet.constraint.TVyConstraint(self.config.tvyThresh)]
755 
756  multiband_plugin = plugins.DeblenderPlugin(
757  plugins.buildMultibandTemplates,
758  useWeights=self.config.useWeights,
759  usePsf=self.config.usePsfConvolution,
760  constraints=constraints,
761  config=config,
762  maxIter=self.config.maxIter,
763  bgScale=self.config.bgScale,
764  relativeError=self.config.relativeError,
765  badMask=self.config.badMask.split(","),
766  )
767  self.plugins = [multiband_plugin]
768 
769  # Plugins from the old deblender for post-template processing
770  # (see lsst.meas_deblender.baseline.deblend)
771  if self.config.edgeHandling == 'ramp':
772  self.plugins.append(plugins.DeblenderPlugin(plugins.rampFluxAtEdge, patchEdges=False))
773  if self.config.medianSmoothTemplate:
774  self.plugins.append(plugins.DeblenderPlugin(
775  plugins.medianSmoothTemplates,
776  medianFilterHalfsize=self.config.medianFilterHalfsize))
777  if self.config.clipFootprintToNonzero:
778  self.plugins.append(plugins.DeblenderPlugin(plugins.clipFootprintsToNonzero))
779  if self.config.conserveFlux:
780  if self.config.weightTemplates:
781  self.plugins.append(plugins.DeblenderPlugin(plugins.weightTemplates))
782  self.plugins.append(plugins.DeblenderPlugin(
783  plugins.apportionFlux,
784  clipStrayFluxFraction=self.config.clipStrayFluxFraction,
785  assignStrayFlux=self.config.assignStrayFlux,
786  strayFluxAssignment=self.config.strayFluxRule,
787  strayFluxToPointSources=self.config.strayFluxToPointSources,
788  getTemplateSum=self.config.getTemplateSum))
789 
790  def _addSchemaKeys(self, schema):
791  """Add deblender specific keys to the schema
792  """
793  self.runtimeKey = schema.addField('runtime', type=np.float32, doc='runtime in ms')
794  # Keys from old Deblender that might be kept in the new deblender
795  self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
796  doc='Number of children this object has (defaults to 0)')
797  self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
798  doc='Deblender thought this source looked like a PSF')
799  self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
800  doc='Source had too many peaks; '
801  'only the brightest were included')
802  self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
803  doc='Parent footprint covered too many pixels')
804  self.maskedKey = schema.addField('deblend_masked', type='Flag',
805  doc='Parent footprint was predominantly masked')
806  self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
807  doc="Deblending failed on source")
808 
809  self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
810  doc="Deblender skipped this source")
811 
812  # Keys from the old Deblender that some measurement tasks require.
813  # TODO: Remove these when the old deblender is removed
814  self.psfCenterKey = afwTable.Point2DKey.addFields(schema, 'deblend_psfCenter',
815  'If deblended-as-psf, the PSF centroid', "pixel")
816  self.psfFluxKey = schema.addField('deblend_psf_instFlux', type='D',
817  doc='If deblended-as-psf, the instrumental PSF flux', units='count')
818  self.deblendRampedTemplateKey = schema.addField(
819  'deblend_rampedTemplate', type='Flag',
820  doc=('This source was near an image edge and the deblender used '
821  '"ramp" edge-handling.'))
822 
823  self.deblendPatchedTemplateKey = schema.addField(
824  'deblend_patchedTemplate', type='Flag',
825  doc=('This source was near an image edge and the deblender used '
826  '"patched" edge-handling.'))
827 
828  self.hasStrayFluxKey = schema.addField(
829  'deblend_hasStrayFlux', type='Flag',
830  doc=('This source was assigned some stray flux'))
831 
832  self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in (
833  self.nChildKey, self.psfKey, self.psfCenterKey, self.psfFluxKey,
834  self.tooManyPeaksKey, self.tooBigKey)))
835 
836  @pipeBase.timeMethod
837  def run(self, mExposure, mergedSources):
838  """Get the psf from each exposure and then run deblend().
839 
840  Parameters
841  ----------
842  mExposure: `MultibandExposure`
843  The exposures should be co-added images of the same
844  shape and region of the sky.
845  mergedSources: `SourceCatalog`
846  The merged `SourceCatalog` that contains parent footprints
847  to (potentially) deblend.
848 
849  Returns
850  -------
851  fluxCatalogs: dict or None
852  Keys are the names of the filters and the values are
853  `lsst.afw.table.source.source.SourceCatalog`'s.
854  These are the flux-conserved catalogs with heavy footprints with
855  the image data weighted by the multiband templates.
856  If `self.config.conserveFlux` is `False`, then this item will be None
857  templateCatalogs: dict or None
858  Keys are the names of the filters and the values are
859  `lsst.afw.table.source.source.SourceCatalog`'s.
860  These are catalogs with heavy footprints that are the templates
861  created by the multiband templates.
862  If `self.config.saveTemplates` is `False`, then this item will be None
863  """
864  psfs = {f: mExposure[f].getPsf() for f in mExposure.filters}
865  return self.deblend(mExposure, mergedSources, psfs)
866 
867  def _getPsfFwhm(self, psf, bbox):
868  return psf.computeShape().getDeterminantRadius() * 2.35
869 
870  def _addChild(self, parentId, peak, sources, heavy):
871  """Add a child to a catalog
872 
873  This creates a new child in the source catalog,
874  assigning it a parent id, adding a footprint,
875  and setting all appropriate flags based on the
876  deblender result.
877  """
878  assert len(heavy.getPeaks()) == 1
879  src = sources.addNew()
880  src.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
881  src.setParent(parentId)
882  src.setFootprint(heavy)
883  src.set(self.psfKey, peak.deblendedAsPsf)
884  src.set(self.hasStrayFluxKey, peak.strayFlux is not None)
885  src.set(self.deblendRampedTemplateKey, peak.hasRampedTemplate)
886  src.set(self.deblendPatchedTemplateKey, peak.patched)
887  src.set(self.runtimeKey, 0)
888  return src
889 
890  @pipeBase.timeMethod
891  def deblend(self, mExposure, sources, psfs):
892  """Deblend a data cube of multiband images
893 
894  Parameters
895  ----------
896  mExposure: `MultibandExposure`
897  The exposures should be co-added images of the same
898  shape and region of the sky.
899  sources: `SourceCatalog`
900  The merged `SourceCatalog` that contains parent footprints
901  to (potentially) deblend.
902  psfs: dict
903  Keys are the names of the filters
904  (should be the same as `mExposure.filters`)
905  and the values are the PSFs in each band.
906 
907  Returns
908  -------
909  fluxCatalogs: dict or None
910  Keys are the names of the filters and the values are
911  `lsst.afw.table.source.source.SourceCatalog`'s.
912  These are the flux-conserved catalogs with heavy footprints with
913  the image data weighted by the multiband templates.
914  If `self.config.conserveFlux` is `False`, then this item will be None
915  templateCatalogs: dict or None
916  Keys are the names of the filters and the values are
917  `lsst.afw.table.source.source.SourceCatalog`'s.
918  These are catalogs with heavy footprints that are the templates
919  created by the multiband templates.
920  If `self.config.saveTemplates` is `False`, then this item will be None
921  """
922  from lsst.meas.deblender.baseline import newDeblend
923 
924  if tuple(psfs.keys()) != mExposure.filters:
925  msg = "PSF keys must be the same as mExposure.filters ({0}), got {1}"
926  raise ValueError(msg.format(mExposure.filters, psfs.keys()))
927 
928  filters = mExposure.filters
929  mMaskedImage = afwImage.MultibandMaskedImage(filters=mExposure.filters, image=mExposure.image,
930  mask=mExposure.mask, variance=mExposure.variance)
931  self.log.info("Deblending {0} sources in {1} exposures".format(len(sources), len(mExposure)))
932 
933  # find the median stdev in each image
934  sigmas = {}
935  for f in filters:
936  exposure = mExposure[f]
937  mi = exposure.getMaskedImage()
938  statsCtrl = afwMath.StatisticsControl()
939  statsCtrl.setAndMask(mi.getMask().getPlaneBitMask(self.config.maskPlanes))
940  stats = afwMath.makeStatistics(mi.getVariance(), mi.getMask(), afwMath.MEDIAN, statsCtrl)
941  sigma1 = math.sqrt(stats.getValue(afwMath.MEDIAN))
942  self.log.trace('Exposure {0}, sigma1: {1}'.format(f, sigma1))
943  sigmas[f] = sigma1
944 
945  # Create the output catalogs
946  if self.config.conserveFlux:
947  fluxCatalogs = {}
948  for f in filters:
949  _catalog = afwTable.SourceCatalog(sources.table.clone())
950  _catalog.extend(sources)
951  fluxCatalogs[f] = _catalog
952  else:
953  fluxCatalogs = None
954  if self.config.saveTemplates:
955  templateCatalogs = {}
956  for f in filters:
957  _catalog = afwTable.SourceCatalog(sources.table.clone())
958  _catalog.extend(sources)
959  templateCatalogs[f] = _catalog
960  else:
961  templateCatalogs = None
962 
963  n0 = len(sources)
964  nparents = 0
965  for pk, src in enumerate(sources):
966  foot = src.getFootprint()
967  logger.info("id: {0}".format(src["id"]))
968  peaks = foot.getPeaks()
969 
970  # Since we use the first peak for the parent object, we should propagate its flags
971  # to the parent source.
972  src.assign(peaks[0], self.peakSchemaMapper)
973 
974  # Block of Skipping conditions
975  if len(peaks) < 2 and not self.config.processSingles:
976  for f in filters:
977  if self.config.saveTemplates:
978  templateCatalogs[f][pk].set(self.runtimeKey, 0)
979  if self.config.conserveFlux:
980  fluxCatalogs[f][pk].set(self.runtimeKey, 0)
981  continue
982  if self.isLargeFootprint(foot):
983  src.set(self.tooBigKey, True)
984  self.skipParent(src, [mi.getMask() for mi in mMaskedImage])
985  self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
986  continue
987  if self.isMasked(foot, exposure.getMaskedImage().getMask()):
988  src.set(self.maskedKey, True)
989  self.skipParent(src, [mi.getMask() for mi in mMaskedImage])
990  self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
991  continue
992  if 0 < self.config.maxNumberOfPeaks < len(peaks):
993  src.set(self.tooManyPeaksKey, True)
994  msg = 'Parent {0}: Too many peaks, using the first {1} peaks'
995  self.log.trace(msg.format(int(src.getId()), self.config.maxNumberOfPeaks))
996 
997  nparents += 1
998  bbox = foot.getBBox()
999  psf_fwhms = {f: self._getPsfFwhm(psf, bbox) for f, psf in psfs.items()}
1000  self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(peaks))
1001  self.preSingleDeblendHook(mExposure.singles, sources, pk, foot, psfs, psf_fwhms, sigmas)
1002  npre = len(sources)
1003  # Run the deblender
1004  try:
1005  t0 = time.time()
1006  # Build the parameter lists with the same ordering
1007  images = mMaskedImage[:, bbox]
1008  psf_list = [psfs[f] for f in filters]
1009  fwhm_list = [psf_fwhms[f] for f in filters]
1010  avgNoise = [sigmas[f] for f in filters]
1011 
1012  result = newDeblend(debPlugins=self.plugins,
1013  footprint=foot,
1014  mMaskedImage=images,
1015  psfs=psf_list,
1016  psfFwhms=fwhm_list,
1017  avgNoise=avgNoise,
1018  maxNumberOfPeaks=self.config.maxNumberOfPeaks)
1019  tf = time.time()
1020  runtime = (tf-t0)*1000
1021  if result.failed:
1022  src.set(self.deblendFailedKey, True)
1023  src.set(self.runtimeKey, 0)
1024  continue
1025  except Exception as e:
1026  if self.config.catchFailures:
1027  self.log.warn("Unable to deblend source %d: %s" % (src.getId(), e))
1028  src.set(self.deblendFailedKey, True)
1029  src.set(self.runtimeKey, 0)
1030  import traceback
1031  traceback.print_exc()
1032  continue
1033  else:
1034  raise
1035 
1036  # Add the merged source as a parent in the catalog for each band
1037  templateParents = {}
1038  fluxParents = {}
1039  parentId = src.getId()
1040  for f in filters:
1041  if self.config.saveTemplates:
1042  templateParents[f] = templateCatalogs[f][pk]
1043  templateParents[f].set(self.runtimeKey, runtime)
1044  if self.config.conserveFlux:
1045  fluxParents[f] = fluxCatalogs[f][pk]
1046  fluxParents[f].set(self.runtimeKey, runtime)
1047 
1048  # Add each source to the catalogs in each band
1049  templateSpans = {f: afwGeom.SpanSet() for f in filters}
1050  fluxSpans = {f: afwGeom.SpanSet() for f in filters}
1051  nchild = 0
1052  for j, multiPeak in enumerate(result.peaks):
1053  heavy = {f: peak.getFluxPortion() for f, peak in multiPeak.deblendedPeaks.items()}
1054  no_flux = all([v is None for v in heavy.values()])
1055  skip_peak = all([peak.skip for peak in multiPeak.deblendedPeaks.values()])
1056  if no_flux or skip_peak:
1057  src.set(self.deblendSkippedKey, True)
1058  if not self.config.propagateAllPeaks:
1059  # We don't care
1060  continue
1061  # We need to preserve the peak: make sure we have enough info to create a minimal
1062  # child src
1063  msg = "Peak at {0} failed deblending. Using minimal default info for child."
1064  self.log.trace(msg.format(multiPeak.x, multiPeak.y))
1065 
1066  # copy the full footprint and strip out extra peaks
1067  pfoot = afwDet.Footprint(foot)
1068  peakList = pfoot.getPeaks()
1069  peakList.clear()
1070  pfoot.addPeak(multiPeak.x, multiPeak.y, 0)
1071  zeroMimg = afwImage.MaskedImageF(pfoot.getBBox())
1072  for f in filters:
1073  heavy[f] = afwDet.makeHeavyFootprint(pfoot, zeroMimg)
1074  else:
1075  src.set(self.deblendSkippedKey, False)
1076 
1077  # Add the peak to the source catalog in each band
1078  for f in filters:
1079  if len(heavy[f].getPeaks()) != 1:
1080  err = "Heavy footprint should have a single peak, got {0}"
1081  raise ValueError(err.format(len(heavy[f].getPeaks())))
1082  peak = multiPeak.deblendedPeaks[f]
1083  if self.config.saveTemplates:
1084  cat = templateCatalogs[f]
1085  tfoot = peak.templateFootprint
1086  timg = afwImage.MaskedImageF(peak.templateImage)
1087  tHeavy = afwDet.makeHeavyFootprint(tfoot, timg)
1088  child = self._addChild(parentId, peak, cat, tHeavy)
1089  if parentId == 0:
1090  child.setId(src.getId())
1091  child.set(self.runtimeKey, runtime)
1092  else:
1093  templateSpans[f] = templateSpans[f].union(tHeavy.getSpans())
1094  if self.config.conserveFlux:
1095  cat = fluxCatalogs[f]
1096  child = self._addChild(parentId, peak, cat, heavy[f])
1097  if parentId == 0:
1098  child.setId(src.getId())
1099  child.set(self.runtimeKey, runtime)
1100  else:
1101  fluxSpans[f] = fluxSpans[f].union(heavy[f].getSpans())
1102  nchild += 1
1103 
1104  # Child footprints may extend beyond the full extent of their parent's which
1105  # results in a failure of the replace-by-noise code to reinstate these pixels
1106  # to their original values. The following updates the parent footprint
1107  # in-place to ensure it contains the full union of itself and all of its
1108  # children's footprints.
1109  for f in filters:
1110  if self.config.saveTemplates:
1111  templateParents[f].set(self.nChildKey, nchild)
1112  templateParents[f].getFootprint().setSpans(templateSpans[f])
1113  if self.config.conserveFlux:
1114  fluxParents[f].set(self.nChildKey, nchild)
1115  fluxParents[f].getFootprint().setSpans(fluxSpans[f])
1116 
1117  self.postSingleDeblendHook(mExposure.singles, fluxCatalogs, templateCatalogs,
1118  pk, npre, foot, psfs, psf_fwhms, sigmas, result)
1119 
1120  if fluxCatalogs is not None:
1121  n1 = len(list(fluxCatalogs.values())[0])
1122  else:
1123  n1 = len(list(templateCatalogs.values())[0])
1124  self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
1125  % (n0, nparents, n1-n0, n1))
1126  return fluxCatalogs, templateCatalogs
1127 
1128  def preSingleDeblendHook(self, exposures, sources, pk, fp, psfs, psf_fwhms, sigmas):
1129  pass
1130 
1131  def postSingleDeblendHook(self, exposures, fluxCatalogs, templateCatalogs,
1132  pk, npre, fp, psfs, psf_fwhms, sigmas, result):
1133  pass
1134 
1135  def isLargeFootprint(self, footprint):
1136  """Returns whether a Footprint is large
1137 
1138  'Large' is defined by thresholds on the area, size and axis ratio.
1139  These may be disabled independently by configuring them to be non-positive.
1140 
1141  This is principally intended to get rid of satellite streaks, which the
1142  deblender or other downstream processing can have trouble dealing with
1143  (e.g., multiple large HeavyFootprints can chew up memory).
1144  """
1145  if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
1146  return True
1147  if self.config.maxFootprintSize > 0:
1148  bbox = footprint.getBBox()
1149  if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
1150  return True
1151  if self.config.minFootprintAxisRatio > 0:
1152  axes = afwEll.Axes(footprint.getShape())
1153  if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
1154  return True
1155  return False
1156 
1157  def isMasked(self, footprint, mask):
1158  """Returns whether the footprint violates the mask limits"""
1159  size = float(footprint.getArea())
1160  for maskName, limit in self.config.maskLimits.items():
1161  maskVal = mask.getPlaneBitMask(maskName)
1162  unmaskedSpan = footprint.spans.intersectNot(mask, maskVal) # spanset of unmasked pixels
1163  if (size - unmaskedSpan.getArea())/size > limit:
1164  return True
1165  return False
1166 
1167  def skipParent(self, source, masks):
1168  """Indicate that the parent source is not being deblended
1169 
1170  We set the appropriate flags and masks for each exposure.
1171 
1172  Parameters
1173  ----------
1174  source: `lsst.afw.table.source.source.SourceRecord`
1175  The source to flag as skipped
1176  masks: list of `lsst.afw.image.MaskX`
1177  The mask in each band to update with the non-detection
1178  """
1179  fp = source.getFootprint()
1180  source.set(self.deblendSkippedKey, True)
1181  source.set(self.nChildKey, len(fp.getPeaks())) # It would have this many if we deblended them all
1182  if self.config.notDeblendedMask:
1183  for mask in masks:
1184  mask.addMaskPlane(self.config.notDeblendedMask)
1185  fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))
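
# A minimal usage sketch for MultibandDeblendTask, assuming a MultibandExposure
# built from per-band coadds and a merged detection catalog (here called
# ``mExposure`` and ``mergedSources``); the names are illustrative only.
#
#     schema = afwTable.SourceTable.makeMinimalSchema()
#     deblendTask = MultibandDeblendTask(schema=schema)
#     fluxCatalogs, templateCatalogs = deblendTask.run(mExposure, mergedSources)
#     # each return value maps filter name -> lsst.afw.table.SourceCatalog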