multiBand.py
1 #!/usr/bin/env python
2 #
3 # LSST Data Management System
4 # Copyright 2008-2015 AURA/LSST.
5 #
6 # This product includes software developed by the
7 # LSST Project (http://www.lsst.org/).
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the LSST License Statement and
20 # the GNU General Public License along with this program. If not,
21 # see <https://www.lsstcorp.org/LegalNotices/>.
22 #
23 from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
24 from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
25  PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
26 import lsst.pipe.base.connectionTypes as cT
27 from lsst.pex.config import Config, Field, ConfigurableField
28 from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader
29 from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
30 from lsst.meas.deblender import SourceDeblendTask
31 from lsst.meas.extensions.scarlet import ScarletDeblendTask
32 from lsst.pipe.tasks.coaddBase import getSkyInfo
33 from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
34 from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
35 from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
36 from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
37 from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
38 import lsst.afw.image as afwImage
39 import lsst.afw.table as afwTable
40 import lsst.afw.math as afwMath
41 from lsst.daf.base import PropertyList
42 
43 from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
44 from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
45 from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
46 from .multiBandUtils import getInputSchema, getShortFilterName, readCatalog, _makeMakeIdFactory # noqa: F401
47 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
48 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
49 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
50 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
51 
52 
53 """
54 New dataset types:
55 * deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
56 * deepCoadd_mergeDet: merged detections (tract, patch)
57 * deepCoadd_meas: measurements of merged detections (tract, patch, filter)
58 * deepCoadd_ref: reference sources (tract, patch)
59 All of these have associated *_schema catalogs that require no data ID and hold no records.
60 
61 In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
62 the mergeDet, meas, and ref dataset Footprints:
63 * deepCoadd_peak_schema
64 """
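# The dataset types listed above are ordinary Butler datasets. The sketch below
# is illustrative only: the repository path and data ID values are placeholders,
# and the helper itself is not part of the pipeline.
def _exampleReadMultiBandDatasets(repoPath="REPO_PATH"):
    """Illustrative sketch: read back the multi-band dataset types documented
    above from a Gen2 Butler repository (``repoPath`` is a placeholder)."""
    from lsst.daf.persistence import Butler

    butler = Butler(repoPath)
    dataId = dict(tract=0, patch="5,4", filter="HSC-I")
    det = butler.get("deepCoadd_det", dataId)                          # per-band detections
    mergeDet = butler.get("deepCoadd_mergeDet", tract=0, patch="5,4")  # band-independent merge
    meas = butler.get("deepCoadd_meas", dataId)                        # per-band measurements
    peakSchema = butler.get("deepCoadd_peak_schema").schema            # schema-only dataset
    return det, mergeDet, meas, peakSchema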
65 
66 
67 
68 class DetectCoaddSourcesConnections(PipelineTaskConnections,
69  dimensions=("tract", "patch", "band", "skymap"),
70  defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
71  detectionSchema = cT.InitOutput(
72  doc="Schema of the detection catalog",
73  name="{outputCoaddName}Coadd_det_schema",
74  storageClass="SourceCatalog",
75  )
76  exposure = cT.Input(
77  doc="Exposure on which detections are to be performed",
78  name="{inputCoaddName}Coadd",
79  storageClass="ExposureF",
80  dimensions=("tract", "patch", "band", "skymap")
81  )
82  outputBackgrounds = cT.Output(
83  doc="Output Backgrounds used in detection",
84  name="{outputCoaddName}Coadd_calexp_background",
85  storageClass="Background",
86  dimensions=("tract", "patch", "band", "skymap")
87  )
88  outputSources = cT.Output(
89  doc="Detected sources catalog",
90  name="{outputCoaddName}Coadd_det",
91  storageClass="SourceCatalog",
92  dimensions=("tract", "patch", "band", "skymap")
93  )
94  outputExposure = cT.Output(
95  doc="Exposure post detection",
96  name="{outputCoaddName}Coadd_calexp",
97  storageClass="ExposureF",
98  dimensions=("tract", "patch", "band", "skymap")
99  )
100 
101 
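# The connection names declared above are templates: "{outputCoaddName}Coadd_det"
# is expanded with the ``defaultTemplates`` (or per-pipeline overrides) into a
# concrete dataset type name. A minimal sketch of that expansion, using plain
# str.format on the same template strings (the helper is hypothetical):
def _exampleConnectionNameExpansion():
    """Illustrative sketch: expand the default name templates of
    DetectCoaddSourcesConnections into concrete dataset type names."""
    templates = {"inputCoaddName": "deep", "outputCoaddName": "deep"}
    return {
        "exposure": "{inputCoaddName}Coadd".format(**templates),                # "deepCoadd"
        "outputSources": "{outputCoaddName}Coadd_det".format(**templates),      # "deepCoadd_det"
        "outputExposure": "{outputCoaddName}Coadd_calexp".format(**templates),  # "deepCoadd_calexp"
    }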
102 class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
103  """!
104  @anchor DetectCoaddSourcesConfig_
105 
106  @brief Configuration parameters for the DetectCoaddSourcesTask
107  """
108  doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
109  scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
110  detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
111  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
112  doInsertFakes = Field(dtype=bool, default=False,
113  doc="Run fake sources injection task")
114  insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
115  doc="Injection of fake sources for testing "
116  "purposes (must be retargeted)")
117  hasFakes = Field(
118  dtype=bool,
119  default=False,
120  doc="Should be set to True if fake sources have been inserted into the input data."
121  )
122 
123  def setDefaults(self):
124  super().setDefaults()
125  self.detection.thresholdType = "pixel_stdev"
126  self.detection.isotropicGrow = True
127  # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
128  self.detection.reEstimateBackground = False
129  self.detection.background.useApprox = False
130  self.detection.background.binSize = 4096
131  self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
132  self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender
133 
134 
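# A hedged configuration sketch for the class above. The detection fields come
# from SourceDetectionConfig (which the DynamicDetectionTask config extends);
# the values shown are placeholders, not recommended defaults, and the helper
# is hypothetical.
def _exampleDetectCoaddSourcesConfig():
    """Illustrative sketch: build a DetectCoaddSourcesConfig in Python and
    adjust a couple of detection parameters."""
    config = DetectCoaddSourcesConfig()
    config.doScaleVariance = True
    config.detection.thresholdValue = 5.0   # detection threshold, in units of sigma
    config.detection.nSigmaToGrow = 2.4     # grow footprints by this many PSF sigmas
    return config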
140 
141 
142 class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
143  r"""!
144  @anchor DetectCoaddSourcesTask_
145 
146  @brief Detect sources on a coadd
147 
148  @section pipe_tasks_multiBand_Contents Contents
149 
150  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
151  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
152  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
153  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
154  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
155  - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example
156 
157  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description
158 
159  Command-line task that detects sources on a coadd of exposures obtained with a single filter.
160 
161  Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
162  properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
163  in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
164  propagate the full covariance matrix -- but it is simple and works well in practice.
165 
166  After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
167  SourceDetectionTask_ "detection" subtask.
168 
169  @par Inputs:
170  deepCoadd{tract,patch,filter}: ExposureF
171  @par Outputs:
172  deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
173  @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
174  exposure (ExposureF)
175  @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
176  @par Data Unit:
177  tract, patch, filter
178 
179  DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
180  You can retarget this subtask if you wish.
181 
182  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization
183 
184  @copydoc \_\_init\_\_
185 
186  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task
187 
188  @copydoc run
189 
190  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters
191 
192  See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"
193 
194  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables
195 
196  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
197  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
198  files.
199 
200  DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
201  @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
202  @ref SourceDetectionTask_ "SourceDetectionTask" for further information.
203 
204  @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
205  of using DetectCoaddSourcesTask
206 
207  DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
208  the task is to update the background, detect all sources in a single band and generate a set of parent
209  footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
210  eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
211  reference to the coadd to be processed. A list of the available optional arguments can be obtained by
212  calling detectCoaddSources.py with the `--help` command line argument:
213  @code
214  detectCoaddSources.py --help
215  @endcode
216 
217  To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
218  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
219  steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
220  @code
221  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
222  @endcode
223  This will process the HSC-I band data. The results are written to
224  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.
225 
226  It is also necessary to run:
227  @code
228  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
229  @endcode
230  to generate the source catalogs for the HSC-R band required by the next step in the multi-band
231  processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
232  """
233  _DefaultName = "detectCoaddSources"
234  ConfigClass = DetectCoaddSourcesConfig
235  getSchemaCatalogs = _makeGetSchemaCatalogs("det")
236  makeIdFactory = _makeMakeIdFactory("CoaddId")
237 
238  @classmethod
239  def _makeArgumentParser(cls):
240  parser = ArgumentParser(name=cls._DefaultName)
241  parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
242  ContainerClass=ExistingCoaddDataIdContainer)
243  return parser
244 
245  def __init__(self, schema=None, **kwargs):
246  """!
247  @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.
248 
249  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
250 
251  @param[in] schema: initial schema for the output catalog, modified in-place to include all
252  fields set by this task. If None, the source minimal schema will be used.
253  @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
254  """
255  # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
256  # call structure has been reviewed carefully to be sure super will work as intended.
257  super().__init__(**kwargs)
258  if schema is None:
259  schema = afwTable.SourceTable.makeMinimalSchema()
260  if self.config.doInsertFakes:
261  self.makeSubtask("insertFakes")
262  self.schema = schema
263  self.makeSubtask("detection", schema=self.schema)
264  if self.config.doScaleVariance:
265  self.makeSubtask("scaleVariance")
266 
267  self.detectionSchema = afwTable.SourceCatalog(self.schema)
268 
269  def runDataRef(self, patchRef):
270  """!
271  @brief Run detection on a coadd.
272 
273  Invokes @ref run and then uses @ref write to output the
274  results.
275 
276  @param[in] patchRef: data reference for patch
277  """
278  if self.config.hasFakes:
279  exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
280  else:
281  exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
282  expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
283  results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
284  self.write(results, patchRef)
285  return results
286 
287  def runQuantum(self, butlerQC, inputRefs, outputRefs):
288  inputs = butlerQC.get(inputRefs)
289  packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch_band", returnMaxBits=True)
290  inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
291  inputs["expId"] = packedId
292  outputs = self.run(**inputs)
293  butlerQC.put(outputs, outputRefs)
294 
295  def run(self, exposure, idFactory, expId):
296  """!
297  @brief Run detection on an exposure.
298 
299  First scale the variance plane to match the observed variance
300  using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
301  detect sources.
302 
303  @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
304  depending on configuration).
305  @param[in] idFactory: IdFactory to set source identifiers
306  @param[in] expId: Exposure identifier (integer) for RNG seed
307 
308  @return a pipe.base.Struct with fields
309  - sources: catalog of detections
310  - backgrounds: list of backgrounds
311  """
312  if self.config.doScaleVariance:
313  varScale = self.scaleVariance.run(exposure.maskedImage)
314  exposure.getMetadata().add("VARIANCE_SCALE", varScale)
315  backgrounds = afwMath.BackgroundList()
316  if self.config.doInsertFakes:
317  self.insertFakes.run(exposure, background=backgrounds)
318  table = afwTable.SourceTable.make(self.schema, idFactory)
319  detections = self.detection.run(table, exposure, expId=expId)
320  sources = detections.sources
321  fpSets = detections.fpSets
322  if hasattr(fpSets, "background") and fpSets.background:
323  for bg in fpSets.background:
324  backgrounds.append(bg)
325  return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
326 
327  def write(self, results, patchRef):
328  """!
329  @brief Write out results from run.
330 
331  @param[in] results: Struct returned from run
333  @param[in] patchRef: data reference for patch
334  """
335  coaddName = self.config.coaddName + "Coadd"
336  patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
337  patchRef.put(results.outputSources, coaddName + "_det")
338  if self.config.hasFakes:
339  patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
340  else:
341  patchRef.put(results.outputExposure, coaddName + "_calexp")
342 
343 
344 
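# The command-line examples in the docstring above drive the task through the
# Gen2 CmdLineTask machinery. A minimal in-memory sketch (``exposure`` is
# assumed to be a coadd ExposureF already loaded by the caller; ``expId`` is a
# placeholder integer used only to seed the detection RNG):
def _exampleRunDetection(exposure, expId=0):
    """Illustrative sketch: run DetectCoaddSourcesTask.run directly on an
    in-memory coadd exposure."""
    task = DetectCoaddSourcesTask()
    idFactory = afwTable.IdFactory.makeSimple()
    result = task.run(exposure, idFactory, expId=expId)
    return result.outputSources, result.outputBackgrounds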
345 
346 class DeblendCoaddSourcesConfig(Config):
347  """DeblendCoaddSourcesConfig
348 
349  Configuration parameters for the `DeblendCoaddSourcesTask`.
350  """
351  singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
352  doc="Deblend sources separately in each band")
353  multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
354  doc="Deblend sources simultaneously across bands")
355  simultaneous = Field(dtype=bool, default=False, doc="Simultaneously deblend all bands?")
356  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
357  hasFakes = Field(dtype=bool,
358  default=False,
359  doc="Should be set to True if fake sources have been inserted into the input data.")
360 
361  def setDefaults(self):
362  Config.setDefaults(self)
363  self.singleBandDeblend.propagateAllPeaks = True
364 
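# Whether deblending runs once per band (SourceDeblendTask) or simultaneously
# across bands (ScarletDeblendTask) is controlled entirely by ``simultaneous``.
# A minimal selection sketch (the helper is hypothetical; values are
# placeholders):
def _exampleDeblendConfig(useScarlet=True):
    """Illustrative sketch: configure DeblendCoaddSourcesTask for either the
    single-band or the simultaneous (scarlet) deblender."""
    config = DeblendCoaddSourcesConfig()
    config.simultaneous = useScarlet   # True -> multiBandDeblend, False -> singleBandDeblend
    config.coaddName = "deep"
    return config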
365 
366 class DeblendCoaddSourcesRunner(MergeSourcesRunner):
367  """Task runner for `DeblendCoaddSourcesTask`
368 
369  Required because the run method requires a list of
370  dataRefs rather than a single dataRef.
371  """
372  @staticmethod
373  def getTargetList(parsedCmd, **kwargs):
374  """Provide a list of patch reference lists, one per tract and patch, spanning all filters.
375 
376  Parameters
377  ----------
378  parsedCmd:
379  The parsed command
380  kwargs:
381  Keyword arguments passed to the task
382 
383  Returns
384  -------
385  targetList: list
386  List of tuples, where each tuple is a (patchRefList, kwargs) pair; patchRefList holds the
387  per-filter data references for one tract and patch.
387  """
388  refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
389  kwargs["psfCache"] = parsedCmd.psfCache
390  return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
391 
392 
393 class DeblendCoaddSourcesTask(CmdLineTask):
394  """Deblend the sources in a merged catalog
395 
396  Deblend sources from master catalog in each coadd.
397  This can either be done separately in each band using the HSC-SDSS deblender
398  (`DeblendCoaddSourcesTask.config.simultaneous==False`)
399  or use SCARLET to simultaneously fit the blend in all bands
400  (`DeblendCoaddSourcesTask.config.simultaneous==True`).
401  The task will set its own `self.schema` attribute to the `Schema` of the
402  output deblended catalog.
403  This will include all fields from the input `Schema`, as well as additional fields
404  from the deblender.
405 
409 
410  Parameters
411  ----------
412  Butler used to read the input schemas from disk,
413  if `schema` or `peakSchema` is `None`.
414  construct the reference catalog loader, if `schema` or `peakSchema` or
415  schema: `Schema`
416  The schema of the merged detection catalog as an input to this task.
417  peakSchema: `Schema`
418  The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
419  """
420  ConfigClass = DeblendCoaddSourcesConfig
421  RunnerClass = DeblendCoaddSourcesRunner
422  _DefaultName = "deblendCoaddSources"
423  makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
424 
425  @classmethod
426  def _makeArgumentParser(cls):
427  parser = ArgumentParser(name=cls._DefaultName)
428  parser.add_id_argument("--id", "deepCoadd_calexp",
429  help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
430  ContainerClass=ExistingCoaddDataIdContainer)
431  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
432  return parser
433 
434  def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
435  CmdLineTask.__init__(self, **kwargs)
436  if schema is None:
437  assert butler is not None, "Neither butler nor schema is defined"
438  schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
439  self.schemaMapper = afwTable.SchemaMapper(schema)
440  self.schemaMapper.addMinimalSchema(schema)
441  self.schema = self.schemaMapper.getOutputSchema()
442  if peakSchema is None:
443  assert butler is not None, "Neither butler nor peakSchema is defined"
444  peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema
445 
446  if self.config.simultaneous:
447  self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
448  else:
449  self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
450 
451  def getSchemaCatalogs(self):
452  """Return a dict of empty catalogs for each catalog dataset produced by this task.
453 
454  Returns
455  -------
456  result: dict
457  Dictionary of empty catalogs, with catalog names as keys.
458  """
459  catalog = afwTable.SourceCatalog(self.schema)
460  return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
461  self.config.coaddName + "Coadd_deblendedModel": catalog}
462 
463  def runDataRef(self, patchRefList, psfCache=100):
464  """Deblend the patch
465 
466  Deblend each source simultaneously or separately
467  (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
470  Write the deblended sources out.
471 
472  Parameters
473  ----------
474  patchRefList: list
475  List of data references for each filter
476  """
477 
478  if self.config.hasFakes:
479  coaddType = "fakes_" + self.config.coaddName
480  else:
481  coaddType = self.config.coaddName
482 
483  if self.config.simultaneous:
484  # Use SCARLET to simultaneously deblend across filters
485  filters = []
486  exposures = []
487  for patchRef in patchRefList:
488  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
489  filters.append(patchRef.dataId["filter"])
490  exposures.append(exposure)
491  # The input sources are the same for all bands, since it is a merged catalog
492  sources = self.readSources(patchRef)
493  exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
494  fluxCatalogs, templateCatalogs = self.multiBandDeblend.run(exposure, sources)
495  for n in range(len(patchRefList)):
496  fluxCat = fluxCatalogs if fluxCatalogs is None else fluxCatalogs[filters[n]]
497  self.write(patchRefList[n], fluxCat, templateCatalogs[filters[n]])
498  else:
499  # Use the single-band deblender to deblend each band separately
500  for patchRef in patchRefList:
501  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
502  exposure.getPsf().setCacheCapacity(psfCache)
503  sources = self.readSources(patchRef)
504  self.singleBandDeblend.run(exposure, sources)
505  self.write(patchRef, sources)
506 
507  def readSources(self, dataRef):
508  """Read merged catalog
509 
510  Read the catalog of merged detections and create a catalog
511  in a single band.
512 
513  Parameters
514  ----------
515  dataRef: data reference
516  Data reference for catalog of merged detections
517 
518  Returns
519  -------
520  sources: `SourceCatalog`
521  List of sources in merged catalog
522 
523  We also need to add columns to hold the measurements we're about to make
524  so we can measure in-place.
525  """
526  merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
527  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
528  idFactory = self.makeIdFactory(dataRef)
529  for s in merged:
530  idFactory.notify(s.getId())
531  table = afwTable.SourceTable.make(self.schema, idFactory)
532  sources = afwTable.SourceCatalog(table)
533  sources.extend(merged, self.schemaMapper)
534  return sources
535 
536  def write(self, dataRef, flux_sources, template_sources=None):
537  """Write the source catalog(s)
538 
539  Parameters
540  ----------
541  dataRef: Data Reference
542  Reference to the output catalog.
543  flux_sources: `SourceCatalog`
544  Flux conserved sources to write to file.
545  If using the single band deblender, this is the catalog
546  generated.
547  template_sources: `SourceCatalog`
548  Source catalog using the multiband template models
549  as footprints.
550  """
551  # The multiband deblender does not have to conserve flux,
552  # so only write the flux conserved catalog if it exists
553  if flux_sources is not None:
554  assert not self.config.simultaneous or self.config.multiBandDeblend.conserveFlux
555  dataRef.put(flux_sources, self.config.coaddName + "Coadd_deblendedFlux")
556  self.log.info("Wrote %d sources: %s" % (len(flux_sources), dataRef.dataId))
557  # Only the multiband deblender has the option to output the
558  # template model catalog, which can optionally be used
559  # in MeasureMergedCoaddSources
560  if template_sources is not None:
561  assert self.config.multiBandDeblend.saveTemplates
562  dataRef.put(template_sources, self.config.coaddName + "Coadd_deblendedModel")
563  self.log.info("Wrote %d sources: %s" % (len(template_sources), dataRef.dataId))
564 
565  def writeMetadata(self, dataRefList):
566  """Write the metadata produced from processing the data.
567  Parameters
568  ----------
569  dataRefList
570  List of Butler data references used to write the metadata.
571  The metadata is written to dataset type `CmdLineTask._getMetadataName`.
572  """
573  for dataRef in dataRefList:
574  try:
575  metadataName = self._getMetadataName()
576  if metadataName is not None:
577  dataRef.put(self.getFullMetadata(), metadataName)
578  except Exception as e:
579  self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
580 
581  def getExposureId(self, dataRef):
582  """Get the ExposureId from a data reference
583  """
584  return int(dataRef.get(self.config.coaddName + "CoaddId"))
585 
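# A minimal Gen2 driving sketch for the task above. It assumes a Butler for a
# repository in which detection and detection merging have already been run
# (so the *_mergeDet and *_peak schema datasets exist), and a list with one
# deepCoadd_calexp data reference per band for a single tract and patch; these
# are assumptions made for illustration, not something this module provides.
def _exampleDeblendPatch(butler, patchRefList):
    """Illustrative sketch: deblend one patch across all bands through the
    Gen2 entry point runDataRef."""
    config = DeblendCoaddSourcesConfig()
    config.simultaneous = False              # use the per-band deblender here
    task = DeblendCoaddSourcesTask(butler=butler, config=config)
    task.runDataRef(patchRefList, psfCache=100)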
586 
587 class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections, dimensions=("tract", "patch", "band", "skymap"),
588  defaultTemplates={"inputCoaddName": "deep",
589  "outputCoaddName": "deep"}):
590  inputSchema = cT.InitInput(
591  doc="Input schema for measure merged task produced by a deblender or detection task",
592  name="{inputCoaddName}Coadd_deblendedFlux_schema",
593  storageClass="SourceCatalog"
594  )
595  outputSchema = cT.InitOutput(
596  doc="Output schema after all new fields are added by task",
597  name="{inputCoaddName}Coadd_meas_schema",
598  storageClass="SourceCatalog"
599  )
600  refCat = cT.PrerequisiteInput(
601  doc="Reference catalog used to match measured sources against known sources",
602  name="ref_cat",
603  storageClass="SimpleCatalog",
604  dimensions=("skypix",),
605  deferLoad=True,
606  multiple=True
607  )
608  exposure = cT.Input(
609  doc="Input coadd image",
610  name="{inputCoaddName}Coadd_calexp",
611  storageClass="ExposureF",
612  dimensions=("tract", "patch", "band", "skymap")
613  )
614  skyMap = cT.Input(
615  doc="SkyMap to use in processing",
616  name="{inputCoaddName}Coadd_skyMap",
617  storageClass="SkyMap",
618  dimensions=("skymap",),
619  )
620  visitCatalogs = cT.Input(
621  doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
622  "further filtered in the task for the purpose of propagating flags from image calibration "
623  "and characterization to coadd objects",
624  name="src",
625  dimensions=("instrument", "visit", "detector"),
626  storageClass="SourceCatalog",
627  multiple=True
628  )
629  inputCatalog = cT.Input(
630  doc=("Name of the input catalog to use. "
631  "If the single band deblender was used this should be 'deblendedFlux'. "
632  "If the multi-band deblender was used this should be 'deblendedModel', "
633  "or 'deblendedFlux' if the multiband deblender was configured to output "
634  "deblended flux catalogs. If no deblending was performed this should "
635  "be 'mergeDet'."),
636  name="{inputCoaddName}Coadd_deblendedFlux",
637  storageClass="SourceCatalog",
638  dimensions=("tract", "patch", "band", "skymap"),
639  )
640  outputSources = cT.Output(
641  doc="Source catalog containing all the measurement information generated in this task",
642  name="{outputCoaddName}Coadd_meas",
643  dimensions=("tract", "patch", "band", "skymap"),
644  storageClass="SourceCatalog",
645  )
646  matchResult = cT.Output(
647  doc="Match catalog produced by configured matcher, optional on doMatchSources",
648  name="{outputCoaddName}Coadd_measMatch",
649  dimensions=("tract", "patch", "band", "skymap"),
650  storageClass="Catalog",
651  )
652  denormMatches = cT.Output(
653  doc="Denormalized Match catalog produced by configured matcher, optional on "
654  "doWriteMatchesDenormalized",
655  name="{outputCoaddName}Coadd_measMatchFull",
656  dimensions=("tract", "patch", "band", "skymap"),
657  storageClass="Catalog",
658  )
659 
660  def __init__(self, *, config=None):
661  super().__init__(config=config)
662  if config.doPropagateFlags is False:
663  self.inputs -= set(("visitCatalogs",))
664 
665  if config.doMatchSources is False:
666  self.outputs -= set(("matchResult",))
667 
668  if config.doWriteMatchesDenormalized is False:
669  self.outputs -= set(("denormMatches",))
670 
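# The constructor above prunes optional connections when the corresponding
# steps are disabled in the configuration. A small sketch of that behaviour,
# assuming only the config class defined below in this module (the helper is
# hypothetical):
def _exampleConnectionPruning():
    """Illustrative sketch: disabling doMatchSources removes the optional
    match outputs from the connections."""
    config = MeasureMergedCoaddSourcesConfig()
    config.doMatchSources = False
    connections = MeasureMergedCoaddSourcesConnections(config=config)
    return "matchResult" in connections.outputs   # False when matching is disabled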
671 
672 class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
673  pipelineConnections=MeasureMergedCoaddSourcesConnections):
674  """!
675  @anchor MeasureMergedCoaddSourcesConfig_
676 
677  @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
678  """
679  inputCatalog = Field(dtype=str, default="deblendedFlux",
680  doc=("Name of the input catalog to use. "
681  "If the single band deblender was used this should be 'deblendedFlux'. "
682  "If the multi-band deblender was used this should be 'deblendedModel'. "
683  "If no deblending was performed this should be 'mergeDet'."))
684  measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
685  setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
686  doPropagateFlags = Field(
687  dtype=bool, default=True,
688  doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
689  )
690  propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
691  doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
692  match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
693  doWriteMatchesDenormalized = Field(
694  dtype=bool,
695  default=False,
696  doc=("Write reference matches in denormalized format? "
697  "This format uses more disk space, but is more convenient to read."),
698  )
699  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
700  psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
701  checkUnitsParseStrict = Field(
702  doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
703  dtype=str,
704  default="raise",
705  )
706  doApCorr = Field(
707  dtype=bool,
708  default=True,
709  doc="Apply aperture corrections"
710  )
711  applyApCorr = ConfigurableField(
712  target=ApplyApCorrTask,
713  doc="Subtask to apply aperture corrections"
714  )
715  doRunCatalogCalculation = Field(
716  dtype=bool,
717  default=True,
718  doc='Run catalogCalculation task'
719  )
720  catalogCalculation = ConfigurableField(
721  target=CatalogCalculationTask,
722  doc="Subtask to run catalogCalculation plugins on catalog"
723  )
724 
725  hasFakes = Field(
726  dtype=bool,
727  default=False,
728  doc="Should be set to True if fake sources have been inserted into the input data."
729  )
730 
731  @property
732  def refObjLoader(self):
733  return self.match.refObjLoader
734 
735  def setDefaults(self):
736  super().setDefaults()
737  self.measurement.plugins.names |= ['base_InputCount',
738  'base_Variance',
739  'base_LocalPhotoCalib',
740  'base_LocalWcs']
741  self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
742  'INEXACT_PSF']
743  self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
744  'INEXACT_PSF']
745 
746  def validate(self):
747  super().validate()
748  refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
749  if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
750  raise ValueError(
751  f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
752  f"are different. These options must be kept in sync until Gen2 is retired."
753  )
754 
755 
756 
762 
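# A hedged override sketch for the configuration above, e.g. the body of a
# --configfile override applied on the command line. base_GaussianFlux is a
# standard meas_base plugin, shown only as an example; the values are
# placeholders and the helper is hypothetical.
def _exampleMeasurementOverrides(config):
    """Illustrative sketch: enable denormalized reference matches and add an
    extra measurement plugin to a MeasureMergedCoaddSourcesConfig."""
    config.doMatchSources = True
    config.doWriteMatchesDenormalized = True
    config.measurement.plugins.names |= ["base_GaussianFlux"]
    config.psfCache = 200   # placeholder cache size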
763 
764 class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
765  """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
766  @staticmethod
767  def getTargetList(parsedCmd, **kwargs):
768  return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
769 
770 
771 class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
772  r"""!
773  @anchor MeasureMergedCoaddSourcesTask_
774 
775  @brief Deblend sources from master catalog in each coadd separately and measure.
776 
777  @section pipe_tasks_multiBand_Contents Contents
778 
779  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
780  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
781  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
782  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
783  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
784  - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
785 
786  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
787 
788  Command-line task that uses peaks and footprints from a master catalog to perform deblending and
789  measurement in each coadd.
790 
791  Given a master input catalog of sources (peaks and footprints) or deblender outputs
792  (including a HeavyFootprint in each band), measure each source on the
793  coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
794  consistent set of child sources.
795 
796  The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
797  properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
798  flags are propagated to the coadd sources.
799 
800  Optionally, we can match the coadd sources to an external reference catalog.
801 
802  @par Inputs:
803  deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
804  @n deepCoadd_calexp{tract,patch,filter}: ExposureF
805  @par Outputs:
806  deepCoadd_meas{tract,patch,filter}: SourceCatalog
807  @par Data Unit:
808  tract, patch, filter
809 
810  MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
811 
812  <DL>
813  <DT> @ref SingleFrameMeasurementTask_ "measurement"
814  <DD> Measure source properties of deblended sources.</DD>
815  <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
816  <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
817  not at the edge of the field and that have either not been deblended or are the children of deblended
818  sources</DD>
819  <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
820  <DD> Propagate flags set in individual visits to the coadd.</DD>
821  <DT> @ref DirectMatchTask_ "match"
822  <DD> Match input sources to a reference catalog (optional).
823  </DD>
824  </DL>
825  These subtasks may be retargeted as required.
826 
827  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
828 
829  @copydoc \_\_init\_\_
830 
831  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
832 
833  @copydoc run
834 
835  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
836 
837  See @ref MeasureMergedCoaddSourcesConfig_
838 
839  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
840 
841  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
842  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
843  files.
844 
845  MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
846  the various sub-tasks. See the documentation for individual sub-tasks for more information.
847 
848  @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
849  MeasureMergedCoaddSourcesTask
850 
851  After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
852  The next stage in the multi-band processing procedure will merge these measurements into a suitable
853  catalog for driving forced photometry.
854 
855  Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
856  to be processed.
857  A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
858  `--help` command line argument:
859  @code
860  measureCoaddSources.py --help
861  @endcode
862 
863  To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band
864  processing, we
864  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
865  step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
866  coadd as follows:
867  @code
868  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
869  @endcode
870  This will process the HSC-I band data. The results are written to
871  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.
872 
873  It is also necessary to run
874  @code
875  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
876  @endcode
877  to generate the source catalogs for the HSC-R band required by the next step in the multi-band
878  procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
879  """
880  _DefaultName = "measureCoaddSources"
881  ConfigClass = MeasureMergedCoaddSourcesConfig
882  RunnerClass = MeasureMergedCoaddSourcesRunner
883  getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
884  makeIdFactory = _makeMakeIdFactory("MergedCoaddId") # The IDs we already have are of this type
885 
886  @classmethod
887  def _makeArgumentParser(cls):
888  parser = ArgumentParser(name=cls._DefaultName)
889  parser.add_id_argument("--id", "deepCoadd_calexp",
890  help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
891  ContainerClass=ExistingCoaddDataIdContainer)
892  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
893  return parser
894 
895  def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
896  **kwargs):
897  """!
898  @brief Initialize the task.
899 
900  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
901  @param[in] schema: the schema of the merged detection catalog used as input to this one
902  @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
903  @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
904  catalog. May be None if the loader can be constructed from the butler argument or all steps
905  requiring a reference catalog are disabled.
906  @param[in] butler: a butler used to read the input schemas from disk or construct the reference
907  catalog loader, if schema or peakSchema or refObjLoader is None
908 
909  The task will set its own self.schema attribute to the schema of the output measurement catalog.
910  This will include all fields from the input schema, as well as additional fields for all the
911  measurements.
912  """
913  super().__init__(**kwargs)
914  self.deblended = self.config.inputCatalog.startswith("deblended")
915  self.inputCatalog = "Coadd_" + self.config.inputCatalog
916  if initInputs is not None:
917  schema = initInputs['inputSchema'].schema
918  if schema is None:
919  assert butler is not None, "Neither butler nor schema is defined"
920  schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
921  self.schemaMapper = afwTable.SchemaMapper(schema)
922  self.schemaMapper.addMinimalSchema(schema)
923  self.schema = self.schemaMapper.getOutputSchema()
924  self.algMetadata = PropertyList()
925  self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
926  self.makeSubtask("setPrimaryFlags", schema=self.schema)
927  if self.config.doMatchSources:
928  self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
929  if self.config.doPropagateFlags:
930  self.makeSubtask("propagateFlags", schema=self.schema)
931  self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
932  if self.config.doApCorr:
933  self.makeSubtask("applyApCorr", schema=self.schema)
934  if self.config.doRunCatalogCalculation:
935  self.makeSubtask("catalogCalculation", schema=self.schema)
936 
937  self.outputSchema = afwTable.SourceCatalog(self.schema)
938 
939  def runQuantum(self, butlerQC, inputRefs, outputRefs):
940  inputs = butlerQC.get(inputRefs)
941 
942  refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
943  inputs.pop('refCat'), config=self.config.refObjLoader,
944  log=self.log)
945  self.match.setRefObjLoader(refObjLoader)
946 
947  # Set the PSF cache capacity
948  # TODO: move this into run() once the Gen2 code path is retired
949  inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)
950 
951  # Get unique integer ID for IdFactory and RNG seeds
952  packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
953  inputs['exposureId'] = packedId
954  idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
955  # Transform inputCatalog
956  table = afwTable.SourceTable.make(self.schema, idFactory)
957  sources = afwTable.SourceCatalog(table)
958  sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
959  table = sources.getTable()
960  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
961  inputs['sources'] = sources
962 
963  skyMap = inputs.pop('skyMap')
964  tractNumber = inputRefs.inputCatalog.dataId['tract']
965  tractInfo = skyMap[tractNumber]
966  patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
967  skyInfo = Struct(
968  skyMap=skyMap,
969  tractInfo=tractInfo,
970  patchInfo=patchInfo,
971  wcs=tractInfo.getWcs(),
972  bbox=patchInfo.getOuterBBox()
973  )
974  inputs['skyInfo'] = skyInfo
975 
976  if self.config.doPropagateFlags:
977  # Filter out any visit catalog that is not coadd inputs
978  ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
979  visitKey = ccdInputs.schema.find("visit").key
980  ccdKey = ccdInputs.schema.find("ccd").key
981  inputVisitIds = set()
982  ccdRecordsWcs = {}
983  for ccdRecord in ccdInputs:
984  visit = ccdRecord.get(visitKey)
985  ccd = ccdRecord.get(ccdKey)
986  inputVisitIds.add((visit, ccd))
987  ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
988 
989  inputCatalogsToKeep = []
990  inputCatalogWcsUpdate = []
991  for i, dataRef in enumerate(inputRefs.visitCatalogs):
992  key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
993  if key in inputVisitIds:
994  inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
995  inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
996  inputs['visitCatalogs'] = inputCatalogsToKeep
997  inputs['wcsUpdates'] = inputCatalogWcsUpdate
998  inputs['ccdInputs'] = ccdInputs
999 
1000  outputs = self.run(**inputs)
1001  butlerQC.put(outputs, outputRefs)
1002 
1003  def runDataRef(self, patchRef, psfCache=100):
1004  """!
1005  @brief Deblend and measure.
1006 
1007  @param[in] patchRef: Patch reference.
1008 
1009  Set 'is-primary' and related flags. Propagate flags
1010  from individual visits. Optionally match the sources to a reference catalog and write the matches.
1011  Finally, write the deblended sources and measurements out.
1012  """
1013  if self.config.hasFakes:
1014  coaddType = "fakes_" + self.config.coaddName
1015  else:
1016  coaddType = self.config.coaddName
1017  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
1018  exposure.getPsf().setCacheCapacity(psfCache)
1019  sources = self.readSources(patchRef)
1020  table = sources.getTable()
1021  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
1022  skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
1023 
1024  if self.config.doPropagateFlags:
1025  ccdInputs = self.propagateFlags.getCcdInputs(exposure)
1026  else:
1027  ccdInputs = None
1028 
1029  results = self.run(exposure=exposure, sources=sources,
1030  ccdInputs=ccdInputs,
1031  skyInfo=skyInfo, butler=patchRef.getButler(),
1032  exposureId=self.getExposureId(patchRef))
1033 
1034  if self.config.doMatchSources:
1035  self.writeMatches(patchRef, results)
1036  self.write(patchRef, results.outputSources)
1037 
1038  def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
1039  butler=None):
1040  """Run measurement algorithms on the input exposure, and optionally populate the
1041  resulting catalog with extra information.
1042 
1043  Parameters
1044  ----------
1045  exposure : `lsst.afw.image.Exposure`
1046  The input exposure on which measurements are to be performed
1047  sources : `lsst.afw.table.SourceCatalog`
1048  A catalog built from the results of merged detections, or
1049  deblender outputs.
1050  skyInfo : `lsst.pipe.base.Struct`
1051  A struct containing information about the position of the input exposure within
1052  a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
1053  exposureId : `int` or `bytes`
1054  packed unique number or bytes unique to the input exposure
1055  ccdInputs : `lsst.afw.table.ExposureCatalog`
1056  Catalog containing information on the individual visits which went into making
1057  the exposure
1058  visitCatalogs : list of `lsst.afw.table.SourceCatalog` or `None`
1059  A list of source catalogs corresponding to measurements made on the individual
1060  visits which went into the input exposure. If `None` and butler is `None` then
1061  the task cannot propagate visit flags to the output catalog.
1062  wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
1063  If visitCatalogs is not `None` this should be a list of wcs objects which correspond
1064  to the input visits. Used to put all coordinates into a common system. If `None` and
1065  butler is `None` then the task cannot propagate visit flags to the output catalog.
1066  butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
1067  Either a gen2 or gen3 butler used to load visit catalogs
1068 
1069  Returns
1070  -------
1071  results : `lsst.pipe.base.Struct`
1072  Results of running measurement task. Will contain the catalog in the
1073  sources attribute. Optionally will have results of matching to a
1074  reference catalog in the matchResults attribute, and denormalized
1075  matches in the denormMatches attribute.
1076  """
1077  self.measurement.run(sources, exposure, exposureId=exposureId)
1078 
1079  if self.config.doApCorr:
1080  self.applyApCorr.run(
1081  catalog=sources,
1082  apCorrMap=exposure.getInfo().getApCorrMap()
1083  )
1084 
1085  # TODO DM-11568: this contiguous check-and-copy could go away if we
1086  # reserve enough space during SourceDetection and/or SourceDeblend.
1087  # NOTE: sourceSelectors require contiguous catalogs, so ensure
1088  # contiguity now, so views are preserved from here on.
1089  if not sources.isContiguous():
1090  sources = sources.copy(deep=True)
1091 
1092  if self.config.doRunCatalogCalculation:
1093  self.catalogCalculation.run(sources)
1094 
1095  self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
1096  patchInfo=skyInfo.patchInfo, includeDeblend=self.deblended)
1097  if self.config.doPropagateFlags:
1098  self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)
1099 
1100  results = Struct()
1101 
1102  if self.config.doMatchSources:
1103  matchResult = self.match.run(sources, exposure.getInfo().getFilter().getName())
1104  matches = afwTable.packMatches(matchResult.matches)
1105  matches.table.setMetadata(matchResult.matchMeta)
1106  results.matchResult = matches
1107  if self.config.doWriteMatchesDenormalized:
1108  if matchResult.matches:
1109  denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
1110  else:
1111  self.log.warn("No matches, so generating dummy denormalized matches file")
1112  denormMatches = afwTable.BaseCatalog(afwTable.Schema())
1113  denormMatches.setMetadata(PropertyList())
1114  denormMatches.getMetadata().add("COMMENT",
1115  "This catalog is empty because no matches were found.")
1117  results.denormMatches = denormMatches
1118 
1119  results.outputSources = sources
1120  return results
1121 
1122  def readSources(self, dataRef):
1123  """!
1124  @brief Read input sources.
1125 
1126  @param[in] dataRef: Data reference for catalog of merged detections
1127  @return List of sources in merged catalog
1128 
1129  We also need to add columns to hold the measurements we're about to make
1130  so we can measure in-place.
1131  """
1132  merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1133  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
1134  idFactory = self.makeIdFactory(dataRef)
1135  for s in merged:
1136  idFactory.notify(s.getId())
1137  table = afwTable.SourceTable.make(self.schema, idFactory)
1138  sources = afwTable.SourceCatalog(table)
1139  sources.extend(merged, self.schemaMapper)
1140  return sources
1141 
1142  def writeMatches(self, dataRef, results):
1143  """!
1144  @brief Write matches of the sources to the astrometric reference catalog.
1145 
1146  @param[in] dataRef: data reference
1147  @param[in] results: results struct from run method
1148  """
1149  if hasattr(results, "matchResult"):
1150  dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
1151  if hasattr(results, "denormMatches"):
1152  dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1153 
1154  def write(self, dataRef, sources):
1155  """!
1156  @brief Write the source catalog.
1157 
1158  @param[in] dataRef: data reference
1159  @param[in] sources: source catalog
1160  """
1161  dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1162  self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
1163 
1164  def getExposureId(self, dataRef):
1165  return int(dataRef.get(self.config.coaddName + "CoaddId"))
1166 
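# A minimal Gen2 driving sketch for the measurement task above. It assumes a
# Butler for a repository in which detection, merging and deblending have
# already been run, plus a single deepCoadd_calexp data reference; both are
# assumptions made for illustration only.
def _exampleMeasurePatch(butler, patchRef):
    """Illustrative sketch: measure one band of one patch through the Gen2
    entry point runDataRef, with reference matching disabled."""
    config = MeasureMergedCoaddSourcesConfig()
    config.doMatchSources = False        # avoid needing a reference catalog here
    # Keep the Gen2 and Gen3 reference catalog names consistent so that
    # config.validate() (defined above) does not object.
    config.refObjLoader.ref_dataset_name = config.connections.refCat
    task = MeasureMergedCoaddSourcesTask(butler=butler, config=config)
    task.runDataRef(patchRef, psfCache=100)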