multiBand.py
1 #!/usr/bin/env python
2 #
3 # LSST Data Management System
4 # Copyright 2008-2015 AURA/LSST.
5 #
6 # This product includes software developed by the
7 # LSST Project (http://www.lsst.org/).
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the LSST License Statement and
20 # the GNU General Public License along with this program. If not,
21 # see <https://www.lsstcorp.org/LegalNotices/>.
22 #
23 from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
24 from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
25  PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
26 import lsst.pipe.base.connectionTypes as cT
27 from lsst.pex.config import Config, Field, ConfigurableField
28 from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader
29 from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
30 from lsst.meas.deblender import SourceDeblendTask
31 from lsst.meas.extensions.scarlet import ScarletDeblendTask
32 from lsst.pipe.tasks.coaddBase import getSkyInfo
33 from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
34 from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
35 from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
36 from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
37 from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
38 import lsst.afw.image as afwImage
39 import lsst.afw.table as afwTable
40 import lsst.afw.math as afwMath
41 from lsst.daf.base import PropertyList
42 from lsst.skymap import BaseSkyMap
43 from lsst.obs.base import ExposureIdInfo
44 
45 # NOTE: these imports are a convenience so multiband users only have to import this file.
46 from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
47 from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
48 from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
49 from .multiBandUtils import getInputSchema, readCatalog, _makeMakeIdFactory # noqa: F401
50 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
51 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
52 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
53 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
54 
55 
56 """
57 New dataset types:
58 * deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
59 * deepCoadd_mergeDet: merged detections (tract, patch)
60 * deepCoadd_meas: measurements of merged detections (tract, patch, filter)
61 * deepCoadd_ref: reference sources (tract, patch)
62 All of these have associated *_schema catalogs that require no data ID and hold no records.
63 
64 In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
65 the mergeDet, meas, and ref dataset Footprints:
66 * deepCoadd_peak_schema
67 """
68 
69 
70 
71 class DetectCoaddSourcesConnections(PipelineTaskConnections,
72  dimensions=("tract", "patch", "band", "skymap"),
73  defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
74  detectionSchema = cT.InitOutput(
75  doc="Schema of the detection catalog",
76  name="{outputCoaddName}Coadd_det_schema",
77  storageClass="SourceCatalog",
78  )
79  exposure = cT.Input(
80  doc="Exposure on which detections are to be performed",
81  name="{inputCoaddName}Coadd",
82  storageClass="ExposureF",
83  dimensions=("tract", "patch", "band", "skymap")
84  )
85  outputBackgrounds = cT.Output(
86  doc="Output Backgrounds used in detection",
87  name="{outputCoaddName}Coadd_calexp_background",
88  storageClass="Background",
89  dimensions=("tract", "patch", "band", "skymap")
90  )
91  outputSources = cT.Output(
92  doc="Detected sources catalog",
93  name="{outputCoaddName}Coadd_det",
94  storageClass="SourceCatalog",
95  dimensions=("tract", "patch", "band", "skymap")
96  )
97  outputExposure = cT.Output(
98  doc="Exposure post detection",
99  name="{outputCoaddName}Coadd_calexp",
100  storageClass="ExposureF",
101  dimensions=("tract", "patch", "band", "skymap")
102  )
103 
104 
105 class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
106  """!
107  @anchor DetectCoaddSourcesConfig_
108 
109  @brief Configuration parameters for the DetectCoaddSourcesTask
110  """
111  doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
112  scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
113  detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
114  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
115  doInsertFakes = Field(dtype=bool, default=False,
116  doc="Run fake sources injection task")
117  insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
118  doc="Injection of fake sources for testing "
119  "purposes (must be retargeted)")
120  hasFakes = Field(
121  dtype=bool,
122  default=False,
123  doc="Should be set to True if fake sources have been inserted into the input data."
124  )
125 
126  def setDefaults(self):
127  super().setDefaults()
128  self.detection.thresholdType = "pixel_stdev"
129  self.detection.isotropicGrow = True
130  # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
131  self.detection.reEstimateBackground = False
132  self.detection.background.useApprox = False
133  self.detection.background.binSize = 4096
134  self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
135  self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender
136 
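These defaults can be adjusted through an ordinary pex_config override file (passed to detectCoaddSources.py with --configfile, or applied as a task config block in a Gen3 pipeline). A sketch with illustrative values only; the filename is hypothetical:

    # detectCoaddSourcesOverrides.py -- illustrative values, not recommendations
    config.doScaleVariance = True                 # rescale the variance plane before detection
    config.detection.thresholdValue = 5.0         # threshold in units of pixel_stdev
    config.detection.doTempWideBackground = True
    config.connections.inputCoaddName = "deep"    # Gen3 dataset-name template (Gen2 uses config.coaddName)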
137 
143 
144 
145 class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
146  r"""!
147  @anchor DetectCoaddSourcesTask_
148 
149  @brief Detect sources on a coadd
150 
151  @section pipe_tasks_multiBand_Contents Contents
152 
153  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
154  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
155  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
156  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
157  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
158  - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example
159 
160  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description
161 
162  Command-line task that detects sources on a coadd of exposures obtained with a single filter.
163 
164  Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
165  properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
166  in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
167  propagate the full covariance matrix -- but it is simple and works well in practice.
168 
169  After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
170  SourceDetectionTask_ "detection" subtask.
171 
172  @par Inputs:
173  deepCoadd{tract,patch,filter}: ExposureF
174  @par Outputs:
175  deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
176  @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
177  exposure (ExposureF)
178  @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
179  @par Data Unit:
180  tract, patch, filter
181 
182  DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
183  You can retarget this subtask if you wish.
184 
185  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization
186 
187  @copydoc \_\_init\_\_
188 
189  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task
190 
191  @copydoc run
192 
193  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters
194 
195  See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"
196 
197  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables
198 
199  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
200  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
201  files.
202 
203  DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
204  @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
205  @ref SourceDetectionTask_ "SourceDetectionTask" for further information.
206 
207  @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
208  of using DetectCoaddSourcesTask
209 
210  DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
211  the task is to update the background, detect all sources in a single band and generate a set of parent
212  footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
213  eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
214  reference to the coadd to be processed. A list of the available optional arguments can be obtained by
215  calling detectCoaddSources.py with the `--help` command line argument:
216  @code
217  detectCoaddSources.py --help
218  @endcode
219 
220  To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
221  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
222  steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
223  @code
224  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
225  @endcode
226  This will process the HSC-I band data. The results are written to
227  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.
228 
229  It is also necessary to run:
230  @code
231  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
232  @endcode
233  to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
234  processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
235  """
236  _DefaultName = "detectCoaddSources"
237  ConfigClass = DetectCoaddSourcesConfig
238  getSchemaCatalogs = _makeGetSchemaCatalogs("det")
239  makeIdFactory = _makeMakeIdFactory("CoaddId")
240 
241  @classmethod
242  def _makeArgumentParser(cls):
243  parser = ArgumentParser(name=cls._DefaultName)
244  parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
245  ContainerClass=ExistingCoaddDataIdContainer)
246  return parser
247 
248  def __init__(self, schema=None, **kwargs):
249  """!
250  @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.
251 
252  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
253 
254  @param[in] schema: initial schema for the output catalog, modified in place to include all
255  fields set by this task. If None, the source minimal schema will be used.
256  @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
257  """
258  # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
259  # call structure has been reviewed carefully to be sure super will work as intended.
260  super().__init__(**kwargs)
261  if schema is None:
262  schema = afwTable.SourceTable.makeMinimalSchema()
263  if self.config.doInsertFakes:
264  self.makeSubtask("insertFakes")
265  self.schema = schema
266  self.makeSubtask("detection", schema=self.schema)
267  if self.config.doScaleVariance:
268  self.makeSubtask("scaleVariance")
269 
270  self.detectionSchema = afwTable.SourceCatalog(self.schema)
271 
272  def runDataRef(self, patchRef):
273  """!
274  @brief Run detection on a coadd.
275 
276  Invokes @ref run and then uses @ref write to output the
277  results.
278 
279  @param[in] patchRef: data reference for patch
280  """
281  if self.config.hasFakes:
282  exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
283  else:
284  exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
285  expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
286  results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
287  self.write(results, patchRef)
288  return results
289 
290  def runQuantum(self, butlerQC, inputRefs, outputRefs):
291  inputs = butlerQC.get(inputRefs)
292  exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
293  inputs["idFactory"] = exposureIdInfo.makeSourceIdFactory()
294  inputs["expId"] = exposureIdInfo.expId
295  outputs = self.run(**inputs)
296  butlerQC.put(outputs, outputRefs)
297 
298  def run(self, exposure, idFactory, expId):
299  """!
300  @brief Run detection on an exposure.
301 
302  First scale the variance plane to match the observed variance
303  using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
304  detect sources.
305 
306  @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
307  depending on configuration).
308  @param[in] idFactory: IdFactory to set source identifiers
309  @param[in] expId: Exposure identifier (integer) for RNG seed
310 
311  @return a pipe.base.Struct with fields
312  - sources: catalog of detections
313  - backgrounds: list of backgrounds
314  """
315  if self.config.doScaleVariance:
316  varScale = self.scaleVariance.run(exposure.maskedImage)
317  exposure.getMetadata().add("VARIANCE_SCALE", varScale)
318  backgrounds = afwMath.BackgroundList()
319  if self.config.doInsertFakes:
320  self.insertFakes.run(exposure, background=backgrounds)
321  table = afwTable.SourceTable.make(self.schema, idFactory)
322  detections = self.detection.run(table, exposure, expId=expId)
323  sources = detections.sources
324  fpSets = detections.fpSets
325  if hasattr(fpSets, "background") and fpSets.background:
326  for bg in fpSets.background:
327  backgrounds.append(bg)
328  return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
329 
330  def write(self, results, patchRef):
331  """!
332  @brief Write out results from run.
333 
335  @param[in] results: Struct returned from run
336  @param[in] patchRef: data reference for patch
337  """
338  coaddName = self.config.coaddName + "Coadd"
339  patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
340  patchRef.put(results.outputSources, coaddName + "_det")
341  if self.config.hasFakes:
342  patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
343  else:
344  patchRef.put(results.outputExposure, coaddName + "_calexp")
345 
346 
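The run method can also be invoked directly on an in-memory coadd. A minimal sketch, assuming `coaddExposure` is an lsst.afw.image.ExposureF already loaded (e.g. via a butler) and using an arbitrary expId:

    import lsst.afw.table as afwTable
    from lsst.pipe.tasks.multiBand import DetectCoaddSourcesTask

    task = DetectCoaddSourcesTask()
    idFactory = afwTable.IdFactory.makeSimple()          # stand-in for the butler-backed ID factory
    results = task.run(coaddExposure, idFactory, expId=0)
    print(len(results.outputSources), "parent footprints detected")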
347 
348 
349 class DeblendCoaddSourcesConfig(Config):
350  """DeblendCoaddSourcesConfig
351 
352  Configuration parameters for the `DeblendCoaddSourcesTask`.
353  """
354  singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
355  doc="Deblend sources separately in each band")
356  multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
357  doc="Deblend sources simultaneously across bands")
358  simultaneous = Field(dtype=bool,
359  default=True,
360  doc="Simultaneously deblend all bands? "
361  "True uses `multibandDeblend` while False uses `singleBandDeblend`")
362  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
363  hasFakes = Field(dtype=bool,
364  default=False,
365  doc="Should be set to True if fake sources have been inserted into the input data.")
366 
367  def setDefaults(self):
368  Config.setDefaults(self)
369  self.singleBandDeblend.propagateAllPeaks = True
370 
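A brief, illustrative config override showing how to switch between the two deblending modes this config selects:

    # deblendCoaddSourcesOverrides.py -- illustrative values only
    config.simultaneous = False                      # use the single-band deblender instead of scarlet
    config.singleBandDeblend.propagateAllPeaks = True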
371 
372 class DeblendCoaddSourcesRunner(MergeSourcesRunner):
373  """Task runner for the `MergeSourcesTask`
374 
375  Required because the run method requires a list of
376  dataRefs rather than a single dataRef.
377  """
378  @staticmethod
379  def getTargetList(parsedCmd, **kwargs):
380  """Provide a list of patch references for each patch, tract, filter combo.
381 
382  Parameters
383  ----------
384  parsedCmd:
385  The parsed command
386  kwargs:
387  Keyword arguments passed to the task
388 
389  Returns
390  -------
391  targetList: list
392  List of tuples, where each tuple is a (dataRef, kwargs) pair.
393  """
394  refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
395  kwargs["psfCache"] = parsedCmd.psfCache
396  return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
397 
398 
399 class DeblendCoaddSourcesTask(CmdLineTask):
400  """Deblend the sources in a merged catalog
401 
402  Deblend sources from master catalog in each coadd.
403  This can either be done separately in each band using the HSC-SDSS deblender
404  (`DeblendCoaddSourcesTask.config.simultaneous==False`)
405  or use SCARLET to simultaneously fit the blend in all bands
406  (`DeblendCoaddSourcesTask.config.simultaneous==True`).
407  The task will set its own `self.schema` attribute to the `Schema` of the
408  output deblended catalog.
409  This will include all fields from the input `Schema`, as well as additional fields
410  from the deblender.
411 
412  `pipe.tasks.multiband.DeblendCoaddSourcesTask` Description
413  ----------------------------------------------------------
415 
416  Parameters
417  ----------
418  butler: `Butler`
419  Butler used to read the input schemas from disk;
420  required if `schema` or `peakSchema` is None
421  schema: `Schema`
422  The schema of the merged detection catalog as an input to this task.
423  peakSchema: `Schema`
424  The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
425  """
426  ConfigClass = DeblendCoaddSourcesConfig
427  RunnerClass = DeblendCoaddSourcesRunner
428  _DefaultName = "deblendCoaddSources"
429  makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
430 
431  @classmethod
432  def _makeArgumentParser(cls):
433  parser = ArgumentParser(name=cls._DefaultName)
434  parser.add_id_argument("--id", "deepCoadd_calexp",
435  help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
436  ContainerClass=ExistingCoaddDataIdContainer)
437  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
438  return parser
439 
440  def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
441  CmdLineTask.__init__(self, **kwargs)
442  if schema is None:
443  assert butler is not None, "Neither butler nor schema is defined"
444  schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
445  self.schemaMapper = afwTable.SchemaMapper(schema)
446  self.schemaMapper.addMinimalSchema(schema)
447  self.schema = self.schemaMapper.getOutputSchema()
448  if peakSchema is None:
449  assert butler is not None, "Neither butler nor peakSchema is defined"
450  peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema
451 
452  if self.config.simultaneous:
453  self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
454  else:
455  self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
456 
457  def getSchemaCatalogs(self):
458  """Return a dict of empty catalogs for each catalog dataset produced by this task.
459 
460  Returns
461  -------
462  result: dict
463  Dictionary of empty catalogs, with catalog names as keys.
464  """
465  catalog = afwTable.SourceCatalog(self.schema)
466  return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
467  self.config.coaddName + "Coadd_deblendedModel": catalog}
468 
469  def runDataRef(self, patchRefList, psfCache=100):
470  """Deblend the patch
471 
472  Deblend each source simultaneously or separately
473  (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
474  Set `is-primary` and related flags.
475  Propagate flags from individual visits.
476  Write the deblended sources out.
477 
478  Parameters
479  ----------
480  patchRefList: list
481  List of data references for each filter
482  """
483 
484  if self.config.hasFakes:
485  coaddType = "fakes_" + self.config.coaddName
486  else:
487  coaddType = self.config.coaddName
488 
489  if self.config.simultaneous:
490  # Use SCARLET to simultaneously deblend across filters
491  filters = []
492  exposures = []
493  for patchRef in patchRefList:
494  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
495  filter = patchRef.get(coaddType + "Coadd_filterLabel", immediate=True)
496  filters.append(filter.bandLabel)
497  exposures.append(exposure)
498  # The input sources are the same for all bands, since it is a merged catalog
499  sources = self.readSources(patchRef)
500  exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
501  templateCatalogs = self.multiBandDeblend.run(exposure, sources)
502  for n in range(len(patchRefList)):
503  self.write(patchRefList[n], templateCatalogs[filters[n]])
504  else:
505  # Use the single-band deblender to deblend each band separately
506  for patchRef in patchRefList:
507  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
508  exposure.getPsf().setCacheCapacity(psfCache)
509  sources = self.readSources(patchRef)
510  self.singleBandDeblend.run(exposure, sources)
511  self.write(patchRef, sources)
512 
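The simultaneous branch above stacks one calexp per band into a MultibandExposure before calling the deblender. A standalone sketch of the same pattern; the band labels and per-band exposures are placeholders assumed to be loaded elsewhere:

    import lsst.afw.image as afwImage

    bands = ["g", "r", "i"]                      # placeholder band labels
    exposures = [gCoadd, rCoadd, iCoadd]         # per-band ExposureF coadds of the same patch
    mExposure = afwImage.MultibandExposure.fromExposures(bands, exposures)
    print(mExposure.filters)                     # band labels, in the order supplied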
513  def readSources(self, dataRef):
514  """Read merged catalog
515 
516  Read the catalog of merged detections and create a catalog
517  in a single band.
518 
519  Parameters
520  ----------
521  dataRef: data reference
522  Data reference for catalog of merged detections
523 
524  Returns
525  -------
526  sources: `SourceCatalog`
527  List of sources in merged catalog
528 
529  We also need to add columns to hold the fields the deblender will fill,
530  so we can deblend in place.
531  """
532  merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
533  self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
534  idFactory = self.makeIdFactory(dataRef)
535  for s in merged:
536  idFactory.notify(s.getId())
537  table = afwTable.SourceTable.make(self.schema, idFactory)
538  sources = afwTable.SourceCatalog(table)
539  sources.extend(merged, self.schemaMapper)
540  return sources
541 
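readSources relies on the SchemaMapper built in __init__ to copy merged-detection records into the wider output schema. A self-contained sketch of that copy-and-extend pattern; the input catalog and the added field are trivial stand-ins:

    import lsst.afw.table as afwTable

    inputSchema = afwTable.SourceTable.makeMinimalSchema()
    inputCat = afwTable.SourceCatalog(inputSchema)   # stand-in for the merged detection catalog
    mapper = afwTable.SchemaMapper(inputSchema)
    mapper.addMinimalSchema(inputSchema)
    outputSchema = mapper.getOutputSchema()
    outputSchema.addField("deblend_nChild", type="I", doc="example field added by a deblender")
    outputCat = afwTable.SourceCatalog(outputSchema)
    outputCat.extend(inputCat, mapper)               # copy mapped fields into the new schema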
542  def write(self, dataRef, sources):
543  """Write the source catalog(s)
544 
545  Parameters
546  ----------
547  dataRef: Data Reference
548  Reference to the output catalog.
549  sources: `SourceCatalog`
550  Flux conserved sources to write to file.
551  If using the single band deblender, this is the catalog
552  generated.
556  """
557  dataRef.put(sources, self.config.coaddName + "Coadd_deblendedFlux")
558  self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)
559 
560  def writeMetadata(self, dataRefList):
561  """Write the metadata produced from processing the data.
562  Parameters
563  ----------
564  dataRefList
565  List of Butler data references used to write the metadata.
566  The metadata is written to dataset type `CmdLineTask._getMetadataName`.
567  """
568  for dataRef in dataRefList:
569  try:
570  metadataName = self._getMetadataName()
571  if metadataName is not None:
572  dataRef.put(self.getFullMetadata(), metadataName)
573  except Exception as e:
574  self.log.warning("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
575 
576  def getExposureId(self, dataRef):
577  """Get the ExposureId from a data reference
578  """
579  return int(dataRef.get(self.config.coaddName + "CoaddId"))
580 
581 
582 class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections, dimensions=("tract", "patch", "band", "skymap"),
583  defaultTemplates={"inputCoaddName": "deep",
584  "outputCoaddName": "deep"}):
585  inputSchema = cT.InitInput(
586  doc="Input schema for measure merged task produced by a deblender or detection task",
587  name="{inputCoaddName}Coadd_deblendedFlux_schema",
588  storageClass="SourceCatalog"
589  )
590  outputSchema = cT.InitOutput(
591  doc="Output schema after all new fields are added by task",
592  name="{inputCoaddName}Coadd_meas_schema",
593  storageClass="SourceCatalog"
594  )
595  refCat = cT.PrerequisiteInput(
596  doc="Reference catalog used to match measured sources against known sources",
597  name="ref_cat",
598  storageClass="SimpleCatalog",
599  dimensions=("skypix",),
600  deferLoad=True,
601  multiple=True
602  )
603  exposure = cT.Input(
604  doc="Input coadd image",
605  name="{inputCoaddName}Coadd_calexp",
606  storageClass="ExposureF",
607  dimensions=("tract", "patch", "band", "skymap")
608  )
609  skyMap = cT.Input(
610  doc="SkyMap to use in processing",
611  name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
612  storageClass="SkyMap",
613  dimensions=("skymap",),
614  )
615  visitCatalogs = cT.Input(
616  doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
617  "further filtered in the task for the purpose of propagating flags from image calibration "
618  "and characterization to codd objects",
619  name="src",
620  dimensions=("instrument", "visit", "detector"),
621  storageClass="SourceCatalog",
622  multiple=True
623  )
624  inputCatalog = cT.Input(
625  doc=("Name of the input catalog to use."
626  "If the single band deblender was used this should be 'deblendedFlux."
627  "If the multi-band deblender was used this should be 'deblendedModel, "
628  "or deblendedFlux if the multiband deblender was configured to output "
629  "deblended flux catalogs. If no deblending was performed this should "
630  "be 'mergeDet'"),
631  name="{inputCoaddName}Coadd_deblendedFlux",
632  storageClass="SourceCatalog",
633  dimensions=("tract", "patch", "band", "skymap"),
634  )
635  outputSources = cT.Output(
636  doc="Source catalog containing all the measurement information generated in this task",
637  name="{outputCoaddName}Coadd_meas",
638  dimensions=("tract", "patch", "band", "skymap"),
639  storageClass="SourceCatalog",
640  )
641  matchResult = cT.Output(
642  doc="Match catalog produced by configured matcher, optional on doMatchSources",
643  name="{outputCoaddName}Coadd_measMatch",
644  dimensions=("tract", "patch", "band", "skymap"),
645  storageClass="Catalog",
646  )
647  denormMatches = cT.Output(
648  doc="Denormalized Match catalog produced by configured matcher, optional on "
649  "doWriteMatchesDenormalized",
650  name="{outputCoaddName}Coadd_measMatchFull",
651  dimensions=("tract", "patch", "band", "skymap"),
652  storageClass="Catalog",
653  )
654 
655  def __init__(self, *, config=None):
656  super().__init__(config=config)
657  if config.doPropagateFlags is False:
658  self.inputs -= set(("visitCatalogs",))
659 
660  if config.doMatchSources is False:
661  self.outputs -= set(("matchResult",))
662 
663  if config.doWriteMatchesDenormalized is False:
664  self.outputs -= set(("denormMatches",))
665 
666 
667 class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
668  pipelineConnections=MeasureMergedCoaddSourcesConnections):
669  """!
670  @anchor MeasureMergedCoaddSourcesConfig_
671 
672  @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
673  """
674  inputCatalog = Field(dtype=str, default="deblendedFlux",
675  doc=("Name of the input catalog to use."
676  "If the single band deblender was used this should be 'deblendedFlux."
677  "If the multi-band deblender was used this should be 'deblendedModel."
678  "If no deblending was performed this should be 'mergeDet'"))
679  measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
680  setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
681  doPropagateFlags = Field(
682  dtype=bool, default=True,
683  doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
684  )
685  propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
686  doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
687  match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
688  doWriteMatchesDenormalized = Field(
689  dtype=bool,
690  default=False,
691  doc=("Write reference matches in denormalized format? "
692  "This format uses more disk space, but is more convenient to read."),
693  )
694  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
695  psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
696  checkUnitsParseStrict = Field(
697  doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
698  dtype=str,
699  default="raise",
700  )
701  doApCorr = Field(
702  dtype=bool,
703  default=True,
704  doc="Apply aperture corrections"
705  )
706  applyApCorr = ConfigurableField(
707  target=ApplyApCorrTask,
708  doc="Subtask to apply aperture corrections"
709  )
710  doRunCatalogCalculation = Field(
711  dtype=bool,
712  default=True,
713  doc='Run catalogCalculation task'
714  )
715  catalogCalculation = ConfigurableField(
716  target=CatalogCalculationTask,
717  doc="Subtask to run catalogCalculation plugins on catalog"
718  )
719 
720  hasFakes = Field(
721  dtype=bool,
722  default=False,
723  doc="Should be set to True if fake sources have been inserted into the input data."
724  )
725 
726  @property
727  def refObjLoader(self):
728  return self.match.refObjLoader
729 
730  def setDefaults(self):
731  super().setDefaults()
732  self.measurement.plugins.names |= ['base_InputCount',
733  'base_Variance',
734  'base_LocalPhotoCalib',
735  'base_LocalWcs']
736  self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
737  'INEXACT_PSF']
738  self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
739  'INEXACT_PSF']
740 
741  def validate(self):
742  super().validate()
743  refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
744  if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
745  raise ValueError(
746  f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
747  f"are different. These options must be kept in sync until Gen2 is retired."
748  )
749 
750 
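These options can likewise be set from a config override file for measureCoaddSources.py (values below are examples, not recommendations; the filename is hypothetical):

    # measureCoaddSourcesOverrides.py -- illustrative values only
    config.doMatchSources = True
    config.doWriteMatchesDenormalized = True   # also persist the denormalized match catalog
    config.psfCache = 200                      # size of the CoaddPsf cache
    config.checkUnitsParseStrict = "warn"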
751 
757 
758 
759 class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
760  """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
761  @staticmethod
762  def getTargetList(parsedCmd, **kwargs):
763  return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
764 
765 
766 class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
767  r"""!
768  @anchor MeasureMergedCoaddSourcesTask_
769 
770  @brief Deblend sources from master catalog in each coadd separately and measure.
771 
772  @section pipe_tasks_multiBand_Contents Contents
773 
774  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
775  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
776  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
777  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
778  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
779  - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
780 
781  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
782 
783  Command-line task that uses peaks and footprints from a master catalog to perform deblending and
784  measurement in each coadd.
785 
786  Given a master input catalog of sources (peaks and footprints) or deblender outputs
787  (including a HeavyFootprint in each band), measure each source on the
788  coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
789  consistent set of child sources.
790 
791  The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
792  properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
793  flags are propagated to the coadd sources.
794 
795  Optionally, we can match the coadd sources to an external reference catalog.
796 
797  @par Inputs:
798  deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
799  @n deepCoadd_calexp{tract,patch,filter}: ExposureF
800  @par Outputs:
801  deepCoadd_meas{tract,patch,filter}: SourceCatalog
802  @par Data Unit:
803  tract, patch, filter
804 
805  MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
806 
807  <DL>
808  <DT> @ref SingleFrameMeasurementTask_ "measurement"
809  <DD> Measure source properties of deblended sources.</DD>
810  <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
811  <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
812  not at the edge of the field and that have either not been deblended or are the children of deblended
813  sources</DD>
814  <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
815  <DD> Propagate flags set in individual visits to the coadd.</DD>
816  <DT> @ref DirectMatchTask_ "match"
817  <DD> Match input sources to a reference catalog (optional).
818  </DD>
819  </DL>
820  These subtasks may be retargeted as required.
821 
822  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
823 
824  @copydoc \_\_init\_\_
825 
826  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
827 
828  @copydoc run
829 
830  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
831 
832  See @ref MeasureMergedCoaddSourcesConfig_
833 
834  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
835 
836  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
837  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
838  files.
839 
840  MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
841  the various sub-tasks. See the documentation for individual sub-tasks for more information.
842 
843  @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
844  MeasureMergedCoaddSourcesTask
845 
846  After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
847  The next stage in the multi-band processing procedure will merge these measurements into a suitable
848  catalog for driving forced photometry.
849 
850  Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
851  to be processed.
852  A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
853  `--help` command line argument:
854  @code
855  measureCoaddSources.py --help
856  @endcode
857 
858  To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band processing, we
859  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
860  step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
861  coadd as follows:
862  @code
863  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
864  @endcode
865  This will process the HSC-I band data. The results are written to
866  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.
867 
868  It is also necessary to run
869  @code
870  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
871  @endcode
872  to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
873  procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
874  """
875  _DefaultName = "measureCoaddSources"
876  ConfigClass = MeasureMergedCoaddSourcesConfig
877  RunnerClass = MeasureMergedCoaddSourcesRunner
878  getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
879  makeIdFactory = _makeMakeIdFactory("MergedCoaddId") # The IDs we already have are of this type
880 
881  @classmethod
882  def _makeArgumentParser(cls):
883  parser = ArgumentParser(name=cls._DefaultName)
884  parser.add_id_argument("--id", "deepCoadd_calexp",
885  help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
886  ContainerClass=ExistingCoaddDataIdContainer)
887  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
888  return parser
889 
890  def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
891  **kwargs):
892  """!
893  @brief Initialize the task.
894 
895  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
896  @param[in] schema: the schema of the merged detection catalog used as input to this one
897  @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
898  @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
899  catalog. May be None if the loader can be constructed from the butler argument or all steps
900  requiring a reference catalog are disabled.
901  @param[in] butler: a butler used to read the input schemas from disk or construct the reference
902  catalog loader, if schema or peakSchema or refObjLoader is None
903 
904  The task will set its own self.schema attribute to the schema of the output measurement catalog.
905  This will include all fields from the input schema, as well as additional fields for all the
906  measurements.
907  """
908  super().__init__(**kwargs)
909  self.deblended = self.config.inputCatalog.startswith("deblended")
910  self.inputCatalog = "Coadd_" + self.config.inputCatalog
911  if initInputs is not None:
912  schema = initInputs['inputSchema'].schema
913  if schema is None:
914  assert butler is not None, "Neither butler nor schema is defined"
915  schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
916  self.schemaMapper = afwTable.SchemaMapper(schema)
917  self.schemaMapper.addMinimalSchema(schema)
918  self.schema = self.schemaMapper.getOutputSchema()
919  self.algMetadata = PropertyList()
920  self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
921  self.makeSubtask("setPrimaryFlags", schema=self.schema)
922  if self.config.doMatchSources:
923  self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
924  if self.config.doPropagateFlags:
925  self.makeSubtask("propagateFlags", schema=self.schema)
926  self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
927  if self.config.doApCorr:
928  self.makeSubtask("applyApCorr", schema=self.schema)
929  if self.config.doRunCatalogCalculation:
930  self.makeSubtask("catalogCalculation", schema=self.schema)
931 
932  self.outputSchema = afwTable.SourceCatalog(self.schema)
933 
934  def runQuantum(self, butlerQC, inputRefs, outputRefs):
935  inputs = butlerQC.get(inputRefs)
936 
937  refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
938  inputs.pop('refCat'), config=self.config.refObjLoader,
939  log=self.log)
940  self.match.setRefObjLoader(refObjLoader)
941 
942  # Set psfcache
943  # move this to run after gen2 deprecation
944  inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)
945 
946  # Get unique integer ID for IdFactory and RNG seeds
947  exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
948  inputs['exposureId'] = exposureIdInfo.expId
949  idFactory = exposureIdInfo.makeSourceIdFactory()
950  # Transform inputCatalog
951  table = afwTable.SourceTable.make(self.schema, idFactory)
952  sources = afwTable.SourceCatalog(table)
953  sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
954  table = sources.getTable()
955  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
956  inputs['sources'] = sources
957 
958  skyMap = inputs.pop('skyMap')
959  tractNumber = inputRefs.inputCatalog.dataId['tract']
960  tractInfo = skyMap[tractNumber]
961  patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
962  skyInfo = Struct(
963  skyMap=skyMap,
964  tractInfo=tractInfo,
965  patchInfo=patchInfo,
966  wcs=tractInfo.getWcs(),
967  bbox=patchInfo.getOuterBBox()
968  )
969  inputs['skyInfo'] = skyInfo
970 
971  if self.config.doPropagateFlags:
972  # Filter out any visit catalog that is not part of the coadd inputs
973  ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
974  visitKey = ccdInputs.schema.find("visit").key
975  ccdKey = ccdInputs.schema.find("ccd").key
976  inputVisitIds = set()
977  ccdRecordsWcs = {}
978  for ccdRecord in ccdInputs:
979  visit = ccdRecord.get(visitKey)
980  ccd = ccdRecord.get(ccdKey)
981  inputVisitIds.add((visit, ccd))
982  ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
983 
984  inputCatalogsToKeep = []
985  inputCatalogWcsUpdate = []
986  for i, dataRef in enumerate(inputRefs.visitCatalogs):
987  key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
988  if key in inputVisitIds:
989  inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
990  inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
991  inputs['visitCatalogs'] = inputCatalogsToKeep
992  inputs['wcsUpdates'] = inputCatalogWcsUpdate
993  inputs['ccdInputs'] = ccdInputs
994 
995  outputs = self.run(**inputs)
996  butlerQC.put(outputs, outputRefs)
997 
998  def runDataRef(self, patchRef, psfCache=100):
999  """!
1000  @brief Deblend and measure.
1001 
1002  @param[in] patchRef: Patch reference.
1003 
1004  Set 'is-primary' and related flags. Propagate flags
1005  from individual visits. Optionally match the sources to a reference catalog and write the matches.
1006  Finally, write the deblended sources and measurements out.
1007  """
1008  if self.config.hasFakes:
1009  coaddType = "fakes_" + self.config.coaddName
1010  else:
1011  coaddType = self.config.coaddName
1012  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
1013  exposure.getPsf().setCacheCapacity(psfCache)
1014  sources = self.readSources(patchRef)
1015  table = sources.getTable()
1016  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
1017  skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
1018 
1019  if self.config.doPropagateFlags:
1020  ccdInputs = self.propagateFlags.getCcdInputs(exposure)
1021  else:
1022  ccdInputs = None
1023 
1024  results = self.run(exposure=exposure, sources=sources,
1025  ccdInputs=ccdInputs,
1026  skyInfo=skyInfo, butler=patchRef.getButler(),
1027  exposureId=self.getExposureId(patchRef))
1028 
1029  if self.config.doMatchSources:
1030  self.writeMatches(patchRef, results)
1031  self.write(patchRef, results.outputSources)
1032 
1033  def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
1034  butler=None):
1035  """Run measurement algorithms on the input exposure, and optionally populate the
1036  resulting catalog with extra information.
1037 
1038  Parameters
1039  ----------
1040  exposure : `lsst.afw.image.Exposure`
1041  The input exposure on which measurements are to be performed
1042  sources : `lsst.afw.table.SourceCatalog`
1043  A catalog built from the results of merged detections, or
1044  deblender outputs.
1045  skyInfo : `lsst.pipe.base.Struct`
1046  A struct containing information about the position of the input exposure within
1047  a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
1048  exposureId : `int` or `bytes`
1049  packed unique number or bytes unique to the input exposure
1050  ccdInputs : `lsst.afw.table.ExposureCatalog`
1051  Catalog containing information on the individual visits which went into making
1052  the exposure
1053  visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
1054  A list of source catalogs corresponding to measurements made on the individual
1055  visits which went into the input exposure. If None and butler is `None` then
1056  the task cannot propagate visit flags to the output catalog.
1057  wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
1058  If visitCatalogs is not `None` this should be a list of wcs objects which correspond
1059  to the input visits. Used to put all coordinates to common system. If `None` and
1060  butler is `None` then the task cannot propagate visit flags to the output catalog.
1061  butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
1062  Either a gen2 or gen3 butler used to load visit catalogs
1063 
1064  Returns
1065  -------
1066  results : `lsst.pipe.base.Struct`
1067  Results of running measurement task. Will contain the catalog in the
1068  sources attribute. Optionally will have results of matching to a
1069  reference catalog in the matchResults attribute, and denormalized
1070  matches in the denormMatches attribute.
1071  """
1072  self.measurement.run(sources, exposure, exposureId=exposureId)
1073 
1074  if self.config.doApCorr:
1075  self.applyApCorr.run(
1076  catalog=sources,
1077  apCorrMap=exposure.getInfo().getApCorrMap()
1078  )
1079 
1080  # TODO DM-11568: this contiguous check-and-copy could go away if we
1081  # reserve enough space during SourceDetection and/or SourceDeblend.
1082  # NOTE: sourceSelectors require contiguous catalogs, so ensure
1083  # contiguity now, so views are preserved from here on.
1084  if not sources.isContiguous():
1085  sources = sources.copy(deep=True)
1086 
1087  if self.config.doRunCatalogCalculation:
1088  self.catalogCalculation.run(sources)
1089 
1090  self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
1091  patchInfo=skyInfo.patchInfo)
1092  if self.config.doPropagateFlags:
1093  self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)
1094 
1095  results = Struct()
1096 
1097  if self.config.doMatchSources:
1098  matchResult = self.match.run(sources, exposure.getInfo().getFilterLabel().bandLabel)
1099  matches = afwTable.packMatches(matchResult.matches)
1100  matches.table.setMetadata(matchResult.matchMeta)
1101  results.matchResult = matches
1102  if self.config.doWriteMatchesDenormalized:
1103  if matchResult.matches:
1104  denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
1105  else:
1106  self.log.warning("No matches, so generating dummy denormalized matches file")
1107  denormMatches = afwTable.BaseCatalog(afwTable.Schema())
1108  denormMatches.setMetadata(PropertyList())
1109  denormMatches.getMetadata().add("COMMENT",
1110  "This catalog is empty because no matches were found.")
1112  results.denormMatches = denormMatches
1113 
1114  results.outputSources = sources
1115  return results
1116 
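A sketch of consuming the Struct returned by run, assuming the task and its inputs (exposure, sources, skyInfo, expId) have been assembled as in runQuantum or runDataRef above:

    results = task.run(exposure=exposure, sources=sources, skyInfo=skyInfo, exposureId=expId)
    measCat = results.outputSources                 # measured SourceCatalog
    if hasattr(results, "matchResult"):
        print(len(results.matchResult), "reference matches")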
1117  def readSources(self, dataRef):
1118  """!
1119  @brief Read input sources.
1120 
1121  @param[in] dataRef: Data reference for catalog of merged detections
1122  @return List of sources in merged catalog
1123 
1124  We also need to add columns to hold the measurements we're about to make
1125  so we can measure in-place.
1126  """
1127  merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1128  self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
1129  idFactory = self.makeIdFactory(dataRef)
1130  for s in merged:
1131  idFactory.notify(s.getId())
1132  table = afwTable.SourceTable.make(self.schema, idFactory)
1133  sources = afwTable.SourceCatalog(table)
1134  sources.extend(merged, self.schemaMapper)
1135  return sources
1136 
1137  def writeMatches(self, dataRef, results):
1138  """!
1139  @brief Write matches of the sources to the astrometric reference catalog.
1140 
1141  @param[in] dataRef: data reference
1142  @param[in] results: results struct from run method
1143  """
1144  if hasattr(results, "matchResult"):
1145  dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
1146  if hasattr(results, "denormMatches"):
1147  dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1148 
1149  def write(self, dataRef, sources):
1150  """!
1151  @brief Write the source catalog.
1152 
1153  @param[in] dataRef: data reference
1154  @param[in] sources: source catalog
1155  """
1156  dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1157  self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)
1158 
1159  def getExposureId(self, dataRef):
1160  return int(dataRef.get(self.config.coaddName + "CoaddId"))
1161 