multiBand.py
1 #!/usr/bin/env python
2 #
3 # LSST Data Management System
4 # Copyright 2008-2015 AURA/LSST.
5 #
6 # This product includes software developed by the
7 # LSST Project (http://www.lsst.org/).
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the LSST License Statement and
20 # the GNU General Public License along with this program. If not,
21 # see <https://www.lsstcorp.org/LegalNotices/>.
22 #
23 from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
24 from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
 25  PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
 26 import lsst.pipe.base.connectionTypes as cT
 27 from lsst.pex.config import Config, Field, ConfigurableField
28 from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader
29 from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
30 from lsst.meas.deblender import SourceDeblendTask
31 from lsst.meas.extensions.scarlet import ScarletDeblendTask
32 from lsst.pipe.tasks.coaddBase import getSkyInfo
33 from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
34 from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
35 from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
36 from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
37 from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
38 import lsst.afw.image as afwImage
39 import lsst.afw.table as afwTable
40 import lsst.afw.math as afwMath
41 from lsst.daf.base import PropertyList
42 from lsst.skymap import BaseSkyMap
43 
44 from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
45 from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
46 from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
47 from .multiBandUtils import getInputSchema, getShortFilterName, readCatalog, _makeMakeIdFactory # noqa: F401
48 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
49 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
50 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
51 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
52 
53 
54 """
 55 New dataset types:
56 * deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
57 * deepCoadd_mergeDet: merged detections (tract, patch)
58 * deepCoadd_meas: measurements of merged detections (tract, patch, filter)
59 * deepCoadd_ref: reference sources (tract, patch)
60 All of these have associated *_schema catalogs that require no data ID and hold no records.
61 
62 In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
63 the mergeDet, meas, and ref dataset Footprints:
64 * deepCoadd_peak_schema
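
As an illustration (not part of the pipeline itself), these datasets can be
retrieved with a Gen2 butler roughly as follows; the repository path and the
data ID values are hypothetical:

    from lsst.daf.persistence import Butler
    butler = Butler("/path/to/repo")  # hypothetical repository root
    det = butler.get("deepCoadd_det", tract=0, patch="5,4", filter="HSC-I")
    mergeDet = butler.get("deepCoadd_mergeDet", tract=0, patch="5,4")
    peakSchema = butler.get("deepCoadd_peak_schema").schema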
65 """
66 
67 
68 
 69 class DetectCoaddSourcesConnections(PipelineTaskConnections,
 70  dimensions=("tract", "patch", "band", "skymap"),
71  defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
72  detectionSchema = cT.InitOutput(
73  doc="Schema of the detection catalog",
74  name="{outputCoaddName}Coadd_det_schema",
75  storageClass="SourceCatalog",
76  )
77  exposure = cT.Input(
78  doc="Exposure on which detections are to be performed",
79  name="{inputCoaddName}Coadd",
80  storageClass="ExposureF",
81  dimensions=("tract", "patch", "band", "skymap")
82  )
83  outputBackgrounds = cT.Output(
84  doc="Output Backgrounds used in detection",
85  name="{outputCoaddName}Coadd_calexp_background",
86  storageClass="Background",
87  dimensions=("tract", "patch", "band", "skymap")
88  )
89  outputSources = cT.Output(
90  doc="Detected sources catalog",
91  name="{outputCoaddName}Coadd_det",
92  storageClass="SourceCatalog",
93  dimensions=("tract", "patch", "band", "skymap")
94  )
95  outputExposure = cT.Output(
96  doc="Exposure post detection",
97  name="{outputCoaddName}Coadd_calexp",
98  storageClass="ExposureF",
99  dimensions=("tract", "patch", "band", "skymap")
100  )
101 
102 
103 class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
104  """!
105  @anchor DetectCoaddSourcesConfig_
106 
107  @brief Configuration parameters for the DetectCoaddSourcesTask
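
  A minimal override sketch using fields defined in this config (the values
  shown are illustrative, not recommendations):
  @code
  config.doScaleVariance = False
  config.detection.reEstimateBackground = False
  @endcode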
108  """
109  doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
110  scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
111  detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
112  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
113  doInsertFakes = Field(dtype=bool, default=False,
114  doc="Run fake sources injection task")
115  insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
116  doc="Injection of fake sources for testing "
117  "purposes (must be retargeted)")
118  hasFakes = Field(
119  dtype=bool,
120  default=False,
121  doc="Should be set to True if fake sources have been inserted into the input data."
122  )
123 
124  def setDefaults(self):
125  super().setDefaults()
126  self.detection.thresholdType = "pixel_stdev"
127  self.detection.isotropicGrow = True
128  # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
129  self.detection.reEstimateBackground = False
130  self.detection.background.useApprox = False
131  self.detection.background.binSize = 4096
132  self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
133  self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender
134 
135 
141 
142 
143 class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
144  r"""!
145  @anchor DetectCoaddSourcesTask_
146 
147  @brief Detect sources on a coadd
148 
149  @section pipe_tasks_multiBand_Contents Contents
150 
151  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
152  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
153  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
154  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
155  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
156  - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example
157 
158  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description
159 
160  Command-line task that detects sources on a coadd of exposures obtained with a single filter.
161 
162  Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
163  properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
164  in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
165  propagate the full covariance matrix -- but it is simple and works well in practice.
166 
167  After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
168  SourceDetectionTask_ "detection" subtask.
169 
170  @par Inputs:
171  deepCoadd{tract,patch,filter}: ExposureF
172  @par Outputs:
173  deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
174  @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
175  exposure (ExposureF)
176  @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
177  @par Data Unit:
178  tract, patch, filter
179 
180  DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
181  You can retarget this subtask if you wish.
182 
183  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization
184 
185  @copydoc \_\_init\_\_
186 
187  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task
188 
189  @copydoc run
190 
191  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters
192 
 193  See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"
194 
195  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables
196 
197  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
198  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
199  files.
200 
 201  DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
 202  @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
203  @ref SourceDetectionTask_ "SourceDetectionTask" for further information.
204 
205  @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
206  of using DetectCoaddSourcesTask
207 
208  DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
209  the task is to update the background, detect all sources in a single band and generate a set of parent
210  footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
211  eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
212  reference to the coadd to be processed. A list of the available optional arguments can be obtained by
213  calling detectCoaddSources.py with the `--help` command line argument:
214  @code
215  detectCoaddSources.py --help
216  @endcode
217 
218  To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
219  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
220  steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
221  @code
222  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
223  @endcode
 224  This will process the HSC-I band data. The results are written to
225  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.
226 
227  It is also necessary to run:
228  @code
229  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
230  @endcode
 231  to generate the source catalogs for the HSC-R band required by the next step in the multi-band
232  processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
233  """
234  _DefaultName = "detectCoaddSources"
235  ConfigClass = DetectCoaddSourcesConfig
236  getSchemaCatalogs = _makeGetSchemaCatalogs("det")
237  makeIdFactory = _makeMakeIdFactory("CoaddId")
238 
239  @classmethod
240  def _makeArgumentParser(cls):
241  parser = ArgumentParser(name=cls._DefaultName)
242  parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
243  ContainerClass=ExistingCoaddDataIdContainer)
244  return parser
245 
246  def __init__(self, schema=None, **kwargs):
247  """!
248  @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.
249 
250  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
251 
 252  @param[in] schema: initial schema for the output catalog, modified in place to include all
253  fields set by this task. If None, the source minimal schema will be used.
254  @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
255  """
256  # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
257  # call structure has been reviewed carefully to be sure super will work as intended.
258  super().__init__(**kwargs)
259  if schema is None:
260  schema = afwTable.SourceTable.makeMinimalSchema()
261  if self.config.doInsertFakes:
262  self.makeSubtask("insertFakes")
263  self.schema = schema
264  self.makeSubtask("detection", schema=self.schema)
265  if self.config.doScaleVariance:
266  self.makeSubtask("scaleVariance")
267 
268  self.detectionSchema = afwTable.SourceCatalog(self.schema)
269 
270  def runDataRef(self, patchRef):
271  """!
272  @brief Run detection on a coadd.
273 
274  Invokes @ref run and then uses @ref write to output the
275  results.
276 
277  @param[in] patchRef: data reference for patch
278  """
279  if self.config.hasFakes:
280  exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
281  else:
282  exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
283  expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
284  results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
285  self.write(results, patchRef)
286  return results
287 
288  def runQuantum(self, butlerQC, inputRefs, outputRefs):
289  inputs = butlerQC.get(inputRefs)
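  # Pack the (tract, patch, band) data ID into a single integer: it seeds the
  # detection RNG via expId and reserves the upper bits of the source IDs
  # produced by the IdFactory below.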
290  packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch_band", returnMaxBits=True)
291  inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
292  inputs["expId"] = packedId
293  outputs = self.run(**inputs)
294  butlerQC.put(outputs, outputRefs)
295 
296  def run(self, exposure, idFactory, expId):
297  """!
298  @brief Run detection on an exposure.
299 
300  First scale the variance plane to match the observed variance
301  using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
302  detect sources.
303 
 304  @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
305  depending on configuration).
306  @param[in] idFactory: IdFactory to set source identifiers
307  @param[in] expId: Exposure identifier (integer) for RNG seed
308 
 309  @return a pipe.base.Struct with fields
 310  - outputSources: catalog of detections
 311  - outputBackgrounds: list of backgrounds
  - outputExposure: the input exposure, modified in place by variance scaling and detection
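
  A minimal calling sketch; `exposure`, `idFactory`, and `expId` are assumed to
  be supplied by the caller (e.g. runDataRef or runQuantum), and `task` is a
  constructed DetectCoaddSourcesTask:
  @code
  results = task.run(exposure, idFactory=afwTable.IdFactory.makeSimple(), expId=0)
  nSources = len(results.outputSources)
  @endcode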
312  """
313  if self.config.doScaleVariance:
314  varScale = self.scaleVariance.run(exposure.maskedImage)
315  exposure.getMetadata().add("VARIANCE_SCALE", varScale)
316  backgrounds = afwMath.BackgroundList()
317  if self.config.doInsertFakes:
318  self.insertFakes.run(exposure, background=backgrounds)
319  table = afwTable.SourceTable.make(self.schema, idFactory)
320  detections = self.detection.run(table, exposure, expId=expId)
321  sources = detections.sources
322  fpSets = detections.fpSets
323  if hasattr(fpSets, "background") and fpSets.background:
324  for bg in fpSets.background:
325  backgrounds.append(bg)
326  return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
327 
328  def write(self, results, patchRef):
329  """!
 330  @brief Write out results from run.
 331 
 333  @param[in] results: Struct returned from run
334  @param[in] patchRef: data reference for patch
335  """
336  coaddName = self.config.coaddName + "Coadd"
337  patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
338  patchRef.put(results.outputSources, coaddName + "_det")
339  if self.config.hasFakes:
340  patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
341  else:
342  patchRef.put(results.outputExposure, coaddName + "_calexp")
343 
344 
345 
346 
347 class DeblendCoaddSourcesConfig(Config):
348  """DeblendCoaddSourcesConfig
349 
350  Configuration parameters for the `DeblendCoaddSourcesTask`.
351  """
352  singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
353  doc="Deblend sources separately in each band")
354  multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
355  doc="Deblend sources simultaneously across bands")
356  simultaneous = Field(dtype=bool, default=False, doc="Simultaneously deblend all bands?")
357  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
358  hasFakes = Field(dtype=bool,
359  default=False,
360  doc="Should be set to True if fake sources have been inserted into the input data.")
361 
362  def setDefaults(self):
363  Config.setDefaults(self)
364  self.singleBandDeblend.propagateAllPeaks = True
365 
366 
367 class DeblendCoaddSourcesRunner(MergeSourcesRunner):
368  """Task runner for the `MergeSourcesTask`
369 
370  Required because the run method requires a list of
371  dataRefs rather than a single dataRef.
372  """
373  @staticmethod
374  def getTargetList(parsedCmd, **kwargs):
375  """Provide a list of patch references for each patch, tract, filter combo.
376 
377  Parameters
378  ----------
379  parsedCmd:
380  The parsed command
381  kwargs:
382  Keyword arguments passed to the task
383 
384  Returns
385  -------
386  targetList: list
387  List of tuples, where each tuple is a (dataRef, kwargs) pair.
388  """
389  refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
390  kwargs["psfCache"] = parsedCmd.psfCache
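  # buildRefDict nests the parsed refs as {tract: {patch: {filter: dataRef}}};
  # emit one target per patch, holding all of its per-filter dataRefs plus the
  # shared keyword arguments.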
391  return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
392 
393 
394 class DeblendCoaddSourcesTask(CmdLineTask):
395  """Deblend the sources in a merged catalog
396 
397  Deblend sources from master catalog in each coadd.
398  This can either be done separately in each band using the HSC-SDSS deblender
399  (`DeblendCoaddSourcesTask.config.simultaneous==False`)
400  or use SCARLET to simultaneously fit the blend in all bands
401  (`DeblendCoaddSourcesTask.config.simultaneous==True`).
 402  The task will set its own `self.schema` attribute to the `Schema` of the
403  output deblended catalog.
404  This will include all fields from the input `Schema`, as well as additional fields
405  from the deblender.
406 
 407  pipe.tasks.multiband.DeblendCoaddSourcesTask Description
 408  ---------------------------------------------------------
 409 
410 
411  Parameters
412  ----------
413  butler: `Butler`
414  Butler used to read the input schemas from disk or
 415  construct the reference catalog loader, if `schema` or `peakSchema` is `None`.
416  schema: `Schema`
417  The schema of the merged detection catalog as an input to this task.
418  peakSchema: `Schema`
419  The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
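
  A minimal config sketch (illustrative only) selecting the simultaneous,
  multi-band SCARLET path via the `simultaneous` field of
  `DeblendCoaddSourcesConfig`:

      config = DeblendCoaddSourcesConfig()
      config.simultaneous = True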
420  """
421  ConfigClass = DeblendCoaddSourcesConfig
422  RunnerClass = DeblendCoaddSourcesRunner
423  _DefaultName = "deblendCoaddSources"
424  makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
425 
426  @classmethod
427  def _makeArgumentParser(cls):
428  parser = ArgumentParser(name=cls._DefaultName)
429  parser.add_id_argument("--id", "deepCoadd_calexp",
430  help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
431  ContainerClass=ExistingCoaddDataIdContainer)
432  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
433  return parser
434 
435  def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
436  CmdLineTask.__init__(self, **kwargs)
437  if schema is None:
438  assert butler is not None, "Neither butler nor schema is defined"
439  schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
440  self.schemaMapper = afwTable.SchemaMapper(schema)
441  self.schemaMapper.addMinimalSchema(schema)
442  self.schema = self.schemaMapper.getOutputSchema()
443  if peakSchema is None:
444  assert butler is not None, "Neither butler nor peakSchema is defined"
445  peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema
446 
447  if self.config.simultaneous:
448  self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
449  else:
450  self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
451 
452  def getSchemaCatalogs(self):
453  """Return a dict of empty catalogs for each catalog dataset produced by this task.
454 
455  Returns
456  -------
457  result: dict
458  Dictionary of empty catalogs, with catalog names as keys.
459  """
460  catalog = afwTable.SourceCatalog(self.schema)
461  return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
462  self.config.coaddName + "Coadd_deblendedModel": catalog}
463 
464  def runDataRef(self, patchRefList, psfCache=100):
465  """Deblend the patch
466 
467  Deblend each source simultaneously or separately
468  (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
469  Set `is-primary` and related flags.
470  Propagate flags from individual visits.
471  Write the deblended sources out.
472 
473  Parameters
474  ----------
475  patchRefList: list
476  List of data references for each filter
477  """
478 
479  if self.config.hasFakes:
480  coaddType = "fakes_" + self.config.coaddName
481  else:
482  coaddType = self.config.coaddName
483 
484  if self.config.simultaneous:
485  # Use SCARLET to simultaneously deblend across filters
486  filters = []
487  exposures = []
488  for patchRef in patchRefList:
489  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
490  filters.append(patchRef.dataId["filter"])
491  exposures.append(exposure)
492  # The input sources are the same for all bands, since it is a merged catalog
493  sources = self.readSources(patchRef)
494  exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
495  templateCatalogs = self.multiBandDeblend.run(exposure, sources)
496  for n in range(len(patchRefList)):
497  self.write(patchRefList[n], templateCatalogs[filters[n]])
498  else:
 499  # Use the single-band deblender to deblend each band separately
500  for patchRef in patchRefList:
501  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
502  exposure.getPsf().setCacheCapacity(psfCache)
503  sources = self.readSources(patchRef)
504  self.singleBandDeblend.run(exposure, sources)
505  self.write(patchRef, sources)
506 
507  def readSources(self, dataRef):
508  """Read merged catalog
509 
510  Read the catalog of merged detections and create a catalog
511  in a single band.
512 
513  Parameters
514  ----------
515  dataRef: data reference
516  Data reference for catalog of merged detections
517 
518  Returns
519  -------
520  sources: `SourceCatalog`
521  List of sources in merged catalog
522 
523  We also need to add columns to hold the measurements we're about to make
524  so we can measure in-place.
525  """
526  merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
527  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
528  idFactory = self.makeIdFactory(dataRef)
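  # Tell the IdFactory which IDs are already taken by the merged detections so
  # that any records added later (e.g. deblended children) get unique IDs.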
529  for s in merged:
530  idFactory.notify(s.getId())
531  table = afwTable.SourceTable.make(self.schema, idFactory)
532  sources = afwTable.SourceCatalog(table)
533  sources.extend(merged, self.schemaMapper)
534  return sources
535 
536  def write(self, dataRef, sources):
537  """Write the source catalog(s)
538 
539  Parameters
540  ----------
541  dataRef: Data Reference
542  Reference to the output catalog.
543  sources: `SourceCatalog`
 544  Catalog of sources to write. For the single-band deblender this is the
 545  flux-conserved deblended catalog; for the multi-band deblender it is the
 546  per-band catalog of template-model sources produced by the deblend.
550  """
551  dataRef.put(sources, self.config.coaddName + "Coadd_deblendedFlux")
552  self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
553 
554  def writeMetadata(self, dataRefList):
555  """Write the metadata produced from processing the data.
556  Parameters
557  ----------
558  dataRefList
559  List of Butler data references used to write the metadata.
560  The metadata is written to dataset type `CmdLineTask._getMetadataName`.
561  """
562  for dataRef in dataRefList:
563  try:
564  metadataName = self._getMetadataName()
565  if metadataName is not None:
566  dataRef.put(self.getFullMetadata(), metadataName)
567  except Exception as e:
568  self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
569 
570  def getExposureId(self, dataRef):
571  """Get the ExposureId from a data reference
572  """
573  return int(dataRef.get(self.config.coaddName + "CoaddId"))
574 
575 
576 class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections, dimensions=("tract", "patch", "band", "skymap"),
577  defaultTemplates={"inputCoaddName": "deep",
578  "outputCoaddName": "deep"}):
579  inputSchema = cT.InitInput(
580  doc="Input schema for measure merged task produced by a deblender or detection task",
581  name="{inputCoaddName}Coadd_deblendedFlux_schema",
582  storageClass="SourceCatalog"
583  )
584  outputSchema = cT.InitOutput(
585  doc="Output schema after all new fields are added by task",
586  name="{inputCoaddName}Coadd_meas_schema",
587  storageClass="SourceCatalog"
588  )
589  refCat = cT.PrerequisiteInput(
590  doc="Reference catalog used to match measured sources against known sources",
591  name="ref_cat",
592  storageClass="SimpleCatalog",
593  dimensions=("skypix",),
594  deferLoad=True,
595  multiple=True
596  )
597  exposure = cT.Input(
598  doc="Input coadd image",
599  name="{inputCoaddName}Coadd_calexp",
600  storageClass="ExposureF",
601  dimensions=("tract", "patch", "band", "skymap")
602  )
603  skyMap = cT.Input(
604  doc="SkyMap to use in processing",
605  name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
606  storageClass="SkyMap",
607  dimensions=("skymap",),
608  )
609  visitCatalogs = cT.Input(
610  doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
611  "further filtered in the task for the purpose of propagating flags from image calibration "
612  "and characterization to codd objects",
613  name="src",
614  dimensions=("instrument", "visit", "detector"),
615  storageClass="SourceCatalog",
616  multiple=True
617  )
618  inputCatalog = cT.Input(
 619  doc=("Name of the input catalog to use. "
 620  "If the single band deblender was used this should be 'deblendedFlux'. "
 621  "If the multi-band deblender was used this should be 'deblendedModel', "
 622  "or 'deblendedFlux' if the multiband deblender was configured to output "
 623  "deblended flux catalogs. If no deblending was performed this should "
 624  "be 'mergeDet'."),
625  name="{inputCoaddName}Coadd_deblendedFlux",
626  storageClass="SourceCatalog",
627  dimensions=("tract", "patch", "band", "skymap"),
628  )
629  outputSources = cT.Output(
630  doc="Source catalog containing all the measurement information generated in this task",
631  name="{outputCoaddName}Coadd_meas",
632  dimensions=("tract", "patch", "band", "skymap"),
633  storageClass="SourceCatalog",
634  )
635  matchResult = cT.Output(
636  doc="Match catalog produced by configured matcher, optional on doMatchSources",
637  name="{outputCoaddName}Coadd_measMatch",
638  dimensions=("tract", "patch", "band", "skymap"),
639  storageClass="Catalog",
640  )
641  denormMatches = cT.Output(
642  doc="Denormalized Match catalog produced by configured matcher, optional on "
643  "doWriteMatchesDenormalized",
644  name="{outputCoaddName}Coadd_measMatchFull",
645  dimensions=("tract", "patch", "band", "skymap"),
646  storageClass="Catalog",
647  )
648 
649  def __init__(self, *, config=None):
650  super().__init__(config=config)
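  # Drop optional connections that the configuration disables, so the butler
  # does not insist on the corresponding datasets being present.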
651  if config.doPropagateFlags is False:
652  self.inputs -= set(("visitCatalogs",))
653 
654  if config.doMatchSources is False:
655  self.outputs -= set(("matchResult",))
656 
657  if config.doWriteMatchesDenormalized is False:
658  self.outputs -= set(("denormMatches",))
659 
660 
661 class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
662  pipelineConnections=MeasureMergedCoaddSourcesConnections):
663  """!
664  @anchor MeasureMergedCoaddSourcesConfig_
665 
666  @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
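
  A minimal override sketch using fields defined in this config (the values
  shown are illustrative only):
  @code
  config.doMatchSources = True
  config.doWriteMatchesDenormalized = True
  config.psfCache = 200
  @endcode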
667  """
668  inputCatalog = Field(dtype=str, default="deblendedFlux",
 669  doc=("Name of the input catalog to use. "
 670  "If the single band deblender was used this should be 'deblendedFlux'. "
 671  "If the multi-band deblender was used this should be 'deblendedModel'. "
 672  "If no deblending was performed this should be 'mergeDet'."))
673  measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
674  setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
675  doPropagateFlags = Field(
676  dtype=bool, default=True,
677  doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
678  )
679  propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
680  doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
681  match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
682  doWriteMatchesDenormalized = Field(
683  dtype=bool,
684  default=False,
685  doc=("Write reference matches in denormalized format? "
686  "This format uses more disk space, but is more convenient to read."),
687  )
688  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
689  psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
690  checkUnitsParseStrict = Field(
691  doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
692  dtype=str,
693  default="raise",
694  )
695  doApCorr = Field(
696  dtype=bool,
697  default=True,
698  doc="Apply aperture corrections"
699  )
700  applyApCorr = ConfigurableField(
701  target=ApplyApCorrTask,
702  doc="Subtask to apply aperture corrections"
703  )
704  doRunCatalogCalculation = Field(
705  dtype=bool,
706  default=True,
707  doc='Run catalogCalculation task'
708  )
709  catalogCalculation = ConfigurableField(
710  target=CatalogCalculationTask,
711  doc="Subtask to run catalogCalculation plugins on catalog"
712  )
713 
714  hasFakes = Field(
715  dtype=bool,
716  default=False,
717  doc="Should be set to True if fake sources have been inserted into the input data."
718  )
719 
720  @property
721  def refObjLoader(self):
722  return self.match.refObjLoader
723 
724  def setDefaults(self):
725  super().setDefaults()
726  self.measurement.plugins.names |= ['base_InputCount',
727  'base_Variance',
728  'base_LocalPhotoCalib',
729  'base_LocalWcs']
730  self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
731  'INEXACT_PSF']
732  self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
733  'INEXACT_PSF']
734 
735  def validate(self):
736  super().validate()
737  refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
738  if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
739  raise ValueError(
740  f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
741  f"are different. These options must be kept in sync until Gen2 is retired."
742  )
743 
744 
745 
751 
752 
753 class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
754  """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
755  @staticmethod
756  def getTargetList(parsedCmd, **kwargs):
757  return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
758 
759 
760 class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
761  r"""!
762  @anchor MeasureMergedCoaddSourcesTask_
763 
 764  @brief Deblend sources from master catalog in each coadd separately and measure.
765 
766  @section pipe_tasks_multiBand_Contents Contents
767 
768  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
769  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
770  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
771  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
772  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
773  - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
774 
775  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
776 
777  Command-line task that uses peaks and footprints from a master catalog to perform deblending and
778  measurement in each coadd.
779 
780  Given a master input catalog of sources (peaks and footprints) or deblender outputs
781  (including a HeavyFootprint in each band), measure each source on the
782  coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
783  consistent set of child sources.
784 
785  The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
786  properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
787  flags are propagated to the coadd sources.
788 
789  Optionally, we can match the coadd sources to an external reference catalog.
790 
791  @par Inputs:
792  deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
793  @n deepCoadd_calexp{tract,patch,filter}: ExposureF
794  @par Outputs:
795  deepCoadd_meas{tract,patch,filter}: SourceCatalog
796  @par Data Unit:
797  tract, patch, filter
798 
799  MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
800 
801  <DL>
802  <DT> @ref SingleFrameMeasurementTask_ "measurement"
803  <DD> Measure source properties of deblended sources.</DD>
804  <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
805  <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
806  not at the edge of the field and that have either not been deblended or are the children of deblended
807  sources</DD>
808  <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
809  <DD> Propagate flags set in individual visits to the coadd.</DD>
810  <DT> @ref DirectMatchTask_ "match"
811  <DD> Match input sources to a reference catalog (optional).
812  </DD>
813  </DL>
814  These subtasks may be retargeted as required.
815 
816  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
817 
818  @copydoc \_\_init\_\_
819 
820  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
821 
822  @copydoc run
823 
824  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
825 
826  See @ref MeasureMergedCoaddSourcesConfig_
827 
828  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
829 
830  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
831  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
832  files.
833 
834  MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
 835  the various sub-tasks. See the documentation for individual sub-tasks for more information.
836 
837  @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
838  MeasureMergedCoaddSourcesTask
839 
840  After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
841  The next stage in the multi-band processing procedure will merge these measurements into a suitable
842  catalog for driving forced photometry.
843 
844  Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
845  to be processed.
846  A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
847  `--help` command line argument:
848  @code
849  measureCoaddSources.py --help
850  @endcode
851 
852  To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
853  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
854  step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
855  coadd as follows:
856  @code
857  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
858  @endcode
859  This will process the HSC-I band data. The results are written in
 860  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.
861 
862  It is also necessary to run
863  @code
864  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
865  @endcode
 866  to generate the source catalogs for the HSC-R band required by the next step in the multi-band
867  procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
868  """
869  _DefaultName = "measureCoaddSources"
870  ConfigClass = MeasureMergedCoaddSourcesConfig
871  RunnerClass = MeasureMergedCoaddSourcesRunner
872  getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
873  makeIdFactory = _makeMakeIdFactory("MergedCoaddId") # The IDs we already have are of this type
874 
875  @classmethod
876  def _makeArgumentParser(cls):
877  parser = ArgumentParser(name=cls._DefaultName)
878  parser.add_id_argument("--id", "deepCoadd_calexp",
879  help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
880  ContainerClass=ExistingCoaddDataIdContainer)
881  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
882  return parser
883 
884  def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
885  **kwargs):
886  """!
887  @brief Initialize the task.
888 
889  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
890  @param[in] schema: the schema of the merged detection catalog used as input to this one
891  @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
 892  @param[in] refObjLoader: an instance of LoadReferenceObjectsTask that supplies an external reference
893  catalog. May be None if the loader can be constructed from the butler argument or all steps
894  requiring a reference catalog are disabled.
895  @param[in] butler: a butler used to read the input schemas from disk or construct the reference
896  catalog loader, if schema or peakSchema or refObjLoader is None
897 
898  The task will set its own self.schema attribute to the schema of the output measurement catalog.
899  This will include all fields from the input schema, as well as additional fields for all the
900  measurements.
901  """
902  super().__init__(**kwargs)
903  self.deblended = self.config.inputCatalog.startswith("deblended")
904  self.inputCatalog = "Coadd_" + self.config.inputCatalog
905  if initInputs is not None:
906  schema = initInputs['inputSchema'].schema
907  if schema is None:
908  assert butler is not None, "Neither butler nor schema is defined"
909  schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
910  self.schemaMapper = afwTable.SchemaMapper(schema)
911  self.schemaMapper.addMinimalSchema(schema)
912  self.schema = self.schemaMapper.getOutputSchema()
913  self.algMetadata = PropertyList()
914  self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
915  self.makeSubtask("setPrimaryFlags", schema=self.schema)
916  if self.config.doMatchSources:
917  self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
918  if self.config.doPropagateFlags:
919  self.makeSubtask("propagateFlags", schema=self.schema)
920  self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
921  if self.config.doApCorr:
922  self.makeSubtask("applyApCorr", schema=self.schema)
923  if self.config.doRunCatalogCalculation:
924  self.makeSubtask("catalogCalculation", schema=self.schema)
925 
926  self.outputSchema = afwTable.SourceCatalog(self.schema)
927 
928  def runQuantum(self, butlerQC, inputRefs, outputRefs):
929  inputs = butlerQC.get(inputRefs)
930 
931  refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
932  inputs.pop('refCat'), config=self.config.refObjLoader,
933  log=self.log)
934  self.match.setRefObjLoader(refObjLoader)
935 
936  # Set psfcache
937  # move this to run after gen2 deprecation
938  inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)
939 
940  # Get unique integer ID for IdFactory and RNG seeds
941  packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
942  inputs['exposureId'] = packedId
943  idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
944  # Transform inputCatalog
945  table = afwTable.SourceTable.make(self.schema, idFactory)
946  sources = afwTable.SourceCatalog(table)
947  sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
948  table = sources.getTable()
949  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
950  inputs['sources'] = sources
951 
952  skyMap = inputs.pop('skyMap')
953  tractNumber = inputRefs.inputCatalog.dataId['tract']
954  tractInfo = skyMap[tractNumber]
955  patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
956  skyInfo = Struct(
957  skyMap=skyMap,
958  tractInfo=tractInfo,
959  patchInfo=patchInfo,
960  wcs=tractInfo.getWcs(),
961  bbox=patchInfo.getOuterBBox()
962  )
963  inputs['skyInfo'] = skyInfo
964 
965  if self.config.doPropagateFlags:
966  # Filter out any visit catalog that is not coadd inputs
967  ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
968  visitKey = ccdInputs.schema.find("visit").key
969  ccdKey = ccdInputs.schema.find("ccd").key
970  inputVisitIds = set()
971  ccdRecordsWcs = {}
972  for ccdRecord in ccdInputs:
973  visit = ccdRecord.get(visitKey)
974  ccd = ccdRecord.get(ccdKey)
975  inputVisitIds.add((visit, ccd))
976  ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
977 
978  inputCatalogsToKeep = []
979  inputCatalogWcsUpdate = []
980  for i, dataRef in enumerate(inputRefs.visitCatalogs):
981  key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
982  if key in inputVisitIds:
983  inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
984  inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
985  inputs['visitCatalogs'] = inputCatalogsToKeep
986  inputs['wcsUpdates'] = inputCatalogWcsUpdate
987  inputs['ccdInputs'] = ccdInputs
988 
989  outputs = self.run(**inputs)
990  butlerQC.put(outputs, outputRefs)
991 
992  def runDataRef(self, patchRef, psfCache=100):
993  """!
994  @brief Deblend and measure.
995 
996  @param[in] patchRef: Patch reference.
997 
998  Set 'is-primary' and related flags. Propagate flags
999  from individual visits. Optionally match the sources to a reference catalog and write the matches.
1000  Finally, write the deblended sources and measurements out.
1001  """
1002  if self.config.hasFakes:
1003  coaddType = "fakes_" + self.config.coaddName
1004  else:
1005  coaddType = self.config.coaddName
1006  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
1007  exposure.getPsf().setCacheCapacity(psfCache)
1008  sources = self.readSources(patchRef)
1009  table = sources.getTable()
1010  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
1011  skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
1012 
1013  if self.config.doPropagateFlags:
1014  ccdInputs = self.propagateFlags.getCcdInputs(exposure)
1015  else:
1016  ccdInputs = None
1017 
1018  results = self.run(exposure=exposure, sources=sources,
1019  ccdInputs=ccdInputs,
1020  skyInfo=skyInfo, butler=patchRef.getButler(),
1021  exposureId=self.getExposureId(patchRef))
1022 
1023  if self.config.doMatchSources:
1024  self.writeMatches(patchRef, results)
1025  self.write(patchRef, results.outputSources)
1026 
1027  def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
1028  butler=None):
1029  """Run measurement algorithms on the input exposure, and optionally populate the
1030  resulting catalog with extra information.
1031 
1032  Parameters
1033  ----------
 1034  exposure : `lsst.afw.image.Exposure`
1035  The input exposure on which measurements are to be performed
1036  sources : `lsst.afw.table.SourceCatalog`
1037  A catalog built from the results of merged detections, or
1038  deblender outputs.
1039  skyInfo : `lsst.pipe.base.Struct`
1040  A struct containing information about the position of the input exposure within
1041  a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
1042  exposureId : `int` or `bytes`
1043  packed unique number or bytes unique to the input exposure
1044  ccdInputs : `lsst.afw.table.ExposureCatalog`
1045  Catalog containing information on the individual visits which went into making
1046  the exposure
1047  visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
1048  A list of source catalogs corresponding to measurements made on the individual
1049  visits which went into the input exposure. If None and butler is `None` then
1050  the task cannot propagate visit flags to the output catalog.
1051  wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
1052  If visitCatalogs is not `None` this should be a list of wcs objects which correspond
1053  to the input visits. Used to put all coordinates to common system. If `None` and
1054  butler is `None` then the task cannot propagate visit flags to the output catalog.
1055  butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
1056  Either a gen2 or gen3 butler used to load visit catalogs
1057 
1058  Returns
1059  -------
1060  results : `lsst.pipe.base.Struct`
1061  Results of running measurement task. Will contain the catalog in the
1062  sources attribute. Optionally will have results of matching to a
1063  reference catalog in the matchResults attribute, and denormalized
1064  matches in the denormMatches attribute.
1065  """
1066  self.measurement.run(sources, exposure, exposureId=exposureId)
1067 
1068  if self.config.doApCorr:
1069  self.applyApCorr.run(
1070  catalog=sources,
1071  apCorrMap=exposure.getInfo().getApCorrMap()
1072  )
1073 
1074  # TODO DM-11568: this contiguous check-and-copy could go away if we
1075  # reserve enough space during SourceDetection and/or SourceDeblend.
1076  # NOTE: sourceSelectors require contiguous catalogs, so ensure
1077  # contiguity now, so views are preserved from here on.
1078  if not sources.isContiguous():
1079  sources = sources.copy(deep=True)
1080 
1081  if self.config.doRunCatalogCalculation:
1082  self.catalogCalculation.run(sources)
1083 
1084  self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
1085  patchInfo=skyInfo.patchInfo, includeDeblend=self.deblended)
1086  if self.config.doPropagateFlags:
1087  self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)
1088 
1089  results = Struct()
1090 
1091  if self.config.doMatchSources:
1092  matchResult = self.match.run(sources, exposure.getInfo().getFilter().getName())
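  # packMatches stores the match list as a normalized catalog of
  # (first, second, distance) records; the match metadata records how the
  # matching was performed.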
1093  matches = afwTable.packMatches(matchResult.matches)
1094  matches.table.setMetadata(matchResult.matchMeta)
1095  results.matchResult = matches
1096  if self.config.doWriteMatchesDenormalized:
1097  if matchResult.matches:
1098  denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
1099  else:
1100  self.log.warn("No matches, so generating dummy denormalized matches file")
1101  denormMatches = afwTable.BaseCatalog(afwTable.Schema())
1102  denormMatches.setMetadata(PropertyList())
1103  denormMatches.getMetadata().add("COMMENT",
1104  "This catalog is empty because no matches were found.")
1105  results.denormMatches = denormMatches
1106  results.denormMatches = denormMatches
1107 
1108  results.outputSources = sources
1109  return results
1110 
1111  def readSources(self, dataRef):
1112  """!
1113  @brief Read input sources.
1114 
1115  @param[in] dataRef: Data reference for catalog of merged detections
1116  @return List of sources in merged catalog
1117 
1118  We also need to add columns to hold the measurements we're about to make
1119  so we can measure in-place.
1120  """
1121  merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1122  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
1123  idFactory = self.makeIdFactory(dataRef)
1124  for s in merged:
1125  idFactory.notify(s.getId())
1126  table = afwTable.SourceTable.make(self.schema, idFactory)
1127  sources = afwTable.SourceCatalog(table)
1128  sources.extend(merged, self.schemaMapper)
1129  return sources
1130 
1131  def writeMatches(self, dataRef, results):
1132  """!
1133  @brief Write matches of the sources to the astrometric reference catalog.
1134 
1135  @param[in] dataRef: data reference
1136  @param[in] results: results struct from run method
1137  """
1138  if hasattr(results, "matchResult"):
1139  dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
1140  if hasattr(results, "denormMatches"):
1141  dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1142 
1143  def write(self, dataRef, sources):
1144  """!
1145  @brief Write the source catalog.
1146 
1147  @param[in] dataRef: data reference
1148  @param[in] sources: source catalog
1149  """
1150  dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1151  self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
1152 
1153  def getExposureId(self, dataRef):
1154  return int(dataRef.get(self.config.coaddName + "CoaddId"))
1155 