multiBand.py
1 #!/usr/bin/env python
2 #
3 # LSST Data Management System
4 # Copyright 2008-2015 AURA/LSST.
5 #
6 # This product includes software developed by the
7 # LSST Project (http://www.lsst.org/).
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the LSST License Statement and
20 # the GNU General Public License along with this program. If not,
21 # see <https://www.lsstcorp.org/LegalNotices/>.
22 #
23 from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
24 from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
25  PipelineTask, PipelineTaskConfig, InitInputDatasetField,
26  InitOutputDatasetField, InputDatasetField, OutputDatasetField)
27 from lsst.pex.config import Config, Field, ConfigurableField
28 from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader
29 from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
30 from lsst.meas.deblender import SourceDeblendTask, MultibandDeblendTask
31 from lsst.pipe.tasks.coaddBase import getSkyInfo
32 from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
33 from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
34 from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
35 from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
36 from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
37 import lsst.afw.image as afwImage
38 import lsst.afw.table as afwTable
39 import lsst.afw.math as afwMath
40 from lsst.daf.base import PropertyList
41 
42 from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
43 from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
44 from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
45 from .multiBandUtils import getInputSchema, getShortFilterName, readCatalog, _makeMakeIdFactory # noqa: F401
46 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
47 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
48 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
49 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
50 
51 
52 """
53 New dataset types:
54 * deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
55 * deepCoadd_mergeDet: merged detections (tract, patch)
56 * deepCoadd_meas: measurements of merged detections (tract, patch, filter)
57 * deepCoadd_ref: reference sources (tract, patch)
58 All of these have associated *_schema catalogs that require no data ID and hold no records.
59 
60 In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
61 the mergeDet, meas, and ref dataset Footprints:
62 * deepCoadd_peak_schema
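
For example, with a Gen2 butler the merged detection catalog for a patch could be
retrieved as follows (the data ID values here are illustrative):

    catalog = butler.get("deepCoadd_mergeDet", tract=0, patch="5,4")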
63 """
64 
65 
66 
67 
68 class DetectCoaddSourcesConfig(PipelineTaskConfig):
69  """!
70  @anchor DetectCoaddSourcesConfig_
71 
72  @brief Configuration parameters for the DetectCoaddSourcesTask
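
 A config override file might adjust these settings, for example (a minimal sketch;
 the values shown are illustrative):
 @code
 config.doScaleVariance = False         # skip empirical variance rescaling
 config.detection.thresholdValue = 5.0  # forwarded to the detection subtask config
 @endcode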
73  """
74  doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
75  scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
76  detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
77  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
78  doInsertFakes = Field(dtype=bool, default=False,
79  doc="Run fake sources injection task")
80  insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
81  doc="Injection of fake sources for testing "
82  "purposes (must be retargeted)")
83  detectionSchema = InitOutputDatasetField(
84  doc="Schema of the detection catalog",
85  nameTemplate="{outputCoaddName}Coadd_det_schema",
86  storageClass="SourceCatalog",
87  )
88  exposure = InputDatasetField(
89  doc="Exposure on which detections are to be performed",
90  nameTemplate="{inputCoaddName}Coadd",
91  scalar=True,
92  storageClass="ExposureF",
93  dimensions=("Tract", "Patch", "AbstractFilter", "SkyMap")
94  )
95  outputBackgrounds = OutputDatasetField(
96  doc="Output Backgrounds used in detection",
97  nameTemplate="{outputCoaddName}Coadd_calexp_background",
98  scalar=True,
99  storageClass="Background",
100  dimensions=("Tract", "Patch", "AbstractFilter", "SkyMap")
101  )
102  outputSources = OutputDatasetField(
103  doc="Detected sources catalog",
104  nameTemplate="{outputCoaddName}Coadd_det",
105  scalar=True,
106  storageClass="SourceCatalog",
107  dimensions=("Tract", "Patch", "AbstractFilter", "SkyMap")
108  )
109  outputExposure = OutputDatasetField(
110  doc="Exposure post detection",
111  nameTemplate="{outputCoaddName}Coadd_calexp",
112  scalar=True,
113  storageClass="ExposureF",
114  dimensions=("Tract", "Patch", "AbstractFilter", "SkyMap")
115  )
116 
117  def setDefaults(self):
118  super().setDefaults()
119  self.quantum.dimensions = ("Tract", "Patch", "AbstractFilter", "SkyMap")
120  self.formatTemplateNames({"inputCoaddName": "deep", "outputCoaddName": "deep"})
121  self.detection.thresholdType = "pixel_stdev"
122  self.detection.isotropicGrow = True
123  # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
124  self.detection.reEstimateBackground = False
125  self.detection.background.useApprox = False
126  self.detection.background.binSize = 4096
127  self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
128  self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender
129 
130 
136 
137 
138 class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
139  r"""!
140  @anchor DetectCoaddSourcesTask_
141 
142  @brief Detect sources on a coadd
143 
144  @section pipe_tasks_multiBand_Contents Contents
145 
146  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
147  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
148  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
149  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
150  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
151  - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example
152 
153  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description
154 
155  Command-line task that detects sources on a coadd of exposures obtained with a single filter.
156 
157  Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
158  properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
159  in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
160  propagate the full covariance matrix -- but it is simple and works well in practice.
161 
162  After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
163  SourceDetectionTask_ "detection" subtask.
164 
165  @par Inputs:
166  deepCoadd{tract,patch,filter}: ExposureF
167  @par Outputs:
168  deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
169  @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
170  exposure (ExposureF)
171  @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
172  @par Data Unit:
173  tract, patch, filter
174 
175  DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
176  You can retarget this subtask if you wish.
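
 For example, a config override could retarget or tune the detection subtask (a sketch
 only; the retargeted class and threshold value are illustrative):
 @code
 from lsst.meas.algorithms import SourceDetectionTask
 config.detection.retarget(SourceDetectionTask)
 config.detection.thresholdValue = 5.0
 @endcode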
177 
178  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization
179 
180  @copydoc \_\_init\_\_
181 
182  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task
183 
184  @copydoc run
185 
186  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters
187 
 188  See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"
189 
190  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables
191 
192  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
193  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
194  files.
195 
 196  DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
 197  @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
 198  @ref SourceDetectionTask_ "SourceDetectionTask" for further information.
199 
200  @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
201  of using DetectCoaddSourcesTask
202 
203  DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
204  the task is to update the background, detect all sources in a single band and generate a set of parent
205  footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
206  eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
207  reference to the coadd to be processed. A list of the available optional arguments can be obtained by
208  calling detectCoaddSources.py with the `--help` command line argument:
209  @code
210  detectCoaddSources.py --help
211  @endcode
212 
213  To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
214  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
215  steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
216  @code
217  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
218  @endcode
 219  This will process the HSC-I band data. The results are written to
220  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.
221 
222  It is also necessary to run:
223  @code
224  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
225  @endcode
226  to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
227  processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
228  """
229  _DefaultName = "detectCoaddSources"
230  ConfigClass = DetectCoaddSourcesConfig
231  getSchemaCatalogs = _makeGetSchemaCatalogs("det")
232  makeIdFactory = _makeMakeIdFactory("CoaddId")
233 
234  @classmethod
235  def _makeArgumentParser(cls):
236  parser = ArgumentParser(name=cls._DefaultName)
237  parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
238  ContainerClass=ExistingCoaddDataIdContainer)
239  return parser
240 
241  def __init__(self, schema=None, **kwargs):
242  """!
243  @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.
244 
245  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
246 
 247  @param[in] schema: initial schema for the output catalog, modified in place to include all
248  fields set by this task. If None, the source minimal schema will be used.
249  @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
250  """
251  # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
252  # call structure has been reviewed carefully to be sure super will work as intended.
253  super().__init__(**kwargs)
254  if schema is None:
255  schema = afwTable.SourceTable.makeMinimalSchema()
256  if self.config.doInsertFakes:
257  self.makeSubtask("insertFakes")
258  self.schema = schema
259  self.makeSubtask("detection", schema=self.schema)
260  if self.config.doScaleVariance:
261  self.makeSubtask("scaleVariance")
262 
 263  def getInitOutputDatasets(self):
 264  return {"detectionSchema": afwTable.SourceCatalog(self.schema)}
265 
266  def runDataRef(self, patchRef):
267  """!
268  @brief Run detection on a coadd.
269 
270  Invokes @ref run and then uses @ref write to output the
271  results.
272 
273  @param[in] patchRef: data reference for patch
274  """
275  exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
276  expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
277  results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
278  self.write(results, patchRef)
279  return results
280 
281  def adaptArgsAndRun(self, inputData, inputDataIds, outputDataIds, butler):
282  packedId, maxBits = butler.registry.packDataId("TractPatchAbstractFilter",
283  inputDataIds["exposure"],
284  returnMaxBits=True)
285  inputData["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
286  inputData["expId"] = packedId
287  return self.run(**inputData)
288 
289  def run(self, exposure, idFactory, expId):
290  """!
291  @brief Run detection on an exposure.
292 
293  First scale the variance plane to match the observed variance
294  using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
295  detect sources.
296 
 297  @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
298  depending on configuration).
299  @param[in] idFactory: IdFactory to set source identifiers
300  @param[in] expId: Exposure identifier (integer) for RNG seed
301 
302  @return a pipe.base.Struct with fields
 303  - outputSources: catalog of detections
 304  - outputBackgrounds: list of backgrounds (the exposure is also returned as outputExposure)
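
 A minimal invocation on an already-constructed task, assuming a coadd exposure is in
 hand (the id factory and exposure id below are placeholders):
 @code
 idFactory = afwTable.IdFactory.makeSimple()
 results = task.run(exposure, idFactory, expId=0)
 print(len(results.outputSources))
 @endcode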
305  """
306  if self.config.doScaleVariance:
307  varScale = self.scaleVariance.run(exposure.maskedImage)
308  exposure.getMetadata().add("variance_scale", varScale)
309  backgrounds = afwMath.BackgroundList()
310  if self.config.doInsertFakes:
311  self.insertFakes.run(exposure, background=backgrounds)
312  table = afwTable.SourceTable.make(self.schema, idFactory)
313  detections = self.detection.makeSourceCatalog(table, exposure, expId=expId)
314  sources = detections.sources
315  fpSets = detections.fpSets
316  if hasattr(fpSets, "background") and fpSets.background:
317  for bg in fpSets.background:
318  backgrounds.append(bg)
319  return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
320 
321  def write(self, results, patchRef):
322  """!
 323  @brief Write out results from run.
 324 
 325  @param[in] results: Struct returned from run, containing outputSources,
 326  outputBackgrounds, and outputExposure
 327  @param[in] patchRef: data reference for patch
328  """
329  coaddName = self.config.coaddName + "Coadd"
330  patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
331  patchRef.put(results.outputSources, coaddName + "_det")
332  patchRef.put(results.outputExposure, coaddName + "_calexp")
333 
334 
335 
336 
 337 class DeblendCoaddSourcesConfig(Config):
 338  """DeblendCoaddSourcesConfig
339 
340  Configuration parameters for the `DeblendCoaddSourcesTask`.
341  """
342  singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
343  doc="Deblend sources separately in each band")
344  multiBandDeblend = ConfigurableField(target=MultibandDeblendTask,
345  doc="Deblend sources simultaneously across bands")
346  simultaneous = Field(dtype=bool, default=False, doc="Simultaneously deblend all bands?")
347  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
348 
349  def setDefaults(self):
350  Config.setDefaults(self)
351  self.singleBandDeblend.propagateAllPeaks = True
352 
353 
 354 class DeblendCoaddSourcesRunner(MergeSourcesRunner):
 355  """Task runner for `DeblendCoaddSourcesTask`
356 
357  Required because the run method requires a list of
358  dataRefs rather than a single dataRef.
359  """
360  @staticmethod
361  def getTargetList(parsedCmd, **kwargs):
 362  Provide one target per tract/patch combination, bundling the patch references for all filters.
363 
364  Parameters
365  ----------
366  parsedCmd:
367  The parsed command
368  kwargs:
369  Keyword arguments passed to the task
370 
371  Returns
372  -------
373  targetList: list
 374  List of tuples, where each tuple is a (list of per-filter dataRefs, kwargs) pair.
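
 For example, a single tract/patch observed in two filters yields one target whose
 first element bundles both per-filter dataRefs (names are illustrative)::

     [([dataRef_HSC_I, dataRef_HSC_R], {"psfCache": 100})]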
375  """
376  refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
377  kwargs["psfCache"] = parsedCmd.psfCache
378  return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
379 
380 
381 class DeblendCoaddSourcesTask(CmdLineTask):
382  """Deblend the sources in a merged catalog
383 
384  Deblend sources from master catalog in each coadd.
385  This can either be done separately in each band using the HSC-SDSS deblender
386  (`DeblendCoaddSourcesTask.config.simultaneous==False`)
387  or use SCARLET to simultaneously fit the blend in all bands
388  (`DeblendCoaddSourcesTask.config.simultaneous==True`).
 389  The task will set its own `self.schema` attribute to the `Schema` of the
390  output deblended catalog.
391  This will include all fields from the input `Schema`, as well as additional fields
392  from the deblender.
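
 Choosing the deblender is a config-level decision; for example (a minimal sketch
 using only fields defined in `DeblendCoaddSourcesConfig`)::

     config.simultaneous = True  # deblend all bands at once with the multi-band deblender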
393 
 394  pipe.tasks.multiband.DeblendCoaddSourcesTask Description
 395  ---------------------------------------------------------
 396 
397 
398  Parameters
399  ----------
400  butler: `Butler`
 401  Butler used to read the input schemas from disk,
 402  if `schema` or `peakSchema` is `None`
403  schema: `Schema`
404  The schema of the merged detection catalog as an input to this task.
405  peakSchema: `Schema`
406  The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
407  """
408  ConfigClass = DeblendCoaddSourcesConfig
409  RunnerClass = DeblendCoaddSourcesRunner
410  _DefaultName = "deblendCoaddSources"
411  makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
412 
413  @classmethod
414  def _makeArgumentParser(cls):
415  parser = ArgumentParser(name=cls._DefaultName)
416  parser.add_id_argument("--id", "deepCoadd_calexp",
417  help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
418  ContainerClass=ExistingCoaddDataIdContainer)
419  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
420  return parser
421 
422  def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
423  CmdLineTask.__init__(self, **kwargs)
424  if schema is None:
425  assert butler is not None, "Neither butler nor schema is defined"
426  schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
 427  self.schemaMapper = afwTable.SchemaMapper(schema)
 428  self.schemaMapper.addMinimalSchema(schema)
429  self.schema = self.schemaMapper.getOutputSchema()
430  if peakSchema is None:
431  assert butler is not None, "Neither butler nor peakSchema is defined"
432  peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema
433 
434  if self.config.simultaneous:
435  self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
436  else:
437  self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
438 
439  def getSchemaCatalogs(self):
440  """Return a dict of empty catalogs for each catalog dataset produced by this task.
441 
442  Returns
443  -------
444  result: dict
445  Dictionary of empty catalogs, with catalog names as keys.
446  """
447  catalog = afwTable.SourceCatalog(self.schema)
448  return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
449  self.config.coaddName + "Coadd_deblendedModel": catalog}
450 
451  def runDataRef(self, patchRefList, psfCache=100):
452  """Deblend the patch
453 
454  Deblend each source simultaneously or separately
455  (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
 456  Setting the `is-primary` flags and propagating visit flags is left to
 457  `MeasureMergedCoaddSourcesTask`.
458  Write the deblended sources out.
459 
460  Parameters
461  ----------
462  patchRefList: list
463  List of data references for each filter
464  """
465  if self.config.simultaneous:
466  # Use SCARLET to simultaneously deblend across filters
467  filters = []
468  exposures = []
469  for patchRef in patchRefList:
470  exposure = patchRef.get(self.config.coaddName + "Coadd_calexp", immediate=True)
471  filters.append(patchRef.dataId["filter"])
472  exposures.append(exposure)
473  # The input sources are the same for all bands, since it is a merged catalog
474  sources = self.readSources(patchRef)
475  exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
476  fluxCatalogs, templateCatalogs = self.multiBandDeblend.run(exposure, sources)
477  for n in range(len(patchRefList)):
478  self.write(patchRefList[n], fluxCatalogs[filters[n]], templateCatalogs[filters[n]])
479  else:
 480  # Use the single-band deblender to deblend each band separately
481  for patchRef in patchRefList:
482  exposure = patchRef.get(self.config.coaddName + "Coadd_calexp", immediate=True)
483  exposure.getPsf().setCacheCapacity(psfCache)
484  sources = self.readSources(patchRef)
485  self.singleBandDeblend.run(exposure, sources)
486  self.write(patchRef, sources)
487 
488  def readSources(self, dataRef):
489  """Read merged catalog
490 
491  Read the catalog of merged detections and create a catalog
492  in a single band.
493 
494  Parameters
495  ----------
496  dataRef: data reference
497  Data reference for catalog of merged detections
498 
499  Returns
500  -------
501  sources: `SourceCatalog`
502  List of sources in merged catalog
503 
504  We also need to add columns to hold the measurements we're about to make
505  so we can measure in-place.
506  """
507  merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
508  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
509  idFactory = self.makeIdFactory(dataRef)
510  for s in merged:
511  idFactory.notify(s.getId())
512  table = afwTable.SourceTable.make(self.schema, idFactory)
513  sources = afwTable.SourceCatalog(table)
514  sources.extend(merged, self.schemaMapper)
515  return sources
516 
517  def write(self, dataRef, flux_sources, template_sources=None):
518  """Write the source catalog(s)
519 
520  Parameters
521  ----------
522  dataRef: Data Reference
523  Reference to the output catalog.
524  flux_sources: `SourceCatalog`
525  Flux conserved sources to write to file.
526  If using the single band deblender, this is the catalog
527  generated.
528  template_sources: `SourceCatalog`
529  Source catalog using the multiband template models
530  as footprints.
531  """
532  # The multiband deblender does not have to conserve flux,
533  # so only write the flux conserved catalog if it exists
534  if flux_sources is not None:
535  assert not self.config.simultaneous or self.config.multiBandDeblend.conserveFlux
536  dataRef.put(flux_sources, self.config.coaddName + "Coadd_deblendedFlux")
537  # Only the multiband deblender has the option to output the
538  # template model catalog, which can optionally be used
539  # in MeasureMergedCoaddSources
540  if template_sources is not None:
541  assert self.config.multiBandDeblend.saveTemplates
542  dataRef.put(template_sources, self.config.coaddName + "Coadd_deblendedModel")
543  self.log.info("Wrote %d sources: %s" % (len(flux_sources), dataRef.dataId))
544 
545  def writeMetadata(self, dataRefList):
546  """Write the metadata produced from processing the data.
547  Parameters
548  ----------
549  dataRefList
550  List of Butler data references used to write the metadata.
551  The metadata is written to dataset type `CmdLineTask._getMetadataName`.
552  """
553  for dataRef in dataRefList:
554  try:
555  metadataName = self._getMetadataName()
556  if metadataName is not None:
557  dataRef.put(self.getFullMetadata(), metadataName)
558  except Exception as e:
559  self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
560 
561  def getExposureId(self, dataRef):
562  """Get the ExposureId from a data reference
563  """
564  return int(dataRef.get(self.config.coaddName + "CoaddId"))
565 
566 
567 class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig):
568  """!
569  @anchor MeasureMergedCoaddSourcesConfig_
570 
571  @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
572  """
573  inputCatalog = Field(dtype=str, default="deblendedFlux",
 574  doc=("Name of the input catalog to use. "
 575  "If the single band deblender was used this should be 'deblendedFlux'. "
 576  "If the multi-band deblender was used this should be 'deblendedModel'. "
 577  "If no deblending was performed this should be 'mergeDet'"))
578  measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
579  setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
580  doPropagateFlags = Field(
581  dtype=bool, default=True,
582  doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
583  )
584  propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
585  doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
586  match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
587  doWriteMatchesDenormalized = Field(
588  dtype=bool,
589  default=False,
590  doc=("Write reference matches in denormalized format? "
591  "This format uses more disk space, but is more convenient to read."),
592  )
593  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
594  psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
595  checkUnitsParseStrict = Field(
596  doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
597  dtype=str,
598  default="raise",
599  )
600  doApCorr = Field(
601  dtype=bool,
602  default=True,
603  doc="Apply aperture corrections"
604  )
605  applyApCorr = ConfigurableField(
606  target=ApplyApCorrTask,
607  doc="Subtask to apply aperture corrections"
608  )
609  doRunCatalogCalculation = Field(
610  dtype=bool,
611  default=True,
612  doc='Run catalogCalculation task'
613  )
614  catalogCalculation = ConfigurableField(
615  target=CatalogCalculationTask,
616  doc="Subtask to run catalogCalculation plugins on catalog"
617  )
618  inputSchema = InitInputDatasetField(
619  doc="Input schema for measure merged task produced by a deblender or detection task",
620  nameTemplate="{inputCoaddName}Coadd_deblendedFlux_schema",
621  storageClass="SourceCatalog"
622  )
623  outputSchema = InitOutputDatasetField(
624  doc="Output schema after all new fields are added by task",
625  nameTemplate="{inputCoaddName}Coadd_meas_schema",
626  storageClass="SourceCatalog"
627  )
 628  refCat = InputDatasetField(
 629  doc="Reference catalog used to match measured sources against known sources",
630  name="ref_cat",
631  storageClass="SimpleCatalog",
632  dimensions=("SkyPix",),
633  manualLoad=True
634  )
635  exposure = InputDatasetField(
636  doc="Input coadd image",
637  nameTemplate="{inputCoaddName}Coadd_calexp",
638  scalar=True,
639  storageClass="ExposureF",
640  dimensions=("Tract", "Patch", "AbstractFilter", "SkyMap")
641  )
 642  skyMap = InputDatasetField(
 643  doc="SkyMap to use in processing",
644  nameTemplate="{inputCoaddName}Coadd_skyMap",
645  storageClass="SkyMap",
646  dimensions=("SkyMap",),
647  scalar=True
648  )
649  visitCatalogs = InputDatasetField(
650  doc="Source catalogs for visits which overlap input tract, patch, abstract_filter. Will be "
651  "further filtered in the task for the purpose of propagating flags from image calibration "
 652  "and characterization to coadd objects",
653  name="src",
654  dimensions=("Instrument", "Visit", "Detector"),
655  storageClass="SourceCatalog"
656  )
657  intakeCatalog = InputDatasetField(
 658  doc=("Name of the input catalog to use. "
 659  "If the single band deblender was used this should be 'deblendedFlux'. "
 660  "If the multi-band deblender was used this should be 'deblendedModel', "
 661  "or 'deblendedFlux' if the multiband deblender was configured to output "
 662  "deblended flux catalogs. If no deblending was performed this should "
 663  "be 'mergeDet'"),
664  nameTemplate="{inputCoaddName}Coadd_deblendedFlux",
665  storageClass="SourceCatalog",
666  dimensions=("Tract", "Patch", "AbstractFilter", "SkyMap"),
667  scalar=True
668  )
669  outputSources = OutputDatasetField(
670  doc="Source catalog containing all the measurement information generated in this task",
671  nameTemplate="{outputCoaddName}Coadd_meas",
672  dimensions=("Tract", "Patch", "AbstractFilter", "SkyMap"),
673  storageClass="SourceCatalog",
674  scalar=True
675  )
676  matchResult = OutputDatasetField(
 677  doc="Match catalog produced by the configured matcher; only written if doMatchSources is set",
678  nameTemplate="{outputCoaddName}Coadd_measMatch",
679  dimensions=("Tract", "Patch", "AbstractFilter", "SkyMap"),
680  storageClass="Catalog",
681  scalar=True
682  )
683  denormMatches = OutputDatasetField(
 684  doc="Denormalized match catalog produced by the configured matcher; only written if "
 685  "doWriteMatchesDenormalized is set",
686  nameTemplate="{outputCoaddName}Coadd_measMatchFull",
687  dimensions=("Tract", "Patch", "AbstractFilter", "SkyMap"),
688  storageClass="Catalog",
689  scalar=True
690  )
691 
692  @property
693  def refObjLoader(self):
694  return self.match.refObjLoader
695 
696  def setDefaults(self):
697  super().setDefaults()
698  self.formatTemplateNames({"inputCoaddName": "deep", "outputCoaddName": "deep"})
699  self.quantum.dimensions = ("Tract", "Patch", "AbstractFilter", "SkyMap")
700  self.measurement.plugins.names |= ['base_InputCount', 'base_Variance']
701  self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
702  'INEXACT_PSF']
703  self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
704  'INEXACT_PSF']
705 
706 
712 
713 
714 class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
715  """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
716  @staticmethod
717  def getTargetList(parsedCmd, **kwargs):
718  return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
719 
720 
721 class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
722  r"""!
723  @anchor MeasureMergedCoaddSourcesTask_
724 
 725  @brief Deblend sources from master catalog in each coadd separately and measure.
726 
727  @section pipe_tasks_multiBand_Contents Contents
728 
729  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
730  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
731  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
732  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
733  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
734  - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
735 
736  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
737 
738  Command-line task that uses peaks and footprints from a master catalog to perform deblending and
739  measurement in each coadd.
740 
741  Given a master input catalog of sources (peaks and footprints) or deblender outputs
742  (including a HeavyFootprint in each band), measure each source on the
743  coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
744  consistent set of child sources.
745 
746  The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
747  properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
748  flags are propagated to the coadd sources.
749 
750  Optionally, we can match the coadd sources to an external reference catalog.
751 
752  @par Inputs:
 753  deepCoadd_mergeDet{tract,patch} or deepCoadd_deblendedFlux{tract,patch,filter}: SourceCatalog
754  @n deepCoadd_calexp{tract,patch,filter}: ExposureF
755  @par Outputs:
756  deepCoadd_meas{tract,patch,filter}: SourceCatalog
757  @par Data Unit:
758  tract, patch, filter
759 
760  MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
761 
762  <DL>
763  <DT> @ref SingleFrameMeasurementTask_ "measurement"
764  <DD> Measure source properties of deblended sources.</DD>
765  <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
766  <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
767  not at the edge of the field and that have either not been deblended or are the children of deblended
768  sources</DD>
769  <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
770  <DD> Propagate flags set in individual visits to the coadd.</DD>
771  <DT> @ref DirectMatchTask_ "match"
772  <DD> Match input sources to a reference catalog (optional).
773  </DD>
774  </DL>
775  These subtasks may be retargeted as required.
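
 For example, a config override might switch off the optional steps this task exposes
 (a sketch using only fields defined in MeasureMergedCoaddSourcesConfig):
 @code
 config.doMatchSources = False    # skip matching to the reference catalog
 config.doPropagateFlags = False  # skip propagating visit flags to the coadd
 @endcode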
776 
777  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
778 
779  @copydoc \_\_init\_\_
780 
781  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
782 
783  @copydoc run
784 
785  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
786 
787  See @ref MeasureMergedCoaddSourcesConfig_
788 
789  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
790 
791  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
792  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
793  files.
794 
795  MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
 796  the various sub-tasks. See the documentation for individual sub-tasks for more information.
797 
798  @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
799  MeasureMergedCoaddSourcesTask
800 
801  After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
802  The next stage in the multi-band processing procedure will merge these measurements into a suitable
803  catalog for driving forced photometry.
804 
805  Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
806  to be processed.
807  A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
808  `--help` command line argument:
809  @code
810  measureCoaddSources.py --help
811  @endcode
812 
 813  To demonstrate usage of MeasureMergedCoaddSourcesTask in the larger context of multi-band processing, we
814  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
815  step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
816  coadd as follows:
817  @code
818  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
819  @endcode
 820  This will process the HSC-I band data. The results are written to
 821  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.
822 
823  It is also necessary to run
824  @code
825  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
826  @endcode
 827  to generate the source catalogs for the HSC-R band required by the next step in the multi-band
828  procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
829  """
830  _DefaultName = "measureCoaddSources"
831  ConfigClass = MeasureMergedCoaddSourcesConfig
832  RunnerClass = MeasureMergedCoaddSourcesRunner
833  getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
834  makeIdFactory = _makeMakeIdFactory("MergedCoaddId") # The IDs we already have are of this type
835 
836  @classmethod
837  def _makeArgumentParser(cls):
838  parser = ArgumentParser(name=cls._DefaultName)
839  parser.add_id_argument("--id", "deepCoadd_calexp",
840  help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
841  ContainerClass=ExistingCoaddDataIdContainer)
842  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
843  return parser
844 
845  def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
846  **kwargs):
847  """!
848  @brief Initialize the task.
849 
850  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
851  @param[in] schema: the schema of the merged detection catalog used as input to this one
852  @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
853  @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
854  catalog. May be None if the loader can be constructed from the butler argument or all steps
855  requiring a reference catalog are disabled.
856  @param[in] butler: a butler used to read the input schemas from disk or construct the reference
857  catalog loader, if schema or peakSchema or refObjLoader is None
858 
859  The task will set its own self.schema attribute to the schema of the output measurement catalog.
860  This will include all fields from the input schema, as well as additional fields for all the
861  measurements.
862  """
863  super().__init__(**kwargs)
864  self.deblended = self.config.inputCatalog.startswith("deblended")
865  self.inputCatalog = "Coadd_" + self.config.inputCatalog
866  if initInputs is not None:
867  schema = initInputs['inputSchema'].schema
868  if schema is None:
869  assert butler is not None, "Neither butler nor schema is defined"
870  schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
 871  self.schemaMapper = afwTable.SchemaMapper(schema)
 872  self.schemaMapper.addMinimalSchema(schema)
873  self.schema = self.schemaMapper.getOutputSchema()
 874  self.algMetadata = PropertyList()
 875  self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
876  self.makeSubtask("setPrimaryFlags", schema=self.schema)
877  if self.config.doMatchSources:
878  self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
879  if self.config.doPropagateFlags:
880  self.makeSubtask("propagateFlags", schema=self.schema)
881  self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
882  if self.config.doApCorr:
883  self.makeSubtask("applyApCorr", schema=self.schema)
884  if self.config.doRunCatalogCalculation:
885  self.makeSubtask("catalogCalculation", schema=self.schema)
886 
887  @classmethod
888  def getInputDatasetTypes(cls, config):
889  inputDatasetTypes = super().getInputDatasetTypes(config)
890  if not config.doPropagateFlags:
891  inputDatasetTypes.pop("visitCatalogs")
892  return inputDatasetTypes
893 
894  @classmethod
895  def getOutputDatasetTypes(cls, config):
896  outputDatasetTypes = super().getOutputDatasetTypes(config)
897  if config.doMatchSources is False:
898  outputDatasetTypes.pop("matchResult")
899  if config.doWriteMatchesDenormalized is False:
900  outputDatasetTypes.pop("denormMatches")
901  return outputDatasetTypes
902 
903  @classmethod
904  def getPrerequisiteDatasetTypes(cls, config):
905  return frozenset(["refCat"])
906 
 907  def getInitOutputDatasets(self):
 908  return {"outputSchema": afwTable.SourceCatalog(self.schema)}
909 
910  def adaptArgsAndRun(self, inputData, inputDataIds, outputDataIds, butler):
911  refObjLoader = ReferenceObjectLoader(inputDataIds['refCat'], butler,
912  config=self.config.refObjLoader, log=self.log)
913  self.match.setRefObjLoader(refObjLoader)
914 
915  # Set psfcache
916  # move this to run after gen2 deprecation
917  inputData['exposure'].getPsf().setCacheCapacity(self.config.psfCache)
918 
919  # Get unique integer ID for IdFactory and RNG seeds
920  packedId, maxBits = butler.registry.packDataId("TractPatch", outputDataIds["outputSources"],
921  returnMaxBits=True)
922  inputData['exposureId'] = packedId
923  idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
924  # Transform inputCatalog
925  table = afwTable.SourceTable.make(self.schema, idFactory)
926  sources = afwTable.SourceCatalog(table)
927  sources.extend(inputData.pop('intakeCatalog'), self.schemaMapper)
928  table = sources.getTable()
929  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
930  inputData['sources'] = sources
931 
932  skyMap = inputData.pop('skyMap')
933  tractNumber = inputDataIds['intakeCatalog']['tract']
934  tractInfo = skyMap[tractNumber]
935  patchInfo = tractInfo.getPatchInfo(inputDataIds['intakeCatalog']['patch'])
936  skyInfo = Struct(
937  skyMap=skyMap,
938  tractInfo=tractInfo,
939  patchInfo=patchInfo,
940  wcs=tractInfo.getWcs(),
941  bbox=patchInfo.getOuterBBox()
942  )
943  inputData['skyInfo'] = skyInfo
944 
945  if self.config.doPropagateFlags:
946  # Filter out any visit catalog that is not coadd inputs
947  ccdInputs = inputData['exposure'].getInfo().getCoaddInputs().ccds
948  visitKey = ccdInputs.schema.find("visit").key
949  ccdKey = ccdInputs.schema.find("ccd").key
950  inputVisitIds = set()
951  ccdRecordsWcs = {}
952  for ccdRecord in ccdInputs:
953  visit = ccdRecord.get(visitKey)
954  ccd = ccdRecord.get(ccdKey)
955  inputVisitIds.add((visit, ccd))
956  ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
957 
958  inputCatalogsToKeep = []
959  inputCatalogWcsUpdate = []
960  for i, dataId in enumerate(inputDataIds['visitCatalogs']):
961  key = (dataId['visit'], dataId['detector'])
962  if key in inputVisitIds:
963  inputCatalogsToKeep.append(inputData['visitCatalogs'][i])
964  inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
965  inputData['visitCatalogs'] = inputCatalogsToKeep
966  inputData['wcsUpdates'] = inputCatalogWcsUpdate
967  inputData['ccdInputs'] = ccdInputs
968 
969  return self.run(**inputData)
970 
971  def runDataRef(self, patchRef, psfCache=100):
972  """!
973  @brief Deblend and measure.
974 
975  @param[in] patchRef: Patch reference.
976 
977  Set 'is-primary' and related flags. Propagate flags
978  from individual visits. Optionally match the sources to a reference catalog and write the matches.
979  Finally, write the deblended sources and measurements out.
980  """
981  exposure = patchRef.get(self.config.coaddName + "Coadd_calexp", immediate=True)
982  exposure.getPsf().setCacheCapacity(psfCache)
983  sources = self.readSources(patchRef)
984  table = sources.getTable()
985  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
986  skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
987 
988  if self.config.doPropagateFlags:
989  ccdInputs = self.propagateFlags.getCcdInputs(exposure)
990  else:
991  ccdInputs = None
992 
993  results = self.run(exposure=exposure, sources=sources,
994  ccdInputs=ccdInputs,
995  skyInfo=skyInfo, butler=patchRef.getButler(),
996  exposureId=self.getExposureId(patchRef))
997 
998  if self.config.doMatchSources:
999  self.writeMatches(patchRef, results)
1000  self.write(patchRef, results.outputSources)
1001 
1002  def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
1003  butler=None):
1004  """Run measurement algorithms on the input exposure, and optionally populate the
1005  resulting catalog with extra information.
1006 
1007  Parameters
1008  ----------
 1009  exposure : `lsst.afw.image.Exposure`
1010  The input exposure on which measurements are to be performed
1011  sources : `lsst.afw.table.SourceCatalog`
1012  A catalog built from the results of merged detections, or
1013  deblender outputs.
1014  skyInfo : `lsst.pipe.base.Struct`
1015  A struct containing information about the position of the input exposure within
1016  a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
1017  exposureId : `int` or `bytes`
1018  packed unique number or bytes unique to the input exposure
1019  ccdInputs : `lsst.afw.table.ExposureCatalog`
1020  Catalog containing information on the individual visits which went into making
1021  the exposure
1022  visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
1023  A list of source catalogs corresponding to measurements made on the individual
1024  visits which went into the input exposure. If None and butler is `None` then
1025  the task cannot propagate visit flags to the output catalog.
1026  wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
1027  If visitCatalogs is not `None` this should be a list of wcs objects which correspond
1028  to the input visits. Used to put all coordinates to common system. If `None` and
1029  butler is `None` then the task cannot propagate visit flags to the output catalog.
1030  butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
1031  Either a gen2 or gen3 butler used to load visit catalogs
1032 
1033  Returns
1034  -------
1035  results : `lsst.pipe.base.Struct`
1036  Results of running measurement task. Will contain the catalog in the
1037  sources attribute. Optionally will have results of matching to a
1038  reference catalog in the matchResults attribute, and denormalized
1039  matches in the denormMatches attribute.
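
 A minimal call on a constructed task, assuming `exposure`, `sources`, and `skyInfo`
 are already in hand and that flag propagation and reference matching are disabled in
 the task config (the exposureId value is a placeholder)::

     results = task.run(exposure=exposure, sources=sources,
                        skyInfo=skyInfo, exposureId=0)
     catalog = results.outputSources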
1040  """
1041  self.measurement.run(sources, exposure, exposureId=exposureId)
1042 
1043  if self.config.doApCorr:
1044  self.applyApCorr.run(
1045  catalog=sources,
1046  apCorrMap=exposure.getInfo().getApCorrMap()
1047  )
1048 
1049  # TODO DM-11568: this contiguous check-and-copy could go away if we
1050  # reserve enough space during SourceDetection and/or SourceDeblend.
1051  # NOTE: sourceSelectors require contiguous catalogs, so ensure
1052  # contiguity now, so views are preserved from here on.
1053  if not sources.isContiguous():
1054  sources = sources.copy(deep=True)
1055 
1056  if self.config.doRunCatalogCalculation:
1057  self.catalogCalculation.run(sources)
1058 
1059  self.setPrimaryFlags.run(sources, skyInfo.skyMap, skyInfo.tractInfo, skyInfo.patchInfo,
1060  includeDeblend=self.deblended)
1061  if self.config.doPropagateFlags:
1062  self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)
1063 
1064  results = Struct()
1065 
1066  if self.config.doMatchSources:
1067  matchResult = self.match.run(sources, exposure.getInfo().getFilter().getName())
1068  matches = afwTable.packMatches(matchResult.matches)
1069  matches.table.setMetadata(matchResult.matchMeta)
1070  results.matchResult = matches
1071  if self.config.doWriteMatchesDenormalized:
1072  results.denormMatches = denormalizeMatches(matchResult.matches,
1073  matchResult.matchMeta)
1074 
1075  results.outputSources = sources
1076  return results
1077 
1078  def readSources(self, dataRef):
1079  """!
1080  @brief Read input sources.
1081 
1082  @param[in] dataRef: Data reference for catalog of merged detections
1083  @return List of sources in merged catalog
1084 
1085  We also need to add columns to hold the measurements we're about to make
1086  so we can measure in-place.
1087  """
1088  merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1089  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
1090  idFactory = self.makeIdFactory(dataRef)
1091  for s in merged:
1092  idFactory.notify(s.getId())
1093  table = afwTable.SourceTable.make(self.schema, idFactory)
1094  sources = afwTable.SourceCatalog(table)
1095  sources.extend(merged, self.schemaMapper)
1096  return sources
1097 
1098  def writeMatches(self, dataRef, results):
1099  """!
1100  @brief Write matches of the sources to the astrometric reference catalog.
1101 
1102  @param[in] dataRef: data reference
1103  @param[in] results: results struct from run method
1104  """
1105  if hasattr(results, "matchResult"):
1106  dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
1107  if hasattr(results, "denormMatches"):
1108  dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1109 
1110  def write(self, dataRef, sources):
1111  """!
1112  @brief Write the source catalog.
1113 
1114  @param[in] dataRef: data reference
1115  @param[in] sources: source catalog
1116  """
1117  dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1118  self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
1119 
1120  def getExposureId(self, dataRef):
1121  return int(dataRef.get(self.config.coaddName + "CoaddId"))