LSSTApplications  17.0+11,17.0+35,17.0+60,17.0+61,17.0+63,17.0+7,17.0-1-g377950a+35,17.0.1-1-g114240f+2,17.0.1-1-g4d4fbc4+30,17.0.1-1-g55520dc+55,17.0.1-1-g5f4ed7e+59,17.0.1-1-g6dd7d69+22,17.0.1-1-g8de6c91+11,17.0.1-1-gb9095d2+7,17.0.1-1-ge9fec5e+5,17.0.1-1-gf4e0155+63,17.0.1-1-gfc65f5f+56,17.0.1-1-gfc6fb1f+20,17.0.1-10-g87f9f3f+9,17.0.1-12-g112a4bc+3,17.0.1-17-gab9750a3+5,17.0.1-17-gdae4c4a+16,17.0.1-19-g3a24bb2+2,17.0.1-2-g26618f5+35,17.0.1-2-g54f2ebc+9,17.0.1-2-gf403422+1,17.0.1-21-g52a398f+5,17.0.1-26-gd98a1d13,17.0.1-3-g7e86b59+45,17.0.1-3-gb5ca14a,17.0.1-3-gd08d533+46,17.0.1-31-gb0791f330,17.0.1-4-g59d126d+10,17.0.1-5-g3877d06+2,17.0.1-7-g35889ee+7,17.0.1-7-gc7c8782+20,17.0.1-7-gcb7da53+5,17.0.1-9-gc4bbfb2+10,w.2019.24
LSSTDataManagementBasePackage
multiBand.py
Go to the documentation of this file.
1 #!/usr/bin/env python
2 #
3 # LSST Data Management System
4 # Copyright 2008-2015 AURA/LSST.
5 #
6 # This product includes software developed by the
7 # LSST Project (http://www.lsst.org/).
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the LSST License Statement and
20 # the GNU General Public License along with this program. If not,
21 # see <https://www.lsstcorp.org/LegalNotices/>.
22 #
23 from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
24 from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
25  PipelineTask, PipelineTaskConfig, InitInputDatasetField,
26  InitOutputDatasetField, InputDatasetField, OutputDatasetField)
27 from lsst.pex.config import Config, Field, ConfigurableField
28 from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader
29 from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
30 from lsst.meas.deblender import SourceDeblendTask, MultibandDeblendTask
31 from lsst.pipe.tasks.coaddBase import getSkyInfo
32 from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
33 from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
34 from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
35 from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
36 from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
37 import lsst.afw.image as afwImage
38 import lsst.afw.table as afwTable
39 import lsst.afw.math as afwMath
40 from lsst.daf.base import PropertyList
41 
42 from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
43 from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
44 from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
45 from .multiBandUtils import getInputSchema, getShortFilterName, readCatalog, _makeMakeIdFactory # noqa: F401
46 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
47 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
48 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
49 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
50 
51 
52 """
53 New set types:
54 * deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
55 * deepCoadd_mergeDet: merged detections (tract, patch)
56 * deepCoadd_meas: measurements of merged detections (tract, patch, filter)
57 * deepCoadd_ref: reference sources (tract, patch)
58 All of these have associated *_schema catalogs that require no data ID and hold no records.
59 
60 In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
61 the mergeDet, meas, and ref dataset Footprints:
62 * deepCoadd_peak_schema
63 """
64 
65 
66 
67 
class DetectCoaddSourcesConfig(PipelineTaskConfig):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task")
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)")
    # Gen3 (PipelineTask) dataset declarations.  The name templates are
    # expanded in setDefaults() via formatTemplateNames().
    detectionSchema = InitOutputDatasetField(
        doc="Schema of the detection catalog",
        nameTemplate="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = InputDatasetField(
        doc="Exposure on which detections are to be performed",
        nameTemplate="{inputCoaddName}Coadd",
        scalar=True,
        storageClass="ExposureF",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )
    outputBackgrounds = OutputDatasetField(
        doc="Output Backgrounds used in detection",
        nameTemplate="{outputCoaddName}Coadd_calexp_background",
        scalar=True,
        storageClass="Background",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )
    outputSources = OutputDatasetField(
        doc="Detected sources catalog",
        nameTemplate="{outputCoaddName}Coadd_det",
        scalar=True,
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )
    outputExposure = OutputDatasetField(
        doc="Exposure post detection",
        nameTemplate="{outputCoaddName}Coadd_calexp",
        scalar=True,
        storageClass="ExposureF",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    def setDefaults(self):
        super().setDefaults()
        self.quantum.dimensions = ("tract", "patch", "abstract_filter", "skymap")
        self.formatTemplateNames({"inputCoaddName": "deep", "outputCoaddName": "deep"})
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender
135 
136 
142 
143 
144 class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
145  r"""!
146  @anchor DetectCoaddSourcesTask_
147 
148  @brief Detect sources on a coadd
149 
150  @section pipe_tasks_multiBand_Contents Contents
151 
152  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
153  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
154  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
155  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
156  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
157  - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example
158 
159  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description
160 
161  Command-line task that detects sources on a coadd of exposures obtained with a single filter.
162 
163  Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
164  properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
165  in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
166  propagate the full covariance matrix -- but it is simple and works well in practice.
167 
168  After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
169  SourceDetectionTask_ "detection" subtask.
170 
171  @par Inputs:
172  deepCoadd{tract,patch,filter}: ExposureF
173  @par Outputs:
174  deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
175  @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
176  exposure (ExposureF)
177  @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
178  @par Data Unit:
179  tract, patch, filter
180 
181  DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
182  You can retarget this subtask if you wish.
183 
184  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization
185 
186  @copydoc \_\_init\_\_
187 
188  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task
189 
190  @copydoc run
191 
192  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters
193 
194  See @ref DetectCoaddSourcesConfig_ "DetectSourcesConfig"
195 
196  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables
197 
198  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
199  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
200  files.
201 
202  DetectCoaddSourcesTask has no debug variables of its own because it relegates all the work to
203  @ref SourceDetectionTask_ "SourceDetectionTask"; see the documetation for
204  @ref SourceDetectionTask_ "SourceDetectionTask" for further information.
205 
206  @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
207  of using DetectCoaddSourcesTask
208 
209  DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
210  the task is to update the background, detect all sources in a single band and generate a set of parent
211  footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
212  eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
213  reference to the coadd to be processed. A list of the available optional arguments can be obtained by
214  calling detectCoaddSources.py with the `--help` command line argument:
215  @code
216  detectCoaddSources.py --help
217  @endcode
218 
219  To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
220  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
221  steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
222  @code
223  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
224  @endcode
225  that will process the HSC-I band data. The results are written to
226  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.
227 
228  It is also necessary to run:
229  @code
230  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
231  @endcode
232  to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
233  processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
234  """
235  _DefaultName = "detectCoaddSources"
236  ConfigClass = DetectCoaddSourcesConfig
237  getSchemaCatalogs = _makeGetSchemaCatalogs("det")
238  makeIdFactory = _makeMakeIdFactory("CoaddId")
239 
    @classmethod
    def _makeArgumentParser(cls):
        # Build the Gen2 command-line parser; ExistingCoaddDataIdContainer
        # restricts the --id argument to coadds that actually exist on disk.
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        return parser
246 
    def __init__(self, schema=None, **kwargs):
        """!
        @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):

        @param[in] schema: initial schema for the output catalog, modified-in place to include all
            fields set by this task. If None, the source minimal schema will be used.
        @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
        """
        # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
        # call structure has been reviewed carefully to be sure super will work as intended.
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        # Optional subtasks are only constructed when enabled in the config.
        if self.config.doInsertFakes:
            self.makeSubtask("insertFakes")
        self.schema = schema
        # The detection subtask adds its own fields to self.schema in place.
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")
268 
270  return {"detectionSchema": afwTable.SourceCatalog(self.schema)}
271 
272  def runDataRef(self, patchRef):
273  """!
274  @brief Run detection on a coadd.
275 
276  Invokes @ref run and then uses @ref write to output the
277  results.
278 
279  @param[in] patchRef: data reference for patch
280  """
281  if self.config.hasFakes:
282  exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
283  else:
284  exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
285  expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
286  results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
287  self.write(results, patchRef)
288  return results
289 
    def adaptArgsAndRun(self, inputData, inputDataIds, outputDataIds, butler):
        # Gen3 middleware entry point: pack the (tract, patch, filter) data ID
        # into a single integer and reserve the remaining bits of a 64-bit ID
        # for per-source numbering, mirroring what makeIdFactory does in Gen2.
        packedId, maxBits = butler.registry.packDataId("tract_patch_abstract_filter",
                                                       inputDataIds["exposure"],
                                                       returnMaxBits=True)
        inputData["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        inputData["expId"] = packedId
        return self.run(**inputData)
297 
    def run(self, exposure, idFactory, expId):
        """!
        @brief Run detection on an exposure.

        First scale the variance plane to match the observed variance
        using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
        detect sources.

        @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
            depending on configuration).
        @param[in] idFactory: IdFactory to set source identifiers
        @param[in] expId: Exposure identifier (integer) for RNG seed

        @return a pipe.base.Struct with fields
        - outputSources: catalog of detections
        - outputBackgrounds: list of backgrounds
        - outputExposure: the (possibly modified) input exposure
        """
        if self.config.doScaleVariance:
            # Record the applied scale factor in the exposure metadata so it
            # can be recovered downstream.
            varScale = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("variance_scale", varScale)
        backgrounds = afwMath.BackgroundList()
        if self.config.doInsertFakes:
            self.insertFakes.run(exposure, background=backgrounds)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.makeSourceCatalog(table, exposure, expId=expId)
        sources = detections.sources
        fpSets = detections.fpSets
        # Defensive check: not every detection configuration produces a
        # background attribute on the footprint sets.
        if hasattr(fpSets, "background") and fpSets.background:
            for bg in fpSets.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
329 
    def write(self, results, patchRef):
        """!
        @brief Write out results from run.

        @param[in] results: Struct returned from run, with outputBackgrounds,
            outputSources and outputExposure fields
        @param[in] patchRef: data reference for patch
        """
        coaddName = self.config.coaddName + "Coadd"
        patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
        patchRef.put(results.outputSources, coaddName + "_det")
        # With fakes inserted, the calibrated exposure is written under a
        # distinct "fakes_"-prefixed dataset name.
        if self.config.hasFakes:
            patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
        else:
            patchRef.put(results.outputExposure, coaddName + "_calexp")
345 
346 
347 
348 
350  """DeblendCoaddSourcesConfig
351 
352  Configuration parameters for the `DeblendCoaddSourcesTask`.
353  """
354  singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
355  doc="Deblend sources separately in each band")
356  multiBandDeblend = ConfigurableField(target=MultibandDeblendTask,
357  doc="Deblend sources simultaneously across bands")
358  simultaneous = Field(dtype=bool, default=False, doc="Simultaneously deblend all bands?")
359  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
360  hasFakes = Field(dtype=bool,
361  default=False,
362  doc="Should be set to True if fake sources have been inserted into the input data.")
363 
    def setDefaults(self):
        Config.setDefaults(self)
        # Keep every merged peak when deblending in a single band, so peaks
        # contributed by other bands are not dropped from the catalog.
        self.singleBandDeblend.propagateAllPeaks = True
367 
368 
370  """Task runner for the `MergeSourcesTask`
371 
372  Required because the run method requires a list of
373  dataRefs rather than a single dataRef.
374  """
375  @staticmethod
376  def getTargetList(parsedCmd, **kwargs):
377  """Provide a list of patch references for each patch, tract, filter combo.
378 
379  Parameters
380  ----------
381  parsedCmd:
382  The parsed command
383  kwargs:
384  Keyword arguments passed to the task
385 
386  Returns
387  -------
388  targetList: list
389  List of tuples, where each tuple is a (dataRef, kwargs) pair.
390  """
391  refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
392  kwargs["psfCache"] = parsedCmd.psfCache
393  return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
394 
395 
396 class DeblendCoaddSourcesTask(CmdLineTask):
397  """Deblend the sources in a merged catalog
398 
399  Deblend sources from master catalog in each coadd.
400  This can either be done separately in each band using the HSC-SDSS deblender
401  (`DeblendCoaddSourcesTask.config.simultaneous==False`)
402  or use SCARLET to simultaneously fit the blend in all bands
403  (`DeblendCoaddSourcesTask.config.simultaneous==True`).
404  The task will set its own `self.schema` atribute to the `Schema` of the
405  output deblended catalog.
406  This will include all fields from the input `Schema`, as well as additional fields
407  from the deblender.
408 
409  `pipe.tasks.multiband.DeblendCoaddSourcesTask Description
410  ---------------------------------------------------------
411  `
412 
413  Parameters
414  ----------
415  butler: `Butler`
416  Butler used to read the input schemas from disk or
 417  construct the reference catalog loader, if `schema` or `peakSchema` are not supplied.
418  schema: `Schema`
419  The schema of the merged detection catalog as an input to this task.
420  peakSchema: `Schema`
421  The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
422  """
423  ConfigClass = DeblendCoaddSourcesConfig
424  RunnerClass = DeblendCoaddSourcesRunner
425  _DefaultName = "deblendCoaddSources"
426  makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
427 
    @classmethod
    def _makeArgumentParser(cls):
        # Build the Gen2 command-line parser.  The --id argument accepts
        # multiple filters (e.g. filter=g^r^i) because deblending may be run
        # simultaneously across bands.
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser
436 
437  def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
438  CmdLineTask.__init__(self, **kwargs)
439  if schema is None:
440  assert butler is not None, "Neither butler nor schema is defined"
441  schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
443  self.schemaMapper.addMinimalSchema(schema)
444  self.schema = self.schemaMapper.getOutputSchema()
445  if peakSchema is None:
446  assert butler is not None, "Neither butler nor peakSchema is defined"
447  peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema
448 
449  if self.config.simultaneous:
450  self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
451  else:
452  self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
453 
454  def getSchemaCatalogs(self):
455  """Return a dict of empty catalogs for each catalog dataset produced by this task.
456 
457  Returns
458  -------
459  result: dict
460  Dictionary of empty catalogs, with catalog names as keys.
461  """
462  catalog = afwTable.SourceCatalog(self.schema)
463  return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
464  self.config.coaddName + "Coadd_deblendedModel": catalog}
465 
    def runDataRef(self, patchRefList, psfCache=100):
        """Deblend the patch

        Deblend each source simultaneously or separately
        (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
        Set `is-primary` and related flags.
        Propagate flags from individual visits.
        Write the deblended sources out.

        Parameters
        ----------
        patchRefList: list
            List of data references for each filter
        psfCache: int
            Size of the CoaddPsf cache (single-band path only)
        """

        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName

        if self.config.simultaneous:
            # Use SCARLET to simultaneously deblend across filters
            filters = []
            exposures = []
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                filters.append(patchRef.dataId["filter"])
                exposures.append(exposure)
            # The input sources are the same for all bands, since it is a merged catalog;
            # patchRef here is the last reference from the loop above.
            sources = self.readSources(patchRef)
            exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
            fluxCatalogs, templateCatalogs = self.multiBandDeblend.run(exposure, sources)
            # NOTE(review): if the multiband deblender is configured without
            # conserveFlux it may return None for fluxCatalogs, in which case
            # the subscript below would fail — confirm against the subtask.
            for n in range(len(patchRefList)):
                self.write(patchRefList[n], fluxCatalogs[filters[n]], templateCatalogs[filters[n]])
        else:
            # Use the single-band deblender to deblend each band separately
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                exposure.getPsf().setCacheCapacity(psfCache)
                sources = self.readSources(patchRef)
                self.singleBandDeblend.run(exposure, sources)
                self.write(patchRef, sources)
508 
    def readSources(self, dataRef):
        """Read merged catalog

        Read the catalog of merged detections and create a catalog
        in a single band.

        The records are copied through ``self.schemaMapper`` so that the
        output catalog has room for the columns the deblender will add,
        allowing in-place measurement.

        Parameters
        ----------
        dataRef: data reference
            Data reference for catalog of merged detections

        Returns
        -------
        sources: `SourceCatalog`
            List of sources in merged catalog
        """
        merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
        self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
        idFactory = self.makeIdFactory(dataRef)
        # Tell the IdFactory about every existing ID so newly generated
        # (deblended child) IDs do not collide with the merged parents.
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources
537 
538  def write(self, dataRef, flux_sources, template_sources=None):
539  """Write the source catalog(s)
540 
541  Parameters
542  ----------
543  dataRef: Data Reference
544  Reference to the output catalog.
545  flux_sources: `SourceCatalog`
546  Flux conserved sources to write to file.
547  If using the single band deblender, this is the catalog
548  generated.
549  template_sources: `SourceCatalog`
550  Source catalog using the multiband template models
551  as footprints.
552  """
553  # The multiband deblender does not have to conserve flux,
554  # so only write the flux conserved catalog if it exists
555  if flux_sources is not None:
556  assert not self.config.simultaneous or self.config.multiBandDeblend.conserveFlux
557  dataRef.put(flux_sources, self.config.coaddName + "Coadd_deblendedFlux")
558  # Only the multiband deblender has the option to output the
559  # template model catalog, which can optionally be used
560  # in MeasureMergedCoaddSources
561  if template_sources is not None:
562  assert self.config.multiBandDeblend.saveTemplates
563  dataRef.put(template_sources, self.config.coaddName + "Coadd_deblendedModel")
564  self.log.info("Wrote %d sources: %s" % (len(flux_sources), dataRef.dataId))
565 
    def writeMetadata(self, dataRefList):
        """Write the metadata produced from processing the data.

        Persistence is best-effort: a failure for one data reference is
        logged as a warning and does not abort the remaining writes.

        Parameters
        ----------
        dataRefList
            List of Butler data references used to write the metadata.
            The metadata is written to dataset type `CmdLineTask._getMetadataName`.
        """
        for dataRef in dataRefList:
            try:
                metadataName = self._getMetadataName()
                # A task may opt out of metadata persistence by returning None.
                if metadataName is not None:
                    dataRef.put(self.getFullMetadata(), metadataName)
            except Exception as e:
                self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
581 
582  def getExposureId(self, dataRef):
583  """Get the ExposureId from a data reference
584  """
585  return int(dataRef.get(self.config.coaddName + "CoaddId"))
586 
587 
588 class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig):
589  """!
590  @anchor MeasureMergedCoaddSourcesConfig_
591 
592  @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
593  """
594  inputCatalog = Field(dtype=str, default="deblendedFlux",
595  doc=("Name of the input catalog to use."
596  "If the single band deblender was used this should be 'deblendedFlux."
597  "If the multi-band deblender was used this should be 'deblendedModel."
598  "If no deblending was performed this should be 'mergeDet'"))
599  measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
600  setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
601  doPropagateFlags = Field(
602  dtype=bool, default=True,
603  doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
604  )
605  propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
606  doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
607  match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
608  doWriteMatchesDenormalized = Field(
609  dtype=bool,
610  default=False,
611  doc=("Write reference matches in denormalized format? "
612  "This format uses more disk space, but is more convenient to read."),
613  )
614  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
615  psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
616  checkUnitsParseStrict = Field(
617  doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
618  dtype=str,
619  default="raise",
620  )
621  doApCorr = Field(
622  dtype=bool,
623  default=True,
624  doc="Apply aperture corrections"
625  )
626  applyApCorr = ConfigurableField(
627  target=ApplyApCorrTask,
628  doc="Subtask to apply aperture corrections"
629  )
630  doRunCatalogCalculation = Field(
631  dtype=bool,
632  default=True,
633  doc='Run catalogCalculation task'
634  )
635  catalogCalculation = ConfigurableField(
636  target=CatalogCalculationTask,
637  doc="Subtask to run catalogCalculation plugins on catalog"
638  )
639  inputSchema = InitInputDatasetField(
640  doc="Input schema for measure merged task produced by a deblender or detection task",
641  nameTemplate="{inputCoaddName}Coadd_deblendedFlux_schema",
642  storageClass="SourceCatalog"
643  )
644  outputSchema = InitOutputDatasetField(
645  doc="Output schema after all new fields are added by task",
646  nameTemplate="{inputCoaddName}Coadd_meas_schema",
647  storageClass="SourceCatalog"
648  )
650  doc="Reference catalog used to match measured sources against known sources",
651  name="ref_cat",
652  storageClass="SimpleCatalog",
653  dimensions=("skypix",),
654  manualLoad=True
655  )
656  exposure = InputDatasetField(
657  doc="Input coadd image",
658  nameTemplate="{inputCoaddName}Coadd_calexp",
659  scalar=True,
660  storageClass="ExposureF",
661  dimensions=("tract", "patch", "abstract_filter", "skymap")
662  )
664  doc="SkyMap to use in processing",
665  nameTemplate="{inputCoaddName}Coadd_skyMap",
666  storageClass="SkyMap",
667  dimensions=("skymap",),
668  scalar=True
669  )
670  visitCatalogs = InputDatasetField(
671  doc="Source catalogs for visits which overlap input tract, patch, abstract_filter. Will be "
672  "further filtered in the task for the purpose of propagating flags from image calibration "
673  "and characterization to codd objects",
674  name="src",
675  dimensions=("instrument", "visit", "detector"),
676  storageClass="SourceCatalog"
677  )
678  intakeCatalog = InputDatasetField(
679  doc=("Name of the input catalog to use."
680  "If the single band deblender was used this should be 'deblendedFlux."
681  "If the multi-band deblender was used this should be 'deblendedModel, "
682  "or deblendedFlux if the multiband deblender was configured to output "
683  "deblended flux catalogs. If no deblending was performed this should "
684  "be 'mergeDet'"),
685  nameTemplate="{inputCoaddName}Coadd_deblendedFlux",
686  storageClass="SourceCatalog",
687  dimensions=("tract", "patch", "abstract_filter", "skymap"),
688  scalar=True
689  )
690  outputSources = OutputDatasetField(
691  doc="Source catalog containing all the measurement information generated in this task",
692  nameTemplate="{outputCoaddName}Coadd_meas",
693  dimensions=("tract", "patch", "abstract_filter", "skymap"),
694  storageClass="SourceCatalog",
695  scalar=True
696  )
697  matchResult = OutputDatasetField(
698  doc="Match catalog produced by configured matcher, optional on doMatchSources",
699  nameTemplate="{outputCoaddName}Coadd_measMatch",
700  dimensions=("tract", "patch", "abstract_filter", "skymap"),
701  storageClass="Catalog",
702  scalar=True
703  )
704  denormMatches = OutputDatasetField(
705  doc="Denormalized Match catalog produced by configured matcher, optional on "
706  "doWriteMatchesDenormalized",
707  nameTemplate="{outputCoaddName}Coadd_measMatchFull",
708  dimensions=("tract", "patch", "abstract_filter", "skymap"),
709  storageClass="Catalog",
710  scalar=True
711  )
712 
713  hasFakes = Field(
714  dtype=bool,
715  default=False,
716  doc="Should be set to True if fake sources have been inserted into the input data."
717  )
718 
    @property
    def refObjLoader(self):
        # Convenience alias: the reference-object loader configuration lives
        # on the matching subtask's config.
        return self.match.refObjLoader
722 
    def setDefaults(self):
        super().setDefaults()
        # Expand the Gen3 dataset name templates and set the quantum
        # dimensions for this task.
        self.formatTemplateNames({"inputCoaddName": "deep", "outputCoaddName": "deep"})
        self.quantum.dimensions = ("tract", "patch", "abstract_filter", "skymap")
        self.measurement.plugins.names |= ['base_InputCount', 'base_Variance']
        # Flag sources whose footprints touch coadd-specific mask planes.
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']
732 
733 
739 
740 
class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
    """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        # Note: any incoming **kwargs are discarded; only psfCache from the
        # parsed command line is forwarded to the base-class implementation.
        return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
746 
747 
748 class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
749  r"""!
750  @anchor MeasureMergedCoaddSourcesTask_
751 
752  @brief Deblend sources from master catalog in each coadd separately and measure.
753 
754  @section pipe_tasks_multiBand_Contents Contents
755 
756  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
757  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
758  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
759  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
760  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
761  - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
762 
763  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
764 
765  Command-line task that uses peaks and footprints from a master catalog to perform deblending and
766  measurement in each coadd.
767 
768  Given a master input catalog of sources (peaks and footprints) or deblender outputs
769  (including a HeavyFootprint in each band), measure each source on the
770  coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
771  consistent set of child sources.
772 
773  The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
774  properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
775  flags are propagated to the coadd sources.
776 
777  Optionally, we can match the coadd sources to an external reference catalog.
778 
779  @par Inputs:
780  deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
781  @n deepCoadd_calexp{tract,patch,filter}: ExposureF
782  @par Outputs:
783  deepCoadd_meas{tract,patch,filter}: SourceCatalog
784  @par Data Unit:
785  tract, patch, filter
786 
787  MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
788 
789  <DL>
790  <DT> @ref SingleFrameMeasurementTask_ "measurement"
791  <DD> Measure source properties of deblended sources.</DD>
792  <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
793  <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
794  not at the edge of the field and that have either not been deblended or are the children of deblended
795  sources</DD>
796  <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
797  <DD> Propagate flags set in individual visits to the coadd.</DD>
798  <DT> @ref DirectMatchTask_ "match"
799  <DD> Match input sources to a reference catalog (optional).
800  </DD>
801  </DL>
802  These subtasks may be retargeted as required.
803 
804  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
805 
806  @copydoc \_\_init\_\_
807 
808  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
809 
810  @copydoc run
811 
812  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
813 
814  See @ref MeasureMergedCoaddSourcesConfig_
815 
816  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
817 
818  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
819  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
820  files.
821 
822  MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
823  the various sub-tasks. See the documentation for individual sub-tasks for more information.
824 
825  @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
826  MeasureMergedCoaddSourcesTask
827 
828  After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
829  The next stage in the multi-band processing procedure will merge these measurements into a suitable
830  catalog for driving forced photometry.
831 
832  Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
833  to be processed.
834  A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
835  `--help` command line argument:
836  @code
837  measureCoaddSources.py --help
838  @endcode
839 
840  To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
841  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
842  step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
843  coadd as follows:
844  @code
845  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
846  @endcode
847  This will process the HSC-I band data. The results are written in
848  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`
849 
850  It is also necessary to run
851  @code
852  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
853  @endcode
854  to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
855  procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
856  """
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    RunnerClass = MeasureMergedCoaddSourcesRunner
    # Factory producing the schema-catalog getter for the "*Coadd_meas" dataset.
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")  # The IDs we already have are of this type
862 
863  @classmethod
864  def _makeArgumentParser(cls):
865  parser = ArgumentParser(name=cls._DefaultName)
866  parser.add_id_argument("--id", "deepCoadd_calexp",
867  help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
868  ContainerClass=ExistingCoaddDataIdContainer)
869  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
870  return parser
871 
    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
        catalog. May be None if the loader can be constructed from the butler argument or all steps
        requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
        catalog loader, if schema or peakSchema or refObjLoader is None
        @param[in] initInputs: Gen3 initialization-input dictionary; when given, the input schema is
        taken from initInputs['inputSchema'] in preference to the schema/butler arguments

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        super().__init__(**kwargs)
        # True when the configured input catalog already contains deblender outputs.
        self.deblended = self.config.inputCatalog.startswith("deblended")
        # Dataset-name suffix for the input catalog, e.g. "Coadd_mergeDet".
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            # Gen3 path: the schema arrives via initInputs rather than the butler.
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        # NOTE(review): self.schemaMapper is used below but never assigned in the
        # visible code — an assignment (presumably a SchemaMapper built from
        # ``schema``) appears to have been dropped from this extract; confirm
        # against the original source.
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        # NOTE(review): self.algMetadata is likewise referenced without a visible
        # assignment (typically a PropertyList) — verify against the original.
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        # Fail early if any schema field carries units that do not parse.
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)
913 
914  @classmethod
915  def getInputDatasetTypes(cls, config):
916  inputDatasetTypes = super().getInputDatasetTypes(config)
917  if not config.doPropagateFlags:
918  inputDatasetTypes.pop("visitCatalogs")
919  return inputDatasetTypes
920 
921  @classmethod
922  def getOutputDatasetTypes(cls, config):
923  outputDatasetTypes = super().getOutputDatasetTypes(config)
924  if config.doMatchSources is False:
925  outputDatasetTypes.pop("matchResult")
926  if config.doWriteMatchesDenormalized is False:
927  outputDatasetTypes.pop("denormMatches")
928  return outputDatasetTypes
929 
930  @classmethod
931  def getPrerequisiteDatasetTypes(cls, config):
932  return frozenset(["refCat"])
933 
935  return {"outputSchema": afwTable.SourceCatalog(self.schema)}
936 
937  def adaptArgsAndRun(self, inputData, inputDataIds, outputDataIds, butler):
938  refObjLoader = ReferenceObjectLoader(inputDataIds['refCat'], butler,
939  config=self.config.refObjLoader, log=self.log)
940  self.match.setRefObjLoader(refObjLoader)
941 
942  # Set psfcache
943  # move this to run after gen2 deprecation
944  inputData['exposure'].getPsf().setCacheCapacity(self.config.psfCache)
945 
946  # Get unique integer ID for IdFactory and RNG seeds
947  packedId, maxBits = butler.registry.packDataId("tract_patch", outputDataIds["outputSources"],
948  returnMaxBits=True)
949  inputData['exposureId'] = packedId
950  idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
951  # Transform inputCatalog
952  table = afwTable.SourceTable.make(self.schema, idFactory)
953  sources = afwTable.SourceCatalog(table)
954  sources.extend(inputData.pop('intakeCatalog'), self.schemaMapper)
955  table = sources.getTable()
956  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
957  inputData['sources'] = sources
958 
959  skyMap = inputData.pop('skyMap')
960  tractNumber = inputDataIds['intakeCatalog']['tract']
961  tractInfo = skyMap[tractNumber]
962  patchInfo = tractInfo.getPatchInfo(inputDataIds['intakeCatalog']['patch'])
963  skyInfo = Struct(
964  skyMap=skyMap,
965  tractInfo=tractInfo,
966  patchInfo=patchInfo,
967  wcs=tractInfo.getWcs(),
968  bbox=patchInfo.getOuterBBox()
969  )
970  inputData['skyInfo'] = skyInfo
971 
972  if self.config.doPropagateFlags:
973  # Filter out any visit catalog that is not coadd inputs
974  ccdInputs = inputData['exposure'].getInfo().getCoaddInputs().ccds
975  visitKey = ccdInputs.schema.find("visit").key
976  ccdKey = ccdInputs.schema.find("ccd").key
977  inputVisitIds = set()
978  ccdRecordsWcs = {}
979  for ccdRecord in ccdInputs:
980  visit = ccdRecord.get(visitKey)
981  ccd = ccdRecord.get(ccdKey)
982  inputVisitIds.add((visit, ccd))
983  ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
984 
985  inputCatalogsToKeep = []
986  inputCatalogWcsUpdate = []
987  for i, dataId in enumerate(inputDataIds['visitCatalogs']):
988  key = (dataId['visit'], dataId['detector'])
989  if key in inputVisitIds:
990  inputCatalogsToKeep.append(inputData['visitCatalogs'][i])
991  inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
992  inputData['visitCatalogs'] = inputCatalogsToKeep
993  inputData['wcsUpdates'] = inputCatalogWcsUpdate
994  inputData['ccdInputs'] = ccdInputs
995 
996  return self.run(**inputData)
997 
998  def runDataRef(self, patchRef, psfCache=100):
999  """!
1000  @brief Deblend and measure.
1001 
1002  @param[in] patchRef: Patch reference.
1003 
1004  Set 'is-primary' and related flags. Propagate flags
1005  from individual visits. Optionally match the sources to a reference catalog and write the matches.
1006  Finally, write the deblended sources and measurements out.
1007  """
1008  if self.config.hasFakes:
1009  coaddType = "fakes_" + self.config.coaddName
1010  else:
1011  coaddType = self.config.coaddName
1012  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
1013  exposure.getPsf().setCacheCapacity(psfCache)
1014  sources = self.readSources(patchRef)
1015  table = sources.getTable()
1016  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
1017  skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
1018 
1019  if self.config.doPropagateFlags:
1020  ccdInputs = self.propagateFlags.getCcdInputs(exposure)
1021  else:
1022  ccdInputs = None
1023 
1024  results = self.run(exposure=exposure, sources=sources,
1025  ccdInputs=ccdInputs,
1026  skyInfo=skyInfo, butler=patchRef.getButler(),
1027  exposureId=self.getExposureId(patchRef))
1028 
1029  if self.config.doMatchSources:
1030  self.writeMatches(patchRef, results)
1031  self.write(patchRef, results.outputSources)
1032 
    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The input exposure on which measurements are to be performed
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
        exposureId : `int` or `bytes`
            packed unique number or bytes unique to the input exposure
        ccdInputs : `lsst.afw.table.ExposureCatalog`
            Catalog containing information on the individual visits which went into making
            the exposure
        visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
        wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
        butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
            Either a gen2 or gen3 butler used to load visit catalogs

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            # Apply aperture corrections stored with the exposure.
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        # Set is-primary and related flags (edge status, deblend parent/child).
        self.setPrimaryFlags.run(sources, skyInfo.skyMap, skyInfo.tractInfo, skyInfo.patchInfo,
                                 includeDeblend=self.deblended)
        if self.config.doPropagateFlags:
            self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().getName())
            # Store matches in packed (normalized) form along with their metadata.
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                results.denormMatches = denormalizeMatches(matchResult.matches,
                                                           matchResult.matchMeta)

        results.outputSources = sources
        return results
1108 
1109  def readSources(self, dataRef):
1110  """!
1111  @brief Read input sources.
1112 
1113  @param[in] dataRef: Data reference for catalog of merged detections
1114  @return List of sources in merged catalog
1115 
1116  We also need to add columns to hold the measurements we're about to make
1117  so we can measure in-place.
1118  """
1119  merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1120  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
1121  idFactory = self.makeIdFactory(dataRef)
1122  for s in merged:
1123  idFactory.notify(s.getId())
1124  table = afwTable.SourceTable.make(self.schema, idFactory)
1125  sources = afwTable.SourceCatalog(table)
1126  sources.extend(merged, self.schemaMapper)
1127  return sources
1128 
1129  def writeMatches(self, dataRef, results):
1130  """!
1131  @brief Write matches of the sources to the astrometric reference catalog.
1132 
1133  @param[in] dataRef: data reference
1134  @param[in] results: results struct from run method
1135  """
1136  if hasattr(results, "matchResult"):
1137  dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
1138  if hasattr(results, "denormMatches"):
1139  dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1140 
1141  def write(self, dataRef, sources):
1142  """!
1143  @brief Write the source catalog.
1144 
1145  @param[in] dataRef: data reference
1146  @param[in] sources: source catalog
1147  """
1148  dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1149  self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
1150 
1151  def getExposureId(self, dataRef):
1152  return int(dataRef.get(self.config.coaddName + "CoaddId"))
def InitOutputDatasetField
Definition: config.py:339
def adaptArgsAndRun(self, inputData, inputDataIds, outputDataIds, butler)
Definition: multiBand.py:937
def runDataRef(self, patchRef)
Run detection on a coadd.
Definition: multiBand.py:272
Class for storing ordered metadata with comments.
Definition: PropertyList.h:68
A mapping between the keys of two Schemas, used to copy data between them.
Definition: SchemaMapper.h:21
Configuration parameters for the DetectCoaddSourcesTask.
Definition: multiBand.py:68
def denormalizeMatches(matches, matchMeta=None)
def runDataRef(self, patchRefList, psfCache=100)
Definition: multiBand.py:466
def __init__(self, schema=None, kwargs)
Initialize the task.
Definition: multiBand.py:247
def adaptArgsAndRun(self, inputData, inputDataIds, outputDataIds, butler)
Definition: multiBand.py:290
Deblend sources from master catalog in each coadd seperately and measure.
Definition: multiBand.py:748
Fit spatial kernel using approximate fluxes for candidates, and solving a linear system of equations...
daf::base::PropertySet * set
Definition: fits.cc:884
def __init__(self, butler=None, schema=None, peakSchema=None, kwargs)
Definition: multiBand.py:437
def writeMatches(self, dataRef, results)
Write matches of the sources to the astrometric reference catalog.
Definition: multiBand.py:1129
def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None, butler=None)
Definition: multiBand.py:1034
def readSources(self, dataRef)
Read input sources.
Definition: multiBand.py:1109
def write(self, results, patchRef)
Write out results from runDetection.
Definition: multiBand.py:330
template BaseCatalog packMatches(SourceMatchVector const &)
def write(self, dataRef, flux_sources, template_sources=None)
Definition: multiBand.py:538
def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None, kwargs)
Initialize the task.
Definition: multiBand.py:873
def write(self, dataRef, sources)
Write the source catalog.
Definition: multiBand.py:1141
Configuration parameters for the MeasureMergedCoaddSourcesTask.
Definition: multiBand.py:588
def getSkyInfo(coaddName, patchRef)
Return the SkyMap, tract and patch information, wcs, and outer bbox of the patch to be coadded...
Definition: coaddBase.py:231
Backwards-compatibility support for depersisting the old Calib (FluxMag0/FluxMag0Err) objects...
def run(self, exposure, idFactory, expId)
Run detection on an exposure.
Definition: multiBand.py:298
daf::base::PropertyList * list
Definition: fits.cc:885
def runDataRef(self, patchRef, psfCache=100)
Deblend and measure.
Definition: multiBand.py:998