22__all__ = [
"DetectCoaddSourcesConfig",
"DetectCoaddSourcesTask"]
26from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
27import lsst.pipe.base.connectionTypes
as cT
28from lsst.pex.config import Config, Field, ConfigurableField, ChoiceField
30from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
32from lsst.meas.extensions.scarlet
import ScarletDeblendTask
41from lsst.obs.base
import ExposureIdInfo
44from .mergeDetections
import MergeDetectionsConfig, MergeDetectionsTask
45from .mergeMeasurements
import MergeMeasurementsConfig, MergeMeasurementsTask
46from .multiBandUtils
import CullPeaksConfig
47from .deblendCoaddSourcesPipeline
import DeblendCoaddSourcesSingleConfig
48from .deblendCoaddSourcesPipeline
import DeblendCoaddSourcesSingleTask
49from .deblendCoaddSourcesPipeline
import DeblendCoaddSourcesMultiConfig
50from .deblendCoaddSourcesPipeline
import DeblendCoaddSourcesMultiTask
55* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
56* deepCoadd_mergeDet: merged detections (tract, patch)
57* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
58* deepCoadd_ref: reference sources (tract, patch)
59All of these have associated *_schema catalogs that require no data ID and hold no records.
61In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
62the mergeDet, meas, and ref dataset Footprints:
63* deepCoadd_peak_schema
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep",
                                                      "outputCoaddName": "deep"}):
    """Butler connections for DetectCoaddSourcesTask.

    NOTE(review): the ``class`` header line was lost in this copy; it is
    reconstructed here from the keyword arguments that survived and from the
    ``pipelineConnections=DetectCoaddSourcesConnections`` reference in the
    config class below — confirm against upstream.
    """

    # Schema of the detection catalog, exported at init time so downstream
    # tasks can be configured before any data is processed.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    # NOTE(review): the assignment line for this input was lost in this copy;
    # the name ``exposure`` is reconstructed from the task's
    # ``run(self, exposure, ...)`` signature — confirm against upstream.
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
    )
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """Configuration parameters for the DetectCoaddSourcesTask."""

    doScaleVariance = Field(dtype=bool, default=True,
                            doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task",
                          deprecated=("doInsertFakes is no longer supported. This config will be removed "
                                      "after v24."))
    # NOTE(review): a deprecated ``insertFakes`` ConfigurableField was lost in
    # this copy (doc: "Injection of fake sources for testing purposes (must be
    # retargeted)"; deprecation: "insertFakes is no longer supported. This
    # config will be removed after v24."). Restore from upstream if callers
    # still reference it.
    # NOTE(review): the declaration line for ``hasFakes`` was lost; dtype and
    # default are reconstructed from the identical field in
    # ``DeblendCoaddSourcesConfig`` below — confirm the default.
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    def setDefaults(self):
        super().setDefaults()
        # Coadds have correlated noise; detect relative to the per-pixel
        # standard deviation rather than an absolute threshold.
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # The background has already been subtracted during coaddition, so do
        # not re-fit it during detection; use an exact (non-approximate)
        # background model on a very coarse grid.
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True
class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a single filter coadd.

    Coadding individual visits requires each exposure to be warped. This
    introduces covariance in the noise properties across pixels. Before
    detection, we correct the coadd variance by scaling the variance plane in
    the coadd to match the observed variance. This is an approximate
    approach -- strictly, we should propagate the full covariance matrix --
    but it is simple and works well in practice.

    After scaling the variance plane, we detect sources and generate
    footprints by delegating to the @ref SourceDetectionTask_ "detection"
    subtask.

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image
    in a given band. The purpose of the task is to update the background,
    detect all sources in a single band and generate a set of parent
    footprints. Subsequent tasks in the multi-band processing procedure will
    merge sources across bands and, eventually, perform forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog, modified in place to include
        all fields set by this task. If None, the source minimal schema will
        be used.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig

    def __init__(self, schema=None, **kwargs):
        super().__init__(**kwargs)
        # NOTE(review): the ``if schema is None:`` guard and the
        # ``self.schema = schema`` assignment were lost in this copy; they are
        # reconstructed from the docstring contract ("If None, the source
        # minimal schema will be used") and from the ``self.schema`` reads
        # below — confirm against upstream.
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
        inputs["idFactory"] = exposureIdInfo.makeSourceIdFactory()
        inputs["expId"] = exposureIdInfo.expId
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, idFactory, expId):
        """Run detection on an exposure.

        First scale the variance plane to match the observed variance using
        ``ScaleVarianceTask``. Then invoke the ``SourceDetectionTask_``
        "detection" subtask to detect sources.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect (may be background-subtracted and
            scaled, depending on configuration).
        idFactory : `lsst.afw.table.IdFactory`
            IdFactory to set source identifiers.
        expId : `int`
            Exposure identifier (integer) for RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Results as a struct with attributes ``outputSources``,
            ``outputBackgrounds`` (list of backgrounds) and
            ``outputExposure``.
        """
        if self.config.doScaleVariance:
            varScale = self.scaleVariance.run(exposure.maskedImage)
            # Record the applied scale factor so downstream consumers can
            # recover the original variance.
            exposure.getMetadata().add("VARIANCE_SCALE", varScale)
        # NOTE(review): the initialization of ``backgrounds`` was lost in this
        # copy; a plain list matches the ``append`` usage below, but upstream
        # may use an afw BackgroundList — confirm the expected storage type.
        backgrounds = []
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        sources = detections.sources
        fpSets = detections.fpSets
        # Collect any temporary backgrounds produced during detection.
        if hasattr(fpSets, "background") and fpSets.background:
            for bg in fpSets.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
class DeblendCoaddSourcesConfig(Config):
    """Configuration parameters for the `DeblendCoaddSourcesTask`."""

    # NOTE(review): the declaration for ``singleBandDeblend`` was lost in this
    # copy (doc: "Deblend sources separately in each band"); ``setDefaults``
    # below configures it, so restore the ConfigurableField from upstream.
    multiBandDeblend = ConfigurableField(
        target=ScarletDeblendTask,
        doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool,
                         default=True,  # NOTE(review): default line lost — TODO confirm
                         doc="Simultaneously deblend all bands? "
                             "True uses `multibandDeblend` while False uses `singleBandDeblend`")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,  # NOTE(review): default line lost — TODO confirm
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        Config.setDefaults(self)
        # Keep every peak from the merged detections, even those the deblender
        # would otherwise cull.
        self.singleBandDeblend.propagateAllPeaks = True
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler connections for MeasureMergedCoaddSourcesTask."""

    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        # NOTE(review): the ``name=`` line (and possibly deferLoad/multiple
        # flags) for this prerequisite input was lost in this copy — restore
        # from upstream before use; ``runQuantum`` iterates inputRefs.refCat,
        # which implies ``multiple=True``.
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
    )
    # NOTE(review): assignment line lost; name ``exposure`` reconstructed from
    # the ``inputs['exposure']`` reads in runQuantum — confirm.
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    # NOTE(review): assignment line lost; name ``skyMap`` reconstructed from
    # ``inputs.pop('skyMap')`` in runQuantum — confirm.
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        # NOTE(review): the ``name=`` line was lost in this copy — restore from
        # upstream. ``multiple=True`` is implied by the list indexing of
        # ``inputs['visitCatalogs']`` in runQuantum.
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True,
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),  # NOTE(review): doc tail partially lost — confirm wording
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        # Handles (not loaded tables) are consumed in runQuantum, so these are
        # deferred, per-visit inputs.
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),  # NOTE(review): final doc line lost — confirm wording
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            # Flag propagation disabled entirely: drop every propagation input.
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # New-style propagation reads source tables, not visit catalogs.
            self.inputs -= set(("visitCatalogs",))
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # Legacy PropagateVisitFlagsTask reads visit catalogs only.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            # The scarlet catalog/models are used instead of the generic input.
            self.inputs -= set(("inputCatalog",))
            # If deblender models are not needed, remove them.
            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # BUG FIX: the original removed ``set(("deblendedCatalog"))`` — the
            # missing trailing comma made this a set of single characters, and
            # there is no connection named "deblendedCatalog"; the multiband
            # deblender connection is ``scarletCatalog``.
            self.inputs -= set(("scarletCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """Configuration parameters for the MeasureMergedCoaddSourcesTask."""

    # NOTE(review): the ChoiceField declaration line was lost in this copy;
    # field name and dtype reconstructed from the surviving ``allowed`` map and
    # the string comparison ``config.inputCatalog == "deblendedCatalog"`` in
    # the connections class — confirm against upstream.
    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending.",
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(dtype=bool,
                            default=True,  # NOTE(review): default line lost — TODO confirm
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints.")  # NOTE(review): doc tail lost — confirm wording
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "  # NOTE(review): doc line lost — confirm wording
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,  # NOTE(review): declaration lines lost — TODO confirm
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        dtype=str,
        default="raise",  # NOTE(review): declaration lines lost — TODO confirm
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,  # NOTE(review): declaration lines lost — TODO confirm
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,  # NOTE(review): declaration lines lost — TODO confirm
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )
    hasFakes = Field(
        dtype=bool,
        default=False,  # NOTE(review): declaration lines lost — TODO confirm
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        # Exposed as a property so runQuantum can read
        # ``self.config.refObjLoader`` without calling it; the ``@property``
        # decorator line was lost in this copy but is implied by that usage.
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        # NOTE(review): some plugin-name entries in these lists were lost in
        # this copy — confirm the full lists against upstream.
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_LocalPhotoCalib']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED',
                                                                       'SENSOR_EDGE']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED',
                                                                     'SENSOR_EDGE']
class MeasureMergedCoaddSourcesTask(PipelineTask):
    """Deblend sources from main catalog in each coadd separately and measure.

    Use peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender
    outputs (including a HeavyFootprint in each band), measure each source on
    the coadd. Repeating this procedure with the same master catalog across
    multiple coadds will generate a consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in
    that band) as PSFs. Source properties are measured and the @c is-primary
    flag (indicating sources with no children) is set. Visit flags are
    propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference
    catalog.

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
    have a set of per-band catalogs. The next stage in the multi-band
    processing procedure will merge these measurements into a suitable catalog
    for driving forced photometry.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler` or `None`, optional
        A butler used to read the input schemas from disk or construct the
        reference catalog loader, if schema or peakSchema or refObjLoader is
        None.
    schema : `lsst.afw.table.Schema`, optional
        The schema of the merged detection catalog used as input to this one.
    peakSchema : `lsst.afw.table.Schema`, optional
        The schema of the PeakRecords in the Footprints in the merged
        detection catalog.
    refObjLoader : optional
        An instance of LoadReferenceObjectsTasks that supplies an external
        reference catalog. May be None if the loader can be constructed from
        the butler argument or all steps requiring a reference catalog are
        disabled.
    initInputs : `dict`, optional
        Dictionary that can contain a key ``inputSchema`` containing the
        input schema. If present will override the value of ``schema``.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        # NOTE(review): the ``if schema is None:`` guard was lost in this copy;
        # it is implied by the assert message below — confirm against upstream.
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema",
                                immediate=True).schema
        # NOTE(review): the SchemaMapper construction line was lost; it is
        # reconstructed from the ``self.schemaMapper`` uses below — confirm.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        # NOTE(review): upstream initializes ``self.algMetadata`` (a
        # PropertyList) here; that line was lost in this copy — restore it
        # before the makeSubtask call below reads it.
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        # Fail early (per config) if any schema field has units Astropy
        # cannot parse.
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)
        # NOTE(review): upstream also publishes ``self.outputSchema`` (an
        # InitOutput catalog built from self.schema); those lines were lost.

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        if self.config.doMatchSources:
            # NOTE(review): the loader construction was partially lost in this
            # copy; only its keyword arguments survive. Reconstructed as a
            # ReferenceObjectLoader call — confirm signature against upstream.
            refObjLoader = ReferenceObjectLoader(
                dataIds=[ref.datasetRef.dataId for ref in inputRefs.refCat],
                refCats=inputs.pop('refCat'),
                name=self.config.connections.refCat,
                config=self.config.refObjLoader,
                log=self.log,
            )
            self.match.setRefObjLoader(refObjLoader)

        # Set PSF cache size up front so repeated PSF evaluations are cheap.
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
        inputs['exposureId'] = exposureIdInfo.expId
        idFactory = exposureIdInfo.makeSourceIdFactory()

        table = afwTable.SourceTable.make(self.schema, idFactory)
        # NOTE(review): the catalog construction line was lost; reconstructed
        # from the ``sources.extend`` call below — confirm.
        sources = afwTable.SourceCatalog(table)
        # The input catalog comes either from the multiband (scarlet)
        # deblender or from the generic input connection.
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)

        if self.config.doAddFootprints:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                redistributeImage = inputs['exposure'].image
            else:
                redistributeImage = None
            modelData.updateCatalogFootprints(
                catalog=sources,  # NOTE(review): this argument line was lost — confirm name
                band=inputRefs.exposure.dataId["band"],
                psfModel=inputs['exposure'].getPsf(),
                redistributeImage=redistributeImage,
                removeScarletData=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)
        inputs['sources'] = sources

        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        # NOTE(review): the Struct construction head was lost; reconstructed
        # from the skyInfo attribute reads in ``run`` — confirm fields.
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox(),
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New-style flag propagation from per-visit source tables.
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Legacy PropagateVisitFlagsTask: keep only the visit catalogs
                # whose CCDs actually contributed to this coadd.
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                # NOTE(review): this dict's initialization line was lost;
                # reconstructed from its uses below.
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        # NOTE(review): upstream strips HeavyFootprints from the output here
        # when configured (doStripFootprints); those lines were lost in this
        # copy — confirm and restore.
        sources = outputs.outputSources
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally
        populate the resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed.
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input
            exposure within a `SkyMap`, the `SkyMap`, its `Wcs`, and its
            bounding box.
        exposureId : `int` or `bytes`
            Packed unique number or bytes unique to the input exposure.
        ccdInputs : optional
            Catalog containing information on the individual visits which went
            into making the coadd.
        visitCatalogs : `list` of `lsst.afw.table.SourceCatalogs`, optional
            A list of source catalogs corresponding to measurements made on
            the individual visits which went into the input exposure. If None
            and butler is `None` then the task cannot propagate visit flags to
            the output catalog. Deprecated, to be removed with
            PropagateVisitFlagsTask.
        wcsUpdates : `list`, optional
            If visitCatalogs is not `None` this should be a list of wcs
            objects which correspond to the input visits. Used to put all
            coordinates to common system. If `None` and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        butler : `None`, optional
            This was a Gen2 butler used to load visit catalogs. No longer used
            and should not be set. Will be removed in the future.
        sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for sourceTable_visit handles (key is visit) for propagating
            flags. These tables are derived from the ``CalibrateTask``
            sources, and contain astrometry and photometry flags, and
            optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for
            propagating flags. These tables are derived from
            ``FinalizeCalibrationTask`` and contain PSF flags from the
            finalized PSF estimation.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in
            the sources attribute. Optionally will have results of matching to
            a reference catalog in the matchResults attribute, and
            denormalized matches in the denormMatches attribute.
        """
        if butler is not None:
            warnings.warn("The 'butler' parameter is no longer used and can be safely removed.",
                          category=FutureWarning, stacklevel=2)

        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,  # NOTE(review): this argument line was lost — confirm name
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # Several downstream consumers require a contiguous catalog.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New-style propagation from source tables.
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy PropagateVisitFlagsTask.
                # NOTE(review): this call's argument lines were lost in this
                # copy; reconstructed from the method's parameters — confirm
                # against upstream.
                self.propagateFlags.run(
                    butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates
                )

        # NOTE(review): the Struct construction line was lost; reconstructed
        # from the attribute assignments below — confirm.
        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            # NOTE(review): the packMatches line was lost; reconstructed from
            # the ``matches.table.setMetadata`` call below — confirm.
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    # NOTE(review): the empty-catalog construction line was
                    # lost in this copy — restore from upstream.
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                # BUG FIX: the original assigned results.denormMatches twice
                # on consecutive lines; the duplicate has been removed.
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results
A 2-dimensional celestial WCS that transform pixels to ICRS RA/Dec, using the LSST standard for pixel...
A class to contain the data, WCS, and other information needed to describe an image of the sky.
Custom catalog class for ExposureRecord/Table.
A polymorphic functor base class for generating record IDs for a table.
Defines the fields and offsets for a table.
A mapping between the keys of two Schemas, used to copy data between them.
Class for storing ordered metadata with comments.
daf::base::PropertySet * set
BaseCatalog packMatches(std::vector< Match< Record1, Record2 > > const &matches)
Return a table representation of a MatchVector that can be used to persist it.