LSST Applications g1653933729+34a971ddd9,g28da252d5a+6ecf39ab43,g2bbee38e9b+7117419c07,g2bc492864f+7117419c07,g2cdde0e794+704103fe75,g3156d2b45e+6e87dc994a,g347aa1857d+7117419c07,g35bb328faa+34a971ddd9,g3a166c0a6a+7117419c07,g3e281a1b8c+8ec26ec694,g4005a62e65+ba0306790b,g414038480c+40f8eb5d96,g569e0e2b34+d8e684fe9f,g6bb7d3c5bc+b9f74a9651,g717e5f8c0f+b9f74a9651,g76fe02a360+28e3f83f63,g80478fca09+fdb1f299e2,g82479be7b0+75dabfbf8d,g858d7b2824+b9f74a9651,g8cd86fa7b1+ed49507c07,g90e5cb8612+3848bed617,g9125e01d80+34a971ddd9,g979bb04a14+a66ccdebd6,g9a700aaafa+34a971ddd9,ga5288a1d22+3504ff4bc9,gae0086650b+34a971ddd9,gae74b0b5c6+761ee125be,gb3d89ef091+14ce01cc3a,gb58c049af0+ace264a4f2,gc28159a63d+7117419c07,gcf0d15dbbd+34f38ce56c,gda6a2b7d83+34f38ce56c,gdaeeff99f8+7774323b41,ge33fd446bb+b9f74a9651,ge5cf12406b+d328917b0a,ge79ae78c31+7117419c07,gf0baf85859+890af219f9,gf5289d68f6+da9a3f2f8e,w.2024.35
LSST Data Management Base Package
Loading...
Searching...
No Matches
multiBand.py
Go to the documentation of this file.
1# This file is part of pipe_tasks.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
21
22__all__ = ["DetectCoaddSourcesConfig", "DetectCoaddSourcesTask"]
23
24from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
25import lsst.pipe.base.connectionTypes as cT
26from lsst.pex.config import Field, ConfigurableField, ChoiceField
27from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask, \
28 SetPrimaryFlagsTask
29from lsst.meas.base import (
30 SingleFrameMeasurementTask,
31 ApplyApCorrTask,
32 CatalogCalculationTask,
33 SkyMapIdGeneratorConfig,
34)
35from lsst.meas.extensions.scarlet.io import updateCatalogFootprints
36from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
37from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask
38import lsst.afw.table as afwTable
39import lsst.afw.math as afwMath
40from lsst.daf.base import PropertyList
41from lsst.skymap import BaseSkyMap
42
43# NOTE: these imports are a convenience so multiband users only have to import this file.
44from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
45from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
46from .multiBandUtils import CullPeaksConfig # noqa: F401
47from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
50from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
51
52
53"""
54New set types:
55* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
56* deepCoadd_mergeDet: merged detections (tract, patch)
57* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
58* deepCoadd_ref: reference sources (tract, patch)
59All of these have associated *_schema catalogs that require no data ID and hold no records.
60
61In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
62the mergeDet, meas, and ref dataset Footprints:
63* deepCoadd_peak_schema
64"""
65
66
67
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler connections for `DetectCoaddSourcesTask`.

    One quantum is run per (tract, patch, band, skymap): a single-band coadd
    is read, and the detection catalog, the background model used during
    detection, and the updated ("calexp") coadd are written.
    """

    # Init-output: the schema of ``outputSources``, persisted once so that
    # downstream tasks can read it without a data ID.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    # The same exposure that came in, written back out after detection has
    # (possibly) rescaled its variance plane and updated masks/metadata.
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
100
101
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """Configuration parameters for the DetectCoaddSourcesTask
    """

    # Empirically rescale the coadd variance plane before detection
    # (see the ``scaleVariance`` subtask below).
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )
    # Controls how packed integer IDs are generated from (tract, patch, band).
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    def setDefaults(self):
        """Tune the detection subtask defaults for coadd-level detection."""
        super().setDefaults()
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender
        # Include band in packed data IDs that go into object IDs (None -> "as
        # many bands as are defined", rather than the default of zero).
        self.idGenerator.packer.n_bands = None
130
131
class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a single-band coadd.

    Warping individual visits onto the coadd grid correlates the noise of
    neighboring pixels, so the coadd variance plane no longer reflects the
    observed pixel scatter. Rather than propagating the full covariance
    matrix, this task optionally rescales the variance plane empirically
    (via the ``scaleVariance`` subtask) and then delegates to the
    ``detection`` subtask to find sources and build parent footprints.

    The task is intended to run once per band after coadd assembly; later
    stages of the multi-band pipeline merge the per-band detections and
    eventually perform forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog, modified in place to include
        every field this task adds. If None, the source minimal schema is
        used.
    **kwargs
        Additional keyword arguments forwarded to `PipelineTask`.
    """

    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig

    def __init__(self, schema=None, **kwargs):
        # super() is safe despite PipelineTask's multiple-inheritance init
        # tree; the call structure was reviewed upstream.
        super().__init__(**kwargs)
        self.schema = afwTable.SourceTable.makeMinimalSchema() if schema is None else schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")
        # Init-output: exposes the final catalog schema to downstream tasks.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
        # The ID factory assigns source IDs; catalog_id seeds detection's RNG.
        inputs["idFactory"] = idGenerator.make_table_id_factory()
        inputs["expId"] = idGenerator.catalog_id
        butlerQC.put(self.run(**inputs), outputRefs)

    def run(self, exposure, idFactory, expId):
        """Optionally rescale the variance plane, then detect sources.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect; may be modified in place (variance
            rescaling and metadata updates).
        idFactory : `lsst.afw.table.IdFactory`
            Factory used to assign source identifiers.
        expId : `int`
            Exposure identifier (integer) used as an RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Struct with attributes ``outputSources`` (the detection catalog,
            `lsst.afw.table.SourceCatalog`), ``outputBackgrounds``
            (`lsst.afw.math.BackgroundList`) and ``outputExposure`` (the
            input exposure).
        """
        if self.config.doScaleVariance:
            varianceScale = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", varianceScale)
        backgrounds = afwMath.BackgroundList()
        sourceTable = afwTable.SourceTable.make(self.schema, idFactory)
        detRes = self.detection.run(sourceTable, exposure, expId=expId)
        # The detection subtask only reports backgrounds when configured to
        # re-estimate them; guard against the attribute being absent or empty.
        for background in getattr(detRes, "background", None) or ():
            backgrounds.append(background)
        return Struct(outputSources=detRes.sources,
                      outputBackgrounds=backgrounds,
                      outputExposure=exposure)
222
223
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler connections for `MeasureMergedCoaddSourcesTask`.

    The set of active connections is pruned in ``__init__`` according to the
    configuration: flag-propagation inputs, the deblender catalog inputs, and
    the reference-matching inputs/outputs are all optional.
    """

    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
        "further filtered in the task for the purpose of propagating flags from image calibration "
        "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
        "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        """Prune connections that the given configuration does not use."""
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            # No flag propagation at all: none of the visit-level products
            # are needed.
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # New PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= set(("visitCatalogs",))
            # Check for types of flags required.
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # Deprecated PropagateVisitFlagsTask uses visitCatalogs.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            # The scarlet catalog/models connections supply the sources, so
            # the generic inputCatalog connection is unused.
            self.inputs -= set(("inputCatalog",))
            # Bug fix: this check must live inside the deblendedCatalog
            # branch. Previously it was at the outer level, so the else
            # branch below removed scarletModels unconditionally and
            # runQuantum's inputs.pop('scarletModels') raised KeyError with
            # the default doAddFootprints=True.
            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # Not using the multiband (scarlet) deblender outputs at all.
            # Bug fix: remove the actual connection name "scarletCatalog";
            # the previous code did set(("deblendedCatalog")) which, lacking
            # the tuple comma, removed single characters (a no-op) and named
            # a connection that does not exist, leaving scarletCatalog as a
            # required input and making runQuantum prefer it over the
            # configured inputCatalog.
            self.inputs -= set(("scarletCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.prerequisiteInputs -= set(("refCat",))
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))
364
365
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(dtype=bool,
                            default=True,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints.")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )
    # Controls how packed integer IDs are generated from (tract, patch, band).
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    @property
    def refObjLoader(self):
        # Convenience alias: the reference-loader configuration lives on the
        # ``match`` subtask.
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        # Enable extra measurement plugins beyond the subtask defaults.
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']

        # TODO: Remove STREAK in DM-44658, streak masking to happen only in ip_diffim
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF', 'STREAK']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF', 'STREAK']

    def validate(self):
        """Reject contradictory settings.

        Raises
        ------
        ValueError
            If denormalized matches are requested without matching enabled.
        """
        super().validate()

        if not self.doMatchSources and self.doWriteMatchesDenormalized:
            raise ValueError("Cannot set doWriteMatchesDenormalized if doMatchSources is False.")
463
464
class MeasureMergedCoaddSourcesTask(PipelineTask):
    """Deblend sources from main catalog in each coadd separately and measure.

    Use peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender
    outputs (including a HeavyFootprint in each band), measure each source on
    the coadd. Repeating this procedure with the same master catalog across
    multiple coadds will generate a consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in
    that band) as PSFs. Source properties are measured and the is-primary
    flag (indicating sources with no children) is set. Visit flags are
    propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference
    catalog.

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
    have a set of per-band catalogs. The next stage in the multi-band
    processing procedure will merge these measurements into a suitable catalog
    for driving forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        The schema of the merged detection catalog used as input to this one.
    peakSchema : `lsst.afw.table.Schema`, optional
        The schema of the PeakRecords in the Footprints in the merged detection catalog.
    refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
        An instance of ReferenceObjectLoader that supplies an external reference
        catalog. May be None if the loader can be constructed from the butler argument or all steps
        requiring a reference catalog are disabled.
    initInputs : `dict`, optional
        Dictionary that can contain a key ``inputSchema`` containing the
        input schema. If present will override the value of ``schema``.
    **kwargs
        Additional keyword arguments.

    Raises
    ------
    ValueError
        If no schema is supplied via ``schema`` or ``initInputs``.
    """

    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig

    def __init__(self, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            # A butler-provided schema takes precedence over the argument.
            schema = initInputs['inputSchema'].schema
        if schema is None:
            raise ValueError("Schema must be defined.")
        # Map the input schema into the (larger) output schema and add the
        # coordinate-error fields measurement will fill.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        afwTable.CoordKey.addErrorFields(self.schema)
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        # Init-output: exposes the final catalog schema to downstream tasks.
        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 inputs.pop('refCat'),
                                                 name=self.config.connections.refCat,
                                                 config=self.config.refObjLoader,
                                                 log=self.log)
            self.match.setRefObjLoader(refObjLoader)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds; only the latter
        # should really be used as the IDs all come from the input catalog.
        idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
        inputs['exposureId'] = idGenerator.catalog_id

        # Transform inputCatalog
        table = afwTable.SourceTable.make(self.schema, idGenerator.make_table_id_factory())
        sources = afwTable.SourceCatalog(table)
        # Load the correct input catalog: the scarlet catalog when present
        # (connections prune one or the other based on config.inputCatalog).
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)
        del inputCatalog
        # Add the HeavyFootprints to the deblended sources
        if self.config.doAddFootprints:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                imageForRedistribution = inputs['exposure']
            else:
                imageForRedistribution = None
            updateCatalogFootprints(
                modelData=modelData,
                catalog=sources,
                band=inputRefs.exposure.dataId["band"],
                imageForRedistribution=imageForRedistribution,
                removeScarletData=True,
                updateFluxColumns=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        # Build the skyInfo struct that run() needs from the skymap and the
        # data ID of whichever catalog we loaded.
        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Deprecated legacy version
                # Filter out any visit catalog that is not coadd inputs
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        # NOTE(review): a previous revision bound outputs.outputSources to a
        # local here under a "strip HeavyFootprints to save space" comment,
        # but never stripped anything; config.doStripFootprints is not acted
        # on in this method. The dead assignment has been removed — confirm
        # whether stripping should be implemented here or happens downstream.
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed.
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box.
        exposureId : `int` or `bytes`
            Packed unique number or bytes unique to the input exposure.
        ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
            Catalog containing information on the individual visits which went into making
            the coadd.
        visitCatalogs : `list` of `lsst.afw.table.SourceCatalogs`, optional
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : `list` of `lsst.afw.geom.SkyWcs`, optional
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy deprecated version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                # Fix: assign exactly once. The original set this attribute
                # twice (inside the empty-match branch and again after the
                # if/else), which was redundant.
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results
Defines the fields and offsets for a table.
Definition Schema.h:51
A mapping between the keys of two Schemas, used to copy data between them.
Class for storing ordered metadata with comments.
BaseCatalog packMatches(std::vector< Match< Record1, Record2 > > const &matches)
Return a table representation of a MatchVector that can be used to persist it.
Definition Match.cc:432