LSST Data Management Base Package
forcedPhotCoadd.py
# This file is part of meas_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import lsst.pex.config
import lsst.afw.table

import lsst.coadd.utils as coaddUtils
import lsst.pipe.base as pipeBase
from lsst.obs.base import ExposureIdInfo

from .references import MultiBandReferencesTask
from .forcedMeasurement import ForcedMeasurementTask
from .applyApCorr import ApplyApCorrTask
from .catalogCalculation import CatalogCalculationTask

__all__ = ("ForcedPhotCoaddConfig", "ForcedPhotCoaddTask")


class ForcedPhotCoaddRunner(pipeBase.ButlerInitializedTaskRunner):
    """Get the psfCache setting into ForcedPhotCoaddTask"""
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        return pipeBase.ButlerInitializedTaskRunner.getTargetList(parsedCmd,
                                                                   psfCache=parsedCmd.psfCache)


class ForcedPhotCoaddConnections(pipeBase.PipelineTaskConnections,
                                 dimensions=("band", "skymap", "tract", "patch"),
                                 defaultTemplates={"inputCoaddName": "deep",
                                                   "outputCoaddName": "deep"}):
    inputSchema = pipeBase.connectionTypes.InitInput(
        doc="Schema for the input measurement catalogs.",
        name="{inputCoaddName}Coadd_ref_schema",
        storageClass="SourceCatalog",
    )
    outputSchema = pipeBase.connectionTypes.InitOutput(
        doc="Schema for the output forced measurement catalogs.",
        name="{outputCoaddName}Coadd_forced_src_schema",
        storageClass="SourceCatalog",
    )
    exposure = pipeBase.connectionTypes.Input(
        doc="Input exposure to perform photometry on.",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=["band", "skymap", "tract", "patch"],
    )
    refCat = pipeBase.connectionTypes.Input(
        doc="Catalog of shapes and positions at which to force photometry.",
        name="{inputCoaddName}Coadd_ref",
        storageClass="SourceCatalog",
        dimensions=["skymap", "tract", "patch"],
    )
    refCatInBand = pipeBase.connectionTypes.Input(
        doc="Catalog of shapes and positions in the band having forced photometry done",
        name="{inputCoaddName}Coadd_meas",
        storageClass="SourceCatalog",
        dimensions=("band", "skymap", "tract", "patch")
    )
    footprintCatInBand = pipeBase.connectionTypes.Input(
        doc="Catalog of footprints to attach to sources",
        name="{inputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("band", "skymap", "tract", "patch")
    )
    scarletModels = pipeBase.connectionTypes.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    refWcs = pipeBase.connectionTypes.Input(
        doc="Reference world coordinate system.",
        name="{inputCoaddName}Coadd.wcs",
        storageClass="Wcs",
        dimensions=["band", "skymap", "tract", "patch"],
    )  # used in place of a skymap wcs because of DM-28880
    measCat = pipeBase.connectionTypes.Output(
        doc="Output forced photometry catalog.",
        name="{outputCoaddName}Coadd_forced_src",
        storageClass="SourceCatalog",
        dimensions=["band", "skymap", "tract", "patch"],
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.footprintDatasetName != "ScarletModelData":
            self.inputs.remove("scarletModels")
        if config.footprintDatasetName != "DeblendedFlux":
            self.inputs.remove("footprintCatInBand")

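# With the default templates above ("inputCoaddName" = "outputCoaddName" = "deep"),
# these connections resolve to the usual Gen3 dataset types, for example:
#
#     inputSchema  -> "deepCoadd_ref_schema"
#     exposure     -> "deepCoadd_calexp"
#     refCat       -> "deepCoadd_ref"
#     refCatInBand -> "deepCoadd_meas"
#     measCat      -> "deepCoadd_forced_src"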

class ForcedPhotCoaddConfig(pipeBase.PipelineTaskConfig,
                            pipelineConnections=ForcedPhotCoaddConnections):
    references = lsst.pex.config.ConfigurableField(
        target=MultiBandReferencesTask,
        doc="subtask to retrieve reference source catalog"
    )
    measurement = lsst.pex.config.ConfigurableField(
        target=ForcedMeasurementTask,
        doc="subtask to do forced measurement"
    )
    coaddName = lsst.pex.config.Field(
        doc="coadd name: typically one of deep or goodSeeing",
        dtype=str,
        default="deep",
    )
    doApCorr = lsst.pex.config.Field(
        dtype=bool,
        default=True,
        doc="Run subtask to apply aperture corrections"
    )
    applyApCorr = lsst.pex.config.ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    catalogCalculation = lsst.pex.config.ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )
    footprintDatasetName = lsst.pex.config.Field(
        doc="Dataset (without coadd prefix) that should be used to obtain (Heavy)Footprints for sources. "
            "Must have IDs that match those of the reference catalog. "
            "If None, Footprints will be generated by transforming the reference Footprints.",
        dtype=str,
        default="ScarletModelData",
        optional=True
    )
    doConserveFlux = lsst.pex.config.Field(
        dtype=bool,
        default=True,
        doc="Whether to use the deblender models as templates to re-distribute the flux "
            "from the 'exposure' (True), or to perform measurements on the deblender model footprints. "
            "If footprintDatasetName != 'ScarletModelData' then this field is ignored.")
    doStripFootprints = lsst.pex.config.Field(
        dtype=bool,
        default=True,
        doc="Whether to strip footprints from the output catalog before "
            "saving to disk. "
            "This is usually done when using scarlet models to save disk space.")
    hasFakes = lsst.pex.config.Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    def setDefaults(self):
        # Docstring inherited.
        # Make catalogCalculation a no-op by default as no modelFlux is setup by default in
        # ForcedMeasurementTask
        super().setDefaults()

        self.catalogCalculation.plugins.names = []
        self.measurement.copyColumns["id"] = "id"
        self.measurement.copyColumns["parent"] = "parent"
        self.references.removePatchOverlaps = False  # see validate() for why
        self.measurement.plugins.names |= ['base_InputCount', 'base_Variance']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'REJECTED', 'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'REJECTED', 'INEXACT_PSF']

    def validate(self):
        super().validate()
        if (self.measurement.doReplaceWithNoise and self.footprintDatasetName is not None
                and self.references.removePatchOverlaps):
            raise ValueError("Cannot use removePatchOverlaps=True with deblended footprints, as parent "
                             "sources may be rejected while their children are not.")


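# A minimal config override sketch (illustrative, not part of the original file):
# to skip the scarlet/deblended footprints entirely and instead transform the
# reference Footprints onto the measurement image, a pipeline config override
# could set:
#
#     config.footprintDatasetName = None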
class ForcedPhotCoaddTask(pipeBase.PipelineTask, pipeBase.CmdLineTask):
    """A command-line driver for performing forced measurement on coadd images.

    Parameters
    ----------
    butler : `lsst.daf.persistence.butler.Butler`, optional
        A Butler which will be passed to the references subtask to allow it to
        load its schema from disk. Optional, but must be specified if
        ``refSchema`` is not; if both are specified, ``refSchema`` takes
        precedence.
    refSchema : `lsst.afw.table.Schema`, optional
        The schema of the reference catalog, passed to the constructor of the
        references subtask. Optional, but must be specified if ``butler`` is
        not; if both are specified, ``refSchema`` takes precedence.
    **kwds
        Keyword arguments are passed to the supertask constructor.
    """

    ConfigClass = ForcedPhotCoaddConfig
    RunnerClass = ForcedPhotCoaddRunner
    _DefaultName = "forcedPhotCoadd"
    dataPrefix = "deepCoadd_"

    def __init__(self, butler=None, refSchema=None, initInputs=None, **kwds):
        super().__init__(**kwds)

        if initInputs is not None:
            refSchema = initInputs['inputSchema'].schema

        self.makeSubtask("references", butler=butler, schema=refSchema)
        if refSchema is None:
            refSchema = self.references.schema
        self.makeSubtask("measurement", refSchema=refSchema)
        # It is necessary to get the schema internal to the forced measurement task until such a time
        # that the schema is not owned by the measurement task, but is passed in by an external caller
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.measurement.schema)
        self.makeSubtask('catalogCalculation', schema=self.measurement.schema)
        self.outputSchema = lsst.afw.table.SourceCatalog(self.measurement.schema)

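    # Construction sketch (illustrative; under Gen3 the middleware normally
    # builds the task itself and passes ``initInputs``). Assumes a reference
    # schema is already available, e.g. the minimal source schema:
    #
    #     refSchema = lsst.afw.table.SourceTable.makeMinimalSchema()
    #     task = ForcedPhotCoaddTask(refSchema=refSchema)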
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        refCatInBand = inputs.pop('refCatInBand')
        if self.config.footprintDatasetName == "ScarletModelData":
            footprintData = inputs.pop("scarletModels")
        elif self.config.footprintDatasetName == "DeblendedFlux":
            footprintData = inputs.pop("footprintCatInBand")
        else:
            footprintData = None
        inputs['measCat'], inputs['exposureId'] = self.generateMeasCat(inputRefs.exposure.dataId,
                                                                       inputs['exposure'],
                                                                       inputs['refCat'],
                                                                       refCatInBand,
                                                                       inputs['refWcs'],
                                                                       "tract_patch",
                                                                       footprintData)
        outputs = self.run(**inputs)
        # Strip HeavyFootprints to save space on disk
        if self.config.footprintDatasetName == "ScarletModelData" and self.config.doStripFootprints:
            sources = outputs.measCat
            for source in sources[sources["parent"] != 0]:
                source.setFootprint(None)
        butlerQC.put(outputs, outputRefs)

    def generateMeasCat(self, exposureDataId, exposure, refCat, refCatInBand, refWcs, idPackerName,
                        footprintData):
        """Generate a measurement catalog for Gen3.

        Parameters
        ----------
        exposureDataId : `DataId`
            Butler dataId for this exposure.
        exposure : `lsst.afw.image.Exposure`
            Exposure to generate the catalog for.
        refCat : `lsst.afw.table.SourceCatalog`
            Catalog of shapes and positions at which to force photometry.
        refCatInBand : `lsst.afw.table.SourceCatalog`
            Catalog of shapes and positions in the band in which forced
            photometry is currently being performed.
        refWcs : `lsst.afw.image.SkyWcs`
            Reference world coordinate system.
        idPackerName : `str`
            Type of ID packer to construct from the registry.
        footprintData : `ScarletDataModel` or `lsst.afw.table.SourceCatalog`
            Either the scarlet data models or the deblended catalog
            containing footprints.
            If `footprintData` is `None` then the footprints contained
            in `refCatInBand` are used.

        Returns
        -------
        measCat : `lsst.afw.table.SourceCatalog`
            Catalog of forced sources to measure.
        expId : `int`
            Unique binary id associated with the input exposure.

        Raises
        ------
        LookupError
            Raised if a footprint with a given source id was in the reference
            catalog but not in the reference catalog in band (meaning there
            was some sort of mismatch in the two input catalogs)
        """
        exposureIdInfo = ExposureIdInfo.fromDataId(exposureDataId, idPackerName)
        idFactory = exposureIdInfo.makeSourceIdFactory()

        measCat = self.measurement.generateMeasCat(exposure, refCat, refWcs,
                                                   idFactory=idFactory)
        # attach footprints here, as the attachFootprints method is geared for gen2
        # and is not worth modifying, as this can naturally live inside this method
        if self.config.footprintDatasetName == "ScarletModelData":
            # Load the scarlet models
            self._attachScarletFootprints(
                catalog=measCat,
                modelData=footprintData,
                exposure=exposure,
                band=exposureDataId["band"]
            )
        else:
            if self.config.footprintDatasetName is None:
                footprintCat = refCatInBand
            else:
                footprintCat = footprintData
            for srcRecord in measCat:
                fpRecord = footprintCat.find(srcRecord.getId())
                if fpRecord is None:
                    raise LookupError("Cannot find Footprint for source {}; please check that {} "
                                      "IDs are compatible with reference source IDs"
                                      .format(srcRecord.getId(), footprintCat))
                srcRecord.setFootprint(fpRecord.getFootprint())
        return measCat, exposureIdInfo.expId

    def runDataRef(self, dataRef, psfCache=None):
        """Perform forced measurement on a single exposure.

        Parameters
        ----------
        dataRef : `lsst.daf.persistence.ButlerDataRef`
            Passed to the ``references`` subtask to obtain the reference WCS,
            the ``getExposure`` method (implemented by derived classes) to
            read the measurement image, and the ``fetchReferences`` method to
            get the exposure and load the reference catalog (see
            `CoaddSrcReferencesTask`).
            Refer to derived class documentation for details of the datasets
            and data ID keys which are used.
        psfCache : `int`, optional
            Size of PSF cache, or `None`. The size of the PSF cache can have
            a significant effect upon the runtime for complicated PSF models.

        Notes
        -----
        Sources are generated with ``generateMeasCat`` in the ``measurement``
        subtask. These are passed to ``measurement``'s ``run`` method, which
        fills the source catalog with the forced measurement results. The
        sources are then passed to the ``writeOutputs`` method (implemented by
        derived classes) which writes the outputs.
        """
        refWcs = self.references.getWcs(dataRef)
        exposure = self.getExposure(dataRef)
        if psfCache is not None:
            exposure.getPsf().setCacheCapacity(psfCache)
        refCat = self.fetchReferences(dataRef, exposure)

        exposureId = coaddUtils.getGen3CoaddExposureId(dataRef, coaddName=self.config.coaddName,
                                                       includeBand=False, log=self.log)
        measCat = self.measurement.generateMeasCat(
            exposure, refCat, refWcs, idFactory=self.makeIdFactory(dataRef, exposureId=exposureId))
        self.log.info("Performing forced measurement on %s", dataRef.dataId)
        self.attachFootprints(measCat, refCat, exposure, refWcs, dataRef)

        forcedPhotResult = self.run(measCat, exposure, refCat, refWcs, exposureId=exposureId)

        self.writeOutput(dataRef, forcedPhotResult.measCat)

    def run(self, measCat, exposure, refCat, refWcs, exposureId=None):
        """Perform forced measurement on a single exposure.

        Parameters
        ----------
        measCat : `lsst.afw.table.SourceCatalog`
            The measurement catalog, based on the sources listed in the
            reference catalog.
        exposure : `lsst.afw.image.Exposure`
            The measurement image upon which to perform forced detection.
        refCat : `lsst.afw.table.SourceCatalog`
            The reference catalog of sources to measure.
        refWcs : `lsst.afw.image.SkyWcs`
            The WCS for the references.
        exposureId : `int`
            Optional unique exposureId used for random seed in measurement
            task.

        Returns
        -------
        result : `~lsst.pipe.base.Struct`
            Structure with fields:

            ``measCat``
                Catalog of forced measurement results
                (`lsst.afw.table.SourceCatalog`).
        """
        self.measurement.run(measCat, exposure, refCat, refWcs, exposureId=exposureId)
        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=measCat,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )
        self.catalogCalculation.run(measCat)

        return pipeBase.Struct(measCat=measCat)

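    # Illustrative call sequence for ``run`` (a sketch mirroring runDataRef
    # above; ``measCat``, ``exposure``, ``refCat`` and ``refWcs`` are assumed
    # to have been prepared already, e.g. via ``generateMeasCat``):
    #
    #     result = task.run(measCat, exposure, refCat, refWcs, exposureId=exposureId)
    #     forcedSources = result.measCat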
    def makeIdFactory(self, dataRef, exposureId):
        """Create an object that generates globally unique source IDs.

        Source IDs are created based on a per-CCD ID and the ID of the CCD
        itself.

        Parameters
        ----------
        dataRef : `lsst.daf.persistence.ButlerDataRef`
            Butler data reference. The "CoaddId_bits" and "CoaddId" datasets
            are accessed. The data ID must have tract and patch keys.
        """
        # With the default configuration, this IdFactory doesn't do anything,
        # because the IDs it generates are immediately overwritten by the ID
        # from the reference catalog (since that's in
        # config.measurement.copyColumns). But we create one here anyway, to
        # allow us to revert back to the old behavior of generating new forced
        # source IDs, just by renaming the ID in config.copyColumns to
        # "object_id".
        exposureIdInfo = ExposureIdInfo(exposureId, dataRef.get(self.config.coaddName + "CoaddId_bits"))
        return exposureIdInfo.makeSourceIdFactory()

    def fetchReferences(self, dataRef, exposure):
        """Return an iterable of reference sources which overlap the exposure.

        Parameters
        ----------
        dataRef : `lsst.daf.persistence.ButlerDataRef`
            Butler data reference corresponding to the image to be measured;
            should have tract, patch, and filter keys.

        exposure : `lsst.afw.image.Exposure`
            Unused.

        Notes
        -----
        All work is delegated to the references subtask; see
        `CoaddSrcReferencesTask` for information about the default behavior.
        """
        skyMap = dataRef.get(self.dataPrefix + "skyMap", immediate=True)
        tractInfo = skyMap[dataRef.dataId["tract"]]
        patch = tuple(int(v) for v in dataRef.dataId["patch"].split(","))
        patchInfo = tractInfo.getPatchInfo(patch)
        references = lsst.afw.table.SourceCatalog(self.references.schema)
        references.extend(self.references.fetchInPatches(dataRef, patchList=[patchInfo]))
        return references

    def attachFootprints(self, sources, refCat, exposure, refWcs, dataRef):
        r"""Attach Footprints to source records.

        For coadd forced photometry, we use the deblended "heavy"
        `~lsst.afw.detection.Footprint`\ s from the single-band measurements
        of the same band - because we've guaranteed that the peaks (and hence
        child sources) will be consistent across all bands before we get to
        measurement, this should yield reasonable deblending for most sources.
        Its most likely limitation is that it will not provide good flux
        upper limits for sources that were not detected in this band but were
        blended with sources that were.
        """
        if self.config.footprintDatasetName is None:
            return self.measurement.attachTransformedFootprints(sources, refCat, exposure, refWcs)

        self.log.info("Loading deblended footprints for sources from %s, %s",
                      self.config.footprintDatasetName, dataRef.dataId)

        if self.config.footprintDatasetName == "ScarletModelData":
            # Load the scarlet models
            dataModel = dataRef.get("%sCoadd_%s" % (self.config.coaddName, self.config.footprintDatasetName),
                                    immediate=True)
            self._attachScarletFootprints(refCat, dataModel, exposure, dataRef.dataId["band"])
        else:
            fpCat = dataRef.get("%sCoadd_%s" % (self.config.coaddName, self.config.footprintDatasetName),
                                immediate=True)
            for refRecord, srcRecord in zip(refCat, sources):
                fpRecord = fpCat.find(refRecord.getId())
                if fpRecord is None:
                    raise LookupError("Cannot find Footprint for source %s; please check that %sCoadd_%s "
                                      "IDs are compatible with reference source IDs" %
                                      (srcRecord.getId(), self.config.coaddName,
                                       self.config.footprintDatasetName))
                srcRecord.setFootprint(fpRecord.getFootprint())

    def _attachScarletFootprints(self, catalog, modelData, exposure, band):
        """Attach scarlet models as HeavyFootprints
        """
        if self.config.doConserveFlux:
            redistributeImage = exposure.image
        else:
            redistributeImage = None
        # Attach the footprints
        modelData.updateCatalogFootprints(
            catalog=catalog,
            band=band,
            psfModel=exposure.getPsf(),
            redistributeImage=redistributeImage,
            removeScarletData=True,
            updateFluxColumns=False,
        )

    def getExposure(self, dataRef):
        """Read input exposure on which measurement will be performed.

        Parameters
        ----------
        dataRef : `lsst.daf.persistence.ButlerDataRef`
            Butler data reference.
        """
        if self.config.hasFakes:
            name = "fakes_" + self.config.coaddName + "Coadd_calexp"
        else:
            name = self.config.coaddName + "Coadd_calexp"

        return dataRef.get(name) if dataRef.datasetExists(name) else None

    def writeOutput(self, dataRef, sources):
        """Write forced source table

        Parameters
        ----------
        dataRef : `lsst.daf.persistence.ButlerDataRef`
            Butler data reference. The forced_src dataset (with
            self.dataPrefix prepended) is all that will be modified.
        sources : `lsst.afw.table.SourceCatalog`
            Catalog of sources to save.
        """
        dataRef.put(sources, self.dataPrefix + "forced_src", flags=lsst.afw.table.SOURCE_IO_NO_FOOTPRINTS)

528 """The schema catalogs that will be used by this task.
529
530 Returns
531 -------
532 schemaCatalogs : `dict`
533 Dictionary mapping dataset type to schema catalog.
534
535 Notes
536 -----
537 There is only one schema for each type of forced measurement. The
538 dataset type for this measurement is defined in the mapper.
539 """
540 catalog = lsst.afw.table.SourceCatalog(self.measurement.schema)
541 catalog.getTable().setMetadata(self.measurement.algMetadata)
542 datasetType = self.dataPrefix + "forced_src"
543 return {datasetType: catalog}
544
    def _getConfigName(self):
        # Documented in superclass
        return self.dataPrefix + "forced_config"

    def _getMetadataName(self):
        # Documented in superclass
        return self.dataPrefix + "forced_metadata"

    @classmethod
    def _makeArgumentParser(cls):
        parser = pipeBase.ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_forced_src", help="data ID, with raw CCD keys + tract",
                               ContainerClass=coaddUtils.CoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser
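
# Illustrative Gen2 command line (a sketch; the repository path, filter name and
# data ID values are placeholders that depend on the obs package and skymap in use):
#
#     forcedPhotCoadd.py /path/to/repo --output /path/to/output \
#         --id tract=0 patch=1,1 filter=HSC-I --psfCache 200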