measurementDriver.py
1# This file is part of pipe_tasks.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
21
22__all__ = [
23 "SingleBandMeasurementDriverConfig",
24 "SingleBandMeasurementDriverTask",
25 "MultiBandMeasurementDriverConfig",
26 "MultiBandMeasurementDriverTask",
27 "ForcedMeasurementDriverConfig",
28 "ForcedMeasurementDriverTask",
29]
30
31import copy
32import logging
33from abc import ABCMeta, abstractmethod
34
35import astropy.table
36import lsst.afw.detection as afwDetection
37import lsst.afw.geom as afwGeom
38import lsst.afw.image as afwImage
39import lsst.afw.math as afwMath
40import lsst.afw.table as afwTable
41import lsst.geom
42import lsst.meas.algorithms as measAlgorithms
43import lsst.meas.base as measBase
44import lsst.meas.deblender as measDeblender
45import lsst.meas.extensions.scarlet as scarlet
46import lsst.pipe.base as pipeBase
47import lsst.scarlet.lite as scl
48import numpy as np
49from lsst.meas.extensions.scarlet.deconvolveExposureTask import DeconvolveExposureTask
50from lsst.pex.config import Config, ConfigurableField, Field
51
52logging.basicConfig(level=logging.INFO)
53
54
56 """Base configuration for measurement driver tasks.
57
58 This class provides foundational configuration for its subclasses to handle
59 single-band and multi-band data. It defines variance scaling, detection,
60 deblending, measurement, aperture correction, and catalog calculation
61 subtasks, which are intended to be executed in sequence by the driver task.
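
For example, enabling a step whose prerequisite is disabled fails validation.
A minimal sketch, using the single-band subclass config defined later in this
module:

>>> config = SingleBandMeasurementDriverConfig()
>>> config.doMeasure = False
>>> config.doApCorr = True
>>> config.validate()  # raises: aperture correction requires measurement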
62 """
63
64 doScaleVariance = Field[bool](doc="Scale variance plane using empirical noise?", default=False)
65
66 scaleVariance = ConfigurableField(
67 doc="Subtask to rescale variance plane", target=measAlgorithms.ScaleVarianceTask
68 )
69
70 doDetect = Field[bool](doc="Run the source detection algorithm?", default=True)
71
72 detection = ConfigurableField(
73 doc="Subtask to detect sources in the image", target=measAlgorithms.SourceDetectionTask
74 )
75
76 doDeblend = Field[bool](doc="Run the source deblending algorithm?", default=True)
77 # N.B. The 'deblend' configurable field should be defined in subclasses.
78
79 doMeasure = Field[bool](doc="Run the source measurement algorithm?", default=True)
80
81 measurement = ConfigurableField(
82 doc="Subtask to measure sources and populate the output catalog",
83 target=measBase.SingleFrameMeasurementTask,
84 )
85
86 psfCache = Field[int](
87 doc="Maximum number of PSFs to cache, preventing repeated PSF evaluations at the same "
88 "point across different measurement plugins. Defaults to -1, which auto-sizes the cache "
89 "based on the plugin count.",
90 default=-1,
91 )
92
93 checkUnitsParseStrict = Field[str](
94 doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
95 default="raise",
96 )
97
98 doApCorr = Field[bool](
99 doc="Apply aperture corrections? If yes, your image must have an aperture correction map",
100 default=False,
101 )
102
103 applyApCorr = ConfigurableField(
104 doc="Subtask to apply aperture corrections",
105 target=measBase.ApplyApCorrTask,
106 )
107
108 doRunCatalogCalculation = Field[bool](doc="Run catalogCalculation task?", default=False)
109
110 catalogCalculation = ConfigurableField(
111 doc="Subtask to run catalogCalculation plugins on catalog", target=measBase.CatalogCalculationTask
112 )
113
114 doOptions = [
115 "doScaleVariance",
116 "doDetect",
117 "doDeblend",
118 "doMeasure",
119 "doApCorr",
120 "doRunCatalogCalculation",
121 ]
122
123 def validate(self):
124 """Ensure that at least one processing step is enabled."""
125 super().validate()
126
127 if not any(getattr(self, opt) for opt in self.doOptions):
128 raise ValueError(f"At least one of these options must be enabled: {self.doOptions}")
129
130 if self.doApCorr and not self.doMeasure:
131 raise ValueError("Aperture correction requires measurement to be enabled.")
132
133 if self.doRunCatalogCalculation and not self.doMeasure:
134 raise ValueError("Catalog calculation requires measurement to be enabled.")
135
136
137class MeasurementDriverBaseTask(pipeBase.Task, metaclass=ABCMeta):
138 """Base class for the mid-level driver running variance scaling, detection,
139 deblending, measurement, aperture correction, and catalog calculation in
140 one go.
141
142 Users don't need to Butlerize their input data, which is a significant
143 advantage for quick data exploration and testing. This driver simplifies
144 the process of applying measurement algorithms to images by abstracting
145 away low-level implementation details such as Schema and table boilerplate.
146 It's a convenient way to process images into catalogs with a user-friendly
147 interface for non-developers while allowing extensive configuration and
148 integration into unit tests for developers. It also considerably improves
149 how demos and workflows are showcased in Jupyter notebooks.
150
151 Parameters
152 ----------
153 schema :
154 Schema used to create the output `~lsst.afw.table.SourceCatalog`,
155 modified in place with fields that will be written by this task.
156 peakSchema :
157 Schema of Footprint Peaks that will be passed to the deblender.
158 **kwargs :
159 Additional kwargs to pass to lsst.pipe.base.Task.__init__()
160
161 Notes
162 -----
163 Subclasses (e.g., single-band vs. multi-band) share most methods and config
164 options but differ in handling and validating inputs by overriding the base
165 config class and any methods that require their own logic.
166 """
167
168 ConfigClass = MeasurementDriverBaseConfig
169 _DefaultName = "measurementDriverBase"
170 _Deblender = ""
171
172 def __init__(self, schema: afwTable.Schema = None, peakSchema: afwTable.Schema = None, **kwargs: dict):
173 super().__init__(**kwargs)
174
175 # Schema for the output catalog.
176 self.schema = schema
177
178 # Schema for deblender peaks.
179 self.peakSchema = peakSchema
180
181 # Placeholders for subclasses to populate.
183 self.scaleVariance: measAlgorithms.ScaleVarianceTask
184 self.detection: measAlgorithms.SourceDetectionTask
185 self.deblend: measDeblender.SourceDeblendTask | scarlet.ScarletDeblendTask
186 self.measurement: measBase.SingleFrameMeasurementTask | measBase.ForcedMeasurementTask
187 self.applyApCorr: measBase.ApplyApCorrTask
188 self.catalogCalculation: measBase.CatalogCalculationTask
189
190 # Store the initial Schema to use for reinitialization if necessary.
192 # To safeguard against user tampering and ensure predictable behavior,
193 # the following attribute can only be modified within the class using
194 # a controlled setter.
195 super().__setattr__("initSchema", copy.deepcopy(schema))
196
197 def __setattr__(self, name, value):
198 """Prevent external modifications of the initial Schema."""
199 if name == "initSchema":
200 raise AttributeError(f"Cannot modify {name} directly")
201 super().__setattr__(name, value)
202
203 @abstractmethod
204 def run(self, *args, **kwargs) -> pipeBase.Struct:
205 """Run the measurement driver task. Subclasses must implement this
206 method using their own logic to handle single-band or multi-band data.
207 """
208 raise NotImplementedError("This is not implemented on the base class")
209
210 def _ensureValidInputs(
211 self,
212 catalog: afwTable.SourceCatalog | None,
213 ):
214 """Perform validation and adjustments of inputs without heavy
215 computation.
216
217 Parameters
218 ----------
219 catalog :
220 Catalog to be extended by the driver task.
221 """
222 # Validate the configuration before proceeding.
223 self.config.validate()
224
225 if self.config.doDetect:
226 if catalog is not None:
227 raise RuntimeError(
228 "An input catalog was given to bypass detection, but 'doDetect' is still on"
229 )
230 else:
231 if catalog is None:
232 raise RuntimeError("Cannot run without detection if no 'catalog' is provided")
233
234 def _initializeSchema(self, catalog: afwTable.SourceCatalog = None):
235 """Initialize the Schema to be used for constructing the subtasks.
236
237 Though it may seem clunky, this workaround is necessary to ensure
238 Schema consistency across all subtasks.
239
240 Parameters
241 ----------
242 catalog :
243 Catalog from which to extract the Schema. If not provided, the
244 user-provided Schema will be used; if that was also not provided
245 at initialization, a minimal Schema will be used.
246 """
247 # If the Schema has been modified (either by subtasks or externally by
248 # the user), reset it to the initial state before creating subtasks.
249 # This would be necessary when running the same driver task multiple
250 # times with different configs/inputs.
251 if self.schema != self.initSchema:
252 self.schema = copy.deepcopy(self.initSchema)
253
254 if catalog is None:
255 if self.schema is None:
256 # Create a minimal Schema that will be extended by tasks.
257 self.schema = afwTable.SourceTable.makeMinimalSchema()
258
259 # Add coordinate error fields to avoid missing field issues.
260 self._addCoordErrorFieldsIfMissing(self.schema)
261 else:
262 if self.schema is not None:
263 self.log.warning(
264 "Both a catalog and a Schema were provided; using the Schema from the catalog only"
265 )
266
267 # Since a catalog is provided, use its Schema as the base.
268 catalogSchema = catalog.schema
269
270 # Create a SchemaMapper that maps from catalogSchema to a new one
271 # it will create.
272 self.mapper = afwTable.SchemaMapper(catalogSchema)
273
274 # Ensure coordinate error fields exist in the schema.
275 # This must be done after initializing the SchemaMapper to avoid
276 # unequal schemas between input record and mapper during the
277 # _updateCatalogSchema call.
278 self._addCoordErrorFieldsIfMissing(catalogSchema)
279
280 # Add everything from catalogSchema to output Schema.
281 self.mapper.addMinimalSchema(catalogSchema, True)
282
283 # Get the output Schema from the SchemaMapper and assign it as the
284 # Schema to be used for constructing the subtasks.
285 self.schema = self.mapper.getOutputSchema()
286
287 if isinstance(self, ForcedMeasurementDriverTask):
288 # A trick also used in https://github.com/lsst/ap_pipe/blob/
289 # a221d4e43e2abac44b1cbed0533b9e220c5a67f4/python/lsst/ap/pipe/
290 # matchSourceInjected.py#L161
291 self.schema.addField("deblend_nChild", "I", "Needed for minimal forced photometry schema")
292
293 def _addCoordErrorFieldsIfMissing(self, schema: afwTable.Schema):
294 """Add coordinate error fields to the schema in-place if they are not
295 already present.
296
297 Parameters
298 ----------
299 schema :
300 Schema to be checked for coordinate error fields.
301 """
302 if not any(
303 errorField in schema.getNames()
304 for errorField in ("coord_raErr", "coord_decErr", "coord_ra_dec_Cov")
305 ):
306 afwTable.CoordKey.addErrorFields(schema)
307
308 def _makeSubtasks(self):
309 """Construct subtasks based on the configuration and the Schema."""
310 if self.schema is None and any(
311 getattr(self.config, attr) for attr in self.config.doOptions if attr != "doScaleVariance"
312 ):
313 raise RuntimeError(
314 "Cannot create requested subtasks without a Schema; "
315 "ensure one is provided explicitly or via a catalog"
316 )
317
318 if self.config.doScaleVariance:
319 self.makeSubtask("scaleVariance")
320
321 if isinstance(self, ForcedMeasurementDriverTask):
322 # Always True for forced measurement.
323 if self.config.doMeasure:
324 self.makeSubtask("measurement", refSchema=self.schema)
325
326 # In forced measurement, where the measurement catalog is built
327 # internally, we need to initialize applyApCorr with the full
328 # schema after measurement plugins have added their fields;
329 # otherwise, it won’t see them and will silently skip applying
330 # aperture corrections.
331 # A related example can be found in this reference:
332 # https://github.com/lsst/drp_tasks/blob/
333 # b565995b995cd5f0e40196f8d3c89cafb89aa515/python/lsst/drp/tasks/
334 # forcedPhotCoadd.py#L203
335 if self.config.doApCorr:
336 self.makeSubtask("applyApCorr", schema=self.measurement.schema)
337
338 # Same reference as above uses `measurement.schema` to make the
339 # catalogCalculation subtask, so we do the same here.
340 if self.config.doRunCatalogCalculation:
341 self.makeSubtask("catalogCalculation", schema=self.measurement.schema)
342 else:
343 if self.config.doDetect:
344 self.makeSubtask("detection", schema=self.schema)
345
346 if self.config.doDeblend:
347 self.makeSubtask("deblend", schema=self.schema, peakSchema=self.peakSchema)
348
349 if self.config.doMeasure:
350 self.makeSubtask("measurement", schema=self.schema)
351
352 if self.config.doApCorr:
353 self.makeSubtask("applyApCorr", schema=self.measurement.schema)
354
355 if self.config.doRunCatalogCalculation:
356 self.makeSubtask("catalogCalculation", schema=self.schema)
357
358 def _prepareSchemaAndSubtasks(
359 self, catalog: afwTable.SourceCatalog | None
360 ) -> afwTable.SourceCatalog | None:
361 """Ensure subtasks are properly initialized according to the
362 configuration and the provided catalog.
363
364 Parameters
365 ----------
366 catalog :
367 Optional catalog to be used for initializing the Schema and the
368 subtasks.
369
370 Returns
371 -------
372 catalog :
373 Updated catalog to be passed to the subtasks, if it was provided.
374 """
375 # Set up the Schema before creating subtasks.
376 self._initializeSchema(catalog)
377
378 # Create subtasks, passing the same Schema to each subtask's
379 # constructor that requires it.
380 self._makeSubtasks()
381
382 # Check that all units in the Schema are valid Astropy unit strings.
383 self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
384
385 # Adjust the catalog Schema to align with changes made by the subtasks.
386 if catalog:
387 catalog = self._updateCatalogSchema(catalog)
388
389 return catalog
390
391 def _scaleVariance(self, exposure: afwImage.Exposure, band: str = "a single"):
392 """Scale the variance plane of an exposure to match the observed
393 variance.
394
395 Parameters
396 ----------
397 exposure :
398 Exposure on which to run the variance scaling algorithm.
399 band :
400 Band associated with the exposure. Used for logging.
401 """
402 self.log.info(f"Scaling variance plane for {band} band")
403 varScale = self.scaleVariance.run(exposure.maskedImage)
404 exposure.getMetadata().add("VARIANCE_SCALE", varScale)
405
406 def _toContiguous(
407 self, catalog: afwTable.SourceCatalog | dict[str, afwTable.SourceCatalog]
408 ) -> afwTable.SourceCatalog | dict[str, afwTable.SourceCatalog]:
409 """Make a catalog or catalogs contiguous if they are not already.
410
411 Parameters
412 ----------
413 catalog :
414 Catalog or dictionary of catalogs with bands as keys to be made
415 contiguous.
416
417 Returns
418 -------
419 catalog :
420 Contiguous catalog or dictionary of contiguous catalogs.
421 """
422 if isinstance(catalog, dict):
423 for band, cat in catalog.items():
424 if not cat.isContiguous():
425 self.log.info(f"{band}-band catalog is not contiguous; making it contiguous")
426 catalog[band] = cat.copy(deep=True)
427 else:
428 if not catalog.isContiguous():
429 self.log.info("Catalog is not contiguous; making it contiguous")
430 catalog = catalog.copy(deep=True)
431 return catalog
432
433 def _updateCatalogSchema(self, catalog: afwTable.SourceCatalog) -> afwTable.SourceCatalog:
434 """Update the Schema of the provided catalog to incorporate changes
435 made by the configured subtasks.
436
437 Parameters
438 ----------
439 catalog :
440 Catalog to be updated with the Schema changes.
441
442 Returns
443 -------
444 updatedCatalog :
445 Catalog with the updated Schema.
446 """
447 # Create an empty catalog with the Schema required by the subtasks that
448 # are configured to run.
449 updatedCatalog = afwTable.SourceCatalog(self.schema)
450
451 # Transfer all records from the original catalog to the new catalog,
452 # using the SchemaMapper to copy values.
453 updatedCatalog.extend(catalog, mapper=self.mapper)
454
455 # Return the updated catalog, preserving the records while applying the
456 # updated Schema.
457 return updatedCatalog
458
459 def _detectSources(
460 self, exposure: afwImage.Exposure | afwImage.MultibandExposure, idGenerator: measBase.IdGenerator
461 ) -> tuple[afwTable.SourceCatalog, afwMath.BackgroundList]:
462 """Run the detection subtask to identify sources in the image.
463
464 Parameters
465 ----------
466 exposure :
467 Exposure on which to run the detection algorithm.
468 idGenerator :
469 Generator for unique source IDs.
470
471 Returns
472 -------
473 catalog :
474 A catalog containing detected sources.
475 backgroundList :
476 A list of background models obtained from the detection process,
477 if available.
478 """
479 self.log.info(f"Running detection on a {exposure.width}x{exposure.height} pixel exposure")
480
481 # Create an empty source table with the known Schema into which
482 # detected sources will be placed next.
483 table = afwTable.SourceTable.make(self.schema, idGenerator.make_table_id_factory())
484
485 # Run the detection task on the exposure and make a source catalog.
486 detections = self.detection.run(table, exposure)
487 catalog = detections.sources
488 backgroundList = afwMath.BackgroundList()
489
490 # Get the background model from the detection task, if available.
491 if hasattr(detections, "background") and detections.background:
492 for bg in detections.background:
493 backgroundList.append(bg)
494
495 return catalog, backgroundList
496
497 @abstractmethod
498 def _deblendSources(self, *args, **kwargs):
499 """Run the deblending subtask to separate blended sources. Subclasses
500 must implement this method to handle task-specific deblending logic.
501 """
502 raise NotImplementedError("This is not implemented on the base class")
503
504 def _measureSources(
505 self,
506 exposure: afwImage.Exposure,
507 catalog: afwTable.SourceCatalog,
508 idGenerator: measBase.IdGenerator,
509 refCat: afwTable.SourceCatalog | None = None,
510 ):
511 """Run the measurement subtask to compute properties of sources.
512
513 Parameters
514 ----------
515 exposure :
516 Exposure on which to run the measurement algorithm.
517 catalog :
518 Catalog containing sources on which to run the measurement subtask.
519 idGenerator :
520 Generator for unique source IDs.
521 refCat :
522 Reference catalog to be used for forced measurements, if any.
523 If not provided, the measurement will be run on the sources in the
524 catalog in a standard manner without reference.
525 """
526 if refCat:
527 # Note that refCat does not have a WCS, so we need to
528 # extract the WCS from the exposure.
529 refWcs = exposure.getWcs()
530 # Run forced measurement since a reference catalog is provided.
531 self.measurement.run(
532 measCat=catalog,
533 exposure=exposure,
534 refCat=refCat,
535 refWcs=refWcs,
536 exposureId=idGenerator.catalog_id,
537 )
538 else:
539 # Run standard measurement if no reference catalog is provided.
540 self.measurement.run(measCat=catalog, exposure=exposure, exposureId=idGenerator.catalog_id)
541
542 def _applyApCorr(
543 self, exposure: afwImage.Exposure, catalog: afwTable.SourceCatalog, idGenerator: measBase.IdGenerator
544 ):
545 """Apply aperture corrections to the catalog.
546
547 Parameters
548 ----------
549 exposure :
550 Exposure on which to apply aperture corrections.
551 catalog :
552 Catalog to be corrected using the aperture correction map from
553 the exposure.
554 idGenerator :
555 Generator for unique source IDs.
556 """
557 apCorrMap = exposure.getInfo().getApCorrMap()
558 if apCorrMap is None:
559 self.log.warning(
560 "Image does not have valid aperture correction map for catalog id "
561 f"{idGenerator.catalog_id}; skipping aperture correction"
562 )
563 else:
564 self.applyApCorr.run(catalog=catalog, apCorrMap=apCorrMap)
565
566 def _runCatalogCalculation(self, catalog: afwTable.SourceCatalog):
567 """Run the catalog calculation plugins on the catalog.
568
569 Parameters
570 ----------
571 catalog :
572 Catalog to be processed by the catalog calculation subtask.
573 """
574 self.catalogCalculation.run(catalog)
575
576 def _processCatalog(
577 self,
578 exposure: afwImage.Exposure,
579 catalog: afwTable.SourceCatalog,
580 idGenerator: measBase.IdGenerator,
581 band: str = "a single",
582 refCat: afwTable.SourceCatalog | None = None,
584 """Process a catalog through measurement, aperture correction, and
585 catalog calculation subtasks.
586
587 Parameters
588 ----------
589 exposure :
590 Exposure associated with the catalog.
591 catalog :
592 Catalog to be processed by the subtasks.
593 idGenerator :
594 Generator for unique source IDs.
595 band :
596 Band associated with the exposure and catalog. Used for logging.
597 refCat :
598 Reference catalog for forced measurements. If not provided, the
599 measurement will be run on the sources in the catalog in a standard
600 manner without reference.
601
602 Returns
603 -------
604 catalog :
605 Catalog after processing through the configured subtasks.
606 """
607 # Set the PSF cache capacity to cache repeated PSF evaluations at the
608 # same point coming from different measurement plugins.
609 if self.config.psfCache > 0:
610 # Set a hard limit on the number of PSFs to cache.
611 exposure.psf.setCacheCapacity(self.config.psfCache)
612 else:
613 # Auto-size the cache based on the number of measurement
614 # plugins. We assume each algorithm tries to evaluate the PSF
615 # twice, which is more than enough since many don't evaluate it
616 # at all, and there's no *good* reason for any algorithm to
617 # evaluate it more than once.
618 # (Adopted from drp_tasks/ForcedPhotCoaddTask)
619 exposure.psf.setCacheCapacity(2 * len(self.config.measurement.plugins.names))
620
621 # Measure properties of sources in the catalog.
622 if self.config.doMeasure:
623 self.log.info(
624 f"Measuring {len(catalog)} sources in {band} band "
625 f"using '{self.measurement.__class__.__name__}'"
626 )
627 self._measureSources(exposure, catalog, idGenerator, refCat=refCat)
628
629 # Ensure contiguity again.
630 catalog = self._toContiguous(catalog)
631
632 # Apply aperture corrections to the catalog.
633 if self.config.doApCorr:
634 self.log.info(f"Applying aperture corrections to {band} band")
635 self._applyApCorr(exposure, catalog, idGenerator)
636
637 # Run catalogCalculation on the catalog.
638 if self.config.doRunCatalogCalculation:
639 self.log.info(f"Running catalog calculation on {band} band")
640 self._runCatalogCalculation(catalog)
641
642 self.log.info(
643 f"Finished processing for {band} band; output catalog has {catalog.schema.getFieldCount()} "
644 f"fields and {len(catalog)} records"
645 )
646
647 return catalog
648
649
651 """Configuration for the single-band measurement driver task."""
652
653 deblend = ConfigurableField(target=measDeblender.SourceDeblendTask, doc="Deblender for single-band data.")
654
655
657 """Mid-level driver for processing single-band data.
658
659 Offers a helper method for direct handling of raw image data in addition to
660 the standard single-band exposure.
661
662 Examples
663 --------
664 Here is an example of how to use this class to run variance scaling,
665 detection, deblending, and measurement on a single-band exposure:
666
667 >>> from lsst.pipe.tasks.measurementDriver import (
668 ... SingleBandMeasurementDriverConfig,
669 ... SingleBandMeasurementDriverTask,
670 ... )
671 >>> import lsst.meas.extensions.shapeHSM # To register its plugins
672 >>> config = SingleBandMeasurementDriverConfig()
673 >>> config.doScaleVariance = True
674 >>> config.doDetect = True
675 >>> config.doDeblend = True
676 >>> config.doMeasure = True
677 >>> config.scaleVariance.background.binSize = 64
678 >>> config.detection.thresholdValue = 5.5
679 >>> config.deblend.tinyFootprintSize = 3
680 >>> config.measurement.plugins.names |= [
681 ... "base_SdssCentroid",
682 ... "base_SdssShape",
683 ... "ext_shapeHSM_HsmSourceMoments",
684 ... ]
685 >>> config.measurement.slots.psfFlux = None
686 >>> config.measurement.doReplaceWithNoise = False
687 >>> exposure = butler.get("deepCoadd", dataId=...)
688 >>> driver = SingleBandMeasurementDriverTask(config=config)
689 >>> results = driver.run(exposure)
690 >>> results.catalog.writeFits("meas_catalog.fits")
691
692 Alternatively, if an exposure is not available, the driver can also process
693 raw image data:
694
695 >>> image = ...
696 >>> mask = ...
697 >>> variance = ...
698 >>> wcs = ...
699 >>> psf = ...
700 >>> photoCalib = ...
701 >>> results = driver.runFromImage(
702 ... image, mask, variance, wcs, psf, photoCalib
703 ... )
704 >>> results.catalog.writeFits("meas_catalog.fits")
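
For quick experiments, the raw inputs can also be plain NumPy arrays. A
minimal illustrative sketch (the array contents and PSF width are made up,
and the WCS and photometric calibration are simply omitted):

>>> import numpy as np
>>> rng = np.random.default_rng(12345)
>>> image = rng.normal(0.0, 1.0, (256, 256)).astype(np.float32)
>>> mask = np.zeros((256, 256), dtype=np.int32)
>>> variance = np.ones((256, 256), dtype=np.float32)
>>> yy, xx = np.mgrid[-10:11, -10:11]
>>> psf = np.exp(-0.5 * (xx**2 + yy**2) / 2.0**2)
>>> results = driver.runFromImage(image, mask, variance, psf=psf)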
705 """
706
707 ConfigClass = SingleBandMeasurementDriverConfig
708 _DefaultName = "singleBandMeasurementDriver"
709 _Deblender = "meas_deblender"
710
711 def __init__(self, *args, **kwargs):
712 super().__init__(*args, **kwargs)
713
714 self.deblend: measDeblender.SourceDeblendTask
715 self.measurement: measBase.SingleFrameMeasurementTask
716
717 def run(
718 self,
719 exposure: afwImage.Exposure,
720 catalog: afwTable.SourceCatalog | None = None,
721 idGenerator: measBase.IdGenerator | None = None,
722 ) -> pipeBase.Struct:
723 """Process a single-band exposure through the configured subtasks and
724 return the results as a struct.
725
726 Parameters
727 ----------
728 exposure :
729 The exposure on which to run the driver task.
730 catalog :
731 Catalog to be extended by the driver task. If not provided, an
732 empty catalog will be created and populated.
733 idGenerator :
734 Object that generates source IDs and provides random seeds.
735
736 Returns
737 -------
738 result :
739 Results as a struct with attributes:
740
741 ``catalog``
742 Catalog containing the measured sources
743 (`~lsst.afw.table.SourceCatalog`).
744 ``backgroundList``
745 List of backgrounds (`list[~lsst.afw.math.Background]`). Only
746 populated if detection is enabled.
747 """
748
749 # Validate inputs before proceeding.
750 self._ensureValidInputs(catalog)
751
752 # Prepare the Schema and subtasks for processing.
753 catalog = self._prepareSchemaAndSubtasks(catalog)
754
755 # Generate catalog IDs consistently across subtasks.
756 if idGenerator is None:
757 idGenerator = measBase.IdGenerator()
758
759 # Scale the variance plane. If enabled, this should be done before
760 # detection.
761 if self.config.doScaleVariance:
762 self._scaleVariance(exposure)
763
764 # Detect sources in the image and populate the catalog.
765 if self.config.doDetect:
766 catalog, backgroundList = self._detectSources(exposure, idGenerator)
767 else:
768 self.log.info("Skipping detection; using detections from the provided catalog")
769 backgroundList = None
770
771 # Deblend detected sources and update the catalog.
772 if self.config.doDeblend:
773 catalog = self._deblendSources(exposure, catalog)
774 else:
775 self.log.info("Skipping deblending")
776
777 # Process catalog through measurement, aperture correction, and catalog
778 # calculation subtasks.
779 catalog = self._processCatalog(exposure, catalog, idGenerator)
780
781 return pipeBase.Struct(catalog=catalog, backgroundList=backgroundList)
782
783 def runFromImage(
784 self,
785 image: afwImage.MaskedImage | afwImage.Image | np.ndarray,
786 mask: afwImage.Mask | np.ndarray = None,
787 variance: afwImage.Image | np.ndarray = None,
788 wcs: afwGeom.SkyWcs = None,
789 psf: afwDetection.Psf | np.ndarray = None,
790 photoCalib: afwImage.PhotoCalib = None,
791 catalog: afwTable.SourceCatalog = None,
792 idGenerator: measBase.IdGenerator = None,
793 ) -> pipeBase.Struct:
794 """Convert image data to an `Exposure`, then run it through the
795 configured subtasks.
796
797 Parameters
798 ----------
799 image :
800 Input image data. Will be converted into an `Exposure` before
801 processing.
802 mask :
803 Mask data for the image. Used if ``image`` is a bare `array` or
804 `Image`.
805 variance :
806 Variance plane data for the image.
807 wcs :
808 World Coordinate System to associate with the exposure that will
809 be created from ``image``.
810 psf :
811 PSF model for the exposure.
812 photoCalib :
813 Photometric calibration model for the exposure.
814 catalog :
815 Catalog to be extended by the driver task. If not provided, a new
816 catalog will be created during detection and populated.
817 idGenerator :
818 Generator for unique source IDs.
819
820 Returns
821 -------
822 result :
823 Results as a struct with attributes:
824
825 ``catalog``
826 Catalog containing the measured sources
827 (`~lsst.afw.table.SourceCatalog`).
828 ``backgroundList``
829 List of backgrounds (`list[~lsst.afw.math.Background]`).
830 """
831 # Convert raw image data into an Exposure.
832 if isinstance(image, np.ndarray):
833 image = afwImage.makeImageFromArray(image)
834 if isinstance(mask, np.ndarray):
835 mask = afwImage.makeMaskFromArray(mask)
836 if isinstance(variance, np.ndarray):
837 variance = afwImage.makeImageFromArray(variance)
838 if isinstance(image, afwImage.Image):
839 image = afwImage.makeMaskedImage(image, mask, variance)
840
841 # By now, the input should already be - or have been converted to - a
842 # MaskedImage.
843 if isinstance(image, afwImage.MaskedImage):
844 exposure = afwImage.makeExposure(image, wcs)
845 else:
846 raise TypeError(f"Unsupported 'image' type: {type(image)}")
847
848 if psf is not None:
849 if isinstance(psf, np.ndarray):
850 # Create a FixedKernel using the array.
851 psf /= psf.sum()
852 kernel = afwMath.FixedKernel(afwImage.makeImageFromArray(psf))
853 # Create a KernelPsf using the kernel.
854 psf = afwDetection.KernelPsf(kernel)
855 elif not isinstance(psf, afwDetection.Psf):
856 raise TypeError(f"Unsupported 'psf' type: {type(psf)}")
857 exposure.setPsf(psf)
858
859 if photoCalib is not None:
860 exposure.setPhotoCalib(photoCalib)
861
862 return self.run(exposure, catalog=catalog, idGenerator=idGenerator)
863
864 def _deblendSources(
865 self, exposure: afwImage.Exposure, catalog: afwTable.SourceCatalog
866 ) -> afwTable.SourceCatalog:
867 """Run single-band deblending given an exposure and a catalog.
868
869 Parameters
870 ----------
871 exposure :
872 Exposure on which to run the deblending algorithm.
873 catalog :
874 Catalog containing sources to be deblended.
875
876 Returns
877 -------
878 catalog :
879 Catalog after deblending, with sources separated into their
880 individual components if they were deblended.
881 """
882 self.log.info(f"Deblending using '{self._Deblender}' on {len(catalog)} detection footprints")
883 self.deblend.run(exposure=exposure, sources=catalog)
884 # The deblender may not produce contiguous catalogs; ensure
885 # contiguity for the subsequent subtasks.
886 return self._toContiguous(catalog)
887
888
890 """Configuration for the multi-band measurement driver task."""
891
892 deblend = ConfigurableField(
893 target=scarlet.ScarletDeblendTask, doc="Scarlet deblender for multi-band data"
894 )
895
896 doConserveFlux = Field[bool](
897 doc="Whether to use the deblender models as templates to re-distribute the flux from "
898 "the 'exposure' (True), or to perform measurements on the deblender model footprints.",
899 default=False,
900 )
901
902 measureOnlyInRefBand = Field[bool](
903 doc="If True, all measurements downstream of deblending run only in the reference band that "
904 "was used for detection; otherwise, they are performed in all available bands, generating a "
905 "catalog for each. Regardless of this setting, deblending still uses all available bands.",
906 default=False,
907 )
908
909 removeScarletData = Field[bool](
910 doc="Whether or not to remove `ScarletBlendData` for each blend in order to save memory. "
911 "If set to True, some sources may end up with missing footprints in catalogs other than the "
912 "reference-band catalog, leading to failures in subsequent measurements that require footprints. "
913 "For example, keep this False if `measureOnlyInRefBand` is set to False and "
914 "`measurement.doReplaceWithNoise` to True, in order to make the footprints available in "
915 "non-reference bands in addition to the reference band.",
916 default=False,
917 )
918
919 updateFluxColumns = Field[bool](
920 doc="Whether or not to update the `deblend_*` columns in the catalog. This should only be "
921 "True when the input catalog schema already contains those columns.",
922 default=True,
923 )
924
925
927 """Mid-level driver for processing multi-band data.
928
929 The default behavior is to run detection on the reference band, use all
930 available bands for deblending, and then process everything downstream
931 separately for each band, making per-band catalogs, unless configured
932 otherwise. This subclass provides functionality for handling a single-band
933 exposure and a list of single-band exposures in addition to a standard
934 multi-band exposure.
935
936 Examples
937 --------
938 Here is an example of how to use this class to run variance scaling,
939 detection, deblending, measurement, and aperture correction on a multi-band
940 exposure:
941
942 >>> from lsst.afw.image import MultibandExposure
943 >>> from lsst.pipe.tasks.measurementDriver import (
944 ... MultiBandMeasurementDriverConfig,
945 ... MultiBandMeasurementDriverTask,
946 ... )
947 >>> import lsst.meas.extensions.shapeHSM # To register its plugins
948 >>> config = MultiBandMeasurementDriverConfig()
949 >>> config.doScaleVariance = True
950 >>> config.doDetect = True
951 >>> config.doDeblend = True
952 >>> config.doMeasure = True
953 >>> config.doApCorr = True
954 >>> config.scaleVariance.background.binSize = 64
955 >>> config.detection.thresholdValue = 5.5
956 >>> config.deblend.minSNR = 42.0
957 >>> config.deblend.maxIter = 20
958 >>> config.measurement.plugins.names |= [
959 ... "base_SdssCentroid",
960 ... "base_SdssShape",
961 ... "ext_shapeHSM_HsmSourceMoments",
962 ... ]
963 >>> config.measurement.slots.psfFlux = None
964 >>> config.measurement.doReplaceWithNoise = False
965 >>> config.applyApCorr.doFlagApCorrFailures = False
966 >>> mExposure = MultibandExposure.fromButler(
967 ... butler, ["g", "r", "i"], "deepCoadd_calexp", ...
968 ... )
969 >>> driver = MultiBandMeasurementDriverTask(config=config)
970 >>> results = driver.run(mExposure, "r")
971 >>> for band, catalog in results.catalogs.items():
972 ... catalog.writeFits(f"meas_catalog_{band}.fits")
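
The same driver also accepts a list of single-band exposures together with
their band labels. A minimal sketch reusing the exposure from above
(illustrative only):

>>> exposures = [mExposure[b] for b in ["g", "r", "i"]]
>>> results = driver.run(exposures, bands=["g", "r", "i"], refBand="r")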
973 """
974
975 ConfigClass = MultiBandMeasurementDriverConfig
976 _DefaultName = "multiBandMeasurementDriver"
977 _Deblender = "scarlet"
978
979 def __init__(self, *args, **kwargs):
980 super().__init__(*args, **kwargs)
981
982 self.deblend: scarlet.ScarletDeblendTask
983
984 # Placeholder for the model data produced by the deblender. Caching
985 this data has proven to be useful for debugging.
986 self.modelData: scl.io.ScarletModelData
987
988 def run(
989 self,
990 mExposure: afwImage.MultibandExposure | list[afwImage.Exposure] | afwImage.Exposure,
991 mDeconvolved: afwImage.MultibandExposure | list[afwImage.Exposure] | afwImage.Exposure | None = None,
992 refBand: str | None = None,
993 bands: list[str] | None = None,
994 catalog: afwTable.SourceCatalog = None,
995 idGenerator: measBase.IdGenerator = None,
996 ) -> pipeBase.Struct:
997 """Process an exposure through the configured subtasks while using
998 multi-band information for deblending.
999
1000 Parameters
1001 ----------
1002 mExposure :
1003 Multi-band data containing images of the same shape and region of
1004 the sky. May be a `MultibandExposure`, a single-band exposure
1005 (i.e., `Exposure`), or a list of single-band exposures associated
1006 with different bands in which case ``bands`` must be provided. If a
1007 single-band exposure is given, it will be treated as a
1008 `MultibandExposure` that contains only that one band whose name may
1009 be "unknown" unless either ``bands`` or ``refBand`` is provided.
1010 mDeconvolved :
1011 Multi-band deconvolved images of the same shape and region of the
1012 sky. Follows the same type conventions as ``mExposure``. If not
1013 provided, the deblender will run the deconvolution internally
1014 using the provided ``mExposure``.
1015 refBand :
1016 Reference band to use for detection. Not required for single-band
1017 exposures. If `measureOnlyInRefBand` is enabled while detection is
1018 disabled and a catalog of detected sources is provided, this
1019 should specify the band the sources were detected on (or the band
1020 you want to use to perform measurements on exclusively). If
1021 `measureOnlyInRefBand` is disabled instead in the latter scenario,
1022 ``refBand`` does not need to be provided.
1023 bands :
1024 List of bands associated with the exposures in ``mExposure``. Only
1025 required if ``mExposure`` is a list of single-band exposures. If
1026 provided for a multi-band exposure, it will be used to only process
1027 that subset of bands from the available ones in the exposure.
1028 catalog :
1029 Catalog to be extended by the driver task. If not provided, a new
1030 catalog will be created and populated.
1031 idGenerator :
1032 Generator for unique source IDs.
1033
1034 Returns
1035 -------
1036 result :
1037 Results as a struct with attributes:
1038
1039 ``catalogs``
1040 Dictionary of catalogs containing the measured sources with
1041 bands as keys (`dict[str, ~lsst.afw.table.SourceCatalog]`). If
1042 `measureOnlyInRefBand` is enabled or deblending is disabled,
1043 this will only contain the reference-band catalog; otherwise,
1044 it will contain a catalog for each band.
1045 ``backgroundList``
1046 List of backgrounds (`list[~lsst.afw.math.Background]`). Will
1047 be None if detection is disabled.
1048 ``modelData``
1049 Multiband scarlet models produced during deblending
1050 (`~lsst.scarlet.lite.io.ScarletModelData`). Will be None if
1051 deblending is disabled.
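
Examples
--------
After a call like ``results = driver.run(mExposure, "r")`` (see the
class-level example for the configuration), the returned struct can be
unpacked as:

>>> catalogs = results.catalogs  # dict of per-band catalogs
>>> modelData = results.modelData  # None when deblending is disabled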
1052 """
1053
1054 # Validate inputs and adjust them as necessary.
1055 mExposure, mDeconvolved, refBand, bands = self._ensureValidInputs(
1056 mExposure, mDeconvolved, refBand, bands, catalog
1057 )
1058
1059 # Prepare the Schema and subtasks for processing.
1060 catalog = self._prepareSchemaAndSubtasks(catalog)
1061
1062 # Generate catalog IDs consistently across subtasks.
1063 if idGenerator is None:
1064 idGenerator = measBase.IdGenerator()
1065
1066 # Scale the variance plane. If enabled, this should be done before
1067 # detection.
1068 if self.config.doScaleVariance:
1069 # Here, we iterate over references to the exposures, not copies.
1070 for band in mExposure.bands:
1071 self._scaleVariance(mExposure[band], band=f"'{band}'")
1072
1073 # Detect sources in the reference band and populate the catalog.
1074 if self.config.doDetect:
1075 catalog, backgroundList = self._detectSources(mExposure[refBand], idGenerator)
1076 else:
1077 self.log.info("Skipping detection; using detections from provided catalog")
1078 backgroundList = None
1079
1080 # Deblend detected sources and update the catalog(s).
1081 if self.config.doDeblend:
1082 catalogs, self.modelData = self._deblendSources(mExposure, mDeconvolved, catalog, refBand=refBand)
1083 else:
1084 self.log.warning(
1085 "Skipping deblending; proceeding with the provided catalog in the reference band"
1086 )
1087 catalogs = {refBand: catalog}
1088 self.modelData = None
1089
1090 # Process catalog(s) through measurement, aperture correction, and
1091 # catalog calculation subtasks.
1092 for band, catalog in catalogs.items():
1093 exposure = mExposure[band]
1094 self._processCatalog(exposure, catalog, idGenerator, band=f"'{band}'")
1095
1096 return pipeBase.Struct(catalogs=catalogs, backgroundList=backgroundList, modelData=self.modelData)
1097
1098 def _ensureValidInputs(
1099 self,
1100 mExposure: afwImage.MultibandExposure | list[afwImage.Exposure] | afwImage.Exposure,
1101 mDeconvolved: afwImage.MultibandExposure | list[afwImage.Exposure] | afwImage.Exposure | None,
1102 refBand: str | None,
1103 bands: list[str] | None,
1104 catalog: afwTable.SourceCatalog | None = None,
1105 ) -> tuple[afwImage.MultibandExposure, afwImage.MultibandExposure, str, list[str] | None]:
1106 """Perform validation and adjustments of inputs without heavy
1107 computation.
1108
1109 Parameters
1110 ----------
1111 mExposure :
1112 Multi-band data to be processed by the driver task.
1113 mDeconvolved :
1114 Multi-band deconvolved data to be processed by the driver task.
1115 refBand :
1116 Reference band to use for detection.
1117 bands :
1118 List of bands associated with the exposures in ``mExposure``.
1119 catalog :
1120 Catalog to be extended by the driver task.
1121
1122 Returns
1123 -------
1124 mExposure :
1125 Multi-band exposure to be processed by the driver task. If the
1126 input was not already a `MultibandExposure` (optionally with the
1127 relevant ``bands``), it is converted into one and returned
1128 here; otherwise, the original input is returned unchanged.
1129 mDeconvolved :
1130 Multi-band deconvolved exposure to be processed by the driver task.
1131 Same adjustments apply as for ``mExposure`` except that it is
1132 optional and may be returned as None if not provided as input.
1133 refBand :
1134 Reference band to use for detection after potential adjustments.
1135 If not provided in the input, and only one band is set to be
1136 processed, ``refBand`` will be chosen to be the only existing band
1137 in the ``bands`` list, or `mExposure.bands`, and if neither is
1138 provided, it will be set to "unknown" for single-band exposures
1139 processed by this multi-band driver.
1140 bands :
1141 List of bands associated with the exposures in ``mExposure`` after
1142 potential adjustments. If not provided in the input, it will be set
1143 to a list containing only the provided (or inferred as "unknown")
1144 ``refBand`` value.
1145 """
1146
1147 # Perform basic checks that are shared with all driver tasks.
1148 super()._ensureValidInputs(catalog)
1149
1150 # Multi-band-specific validation and adjustments.
1151
1152 if bands is not None:
1153 if not isinstance(bands, list):
1154 raise TypeError(f"Expected 'bands' to be a list, got {type(bands)}")
1155 if not all(isinstance(b, str) for b in bands):
1156 raise TypeError(f"All elements in 'bands' must be strings, got {[type(b) for b in bands]}")
1157
1158 if refBand is not None:
1159 if not isinstance(refBand, str):
1160 raise TypeError(f"Reference band must be a string, got {type(refBand)}")
1161
1162 # Validate mExposure.
1163 if isinstance(mExposure, afwImage.MultibandExposure):
1164 if bands is not None:
1165 if any(b not in mExposure.bands for b in bands):
1166 raise ValueError(
1167 f"Some of the provided {bands=} are not present in {mExposure.bands=}"
1168 )
1169 self.log.info(
1170 f"Using {bands=} out of the available {mExposure.bands=} in the multi-band exposures"
1171 )
1172 elif isinstance(mExposure, list):
1173 if bands is None:
1174 raise ValueError("The 'bands' list must be provided if 'mExposure' is a list")
1175 if len(bands) != len(mExposure):
1176 raise ValueError("Number of bands and exposures must match")
1177 elif isinstance(mExposure, afwImage.Exposure):
1178 if bands is not None:
1179 if len(bands) != 1:
1180 raise ValueError(
1181 f"{bands=}, if provided, must only contain a single band "
1182 "if 'mExposure' is a single-band exposure"
1183 )
1184 else:
1185 raise TypeError(f"Unsupported 'mExposure' type: {type(mExposure)}")
1186
1187 # Validate mDeconvolved.
1188 if mDeconvolved:
1189 if isinstance(mDeconvolved, afwImage.MultibandExposure):
1190 if bands is not None:
1191 if any(b not in mDeconvolved.bands for b in bands):
1192 raise ValueError(
1193 f"Some of the provided {bands=} are not present in {mDeconvolved.bands=}"
1194 )
1195 elif isinstance(mDeconvolved, list):
1196 if bands is None:
1197 raise ValueError("The 'bands' list must be provided if 'mDeconvolved' is a list")
1198 if len(bands) != len(mDeconvolved):
1199 raise ValueError("Number of bands and deconvolved exposures must match")
1200 elif isinstance(mDeconvolved, afwImage.Exposure):
1201 if bands is not None:
1202 if len(bands) != 1:
1203 raise ValueError(
1204 f"{bands=}, if provided, must only contain a single band "
1205 "if 'mDeconvolved' is a single-band exposure"
1206 )
1207 else:
1208 raise TypeError(f"Unsupported {type(mDeconvolved)=}")
1209
1210 if isinstance(mExposure, afwImage.Exposure) or isinstance(mDeconvolved, afwImage.Exposure):
1211 if bands is not None and len(bands) != 1:
1212 raise ValueError(
1213 f"{bands=}, if provided, must only contain a single band "
1214 "if one of 'mExposure' or 'mDeconvolved' is a single-band exposure"
1215 )
1216 if bands is None and refBand is None:
1217 refBand = "unknown" # Placeholder for single-band deblending
1218 bands = [refBand]
1219 elif bands is None and refBand is not None:
1220 bands = [refBand]
1221 elif bands is not None and refBand is None:
1222 refBand = bands[0]
1223
1224 # Convert or subset the exposures to a MultibandExposure with the
1225 # bands of interest.
1226 mExposure = self._buildMultibandExposure(mExposure, bands)
1227
1228 if mDeconvolved:
1229 mDeconvolved = self._buildMultibandExposure(mDeconvolved, bands)
1230 if mExposure.bands != mDeconvolved.bands:
1231 raise ValueError(
1232 "The bands in 'mExposure' and 'mDeconvolved' must match; "
1233 f"got {mExposure.bands} and {mDeconvolved.bands}"
1234 )
1235
1236 if len(mExposure.bands) == 1:
1237 # N.B. Scarlet is designed to leverage multi-band information to
1238 # differentiate overlapping sources based on their spectral and
1239 # spatial profiles. However, it can also run on a single band and
1240 # often give better results than 'meas_deblender'.
1241 self.log.info(f"Running '{self._Deblender}' in single-band mode; make sure it was intended!")
1242 if refBand is None:
1243 refBand = mExposure.bands[0]
1244 self.log.info(
1245 "No reference band provided for single-band data; "
1246 f"using the only available band ('{refBand}') as the reference band"
1247 )
1248 else:
1249 if catalog is None:
1250 if self.config.measureOnlyInRefBand:
1251 measInfo = "and everything downstream of deblending"
1252 else:
1253 measInfo = (
1254 "while subtasks downstream of deblending will be run in each of "
1255 f"the {mExposure.bands} bands"
1256 )
1257 self.log.info(f"Using '{refBand}' as the reference band for detection {measInfo}")
1258
1259 # Final sanity checks after all the adjustments above.
1260 if refBand is None:
1261 raise ValueError("Reference band must be provided for multi-band data")
1262
1263 if refBand not in mExposure.bands:
1264 raise ValueError(f"Requested {refBand=} is not in {mExposure.bands=}")
1265
1266 if bands is not None and refBand not in bands:
1267 raise ValueError(f"Reference {refBand=} is not in {bands=}")
1268
1269 return mExposure, mDeconvolved, refBand, bands
1270
1271 def _deblendSources(
1272 self,
1273 mExposure: afwImage.MultibandExposure,
1274 mDeconvolved: afwImage.MultibandExposure | None,
1275 catalog: afwTable.SourceCatalog,
1276 refBand: str,
1277 ) -> tuple[dict[str, afwTable.SourceCatalog], scl.io.ScarletModelData]:
1278 """Run multi-band deblending given a multi-band exposure and a catalog.
1279
1280 Parameters
1281 ----------
1282 mExposure :
1283 Multi-band exposure on which to run the deblending algorithm.
1284 mDeconvolved :
1285 Multi-band deconvolved exposure to use for deblending. If None,
1286 the deblender will create it internally using the provided
1287 ``mExposure``.
1288 catalog :
1289 Catalog containing sources to be deblended.
1290 refBand :
1291 Reference band used for detection or the band to use for
1292 measurements if `measureOnlyInRefBand` is enabled.
1293
1294 Returns
1295 -------
1296 catalogs :
1297 Dictionary of catalogs containing the deblended sources. If
1298 `measureOnlyInRefBand` is enabled, this will only contain the
1299 reference-band catalog; otherwise, it will contain a catalog for
1300 each band.
1301 modelData :
1302 Multiband scarlet models produced during deblending.
1303 """
1304 self.log.info(f"Deblending using '{self._Deblender}' on {len(catalog)} detection footprints")
1305
1306 if mDeconvolved is None:
1307 # Make a deconvolved version of the multi-band exposure.
1308 deconvolvedCoadds = []
1309 deconvolveTask = DeconvolveExposureTask()
1310 for coadd in mExposure:
1311 deconvolvedCoadd = deconvolveTask.run(coadd, catalog).deconvolved
1312 deconvolvedCoadds.append(deconvolvedCoadd)
1313 mDeconvolved = afwImage.MultibandExposure.fromExposures(mExposure.bands, deconvolvedCoadds)
1314
1315 # Run the deblender on the multi-band exposure.
1316 result = self.deblend.run(mExposure, mDeconvolved, catalog)
1317 catalog = result.deblendedCatalog
1318 modelData = result.scarletModelData
1319
1320 # Determine which bands to process post-deblending.
1321 bands = [refBand] if self.config.measureOnlyInRefBand else mExposure.bands
1322
1323 catalogs = {band: catalog.copy(deep=True) for band in bands}
1324 for band in bands:
1325 # The footprints need to be updated for the subsequent measurement.
1326 imageForRedistribution = mExposure[band] if self.config.doConserveFlux else None
1327 scarlet.io.updateCatalogFootprints(
1328 modelData=modelData,
1329 catalog=catalogs[band], # In-place modification
1330 band=band,
1331 imageForRedistribution=imageForRedistribution,
1332 removeScarletData=self.config.removeScarletData,
1333 updateFluxColumns=self.config.updateFluxColumns,
1334 )
1335
1336 return self._toContiguous(catalogs), modelData
1337
1338 def _buildMultibandExposure(
1339 self,
1340 mExposureData: afwImage.MultibandExposure | list[afwImage.Exposure] | afwImage.Exposure,
1341 bands: list[str] | None,
1343 """Convert a single-band exposure or a list of single-band exposures to
1344 a `MultibandExposure` if not already of that type.
1345
1346 No conversion will be done if ``mExposureData`` is already a
1347 `MultibandExposure` except it will be subsetted to the bands provided.
1348
1349 Parameters
1350 ----------
1351 mExposureData :
1352 Input multi-band data.
1353 bands :
1354 List of bands associated with the exposures in ``mExposure``. Only
1355 required if ``mExposure`` is a list of single-band exposures. If
1356 provided while ``mExposureData`` is a ``MultibandExposure``, it
1357 will be used to select a specific subset of bands from the
1358 available ones.
1359
1360 Returns
1361 -------
1362 mExposure :
1363 Converted multi-band exposure.
1364 """
1365 if isinstance(mExposureData, afwImage.MultibandExposure):
1366 if bands and not set(bands).issubset(mExposureData.bands):
1367 raise ValueError(
1368 f"Requested bands {bands} are not a subset of available bands: {mExposureData.bands}"
1369 )
1370 return mExposureData[bands,] if bands and len(bands) > 1 else mExposureData
1371 elif isinstance(mExposureData, list):
1372 mExposure = afwImage.MultibandExposure.fromExposures(bands, mExposureData)
1373 elif isinstance(mExposureData, afwImage.Exposure):
1374 # We still need to build a multi-band exposure to satisfy scarlet
1375 # function's signature, even when using a single band.
1376 mExposure = afwImage.MultibandExposure.fromExposures(bands, (mExposureData := [mExposureData]))
1377
1378 # Attach the WCS from each input exposure to the corresponding band of
1379 # the multi-band exposure; otherwise, their WCS will be None,
1380 # potentially causing issues downstream. Note that afwImage does not do
1381 # this when constructing a MultibandExposure from exposures.
1382 for band, exposure in zip(bands, mExposureData):
1383 mExposure[band].setWcs(exposure.getWcs())
1384
1385 return mExposure
1386
1387
1389 """Configuration for the forced measurement driver task."""
1390
1391 measurement = ConfigurableField(
1392 target=measBase.ForcedMeasurementTask,
1393 doc="Measurement task for forced measurements. This should be a "
1394 "measurement task that does not perform detection.",
1395 )
1396
1397 def setDefaults(self):
1398 """Set default values for the configuration.
1399
1400 This method overrides the base class method to ensure that `doDetect`
1401 is set to `False` by default, as this task is intended for forced
1402 measurements where detection is not performed. Also, it sets some
1403 default measurement plugins by default.
1404 """
1405 super().setDefaults()
1406 self.doDetect = False
1407 self.doDeblend = False
1408 self.doMeasure = True
1409 self.measurement.plugins.names = [
1410 "base_PixelFlags",
1411 "base_TransformedCentroidFromCoord",
1412 "base_PsfFlux",
1413 "base_CircularApertureFlux",
1414 ]
1415
1416 def validate(self):
1417 """Validate the configuration.
1418
1419 This method overrides the base class validation to ensure that
1420 `doDetect` is set to `False`, as this task is intended for forced
1421 measurements where detection is not performed.
1422 """
1423 super().validate()
1424 if self.doDetect or self.doDeblend:
1425 raise ValueError(
1426 "ForcedMeasurementDriverTask should not perform detection or "
1427 "deblending; set doDetect=False and doDeblend=False"
1428 )
1429 if not self.doMeasure:
1430 raise ValueError("ForcedMeasurementDriverTask must perform measurements; set doMeasure=True")
1431
1432
1434 """Forced measurement driver task for single-band data.
1435
1436 This task is the 'forced' version of the `SingleBandMeasurementDriverTask`,
1437 intended as a convenience function for performing forced photometry on an
1438 input image given a set of IDs and RA/Dec coordinates. It is designed as a
1439 public-facing interface, allowing users to measure sources without
1440 explicitly instantiating and running pipeline tasks.
1441
1442 Examples
1443 --------
1444 Here is an example of how to use this class to run forced measurements on
1445 an exposure using an Astropy table containing source IDs and RA/Dec
1446 coordinates:
1447
1448 >>> from lsst.pipe.tasks.measurementDriver import (
1449 ... ForcedMeasurementDriverConfig,
1450 ... ForcedMeasurementDriverTask,
1451 ... )
1452 >>> import astropy.table
1453 >>> import lsst.afw.image as afwImage
1454 >>> config = ForcedMeasurementDriverConfig()
1455 >>> config.doScaleVariance = True
1456 >>> config.scaleVariance.background.binSize = 32
1457 >>> config.doApCorr = True
1458 >>> config.measurement.plugins.names = [
1459 ... "base_PixelFlags",
1460 ... "base_TransformedCentroidFromCoord",
1461 ... "base_PsfFlux",
1462 ... "base_CircularApertureFlux",
1463 ... ]
1464 >>> config.measurement.slots.psfFlux = "base_PsfFlux"
1465 >>> config.measurement.slots.centroid = "base_TransformedCentroidFromCoord"
1466 >>> config.measurement.slots.shape = None
1467 >>> config.measurement.doReplaceWithNoise = False
1468 >>> calexp = butler.get("deepCoadd_calexp", dataId=...)
1469 >>> objtable = butler.get(
1470 ... "objectTable", dataId=..., storageClass="ArrowAstropy"
1471 ... )
1472 >>> table = objtable[:5].copy()["objectId", "coord_ra", "coord_dec"]
1473 >>> driver = ForcedMeasurementDriverTask(config=config)
1474 >>> results = driver.runFromAstropy(
1475 ... table,
1476 ... calexp,
1477 ... id_column_name="objectId",
1478 ... ra_column_name="coord_ra",
1479 ... dec_column_name="coord_dec",
1480 ... psf_footprint_scaling=3.0,
1481 ... )
1482 >>> results.write("forced_meas_catalog.fits")
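
The input table can also be constructed directly. A minimal illustrative
sketch (the object IDs and coordinates are made up and must correspond to
positions inside the exposure; the coordinate columns carry angular units):

>>> import astropy.units as u
>>> table = astropy.table.Table(
...     {
...         "objectId": [1, 2],
...         "coord_ra": [150.1123, 150.1145] * u.deg,
...         "coord_dec": [2.2051, 2.2067] * u.deg,
...     }
... )
>>> results = driver.runFromAstropy(table, calexp)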
1483 """
1484
1485 ConfigClass = ForcedMeasurementDriverConfig
1486 _DefaultName = "forcedMeasurementDriver"
1487
1488 def __init__(self, *args, **kwargs):
1489 """Initialize the forced measurement driver task."""
1490 super().__init__(*args, **kwargs)
1491
1492 self.measurement: measBase.ForcedMeasurementTask # To be created!
1493
1494 def runFromAstropy(
1495 self,
1496 table: astropy.table.Table,
1497 exposure: afwImage.Exposure,
1498 *,
1499 id_column_name: str = "objectId",
1500 ra_column_name: str = "coord_ra",
1501 dec_column_name: str = "coord_dec",
1502 psf_footprint_scaling: float = 3.0,
1503 idGenerator: measBase.IdGenerator | None = None,
1504 ) -> astropy.table.Table:
1505 """Run forced measurements on an exposure using an Astropy table.
1506
1507 Parameters
1508 ----------
1509 table :
1510 Astropy table containing source IDs and RA/Dec coordinates.
1511 Must contain columns with names specified by `id_column_name`,
1512 `ra_column_name`, and `dec_column_name`.
1513 exposure :
1514 Exposure on which to run the forced measurements.
1515 id_column_name :
1516 Name of the column containing source IDs in the table.
1517 ra_column_name :
1518 Name of the column containing RA coordinates in the table.
1519 dec_column_name :
1520 Name of the column containing Dec coordinates in the table.
1521 psf_footprint_scaling :
1522 Scaling factor to apply to the PSF second-moments ellipse in order
1523 to determine the footprint boundary; see the Notes section below.
1524 idGenerator :
1525 Object that generates source IDs and provides random seeds.
1526 If not provided, a new `IdGenerator` will be created.
1527
1528 Returns
1529 -------
1530 result :
1531 Astropy table containing the measured sources, with columns for
1532 the source IDs, RA, and Dec from the input table plus the
1533 measurement columns defined in the configuration.
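
Notes
-----
Because detection is not run here, each source's footprint is
approximated by scaling the PSF second-moments ellipse at its position
(via `ForcedMeasurementTask.attachPsfShapeFootprints`). As an
illustrative sketch, the equivalent standalone call, assuming a
measurement catalog `catalog` and an `exposure` are already in hand,
would be:

>>> driver.measurement.attachPsfShapeFootprints(
...     catalog, exposure, scaling=3.0
... )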
1534 """
1535 # Validate inputs before proceeding.
1536 coord_unit = self._ensureValidInputs(table, exposure, id_column_name, ra_column_name, dec_column_name)
1537
1538 # Generate catalog IDs consistently across subtasks.
1539 if idGenerator is None:
1540 idGenerator = measBase.IdGenerator()
1541
1542 # Get the WCS from the exposure and use it as the reference WCS.
1543 refWcs = exposure.getWcs()
1544
1545 # Prepare the Schema and subtasks for processing. No catalog is
1546 # provided here, as we will generate it from the reference catalog.
1547 self._prepareSchemaAndSubtasks(catalog=None)
1548
1549 # Convert the Astropy table to a minimal source catalog.
1550 # This must be done *after* `_prepareSchemaAndSubtasks`, or the schema
1551 # won't be set up correctly.
1552 refCat = self._makeMinimalSourceCatalogFromAstropy(
1553 table, columns=[id_column_name, ra_column_name, dec_column_name], coord_unit=coord_unit
1554 )
1555
1556 # Check whether coords are within the image.
1557 bbox = exposure.getBBox()
1558 for record in refCat:
1559 localPoint = refWcs.skyToPixel(record.getCoord())
1560 localIntPoint = lsst.geom.Point2I(localPoint)
1561 if not bbox.contains(localIntPoint):
1562 raise ValueError(
1563 f"Record {record.getId()} falls outside the exposure bounding box; all coordinates must lie within the image."
1564 )
1565
1566 # Scale the variance plane.
1567 if self.config.doScaleVariance:
1568 self._scaleVariance(exposure)
1569
1570 # Generate the measurement catalog from the reference catalog.
1571 # The `exposure` and `wcs` arguments will not actually be used by the
1572 # call below, but we need to pass them to satisfy the interface.
1573 catalog = self.measurement.generateMeasCat(
1574 exposure, refCat, refWcs, idFactory=idGenerator.make_table_id_factory()
1575 )
1576
1577 # Forced measurement uses a provided catalog, so detection was skipped
1578 # and no footprints exist. We therefore resort to approximate
1579 # footprints by scaling the PSF's second-moments ellipse.
1580 self.measurement.attachPsfShapeFootprints(catalog, exposure, scaling=psf_footprint_scaling)
1581
1582 # Process catalog through measurement, aperture correction, and catalog
1583 # calculation subtasks.
1584 catalog = self._processCatalog(exposure, catalog, idGenerator, refCat=refCat)
1585
1586 # Convert the catalog back to an Astropy table.
1587 result = catalog.asAstropy()
1588
1589 # Clean up: 'id' may confuse users since 'objectId' is the expected
1590 # identifier.
1591 del result["id"]
1592
1593 return result
1594
1595 def run(self, *args, **kwargs):
1596 raise NotImplementedError(
1597 "The run method is not implemented for `ForcedMeasurementDriverTask`. "
1598 "Use `runFromAstropy` instead."
1599 )
1600
1601 def runFromImage(self, *args, **kwargs):
1602 raise NotImplementedError(
1603 "The `runFromImage` method is not implemented for `ForcedMeasurementDriverTask`. "
1604 "Use `runFromAstropy` instead."
1605 )
1606
1607 def _ensureValidInputs(
1608 self,
1609 table: astropy.table.Table,
1610 exposure: afwImage.Exposure,
1611 id_column_name: str,
1612 ra_column_name: str,
1613 dec_column_name: str,
1614 ) -> str:
1615 """Validate the inputs for the forced measurement task.
1616
1617 Parameters
1618 ----------
1619 table :
1620 Astropy table containing source IDs and RA/Dec coordinates.
1621 exposure :
1622 Exposure on which to run the forced measurements.
1623 id_column_name :
1624 Name of the column containing source IDs in the table.
1625 ra_column_name :
1626 Name of the column containing RA coordinates in the table.
1627 dec_column_name :
1628 Name of the column containing Dec coordinates in the table.
1629
1630 Returns
1631 -------
1632 coord_unit : `str`
1633 Unit of the sky coordinates extracted from the table.
1634 """
1635 if not isinstance(table, astropy.table.Table):
1636 raise TypeError(f"Expected 'table' to be an astropy Table, got {type(table)}")
1637
1638 if not isinstance(exposure, afwImage.Exposure):
1639 raise TypeError(f"Expected 'exposure' to be an Exposure, got {type(exposure)}")
1640
1641 for col in [id_column_name, ra_column_name, dec_column_name]:
1642 if col not in table.colnames:
1643 raise ValueError(f"Column '{col}' not found in the input table")
1644
1645 if table[ra_column_name].unit == table[dec_column_name].unit:
1646 if table[ra_column_name].unit == astropy.units.deg:
1647 coord_unit = "degrees"
1648 elif table[ra_column_name].unit == astropy.units.rad:
1649 coord_unit = "radians"
1650 else:
1651 # Fall back to the unit's string representation.
1652 coord_unit = str(table[ra_column_name].unit)
1653 else:
1654 raise ValueError("RA and Dec columns must have the same unit")
1655
1656 return coord_unit
1657
1658 def _makeMinimalSourceCatalogFromAstropy(
1659 self,
1660 table: astropy.table.Table,
1661 columns: list[str] = ["id", "ra", "dec"],
1662 coord_unit: str = "degrees",
1663 ):
1664 """Convert an Astropy Table to a minimal LSST SourceCatalog.
1665
1666 This is intended for use with the forced measurement subtask, which
1667 expects a `SourceCatalog` input with a minimal schema containing `id`,
1668 `ra`, and `dec`; see the Examples section below.
1669
1670 Parameters
1671 ----------
1672 table :
1673 Astropy Table containing source IDs and sky coordinates.
1674 columns :
1675 Names of the columns in the order [id, ra, dec], where `ra` and
1676 `dec` are in degrees by default. If the coordinates are in radians,
1677 set `coord_unit` to "radians".
1678 coord_unit : `str`
1679 Unit of the sky coordinates. Can be either "degrees" or "radians".
1680
1681 Returns
1682 -------
1683 outputCatalog : `lsst.afw.table.SourceCatalog`
1684 A SourceCatalog with minimal schema populated from the input table.
1685
1686 Raises
1687 ------
1688 ValueError
1689 If `coord_unit` is not "degrees" or "radians".
1690 If `columns` does not contain exactly 3 items.
1691 KeyError
1692 If any of the specified columns are missing from the input table.
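
Examples
--------
A minimal illustrative sketch (the coordinate values are hypothetical,
and `driver` is assumed to be a configured task instance whose schema
has already been initialized, e.g. via `_prepareSchemaAndSubtasks`):

>>> import astropy.table
>>> import astropy.units as u
>>> table = astropy.table.Table(
...     {"id": [42], "ra": [150.0] * u.deg, "dec": [2.2] * u.deg}
... )
>>> refCat = driver._makeMinimalSourceCatalogFromAstropy(
...     table, columns=["id", "ra", "dec"], coord_unit="degrees"
... )
>>> len(refCat)
1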
1693 """
1694 # TODO: Open a meas_base ticket to make this function pay attention to
1695 # the configs, and move this from being a Task method to a free
1696 # function that takes column names as args.
1697
1698 if coord_unit not in ["degrees", "radians"]:
1699 raise ValueError(f"Invalid coordinate unit '{coord_unit}'; must be 'degrees' or 'radians'")
1700
1701 if len(columns) != 3:
1702 raise ValueError("`columns` must contain exactly three elements for [id, ra, dec]")
1703
1704 idCol, raCol, decCol = columns
1705
1706 for col in columns:
1707 if col not in table.colnames:
1708 raise KeyError(f"Missing required column: '{col}'")
1709
1710 outputCatalog = lsst.afw.table.SourceCatalog(self.schema)
1711 outputCatalog.reserve(len(table))
1712
1713 for row in table:
1714 outputRecord = outputCatalog.addNew()
1715 outputRecord.setId(row[idCol])
1716 outputRecord.setCoord(
1717 lsst.geom.SpherePoint(row[raCol], row[decCol], getattr(lsst.geom, coord_unit))
1718 )
1719
1720 return outputCatalog