measurementDriver.py
1# This file is part of pipe_tasks.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
21
22__all__ = [
23 "SingleBandMeasurementDriverConfig",
24 "SingleBandMeasurementDriverTask",
25 "MultiBandMeasurementDriverConfig",
26 "MultiBandMeasurementDriverTask",
27 "ForcedMeasurementDriverConfig",
28 "ForcedMeasurementDriverTask",
29]
30
31import copy
32import logging
33from abc import ABCMeta, abstractmethod
34
35import astropy.table
36import lsst.afw.detection as afwDetection
37import lsst.afw.geom as afwGeom
38import lsst.afw.image as afwImage
39import lsst.afw.math as afwMath
40import lsst.afw.table as afwTable
41import lsst.geom
42import lsst.meas.algorithms as measAlgorithms
43import lsst.meas.base as measBase
44import lsst.meas.deblender as measDeblender
45import lsst.meas.extensions.scarlet as scarlet
46import lsst.pipe.base as pipeBase
47import lsst.scarlet.lite as scl
48import numpy as np
49from lsst.pex.config import Config, ConfigurableField, Field
50
51logging.basicConfig(level=logging.INFO)
52
53
55 """Base configuration for measurement driver tasks.
56
57 This class provides foundational configuration for its subclasses to handle
58 single-band and multi-band data. It defines variance scaling, detection,
59 deblending, measurement, aperture correction, and catalog calculation
60 subtasks, which are intended to be executed in sequence by the driver task.
61 """
62
63 doScaleVariance = Field[bool](doc="Scale variance plane using empirical noise?", default=False)
64
65 scaleVariance = ConfigurableField(
66 doc="Subtask to rescale variance plane", target=measAlgorithms.ScaleVarianceTask
67 )
68
69 doDetect = Field[bool](doc="Run the source detection algorithm?", default=True)
70
71 detection = ConfigurableField(
72 doc="Subtask to detect sources in the image", target=measAlgorithms.SourceDetectionTask
73 )
74
75 doDeblend = Field[bool](doc="Run the source deblending algorithm?", default=True)
76 # N.B. The 'deblend' configurable field should be defined in subclasses.
77
78 doMeasure = Field[bool](doc="Run the source measurement algorithm?", default=True)
79
80 measurement = ConfigurableField(
81 doc="Subtask to measure sources and populate the output catalog",
82 target=measBase.SingleFrameMeasurementTask,
83 )
84
85 psfCache = Field[int](
86 doc="Maximum number of PSFs to cache, preventing repeated PSF evaluations at the same "
87 "point across different measurement plugins. Defaults to -1, which auto-sizes the cache "
88 "based on the plugin count.",
89 default=-1,
90 )
91
92 checkUnitsParseStrict = Field[str](
93 doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
94 default="raise",
95 )
96
97 doApCorr = Field[bool](
98 doc="Apply aperture corrections? If yes, your image must have an aperture correction map",
99 default=False,
100 )
101
102 applyApCorr = ConfigurableField(
103 doc="Subtask to apply aperture corrections",
104 target=measBase.ApplyApCorrTask,
105 )
106
107 doRunCatalogCalculation = Field[bool](doc="Run catalogCalculation task?", default=False)
108
109 catalogCalculation = ConfigurableField(
110 doc="Subtask to run catalogCalculation plugins on catalog", target=measBase.CatalogCalculationTask
111 )
112
113 doOptions = [
114 "doScaleVariance",
115 "doDetect",
116 "doDeblend",
117 "doMeasure",
118 "doApCorr",
119 "doRunCatalogCalculation",
120 ]
121
122 def validate(self):
123 """Ensure that at least one processing step is enabled."""
124 super().validate()
125
126 if not any(getattr(self, opt) for opt in self.doOptions):
127 raise ValueError(f"At least one of these options must be enabled: {self.doOptions}")
128
129
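# Illustrative sketch (not part of the module API): with the defaults above,
# detection, deblending, and measurement are enabled; turning every step off
# makes `validate` raise, e.g.
#
#     config = MeasurementDriverBaseConfig()
#     config.doDetect = False
#     config.doDeblend = False
#     config.doMeasure = False
#     config.validate()  # raises ValueError: no processing step is enabled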
130class MeasurementDriverBaseTask(pipeBase.Task, metaclass=ABCMeta):
131 """Base class for the mid-level driver running variance scaling, detection,
132 deblending, measurement, aperture correction, and catalog calculation in
133 one go.
134
135 Users don't need to Butlerize their input data, which is a significant
136 advantage for quick data exploration and testing. This driver simplifies
137 the process of applying measurement algorithms to images by abstracting
138 away low-level implementation details such as Schema and table boilerplate.
139 It's a convenient way to process images into catalogs with a user-friendly
140 interface for non-developers while allowing extensive configuration and
141 integration into unit tests for developers. It also considerably improves
142 how demos and workflows are showcased in Jupyter notebooks.
143
144 Parameters
145 ----------
146 schema :
147 Schema used to create the output `~lsst.afw.table.SourceCatalog`,
148 modified in place with fields that will be written by this task.
149 peakSchema :
150 Schema of Footprint Peaks that will be passed to the deblender.
151 **kwargs :
152 Additional kwargs to pass to lsst.pipe.base.Task.__init__()
153
154 Notes
155 -----
156 Subclasses (e.g., single-band vs. multi-band) share most methods and config
157 options but differ in handling and validating inputs by overriding the base
158 config class and any methods that require their own logic.
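
The enabled steps are applied in a fixed order; schematically (a simplified
sketch of the control flow, not the exact implementation)::

    scale variance -> detect -> deblend -> measure
    -> aperture correction -> catalog calculation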
159 """
160
161 ConfigClass = MeasurementDriverBaseConfig
162 _DefaultName = "measurementDriverBase"
163 _Deblender = ""
164
165 def __init__(self, schema: afwTable.Schema = None, peakSchema: afwTable.Schema = None, **kwargs: dict):
166 super().__init__(**kwargs)
167
168 # Schema for the output catalog.
169 self.schema = schema
170
171 # Schema for deblender peaks.
172 self.peakSchema = peakSchema
173
174 # Placeholders for subclasses to populate.
176 self.scaleVariance: measAlgorithms.ScaleVarianceTask
177 self.detection: measAlgorithms.SourceDetectionTask
178 self.deblend: measDeblender.SourceDeblendTask | scarlet.ScarletDeblendTask
179 self.measurement: measBase.SingleFrameMeasurementTask | measBase.ForcedMeasurementTask
180 self.applyApCorr: measBase.ApplyApCorrTask
181 self.catalogCalculation: measBase.CatalogCalculationTask
182
183 # Store the initial Schema to use for reinitialization if necessary.
185 # To safeguard against user tampering and ensure predictable behavior,
186 # the following attribute can only be modified within the class using
187 # a controlled setter.
188 super().__setattr__("initSchema", copy.deepcopy(schema))
189
190 def __setattr__(self, name, value):
191 """Prevent external modifications of the initial Schema."""
192 if name == "initSchema":
193 raise AttributeError(f"Cannot modify {name} directly")
194 super().__setattr__(name, value)
195
196 @abstractmethod
197 def run(self, *args, **kwargs) -> pipeBase.Struct:
198 """Run the measurement driver task. Subclasses must implement this
199 method using their own logic to handle single-band or multi-band data.
200 """
201 raise NotImplementedError("This is not implemented on the base class")
202
203 def _ensureValidInputs(
204 self,
205 catalog: afwTable.SourceCatalog | None,
206 ):
207 """Perform validation and adjustments of inputs without heavy
208 computation.
209
210 Parameters
211 ----------
212 catalog :
213 Catalog to be extended by the driver task.
214 """
215 # Validate the configuration before proceeding.
216 self.config.validate()
217
218 if self.config.doDetect:
219 if catalog is not None:
220 raise RuntimeError(
221 "An input catalog was given to bypass detection, but 'doDetect' is still on"
222 )
223 else:
224 if catalog is None:
225 raise RuntimeError("Cannot run without detection if no 'catalog' is provided")
226
227 def _initializeSchema(self, catalog: afwTable.SourceCatalog = None):
228 """Initialize the Schema to be used for constructing the subtasks.
229
230 Though it may seem clunky, this workaround is necessary to ensure
231 Schema consistency across all subtasks.
232
233 Parameters
234 ----------
235 catalog :
236 Catalog from which to extract the Schema. If not provided, the
237 user-provided Schema is used; if that was also not provided at
238 initialization, a minimal Schema will be used.
239 """
240 # If the Schema has been modified (either by subtasks or externally by
241 # the user), reset it to the initial state before creating subtasks.
242 # This would be necessary when running the same driver task multiple
243 # times with different configs/inputs.
244 if self.schema != self.initSchema:
245 self.schema = copy.deepcopy(self.initSchema)
246
247 if catalog is None:
248 if self.schema is None:
249 # Create a minimal Schema that will be extended by tasks.
250 self.schema = afwTable.SourceTable.makeMinimalSchema()
251
252 # Add coordinate error fields to avoid missing field issues.
253 self._addCoordErrorFieldsIfMissing(self.schema)
254 else:
255 if self.schema is not None:
256 self.log.warning(
257 "Both a catalog and a Schema were provided; using the Schema from the catalog only"
258 )
259
260 # Since a catalog is provided, use its Schema as the base.
261 catalogSchema = catalog.schema
262
263 # Ensure that the Schema has coordinate error fields.
264 self._addCoordErrorFieldsIfMissing(catalogSchema)
265
266 # Create a SchemaMapper that maps from catalogSchema to a new one
267 # it will create.
268 self.mapper = afwTable.SchemaMapper(catalogSchema)
269
270 # Add everything from catalogSchema to output Schema.
271 self.mapper.addMinimalSchema(catalogSchema, True)
272
273 # Get the output Schema from the SchemaMapper and assign it as the
274 # Schema to be used for constructing the subtasks.
275 self.schema = self.mapper.getOutputSchema()
276
277 if isinstance(self, ForcedMeasurementDriverTask):
278 # A trick also used in https://github.com/lsst/ap_pipe/blob/
279 # a221d4e43e2abac44b1cbed0533b9e220c5a67f4/python/lsst/ap/pipe/
280 # matchSourceInjected.py#L161
281 self.schema.addField("deblend_nChild", "I", "Needed for minimal forced photometry schema")
282
283 def _addCoordErrorFieldsIfMissing(self, schema: afwTable.Schema):
284 """Add coordinate error fields to the schema in-place if they are not
285 already present.
286
287 Parameters
288 ----------
289 schema :
290 Schema to be checked for coordinate error fields.
291 """
292 if not any(
293 errorField in schema.getNames()
294 for errorField in ("coord_raErr", "coord_decErr", "coord_ra_dec_Cov")
295 ):
296 afwTable.CoordKey.addErrorFields(schema)
297
298 def _makeSubtasks(self):
299 """Construct subtasks based on the configuration and the Schema."""
300 if self.schema is None and any(
301 getattr(self.config, attr) for attr in self.config.doOptions if attr != "doScaleVariance"
302 ):
303 raise RuntimeError(
304 "Cannot create requested subtasks without a Schema; "
305 "ensure one is provided explicitly or via a catalog"
306 )
307
308 if self.config.doScaleVariance:
309 self.makeSubtask("scaleVariance")
310
311 if isinstance(self, ForcedMeasurementDriverTask):
312 # Always True for forced measurement.
313 if self.config.doMeasure:
314 self.makeSubtask("measurement", refSchema=self.schema)
315
316 # In forced measurement, where the measurement catalog is built
317 # internally, we need to initialize applyApCorr with the full
318 # schema after measurement plugins have added their fields;
319 # otherwise, it won’t see them and will silently skip applying
320 # aperture corrections.
321 # A related example can be found in this reference:
322 # https://github.com/lsst/drp_tasks/blob/
323 # b565995b995cd5f0e40196f8d3c89cafb89aa515/python/lsst/drp/tasks/
324 # forcedPhotCoadd.py#L203
325 if self.config.doApCorr:
326 self.makeSubtask("applyApCorr", schema=self.measurement.schema)
327
328 # Same reference as above uses `measurement.schema` to make the
329 # catalogCalculation subtask, so we do the same here.
330 if self.config.doRunCatalogCalculation:
331 self.makeSubtask("catalogCalculation", schema=self.measurement.schema)
332 else:
333 if self.config.doDetect:
334 self.makeSubtask("detection", schema=self.schema)
335
336 if self.config.doDeblend:
337 self.makeSubtask("deblend", schema=self.schema, peakSchema=self.peakSchema)
338
339 if self.config.doMeasure:
340 self.makeSubtask("measurement", schema=self.schema)
341
342 if self.config.doApCorr:
343 self.makeSubtask("applyApCorr", schema=self.measurement.schema)
344
345 if self.config.doRunCatalogCalculation:
346 self.makeSubtask("catalogCalculation", schema=self.schema)
347
348 def _prepareSchemaAndSubtasks(
349 self, catalog: afwTable.SourceCatalog | None
350 ) -> afwTable.SourceCatalog | None:
351 """Ensure subtasks are properly initialized according to the
352 configuration and the provided catalog.
353
354 Parameters
355 ----------
356 catalog :
357 Optional catalog to be used for initializing the Schema and the
358 subtasks.
359
360 Returns
361 -------
362 catalog :
363 Updated catalog to be passed to the subtasks, if it was provided.
364 """
365 # Set up the Schema before creating subtasks.
366 self._initializeSchema(catalog)
367
368 # Create subtasks, passing the same Schema to each subtask's
369 # constructor that requires it.
370 self._makeSubtasks()
371
372 # Check that all units in the Schema are valid Astropy unit strings.
373 self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
374
375 # Adjust the catalog Schema to align with changes made by the subtasks.
376 if catalog:
377 catalog = self._updateCatalogSchema(catalog)
378
379 return catalog
380
381 def _scaleVariance(self, exposure: afwImage.Exposure, band: str = "a single"):
382 """Scale the variance plane of an exposure to match the observed
383 variance.
384
385 Parameters
386 ----------
387 exposure :
388 Exposure on which to run the variance scaling algorithm.
389 band :
390 Band associated with the exposure. Used for logging.
391 """
392 self.log.info(f"Scaling variance plane for {band} band")
393 varScale = self.scaleVariance.run(exposure.maskedImage)
394 exposure.getMetadata().add("VARIANCE_SCALE", varScale)
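# Illustrative sketch (assuming variance scaling was enabled): the factor that
# was applied can be read back from the exposure metadata afterwards, e.g.
#
#     scale = exposure.getMetadata().getScalar("VARIANCE_SCALE")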
395
397 self, catalog: afwTable.SourceCatalog | dict[str, afwTable.SourceCatalog]
399 """Make a catalog or catalogs contiguous if they are not already.
400
401 Parameters
402 ----------
403 catalog :
404 Catalog or dictionary of catalogs with bands as keys to be made
405 contiguous.
406
407 Returns
408 -------
409 catalog :
410 Contiguous catalog or dictionary of contiguous catalogs.
411 """
412 if isinstance(catalog, dict):
413 for band, cat in catalog.items():
414 if not cat.isContiguous():
415 self.log.info(f"{band}-band catalog is not contiguous; making it contiguous")
416 catalog[band] = cat.copy(deep=True)
417 else:
418 if not catalog.isContiguous():
419 self.log.info("Catalog is not contiguous; making it contiguous")
420 catalog = catalog.copy(deep=True)
421 return catalog
422
423 def _updateCatalogSchema(self, catalog: afwTable.SourceCatalog) -> afwTable.SourceCatalog:
424 """Update the Schema of the provided catalog to incorporate changes
425 made by the configured subtasks.
426
427 Parameters
428 ----------
429 catalog :
430 Catalog to be updated with the Schema changes.
431
432 Returns
433 -------
434 updatedCatalog :
435 Catalog with the updated Schema.
436 """
437 # Create an empty catalog with the Schema required by the subtasks that
438 # are configured to run.
439 updatedCatalog = afwTable.SourceCatalog(self.schema)
440
441 # Transfer all records from the original catalog to the new catalog,
442 # using the SchemaMapper to copy values.
443 updatedCatalog.extend(catalog, mapper=self.mapper)
444
445 # Return the updated catalog, preserving the records while applying the
446 # updated Schema.
447 return updatedCatalog
448
449 def _detectSources(
450 self, exposure: afwImage.Exposure | afwImage.MultibandExposure, idGenerator: measBase.IdGenerator
451 ) -> tuple[afwTable.SourceCatalog, afwMath.BackgroundList]:
452 """Run the detection subtask to identify sources in the image.
453
454 Parameters
455 ----------
456 exposure :
457 Exposure on which to run the detection algorithm.
458 idGenerator :
459 Generator for unique source IDs.
460
461 Returns
462 -------
463 catalog :
464 A catalog containing detected sources.
465 backgroundList :
466 A list of background models obtained from the detection process,
467 if available.
468 """
469 self.log.info(f"Running detection on a {exposure.width}x{exposure.height} pixel exposure")
470
471 # Create an empty source table with the known Schema into which
472 # detected sources will be placed next.
473 table = afwTable.SourceTable.make(self.schema, idGenerator.make_table_id_factory())
474
475 # Run the detection task on the exposure and make a source catalog.
476 detections = self.detection.run(table, exposure)
477 catalog = detections.sources
478 backgroundList = afwMath.BackgroundList()
479
480 # Get the background model from the detection task, if available.
481 if hasattr(detections, "background") and detections.background:
482 for bg in detections.background:
483 backgroundList.append(bg)
484
485 return catalog, backgroundList
486
487 @abstractmethod
488 def _deblendSources(self, *args, **kwargs):
489 """Run the deblending subtask to separate blended sources. Subclasses
490 must implement this method to handle task-specific deblending logic.
491 """
492 raise NotImplementedError("This is not implemented on the base class")
493
494 def _measureSources(
495 self,
496 exposure: afwImage.Exposure,
497 catalog: afwTable.SourceCatalog,
498 idGenerator: measBase.IdGenerator,
499 refCat: afwTable.SourceCatalog | None = None,
500 ):
501 """Run the measurement subtask to compute properties of sources.
502
503 Parameters
504 ----------
505 exposure :
506 Exposure on which to run the measurement algorithm.
507 catalog :
508 Catalog containing sources on which to run the measurement subtask.
509 idGenerator :
510 Generator for unique source IDs.
511 refCat :
512 Reference catalog to be used for forced measurements, if any.
513 If not provided, the measurement will be run on the sources in the
514 catalog in a standard manner without reference.
515 """
516 if refCat:
517 # Note that refCat does not have a WCS, so we need to
518 # extract the WCS from the exposure.
519 refWcs = exposure.getWcs()
520 # Run forced measurement since a reference catalog is provided.
521 self.measurement.run(
522 measCat=catalog,
523 exposure=exposure,
524 refCat=refCat,
525 refWcs=refWcs,
526 exposureId=idGenerator.catalog_id,
527 )
528 else:
529 # Run standard measurement if no reference catalog is provided.
530 self.measurement.run(measCat=catalog, exposure=exposure, exposureId=idGenerator.catalog_id)
531
532 def _applyApCorr(
533 self, exposure: afwImage.Exposure, catalog: afwTable.SourceCatalog, idGenerator: measBase.IdGenerator
534 ):
535 """Apply aperture corrections to the catalog.
536
537 Parameters
538 ----------
539 exposure :
540 Exposure on which to apply aperture corrections.
541 catalog :
542 Catalog to be corrected using the aperture correction map from
543 the exposure.
544 idGenerator :
545 Generator for unique source IDs.
546 """
547 apCorrMap = exposure.getInfo().getApCorrMap()
548 if apCorrMap is None:
549 self.log.warning(
550 "Image does not have valid aperture correction map for catalog id "
551 f"{idGenerator.catalog_id}; skipping aperture correction"
552 )
553 else:
554 self.applyApCorr.run(catalog=catalog, apCorrMap=apCorrMap)
555
556 def _runCatalogCalculation(self, catalog: afwTable.SourceCatalog):
557 """Run the catalog calculation plugins on the catalog.
558
559 Parameters
560 ----------
561 catalog :
562 Catalog to be processed by the catalog calculation subtask.
563 """
564 self.catalogCalculation.run(catalog)
565
566 def _processCatalog(
567 self,
568 exposure: afwImage.Exposure,
569 catalog: afwTable.SourceCatalog,
570 idGenerator: measBase.IdGenerator,
571 band: str = "a single",
572 refCat: afwTable.SourceCatalog | None = None,
573 ) -> afwTable.SourceCatalog:
574 """Process a catalog through measurement, aperture correction, and
575 catalog calculation subtasks.
576
577 Parameters
578 ----------
579 exposure :
580 Exposure associated with the catalog.
581 catalog :
582 Catalog to be processed by the subtasks.
583 idGenerator :
584 Generator for unique source IDs.
585 band :
586 Band associated with the exposure and catalog. Used for logging.
587 refCat :
588 Reference catalog for forced measurements. If not provided, the
589 measurement will be run on the sources in the catalog in a standard
590 manner without reference.
591
592 Returns
593 -------
594 catalog :
595 Catalog after processing through the configured subtasks.
596 """
597 # Set the PSF cache capacity to cache repeated PSF evaluations at the
598 # same point coming from different measurement plugins.
599 if self.config.psfCache > 0:
600 # Set a hard limit on the number of PSFs to cache.
601 exposure.psf.setCacheCapacity(self.config.psfCache)
602 else:
603 # Auto-size the cache based on the number of measurement
604 # plugins. We assume each algorithm tries to evaluate the PSF
605 # twice, which is more than enough since many don't evaluate it
606 # at all, and there's no *good* reason for any algorithm to
607 # evaluate it more than once.
608 # (Adopted from drp_tasks/ForcedPhotCoaddTask)
609 exposure.psf.setCacheCapacity(2 * len(self.config.measurement.plugins.names))
610
611 # Measure properties of sources in the catalog.
612 if self.config.doMeasure:
613 self.log.info(
614 f"Measuring {len(catalog)} sources in {band} band "
615 f"using '{self.measurement.__class__.__name__}'"
616 )
617 self._measureSources(exposure, catalog, idGenerator, refCat=refCat)
618
619 # Ensure contiguity again.
620 catalog = self._toContiguous(catalog)
621
622 # Apply aperture corrections to the catalog.
623 if self.config.doApCorr:
624 self.log.info(f"Applying aperture corrections to {band} band")
625 self._applyApCorr(exposure, catalog, idGenerator)
626
627 # Run catalogCalculation on the catalog.
628 if self.config.doRunCatalogCalculation:
629 self.log.info(f"Running catalog calculation on {band} band")
630 self._runCatalogCalculation(catalog)
631
632 self.log.info(
633 f"Finished processing for {band} band; output catalog has {catalog.schema.getFieldCount()} "
634 f"fields and {len(catalog)} records"
635 )
636
637 return catalog
638
639
641 """Configuration for the single-band measurement driver task."""
642
643 deblend = ConfigurableField(target=measDeblender.SourceDeblendTask, doc="Deblender for single-band data.")
644
645
647 """Mid-level driver for processing single-band data.
648
649 Offers a helper method for direct handling of raw image data in addition to
650 the standard single-band exposure.
651
652 Examples
653 --------
654 Here is an example of how to use this class to run variance scaling,
655 detection, deblending, and measurement on a single-band exposure:
656
657 >>> from lsst.pipe.tasks.measurementDriver import (
658 ... SingleBandMeasurementDriverConfig,
659 ... SingleBandMeasurementDriverTask,
660 ... )
661 >>> import lsst.meas.extensions.shapeHSM # To register its plugins
662 >>> config = SingleBandMeasurementDriverConfig()
663 >>> config.doScaleVariance = True
664 >>> config.doDetect = True
665 >>> config.doDeblend = True
666 >>> config.doMeasure = True
667 >>> config.scaleVariance.background.binSize = 64
668 >>> config.detection.thresholdValue = 5.5
669 >>> config.deblend.tinyFootprintSize = 3
670 >>> config.measurement.plugins.names |= [
671 ... "base_SdssCentroid",
672 ... "base_SdssShape",
673 ... "ext_shapeHSM_HsmSourceMoments",
674 ... ]
675 >>> config.measurement.slots.psfFlux = None
676 >>> config.measurement.doReplaceWithNoise = False
677 >>> exposure = butler.get("deepCoadd", dataId=...)
678 >>> driver = SingleBandMeasurementDriverTask(config=config)
679 >>> results = driver.run(exposure)
680 >>> results.catalog.writeFits("meas_catalog.fits")
681
682 Alternatively, if an exposure is not available, the driver can also process
683 raw image data:
684
685 >>> image = ...
686 >>> mask = ...
687 >>> variance = ...
688 >>> wcs = ...
689 >>> psf = ...
690 >>> photoCalib = ...
691 >>> results = driver.runFromImage(
692 ... image, mask, variance, wcs, psf, photoCalib
693 ... )
694 >>> results.catalog.writeFits("meas_catalog.fits")
695 """
696
697 ConfigClass = SingleBandMeasurementDriverConfig
698 _DefaultName = "singleBandMeasurementDriver"
699 _Deblender = "meas_deblender"
700
701 def __init__(self, *args, **kwargs):
702 super().__init__(*args, **kwargs)
703
704 self.deblend: measDeblender.SourceDeblendTask
705 self.measurement: measBase.SingleFrameMeasurementTask
706
707 def run(
708 self,
709 exposure: afwImage.Exposure,
710 catalog: afwTable.SourceCatalog | None = None,
711 idGenerator: measBase.IdGenerator | None = None,
712 ) -> pipeBase.Struct:
713 """Process a single-band exposure through the configured subtasks and
714 return the results as a struct.
715
716 Parameters
717 ----------
718 exposure :
719 The exposure on which to run the driver task.
720 catalog :
721 Catalog to be extended by the driver task. If not provided, an
722 empty catalog will be created and populated.
723 idGenerator :
724 Object that generates source IDs and provides random seeds.
725
726 Returns
727 -------
728 result :
729 Results as a struct with attributes:
730
731 ``catalog``
732 Catalog containing the measured sources
733 (`~lsst.afw.table.SourceCatalog`).
734 ``backgroundList``
735 List of backgrounds (`list[~lsst.afw.math.Background]`). Only
736 populated if detection is enabled.
737 """
738
739 # Validate inputs before proceeding.
740 self._ensureValidInputs(catalog)
741
742 # Prepare the Schema and subtasks for processing.
743 catalog = self._prepareSchemaAndSubtasks(catalog)
744
745 # Generate catalog IDs consistently across subtasks.
746 if idGenerator is None:
747 idGenerator = measBase.IdGenerator()
748
749 # Scale the variance plane. If enabled, this should be done before
750 # detection.
751 if self.config.doScaleVariance:
752 self._scaleVariance(exposure)
753
754 # Detect sources in the image and populate the catalog.
755 if self.config.doDetect:
756 catalog, backgroundList = self._detectSources(exposure, idGenerator)
757 else:
758 self.log.info("Skipping detection; using detections from the provided catalog")
759 backgroundList = None
760
761 # Deblend detected sources and update the catalog.
762 if self.config.doDeblend:
763 catalog = self._deblendSources(exposure, catalog)
764 else:
765 self.log.info("Skipping deblending")
766
767 # Process catalog through measurement, aperture correction, and catalog
768 # calculation subtasks.
769 catalog = self._processCatalog(exposure, catalog, idGenerator)
770
771 return pipeBase.Struct(catalog=catalog, backgroundList=backgroundList)
772
773 def runFromImage(
774 self,
775 image: afwImage.MaskedImage | afwImage.Image | np.ndarray,
776 mask: afwImage.Mask | np.ndarray = None,
777 variance: afwImage.Image | np.ndarray = None,
778 wcs: afwGeom.SkyWcs = None,
779 psf: afwDetection.Psf | np.ndarray = None,
780 photoCalib: afwImage.PhotoCalib = None,
781 catalog: afwTable.SourceCatalog = None,
782 idGenerator: measBase.IdGenerator = None,
783 ) -> pipeBase.Struct:
784 """Convert image data to an `Exposure`, then run it through the
785 configured subtasks.
786
787 Parameters
788 ----------
789 image :
790 Input image data. Will be converted into an `Exposure` before
791 processing.
792 mask :
793 Mask data for the image. Used if ``image`` is a bare `array` or
794 `Image`.
795 variance :
796 Variance plane data for the image.
797 wcs :
798 World Coordinate System to associate with the exposure that will
799 be created from ``image``.
800 psf :
801 PSF model for the exposure.
802 photoCalib :
803 Photometric calibration model for the exposure.
804 catalog :
805 Catalog to be extended by the driver task. If not provided, a new
806 catalog will be created during detection and populated.
807 idGenerator :
808 Generator for unique source IDs.
809
810 Returns
811 -------
812 result :
813 Results as a struct with attributes:
814
815 ``catalog``
816 Catalog containing the measured sources
817 (`~lsst.afw.table.SourceCatalog`).
818 ``backgroundList``
819 List of backgrounds (`list[~lsst.afw.math.Background]`).
820 """
821 # Convert raw image data into an Exposure.
822 if isinstance(image, np.ndarray):
823 image = afwImage.makeImageFromArray(image)
824 if isinstance(mask, np.ndarray):
825 mask = afwImage.makeMaskFromArray(mask)
826 if isinstance(variance, np.ndarray):
827 variance = afwImage.makeImageFromArray(variance)
828 if isinstance(image, afwImage.Image):
829 image = afwImage.makeMaskedImage(image, mask, variance)
830
831 # By now, the input should already be - or have been converted to - a
832 # MaskedImage.
833 if isinstance(image, afwImage.MaskedImage):
834 exposure = afwImage.makeExposure(image, wcs)
835 else:
836 raise TypeError(f"Unsupported 'image' type: {type(image)}")
837
838 if psf is not None:
839 if isinstance(psf, np.ndarray):
840 # Create a FixedKernel using the array.
841 psf /= psf.sum()
842 kernel = afwMath.FixedKernel(afwImage.makeImageFromArray(psf))
843 # Create a KernelPsf using the kernel.
844 psf = afwDetection.KernelPsf(kernel)
845 elif not isinstance(psf, afwDetection.Psf):
846 raise TypeError(f"Unsupported 'psf' type: {type(psf)}")
847 exposure.setPsf(psf)
848
849 if photoCalib is not None:
850 exposure.setPhotoCalib(photoCalib)
851
852 return self.run(exposure, catalog=catalog, idGenerator=idGenerator)
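# Illustrative sketch (hypothetical arrays; `driver` configured as in the class
# example above): `runFromImage` also accepts bare NumPy arrays, which is handy
# for quick synthetic tests without any Butler or afw setup.
#
#     import numpy as np
#     rng = np.random.default_rng(42)
#     img = rng.normal(0.0, 1.0, (200, 200)).astype(np.float32)
#     var = np.ones_like(img)
#     msk = np.zeros(img.shape, dtype=np.int32)
#     yy, xx = np.mgrid[:21, :21]
#     psf = np.exp(-0.5 * ((xx - 10.0) ** 2 + (yy - 10.0) ** 2) / 2.0**2)
#     results = driver.runFromImage(img, mask=msk, variance=var, psf=psf)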
853
854 def _deblendSources(
855 self, exposure: afwImage.Exposure, catalog: afwTable.SourceCatalog
856 ) -> afwTable.SourceCatalog:
857 """Run single-band deblending given an exposure and a catalog.
858
859 Parameters
860 ----------
861 exposure :
862 Exposure on which to run the deblending algorithm.
863 catalog :
864 Catalog containing sources to be deblended.
865
866 Returns
867 -------
868 catalog :
869 Catalog after deblending, with sources separated into their
870 individual components if they were deblended.
871 """
872 self.log.info(f"Deblending using '{self._Deblender}' on {len(catalog)} detection footprints")
873 self.deblend.run(exposure=exposure, sources=catalog)
874 # The deblender may not produce contiguous catalogs; ensure
875 # contiguity for the subsequent subtasks.
876 return self._toContiguous(catalog)
877
878
880 """Configuration for the multi-band measurement driver task."""
881
882 deblend = ConfigurableField(
883 target=scarlet.ScarletDeblendTask, doc="Scarlet deblender for multi-band data"
884 )
885
886 doConserveFlux = Field[bool](
887 doc="Whether to use the deblender models as templates to re-distribute the flux from "
888 "the 'exposure' (True), or to perform measurements on the deblender model footprints.",
889 default=False,
890 )
891
892 measureOnlyInRefBand = Field[bool](
893 doc="If True, all measurements downstream of deblending run only in the reference band that "
894 "was used for detection; otherwise, they are performed in all available bands, generating a "
895 "catalog for each. Regardless of this setting, deblending still uses all available bands.",
896 default=False,
897 )
898
899 removeScarletData = Field[bool](
900 doc="Whether or not to remove `ScarletBlendData` for each blend in order to save memory. "
901 "If set to True, some sources may end up with missing footprints in catalogs other than the "
902 "reference-band catalog, leading to failures in subsequent measurements that require footprints. "
903 "For example, keep this False if `measureOnlyInRefBand` is set to False and "
904 "`measurement.doReplaceWithNoise` to True, in order to make the footprints available in "
905 "non-reference bands in addition to the reference band.",
906 default=False,
907 )
908
909 updateFluxColumns = Field[bool](
910 doc="Whether or not to update the `deblend_*` columns in the catalog. This should only be "
911 "True when the input catalog schema already contains those columns.",
912 default=True,
913 )
914
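# Illustrative configuration sketch of the trade-off described above: when
# measuring in every band with noise replacement enabled, keep the scarlet
# blend data so footprints remain available outside the reference band.
#
#     config = MultiBandMeasurementDriverConfig()
#     config.measureOnlyInRefBand = False
#     config.measurement.doReplaceWithNoise = True
#     config.removeScarletData = False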
915
917 """Mid-level driver for processing multi-band data.
918
919 The default behavior is to run detection on the reference band, use all
920 available bands for deblending, and then process everything downstream
921 separately for each band, making per-band catalogs, unless configured
922 otherwise. This subclass provides functionality for handling a single-band
923 exposure and a list of single-band exposures in addition to a standard
924 multi-band exposure.
925
926 Examples
927 --------
928 Here is an example of how to use this class to run variance scaling,
929 detection, deblending, measurement, and aperture correction on a multi-band
930 exposure:
931
932 >>> from lsst.afw.image import MultibandExposure
933 >>> from lsst.pipe.tasks.measurementDriver import (
934 ... MultiBandMeasurementDriverConfig,
935 ... MultiBandMeasurementDriverTask,
936 ... )
937 >>> import lsst.meas.extensions.shapeHSM # To register its plugins
938 >>> config = MultiBandMeasurementDriverConfig()
939 >>> config.doScaleVariance = True
940 >>> config.doDetect = True
941 >>> config.doDeblend = True
942 >>> config.doMeasure = True
943 >>> config.doApCorr = True
944 >>> config.scaleVariance.background.binSize = 64
945 >>> config.detection.thresholdValue = 5.5
946 >>> config.deblend.minSNR = 42.0
947 >>> config.deblend.maxIter = 20
948 >>> config.measurement.plugins.names |= [
949 ... "base_SdssCentroid",
950 ... "base_SdssShape",
951 ... "ext_shapeHSM_HsmSourceMoments",
952 ... ]
953 >>> config.measurement.slots.psfFlux = None
954 >>> config.measurement.doReplaceWithNoise = False
955 >>> config.applyApCorr.doFlagApCorrFailures = False
956 >>> mExposure = MultibandExposure.fromButler(
957 ... butler, ["g", "r", "i"], "deepCoadd_calexp", ...
958 ... )
959 >>> driver = MultiBandMeasurementDriverTask(config=config)
960 >>> results = driver.run(mExposure, "r")
961 >>> for band, catalog in results.catalogs.items():
962 ... catalog.writeFits(f"meas_catalog_{band}.fits")
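
If separate per-band exposures are at hand instead of a `MultibandExposure`,
they can be passed as a list together with their band labels (a sketch;
``exp_g``, ``exp_r``, and ``exp_i`` stand for matching single-band
`Exposure` objects):

>>> results = driver.run(
...     [exp_g, exp_r, exp_i], refBand="r", bands=["g", "r", "i"]
... )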
963 """
964
965 ConfigClass = MultiBandMeasurementDriverConfig
966 _DefaultName = "multiBandMeasurementDriver"
967 _Deblender = "scarlet"
968
969 def __init__(self, *args, **kwargs):
970 super().__init__(*args, **kwargs)
971
972 self.deblend: scarlet.ScarletDeblendTask
973
974 # Placeholder for the model data produced by the deblender. Caching
975 # this data has proven to be useful for debugging.
976 self.modelData: scl.io.ScarletModelData
977
978 def run(
979 self,
980 mExposure: afwImage.MultibandExposure | list[afwImage.Exposure] | afwImage.Exposure,
981 refBand: str | None = None,
982 bands: list[str] | None = None,
983 catalog: afwTable.SourceCatalog = None,
984 idGenerator: measBase.IdGenerator = None,
985 ) -> pipeBase.Struct:
986 """Process an exposure through the configured subtasks while using
987 multi-band information for deblending.
988
989 Parameters
990 ----------
991 mExposure :
992 Multi-band data. May be a `MultibandExposure`, a single-band
993 exposure (i.e., `Exposure`), or a list of single-band exposures
994 associated with different bands in which case ``bands`` must be
995 provided. If a single-band exposure is given, it will be treated as
996 a `MultibandExposure` that contains only that one band.
997 refBand :
998 Reference band to use for detection. Not required for single-band
999 exposures. If `measureOnlyInRefBand` is enabled while detection is
1000 disabled and a catalog of detected sources is provided, this
1001 should specify the band the sources were detected on (or the band
1002 you want to use to perform measurements on exclusively). If
1003 `measureOnlyInRefBand` is disabled instead in the latter scenario,
1004 ``refBand`` does not need to be provided.
1005 bands :
1006 List of bands associated with the exposures in ``mExposure``. Only
1007 required if ``mExposure`` is a list of single-band exposures. If
1008 provided for a multi-band exposure, it will be used to only process
1009 that subset of bands from the available ones in the exposure.
1010 catalog :
1011 Catalog to be extended by the driver task. If not provided, a new
1012 catalog will be created and populated.
1013 idGenerator :
1014 Generator for unique source IDs.
1015
1016 Returns
1017 -------
1018 result :
1019 Results as a struct with attributes:
1020
1021 ``catalogs``
1022 Dictionary of catalogs containing the measured sources with
1023 bands as keys (`dict[str, ~lsst.afw.table.SourceCatalog]`). If
1024 `measureOnlyInRefBand` is enabled or deblending is disabled,
1025 this will only contain the reference-band catalog; otherwise,
1026 it will contain a catalog for each band.
1027 ``backgroundList``
1028 List of backgrounds (`list[~lsst.afw.math.Background]`). Will
1029 be None if detection is disabled.
1030 ``modelData``
1031 Multiband scarlet models produced during deblending
1032 (`~lsst.scarlet.lite.io.ScarletModelData`). Will be None if
1033 deblending is disabled.
1034 """
1035
1036 # Validate inputs and adjust them as necessary.
1037 mExposure, refBand, bands = self._ensureValidInputs(mExposure, refBand, bands, catalog)
1038
1039 # Prepare the Schema and subtasks for processing.
1040 catalog = self._prepareSchemaAndSubtasks(catalog)
1041
1042 # Generate catalog IDs consistently across subtasks.
1043 if idGenerator is None:
1044 idGenerator = measBase.IdGenerator()
1045
1046 # Scale the variance plane. If enabled, this should be done before
1047 # detection.
1048 if self.config.doScaleVariance:
1049 # Here, we iterate over references to the exposures, not copies.
1050 for band in mExposure.bands:
1051 self._scaleVariance(mExposure[band], band=f"'{band}'")
1052
1053 # Detect sources in the reference band and populate the catalog.
1054 if self.config.doDetect:
1055 catalog, backgroundList = self._detectSources(mExposure[refBand], idGenerator)
1056 else:
1057 self.log.info("Skipping detection; using detections from provided catalog")
1058 backgroundList = None
1059
1060 # Deblend detected sources and update the catalog(s).
1061 if self.config.doDeblend:
1062 catalogs, self.modelData = self._deblendSources(mExposure, catalog, refBand=refBand)
1063 else:
1064 self.log.warning(
1065 "Skipping deblending; proceeding with the provided catalog in the reference band"
1066 )
1067 catalogs = {refBand: catalog}
1068 self.modelData = None
1069
1070 # Process catalog(s) through measurement, aperture correction, and
1071 # catalog calculation subtasks.
1072 for band, catalog in catalogs.items():
1073 exposure = mExposure[band]
1074 self._processCatalog(exposure, catalog, idGenerator, band=f"'{band}'")
1075
1076 return pipeBase.Struct(catalogs=catalogs, backgroundList=backgroundList, modelData=self.modelData)
1077
1078 def _ensureValidInputs(
1079 self,
1080 mExposure: afwImage.MultibandExposure | list[afwImage.Exposure] | afwImage.Exposure,
1081 refBand: str | None,
1082 bands: list[str] | None,
1083 catalog: afwTable.SourceCatalog | None = None,
1084 ) -> tuple[afwImage.MultibandExposure, str, list[str] | None]:
1085 """Perform validation and adjustments of inputs without heavy
1086 computation.
1087
1088 Parameters
1089 ----------
1090 mExposure :
1091 Multi-band data to be processed by the driver task.
1092 refBand :
1093 Reference band to use for detection or measurements.
1094 bands :
1095 List of bands associated with the exposures in ``mExposure``.
1096 catalog :
1097 Catalog to be extended by the driver task.
1098
1099 Returns
1100 -------
1101 mExposure :
1102 Multi-band exposure to be processed by the driver task.
1103 refBand :
1104 Reference band to use for detection or measurements.
1105 bands :
1106 List of bands associated with the exposures in ``mExposure``.
1107 """
1108
1109 # Perform basic checks that are shared with all driver tasks.
1110 super()._ensureValidInputs(catalog)
1111
1112 # Multi-band-specific validation and adjustments.
1113 if isinstance(mExposure, afwImage.MultibandExposure):
1114 if bands is not None:
1115 if any(b not in mExposure.bands for b in bands):
1116 raise ValueError(
1117 "Some bands in the 'bands' list are not present in the input multi-band exposure"
1118 )
1119 self.log.info(
1120 f"Using bands {bands} out of the available {mExposure.bands} in the multi-band exposure"
1121 )
1122 elif isinstance(mExposure, list):
1123 if bands is None:
1124 raise ValueError("The 'bands' list must be provided if 'mExposure' is a list")
1125 if len(bands) != len(mExposure):
1126 raise ValueError("Number of bands and exposures must match")
1127 elif isinstance(mExposure, afwImage.Exposure):
1128 if bands is not None and len(bands) != 1:
1129 raise ValueError(
1130 "The 'bands' list, if provided, must only contain a single band "
1131 "if a single-band exposure is given"
1132 )
1133 if bands is None and refBand is None:
1134 refBand = "unknown" # Placeholder for single-band deblending
1135 bands = [refBand]
1136 elif bands is None and refBand is not None:
1137 bands = [refBand]
1138 elif bands is not None and refBand is None:
1139 refBand = bands[0]
1140 else:
1141 raise TypeError(f"Unsupported 'mExposure' type: {type(mExposure)}")
1142
1143 # Convert mExposure to a MultibandExposure object with the bands
1144 # provided.
1145 mExposure = self._buildMultibandExposure(mExposure, bands)
1146
1147 if len(mExposure.bands) == 1:
1148 # N.B. Scarlet is designed to leverage multi-band information to
1149 # differentiate overlapping sources based on their spectral and
1150 # spatial profiles. However, it can also run on a single band and
1151 # often give better results than 'meas_deblender'.
1152 self.log.info(f"Running '{self._Deblender}' in single-band mode; make sure it was intended!")
1153 if refBand is None:
1154 refBand = mExposure.bands[0]
1155 self.log.info(
1156 "No reference band provided for single-band data; "
1157 f"using the only available band ('{refBand}') as the reference band"
1158 )
1159 else:
1160 if catalog is None:
1161 if self.config.measureOnlyInRefBand:
1162 measInfo = "and everything downstream of deblending"
1163 else:
1164 measInfo = (
1165 "while subtasks downstream of deblending will be run in each of "
1166 f"the {mExposure.bands} bands"
1167 )
1168 self.log.info(f"Using '{refBand}' as the reference band for detection {measInfo}")
1169
1170 # Final sanity checks after all the adjustments above.
1171 if refBand is None:
1172 raise ValueError("Reference band must be provided for multi-band data")
1173
1174 if refBand not in mExposure.bands:
1175 raise ValueError(f"Requested band '{refBand}' is not present in the multi-band exposure")
1176
1177 if bands is not None and refBand not in bands:
1178 raise ValueError(f"Reference band '{refBand}' is not in the list of 'bands' provided: {bands}")
1179
1180 return mExposure, refBand, bands
1181
1182 def _deblendSources(
1183 self, mExposure: afwImage.MultibandExposure, catalog: afwTable.SourceCatalog, refBand: str
1184 ) -> tuple[dict[str, afwTable.SourceCatalog], scl.io.ScarletModelData]:
1185 """Run multi-band deblending given a multi-band exposure and a catalog.
1186
1187 Parameters
1188 ----------
1189 mExposure :
1190 Multi-band exposure on which to run the deblending algorithm.
1191 catalog :
1192 Catalog containing sources to be deblended.
1193 refBand :
1194 Reference band used for detection or the band to use for
1195 measurements if `measureOnlyInRefBand` is enabled.
1196
1197 Returns
1198 -------
1199 catalogs :
1200 Dictionary of catalogs containing the deblended sources. If
1201 `measureOnlyInRefBand` is enabled, this will only contain the
1202 reference-band catalog; otherwise, it will contain a catalog for
1203 each band.
1204 modelData :
1205 Multiband scarlet models produced during deblending.
1206 """
1207 self.log.info(f"Deblending using '{self._Deblender}' on {len(catalog)} detection footprints")
1208
1209 # Run the deblender on the multi-band exposure.
1210 catalog, modelData = self.deblend.run(mExposure, catalog)
1211
1212 # Determine which bands to process post-deblending.
1213 bands = [refBand] if self.config.measureOnlyInRefBand else mExposure.bands
1214
1215 catalogs = {band: catalog.copy(deep=True) for band in bands}
1216 for band in bands:
1217 # The footprints need to be updated for the subsequent measurement.
1218 imageForRedistribution = mExposure[band] if self.config.doConserveFlux else None
1219 scarlet.io.updateCatalogFootprints(
1220 modelData=modelData,
1221 catalog=catalogs[band], # In-place modification
1222 band=band,
1223 imageForRedistribution=imageForRedistribution,
1224 removeScarletData=self.config.removeScarletData,
1225 updateFluxColumns=self.config.updateFluxColumns,
1226 )
1227
1228 return self._toContiguous(catalogs), modelData
1229
1230 def _buildMultibandExposure(
1231 self,
1232 mExposureData: afwImage.MultibandExposure | list[afwImage.Exposure] | afwImage.Exposure,
1233 bands: list[str] | None,
1234 ) -> afwImage.MultibandExposure:
1235 """Convert a single-band exposure or a list of single-band exposures to
1236 a `MultibandExposure` if not already of that type.
1237
1238 No conversion will be done if ``mExposureData`` is already a
1239 `MultibandExposure` except it will be subsetted to the bands provided.
1240
1241 Parameters
1242 ----------
1243 mExposureData :
1244 Input multi-band data.
1245 bands :
1246 List of bands associated with the exposures in ``mExposure``. Only
1247 required if ``mExposure`` is a list of single-band exposures. If
1248 provided while ``mExposureData`` is a ``MultibandExposure``, it
1249 will be used to select a specific subset of bands from the
1250 available ones.
1251
1252 Returns
1253 -------
1254 mExposure :
1255 Converted multi-band exposure.
1256 """
1257 if isinstance(mExposureData, afwImage.MultibandExposure):
1258 if bands and not set(bands).issubset(mExposureData.bands):
1259 raise ValueError(
1260 f"Requested bands {bands} are not a subset of available bands: {mExposureData.bands}"
1261 )
1262 return mExposureData[bands,] if bands and len(bands) > 1 else mExposureData
1263 elif isinstance(mExposureData, list):
1264 mExposure = afwImage.MultibandExposure.fromExposures(bands, mExposureData)
1265 elif isinstance(mExposureData, afwImage.Exposure):
1266 # We still need to build a multi-band exposure to satisfy scarlet
1267 # function's signature, even when using a single band.
1268 mExposure = afwImage.MultibandExposure.fromExposures(bands, [mExposureData])
1269
1270 # Attach the WCS from each input exposure to the corresponding band of
1271 # the multi-band exposure; otherwise, their WCS will be None,
1272 # potentially causing issues downstream. Note that afwImage does not do
1273 # this when constructing a MultibandExposure from exposures.
1274 for band, exposure in zip(bands, mExposureData):
1275 mExposure[band].setWcs(exposure.getWcs())
1276
1277 return mExposure
1278
1279
1281 """Configuration for the forced measurement driver task."""
1282
1283 measurement = ConfigurableField(
1284 target=measBase.ForcedMeasurementTask,
1285 doc="Measurement task for forced measurements. This should be a "
1286 "measurement task that does not perform detection.",
1287 )
1288
1289 def setDefaults(self):
1290 """Set default values for the configuration.
1291
1292 This method overrides the base class method to set `doDetect` and
1293 `doDeblend` to `False` by default, as this task is intended for forced
1294 measurements where detection and deblending are not performed. It also
1295 selects a default set of measurement plugins.
1296 """
1297 super().setDefaults()
1298 self.doDetect = False
1299 self.doDeblend = False
1300 self.doMeasure = True
1301 self.measurement.plugins.names = [
1302 "base_PixelFlags",
1303 "base_TransformedCentroidFromCoord",
1304 "base_PsfFlux",
1305 "base_CircularApertureFlux",
1306 ]
1307
1308 def validate(self):
1309 """Validate the configuration.
1310
1311 This method extends the base class validation to ensure that `doDetect`
1312 and `doDeblend` are set to `False` and `doMeasure` to `True`, as this
1313 task is intended for forced measurements where detection is not performed.
1314 """
1315 super().validate()
1316 if self.doDetect or self.doDeblend:
1317 raise ValueError(
1318 "ForcedMeasurementDriverTask should not perform detection or "
1319 "deblending; set doDetect=False and doDeblend=False"
1320 )
1321 if not self.doMeasure:
1322 raise ValueError(
1323 "ForcedMeasurementDriverTask must perform measurements; "
1324 "set doMeasure=True"
1325 )
1326
1327
1329 """Forced measurement driver task for single-band data.
1330
1331 This task is the 'forced' version of the `SingleBandMeasurementDriverTask`,
1332 intended as a convenience function for performing forced photometry on an
1333 input image given a set of IDs and RA/Dec coordinates. It is designed as a
1334 public-facing interface, allowing users to measure sources without
1335 explicitly instantiating and running pipeline tasks.
1336
1337 Examples
1338 --------
1339 Here is an example of how to use this class to run forced measurements on
1340 an exposure using an Astropy table containing source IDs and RA/Dec
1341 coordinates:
1342
1343 >>> from lsst.pipe.tasks.measurementDriver import (
1344 ... ForcedMeasurementDriverConfig,
1345 ... ForcedMeasurementDriverTask,
1346 ... )
1347 >>> import astropy.table
1348 >>> import lsst.afw.image as afwImage
1349 >>> config = ForcedMeasurementDriverConfig()
1350 >>> config.doScaleVariance = True
1351 >>> config.scaleVariance.background.binSize = 32
1352 >>> config.doApCorr = True
1353 >>> config.measurement.plugins.names = [
1354 ... "base_PixelFlags",
1355 ... "base_TransformedCentroidFromCoord",
1356 ... "base_PsfFlux",
1357 ... "base_CircularApertureFlux",
1358 ... ]
1359 >>> config.measurement.slots.psfFlux = "base_PsfFlux"
1360 >>> config.measurement.slots.centroid = "base_TransformedCentroidFromCoord"
1361 >>> config.measurement.slots.shape = None
1362 >>> config.measurement.doReplaceWithNoise = False
1363 >>> calexp = butler.get("deepCoadd_calexp", dataId=...)
1364 >>> objtable = butler.get(
1365 ... "objectTable", dataId=..., storageClass="ArrowAstropy"
1366 ... )
1367 >>> table = objtable[:5].copy()["objectId", "coord_ra", "coord_dec"]
1368 >>> driver = ForcedMeasurementDriverTask(config=config)
1369 >>> results = driver.runFromAstropy(
1370 ... table,
1371 ... calexp,
1372 ... id_column_name="objectId",
1373 ... ra_column_name="coord_ra",
1374 ... dec_column_name="coord_dec",
1375 ... psf_footprint_scaling=3.0,
1376 ... )
1377 >>> results.writeFits("forced_meas_catalog.fits")
1378 """
1379
1380 ConfigClass = ForcedMeasurementDriverConfig
1381 _DefaultName = "forcedMeasurementDriver"
1382
1383 def __init__(self, *args, **kwargs):
1384 """Initialize the forced measurement driver task."""
1385 super().__init__(*args, **kwargs)
1386
1387 self.measurement: measBase.ForcedMeasurementTask # To be created!
1388
1389 def runFromAstropy(
1390 self,
1391 table: astropy.table.Table,
1392 exposure: afwImage.Exposure,
1393 *,
1394 id_column_name: str = "objectId",
1395 ra_column_name: str = "coord_ra",
1396 dec_column_name: str = "coord_dec",
1397 psf_footprint_scaling: float = 3.0,
1398 idGenerator: measBase.IdGenerator | None = None,
1399 ) -> astropy.table.Table:
1400 """Run forced measurements on an exposure using an Astropy table.
1401
1402 Parameters
1403 ----------
1404 table :
1405 Astropy table containing source IDs and RA/Dec coordinates.
1406 Must contain columns with names specified by `id_column_name`,
1407 `ra_column_name`, and `dec_column_name`.
1408 exposure :
1409 Exposure on which to run the forced measurements.
1410 id_column_name :
1411 Name of the column containing source IDs in the table.
1412 ra_column_name :
1413 Name of the column containing RA coordinates in the table.
1414 dec_column_name :
1415 Name of the column containing Dec coordinates in the table.
1416 psf_footprint_scaling :
1417 Scaling factor to apply to the PSF second-moments ellipse in order
1418 to determine the footprint boundary.
1419 idGenerator :
1420 Object that generates source IDs and provides random seeds.
1421 If not provided, a new `IdGenerator` will be created.
1422
1423 Returns
1424 -------
1425 result :
1426 Astropy table containing the measured sources with columns
1427 corresponding to the source IDs, RA, Dec, from the input table, and
1428 additional measurement columns defined in the configuration.
1429 """
1430 # Validate inputs before proceeding.
1431 self._ensureValidInputs(table, exposure, id_column_name, ra_column_name, dec_column_name)
1432
1433 # Generate catalog IDs consistently across subtasks.
1434 if idGenerator is None:
1435 idGenerator = measBase.IdGenerator()
1436
1437 # Get the WCS from the exposure and use it as the reference WCS.
1438 refWcs = exposure.getWcs()
1439
1440 # Prepare the Schema and subtasks for processing. No catalog is
1441 # provided here, as we will generate it from the reference catalog.
1442 self._prepareSchemaAndSubtasks(catalog=None)
1443
1444 # Convert the Astropy table to a minimal source catalog.
1445 # This must be done *after* `_prepareSchemaAndSubtasks`, or the schema
1446 # won't be set up correctly.
1447 refCat = self._makeMinimalSourceCatalogFromAstropy(
1448 table, columns=[id_column_name, ra_column_name, dec_column_name]
1449 )
1450
1451 # Check whether coords are within the image.
1452 bbox = exposure.getBBox()
1453 for record in refCat:
1454 localPoint = refWcs.skyToPixel(record.getCoord())
1455 localIntPoint = lsst.geom.Point2I(localPoint)
1456 assert bbox.contains(localIntPoint), (
1457 f"Center for record {record.getId()} is not in exposure; this should be guaranteed by "
1458 "generateMeasCat."
1459 )
1460
1461 # Scale the variance plane.
1462 if self.config.doScaleVariance:
1463 self._scaleVariance(exposure)
1464
1465 # Generate the measurement catalog from the reference catalog.
1466 # The `exposure` and `wcs` arguments will not actually be used by the
1467 # call below, but we need to pass them to satisfy the interface.
1468 catalog = self.measurement.generateMeasCat(
1469 exposure, refCat, refWcs, idFactory=idGenerator.make_table_id_factory()
1470 )
1471
1472 # Forced measurement uses a provided catalog, so detection was skipped
1473 # and no footprints exist. We therefore resort to approximate
1474 # footprints by scaling the PSF's second-moments ellipse.
1475 self.measurement.attachPsfShapeFootprints(catalog, exposure, scaling=psf_footprint_scaling)
1476
1477 # Process catalog through measurement, aperture correction, and catalog
1478 # calculation subtasks.
1479 catalog = self._processCatalog(exposure, catalog, idGenerator, refCat=refCat)
1480
1481 # Convert the catalog back to an Astropy table.
1482 result = catalog.asAstropy()
1483
1484 # Clean up: 'id' may confuse users since 'objectId' is the expected
1485 # identifier.
1486 del result["id"]
1487
1488 return result
1489
1490 def run(self, *args, **kwargs):
1491 raise NotImplementedError(
1492 "The run method is not implemented for `ForcedMeasurementDriverTask`. "
1493 "Use `runFromAstropy` instead."
1494 )
1495
1496 def runFromImage(self, *args, **kwargs):
1497 raise NotImplementedError(
1498 "The `runFromImage` method is not implemented for `ForcedMeasurementDriverTask`. "
1499 "Use `runFromAstropy` instead."
1500 )
1501
1502 def _ensureValidInputs(
1503 self,
1504 table: astropy.table.Table,
1505 exposure: afwImage.Exposure,
1506 id_column_name: str,
1507 ra_column_name: str,
1508 dec_column_name: str,
1509 ) -> None:
1510 """Validate the inputs for the forced measurement task.
1511
1512 Parameters
1513 ----------
1514 table :
1515 Astropy table containing source IDs and RA/Dec coordinates.
1516 exposure :
1517 Exposure on which to run the forced measurements.
1518 id_column_name :
1519 Name of the column containing source IDs in the table.
1520 ra_column_name :
1521 Name of the column containing RA coordinates in the table.
1522 dec_column_name :
1523 Name of the column containing Dec coordinates in the table.
1524 """
1525 if not isinstance(table, astropy.table.Table):
1526 raise TypeError(f"Expected 'table' to be an astropy Table, got {type(table)}")
1527
1528 if not isinstance(exposure, afwImage.Exposure):
1529 raise TypeError(f"Expected 'exposure' to be an Exposure, got {type(exposure)}")
1530
1531 for col in [id_column_name, ra_column_name, dec_column_name]:
1532 if col not in table.colnames:
1533 raise ValueError(f"Column '{col}' not found in the input table")
1534
1535 def _makeMinimalSourceCatalogFromAstropy(
1536 self, table: astropy.table.Table, columns: list[str] = ["id", "ra", "dec"]
1537 ):
1538 """Convert an Astropy Table to a minimal LSST SourceCatalog.
1539
1540 This is intended for use with the forced measurement subtask, which
1541 expects a `SourceCatalog` input with a minimal schema containing `id`,
1542 `ra`, and `dec`.
1543
1544 Parameters
1545 ----------
1546 table :
1547 Astropy Table containing source IDs and sky coordinates.
1548 columns :
1549 Names of the columns in the order [id, ra, dec], where `ra` and
1550 `dec` are in degrees.
1551
1552 Returns
1553 -------
1554 outputCatalog : `lsst.afw.table.SourceCatalog`
1555 A SourceCatalog with minimal schema populated from the input table.
1556
1557 Raises
1558 ------
1559 ValueError
1560 If `columns` does not contain exactly 3 items.
1561 KeyError
1562 If any of the specified columns are missing from the input table.
1563 """
1564 # TODO: Open a meas_base ticket to make this function pay attention to
1565 # the configs, and move this from being a Task method to a free
1566 # function that takes column names as args.
1567
1568 if len(columns) != 3:
1569 raise ValueError("`columns` must contain exactly three elements for [id, ra, dec]")
1570
1571 idCol, raCol, decCol = columns
1572
1573 for col in columns:
1574 if col not in table.colnames:
1575 raise KeyError(f"Missing required column: '{col}'")
1576
1577 outputCatalog = lsst.afw.table.SourceCatalog(self.schema)
1578 outputCatalog.reserve(len(table))
1579
1580 for row in table:
1581 outputRecord = outputCatalog.addNew()
1582 outputRecord.setId(row[idCol])
1583 outputRecord.setCoord(lsst.geom.SpherePoint(row[raCol], row[decCol], lsst.geom.degrees))
1584
1585 return outputCatalog
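# Illustrative sketch (hypothetical IDs and coordinates; `driver` and `calexp`
# as in the class example above): the table handed to `runFromAstropy` only
# needs an integer ID column plus RA/Dec in degrees, and every position must
# fall inside the exposure.
#
#     import astropy.table
#     table = astropy.table.Table(
#         {
#             "objectId": [1, 2],
#             "coord_ra": [150.112, 150.115],
#             "coord_dec": [2.201, 2.203],
#         }
#     )
#     results = driver.runFromAstropy(table, calexp)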