LSST Data Management Base Package
lsst.pipe.tasks.multiBand Namespace Reference

Classes

class  DetectCoaddSourcesConnections
 

Variables

 schema : `lsst.afw.table.Schema`, optional
 
 exposure : `lsst.afw.image.Exposure`
 
 idFactory : `lsst.afw.table.IdFactory`
 
 expId : `int`
 
 result : `lsst.pipe.base.Struct`
 
 peakSchema : `lsst.afw.table.Schema`, optional
 
 refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
 
 initInputs : `dict`, optional
 
 sources : `lsst.afw.table.SourceCatalog`
 
 skyInfo : `lsst.pipe.base.Struct`
 
 exposureId : `int` or `bytes`
 
 ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
 
 sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
 
 finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
 
 results : `lsst.pipe.base.Struct`
 

Variable Documentation

◆ ccdInputs

lsst.pipe.tasks.multiBand.ccdInputs : `lsst.afw.table.ExposureCatalog`, optional

Definition at line 631 of file multiBand.py.

◆ expId

lsst.pipe.tasks.multiBand.expId : `int`

Definition at line 198 of file multiBand.py.

◆ exposure

lsst.pipe.tasks.multiBand.exposure : `lsst.afw.image.Exposure`
_DefaultName = "detectCoaddSources"
ConfigClass = DetectCoaddSourcesConfig

def __init__(self, schema=None, **kwargs):
    # N.B. super() is used here to handle the multiple inheritance of PipelineTask;
    # the init call structure has been reviewed carefully to be sure super() works as intended.
    super().__init__(**kwargs)
    if schema is None:
        schema = afwTable.SourceTable.makeMinimalSchema()
    self.schema = schema
    self.makeSubtask("detection", schema=self.schema)
    if self.config.doScaleVariance:
        self.makeSubtask("scaleVariance")

    self.detectionSchema = afwTable.SourceCatalog(self.schema)

def runQuantum(self, butlerQC, inputRefs, outputRefs):
    inputs = butlerQC.get(inputRefs)
    idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
    inputs["idFactory"] = idGenerator.make_table_id_factory()
    inputs["expId"] = idGenerator.catalog_id
    outputs = self.run(**inputs)
    butlerQC.put(outputs, outputRefs)

def run(self, exposure, idFactory, expId):
_DefaultName = "measureCoaddSources"
ConfigClass = MeasureMergedCoaddSourcesConfig

def __init__(self, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
             **kwargs):
    super().__init__(**kwargs)
    self.deblended = self.config.inputCatalog.startswith("deblended")
    self.inputCatalog = "Coadd_" + self.config.inputCatalog
    if initInputs is not None:
        schema = initInputs['inputSchema'].schema
    if schema is None:
        raise ValueError("Schema must be defined.")
    self.schemaMapper = afwTable.SchemaMapper(schema)
    self.schemaMapper.addMinimalSchema(schema)
    self.schema = self.schemaMapper.getOutputSchema()
    afwTable.CoordKey.addErrorFields(self.schema)
    self.algMetadata = PropertyList()
    self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
    self.makeSubtask("setPrimaryFlags", schema=self.schema)
    if self.config.doMatchSources:
        self.makeSubtask("match", refObjLoader=refObjLoader)
    if self.config.doPropagateFlags:
        self.makeSubtask("propagateFlags", schema=self.schema)
    self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
    if self.config.doApCorr:
        self.makeSubtask("applyApCorr", schema=self.schema)
    if self.config.doRunCatalogCalculation:
        self.makeSubtask("catalogCalculation", schema=self.schema)

    self.outputSchema = afwTable.SourceCatalog(self.schema)
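
# Sketch (not part of multiBand.py): constructing the measurement task outside
# of runQuantum, mirroring the initInputs path in __init__ above.  The repo
# path and collection are placeholders; the dataset name follows the
# inputSchema connection default ("deepCoadd_deblendedFlux_schema") and should
# be treated as an assumption.
from lsst.daf.butler import Butler
from lsst.pipe.tasks.multiBand import MeasureMergedCoaddSourcesTask

butler = Butler("/path/to/repo", collections=["some/collection"])
schemaCat = butler.get("deepCoadd_deblendedFlux_schema")
task = MeasureMergedCoaddSourcesTask(schema=schemaCat.schema)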

def runQuantum(self, butlerQC, inputRefs, outputRefs):
    inputs = butlerQC.get(inputRefs)

    if self.config.doMatchSources:
        refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                             inputs.pop('refCat'),
                                             name=self.config.connections.refCat,
                                             config=self.config.refObjLoader,
                                             log=self.log)
        self.match.setRefObjLoader(refObjLoader)

    # Set the PSF cache capacity.
    # Move this into run() after Gen2 deprecation.
    inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

    # Get unique integer ID for IdFactory and RNG seeds; only the latter
    # should really be used as the IDs all come from the input catalog.
    idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
    inputs['exposureId'] = idGenerator.catalog_id

    # Transform inputCatalog
    table = afwTable.SourceTable.make(self.schema, idGenerator.make_table_id_factory())
    sources = afwTable.SourceCatalog(table)
    # Load the correct input catalog
    if "scarletCatalog" in inputs:
        inputCatalog = inputs.pop("scarletCatalog")
        catalogRef = inputRefs.scarletCatalog
    else:
        inputCatalog = inputs.pop("inputCatalog")
        catalogRef = inputRefs.inputCatalog
    sources.extend(inputCatalog, self.schemaMapper)
    del inputCatalog
    # Add the HeavyFootprints to the deblended sources
    if self.config.doAddFootprints:
        modelData = inputs.pop('scarletModels')
        if self.config.doConserveFlux:
            imageForRedistribution = inputs['exposure']
        else:
            imageForRedistribution = None
        updateCatalogFootprints(
            modelData=modelData,
            catalog=sources,
            band=inputRefs.exposure.dataId["band"],
            imageForRedistribution=imageForRedistribution,
            removeScarletData=True,
            updateFluxColumns=True,
        )
    table = sources.getTable()
    table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
    inputs['sources'] = sources

    skyMap = inputs.pop('skyMap')
    tractNumber = catalogRef.dataId['tract']
    tractInfo = skyMap[tractNumber]
    patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
    skyInfo = Struct(
        skyMap=skyMap,
        tractInfo=tractInfo,
        patchInfo=patchInfo,
        wcs=tractInfo.getWcs(),
        bbox=patchInfo.getOuterBBox()
    )
    inputs['skyInfo'] = skyInfo

    if self.config.doPropagateFlags:
        ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
        inputs["ccdInputs"] = ccdInputs

        if "sourceTableHandles" in inputs:
            sourceTableHandles = inputs.pop("sourceTableHandles")
            sourceTableHandleDict = {handle.dataId["visit"]: handle for handle in sourceTableHandles}
            inputs["sourceTableHandleDict"] = sourceTableHandleDict
        if "finalizedSourceTableHandles" in inputs:
            finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
            finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                              for handle in finalizedSourceTableHandles}
            inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict

    outputs = self.run(**inputs)
    # Strip HeavyFootprints to save space on disk
    sources = outputs.outputSources
    butlerQC.put(outputs, outputRefs)

def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None,
        sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):

Definition at line 193 of file multiBand.py.

◆ exposureId

lsst.pipe.tasks.multiBand.exposureId : `int` or `bytes`

Definition at line 629 of file multiBand.py.

◆ finalizedSourceTableHandleDict

lsst.pipe.tasks.multiBand.finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional

Definition at line 638 of file multiBand.py.

◆ idFactory

lsst.pipe.tasks.multiBand.idFactory : `lsst.afw.table.IdFactory`

Definition at line 196 of file multiBand.py.

◆ initInputs

lsst.pipe.tasks.multiBand.initInputs : `dict`, optional

Definition at line 493 of file multiBand.py.

◆ peakSchema

lsst.pipe.tasks.multiBand.peakSchema : `lsst.afw.table.Schema`, optional

Definition at line 487 of file multiBand.py.

◆ refObjLoader

lsst.pipe.tasks.multiBand.refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional

Definition at line 489 of file multiBand.py.

◆ result

lsst.pipe.tasks.multiBand.result : `lsst.pipe.base.Struct`

Definition at line 203 of file multiBand.py.

◆ results

lsst.pipe.tasks.multiBand.results : `lsst.pipe.base.Struct`

Definition at line 645 of file multiBand.py.

◆ schema

lsst.pipe.tasks.multiBand.schema : `lsst.afw.table.Schema`, optional
doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
hasFakes = Field(
    dtype=bool,
    default=False,
    doc="Should be set to True if fake sources have been inserted into the input data.",
)
idGenerator = SkyMapIdGeneratorConfig.make_field()

def setDefaults(self):
    super().setDefaults()
    self.detection.thresholdType = "pixel_stdev"
    self.detection.isotropicGrow = True
    # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
    self.detection.reEstimateBackground = False
    self.detection.background.useApprox = False
    self.detection.background.binSize = 4096
    self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
    self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender
    # Include band in packed data IDs that go into object IDs (None -> "as
    # many bands as are defined", rather than the default of zero).
    self.idGenerator.packer.n_bands = None


class DetectCoaddSourcesTask(PipelineTask):

def run(self, exposure, idFactory, expId):
    if self.config.doScaleVariance:
        varScale = self.scaleVariance.run(exposure.maskedImage)
        exposure.getMetadata().add("VARIANCE_SCALE", varScale)
    backgrounds = afwMath.BackgroundList()
    table = afwTable.SourceTable.make(self.schema, idFactory)
    detections = self.detection.run(table, exposure, expId=expId)
    sources = detections.sources
    if hasattr(detections, "background") and detections.background:
        for bg in detections.background:
            backgrounds.append(bg)
    return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
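
# A minimal usage sketch (not part of multiBand.py): run coadd detection
# standalone on an already-loaded coadd exposure.  `coadd` (an
# lsst.afw.image.ExposureF with a valid PSF) and the simple IdFactory are
# assumptions; in a pipeline, runQuantum derives both idFactory and expId
# from config.idGenerator as shown above.
import lsst.afw.table as afwTable
from lsst.pipe.tasks.multiBand import DetectCoaddSourcesConfig, DetectCoaddSourcesTask

config = DetectCoaddSourcesConfig()
config.doScaleVariance = False  # skip empirical variance rescaling in this sketch
task = DetectCoaddSourcesTask(config=config)
result = task.run(exposure=coadd,
                  idFactory=afwTable.IdFactory.makeSimple(),
                  expId=0)
print(len(result.outputSources), "sources detected on the coadd")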


class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                   dimensions=("tract", "patch", "band", "skymap"),
                                   defaultTemplates={"inputCoaddName": "deep",
                                                     "outputCoaddName": "deep",
                                                     "deblendedCatalog": "deblendedFlux"}):
inputSchema = cT.InitInput(
    doc="Input schema for measure merged task produced by a deblender or detection task",
    name="{inputCoaddName}Coadd_deblendedFlux_schema",
    storageClass="SourceCatalog"
)
outputSchema = cT.InitOutput(
    doc="Output schema after all new fields are added by task",
    name="{inputCoaddName}Coadd_meas_schema",
    storageClass="SourceCatalog"
)
refCat = cT.PrerequisiteInput(
    doc="Reference catalog used to match measured sources against known sources",
    name="ref_cat",
    storageClass="SimpleCatalog",
    dimensions=("skypix",),
    deferLoad=True,
    multiple=True
)
exposure = cT.Input(
    doc="Input coadd image",
    name="{inputCoaddName}Coadd_calexp",
    storageClass="ExposureF",
    dimensions=("tract", "patch", "band", "skymap")
)
skyMap = cT.Input(
    doc="SkyMap to use in processing",
    name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
    storageClass="SkyMap",
    dimensions=("skymap",),
)
# TODO[DM-47424]: remove this deprecated connection.
visitCatalogs = cT.Input(
    doc="Deprecated and unused.",
    name="src",
    dimensions=("instrument", "visit", "detector"),
    storageClass="SourceCatalog",
    multiple=True,
    deprecated="Deprecated and unused.  Will be removed after v29.",
)
sourceTableHandles = cT.Input(
    doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
         "These tables contain astrometry and photometry flags, and optionally "
         "PSF flags."),
    name="sourceTable_visit",
    storageClass="DataFrame",
    dimensions=("instrument", "visit"),
    multiple=True,
    deferLoad=True,
)
finalizedSourceTableHandles = cT.Input(
    doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
         "tables contain PSF flags from the finalized PSF estimation."),
    name="finalized_src_table",
    storageClass="DataFrame",
    dimensions=("instrument", "visit"),
    multiple=True,
    deferLoad=True,
)
inputCatalog = cT.Input(
    doc=("Name of the input catalog to use. "
         "If the single-band deblender was used this should be 'deblendedFlux'. "
         "If the multi-band deblender was used this should be 'deblendedModel', "
         "or 'deblendedFlux' if the multi-band deblender was configured to output "
         "deblended flux catalogs. If no deblending was performed this should "
         "be 'mergeDet'."),
    name="{inputCoaddName}Coadd_{deblendedCatalog}",
    storageClass="SourceCatalog",
    dimensions=("tract", "patch", "band", "skymap"),
)
scarletCatalog = cT.Input(
    doc="Catalogs produced by multiband deblending",
    name="{inputCoaddName}Coadd_deblendedCatalog",
    storageClass="SourceCatalog",
    dimensions=("tract", "patch", "skymap"),
)
scarletModels = cT.Input(
    doc="Multiband scarlet models produced by the deblender",
    name="{inputCoaddName}Coadd_scarletModelData",
    storageClass="ScarletModelData",
    dimensions=("tract", "patch", "skymap"),
)
outputSources = cT.Output(
    doc="Source catalog containing all the measurement information generated in this task",
    name="{outputCoaddName}Coadd_meas",
    dimensions=("tract", "patch", "band", "skymap"),
    storageClass="SourceCatalog",
)
matchResult = cT.Output(
    doc="Match catalog produced by configured matcher, optional on doMatchSources",
    name="{outputCoaddName}Coadd_measMatch",
    dimensions=("tract", "patch", "band", "skymap"),
    storageClass="Catalog",
)
denormMatches = cT.Output(
    doc="Denormalized Match catalog produced by configured matcher, optional on "
        "doWriteMatchesDenormalized",
    name="{outputCoaddName}Coadd_measMatchFull",
    dimensions=("tract", "patch", "band", "skymap"),
    storageClass="Catalog",
)

def __init__(self, *, config=None):
    super().__init__(config=config)
    del self.visitCatalogs
    if not config.doPropagateFlags:
        del self.sourceTableHandles
        del self.finalizedSourceTableHandles
    else:
        # Check for types of flags required.
        if not config.propagateFlags.source_flags:
            del self.sourceTableHandles
        if not config.propagateFlags.finalized_source_flags:
            del self.finalizedSourceTableHandles
    if config.inputCatalog == "deblendedCatalog":
        del self.inputCatalog
        if not config.doAddFootprints:
            del self.scarletModels
    else:
        del self.deblendedCatalog
        del self.scarletModels

    if not config.doMatchSources:
        del self.refCat
        del self.matchResult

    if not config.doWriteMatchesDenormalized:
        del self.denormMatches


class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                              pipelineConnections=MeasureMergedCoaddSourcesConnections):
inputCatalog = ChoiceField(
    dtype=str,
    default="deblendedCatalog",
    allowed={
        "deblendedCatalog": "Output catalog from ScarletDeblendTask",
        "deblendedFlux": "Output catalog from SourceDeblendTask",
        "mergeDet": "The merged detections before deblending."
    },
    doc="The name of the input catalog.",
)
doAddFootprints = Field(dtype=bool,
                        default=True,
                        doc="Whether or not to add footprints to the input catalog from scarlet models. "
                            "This should be True whenever the multi-band deblender is used; "
                            "otherwise it should be False.")
doConserveFlux = Field(dtype=bool, default=True,
                       doc="Whether to use the deblender models as templates to re-distribute the flux "
                           "from the 'exposure' (True), or to perform measurements on the deblender "
                           "model footprints.")
doStripFootprints = Field(dtype=bool, default=True,
                          doc="Whether to strip footprints from the output catalog before "
                              "saving to disk. "
                              "This is usually done when using scarlet models to save disk space.")
measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
doPropagateFlags = Field(
    dtype=bool, default=True,
    doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
)
propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
doWriteMatchesDenormalized = Field(
    dtype=bool,
    default=False,
    doc=("Write reference matches in denormalized format? "
         "This format uses more disk space, but is more convenient to read."),
)
coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
checkUnitsParseStrict = Field(
    doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
    dtype=str,
    default="raise",
)
doApCorr = Field(
    dtype=bool,
    default=True,
    doc="Apply aperture corrections"
)
applyApCorr = ConfigurableField(
    target=ApplyApCorrTask,
    doc="Subtask to apply aperture corrections"
)
doRunCatalogCalculation = Field(
    dtype=bool,
    default=True,
    doc='Run catalogCalculation task'
)
catalogCalculation = ConfigurableField(
    target=CatalogCalculationTask,
    doc="Subtask to run catalogCalculation plugins on catalog"
)

hasFakes = Field(
    dtype=bool,
    default=False,
    doc="Should be set to True if fake sources have been inserted into the input data."
)
idGenerator = SkyMapIdGeneratorConfig.make_field()

@property
def refObjLoader(self):
    return self.match.refObjLoader

def setDefaults(self):
    super().setDefaults()
    self.measurement.plugins.names |= ['base_InputCount',
                                       'base_Variance',
                                       'base_LocalPhotoCalib',
                                       'base_LocalWcs']

    # TODO: Remove STREAK in DM-44658, streak masking to happen only in
    # ip_diffim; if we can propagate the streak mask from diffim, we can
    # still set flags with it here.
    self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                   'INEXACT_PSF']
    self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                 'INEXACT_PSF']

def validate(self):
    super().validate()

    if not self.doMatchSources and self.doWriteMatchesDenormalized:
        raise ValueError("Cannot set doWriteMatchesDenormalized if doMatchSources is False.")


class MeasureMergedCoaddSourcesTask(PipelineTask):

Definition at line 153 of file multiBand.py.

◆ skyInfo

lsst.pipe.tasks.multiBand.skyInfo : `lsst.pipe.base.Struct`

Definition at line 626 of file multiBand.py.

◆ sources

lsst.pipe.tasks.multiBand.sources : `lsst.afw.table.SourceCatalog`

Definition at line 623 of file multiBand.py.

◆ sourceTableHandleDict

lsst.pipe.tasks.multiBand.sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional

Definition at line 634 of file multiBand.py.