LSST Data Management Base Package
Classes

    class DetectCoaddSourcesConnections
    class MeasureMergedCoaddSourcesConnections

Variables
    exposure = self._cropToExactBinning(exposure, patchInfo).clone()
    background = self.detection.background.run(exposure).background
    stats = bg.getStatsImage()
    schema : `lsst.afw.table.Schema`, optional
    peakSchema : `lsst.afw.table.Schema`, optional
    refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
    initInputs : `dict`, optional
    sources : `lsst.afw.table.SourceCatalog`
    skyInfo : `lsst.pipe.base.Struct`
    exposureId : `int` or `bytes`
    ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
    sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
    finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
    finalVisitSummaryHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
    apCorrMap : `lsst.afw.image.ApCorrMap`, optional
    results : `lsst.pipe.base.Struct`
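Note: the schema, peakSchema, refObjLoader, and initInputs entries above are constructor arguments of MeasureMergedCoaddSourcesTask, while exposure, sources, skyInfo, exposureId, ccdInputs, the handle dicts, apCorrMap, and results are the parameters and return value of its run() method. A minimal sketch of how they fit together when the task is driven by hand rather than through runQuantum(); the pre-built inputCatalog, exposure, sources, and skyInfo objects are placeholders, not taken from this page:

    from lsst.pipe.tasks.multiBand import (MeasureMergedCoaddSourcesConfig,
                                           MeasureMergedCoaddSourcesTask)

    # Construct the task from the schema of the input (merged/deblended) catalog.
    config = MeasureMergedCoaddSourcesConfig()
    task = MeasureMergedCoaddSourcesTask(schema=inputCatalog.schema, config=config)

    # exposure : coadd patch (lsst.afw.image.Exposure), assumed pre-loaded
    # sources  : lsst.afw.table.SourceCatalog built on task.schema
    # skyInfo  : lsst.pipe.base.Struct with skyMap/tractInfo/patchInfo/wcs/bbox
    results = task.run(
        exposure=exposure,
        sources=sources,
        skyInfo=skyInfo,
        exposureId=42,  # integer seed; the optional handle dicts and apCorrMap default to None
    )
    measured = results.outputSources  # measured SourceCatalog returned in the Struct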
Variable Documentation

lsst.pipe.tasks.multiBand.apCorrMap : `lsst.afw.image.ApCorrMap`, optional
Definition at line 911 of file multiBand.py.
lsst.pipe.tasks.multiBand.background = self.detection.background.run(exposure).background
Definition at line 392 of file multiBand.py.
lsst.pipe.tasks.multiBand.ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
Definition at line 899 of file multiBand.py.
lsst.pipe.tasks.multiBand.exposure = self._cropToExactBinning(exposure, patchInfo).clone()

    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig

    def __init__(self, schema=None, peakSchema=None, refObjLoader=None, initInputs=None, **kwargs):
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            raise ValueError("Schema must be defined.")
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        # TODO[DM-47797]: remove match subtask
        if self.config.doMatchSources:
            self.makeSubtask("match", refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)
        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        # TODO[DM-47797]: remove this block
        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader(
                [ref.datasetRef.dataId for ref in inputRefs.refCat],
                inputs.pop('refCat'),
                name=self.config.connections.refCat,
                config=self.config.refObjLoader,
                log=self.log)
            self.match.setRefObjLoader(refObjLoader)

        if self.config.useCellCoadds:
            multiple_cell_coadd = inputs.pop("exposure_cells")
            stitched_coadd = multiple_cell_coadd.stitch()
            exposure = stitched_coadd.asExposure()
            background = inputs.pop("background")
            exposure.image -= background.getImage()
            ccdInputs = stitched_coadd.ccds
            apCorrMap = stitched_coadd.ap_corr_map
            band = inputRefs.exposure_cells.dataId["band"]
        else:
            exposure = inputs.pop("exposure")
            # Set psfcache
            # move this to run after gen2 deprecation
            exposure.getPsf().setCacheCapacity(self.config.psfCache)
            ccdInputs = exposure.getInfo().getCoaddInputs().ccds
            apCorrMap = exposure.getInfo().getApCorrMap()
            band = inputRefs.exposure.dataId["band"]

        # Get unique integer ID for IdFactory and RNG seeds; only the latter
        # should really be used as the IDs all come from the input catalog.
        idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)

        # Transform inputCatalog
        table = afwTable.SourceTable.make(self.schema, idGenerator.make_table_id_factory())
        sources = afwTable.SourceCatalog(table)
        # Load the correct input catalog
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)
        del inputCatalog

        # Add the HeavyFootprints to the deblended sources
        if self.config.doAddFootprints:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                imageForRedistribution = exposure
            else:
                imageForRedistribution = None
            updateCatalogFootprints(
                modelData=modelData,
                catalog=sources,
                band=band,
                imageForRedistribution=imageForRedistribution,
                removeScarletData=True,
                updateFluxColumns=True,
            )

        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.

        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )

        if self.config.doPropagateFlags:
            if "sourceTableHandles" in inputs:
                sourceTableHandles = inputs.pop("sourceTableHandles")
                sourceTableHandleDict = {handle.dataId["visit"]: handle
                                         for handle in sourceTableHandles}
            else:
                sourceTableHandleDict = None
            if "finalizedSourceTableHandles" in inputs:
                finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                  for handle in finalizedSourceTableHandles}
            else:
                finalizedSourceTableHandleDict = None
            if "finalVisitSummaryHandles" in inputs:
                finalVisitSummaryHandles = inputs.pop("finalVisitSummaryHandles")
                finalVisitSummaryHandleDict = {handle.dataId["visit"]: handle
                                               for handle in finalVisitSummaryHandles}
            else:
                finalVisitSummaryHandleDict = None

        assert not inputs, "runQuantum got more inputs than expected."

        outputs = self.run(
            exposure=exposure,
            sources=sources,
            skyInfo=skyInfo,
            exposureId=idGenerator.catalog_id,
            ccdInputs=ccdInputs,
            sourceTableHandleDict=sourceTableHandleDict,
            finalizedSourceTableHandleDict=finalizedSourceTableHandleDict,
            finalVisitSummaryHandleDict=finalVisitSummaryHandleDict,
            apCorrMap=apCorrMap,
        )

        # Strip HeavyFootprints to save space on disk
        if self.config.doStripFootprints:
            sources = outputs.outputSources
            for source in sources[sources["parent"] != 0]:
                source.setFootprint(None)
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None,
            sourceTableHandleDict=None, finalizedSourceTableHandleDict=None,
            finalVisitSummaryHandleDict=None, apCorrMap=None):
Definition at line 389 of file multiBand.py.
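The skyInfo Struct consumed by run() is assembled from the skyMap exactly as in the runQuantum() body above. A standalone sketch, assuming a butler is available and using a hypothetical tract/patch (9813/42) and skymap name purely for illustration:

    from lsst.pipe.base import Struct

    # "skyMap" dataset name and the skymap value are assumptions for this sketch.
    skyMap = butler.get("skyMap", skymap="hsc_rings_v1")
    tractInfo = skyMap[9813]
    patchInfo = tractInfo.getPatchInfo(42)
    skyInfo = Struct(
        skyMap=skyMap,
        tractInfo=tractInfo,
        patchInfo=patchInfo,
        wcs=tractInfo.getWcs(),
        bbox=patchInfo.getOuterBBox(),
    )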
lsst.pipe.tasks.multiBand.exposureId : `int` or `bytes`
Definition at line 897 of file multiBand.py.
lsst.pipe.tasks.multiBand.finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
Definition at line 905 of file multiBand.py.
lsst.pipe.tasks.multiBand.finalVisitSummaryHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
Definition at line 908 of file multiBand.py.
lsst.pipe.tasks.multiBand.initInputs : `dict`, optional
Definition at line 727 of file multiBand.py.
lsst.pipe.tasks.multiBand.peakSchema : `lsst.afw.table.Schema`, optional
Definition at line 721 of file multiBand.py.
lsst.pipe.tasks.multiBand.refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
Definition at line 723 of file multiBand.py.
lsst.pipe.tasks.multiBand.results : `lsst.pipe.base.Struct`
Definition at line 917 of file multiBand.py.
lsst.pipe.tasks.multiBand.schema : `lsst.afw.table.Schema`, optional

    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
        # TODO[DM-47797]: remove this config option and anything using it.
        deprecated="Support for old deblender outputs will be removed after v29.",
    )
    doAddFootprints = Field(dtype=bool, default=True,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints.")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    useCellCoadds = Field(dtype=bool, default=False, doc="Whether to use cell coadds?")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(
        dtype=bool,
        default=False,
        doc="Match sources to reference catalog?",
        deprecated="Reference matching in measureCoaddSources will be removed after v29.",
    )
    match = ConfigurableField(
        target=DirectMatchTask,
        doc="Matching to reference catalog",
        deprecated="Reference matching in measureCoaddSources will be removed after v29.",
    )
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
        deprecated="Reference matching in measureCoaddSources will be removed after v29.",
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    @property
    def refObjLoader(self):
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        # TODO: Remove STREAK in DM-44658, streak masking to happen only in
        # ip_diffim; if we can propagate the streak mask from diffim, we can
        # still set flags with it here.
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']

    def validate(self):
        super().validate()
        if not self.doMatchSources and self.doWriteMatchesDenormalized:
            raise ValueError("Cannot set doWriteMatchesDenormalized if doMatchSources is False.")


class MeasureMergedCoaddSourcesTask(PipelineTask):
Definition at line 719 of file multiBand.py.
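The config fields shown above are normally adjusted through pipeline config override files, where the pipeline framework provides a pre-built config object. A small sketch, assuming a measureCoaddSources task label; the file name and the specific overrides are illustrative only:

    # config/measureCoaddSources.py (hypothetical override file; `config` is the
    # MeasureMergedCoaddSourcesConfig instance supplied by the pipeline framework)
    config.psfCache = 200                  # enlarge the coadd PSF cache
    config.doApCorr = False                # skip aperture corrections
    config.useCellCoadds = True            # measure on stitched cell coadds
    config.measurement.plugins.names |= ["base_FPPosition"]  # plugin choice is an example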
lsst.pipe.tasks.multiBand.skyInfo : `lsst.pipe.base.Struct`
Definition at line 894 of file multiBand.py.
lsst.pipe.tasks.multiBand.sources : `lsst.afw.table.SourceCatalog`
Definition at line 891 of file multiBand.py.
lsst.pipe.tasks.multiBand.sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
Definition at line 902 of file multiBand.py.
lsst.pipe.tasks.multiBand.stats = bg.getStatsImage()
Definition at line 394 of file multiBand.py.
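The background and stats entries above are local variables in the coadd detection step; stats is the statistics image of a fitted background model. A minimal sketch of inspecting such a statistics image, assuming bg is a single lsst.afw.math background element (that assumption about bg's type is mine, not stated on this page):

    # `bg` is assumed to be one background element, as in the initializer
    # `stats = bg.getStatsImage()` documented above.
    stats = bg.getStatsImage()                  # coarse MaskedImage of background measurements
    mean_level = stats.getImage().array.mean()  # average background level over the patch
    print("mean background level:", mean_level)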