LSST Applications  21.0.0+04719a4bac,21.0.0-1-ga51b5d4+f5e6047307,21.0.0-11-g2b59f77+a9c1acf22d,21.0.0-11-ga42c5b2+86977b0b17,21.0.0-12-gf4ce030+76814010d2,21.0.0-13-g1721dae+760e7a6536,21.0.0-13-g3a573fe+768d78a30a,21.0.0-15-g5a7caf0+f21cbc5713,21.0.0-16-g0fb55c1+b60e2d390c,21.0.0-19-g4cded4ca+71a93a33c0,21.0.0-2-g103fe59+bb20972958,21.0.0-2-g45278ab+04719a4bac,21.0.0-2-g5242d73+3ad5d60fb1,21.0.0-2-g7f82c8f+8babb168e8,21.0.0-2-g8f08a60+06509c8b61,21.0.0-2-g8faa9b5+616205b9df,21.0.0-2-ga326454+8babb168e8,21.0.0-2-gde069b7+5e4aea9c2f,21.0.0-2-gecfae73+1d3a86e577,21.0.0-2-gfc62afb+3ad5d60fb1,21.0.0-25-g1d57be3cd+e73869a214,21.0.0-3-g357aad2+ed88757d29,21.0.0-3-g4a4ce7f+3ad5d60fb1,21.0.0-3-g4be5c26+3ad5d60fb1,21.0.0-3-g65f322c+e0b24896a3,21.0.0-3-g7d9da8d+616205b9df,21.0.0-3-ge02ed75+a9c1acf22d,21.0.0-4-g591bb35+a9c1acf22d,21.0.0-4-g65b4814+b60e2d390c,21.0.0-4-gccdca77+0de219a2bc,21.0.0-4-ge8a399c+6c55c39e83,21.0.0-5-gd00fb1e+05fce91b99,21.0.0-6-gc675373+3ad5d60fb1,21.0.0-64-g1122c245+4fb2b8f86e,21.0.0-7-g04766d7+cd19d05db2,21.0.0-7-gdf92d54+04719a4bac,21.0.0-8-g5674e7b+d1bd76f71f,master-gac4afde19b+a9c1acf22d,w.2021.13
LSST Data Management Base Package
Classes | Functions | Variables
lsst.pipe.tasks.mergeDetections Namespace Reference

Classes

class  MergeDetectionsConnections
 

Functions

def write (self, patchRef, catalog)
 Write the output. More...
 
def writeMetadata (self, dataRefList)
 No metadata to write, and not sure how to write it for a list of dataRefs. More...
 

Variables

 patch
 
 tract
 
 filter
 
 schema = self.merged.getPeakSchema()
 
 mergeKey = schema.find("merge_peak_%s" % self.config.skyFilterName).key
 
list converted = []
 
 peak = oldFoot.getPeaks()[0]
 
 newFoot = afwDetect.Footprint(oldFoot.spans, schema)
 

Function Documentation

◆ write()

def lsst.pipe.tasks.mergeDetections.write (   self,
  patchRef,
  catalog 
)

Write the output.

Parameters
[in] patchRef — data reference for the patch
[in] catalog — the catalog to write

We write as the dataset provided by the 'outputDataset' class variable.

Definition at line 389 of file mergeDetections.py.

389  def write(self, patchRef, catalog):
390  """!
391  @brief Write the output.
392 
393  @param[in] patchRef data reference for patch
394  @param[in] catalog catalog
395 
396  We write as the dataset provided by the 'outputDataset'
397  class variable.
398  """
399  patchRef.put(catalog, self.config.coaddName + "Coadd_" + self.outputDataset)
400  # since the filter isn't actually part of the data ID for the dataset we're saving,
401  # it's confusing to see it in the log message, even if the butler simply ignores it.
402  mergeDataId = patchRef.dataId.copy()
403  del mergeDataId["filter"]
404  self.log.info("Wrote merged catalog: %s" % (mergeDataId,))
405 
def write(self, patchRef, catalog)
Write the output.

◆ writeMetadata()

def lsst.pipe.tasks.mergeDetections.writeMetadata (   self,
  dataRefList 
)

No metadata to write, and not sure how to write it for a list of dataRefs.

Definition at line 406 of file mergeDetections.py.

406  def writeMetadata(self, dataRefList):
407  """!
408  @brief No metadata to write, and not sure how to write it for a list of dataRefs.
409  """
410  pass
def writeMetadata(self, dataRefList)
No metadata to write, and not sure how to write it for a list of dataRefs.

Variable Documentation

◆ converted

list lsst.pipe.tasks.mergeDetections.converted = []

Definition at line 378 of file mergeDetections.py.

◆ filter

lsst.pipe.tasks.mergeDetections.filter

Definition at line 189 of file mergeDetections.py.

◆ mergeKey

lsst.pipe.tasks.mergeDetections.mergeKey = schema.find("merge_peak_%s" % self.config.skyFilterName).key

Definition at line 377 of file mergeDetections.py.

◆ newFoot

lsst.pipe.tasks.mergeDetections.newFoot = afwDetect.Footprint(oldFoot.spans, schema)

Definition at line 382 of file mergeDetections.py.

◆ patch

lsst.pipe.tasks.mergeDetections.patch

Definition at line 189 of file mergeDetections.py.

◆ peak

lsst.pipe.tasks.mergeDetections.peak = oldFoot.getPeaks()[0]
Examples
forEachPixel.cc.

Definition at line 381 of file mergeDetections.py.

◆ schema

lsst.pipe.tasks.mergeDetections.schema = self.merged.getPeakSchema()
# Configuration class for this task.
ConfigClass = MergeDetectionsConfig
# Runner used for command-line execution (groups per-band refs — TODO confirm against MergeSourcesRunner).
RunnerClass = MergeSourcesRunner
# Default task name used for config and logging.
_DefaultName = "mergeCoaddDetections"
# Dataset suffix read as input; combined with coaddName elsewhere (e.g. "deepCoadd_det").
inputDataset = "det"
# Dataset suffix written as output (see write(): coaddName + "Coadd_" + outputDataset).
outputDataset = "mergeDet"
# Factory producing an IdFactory keyed by the merged coadd ID dataset.
makeIdFactory = _makeMakeIdFactory("MergedCoaddId")

@classmethod
def _makeArgumentParser(cls):
    """Build the command-line argument parser for this task.

    Delegates to ``makeMergeArgumentParser`` with the task's default
    name and input dataset type.
    """
    parser = makeMergeArgumentParser(cls._DefaultName, cls.inputDataset)
    return parser

def getInputSchema(self, butler=None, schema=None):
    """Return the input source schema.

    Resolution is delegated to the module-level ``getInputSchema``
    helper, which may consult the butler when ``schema`` is not given.
    """
    resolved = getInputSchema(self, butler, schema)
    return resolved

def __init__(self, butler=None, schema=None, initInputs=None, **kwargs):
    """Initialize the task and build the merge machinery and output schemas.

    Parameters
    ----------
    butler : optional
        Butler used to look up the input schema when ``schema`` is not given.
    schema : optional
        Input source schema; superseded by ``initInputs`` when present.
    initInputs : `dict`, optional
        Pipeline init inputs; ``initInputs['schema'].schema`` supplies the
        input schema when provided.
    **kwargs
        Forwarded to the base-class constructor.
    """
    # NOTE: make PipelineTask-only wording less transitional once
    # CmdLineTask support is removed.
    super().__init__(**kwargs)
    # Init-inputs (PipelineTask execution) override any explicit schema.
    if initInputs is not None:
        schema = initInputs['schema'].schema

    self.makeSubtask("skyObjects")
    self.schema = self.getInputSchema(butler=butler, schema=schema)

    # Merge bands in priority order, with the synthetic sky filter last.
    bands = [*self.config.priorityList, self.config.skyFilterName]
    self.merged = afwDetect.FootprintMergeList(self.schema, bands)
    self.outputSchema = afwTable.SourceCatalog(self.schema)
    self.outputPeakSchema = afwDetect.PeakCatalog(self.merged.getPeakSchema())

def runDataRef(self, patchRefList):
    """Merge per-band detection catalogs for one patch (Gen2 entry point).

    Reads a catalog per data reference, runs the merge, and writes the
    result using the first reference in the list.
    """
    firstRef = patchRefList[0]
    perBand = dict(readCatalog(self, ref) for ref in patchRefList)
    skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=firstRef)
    idFactory = self.makeIdFactory(firstRef)
    seed = firstRef.get(self.config.coaddName + "MergedCoaddId")
    result = self.run(perBand, skyInfo, idFactory, seed)
    self.write(firstRef, result.outputCatalog)

def runQuantum(self, butlerQC, inputRefs, outputRefs):
    """Fetch inputs, prepare per-band catalogs and sky info, run, and persist.

    Gen3 ``PipelineTask`` entry point.
    """
    inputs = butlerQC.get(inputRefs)

    idInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
    inputs["skySeed"] = idInfo.expId
    inputs["idFactory"] = idInfo.makeSourceIdFactory()

    # Re-key the input catalog list by band so run() can apply priority order.
    inputs['catalogs'] = {
        ref.dataId['band']: cat
        for ref, cat in zip(inputRefs.catalogs, inputs['catalogs'])
    }

    skyMap = inputs.pop('skyMap')
    # Can use the first dataId to find the tract and patch being worked on
    firstDataId = inputRefs.catalogs[0].dataId
    tractInfo = skyMap[firstDataId['tract']]
    patchInfo = tractInfo.getPatchInfo(firstDataId['patch'])
    inputs['skyInfo'] = Struct(
        skyMap=skyMap,
        tractInfo=tractInfo,
        patchInfo=patchInfo,
        wcs=tractInfo.getWcs(),
        bbox=patchInfo.getOuterBBox(),
    )

    outputs = self.run(**inputs)
    butlerQC.put(outputs, outputRefs)

def run(self, catalogs, skyInfo, idFactory, skySeed):
    """Merge per-band source catalogs into a single detection catalog.

    Parameters
    ----------
    catalogs : `dict`
        Mapping from band name to its source catalog.
    skyInfo : `lsst.pipe.base.Struct`
        Patch geometry information; ``skyInfo.wcs`` is used to convert
        the configured peak distances from arcseconds to pixels.
    idFactory
        Factory for unique source IDs in the merged catalog.
    skySeed : `int`
        Seed forwarded to ``getSkySourceFootprints`` for sky-source
        generation.

    Returns
    -------
    result : `lsst.pipe.base.Struct`
        Struct with ``outputCatalog``, the merged source catalog.
    """
    # NOTE(review): the original listing had a stray bare `r` here — the
    # remnant of a truncated r"""docstring""" — which would raise
    # NameError at runtime; replaced by this docstring.
    # Convert configured distances (arcsec) to tract pixel coordinates.
    tractWcs = skyInfo.wcs
    pixelScale = tractWcs.getPixelScale().asArcseconds()
    peakDistance = self.config.minNewPeak / pixelScale
    samePeakDistance = self.config.maxSamePeak / pixelScale

    # Put catalogs, filters in priority order.
    orderedCatalogs = [catalogs[band] for band in self.config.priorityList if band in catalogs]
    orderedBands = [band for band in self.config.priorityList if band in catalogs]

    mergedList = self.merged.getMergedSourceCatalog(orderedCatalogs, orderedBands, peakDistance,
                                                    self.schema, idFactory,
                                                    samePeakDistance)

    #
    # Add extra sources that correspond to blank sky
    #
    skySourceFootprints = self.getSkySourceFootprints(mergedList, skyInfo, skySeed)
    if skySourceFootprints:
        key = mergedList.schema.find("merge_footprint_%s" % self.config.skyFilterName).key
        for foot in skySourceFootprints:
            s = mergedList.addNew()
            s.setFootprint(foot)
            s.set(key, True)

    # Sort Peaks from brightest to faintest
    for record in mergedList:
        record.getFootprint().sortPeaks()
    self.log.info("Merged to %d sources" % len(mergedList))
    # Attempt to remove garbage peaks
    self.cullPeaks(mergedList)
    return Struct(outputCatalog=mergedList)

def cullPeaks(self, catalog):
    """Attempt to remove garbage peaks from the merged catalog, in place.

    A peak is kept if it is sufficiently high-ranked within its family,
    was detected in enough bands, or is well-ranked both absolutely and
    relative to its family size (per ``self.config.cullPeaks``).

    Parameters
    ----------
    catalog
        Merged catalog whose footprints' peak lists are culled in place.
    """
    keys = [item.key for item in self.merged.getPeakSchema().extract("merge_peak_*").values()]
    assert len(keys) > 0, "Error finding flags that associate peaks with their detection bands."
    totalPeaks = 0
    culledPeaks = 0
    for parentSource in catalog:
        # Make a list copy so we can clear the attached PeakCatalog and
        # append the ones we're keeping to it (which is easier than
        # deleting as we iterate).
        keptPeaks = parentSource.getFootprint().getPeaks()
        oldPeaks = list(keptPeaks)
        keptPeaks.clear()
        familySize = len(oldPeaks)
        totalPeaks += familySize
        for rank, peak in enumerate(oldPeaks):
            # sum(...) over a generator: count the bands this peak was
            # detected in (was sum([...]) — needless throwaway list).
            if ((rank < self.config.cullPeaks.rankSufficient)
                or (sum(peak.get(k) for k in keys) >= self.config.cullPeaks.nBandsSufficient)
                or (rank < self.config.cullPeaks.rankConsidered
                    and rank < self.config.cullPeaks.rankNormalizedConsidered * familySize)):
                keptPeaks.append(peak)
            else:
                culledPeaks += 1
    self.log.info("Culled %d of %d peaks" % (culledPeaks, totalPeaks))

def getSchemaCatalogs(self):
    """Return empty catalogs, keyed by dataset name, defining the output schemas."""
    schemaCatalogs = {
        self.config.coaddName + "Coadd_mergeDet": afwTable.SourceCatalog(self.schema),
        self.config.coaddName + "Coadd_peak": afwDetect.PeakCatalog(self.merged.getPeakSchema()),
    }
    return schemaCatalogs

def getSkySourceFootprints(self, mergedList, skyInfo, seed):

Definition at line 376 of file mergeDetections.py.

◆ tract

lsst.pipe.tasks.mergeDetections.tract

Definition at line 189 of file mergeDetections.py.