LSST Applications 29.1.0,g0fba68d861+6b120c4394,g123d84c11c+8c5ae1fdc5,g1ec0fe41b4+191117f6ec,g1fd858c14a+c8450ae71a,g3533f9d6cb+a04f9ee0ab,g35bb328faa+8c5ae1fdc5,g3f0dcc2b1b+7df08700bd,g4178042926+b4254969db,g44ba364a48+04455b336b,g53246c7159+8c5ae1fdc5,g60b5630c4e+a04f9ee0ab,g663da51e9b+b05e6e1875,g67b6fd64d1+250bf6acd3,g78460c75b0+7e33a9eb6d,g786e29fd12+668abc6043,g8352419a5c+8c5ae1fdc5,g87e3079a85+d3fa38de54,g8852436030+cd899e2626,g89139ef638+250bf6acd3,g93a033419f+31ead11197,g989de1cb63+250bf6acd3,g9f33ca652e+f6053ecf14,ga1e959baac+5fbc491aed,ga2f891cd6c+a04f9ee0ab,gabe3b4be73+8856018cbb,gabf8522325+1f7e6d67b9,gac2eed3f23+250bf6acd3,gb1101e3267+0c331e9486,gb89ab40317+250bf6acd3,gcf25f946ba+cd899e2626,gd107969129+8964d67276,gd6cbbdb0b4+6bbecc8878,gde0f65d7ad+d65f9e019a,ge278dab8ac+eb3bbeb12f,ge410e46f29+250bf6acd3,gf5e32f922b+8c5ae1fdc5,gff02db199a+747430a128,gffe7e49bb4+a04f9ee0ab
LSST Data Management Base Package
Loading...
Searching...
No Matches
lsst.pipe.tasks.postprocess Namespace Reference

Classes

class  for
 
class  TableVStack
 
class  TransformObjectCatalogConnections
 
class  WriteObjectTableConnections
 

Functions

 flattenFilters (df, noDupCols=["coord_ra", "coord_dec"], camelCase=False, inputBands=None)
 

Variables

 log = logging.getLogger(__name__)
 
 catalogs : `dict`
 
str catalog : `pandas.DataFrame`
 
 result : `~lsst.pipe.base.Struct`
 
 exposure : `lsst.afw.image.exposure.Exposure`
 
 detectorId : `int`
 
 visitSummary : `lsst.afw.table.ExposureCatalog`, optional
 
 newCat : `lsst.afw.table.SourceCatalog`
 
 handles : `~lsst.daf.butler.DeferredDatasetHandle` or `~lsst.pipe.base.InMemoryDatasetHandle`
 
 functors : `list`, `dict`, or `~lsst.pipe.tasks.functors.CompositeFunctor`
 
 filt : `str`, optional
 
 flags : `list`, optional
 
 refFlags : `list`, optional
 
 forcedFlags : `list`, optional
 
 funcs : `~lsst.pipe.tasks.functors.Functor`
 
 inplace
 
 True
 
 drop
 
 primaryKey
 
 df
 
 analysis
 

Function Documentation

◆ flattenFilters()

lsst.pipe.tasks.postprocess.flattenFilters ( df,
noDupCols = ["coord_ra", "coord_dec"],
camelCase = False,
inputBands = None )
Flattens a dataframe with multilevel column index.

Definition at line 67 of file postprocess.py.

67def flattenFilters(df, noDupCols=["coord_ra", "coord_dec"], camelCase=False, inputBands=None):
68 """Flattens a dataframe with multilevel column index.
69 """
70 newDf = pd.DataFrame()
71 # band is the level 0 index
72 dfBands = df.columns.unique(level=0).values
73 for band in dfBands:
74 subdf = df[band]
75 columnFormat = "{0}{1}" if camelCase else "{0}_{1}"
76 newColumns = {c: columnFormat.format(band, c)
77 for c in subdf.columns if c not in noDupCols}
78 cols = list(newColumns.keys())
79 newDf = pd.concat([newDf, subdf[cols].rename(columns=newColumns)], axis=1)
80
81 # Band must be present in the input and output or else column is all NaN:
82 presentBands = dfBands if inputBands is None else list(set(inputBands).intersection(dfBands))
83 # Get the unexploded columns from any present band's partition
84 noDupDf = df[presentBands[0]][noDupCols]
85 newDf = pd.concat([noDupDf, newDf], axis=1)
86 return newDf
87
88

Variable Documentation

◆ analysis

lsst.pipe.tasks.postprocess.analysis

Definition at line 892 of file postprocess.py.

◆ catalog

lsst.pipe.tasks.postprocess.catalog : `pandas.DataFrame`
dfs = []
for filt, tableDict in catalogs.items():
    for dataset, table in tableDict.items():
        # Convert afwTable to pandas DataFrame if needed
        if isinstance(table, pd.DataFrame):
            df = table
        elif isinstance(table, afwTable.SourceCatalog):
            df = table.asAstropy().to_pandas()
        elif isinstance(table, astropy.table.Table):
            df = table.to_pandas()
        else:
            raise ValueError(f"{dataset=} has unsupported {type(table)=}")
        df.set_index("id", drop=True, inplace=True)

        # Sort columns by name, to ensure matching schema among patches
        df = df.reindex(sorted(df.columns), axis=1)
        df = df.assign(tractId=tract, patchId=patch)

        # Make columns a 3-level MultiIndex
        df.columns = pd.MultiIndex.from_tuples([(dataset, filt, c) for c in df.columns],
                                               names=("dataset", "band", "column"))
        dfs.append(df)

# We do this dance and not `pd.concat(dfs)` because the pandas
# concatenation uses infinite memory.
catalog = functools.reduce(lambda d1, d2: d1.join(d2), dfs)
return catalog


class WriteSourceTableConnections(pipeBase.PipelineTaskConnections,
                                  defaultTemplates={"catalogType": ""},
                                  dimensions=("instrument", "visit", "detector")):
    """Connections defining the per-detector ``src`` catalog input and the
    Astropy/Parquet source-table output.

    NOTE(review): the class body in the extracted source had lost its
    indentation (not valid Python); it is restored here with no logic change.
    """

    # Per-detector source catalog from calibration; name is templated on
    # ``catalogType`` so variants can be selected via config.
    catalog = connectionTypes.Input(
        doc="Input full-depth catalog of sources produced by CalibrateTask",
        name="{catalogType}src",
        storageClass="SourceCatalog",
        dimensions=("instrument", "visit", "detector")
    )
    # Same rows, re-serialized; columns are unchanged per the doc string.
    outputCatalog = connectionTypes.Output(
        doc="Catalog of sources, `src` in Astropy/Parquet format.  Columns are unchanged.",
        name="{catalogType}source",
        storageClass="ArrowAstropy",
        dimensions=("instrument", "visit", "detector")
    )


class WriteSourceTableConfig(pipeBase.PipelineTaskConfig,
                             pipelineConnections=WriteSourceTableConnections):
    """Configuration for writing the source table; no options beyond the
    connections themselves.

    NOTE(review): restored the indentation lost in extraction; no logic change.
    """
    pass


class WriteSourceTableTask(pipeBase.PipelineTask):
_DefaultName = "writeSourceTable"
ConfigClass = WriteSourceTableConfig

def runQuantum(self, butlerQC, inputRefs, outputRefs):
    """Fetch the input catalog, tag it with the quantum's visit/detector
    IDs, run the task, and persist the resulting table.
    """
    kwargs = butlerQC.get(inputRefs)
    dataId = butlerQC.quantum.dataId
    kwargs["visit"] = dataId["visit"]
    kwargs["detector"] = dataId["detector"]
    struct = self.run(**kwargs)
    butlerQC.put(pipeBase.Struct(outputCatalog=struct.table), outputRefs)

def run(self, catalog, visit, detector, **kwargs):
if visitSummary is not None:
    row = visitSummary.find(detectorId)
    if row is None:
        raise pipeBase.NoWorkFound(f"Visit summary for detector {detectorId} is missing.")
    if (photoCalib := row.getPhotoCalib()) is None:
        self.log.warning("Detector id %s has None for photoCalib in visit summary; "
                         "skipping reevaluation of photoCalib.", detectorId)
        exposure.setPhotoCalib(None)
    else:
        exposure.setPhotoCalib(photoCalib)
    if (skyWcs := row.getWcs()) is None:
        self.log.warning("Detector id %s has None for skyWcs in visit summary; "
                         "skipping reevaluation of skyWcs.", detectorId)
        exposure.setWcs(None)
    else:
        exposure.setWcs(skyWcs)

return exposure

def addCalibColumns(self, catalog, exposure, **kwargs):

Definition at line 278 of file postprocess.py.

◆ catalogs

lsst.pipe.tasks.postprocess.catalogs : `dict`
_DefaultName = "writeObjectTable"
ConfigClass = WriteObjectTableConfig

# Tag of output dataset written by `MergeSourcesTask.write`
outputDataset = "obj"

def runQuantum(self, butlerQC, inputRefs, outputRefs):
    inputs = butlerQC.get(inputRefs)

    catalogs = defaultdict(dict)
    for dataset, connection in (
        ("meas", "inputCatalogMeas"),
        ("forced_src", "inputCatalogForcedSrc"),
        ("psfs_multiprofit", "inputCatalogPsfsMultiprofit"),
    ):
        for ref, cat in zip(getattr(inputRefs, connection), inputs[connection]):
            catalogs[ref.dataId["band"]][dataset] = cat

    dataId = butlerQC.quantum.dataId
    df = self.run(catalogs=catalogs, tract=dataId["tract"], patch=dataId["patch"])
    outputs = pipeBase.Struct(outputCatalog=df)
    butlerQC.put(outputs, outputRefs)

def run(self, catalogs, tract, patch):

Definition at line 269 of file postprocess.py.

◆ detectorId

lsst.pipe.tasks.postprocess.detectorId : `int`

Definition at line 450 of file postprocess.py.

◆ df

lsst.pipe.tasks.postprocess.df

Definition at line 891 of file postprocess.py.

◆ drop

lsst.pipe.tasks.postprocess.drop

Definition at line 887 of file postprocess.py.

◆ exposure

lsst.pipe.tasks.postprocess.exposure : `lsst.afw.image.exposure.Exposure`
self.log.info("Generating DataFrame from src catalog visit,detector=%i,%i", visit, detector)
tbl = catalog.asAstropy()
tbl["visit"] = visit
# int16 instead of uint8 because databases don't like unsigned bytes.
tbl["detector"] = np.int16(detector)

return pipeBase.Struct(table=tbl)


class WriteRecalibratedSourceTableConnections(WriteSourceTableConnections,
                                              defaultTemplates={"catalogType": ""},
                                              dimensions=("instrument", "visit", "detector", "skymap")):
    """Connections for the recalibrated source table: adds a visit-summary
    input carrying the updated calibration objects.

    NOTE(review): the class body in the extracted source had lost its
    indentation (not valid Python); it is restored here with no logic change.
    """

    visitSummary = connectionTypes.Input(
        doc="Input visit-summary catalog with updated calibration objects.",
        name="finalVisitSummary",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit",),
    )

    def __init__(self, config):
        # We don't want the input catalog here to be an initial existence
        # constraint in QG generation, because that can unfortunately limit the
        # set of data IDs of inputs to other tasks, even those that run earlier
        # (e.g. updateVisitSummary), when the input 'src' catalog is not
        # produced.  It's safer to just use 'visitSummary' existence as an
        # initial constraint, and then let the graph prune out the detectors
        # that don't have a 'src' for this task only.
        self.catalog = dataclasses.replace(self.catalog, deferGraphConstraint=True)


class WriteRecalibratedSourceTableConfig(WriteSourceTableConfig,
                                         pipelineConnections=WriteRecalibratedSourceTableConnections):
    """Configuration for the recalibrated source table: toggles for
    re-evaluating the local photoCalib and local WCS columns.

    NOTE(review): restored the indentation lost in extraction; no logic change.
    """

    doReevaluatePhotoCalib = pexConfig.Field(
        dtype=bool,
        default=True,
        doc=("Add or replace local photoCalib columns"),
    )
    doReevaluateSkyWcs = pexConfig.Field(
        dtype=bool,
        default=True,
        doc=("Add or replace local WCS columns and update the coord columns, coord_ra and coord_dec"),
    )


class WriteRecalibratedSourceTableTask(WriteSourceTableTask):
_DefaultName = "writeRecalibratedSourceTable"
ConfigClass = WriteRecalibratedSourceTableConfig

def runQuantum(self, butlerQC, inputRefs, outputRefs):
    """Fetch inputs, optionally recalibrate the catalog against the visit
    summary, run the task, and persist the resulting table.
    """
    kwargs = butlerQC.get(inputRefs)
    dataId = butlerQC.quantum.dataId
    kwargs["visit"] = dataId["visit"]
    kwargs["detector"] = dataId["detector"]

    cfg = self.config
    if cfg.doReevaluatePhotoCalib or cfg.doReevaluateSkyWcs:
        # Attach the visit-summary calibrations to a bare exposure, then
        # recompute the calibration-derived catalog columns from it.
        kwargs["exposure"] = self.prepareCalibratedExposure(
            exposure=ExposureF(),
            visitSummary=kwargs["visitSummary"],
            detectorId=dataId["detector"]
        )
        kwargs["catalog"] = self.addCalibColumns(**kwargs)

    struct = self.run(**kwargs)
    butlerQC.put(pipeBase.Struct(outputCatalog=struct.table), outputRefs)

def prepareCalibratedExposure(self, exposure, detectorId, visitSummary=None):

Definition at line 448 of file postprocess.py.

◆ filt

lsst.pipe.tasks.postprocess.filt : `str`, optional

Definition at line 600 of file postprocess.py.

◆ flags

lsst.pipe.tasks.postprocess.flags : `list`, optional

Definition at line 605 of file postprocess.py.

◆ forcedFlags

lsst.pipe.tasks.postprocess.forcedFlags : `list`, optional

Definition at line 612 of file postprocess.py.

◆ funcs

lsst.pipe.tasks.postprocess.funcs : `~lsst.pipe.tasks.functors.Functor`

Definition at line 836 of file postprocess.py.

◆ functors

lsst.pipe.tasks.postprocess.functors : `list`, `dict`, or `~lsst.pipe.tasks.functors.CompositeFunctor`

Definition at line 593 of file postprocess.py.

◆ handles

lsst.pipe.tasks.postprocess.handles : `~lsst.daf.butler.DeferredDatasetHandle` or `~lsst.pipe.base.InMemoryDatasetHandle`
measureConfig = SingleFrameMeasurementTask.ConfigClass()
measureConfig.doReplaceWithNoise = False

# Clear all slots, because we aren't running the relevant plugins.
for slot in measureConfig.slots:
    setattr(measureConfig.slots, slot, None)

measureConfig.plugins.names = []
if self.config.doReevaluateSkyWcs:
    measureConfig.plugins.names.add("base_LocalWcs")
    self.log.info("Re-evaluating base_LocalWcs plugin")
if self.config.doReevaluatePhotoCalib:
    measureConfig.plugins.names.add("base_LocalPhotoCalib")
    self.log.info("Re-evaluating base_LocalPhotoCalib plugin")
pluginsNotToCopy = tuple(measureConfig.plugins.names)

# Create a new schema and catalog
# Copy all columns from original except for the ones to reevaluate
aliasMap = catalog.schema.getAliasMap()
mapper = afwTable.SchemaMapper(catalog.schema)
for item in catalog.schema:
    if not item.field.getName().startswith(pluginsNotToCopy):
        mapper.addMapping(item.key)

schema = mapper.getOutputSchema()
measurement = SingleFrameMeasurementTask(config=measureConfig, schema=schema)
schema.setAliasMap(aliasMap)
newCat = afwTable.SourceCatalog(schema)
newCat.extend(catalog, mapper=mapper)

# Fluxes in sourceCatalogs are in counts, so there are no fluxes to
# update here. LocalPhotoCalibs are applied during transform tasks.
# Update coord_ra/coord_dec, which are expected to be positions on the
# sky and are used as such in sdm tables without transform
if self.config.doReevaluateSkyWcs and exposure.wcs is not None:
    afwTable.updateSourceCoords(exposure.wcs, newCat)
    wcsPlugin = measurement.plugins["base_LocalWcs"]
else:
    wcsPlugin = None

if self.config.doReevaluatePhotoCalib and exposure.getPhotoCalib() is not None:
    pcPlugin = measurement.plugins["base_LocalPhotoCalib"]
else:
    pcPlugin = None

for row in newCat:
    if wcsPlugin is not None:
        wcsPlugin.measure(row, exposure)
    if pcPlugin is not None:
        pcPlugin.measure(row, exposure)

return newCat


class PostprocessAnalysis(object):

Definition at line 589 of file postprocess.py.

◆ inplace

lsst.pipe.tasks.postprocess.inplace

Definition at line 887 of file postprocess.py.

◆ log

lsst.pipe.tasks.postprocess.log = logging.getLogger(__name__)

Definition at line 64 of file postprocess.py.

◆ newCat

lsst.pipe.tasks.postprocess.newCat : `lsst.afw.table.SourceCatalog`

Definition at line 500 of file postprocess.py.

◆ primaryKey

lsst.pipe.tasks.postprocess.primaryKey

Definition at line 888 of file postprocess.py.

◆ refFlags

lsst.pipe.tasks.postprocess.refFlags : `list`, optional

Definition at line 609 of file postprocess.py.

◆ result

lsst.pipe.tasks.postprocess.result : `~lsst.pipe.base.Struct`

Definition at line 368 of file postprocess.py.

◆ True

lsst.pipe.tasks.postprocess.True

Definition at line 887 of file postprocess.py.

◆ visitSummary

lsst.pipe.tasks.postprocess.visitSummary : `lsst.afw.table.ExposureCatalog`, optional

Definition at line 452 of file postprocess.py.