from collections import defaultdict
import numbers

import numpy as np
import healpy as hp
import healsparse as hsp

import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.sphgeom
from lsst.daf.butler import Formatter
from lsst.skymap import BaseSkyMap
from lsst.utils.timer import timeMethod
from .healSparseMappingProperties import (BasePropertyMap, BasePropertyMapConfig,
                                          PropertyMapMap, compute_approx_psf_size_and_shape)

__all__ = ["HealSparseInputMapTask", "HealSparseInputMapConfig",
           "HealSparseMapFormatter", "HealSparsePropertyMapConnections",
           "HealSparsePropertyMapConfig", "HealSparsePropertyMapTask",
           "ConsolidateHealSparsePropertyMapConnections",
           "ConsolidateHealSparsePropertyMapConfig",
           "ConsolidateHealSparsePropertyMapTask"]
 

class HealSparseMapFormatter(Formatter):
    """Interface for reading and writing healsparse.HealSparseMap files."""
    unsupportedParameters = frozenset()
    supportedExtensions = frozenset({".hsp", ".fit", ".fits"})
    extension = '.hsp'
 
    def read(self, component=None):
        # Docstring inherited from Formatter.read.
        path = self.fileDescriptor.location.path

        if component == 'coverage':
            try:
                data = hsp.HealSparseCoverage.read(path)
            except (OSError, RuntimeError):
                raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")
            return data

        if self.fileDescriptor.parameters is None:
            pixels = None
            degrade_nside = None
        else:
            pixels = self.fileDescriptor.parameters.get('pixels', None)
            degrade_nside = self.fileDescriptor.parameters.get('degrade_nside', None)
        try:
            data = hsp.HealSparseMap.read(path, pixels=pixels, degrade_nside=degrade_nside)
        except (OSError, RuntimeError):
            raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")

        return data
 
    def write(self, inMemoryDataset):
        # Docstring inherited from Formatter.write.
        self.fileDescriptor.location.updateExtension(self.extension)
        inMemoryDataset.write(self.fileDescriptor.location.path, clobber=True)
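
    # Illustrative sketch (not part of the formatter): maps written by this
    # formatter can also be read directly with healsparse, using the same
    # optional parameters handled in read() above.  "example.hsp" and the
    # pixel list are placeholders.
    #
    #   import healsparse as hsp
    #   cov = hsp.HealSparseCoverage.read("example.hsp")
    #   full_map = hsp.HealSparseMap.read("example.hsp")
    #   sub_map = hsp.HealSparseMap.read("example.hsp",
    #                                    pixels=[100, 101],
    #                                    degrade_nside=64)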
 
def _is_power_of_two(value):
    """Check that value is a power of two.

    Returns
    -------
    is_power_of_two : `bool`
        True if value is a power of two; False otherwise, or
        if value is not an integer.
    """
    if not isinstance(value, numbers.Integral):
        return False

    # A power of two has exactly one bit set, so value & (value - 1) is zero
    # only for powers of two; zero itself is excluded explicitly.
    return (value & (value - 1) == 0) and value != 0
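
# Worked example of the bit trick above (values are illustrative):
#
#   _is_power_of_two(64)   # 64 & 63 == 0b1000000 & 0b0111111 == 0 -> True
#   _is_power_of_two(48)   # 48 & 47 == 0b110000 & 0b101111 == 0b100000 -> False
#   _is_power_of_two(0)    # excluded explicitly -> False
#   _is_power_of_two(2.0)  # not an Integral -> False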
 

class HealSparseInputMapConfig(pexConfig.Config):
    """Configuration parameters for HealSparseInputMapTask"""
    nside = pexConfig.Field(
        doc="Mapping healpix nside.  Must be power of 2.",
        dtype=int,
        check=_is_power_of_two,
    )
    nside_coverage = pexConfig.Field(
        doc="HealSparse coverage map nside.  Must be power of 2.",
        dtype=int,
        check=_is_power_of_two,
    )
    bad_mask_min_coverage = pexConfig.Field(
        doc=("Minimum area fraction of a map healpixel pixel that must be "
             "covered by bad pixels to be removed from the input map. "
             "This is approximate."),
        dtype=float,
    )
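
    # Illustrative sketch (hypothetical values, not the defaults): both nside
    # fields are validated with _is_power_of_two.
    #
    #   config = HealSparseInputMapConfig()
    #   config.nside = 4096          # power of two, passes the check
    #   config.nside_coverage = 64   # power of two, passes the check
    #   config.nside = 1000          # fails the _is_power_of_two check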
 

class HealSparseInputMapTask(pipeBase.Task):
    """Task for making a HealSparse input map."""
    ConfigClass = HealSparseInputMapConfig
    _DefaultName = "healSparseInputMap"

    def __init__(self, **kwargs):
        pipeBase.Task.__init__(self, **kwargs)
 
    def build_ccd_input_map(self, bbox, wcs, ccds):
        """Build a map from ccd valid polygons or bounding boxes.

        Parameters
        ----------
        bbox : `lsst.geom.Box2I`
            Bounding box for region to build input map.
        wcs : `lsst.afw.geom.SkyWcs`
            WCS object for region to build input map.
        ccds : `lsst.afw.table.ExposureCatalog`
            Exposure catalog with ccd data from coadd inputs.
        """
        self.ccd_input_map = hsp.HealSparseMap.make_empty(nside_coverage=self.config.nside_coverage,
                                                          nside_sparse=self.config.nside,
                                                          dtype=hsp.WIDE_MASK,
                                                          wide_mask_maxbits=len(ccds))
        self._wcs = wcs
        self._bbox = bbox
        self._ccds = ccds

        pixel_scale = wcs.getPixelScale().asArcseconds()
        hpix_area_arcsec2 = hp.nside2pixarea(self.config.nside, degrees=True)*(3600.**2.)
        self._min_bad = self.config.bad_mask_min_coverage*hpix_area_arcsec2/(pixel_scale**2.)
 
        # Record which ccd/visit corresponds to each wide-mask bit.
        metadata = {}
        for bit, ccd_row in enumerate(ccds):
            metadata[f"B{bit:04d}CCD"] = ccd_row["ccd"]
            metadata[f"B{bit:04d}VIS"] = ccd_row["visit"]
            metadata[f"B{bit:04d}WT"] = ccd_row["weight"]

            ccd_poly = ccd_row.getValidPolygon()

            ccd_poly_radec = self._pixels_to_radec(ccd_row.getWcs(), ccd_poly.convexHull().getVertices())

            # The last vertex, which closes the polygon, is not passed to hsp.Polygon.
            poly = hsp.Polygon(ra=ccd_poly_radec[: -1, 0],
                               dec=ccd_poly_radec[: -1, 1],
                               value=[bit])

        bbox_poly_radec = self._pixels_to_radec(self._wcs,
                                                bbox_afw_poly.convexHull().getVertices())
        bbox_poly = hsp.Polygon(ra=bbox_poly_radec[: -1, 0], dec=bbox_poly_radec[: -1, 1],
                                value=np.arange(self.ccd_input_map.wide_mask_maxbits))
        bbox_poly_map = bbox_poly.get_map_like(self.ccd_input_map)

        cov = self.config.nside_coverage
        ns = self.config.nside
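
        # Illustrative standalone sketch of the healsparse pattern used above
        # (made-up coordinates): a wide-mask map carries one bit per ccd, and a
        # Polygon realized with get_map_like covers the region for that bit.
        #
        #   import numpy as np
        #   import healsparse as hsp
        #   wide = hsp.HealSparseMap.make_empty(nside_coverage=64, nside_sparse=4096,
        #                                       dtype=hsp.WIDE_MASK, wide_mask_maxbits=8)
        #   poly = hsp.Polygon(ra=np.array([20.0, 20.5, 20.5, 20.0]),
        #                      dec=np.array([10.0, 10.0, 10.5, 10.5]),
        #                      value=[0])
        #   poly_map = poly.get_map_like(wide)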
 
    def mask_warp_bbox(self, bbox, visit, mask, bit_mask_value):
        """Mask a subregion from a visit.

        This must be run after build_ccd_input_map initializes
        ccd_input_map.

        Parameters
        ----------
        bbox : `lsst.geom.Box2I`
            Bounding box from region to mask.
        visit : `int`
            Visit number corresponding to warp with mask.
        mask : `lsst.afw.image.MaskX`
            Mask plane from warp exposure.
        bit_mask_value : `int`
            Bit mask to check for bad pixels.

        Raises
        ------
        RuntimeError : Raised if build_ccd_input_map was not run first.
        """
        if self.ccd_input_map is None:
            raise RuntimeError("Must run build_ccd_input_map before mask_warp_bbox")

        # Find the bad pixels and convert them to healpix pixels.
        bad_pixels = np.where(mask.array & bit_mask_value)
        if len(bad_pixels[0]) == 0:
            # No bad pixels in this warp.
            return

        bad_ra, bad_dec = self._wcs.pixelToSkyArray(bad_pixels[1].astype(np.float64),
                                                    bad_pixels[0].astype(np.float64),
                                                    degrees=True)
        bad_hpix = hp.ang2pix(self.config.nside, bad_ra, bad_dec,
                              lonlat=True, nest=True)

        # Count the number of bad image pixels in each healpix pixel.
        min_bad_hpix = bad_hpix.min()
        bad_hpix_count = np.zeros(bad_hpix.max() - min_bad_hpix + 1, dtype=np.int32)
        np.add.at(bad_hpix_count, bad_hpix - min_bad_hpix, 1)

        # Add the non-zero counts to the accumulated per-visit count map.
        pix_to_add, = np.where(bad_hpix_count > 0)

        count_map_arr[primary] = np.clip(count_map_arr[primary], 0, None)

        count_map_arr[f"v{visit}"] = np.clip(count_map_arr[f"v{visit}"], 0, None)
        count_map_arr[f"v{visit}"] += bad_hpix_count[pix_to_add]
 
    def finalize_ccd_input_map_mask(self):
        """Use accumulated mask information to finalize the masking of
        ccd_input_map.

        Raises
        ------
        RuntimeError : Raised if build_ccd_input_map was not run first.
        """
        if self.ccd_input_map is None:
            raise RuntimeError("Must run build_ccd_input_map before finalize_ccd_input_map_mask.")

            # For each visit, mask healpixels whose accumulated bad-pixel count
            # exceeds the _min_bad threshold computed in build_ccd_input_map.
            to_mask, = np.where(count_map_arr[f"v{visit}"] > self._min_bad)
            if to_mask.size == 0:
                continue

    def _pixels_to_radec(self, wcs, pixels):
        """Convert pixels to ra/dec positions using a wcs.

        Parameters
        ----------
        wcs : `lsst.afw.geom.SkyWcs`
            WCS object.
        pixels : `list` [`lsst.geom.Point2D`]
            List of pixels to convert.

        Returns
        -------
        radec : `numpy.ndarray`
            Nx2 array of ra/dec positions associated with pixels.
        """
        sph_pts = wcs.pixelToSky(pixels)
        return np.array([(sph.getRa().asDegrees(), sph.getDec().asDegrees())
                         for sph in sph_pts])
 

class HealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
                                       dimensions=("tract", "band", "skymap",),
                                       defaultTemplates={"coaddName": "deep",
                                                         "calexpType": ""}):
    input_maps = pipeBase.connectionTypes.Input(
        doc="Healsparse bit-wise coadd input maps",
        name="{coaddName}Coadd_inputMap",
        storageClass="HealSparseMap",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    coadd_exposures = pipeBase.connectionTypes.Input(
        doc="Coadded exposures associated with input_maps",
        name="{coaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    visit_summaries = pipeBase.connectionTypes.Input(
        doc="Visit summary tables with aggregated statistics",
        name="{calexpType}visitSummary",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    sky_map = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
 
    # Dynamically create an output connection for every map type of every
    # property in the registry; unused ones are removed in __init__ below.
    for name in BasePropertyMap.registry:
        vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Output(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_min",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Output(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_max",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Output(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_sum",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
 
    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Not every property map in the registry is configured to run;
        # remove the output connections for the ones that are not.
        for name in BasePropertyMap.registry:
            if name not in config.property_maps:
                prop_config = BasePropertyMapConfig()
                prop_config.do_min = False
                prop_config.do_max = False
                prop_config.do_mean = False
                prop_config.do_weighted_mean = False
                prop_config.do_sum = False
            else:
                prop_config = config.property_maps[name]

            if not prop_config.do_min:
                self.outputs.remove(f"{name}_map_min")
            if not prop_config.do_max:
                self.outputs.remove(f"{name}_map_max")
            if not prop_config.do_mean:
                self.outputs.remove(f"{name}_map_mean")
            if not prop_config.do_weighted_mean:
                self.outputs.remove(f"{name}_map_weighted_mean")
            if not prop_config.do_sum:
                self.outputs.remove(f"{name}_map_sum")
 

class HealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
                                  pipelineConnections=HealSparsePropertyMapConnections):
    """Configuration parameters for HealSparsePropertyMapTask"""
    property_maps = BasePropertyMap.registry.makeField(
        multi=True,
        default=["exposure_time",
                 "psf_size",
                 "psf_e1",
                 "psf_e2",
                 "psf_maglim",
                 "sky_noise",
                 "sky_background",
                 "dcr_dra",
                 "dcr_ddec",
                 "dcr_e1",
                 "dcr_e2"],
        doc="Property map computation objects",
    )

    def setDefaults(self):
        self.property_maps["exposure_time"].do_sum = True
        self.property_maps["psf_size"].do_weighted_mean = True
        self.property_maps["psf_e1"].do_weighted_mean = True
        self.property_maps["psf_e2"].do_weighted_mean = True
        self.property_maps["psf_maglim"].do_weighted_mean = True
        self.property_maps["sky_noise"].do_weighted_mean = True
        self.property_maps["sky_background"].do_weighted_mean = True
        self.property_maps["dcr_dra"].do_weighted_mean = True
        self.property_maps["dcr_ddec"].do_weighted_mean = True
        self.property_maps["dcr_e1"].do_weighted_mean = True
        self.property_maps["dcr_e2"].do_weighted_mean = True
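
    # Illustrative override sketch (hypothetical, e.g. from a pipeline config
    # python block): selecting a subset of properties and adding a statistic.
    #
    #   config.property_maps = ["exposure_time", "psf_size"]
    #   config.property_maps["psf_size"].do_max = True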


class HealSparsePropertyMapTask(pipeBase.PipelineTask):
    """Task to compute Healsparse property maps.

    This task will compute individual property maps (per tract, per
    map type, per band).  These maps cover the full coadd tract, and
    are not truncated to the inner tract region.
    """
    ConfigClass = HealSparsePropertyMapConfig
    _DefaultName = "healSparsePropertyMapTask"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.property_maps = PropertyMapMap()
        for name, config, PropertyMapClass in self.config.property_maps.apply():
            self.property_maps[name] = PropertyMapClass(config, name)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        sky_map = inputs.pop("sky_map")

        tract = butlerQC.quantum.dataId["tract"]
        band = butlerQC.quantum.dataId["band"]

        input_map_dict = {ref.dataId["patch"]: ref for ref in inputs["input_maps"]}
        coadd_dict = {ref.dataId["patch"]: ref for ref in inputs["coadd_exposures"]}

        visit_summary_dict = {ref.dataId["visit"]: ref.get()
                              for ref in inputs["visit_summaries"]}

        self.run(sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict)

        # Write the outputs.
        for name, property_map in self.property_maps.items():
            if property_map.config.do_min:
                butlerQC.put(property_map.min_map,
                             getattr(outputRefs, f"{name}_map_min"))
            if property_map.config.do_max:
                butlerQC.put(property_map.max_map,
                             getattr(outputRefs, f"{name}_map_max"))
            if property_map.config.do_mean:
                butlerQC.put(property_map.mean_map,
                             getattr(outputRefs, f"{name}_map_mean"))
            if property_map.config.do_weighted_mean:
                butlerQC.put(property_map.weighted_mean_map,
                             getattr(outputRefs, f"{name}_map_weighted_mean"))
            if property_map.config.do_sum:
                butlerQC.put(property_map.sum_map,
                             getattr(outputRefs, f"{name}_map_sum"))
 
    def run(self, sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict):
        """Run the healsparse property task.

        Parameters
        ----------
        sky_map : Sky map object
        tract : `int`
            Tract number.
        band : `str`
            Band name for logging.
        coadd_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of coadd exposure references.  Keys are patch numbers.
        input_map_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of input map references.  Keys are patch numbers.
        visit_summary_dict : `dict` [`int`: `lsst.afw.table.ExposureCatalog`]
            Dictionary of visit summary tables.  Keys are visit numbers.

        Raises
        ------
        RepeatableQuantumError
            If visit_summary_dict is missing any visits or detectors found in an
            input map.  This leads to an inconsistency between what is in the coadd
            (via the input map) and the visit summary tables which contain data
            needed to compute the maps.
        """
        tract_info = sky_map[tract]

        tract_maps_initialized = False

        for patch in input_map_dict.keys():
            self.log.info("Making maps for band %s, tract %d, patch %d.",
                          band, tract, patch)

            patch_info = tract_info[patch]

            input_map = input_map_dict[patch].get()
            coadd_photo_calib = coadd_dict[patch].get(component="photoCalib")
            coadd_inputs = coadd_dict[patch].get(component="coaddInputs")

            coadd_zeropoint = 2.5*np.log10(coadd_photo_calib.getInstFluxAtZeroMagnitude())

            # Crop the input map to the inner polygon of the patch.
            poly_vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
            patch_radec = self._vertices_to_radec(poly_vertices)
            patch_poly = hsp.Polygon(ra=patch_radec[:, 0], dec=patch_radec[:, 1],
                                     value=np.arange(input_map.wide_mask_maxbits))
            patch_poly_map = patch_poly.get_map_like(input_map)
            input_map = hsp.and_intersection([input_map, patch_poly_map])

            if not tract_maps_initialized:
                # The tract maps are initialized with the first patch, once
                # the map resolution is known.
                nside_coverage = self._compute_nside_coverage_tract(tract_info)
                nside = input_map.nside_sparse

                do_compute_approx_psf = False
                for property_map in self.property_maps:
                    property_map.initialize_tract_maps(nside_coverage, nside)
                    if property_map.requires_psf:
                        do_compute_approx_psf = True

                tract_maps_initialized = True

            valid_pixels, vpix_ra, vpix_dec = input_map.valid_pixels_pos(return_pixels=True)

            # There may be no valid pixels in the patch overlap region.
            if valid_pixels.size == 0:
                continue

            # Initialize the value accumulators for this patch.
            for property_map in self.property_maps:
                property_map.initialize_values(valid_pixels.size)
                property_map.zeropoint = coadd_zeropoint

            total_weights = np.zeros(valid_pixels.size)
            total_inputs = np.zeros(valid_pixels.size, dtype=np.int32)

            for bit, ccd_row in enumerate(coadd_inputs.ccds):
                # Which valid pixels have this ccd as an input?
                inmap, = np.where(input_map.check_bits_pix(valid_pixels, [bit]))

                visit = ccd_row["visit"]
                detector_id = ccd_row["ccd"]
                weight = ccd_row["weight"]

                x, y = ccd_row.getWcs().skyToPixelArray(vpix_ra[inmap], vpix_dec[inmap], degrees=True)
                scalings = self._compute_calib_scale(ccd_row, x, y)

                if do_compute_approx_psf:
                    psf_array = compute_approx_psf_size_and_shape(ccd_row, vpix_ra[inmap], vpix_dec[inmap])
                else:
                    psf_array = None

                total_weights[inmap] += weight
                total_inputs[inmap] += 1

                # Retrieve the correct visitSummary row for this detector.
                if visit not in visit_summary_dict:
                    msg = f"Visit {visit} not found in visit_summaries."
                    raise pipeBase.RepeatableQuantumError(msg)
                row = visit_summary_dict[visit].find(detector_id)
                if row is None:
                    msg = f"Visit {visit} / detector_id {detector_id} not found in visit_summaries."
                    raise pipeBase.RepeatableQuantumError(msg)

                # Accumulate the values for each property map.
                for property_map in self.property_maps:
                    property_map.accumulate_values(inmap,
                                                   vpix_ra[inmap],
                                                   vpix_dec[inmap],
                                                   weight,
                                                   scalings,
                                                   row,
                                                   psf_array=psf_array)

            # Finalize the mean values and set the tract maps for this patch.
            for property_map in self.property_maps:
                property_map.finalize_mean_values(total_weights, total_inputs)
                property_map.set_map_values(valid_pixels)
 
    def _compute_calib_scale(self, ccd_row, x, y):
        """Compute calibration scaling values.

        Parameters
        ----------
        ccd_row : `lsst.afw.table.ExposureRecord`
            Exposure metadata for a given detector exposure.
        x : `np.ndarray`
            Array of x positions.
        y : `np.ndarray`
            Array of y positions.

        Returns
        -------
        calib_scale : `np.ndarray`
            Array of calibration scale values.
        """
        photo_calib = ccd_row.getPhotoCalib()
        bf = photo_calib.computeScaledCalibration()
        if bf.getBBox() == ccd_row.getBBox():
            # Calibration varies over the detector; evaluate at each position.
            calib_scale = photo_calib.getCalibrationMean()*bf.evaluate(x, y)
        else:
            # Spatially constant calibration.
            calib_scale = photo_calib.getCalibrationMean()

        return calib_scale

    def _vertices_to_radec(self, vertices):
        """Convert polygon vertices to ra/dec.

        Parameters
        ----------
        vertices : `list` [ `lsst.sphgeom.UnitVector3d` ]
            Vertices for bounding polygon.

        Returns
        -------
        radec : `numpy.ndarray`
            Nx2 array of ra/dec positions (in degrees) associated with vertices.
        """
        lonlats = [lsst.sphgeom.LonLat(x) for x in vertices]
        radec = np.array([(x.getLon().asDegrees(), x.getLat().asDegrees()) for
                          x in lonlats])
        return radec

    def _compute_nside_coverage_tract(self, tract_info):
        """Compute the optimal coverage nside for a tract.

        Parameters
        ----------
        tract_info : `lsst.skymap.tractInfo.ExplicitTractInfo`
            Tract information object.

        Returns
        -------
        nside_coverage : `int`
            Optimal coverage nside for a tract map.
        """
        num_patches = tract_info.getNumPatches()

        # Estimate the patch area from one patch.
        patch_info = tract_info.getPatchInfo(0)
        vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
        radec = self._vertices_to_radec(vertices)
        delta_ra = np.max(radec[:, 0]) - np.min(radec[:, 0])
        delta_dec = np.max(radec[:, 1]) - np.min(radec[:, 1])
        patch_area = delta_ra*delta_dec*np.cos(np.deg2rad(np.mean(radec[:, 1])))

        tract_area = num_patches[0]*num_patches[1]*patch_area
        # Start with a coarse nside and double it until the coverage pixel
        # area drops below the tract area.
        nside_coverage_tract = 32
        while hp.nside2pixarea(nside_coverage_tract, degrees=True) > tract_area:
            nside_coverage_tract = 2*nside_coverage_tract
        # Step back one factor of 2, but no coarser than nside=32.
        nside_coverage_tract = int(np.clip(nside_coverage_tract/2, 32, None))

        return nside_coverage_tract
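
    # Worked example of the loop above for a made-up tract area of 2.6 deg**2:
    #
    #   hp.nside2pixarea(32, degrees=True)   # ~3.36 deg**2 > 2.6 -> double nside
    #   hp.nside2pixarea(64, degrees=True)   # ~0.84 deg**2 < 2.6 -> stop at 64
    #   int(np.clip(64/2, 32, None))         # -> 32, the returned coverage nside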
 

class ConsolidateHealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
                                                  dimensions=("band", "skymap",),
                                                  defaultTemplates={"coaddName": "deep"}):
    sky_map = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
 
    # Dynamically create an input connection (per-tract map) and an output
    # connection (consolidated map) for every map type of every property in
    # the registry; unused ones are removed in __init__ below.
    for name in BasePropertyMap.registry:
        vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Input(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_min",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_min"] = pipeBase.connectionTypes.Output(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_min",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Input(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_max",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_max"] = pipeBase.connectionTypes.Output(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_max",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Input(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_mean",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Input(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_weighted_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Input(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_sum",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_sum"] = pipeBase.connectionTypes.Output(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_sum",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
 
    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Not every property map in the registry is configured to run;
        # remove the input and output connections for the ones that are not.
        for name in BasePropertyMap.registry:
            if name not in config.property_maps:
                prop_config = BasePropertyMapConfig()
                prop_config.do_min = False
                prop_config.do_max = False
                prop_config.do_mean = False
                prop_config.do_weighted_mean = False
                prop_config.do_sum = False
            else:
                prop_config = config.property_maps[name]

            if not prop_config.do_min:
                self.inputs.remove(f"{name}_map_min")
                self.outputs.remove(f"{name}_consolidated_map_min")
            if not prop_config.do_max:
                self.inputs.remove(f"{name}_map_max")
                self.outputs.remove(f"{name}_consolidated_map_max")
            if not prop_config.do_mean:
                self.inputs.remove(f"{name}_map_mean")
                self.outputs.remove(f"{name}_consolidated_map_mean")
            if not prop_config.do_weighted_mean:
                self.inputs.remove(f"{name}_map_weighted_mean")
                self.outputs.remove(f"{name}_consolidated_map_weighted_mean")
            if not prop_config.do_sum:
                self.inputs.remove(f"{name}_map_sum")
                self.outputs.remove(f"{name}_consolidated_map_sum")
 
class ConsolidateHealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
                                             pipelineConnections=ConsolidateHealSparsePropertyMapConnections):
    """Configuration parameters for ConsolidateHealSparsePropertyMapTask"""
    property_maps = BasePropertyMap.registry.makeField(
        multi=True,
        default=["exposure_time",
                 "psf_size",
                 "psf_e1",
                 "psf_e2",
                 "psf_maglim",
                 "sky_noise",
                 "sky_background",
                 "dcr_dra",
                 "dcr_ddec",
                 "dcr_e1",
                 "dcr_e2"],
        doc="Property map computation objects",
    )

    def setDefaults(self):
        self.property_maps["exposure_time"].do_sum = True
        self.property_maps["psf_size"].do_weighted_mean = True
        self.property_maps["psf_e1"].do_weighted_mean = True
        self.property_maps["psf_e2"].do_weighted_mean = True
        self.property_maps["psf_maglim"].do_weighted_mean = True
        self.property_maps["sky_noise"].do_weighted_mean = True
        self.property_maps["sky_background"].do_weighted_mean = True
        self.property_maps["dcr_dra"].do_weighted_mean = True
        self.property_maps["dcr_ddec"].do_weighted_mean = True
        self.property_maps["dcr_e1"].do_weighted_mean = True
        self.property_maps["dcr_e2"].do_weighted_mean = True


class ConsolidateHealSparsePropertyMapTask(pipeBase.PipelineTask):
    """Task to consolidate HealSparse property maps.

    This task will take all the individual tract-based maps (per map type,
    per band) and consolidate them into one survey-wide map (per map type,
    per band).  Each tract map is truncated to its inner region before
    consolidation.
    """
    ConfigClass = ConsolidateHealSparsePropertyMapConfig
    _DefaultName = "consolidateHealSparsePropertyMapTask"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.property_maps = PropertyMapMap()
        for name, config, PropertyMapClass in self.config.property_maps.apply():
            self.property_maps[name] = PropertyMapClass(config, name)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        sky_map = inputs.pop("sky_map")

        # Consolidate every map type that was configured to run.
        for name in self.config.property_maps.names:
            for type_ in ['min', 'max', 'mean', 'weighted_mean', 'sum']:
                map_type = f"{name}_map_{type_}"
                if map_type in inputs:
                    input_refs = {ref.dataId['tract']: ref
                                  for ref in inputs[map_type]}
                    consolidated_map = self.consolidate_map(sky_map, input_refs)
                    butlerQC.put(consolidated_map,
                                 getattr(outputRefs, f"{name}_consolidated_map_{type_}"))
 
    def consolidate_map(self, sky_map, input_refs):
        """Consolidate the healsparse property maps.

        Parameters
        ----------
        sky_map : Sky map object
        input_refs : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of tract_id mapping to dataref.

        Returns
        -------
        consolidated_map : `healsparse.HealSparseMap`
            Consolidated HealSparse map.
        """
        # First read the coverage maps to determine the combined coverage.
        cov_mask = None
        for tract_id in input_refs:
            cov = input_refs[tract_id].get(component='coverage')
            if cov_mask is None:
                cov_mask = cov.coverage_mask
            else:
                cov_mask |= cov.coverage_mask

        cov_pix, = np.where(cov_mask)

        # Then read each tract map and copy its inner-region pixels.
        consolidated_map = None
        for tract_id in input_refs:
            input_map = input_refs[tract_id].get()
            if consolidated_map is None:
                dtype = input_map.dtype
                sentinel = input_map._sentinel
                nside_coverage = input_map.nside_coverage
                nside_sparse = input_map.nside_sparse
                consolidated_map = hsp.HealSparseMap.make_empty(nside_coverage,
                                                                nside_sparse,
                                                                dtype,
                                                                sentinel=sentinel)
                # Reserve the coverage pixels up front to avoid repeated
                # reallocation as tracts are added.
                consolidated_map._reserve_cov_pix(cov_pix)

            # Only use valid pixels that fall inside this tract.
            vpix, ra, dec = input_map.valid_pixels_pos(return_pixels=True)
            vpix_tract_ids = sky_map.findTractIdArray(ra, dec, degrees=True)

            in_tract = (vpix_tract_ids == tract_id)

            consolidated_map[vpix[in_tract]] = input_map[vpix[in_tract]]

        return consolidated_map
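
    # Illustrative standalone sketch of the consolidation pattern above, using
    # two small made-up maps (map_a, map_b) instead of butler references:
    #
    #   import numpy as np
    #   import healsparse as hsp
    #   combined = hsp.HealSparseMap.make_empty(32, 4096, np.float64)
    #   for tract_map in (map_a, map_b):          # per-tract HealSparseMaps
    #       vpix = tract_map.valid_pixels
    #       combined[vpix] = tract_map[vpix]      # copy only valid pixels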
 