from collections import defaultdict
import numbers
import warnings

import numpy as np
import healpy as hp
import healsparse as hsp

import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.sphgeom
from lsst.daf.butler import Formatter
from lsst.skymap import BaseSkyMap
from lsst.utils.timer import timeMethod
from .healSparseMappingProperties import (BasePropertyMap, BasePropertyMapConfig,
                                          PropertyMapMap, compute_approx_psf_size_and_shape)
__all__ = ["HealSparseInputMapTask", "HealSparseInputMapConfig",
           "HealSparseMapFormatter", "HealSparsePropertyMapConnections",
           "HealSparsePropertyMapConfig", "HealSparsePropertyMapTask",
           "ConsolidateHealSparsePropertyMapConnections",
           "ConsolidateHealSparsePropertyMapConfig",
           "ConsolidateHealSparsePropertyMapTask"]
class HealSparseMapFormatter(Formatter):
    """Interface for reading and writing healsparse.HealSparseMap files."""
    unsupportedParameters = frozenset()
    supportedExtensions = frozenset({".hsp", ".fit", ".fits"})
    extension = '.hsp'

    def read(self, component=None):
        # Docstring inherited from Formatter.read.
        path = self.fileDescriptor.location.path

        if component == 'coverage':
            try:
                data = hsp.HealSparseCoverage.read(path)
            except (OSError, RuntimeError):
                raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")

            return data

        if self.fileDescriptor.parameters is None:
            pixels = None
            degrade_nside = None
        else:
            pixels = self.fileDescriptor.parameters.get('pixels', None)
            degrade_nside = self.fileDescriptor.parameters.get('degrade_nside', None)
        try:
            data = hsp.HealSparseMap.read(path, pixels=pixels, degrade_nside=degrade_nside)
        except (OSError, RuntimeError):
            raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")

        return data

    def write(self, inMemoryDataset):
        # Docstring inherited from Formatter.write.
        # Update the location with the formatter-preferred file extension.
        self.fileDescriptor.location.updateExtension(self.extension)
        inMemoryDataset.write(self.fileDescriptor.location.path, clobber=True)
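# Illustrative sketch only (file name and values are hypothetical): the
# 'pixels' and 'degrade_nside' butler parameters handled above map directly
# onto keyword arguments of healsparse.HealSparseMap.read, so a partial,
# degraded read of a stored map looks like
#
#     import healsparse as hsp
#     cov = hsp.HealSparseCoverage.read("propertyMap.hsp")
#     m = hsp.HealSparseMap.read("propertyMap.hsp",
#                                pixels=[100, 101],     # coverage pixels to load
#                                degrade_nside=4096)    # degrade resolution on read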
def _is_power_of_two(value):
    """Check that value is a power of two.

    Parameters
    ----------
    value : `int`
        Value to check.

    Returns
    -------
    is_power_of_two : `bool`
        True if value is a power of two; False otherwise, or
        if value is not an integer.
    """
    if not isinstance(value, numbers.Integral):
        return False

    # A power of two has exactly one bit set, so value & (value - 1) is zero.
    return (value & (value - 1) == 0) and value != 0
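# Illustrative only: a power of two has a single set bit, so subtracting one
# flips all lower bits and the bitwise AND above becomes zero, e.g.
#
#     _is_power_of_two(64)     # True:  0b1000000 & 0b0111111 == 0
#     _is_power_of_two(96)     # False: 0b1100000 & 0b1011111 != 0
#     _is_power_of_two(64.0)   # False: not an Integral type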
class HealSparseInputMapConfig(pexConfig.Config):
    """Configuration parameters for HealSparseInputMapTask"""
    nside = pexConfig.Field(
        doc="Mapping healpix nside.  Must be power of 2.",
        dtype=int,
        check=_is_power_of_two,
    )
    nside_coverage = pexConfig.Field(
        doc="HealSparse coverage map nside.  Must be power of 2.",
        dtype=int,
        check=_is_power_of_two,
    )
    bad_mask_min_coverage = pexConfig.Field(
        doc=("Minimum area fraction of a map healpixel that must be "
             "covered by bad pixels to be removed from the input map. "
             "This is approximate."),
        dtype=float,
    )
class HealSparseInputMapTask(pipeBase.Task):
    """Task for making a HealSparse input map."""
    ConfigClass = HealSparseInputMapConfig
    _DefaultName = "healSparseInputMap"

    def __init__(self, **kwargs):
        pipeBase.Task.__init__(self, **kwargs)
        self.ccd_input_map = None
    def build_ccd_input_map(self, bbox, wcs, ccds):
        """Build a map from ccd valid polygons or bounding boxes.

        Parameters
        ----------
        bbox : `lsst.geom.Box2I`
            Bounding box for region to build input map.
        wcs : `lsst.afw.geom.SkyWcs`
            WCS object for region to build input map.
        ccds : `lsst.afw.table.ExposureCatalog`
            Exposure catalog with ccd data from coadd inputs.
        """
        with warnings.catch_warnings():
            # Healsparse warns when nside_coverage is larger than 128; for
            # patch-sized input maps a high coverage nside is fine, so the
            # warning is suppressed.
            warnings.simplefilter("ignore")
            self.ccd_input_map = hsp.HealSparseMap.make_empty(nside_coverage=self.config.nside_coverage,
                                                              nside_sparse=self.config.nside,
                                                              dtype=hsp.WIDE_MASK,
                                                              wide_mask_maxbits=len(ccds))
        self._wcs = wcs
        self._bbox = bbox
        self._ccds = ccds

        pixel_scale = wcs.getPixelScale().asArcseconds()
        hpix_area_arcsec2 = hp.nside2pixarea(self.config.nside, degrees=True)*(3600.**2.)
        self._min_bad = self.config.bad_mask_min_coverage*hpix_area_arcsec2/(pixel_scale**2.)
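        # Illustrative numbers only (config values here are assumptions, not
        # defaults from this file): with nside=32768 a healpix pixel covers
        # roughly 41.5 arcsec**2; for a 0.2 arcsec/pixel camera that is about
        # 1040 image pixels per healpixel, so bad_mask_min_coverage=0.5 would
        # set self._min_bad to roughly 520 bad pixels.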
        metadata = {}
        for bit, ccd_row in enumerate(ccds):
            metadata[f"B{bit:04d}CCD"] = ccd_row["ccd"]
            metadata[f"B{bit:04d}VIS"] = ccd_row["visit"]
            metadata[f"B{bit:04d}WT"] = ccd_row["weight"]

            ccd_poly = ccd_row.getValidPolygon()
            # Detectors need to be rendered with their own wcs.
            ccd_poly_radec = self._pixels_to_radec(ccd_row.getWcs(), ccd_poly.convexHull().getVertices())

            # Create a ccd healsparse polygon (setting this ccd's bit for the
            # polygon pixels is abridged in this listing).
            poly = hsp.Polygon(ra=ccd_poly_radec[: -1, 0],
                               dec=ccd_poly_radec[: -1, 1],
                               value=[bit])

        # Cut down to the overall bounding box with the associated wcs
        # (construction of bbox_afw_poly from bbox is abridged here).
        bbox_poly_radec = self._pixels_to_radec(self._wcs,
                                                bbox_afw_poly.convexHull().getVertices())
        bbox_poly = hsp.Polygon(ra=bbox_poly_radec[: -1, 0], dec=bbox_poly_radec[: -1, 1],
                                value=np.arange(self.ccd_input_map.wide_mask_maxbits))
        bbox_poly_map = bbox_poly.get_map_like(self.ccd_input_map)
        self.ccd_input_map = hsp.and_intersection([self.ccd_input_map, bbox_poly_map])
        self.ccd_input_map.metadata = metadata

        # A second map with the same geometry accumulates per-visit counts of
        # bad pixels (its dtype/primary arguments are abridged here); the
        # nside-coverage warning is suppressed as above.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            self._ccd_input_bad_count_map = hsp.HealSparseMap.make_empty(
                nside_coverage=self.config.nside_coverage,
                nside_sparse=self.config.nside)
    def mask_warp_bbox(self, bbox, visit, mask, bit_mask_value):
        """Mask a subregion from a visit.

        This must be run after build_ccd_input_map initializes
        the input map.

        Parameters
        ----------
        bbox : `lsst.geom.Box2I`
            Bounding box from region to mask.
        visit : `int`
            Visit number corresponding to warp with mask.
        mask : `lsst.afw.image.MaskX`
            Mask plane from warp exposure.
        bit_mask_value : `int`
            Bit mask to check for bad pixels.

        Raises
        ------
        RuntimeError : Raised if build_ccd_input_map was not run first.
        """
        if self.ccd_input_map is None:
            raise RuntimeError("Must run build_ccd_input_map before mask_warp_bbox")

        # Find the bad pixels and convert them to (ra, dec) positions.
        bad_pixels = np.where(mask.array & bit_mask_value)
        if len(bad_pixels[0]) == 0:
            # No bad pixels
            return

        bad_ra, bad_dec = self._wcs.pixelToSkyArray(bad_pixels[1].astype(np.float64),
                                                    bad_pixels[0].astype(np.float64),
                                                    degrees=True)
        bad_hpix = hp.ang2pix(self.config.nside, bad_ra, bad_dec,
                              lonlat=True, nest=True)

        # Count the number of bad pixels in each healpix pixel.
        min_bad_hpix = bad_hpix.min()
        bad_hpix_count = np.zeros(bad_hpix.max() - min_bad_hpix + 1, dtype=np.int32)
        np.add.at(bad_hpix_count, bad_hpix - min_bad_hpix, 1)
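        # Illustrative only: np.add.at is an unbuffered in-place accumulation,
        # so repeated healpix indices are each counted, e.g.
        #     counts = np.zeros(3, dtype=np.int32)
        #     np.add.at(counts, [0, 1, 1, 1], 1)   # counts -> array([1, 3, 0])
        # which is why it is used for the per-healpixel histogram above.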
        # Add these counts to the per-visit accumulator map (retrieval of
        # count_map_arr and its primary field from the count map is abridged
        # in this listing).
        pix_to_add, = np.where(bad_hpix_count > 0)
        count_map_arr[primary] = np.clip(count_map_arr[primary], 0, None)
        count_map_arr[f"v{visit}"] = np.clip(count_map_arr[f"v{visit}"], 0, None)
        count_map_arr[f"v{visit}"] += bad_hpix_count[pix_to_add]
    def finalize_ccd_input_map_mask(self):
        """Use accumulated mask information to finalize the masking of
        ccd_input_map.

        Raises
        ------
        RuntimeError : Raised if build_ccd_input_map was not run first.
        """
        if self.ccd_input_map is None:
            raise RuntimeError("Must run build_ccd_input_map before finalize_ccd_input_map_mask.")

        # For each visit (the surrounding per-visit loop and the bit clearing
        # are abridged here), healpixels whose accumulated bad-pixel count
        # exceeds the threshold have that visit's bits cleared.
        to_mask, = np.where(count_map_arr[f"v{visit}"] > self._min_bad)
        if to_mask.size == 0:
            continue
    def _pixels_to_radec(self, wcs, pixels):
        """Convert pixels to ra/dec positions using a wcs.

        Parameters
        ----------
        pixels : `list` [`lsst.geom.Point2D`]
            List of pixels to convert.

        Returns
        -------
        radec : `numpy.ndarray`
            Nx2 array of ra/dec positions associated with pixels.
        """
        sph_pts = wcs.pixelToSky(pixels)
        return np.array([(sph.getRa().asDegrees(), sph.getDec().asDegrees())
                         for sph in sph_pts])
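# Note on the connections classes below: they generate one connection per
# registered property map by writing into the class namespace with vars().
# Illustrative only (hypothetical names):
#
#     class Example:
#         for suffix in ("a", "b"):
#             vars()[f"attr_{suffix}"] = suffix.upper()
#     # Example.attr_a == "A" and Example.attr_b == "B"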
class HealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
                                       dimensions=("tract", "band", "skymap",),
                                       defaultTemplates={"coaddName": "deep",
                                                         "calexpType": ""}):
    input_maps = pipeBase.connectionTypes.Input(
        doc="Healsparse bit-wise coadd input maps",
        name="{coaddName}Coadd_inputMap",
        storageClass="HealSparseMap",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    coadd_exposures = pipeBase.connectionTypes.Input(
        doc="Coadded exposures associated with input_maps",
        name="{coaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    visit_summaries = pipeBase.connectionTypes.Input(
        doc="Visit summary tables with aggregated statistics",
        name="{calexpType}visitSummary",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    sky_map = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )

    # Create output connections for all possible maps defined in the registry;
    # vars() sets the class attributes programmatically inside the class body.
    for name in BasePropertyMap.registry:
        vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Output(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_min",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Output(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_max",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Output(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_sum",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Not all possible maps in the registry will be configured to run;
        # remove the connections for those that are not configured.
        for name in BasePropertyMap.registry:
            if name not in config.property_maps:
                prop_config = BasePropertyMapConfig()
                prop_config.do_min = False
                prop_config.do_max = False
                prop_config.do_mean = False
                prop_config.do_weighted_mean = False
                prop_config.do_sum = False
            else:
                prop_config = config.property_maps[name]

            if not prop_config.do_min:
                self.outputs.remove(f"{name}_map_min")
            if not prop_config.do_max:
                self.outputs.remove(f"{name}_map_max")
            if not prop_config.do_mean:
                self.outputs.remove(f"{name}_map_mean")
            if not prop_config.do_weighted_mean:
                self.outputs.remove(f"{name}_map_weighted_mean")
            if not prop_config.do_sum:
                self.outputs.remove(f"{name}_map_sum")
class HealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
                                  pipelineConnections=HealSparsePropertyMapConnections):
    """Configuration parameters for HealSparsePropertyMapTask"""
    property_maps = BasePropertyMap.registry.makeField(
        multiple=True,
        default=["exposure_time",
                 "psf_size",
                 "psf_e1",
                 "psf_e2",
                 "psf_maglim",
                 "sky_noise",
                 "sky_background",
                 "dcr_dra",
                 "dcr_ddec",
                 "dcr_e1",
                 "dcr_e2"],
        doc="Property map computation objects",
    )

    def setDefaults(self):
        self.property_maps["exposure_time"].do_sum = True
        self.property_maps["psf_size"].do_weighted_mean = True
        self.property_maps["psf_e1"].do_weighted_mean = True
        self.property_maps["psf_e2"].do_weighted_mean = True
        self.property_maps["psf_maglim"].do_weighted_mean = True
        self.property_maps["sky_noise"].do_weighted_mean = True
        self.property_maps["sky_background"].do_weighted_mean = True
        self.property_maps["dcr_dra"].do_weighted_mean = True
        self.property_maps["dcr_ddec"].do_weighted_mean = True
        self.property_maps["dcr_e1"].do_weighted_mean = True
        self.property_maps["dcr_e2"].do_weighted_mean = True
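# Illustrative config override (names/values chosen for the example, not
# defaults of this task): additional statistics can be switched on for any
# configured property map, mirroring the setDefaults() calls above, e.g. in a
# pipeline config file:
#
#     config.property_maps["psf_size"].do_max = True
#     config.property_maps["exposure_time"].do_mean = True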
class HealSparsePropertyMapTask(pipeBase.PipelineTask):
    """Task to compute Healsparse property maps.

    This task will compute individual property maps (per tract, per
    map type, per band).  These maps cover the full coadd tract, and
    are not truncated to the inner tract region.
    """
    ConfigClass = HealSparsePropertyMapConfig
    _DefaultName = "healSparsePropertyMapTask"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        self.property_maps = PropertyMapMap()
        for name, config, PropertyMapClass in self.config.property_maps.apply():
            self.property_maps[name] = PropertyMapClass(config, name)
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        sky_map = inputs.pop("sky_map")

        tract = butlerQC.quantum.dataId["tract"]
        band = butlerQC.quantum.dataId["band"]

        input_map_dict = {ref.dataId["patch"]: ref for ref in inputs["input_maps"]}
        coadd_dict = {ref.dataId["patch"]: ref for ref in inputs["coadd_exposures"]}

        visit_summary_dict = {ref.dataId["visit"]: ref.get()
                              for ref in inputs["visit_summaries"]}

        self.run(sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict)

        for name, property_map in self.property_maps.items():
            if property_map.config.do_min:
                butlerQC.put(property_map.min_map,
                             getattr(outputRefs, f"{name}_map_min"))
            if property_map.config.do_max:
                butlerQC.put(property_map.max_map,
                             getattr(outputRefs, f"{name}_map_max"))
            if property_map.config.do_mean:
                butlerQC.put(property_map.mean_map,
                             getattr(outputRefs, f"{name}_map_mean"))
            if property_map.config.do_weighted_mean:
                butlerQC.put(property_map.weighted_mean_map,
                             getattr(outputRefs, f"{name}_map_weighted_mean"))
            if property_map.config.do_sum:
                butlerQC.put(property_map.sum_map,
                             getattr(outputRefs, f"{name}_map_sum"))
    def run(self, sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict):
        """Run the healsparse property task.

        Parameters
        ----------
        sky_map : Sky map object
        tract : `int`
            Tract number.
        band : `str`
            Band name for logging.
        coadd_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of coadd exposure references.  Keys are patch numbers.
        input_map_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of input map references.  Keys are patch numbers.
        visit_summary_dict : `dict` [`int`: `lsst.afw.table.ExposureCatalog`]
            Dictionary of visit summary tables.  Keys are visit numbers.

        Raises
        ------
        RepeatableQuantumError
            If visit_summary_dict is missing any visits or detectors found in an
            input map.  This leads to an inconsistency between what is in the coadd
            (via the input map) and the visit summary tables which contain data
            needed to compute the maps.
        """
        tract_info = sky_map[tract]

        tract_maps_initialized = False

        for patch in input_map_dict.keys():
            self.log.info("Making maps for band %s, tract %d, patch %d.",
                          band, tract, patch)

            patch_info = tract_info[patch]

            input_map = input_map_dict[patch].get()
            coadd_photo_calib = coadd_dict[patch].get(component="photoCalib")
            coadd_inputs = coadd_dict[patch].get(component="coaddInputs")

            coadd_zeropoint = 2.5*np.log10(coadd_photo_calib.getInstFluxAtZeroMagnitude())
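            # Illustrative only: if a zero-magnitude source corresponds to an
            # instrumental flux of 1e12 counts in the coadd (a made-up value),
            # the zeropoint above is 2.5*log10(1e12) = 30 mag.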
            # Crop the input map to the inner polygon of the patch.
            poly_vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
            patch_radec = self._vertices_to_radec(poly_vertices)
            patch_poly = hsp.Polygon(ra=patch_radec[:, 0], dec=patch_radec[:, 1],
                                     value=np.arange(input_map.wide_mask_maxbits))
            patch_poly_map = patch_poly.get_map_like(input_map)
            input_map = hsp.and_intersection([input_map, patch_poly_map])

            if not tract_maps_initialized:
                # Use the first input map to set the nside information for the
                # tract maps.
                nside_coverage = self._compute_nside_coverage_tract(tract_info)
                nside = input_map.nside_sparse

                do_compute_approx_psf = False
                # Initialize the tract maps.
                for property_map in self.property_maps:
                    property_map.initialize_tract_maps(nside_coverage, nside)
                    if property_map.requires_psf:
                        do_compute_approx_psf = True

                tract_maps_initialized = True

            valid_pixels, vpix_ra, vpix_dec = input_map.valid_pixels_pos(return_pixels=True)

            # Skip the patch if it has no valid pixels in its inner region.
            if valid_pixels.size == 0:
                continue

            # Initialize the value accumulators.
            for property_map in self.property_maps:
                property_map.initialize_values(valid_pixels.size)
                property_map.zeropoint = coadd_zeropoint

            # Initialize the weight and counting accumulators.
            total_weights = np.zeros(valid_pixels.size)
            total_inputs = np.zeros(valid_pixels.size, dtype=np.int32)

            for bit, ccd_row in enumerate(coadd_inputs.ccds):
                # Which valid pixels in the map are covered by this ccd?
                inmap, = np.where(input_map.check_bits_pix(valid_pixels, [bit]))

                visit = ccd_row["visit"]
                detector_id = ccd_row["ccd"]
                weight = ccd_row["weight"]

                x, y = ccd_row.getWcs().skyToPixelArray(vpix_ra[inmap], vpix_dec[inmap], degrees=True)
                scalings = self._compute_calib_scale(ccd_row, x, y)

                if do_compute_approx_psf:
                    psf_array = compute_approx_psf_size_and_shape(ccd_row, vpix_ra[inmap], vpix_dec[inmap])
                else:
                    psf_array = None

                total_weights[inmap] += weight
                total_inputs[inmap] += 1

                # Retrieve the matching visit summary row.
                if visit not in visit_summary_dict:
                    msg = f"Visit {visit} not found in visit_summaries."
                    raise pipeBase.RepeatableQuantumError(msg)
                row = visit_summary_dict[visit].find(detector_id)
                if row is None:
                    msg = f"Visit {visit} / detector_id {detector_id} not found in visit_summaries."
                    raise pipeBase.RepeatableQuantumError(msg)

                # Accumulate this ccd's values for the covered pixels.
                for property_map in self.property_maps:
                    property_map.accumulate_values(inmap,
                                                   vpix_ra[inmap],
                                                   vpix_dec[inmap],
                                                   weight,
                                                   scalings,
                                                   row,
                                                   psf_array=psf_array)

            # Finalize the mean values and set the tract map values.
            for property_map in self.property_maps:
                property_map.finalize_mean_values(total_weights, total_inputs)
                property_map.set_map_values(valid_pixels)
    def _compute_calib_scale(self, ccd_row, x, y):
        """Compute calibration scaling values.

        Parameters
        ----------
        ccd_row : `lsst.afw.table.ExposureRecord`
            Exposure metadata for a given detector exposure.
        x : `np.ndarray`
            Array of x positions.
        y : `np.ndarray`
            Array of y positions.

        Returns
        -------
        calib_scale : `np.ndarray`
            Array of calibration scale values.
        """
        photo_calib = ccd_row.getPhotoCalib()
        bf = photo_calib.computeScaledCalibration()
        if bf.getBBox() == ccd_row.getBBox():
            # Track the spatially variable calibration over the detector.
            calib_scale = photo_calib.getCalibrationMean()*bf.evaluate(x, y)
        else:
            # Spatially constant calibration.
            calib_scale = photo_calib.getCalibrationMean()

        return calib_scale
    def _vertices_to_radec(self, vertices):
        """Convert polygon vertices to ra/dec.

        Parameters
        ----------
        vertices : `list` [`lsst.sphgeom.UnitVector3d`]
            Vertices for bounding polygon.

        Returns
        -------
        radec : `numpy.ndarray`
            Nx2 array of ra/dec positions (in degrees) associated with vertices.
        """
        lonlats = [lsst.sphgeom.LonLat(x) for x in vertices]
        radec = np.array([(x.getLon().asDegrees(), x.getLat().asDegrees()) for
                          x in lonlats])
        return radec
    def _compute_nside_coverage_tract(self, tract_info):
        """Compute the optimal coverage nside for a tract.

        Parameters
        ----------
        tract_info : `lsst.skymap.tractInfo.ExplicitTractInfo`
            Tract information object.

        Returns
        -------
        nside_coverage : `int`
            Optimal coverage nside for a tract map.
        """
        num_patches = tract_info.getNumPatches()

        # Compute the approximate area of one patch from its inner sky polygon.
        patch_info = tract_info.getPatchInfo(0)
        vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
        radec = self._vertices_to_radec(vertices)
        delta_ra = np.max(radec[:, 0]) - np.min(radec[:, 0])
        delta_dec = np.max(radec[:, 1]) - np.min(radec[:, 1])
        patch_area = delta_ra*delta_dec*np.cos(np.deg2rad(np.mean(radec[:, 1])))

        tract_area = num_patches[0]*num_patches[1]*patch_area
        # Start with a fairly low nside and increase it until the pixel area
        # drops below the tract area.
        nside_coverage_tract = 32
        while hp.nside2pixarea(nside_coverage_tract, degrees=True) > tract_area:
            nside_coverage_tract = 2*nside_coverage_tract

        # Step back one resolution step, and keep the result between 32 and 128.
        nside_coverage_tract = int(np.clip(nside_coverage_tract/2, 32, 128))

        return nside_coverage_tract
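    # Illustrative only (a made-up tract size): for a tract of roughly
    # 2.6 deg**2, the loop above stops at nside 64 because its pixel area
    # (about 0.84 deg**2) is smaller than the tract, while nside 32 pixels
    # (about 3.36 deg**2) are larger; the final step back and clip then
    # returns nside_coverage = 32, a coverage pixel just larger than the tract.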
class ConsolidateHealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
                                                  dimensions=("band", "skymap",),
                                                  defaultTemplates={"coaddName": "deep"}):
    sky_map = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )

    # Create input and output connections for all possible maps defined in the
    # registry; vars() sets the class attributes programmatically.
    for name in BasePropertyMap.registry:
        vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Input(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_min",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_min"] = pipeBase.connectionTypes.Output(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_min",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Input(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_max",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_max"] = pipeBase.connectionTypes.Output(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_max",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Input(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_mean",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Input(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_weighted_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Input(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_sum",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_sum"] = pipeBase.connectionTypes.Output(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_sum",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Not all possible maps in the registry will be configured to run;
        # remove the connections for those that are not configured.
        for name in BasePropertyMap.registry:
            if name not in config.property_maps:
                prop_config = BasePropertyMapConfig()
                prop_config.do_min = False
                prop_config.do_max = False
                prop_config.do_mean = False
                prop_config.do_weighted_mean = False
                prop_config.do_sum = False
            else:
                prop_config = config.property_maps[name]

            if not prop_config.do_min:
                self.inputs.remove(f"{name}_map_min")
                self.outputs.remove(f"{name}_consolidated_map_min")
            if not prop_config.do_max:
                self.inputs.remove(f"{name}_map_max")
                self.outputs.remove(f"{name}_consolidated_map_max")
            if not prop_config.do_mean:
                self.inputs.remove(f"{name}_map_mean")
                self.outputs.remove(f"{name}_consolidated_map_mean")
            if not prop_config.do_weighted_mean:
                self.inputs.remove(f"{name}_map_weighted_mean")
                self.outputs.remove(f"{name}_consolidated_map_weighted_mean")
            if not prop_config.do_sum:
                self.inputs.remove(f"{name}_map_sum")
                self.outputs.remove(f"{name}_consolidated_map_sum")
class ConsolidateHealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
                                             pipelineConnections=ConsolidateHealSparsePropertyMapConnections):
    """Configuration parameters for ConsolidateHealSparsePropertyMapTask"""
    property_maps = BasePropertyMap.registry.makeField(
        multiple=True,
        default=["exposure_time",
                 "psf_size",
                 "psf_e1",
                 "psf_e2",
                 "psf_maglim",
                 "sky_noise",
                 "sky_background",
                 "dcr_dra",
                 "dcr_ddec",
                 "dcr_e1",
                 "dcr_e2"],
        doc="Property map computation objects",
    )
    nside_coverage = pexConfig.Field(
        doc="Consolidated HealSparse coverage map nside.  Must be power of 2.",
        dtype=int,
        check=_is_power_of_two,
    )

    def setDefaults(self):
        self.property_maps["exposure_time"].do_sum = True
        self.property_maps["psf_size"].do_weighted_mean = True
        self.property_maps["psf_e1"].do_weighted_mean = True
        self.property_maps["psf_e2"].do_weighted_mean = True
        self.property_maps["psf_maglim"].do_weighted_mean = True
        self.property_maps["sky_noise"].do_weighted_mean = True
        self.property_maps["sky_background"].do_weighted_mean = True
        self.property_maps["dcr_dra"].do_weighted_mean = True
        self.property_maps["dcr_ddec"].do_weighted_mean = True
        self.property_maps["dcr_e1"].do_weighted_mean = True
        self.property_maps["dcr_e2"].do_weighted_mean = True
class ConsolidateHealSparsePropertyMapTask(pipeBase.PipelineTask):
    """Task to consolidate HealSparse property maps.

    This task will take all the individual tract-based maps (per map type,
    per band) and consolidate them into one survey-wide map (per map type,
    per band).  Each tract map is truncated to its inner region before
    consolidation.
    """
    ConfigClass = ConsolidateHealSparsePropertyMapConfig
    _DefaultName = "consolidateHealSparsePropertyMapTask"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        self.property_maps = PropertyMapMap()
        for name, config, PropertyMapClass in self.config.property_maps.apply():
            self.property_maps[name] = PropertyMapClass(config, name)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        sky_map = inputs.pop("sky_map")

        # Consolidate one map at a time to conserve memory.
        for name in self.config.property_maps.names:
            for type_ in ['min', 'max', 'mean', 'weighted_mean', 'sum']:
                map_type = f"{name}_map_{type_}"
                if map_type in inputs:
                    input_refs = {ref.dataId['tract']: ref
                                  for ref in inputs[map_type]}
                    consolidated_map = self.consolidate_map(sky_map, input_refs)
                    butlerQC.put(consolidated_map,
                                 getattr(outputRefs, f"{name}_consolidated_map_{type_}"))
    def consolidate_map(self, sky_map, input_refs):
        """Consolidate the healsparse property maps.

        Parameters
        ----------
        sky_map : Sky map object
        input_refs : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of tract_id mapping to dataref.

        Returns
        -------
        consolidated_map : `healsparse.HealSparseMap`
            Consolidated HealSparse map.
        """
        # Read the coverage maps first to determine how much memory to allocate.
        cov_mask = None
        nside_coverage_inputs = None
        for tract_id in input_refs:
            cov = input_refs[tract_id].get(component='coverage')
            if cov_mask is None:
                cov_mask = cov.coverage_mask
                nside_coverage_inputs = cov.nside_coverage
            else:
                cov_mask |= cov.coverage_mask

        cov_pix_inputs, = np.where(cov_mask)

        # Convert the input coverage pixels to the output coverage resolution.
        if nside_coverage_inputs == self.config.nside_coverage:
            cov_pix = cov_pix_inputs
        elif nside_coverage_inputs > self.config.nside_coverage:
            # Converting from higher resolution coverage to lower resolution coverage.
            bit_shift = hsp.utils._compute_bitshift(self.config.nside_coverage,
                                                    nside_coverage_inputs)
            cov_pix = np.right_shift(cov_pix_inputs, bit_shift)
        else:
            # Converting from lower resolution coverage to higher resolution coverage.
            bit_shift = hsp.utils._compute_bitshift(nside_coverage_inputs,
                                                    self.config.nside_coverage)
            cov_pix = np.left_shift(cov_pix_inputs, bit_shift)
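        # Illustrative only (made-up pixel numbers): nest-ordered coverage
        # pixels change resolution by a factor of 4 per nside doubling, so the
        # bit shift is 2*log2(nside_high/nside_low).  Going from nside_coverage
        # 128 down to 32 gives a shift of 4, and pixel 37 maps to 37 >> 4 == 2;
        # going back up, pixel 2 maps to 2 << 4 == 32, its first child pixel.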
        # Build the consolidated map tract by tract, keeping only pixels that
        # fall inside each tract's inner region.
        consolidated_map = None
        for tract_id in input_refs:
            input_map = input_refs[tract_id].get()
            if consolidated_map is None:
                consolidated_map = hsp.HealSparseMap.make_empty(
                    self.config.nside_coverage,
                    input_map.nside_sparse,
                    input_map.dtype,
                    sentinel=input_map._sentinel,
                    cov_pixels=cov_pix)

            # Only use pixels that are properly inside the tract.
            vpix, ra, dec = input_map.valid_pixels_pos(return_pixels=True)
            vpix_tract_ids = sky_map.findTractIdArray(ra, dec, degrees=True)

            in_tract = (vpix_tract_ids == tract_id)

            consolidated_map[vpix[in_tract]] = input_map[vpix[in_tract]]

        return consolidated_map