22"""Tasks for making and manipulating HIPS images."""

__all__ = ["HighResolutionHipsTask", "HighResolutionHipsConfig", "HighResolutionHipsConnections",
           "GenerateHipsTask", "GenerateHipsConfig", "GenerateColorHipsTask", "GenerateColorHipsConfig"]

from collections import defaultdict
import argparse
import io
import math
import re
import sys
import warnings
from datetime import datetime

import numpy as np
import hpgeom as hpg
import healsparse as hsp
from astropy.io import fits
from astropy.visualization.lupton_rgb import AsinhMapping
from PIL import Image

from lsst.sphgeom import RangeSet, HealpixPixelization
from lsst.utils.timer import timeMethod
from lsst.daf.butler import Butler, DatasetRef, Quantum, SkyPixDimension
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.math as afwMath
import lsst.geom as geom
from lsst.afw.geom import makeHpxWcs
from lsst.resources import ResourcePath


class HighResolutionHipsConnections(pipeBase.PipelineTaskConnections,
                                    dimensions=("healpix9", "band"),
                                    defaultTemplates={"coaddName": "deep"}):
    coadd_exposure_handles = pipeBase.connectionTypes.Input(
        doc="Coadded exposures to convert to HIPS format.",
        name="{coaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    hips_exposures = pipeBase.connectionTypes.Output(
        doc="HiPS-compatible HPX image.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        quantum_order = None
        for dim in self.dimensions:
            if dim.startswith("healpix"):
                if quantum_order is not None:
                    raise ValueError("Must not specify more than one quantum healpix dimension.")
                quantum_order = int(dim.split("healpix")[1])
        if quantum_order is None:
            raise ValueError("Must specify a healpix dimension in quantum dimensions.")

        if quantum_order > config.hips_order:
            raise ValueError("Quantum healpix dimension order must not be greater than hips_order")

        order = None
        for dim in self.hips_exposures.dimensions:
            if dim.startswith("healpix"):
                if order is not None:
                    raise ValueError("Must not specify more than one healpix dimension.")
                order = int(dim.split("healpix")[1])
        if order is None:
            raise ValueError("Must specify a healpix dimension in hips_exposure dimensions.")

        if order != config.hips_order:
            raise ValueError("healpix dimension order must match config.hips_order.")


class HighResolutionHipsConfig(pipeBase.PipelineTaskConfig,
                               pipelineConnections=HighResolutionHipsConnections):
104 """Configuration parameters for HighResolutionHipsTask.
108 A HiPS image covers one HEALPix cell, with the HEALPix nside equal to
109 2**hips_order. Each cell
is 'shift_order' orders deeper than the HEALPix
110 cell,
with 2**shift_order x 2**shift_order sub-pixels on a side, which
111 defines the target resolution of the HiPS image. The IVOA recommends
112 shift_order=9,
for 2**9=512 pixels on a side.
115 https://www.ivoa.net/documents/HiPS/20170519/REC-HIPS-1.0-20170519.pdf
116 shows the relationship between hips_order, number of tiles (full
117 sky coverage), cell size,
and sub-pixel size/image resolution (
with
118 the default shift_order=9):
119 +------------+-----------------+--------------+------------------+
120 | hips_order | Number of Tiles | Cell Size | Image Resolution |
121 +============+=================+==============+==================+
122 | 0 | 12 | 58.63 deg | 6.871 arcmin |
123 | 1 | 48 | 29.32 deg | 3.435 arcmin |
124 | 2 | 192 | 14.66 deg | 1.718 arcmin |
125 | 3 | 768 | 7.329 deg | 51.53 arcsec |
126 | 4 | 3072 | 3.665 deg | 25.77 arcsec |
127 | 5 | 12288 | 1.832 deg | 12.88 arcsec |
128 | 6 | 49152 | 54.97 arcmin | 6.442 arcsec |
129 | 7 | 196608 | 27.48 arcmin | 3.221 arcsec |
130 | 8 | 786432 | 13.74 arcmin | 1.61 arcsec |
131 | 9 | 3145728 | 6.871 arcmin | 805.2mas |
132 | 10 | 12582912 | 3.435 arcmin | 402.6mas |
133 | 11 | 50331648 | 1.718 arcmin | 201.3mas |
134 | 12 | 201326592 | 51.53 arcsec | 100.6mas |
135 | 13 | 805306368 | 25.77 arcsec | 50.32mas |
136 +------------+-----------------+--------------+------------------+
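
    # For reference: with the recommended shift_order=9 each tile is
    # 2**9 = 512 pixels on a side, so e.g. hips_order=11 (cell size
    # 1.718 arcmin in the table above) gives a tile pixel scale of roughly
    # 0.2 arcsec (201.3 mas).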
    hips_order = pexConfig.Field(
        doc="HIPS image order.",
        dtype=int,
        default=11,
    )
    shift_order = pexConfig.Field(
        doc="HIPS shift order (such that each tile is 2**shift_order pixels on a side)",
        dtype=int,
        default=9,
    )
    warp = pexConfig.ConfigField(
        dtype=afwMath.Warper.ConfigClass,
        doc="Warper configuration",
    )

    def setDefaults(self):
        self.warp.warpingKernelName = "lanczos5"


class HipsTaskNameDescriptor:
    """Descriptor used to create a DefaultName that matches the order of
    the defined dimensions in the connections class.

    Parameters
    ----------
    prefix : `str`
        The prefix of the Default name, to which the order will be
        appended.
    """
    def __init__(self, prefix):
        # Create a defaultName template with a placeholder for the order.
        self._defaultName = f"{prefix}{{}}"
        self._order = None

    def __get__(self, obj, klass=None):
        if klass is None:
            raise RuntimeError(
                "HipsTaskDescriptor was used in an unexpected context"
            )
        if self._order is None:
            klassDimensions = klass.ConfigClass.ConnectionsClass.dimensions
            for dim in klassDimensions:
                if (match := re.match(r"^healpix(\d*)$", dim)) is not None:
                    self._order = int(match.group(1))
                    break
            else:
                raise RuntimeError(
                    "Could not find healpix dimension in connections class"
                )
        return self._defaultName.format(self._order)
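

# With HighResolutionHipsConnections declaring the quantum dimension
# "healpix9", this descriptor resolves the task's _DefaultName (set below) to
# "highResolutionHips9".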


class HighResolutionHipsTask(pipeBase.PipelineTask):
    """Task for making high resolution HiPS images."""
    ConfigClass = HighResolutionHipsConfig
    _DefaultName = HipsTaskNameDescriptor("highResolutionHips")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.warper = afwMath.Warper.fromConfig(self.config.warp)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        healpix_dim = f"healpix{self.config.hips_order}"

        pixels = [hips_exposure.dataId[healpix_dim]
                  for hips_exposure in outputRefs.hips_exposures]

        outputs = self.run(pixels=pixels, coadd_exposure_handles=inputs["coadd_exposure_handles"])

        hips_exposure_ref_dict = {hips_exposure_ref.dataId[healpix_dim]: hips_exposure_ref
                                  for hips_exposure_ref in outputRefs.hips_exposures}
        for pixel, hips_exposure in outputs.hips_exposures.items():
            butlerQC.put(hips_exposure, hips_exposure_ref_dict[pixel])

    def run(self, pixels, coadd_exposure_handles):
        """Run the HighResolutionHipsTask.

        Parameters
        ----------
        pixels : `Iterable` [ `int` ]
            Iterable of healpix pixels (nest ordering) to warp to.
        coadd_exposure_handles : `list` [`lsst.daf.butler.DeferredDatasetHandle`]
            Handles for the coadd exposures.

        Returns
        -------
        outputs : `lsst.pipe.base.Struct`
            ``hips_exposures`` is a dict with pixel (key) and hips_exposure (value).
        """
        self.log.info("Generating HPX images for %d pixels at order %d", len(pixels), self.config.hips_order)

        npix = 2**self.config.shift_order
        bbox_hpx = geom.Box2I(geom.Point2I(0, 0), geom.Extent2I(npix, npix))

        # Make an empty exposure and an empty list of warps for each pixel.
        exp_hpx_dict = {}
        warp_dict = {}
        for pixel in pixels:
            wcs_hpx = afwGeom.makeHpxWcs(self.config.hips_order, pixel, shift_order=self.config.shift_order)
            exp_hpx = afwImage.ExposureF(bbox_hpx, wcs_hpx)
            exp_hpx_dict[pixel] = exp_hpx
            warp_dict[pixel] = []

        # Loop over the input coadd exposures, warping each one onto every
        # overlapping HPX pixel.
        for handle in coadd_exposure_handles:
            coadd_exp = handle.get()

            for pixel in pixels:
                warped = self.warper.warpExposure(exp_hpx_dict[pixel].getWcs(), coadd_exp, maxBBox=bbox_hpx)

                exp = afwImage.ExposureF(exp_hpx_dict[pixel].getBBox(), exp_hpx_dict[pixel].getWcs())
                exp.maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)

                # Make sure the mask planes, filter, and photoCalib of the
                # output exposure match the input coadd.
                exp_hpx_dict[pixel].mask.conformMaskPlanes(coadd_exp.mask.getMaskPlaneDict())
                exp_hpx_dict[pixel].setFilter(coadd_exp.getFilter())
                exp_hpx_dict[pixel].setPhotoCalib(coadd_exp.getPhotoCalib())

                if warped.getBBox().getArea() == 0 or not np.any(np.isfinite(warped.image.array)):
                    # There is no overlap; skip this pixel/coadd combination.
                    self.log.debug(
                        "No overlap between output HPX %d and input exposure %s",
                        pixel,
                        handle.dataId,
                    )
                    continue

                exp.maskedImage.assign(warped.maskedImage, warped.getBBox())
                warp_dict[pixel].append(exp.maskedImage)

        stats_flags = afwMath.stringToStatisticsProperty("MEAN")
        stats_ctrl = afwMath.StatisticsControl()
        stats_ctrl.setNanSafe(True)
        stats_ctrl.setWeighted(True)
        stats_ctrl.setCalcErrorFromInputVariance(True)

        # Loop over pixels and stack the warps into the output exposures.
        for pixel in pixels:
            exp_hpx_dict[pixel].maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)

            if not warp_dict[pixel]:
                # Nothing overlapped this pixel; drop it rather than persist
                # an empty exposure.
                self.log.debug("No data in HPX pixel %d", pixel)
                exp_hpx_dict.pop(pixel)
                continue

            exp_hpx_dict[pixel].maskedImage = afwMath.statisticsStack(
                warp_dict[pixel],
                stats_flags,
                stats_ctrl,
                [1.0]*len(warp_dict[pixel]),
            )

        return pipeBase.Struct(hips_exposures=exp_hpx_dict)

    @classmethod
    def build_quantum_graph_cli(cls, argv):
        """A command-line interface entry point to `build_quantum_graph`.

        This method provides the implementation for the
        ``build-high-resolution-hips-qg`` script.

        Parameters
        ----------
        argv : `Sequence` [ `str` ]
            Command-line arguments (e.g. ``sys.argv[1:]``).
        """
        parser = cls._make_cli_parser()
        args = parser.parse_args(argv)

        if args.subparser_name is None:
            parser.print_help()
            sys.exit(1)

        pipeline = pipeBase.Pipeline.from_uri(args.pipeline)
        expanded_pipeline = list(pipeline.toExpandedPipeline())

        if len(expanded_pipeline) != 1:
            raise RuntimeError(f"Pipeline file {args.pipeline} may only contain one task.")

        (task_def,) = expanded_pipeline

        butler = Butler(args.butler_config, collections=args.input)

        if args.subparser_name == "segment":
            # Do the segmentation.
            hpix_pixelization = HealpixPixelization(args.hpix_build_order)
            dataset = task_def.connections.coadd_exposure_handles.name
            data_ids = set(butler.registry.queryDataIds("tract", datasets=dataset).expanded())
            region_pixels = []
            for data_id in data_ids:
                region = data_id.region
                pixel_range = hpix_pixelization.envelope(region)
                for r in pixel_range.ranges():
                    region_pixels.extend(range(r[0], r[1]))
            indices = np.unique(region_pixels)

            print(f"Pixels to run at HEALPix order --hpix_build_order {args.hpix_build_order}:")
            for pixel in indices:
                print(pixel)
        elif args.subparser_name == "build":
            # Build the quantum graph.
            build_ranges = RangeSet(sorted(args.pixels))
            qg = cls.build_quantum_graph(
                task_def,
                butler.registry,
                args.hpix_build_order,
                build_ranges,
                where=args.where,
                collections=args.input,
            )
            qg.saveUri(args.save_qgraph)
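
    # Typical workflow: run the ``segment`` subcommand first to list the
    # constraint pixels at --hpix_build_order, then pass those pixels to the
    # ``build`` subcommand to write a QuantumGraph covering them.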

    @classmethod
    def _make_cli_parser(cls):
        """Make the command-line parser.

        Returns
        -------
        parser : `argparse.ArgumentParser`
        """
        parser = argparse.ArgumentParser(
            description="Build a QuantumGraph that runs HighResolutionHipsTask on existing coadd datasets.",
        )
        subparsers = parser.add_subparsers(help="sub-command help", dest="subparser_name")

        parser_segment = subparsers.add_parser("segment",
                                               help="Determine survey segments for workflow.")
        parser_build = subparsers.add_parser("build",
                                             help="Build quantum graph for HighResolutionHipsTask")

        for sub in [parser_segment, parser_build]:
            # Arguments shared by both sub-commands.
            sub.add_argument(
                "--butler_config",
                help="Path to data repository or butler configuration.",
            )
            sub.add_argument(
                "--pipeline",
                help="Pipeline file, limited to one task.",
            )
            sub.add_argument(
                "--input",
                help="Input collection(s) to search for coadd exposures.",
            )
            sub.add_argument(
                "--hpix_build_order",
                type=int,
                help="HEALPix order to segment sky for building quantum graph files.",
            )
            sub.add_argument(
                "--where",
                help="Data ID expression used when querying for input coadd datasets.",
            )

        parser_build.add_argument(
            "--save_qgraph",
            help="Output filename for QuantumGraph.",
        )
        parser_build.add_argument(
            "--pixels",
            type=int,
            nargs="+",
            help="Pixels at --hpix_build_order to generate quantum graph.",
        )

        return parser

    @classmethod
    def build_quantum_graph(
        cls,
        task_def,
        registry,
        constraint_order,
        constraint_ranges,
        where=None,
        collections=None,
    ):
        """Generate a `QuantumGraph` for running just this task.

        This is a temporary workaround for incomplete butler query support for
        HEALPix dimensions.

        Parameters
        ----------
        task_def : `lsst.pipe.base.TaskDef`
            Task definition.
        registry : `lsst.daf.butler.Registry`
            Client for the butler database.  May be read-only.
        constraint_order : `int`
            HEALPix order used to constrain which quanta are generated, via
            ``constraint_ranges``.  This should be a coarser grid (smaller
            order) than the order used for the task's quantum and output data
            IDs, and ideally something between the spatial scale of a patch or
            the data repository's "common skypix" system (usually ``htm7``).
        constraint_ranges : `lsst.sphgeom.RangeSet`
            RangeSet which describes constraint pixels (HEALPix NEST, with order
            constraint_order) to constrain generated quanta.
        where : `str`, optional
            A boolean `str` expression of the form accepted by
            `Registry.queryDatasets` to constrain input datasets.  This may
            contain a constraint on tracts, patches, or bands, but not HEALPix
            indices.  Constraints on tracts and patches should usually be
            unnecessary, however - existing coadds that overlap the given
            HEALPix indices will be selected without such a constraint, and
            providing one may reject some that should normally be included.
        collections : `str` or `Iterable` [ `str` ], optional
            Collection or collections to search for input datasets, in order.
            If not provided, ``registry.defaults.collections`` will be
            searched.
        """
        config = task_def.config

        dataset_types = pipeBase.PipelineDatasetTypes.fromPipeline(pipeline=[task_def], registry=registry)
        (input_dataset_type,) = dataset_types.inputs
        output_dataset_type = dataset_types.outputs[task_def.connections.hips_exposures.name]
        incidental_output_dataset_types = dataset_types.outputs.copy()
        incidental_output_dataset_types.remove(output_dataset_type)
        (hpx_output_dimension,) = (d for d in output_dataset_type.dimensions
                                   if isinstance(d, SkyPixDimension))

        constraint_hpx_pixelization = registry.dimensions[f"healpix{constraint_order}"].pixelization
        common_skypix_name = registry.dimensions.commonSkyPix.name
        common_skypix_pixelization = registry.dimensions.commonSkyPix.pixelization

        task_dimensions = registry.dimensions.extract(task_def.connections.dimensions)
        (hpx_dimension,) = (d for d in task_dimensions if d.name != "band")
        hpx_pixelization = hpx_dimension.pixelization

        if hpx_pixelization.level < constraint_order:
            raise ValueError(f"Quantum order {hpx_pixelization.level} must not be less than "
                             f"constraint order {constraint_order}.")
        hpx_ranges = constraint_ranges.scaled(4**(hpx_pixelization.level - constraint_order))
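        # Note: ``RangeSet.scaled`` above multiplies both bounds of every
        # range, which is how pixel ranges are refined between HEALPix orders:
        # each order splits a pixel into 4, so constraint pixel 10 at order 5
        # becomes the range [10 * 4**6, 11 * 4**6) at order 11.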

        # We can be generous in looking for pixels here, because we constrain
        # by actual patch regions below.
        common_skypix_ranges = RangeSet()
        for begin, end in constraint_ranges:
            for hpx_index in range(begin, end):
                constraint_hpx_region = constraint_hpx_pixelization.pixel(hpx_index)
                common_skypix_ranges |= common_skypix_pixelization.envelope(constraint_hpx_region)

        # To keep the query from getting too big, simplify until we have fewer
        # than 100 ranges.
        for simp in range(1, 10):
            if len(common_skypix_ranges) < 100:
                break
            common_skypix_ranges.simplify(simp)

        # Use binds in lieu of the actual pixel values in the query string,
        # to avoid problems with huge query strings.
        where_terms = []
        bind = {}
        for n, (begin, end) in enumerate(common_skypix_ranges):
            stop = end - 1  # registry range syntax is inclusive
            if begin == stop:
                where_terms.append(f"{common_skypix_name} = cpx{n}")
                bind[f"cpx{n}"] = begin
            else:
                where_terms.append(f"({common_skypix_name} >= cpx{n}a AND {common_skypix_name} <= cpx{n}b)")
                bind[f"cpx{n}a"] = begin
                bind[f"cpx{n}b"] = stop
        if where is None:
            where = " OR ".join(where_terms)
        else:
            where = f"({where}) AND ({' OR '.join(where_terms)})"

        # Query for the input datasets with this constraint, asking for
        # expanded data IDs because the patch regions are needed below.
        input_refs = registry.queryDatasets(
            input_dataset_type,
            where=where,
            bind=bind,
            collections=collections,
        ).expanded()
        inputs_by_patch = defaultdict(set)
        patch_dimensions = registry.dimensions.extract(["patch"])
        for input_ref in input_refs:
            inputs_by_patch[input_ref.dataId.subset(patch_dimensions)].add(input_ref)
        if not inputs_by_patch:
            message_body = "\n".join(input_refs.explain_no_results())
            raise RuntimeError(f"No inputs found:\n{message_body}")

        inputs_by_hpx = defaultdict(set)
        for patch_data_id, input_refs_for_patch in inputs_by_patch.items():
            patch_hpx_ranges = hpx_pixelization.envelope(patch_data_id.region)
            for begin, end in patch_hpx_ranges & hpx_ranges:
                for hpx_index in range(begin, end):
                    inputs_by_hpx[hpx_index].update(input_refs_for_patch)

        # Iterate over the inputs grouped by output healpix pixel and create
        # the quanta.
        quanta = []
        for hpx_index, input_refs_for_hpx_index in inputs_by_hpx.items():
            # Group inputs by band.
            input_refs_by_band = defaultdict(list)
            for input_ref in input_refs_for_hpx_index:
                input_refs_by_band[input_ref.dataId["band"]].append(input_ref)

            # Create a quantum for each band.
            for band, input_refs_for_band in input_refs_by_band.items():
                data_id = registry.expandDataId({hpx_dimension: hpx_index, "band": band})

                hpx_pixel_ranges = RangeSet(hpx_index)
                hpx_output_ranges = hpx_pixel_ranges.scaled(4**(config.hips_order - hpx_pixelization.level))
                output_data_ids = []
                for begin, end in hpx_output_ranges:
                    for hpx_output_index in range(begin, end):
                        output_data_ids.append(
                            registry.expandDataId({hpx_output_dimension: hpx_output_index, "band": band})
                        )
                outputs = {dt: [DatasetRef(dt, data_id)] for dt in incidental_output_dataset_types}
                outputs[output_dataset_type] = [DatasetRef(output_dataset_type, data_id)
                                                for data_id in output_data_ids]
                quanta.append(
                    Quantum(
                        taskName=task_def.taskName,
                        taskClass=task_def.taskClass,
                        dataId=data_id,
                        inputs={input_dataset_type: input_refs_for_band},
                        outputs=outputs,
                    )
                )

        if len(quanta) == 0:
            raise RuntimeError("Given constraints yielded empty quantum graph.")

        return pipeBase.QuantumGraph(quanta={task_def: quanta})
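
    # A rough sketch of driving ``build_quantum_graph`` directly (all values
    # here are hypothetical; the ``build`` subcommand of
    # ``build_quantum_graph_cli`` performs the equivalent steps):
    #
    #     butler = Butler("REPO_CONFIG", collections=["INPUT_COLLECTION"])
    #     pipeline = pipeBase.Pipeline.from_uri("PIPELINE_FILE")
    #     (task_def,) = list(pipeline.toExpandedPipeline())
    #     constraint_ranges = RangeSet(sorted(pixels))
    #     qg = HighResolutionHipsTask.build_quantum_graph(
    #         task_def, butler.registry, hpix_build_order, constraint_ranges,
    #         collections=["INPUT_COLLECTION"])
    #     qg.saveUri("OUTPUT.qgraph")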


class HipsPropertiesSpectralTerm(pexConfig.Config):
    lambda_min = pexConfig.Field(
        doc="Minimum wavelength (nm)",
        dtype=float,
    )
    lambda_max = pexConfig.Field(
        doc="Maximum wavelength (nm)",
        dtype=float,
    )


class HipsPropertiesConfig(pexConfig.Config):
    """Configuration parameters for writing a HiPS properties file."""
    creator_did_template = pexConfig.Field(
        doc=("Unique identifier of the HiPS - Format: IVOID. "
             "Use ``{band}`` to substitute the band name."),
        dtype=str,
    )
    obs_collection = pexConfig.Field(
        doc="Short name of original data set - Format: one word",
        dtype=str,
        optional=True,
    )
    obs_description_template = pexConfig.Field(
        doc=("Data set description - Format: free text, longer free text "
             "description of the dataset. Use ``{band}`` to substitute "
             "the band name."),
        dtype=str,
        optional=True,
    )
    prov_progenitor = pexConfig.ListField(
        doc="Provenance of the original data - Format: free text",
        itemtype=str,
        default=[],
    )
    obs_title_template = pexConfig.Field(
        doc=("Data set title format: free text, but should be short. "
             "Use ``{band}`` to substitute the band name."),
        dtype=str,
    )
    spectral_ranges = pexConfig.ConfigDictField(
        doc=("Mapping from band to lambda_min, lambda_max (nm). May be approximate."),
        keytype=str,
        itemtype=HipsPropertiesSpectralTerm,
        default={},
    )
    initial_ra = pexConfig.Field(
        doc="Initial RA (deg) (default for HiPS viewer). If not set will use a point in MOC.",
        dtype=float,
        optional=True,
    )
    initial_dec = pexConfig.Field(
        doc="Initial Declination (deg) (default for HiPS viewer). If not set will use a point in MOC.",
        dtype=float,
        optional=True,
    )
    initial_fov = pexConfig.Field(
        doc="Initial field-of-view (deg). If not set will use ~1 healpix tile.",
        dtype=float,
        optional=True,
    )
    obs_ack = pexConfig.Field(
        doc="Observation acknowledgements (free text).",
        dtype=str,
        optional=True,
    )
    t_min = pexConfig.Field(
        doc="Time (MJD) of earliest observation included in HiPS",
        dtype=float,
        optional=True,
    )
    t_max = pexConfig.Field(
        doc="Time (MJD) of latest observation included in HiPS",
        dtype=float,
        optional=True,
    )

    def validate(self):
        super().validate()

        if self.obs_collection is not None:
            if re.search(r"\s", self.obs_collection):
                raise ValueError("obs_collection cannot contain any space characters.")

    def setDefaults(self):
        # Defaults are approximate wavelength ranges (nm) for the LSST ugrizy
        # bands.
        u_term = HipsPropertiesSpectralTerm()
        u_term.lambda_min = 330.
        u_term.lambda_max = 400.
        self.spectral_ranges["u"] = u_term
        g_term = HipsPropertiesSpectralTerm()
        g_term.lambda_min = 402.
        g_term.lambda_max = 552.
        self.spectral_ranges["g"] = g_term
        r_term = HipsPropertiesSpectralTerm()
        r_term.lambda_min = 552.
        r_term.lambda_max = 691.
        self.spectral_ranges["r"] = r_term
        i_term = HipsPropertiesSpectralTerm()
        i_term.lambda_min = 691.
        i_term.lambda_max = 818.
        self.spectral_ranges["i"] = i_term
        z_term = HipsPropertiesSpectralTerm()
        z_term.lambda_min = 818.
        z_term.lambda_max = 922.
        self.spectral_ranges["z"] = z_term
        y_term = HipsPropertiesSpectralTerm()
        y_term.lambda_min = 970.
        y_term.lambda_max = 1060.
        self.spectral_ranges["y"] = y_term


class GenerateHipsConnections(pipeBase.PipelineTaskConnections,
                              dimensions=("instrument", "band"),
                              defaultTemplates={"coaddName": "deep"}):
    hips_exposure_handles = pipeBase.connectionTypes.Input(
        doc="HiPS-compatible HPX images.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
        deferLoad=True,
    )


class GenerateHipsConfig(pipeBase.PipelineTaskConfig,
                         pipelineConnections=GenerateHipsConnections):
    """Configuration parameters for GenerateHipsTask."""
    hips_base_uri = pexConfig.Field(
        doc="URI to HiPS base for output.",
        dtype=str,
    )
    min_order = pexConfig.Field(
        doc="Minimum healpix order for HiPS tree.",
        dtype=int,
    )
    properties = pexConfig.ConfigField(
        dtype=HipsPropertiesConfig,
        doc="Configuration for properties file.",
    )
    allsky_tilesize = pexConfig.Field(
        dtype=int,
        doc="Allsky.png tile size. Must be power of 2.",
    )
    png_gray_asinh_minimum = pexConfig.Field(
        doc="AsinhMapping intensity to be mapped to black for grayscale png scaling.",
        dtype=float,
    )
    png_gray_asinh_stretch = pexConfig.Field(
        doc="AsinhMapping linear stretch for grayscale png scaling.",
        dtype=float,
    )
    png_gray_asinh_softening = pexConfig.Field(
        doc="AsinhMapping softening parameter (Q) for grayscale png scaling.",
        dtype=float,
    )


class GenerateHipsTask(pipeBase.PipelineTask):
    """Task for making a HiPS tree with FITS and grayscale PNGs."""
    ConfigClass = GenerateHipsConfig
    _DefaultName = "generateHips"

    color_task = False

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        dims = inputRefs.hips_exposure_handles[0].dataId.names
        order = None
        for dim in dims:
            if "healpix" in dim:
                order = int(dim.split("healpix")[1])
                healpix_dim = dim
                break
        if order is None:
            raise RuntimeError("Could not determine healpix order for input exposures.")

        hips_exposure_handle_dict = {
            (hips_exposure_handle.dataId[healpix_dim],
             hips_exposure_handle.dataId["band"]): hips_exposure_handle
            for hips_exposure_handle in inputs["hips_exposure_handles"]
        }

        data_bands = {hips_exposure_handle.dataId["band"]
                      for hips_exposure_handle in inputs["hips_exposure_handles"]}
        bands = self._check_data_bands(data_bands)

        self.run(
            bands=bands,
            max_order=order,
            hips_exposure_handle_dict=hips_exposure_handle_dict,
            do_color=self.color_task,
        )

    def _check_data_bands(self, data_bands):
        """Check that the data has only a single band.

        Parameters
        ----------
        data_bands : `set` [`str`]
            Bands from the input data.

        Returns
        -------
        bands : `list` [`str`]
            List of single band to process.

        Raises
        ------
        RuntimeError
            Raised if there is not exactly one band.
        """
        if len(data_bands) != 1:
            raise RuntimeError("GenerateHipsTask can only use data from a single band.")

        return list(data_bands)

    def run(self, bands, max_order, hips_exposure_handle_dict, do_color=False):
        """Run the GenerateHipsTask.

        Parameters
        ----------
        bands : `list` [ `str` ]
            List of bands to be processed (or single band).
        max_order : `int`
            HEALPix order of the maximum (native) HPX exposures.
        hips_exposure_handle_dict : `dict` {`int`: `lsst.daf.butler.DeferredDatasetHandle`}
            Dict of handles for the HiPS high-resolution exposures.
            Key is (pixel number, ``band``).
        do_color : `bool`, optional
            Do color pngs instead of per-band grayscale.
        """
        min_order = self.config.min_order

        if not do_color:
            png_grayscale_mapping = AsinhMapping(
                self.config.png_gray_asinh_minimum,
                self.config.png_gray_asinh_stretch,
                Q=self.config.png_gray_asinh_softening,
            )
        else:
            png_color_mapping = AsinhMapping(
                self.config.png_color_asinh_minimum,
                self.config.png_color_asinh_stretch,
                Q=self.config.png_color_asinh_softening,
            )

            bcb = self.config.blue_channel_band
            gcb = self.config.green_channel_band
            rcb = self.config.red_channel_band
            colorstr = f"{bcb}{gcb}{rcb}"

        hips_base_path = ResourcePath(self.config.hips_base_uri, forceDirectory=True)

        # The pixels to process are the keys of the handle dict.
        pixels = np.unique(np.array([pixel for pixel, _ in hips_exposure_handle_dict.keys()]))

        # Add a sentinel pixel at the end.
        pixels = np.append(pixels, [0])

        # Convert the pixels to each order that will be generated.
        pixels_shifted = {}
        pixels_shifted[max_order] = pixels
        for order in range(max_order - 1, min_order - 1, -1):
            pixels_shifted[order] = np.right_shift(pixels_shifted[order + 1], 2)

        # And set the sentinel to an illegal pixel value at every order.
        for order in range(min_order, max_order + 1):
            pixels_shifted[order][-1] = -1
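
        # Each right shift by two bits maps a NEST pixel to its parent at the
        # next coarser order (e.g. np.right_shift(1234, 2) == 308).  The
        # sentinel appended above is set to -1 at every order so that the last
        # real pixel never matches its successor and the final tiles are
        # always flushed.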

        # Read in the first pixel to determine the image geometry; recover the
        # shift order from the tile width (e.g. 512 pixels -> shift_order 9).
        exp0 = list(hips_exposure_handle_dict.values())[0].get()
        bbox = exp0.getBBox()
        npix = bbox.getWidth()
        shift_order = int(np.round(np.log2(npix)))

        # Create blank exposures for each band/order to accumulate into.
        exposures = {}
        for band in bands:
            for order in range(min_order, max_order + 1):
                exp = exp0.Factory(bbox=bbox)
                exp.image.array[:, :] = np.nan
                exposures[(band, order)] = exp

        # Loop over all pixels, avoiding the sentinel pixel at the end.
        for pixel_counter, pixel in enumerate(pixels[:-1]):
            self.log.debug("Working on high resolution pixel %d", pixel)
            for band in bands:
                # Read the high-resolution (max order) exposure for this pixel
                # if the band has coverage here.
                if (pixel, band) in hips_exposure_handle_dict:
                    exposures[(band, max_order)] = hips_exposure_handle_dict[(pixel, band)].get()

            # Go up the HiPS tree.  A given order is only written out (and
            # binned into its parent) once its pixel is finished, which is the
            # case when the next pixel maps to a different parent.
            for order in range(max_order, min_order - 1, -1):
                if pixels_shifted[order][pixel_counter + 1] == pixels_shifted[order][pixel_counter]:
                    # This order is not done, and so none of the coarser
                    # orders will be either.
                    break

                if not do_color:
                    for band in bands:
                        self._write_hips_image(
                            hips_base_path.join(f"band_{band}", forceDirectory=True),
                            order,
                            pixels_shifted[order][pixel_counter],
                            exposures[(band, order)].image,
                            png_grayscale_mapping,
                            shift_order=shift_order,
                        )
                else:
                    self._write_hips_color_png(
                        hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                        order,
                        pixels_shifted[order][pixel_counter],
                        exposures[(self.config.red_channel_band, order)].image,
                        exposures[(self.config.green_channel_band, order)].image,
                        exposures[(self.config.blue_channel_band, order)].image,
                        png_color_mapping,
                    )

                log_level = self.log.INFO if order == (max_order - 3) else self.log.DEBUG
                self.log.log(
                    log_level,
                    "Completed HiPS generation for %s, order %d, pixel %d (%d/%d)",
                    ",".join(bands),
                    order,
                    pixels_shifted[order][pixel_counter],
                    pixel_counter,
                    len(pixels) - 1,
                )

                # At the minimum order there is no coarser level to fill, so
                # just clear the buffers and move on.
                if order == min_order:
                    for band in bands:
                        exposures[(band, order)].image.array[:, :] = np.nan
                    continue

                # Bin the image 2x2 to fill the next coarser order.
                for band in bands:
                    arr = exposures[(band, order)].image.array.reshape(npix//2, 2, npix//2, 2)
                    with warnings.catch_warnings():
                        warnings.simplefilter("ignore")
                        binned_image_arr = np.nanmean(arr, axis=(1, 3))

                    # Determine which of the four NEST sub-pixels of the parent
                    # this pixel occupies, and fill the matching quadrant.
                    sub_index = (pixels_shifted[order][pixel_counter]
                                 - np.left_shift(pixels_shifted[order - 1][pixel_counter], 2))

                    exp = exposures[(band, order - 1)]

                    if sub_index == 0:
                        exp.image.array[npix//2:, 0: npix//2] = binned_image_arr
                    elif sub_index == 1:
                        exp.image.array[0: npix//2, 0: npix//2] = binned_image_arr
                    elif sub_index == 2:
                        exp.image.array[npix//2:, npix//2:] = binned_image_arr
                    elif sub_index == 3:
                        exp.image.array[0: npix//2, npix//2:] = binned_image_arr
                    else:
                        # Should be impossible for a valid NEST pixel.
                        raise ValueError("Illegal pixel sub index")

                    # Clear the buffer for this order; the highest order is
                    # refilled directly from the input handles.
                    if order < max_order:
                        exposures[(band, order)].image.array[:, :] = np.nan

        # Write the properties files and MOCs.
        if not do_color:
            for band in bands:
                band_pixels = np.array([pixel
                                        for pixel, band_ in hips_exposure_handle_dict.keys()
                                        if band_ == band])
                band_pixels = np.sort(band_pixels)

                self._write_properties_and_moc(
                    hips_base_path.join(f"band_{band}", forceDirectory=True),
                    max_order,
                    band_pixels,
                    exp0,
                    shift_order,
                    band,
                    False,
                )
                self._write_allsky_file(
                    hips_base_path.join(f"band_{band}", forceDirectory=True),
                    min_order,
                )
        else:
            self._write_properties_and_moc(
                hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                max_order,
                pixels[:-1],
                exp0,
                shift_order,
                colorstr,
                True,
            )
            self._write_allsky_file(
                hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                min_order,
            )

    def _write_hips_image(self, hips_base_path, order, pixel, image, png_mapping, shift_order=9):
        """Write a HiPS image.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        order : `int`
            HEALPix order of the HiPS image to write.
        pixel : `int`
            HEALPix pixel of the HiPS image.
        image : `lsst.afw.image.Image`
            Image to write.
        png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
            Mapping to convert image to scaled png.
        shift_order : `int`, optional
            HPX shift order.
        """
        # Make the filename following the HiPS directory layout
        # (NorderK/DirN/NpixM).
        dir_number = self._get_dir_number(pixel)
        hips_dir = hips_base_path.join(
            f"Norder{order}",
            forceDirectory=True
        ).join(
            f"Dir{dir_number}",
            forceDirectory=True
        )

        wcs = makeHpxWcs(order, pixel, shift_order=shift_order)

        uri = hips_dir.join(f"Npix{pixel}.fits")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            image.writeFits(temporary_uri.ospath, metadata=wcs.getFitsMetadata())

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
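
        # The grayscale png is written inverted (255 minus the mapped value),
        # with non-finite or negative pixels forced to 0, and the row order is
        # flipped because afw/FITS images store row 0 at the bottom while PNG
        # rows run top to bottom.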
        vals = 255 - png_mapping.map_intensity_to_uint8(image.array).astype(np.uint8)
        vals[~np.isfinite(image.array) | (image.array < 0)] = 0
        im = Image.fromarray(vals[::-1, :], "L")

        uri = hips_dir.join(f"Npix{pixel}.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            im.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _write_hips_color_png(
        self,
        hips_base_path,
        order,
        pixel,
        image_red,
        image_green,
        image_blue,
        png_mapping,
    ):
        """Write a color png HiPS image.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        order : `int`
            HEALPix order of the HiPS image to write.
        pixel : `int`
            HEALPix pixel of the HiPS image.
        image_red : `lsst.afw.image.Image`
            Input for red channel of output png.
        image_green : `lsst.afw.image.Image`
            Input for green channel of output png.
        image_blue : `lsst.afw.image.Image`
            Input for blue channel of output png.
        png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
            Mapping to convert image to scaled png.
        """
        # Make the filename following the HiPS directory layout.
        dir_number = self._get_dir_number(pixel)
        hips_dir = hips_base_path.join(
            f"Norder{order}",
            forceDirectory=True
        ).join(
            f"Dir{dir_number}",
            forceDirectory=True
        )

        # Convert nans to the minimum values in the mapping.
        arr_red = image_red.array.copy()
        arr_red[np.isnan(arr_red)] = png_mapping.minimum[0]
        arr_green = image_green.array.copy()
        arr_green[np.isnan(arr_green)] = png_mapping.minimum[1]
        arr_blue = image_blue.array.copy()
        arr_blue[np.isnan(arr_blue)] = png_mapping.minimum[2]

        image_array = png_mapping.make_rgb_image(arr_red, arr_green, arr_blue)

        im = Image.fromarray(image_array[::-1, :, :], mode="RGB")

        uri = hips_dir.join(f"Npix{pixel}.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            im.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _write_properties_and_moc(
        self,
        hips_base_path,
        max_order,
        pixels,
        exposure,
        shift_order,
        band,
        multiband,
    ):
        """Write HiPS properties file and MOC.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        max_order : `int`
            Maximum HEALPix order.
        pixels : `np.ndarray` (N,)
            Array of pixels used.
        exposure : `lsst.afw.image.ExposureF`
            Sample HPX exposure used for generating HiPS tiles.
        shift_order : `int`
            HPX shift order.
        band : `str`
            Band (or color string) for the HiPS tree.
        multiband : `bool`
            Is band multiband / color?
        """
        area = hpg.nside_to_pixel_area(2**max_order, degrees=True)*len(pixels)

        initial_ra = self.config.properties.initial_ra
        initial_dec = self.config.properties.initial_dec
        initial_fov = self.config.properties.initial_fov

        if initial_ra is None or initial_dec is None or initial_fov is None:
            # If any initial pointing is unset, point at (roughly) the median
            # pixel of the footprint.
            temp_pixels = pixels.copy()
            if temp_pixels.size % 2 == 0:
                temp_pixels = np.append(temp_pixels, [temp_pixels[0]])
            medpix = int(np.median(temp_pixels))
            _initial_ra, _initial_dec = hpg.pixel_to_angle(2**max_order, medpix)
            _initial_fov = hpg.nside_to_resolution(2**max_order, units='arcminutes')/60.

            if initial_ra is None or initial_dec is None:
                initial_ra = _initial_ra
                initial_dec = _initial_dec
            if initial_fov is None:
                initial_fov = _initial_fov

        self._write_hips_properties_file(
            hips_base_path,
            self.config.properties,
            band,
            multiband,
            exposure,
            max_order,
            shift_order,
            area,
            initial_ra,
            initial_dec,
            initial_fov,
        )

        # Write the MOC coverage.
        self._write_hips_moc_file(
            hips_base_path,
            max_order,
            pixels,
        )

    def _write_hips_properties_file(
        self,
        hips_base_path,
        properties_config,
        band,
        multiband,
        exposure,
        max_order,
        shift_order,
        area,
        initial_ra,
        initial_dec,
        initial_fov,
    ):
        """Write HiPS properties file.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            ResourcePath at top of HiPS tree.  File will be written
            to this path as ``properties``.
        properties_config : `lsst.pipe.tasks.hips.HipsPropertiesConfig`
            Configuration for properties values.
        band : `str`
            Name of band(s) for HiPS tree.
        multiband : `bool`
            Is multiband / color?
        exposure : `lsst.afw.image.ExposureF`
            Sample HPX exposure used for generating HiPS tiles.
        max_order : `int`
            Maximum HEALPix order.
        shift_order : `int`
            HPX shift order.
        area : `float`
            Coverage area in square degrees.
        initial_ra : `float`
            Initial HiPS RA position (degrees).
        initial_dec : `float`
            Initial HiPS Dec position (degrees).
        initial_fov : `float`
            Initial HiPS display size (degrees).
        """

        def _write_property(fh, name, value):
            """Write a property name/value to a file handle.

            Parameters
            ----------
            fh : file handle
                File handle to write to.
            name : `str`
                Name of property.
            value : `str`
                Value of property.
            """
            # The HiPS standard does not allow spaces or "=" in property names.
            if re.search(r"\s", name):
                raise ValueError(f"``{name}`` cannot contain any space characters.")
            if "=" in name:
                raise ValueError(f"``{name}`` cannot contain an ``=``")

            fh.write(f"{name:25}= {value}\n")

        # FITS BITPIX for the image data type.
        if exposure.image.array.dtype == np.dtype("float32"):
            bitpix = -32
        elif exposure.image.array.dtype == np.dtype("float64"):
            bitpix = -64
        elif exposure.image.array.dtype == np.dtype("int32"):
            bitpix = 32
        else:
            raise ValueError(f"Unsupported image dtype {exposure.image.array.dtype}")

        date_iso8601 = datetime.utcnow().isoformat(timespec="seconds") + "Z"
        pixel_scale = hpg.nside_to_resolution(2**(max_order + shift_order), units='degrees')

        uri = hips_base_path.join("properties")
        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            with open(temporary_uri.ospath, "w") as fh:
                _write_property(
                    fh,
                    "creator_did",
                    properties_config.creator_did_template.format(band=band),
                )
                if properties_config.obs_collection is not None:
                    _write_property(fh, "obs_collection", properties_config.obs_collection)
                _write_property(
                    fh,
                    "obs_title",
                    properties_config.obs_title_template.format(band=band),
                )
                if properties_config.obs_description_template is not None:
                    _write_property(
                        fh,
                        "obs_description",
                        properties_config.obs_description_template.format(band=band),
                    )
                if len(properties_config.prov_progenitor) > 0:
                    for prov_progenitor in properties_config.prov_progenitor:
                        _write_property(fh, "prov_progenitor", prov_progenitor)
                if properties_config.obs_ack is not None:
                    _write_property(fh, "obs_ack", properties_config.obs_ack)
                _write_property(fh, "obs_regime", "Optical")
                _write_property(fh, "data_pixel_bitpix", str(bitpix))
                _write_property(fh, "dataproduct_type", "image")
                _write_property(fh, "moc_sky_fraction", str(area/41253.))
                _write_property(fh, "data_ucd", "phot.flux")
                _write_property(fh, "hips_creation_date", date_iso8601)
                _write_property(fh, "hips_builder", "lsst.pipe.tasks.hips.GenerateHipsTask")
                _write_property(fh, "hips_creator", "Vera C. Rubin Observatory")
                _write_property(fh, "hips_version", "1.4")
                _write_property(fh, "hips_release_date", date_iso8601)
                _write_property(fh, "hips_frame", "equatorial")
                _write_property(fh, "hips_order", str(max_order))
                _write_property(fh, "hips_tile_width", str(exposure.getBBox().getWidth()))
                _write_property(fh, "hips_status", "private master clonableOnce")
                if multiband:
                    _write_property(fh, "hips_tile_format", "png")
                    _write_property(fh, "dataproduct_subtype", "color")
                else:
                    _write_property(fh, "hips_tile_format", "png fits")
                _write_property(fh, "hips_pixel_bitpix", str(bitpix))
                _write_property(fh, "hips_pixel_scale", str(pixel_scale))
                _write_property(fh, "hips_initial_ra", str(initial_ra))
                _write_property(fh, "hips_initial_dec", str(initial_dec))
                _write_property(fh, "hips_initial_fov", str(initial_fov))
                if multiband:
                    if self.config.blue_channel_band in properties_config.spectral_ranges:
                        em_min = properties_config.spectral_ranges[
                            self.config.blue_channel_band].lambda_min/1e9
                    else:
                        self.log.warning("blue band %s not in self.config.spectral_ranges.", band)
                        # Assumed fallback value when the band is not configured.
                        em_min = 3e-7
                    if self.config.red_channel_band in properties_config.spectral_ranges:
                        em_max = properties_config.spectral_ranges[
                            self.config.red_channel_band].lambda_max/1e9
                    else:
                        self.log.warning("red band %s not in self.config.spectral_ranges.", band)
                        # Assumed fallback value when the band is not configured.
                        em_max = 1e-6
                else:
                    if band in properties_config.spectral_ranges:
                        em_min = properties_config.spectral_ranges[band].lambda_min/1e9
                        em_max = properties_config.spectral_ranges[band].lambda_max/1e9
                    else:
                        self.log.warning("band %s not in self.config.spectral_ranges.", band)
                        # Assumed fallback values when the band is not configured.
                        em_min = 3e-7
                        em_max = 1e-6
                _write_property(fh, "em_min", str(em_min))
                _write_property(fh, "em_max", str(em_max))
                if properties_config.t_min is not None:
                    _write_property(fh, "t_min", properties_config.t_min)
                if properties_config.t_max is not None:
                    _write_property(fh, "t_max", properties_config.t_max)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _write_hips_moc_file(self, hips_base_path, max_order, pixels, min_uniq_order=1):
        """Write HiPS MOC file.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            ResourcePath to top of HiPS tree.  File will be written
            to this path as ``Moc.fits``.
        max_order : `int`
            Maximum HEALPix order.
        pixels : `np.ndarray`
            Array of pixels covered.
        min_uniq_order : `int`, optional
            Minimum HEALPix order for looking for fully covered pixels.
        """
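        # The MOC is stored in NUNIQ packing: a NEST pixel ``ipix`` at order
        # ``o`` is encoded as uniq = 4*4**o + ipix, so a single integer encodes
        # both the order and the pixel (e.g. order 11, ipix 0 -> 16777216).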
        uniq = 4*(4**max_order) + pixels

        # Make a healsparse map to make degrade/comparison operations easy.
        hspmap = hsp.HealSparseMap.make_empty(2**min_uniq_order, 2**max_order, dtype=np.float32)
        hspmap[pixels] = 1.0

        # Loop over orders, degrading each time, and replace groups of pixels
        # that are fully covered with their single parent pixel.
        for uniq_order in range(max_order - 1, min_uniq_order - 1, -1):
            hspmap = hspmap.degrade(2**uniq_order, reduction="sum")
            pix_shift = np.right_shift(pixels, 2*(max_order - uniq_order))
            covered, = np.isclose(hspmap[pix_shift], 4**(max_order - uniq_order)).nonzero()
            if covered.size == 0:
                # No pixels at this order are fully covered; we are done.
                break
            uniq[covered] = 4*(4**uniq_order) + pix_shift[covered]

        # Remove duplicate pixels.
        uniq = np.unique(uniq)

        # Output to fits.
        tbl = np.zeros(uniq.size, dtype=[("UNIQ", "i8")])
        tbl["UNIQ"] = uniq

        order = np.log2(tbl["UNIQ"]//4).astype(np.int32)//2
        moc_order = np.max(order)

        hdu = fits.BinTableHDU(tbl)
        hdu.header["PIXTYPE"] = "HEALPIX"
        hdu.header["ORDERING"] = "NUNIQ"
        hdu.header["COORDSYS"] = "C"
        hdu.header["MOCORDER"] = moc_order
        hdu.header["MOCTOOL"] = "lsst.pipe.tasks.hips.GenerateHipsTask"

        uri = hips_base_path.join("Moc.fits")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            hdu.writeto(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _write_allsky_file(self, hips_base_path, allsky_order):
        """Write an Allsky.png file.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        allsky_order : `int`
            HEALPix order of the minimum order to make allsky file.
        """
        tile_size = self.config.allsky_tilesize
        n_tiles_per_side = int(np.sqrt(hpg.nside_to_npixel(hpg.order_to_nside(allsky_order))))

        allsky_image = None

        allsky_order_uri = hips_base_path.join(f"Norder{allsky_order}", forceDirectory=True)
        pixel_regex = re.compile(r"Npix([0-9]+)\.png$")
        png_uris = list(
            ResourcePath.findFileResources(
                candidates=[allsky_order_uri],
                file_filter=pixel_regex,
            )
        )

        for png_uri in png_uris:
            matches = re.match(pixel_regex, png_uri.basename())
            pix_num = int(matches.group(1))
            tile_image = Image.open(io.BytesIO(png_uri.read()))
            row = math.floor(pix_num//n_tiles_per_side)
            column = pix_num % n_tiles_per_side
            box = (column*tile_size, row*tile_size, (column + 1)*tile_size, (row + 1)*tile_size)
            tile_image_shrunk = tile_image.resize((tile_size, tile_size))

            if allsky_image is None:
                allsky_image = Image.new(
                    tile_image.mode,
                    (n_tiles_per_side*tile_size, n_tiles_per_side*tile_size),
                )
            allsky_image.paste(tile_image_shrunk, box)

        uri = allsky_order_uri.join("Allsky.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            allsky_image.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _get_dir_number(self, pixel):
        """Compute the directory number from a pixel.

        Parameters
        ----------
        pixel : `int`
            HEALPix pixel number.

        Returns
        -------
        dir_number : `int`
            HiPS directory number.
        """
        # HiPS tiles are grouped into directories of 10000, so e.g. pixel
        # 123456 lands in Dir120000.
        return (pixel//10000)*10000


class GenerateColorHipsConnections(pipeBase.PipelineTaskConnections,
                                   dimensions=("instrument", ),
                                   defaultTemplates={"coaddName": "deep"}):
    hips_exposure_handles = pipeBase.connectionTypes.Input(
        doc="HiPS-compatible HPX images.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
        deferLoad=True,
    )


class GenerateColorHipsConfig(GenerateHipsConfig,
                              pipelineConnections=GenerateColorHipsConnections):
    """Configuration parameters for GenerateColorHipsTask."""
    blue_channel_band = pexConfig.Field(
        doc="Band to use for blue channel of color pngs.",
        dtype=str,
    )
    green_channel_band = pexConfig.Field(
        doc="Band to use for green channel of color pngs.",
        dtype=str,
    )
    red_channel_band = pexConfig.Field(
        doc="Band to use for red channel of color pngs.",
        dtype=str,
    )
    png_color_asinh_minimum = pexConfig.Field(
        doc="AsinhMapping intensity to be mapped to black for color png scaling.",
        dtype=float,
    )
    png_color_asinh_stretch = pexConfig.Field(
        doc="AsinhMapping linear stretch for color png scaling.",
        dtype=float,
    )
    png_color_asinh_softening = pexConfig.Field(
        doc="AsinhMapping softening parameter (Q) for color png scaling.",
        dtype=float,
    )


class GenerateColorHipsTask(GenerateHipsTask):
    """Task for making a HiPS tree with color pngs."""
    ConfigClass = GenerateColorHipsConfig
    _DefaultName = "generateColorHips"

    color_task = True

    def _check_data_bands(self, data_bands):
        """Check the data for configured bands.

        Warn if any color bands are missing data.

        Parameters
        ----------
        data_bands : `set` [`str`]
            Bands from the input data.

        Returns
        -------
        bands : `list` [`str`]
            List of bands in bgr color order.
        """
        if len(data_bands) == 0:
            raise RuntimeError("GenerateColorHipsTask must have data from at least one band.")

        if self.config.blue_channel_band not in data_bands:
            self.log.warning(
                "Color png blue_channel_band %s not in dataset.",
                self.config.blue_channel_band
            )
        if self.config.green_channel_band not in data_bands:
            self.log.warning(
                "Color png green_channel_band %s not in dataset.",
                self.config.green_channel_band
            )
        if self.config.red_channel_band not in data_bands:
            self.log.warning(
                "Color png red_channel_band %s not in dataset.",
                self.config.red_channel_band
            )

        bands = [
            self.config.blue_channel_band,
            self.config.green_channel_band,
            self.config.red_channel_band,
        ]

        return bands