22"""Tasks for making and manipulating HIPS images."""
24__all__ = [
"HighResolutionHipsTask",
"HighResolutionHipsConfig",
"HighResolutionHipsConnections",
25 "HighResolutionHipsQuantumGraphBuilder",
26 "GenerateHipsTask",
"GenerateHipsConfig",
"GenerateColorHipsTask",
"GenerateColorHipsConfig"]

from collections import defaultdict
import argparse
import io
import math
import re
import sys
import warnings
from datetime import datetime

import numpy as np
import hpgeom as hpg
import healsparse as hsp
from astropy.io import fits
from PIL import Image

try:
    from astropy.visualization.lupton_rgb import AsinhMapping
except ImportError:
    from ._fallback_asinhmapping import AsinhMapping

import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.math as afwMath
import lsst.geom as geom
from lsst.afw.geom import makeHpxWcs
from lsst.utils.timer import timeMethod
from lsst.daf.butler import Butler
from lsst.daf.butler.queries.expression_factory import ExpressionFactory
from lsst.pipe.base.quantum_graph_builder import QuantumGraphBuilder
from lsst.pipe.base.quantum_graph_skeleton import QuantumGraphSkeleton
from lsst.sphgeom import HealpixPixelization, RangeSet
from lsst.resources import ResourcePath

from .healSparseMapping import _is_power_of_two


class HighResolutionHipsConnections(pipeBase.PipelineTaskConnections,
                                    dimensions=("healpix9", "band"),
                                    defaultTemplates={"coaddName": "deep"}):
    coadd_exposure_handles = pipeBase.connectionTypes.Input(
        doc="Coadded exposures to convert to HIPS format.",
        name="{coaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    hips_exposures = pipeBase.connectionTypes.Output(
        doc="HiPS-compatible HPX image.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        quantum_order = None
        for dim in self.dimensions:
            if dim.startswith("healpix"):
                if quantum_order is not None:
                    raise ValueError("Must not specify more than one quantum healpix dimension.")
                quantum_order = int(dim.split("healpix")[1])
        if quantum_order is None:
            raise ValueError("Must specify a healpix dimension in quantum dimensions.")

        if quantum_order > config.hips_order:
            raise ValueError("Quantum healpix dimension order must not be greater than hips_order")

        order = None
        for dim in self.hips_exposures.dimensions:
            if dim.startswith("healpix"):
                if order is not None:
                    raise ValueError("Must not specify more than one healpix dimension.")
                order = int(dim.split("healpix")[1])
        if order is None:
            raise ValueError("Must specify a healpix dimension in hips_exposure dimensions.")

        if order != config.hips_order:
            raise ValueError("healpix dimension order must match config.hips_order.")


class HighResolutionHipsConfig(pipeBase.PipelineTaskConfig,
                               pipelineConnections=HighResolutionHipsConnections):
    """Configuration parameters for HighResolutionHipsTask.

    Notes
    -----
    A HiPS image covers one HEALPix cell, with the HEALPix nside equal to
    2**hips_order. Each cell is 'shift_order' orders deeper than the HEALPix
    cell, with 2**shift_order x 2**shift_order sub-pixels on a side, which
    defines the target resolution of the HiPS image. The IVOA recommends
    shift_order=9, for 2**9=512 pixels on a side.

    The table in
    https://www.ivoa.net/documents/HiPS/20170519/REC-HIPS-1.0-20170519.pdf
    shows the relationship between hips_order, number of tiles (full
    sky coverage), cell size, and sub-pixel size/image resolution (with
    the default shift_order=9):

    +------------+-----------------+--------------+------------------+
    | hips_order | Number of Tiles | Cell Size    | Image Resolution |
    +============+=================+==============+==================+
    | 0          | 12              | 58.63 deg    | 6.871 arcmin     |
    | 1          | 48              | 29.32 deg    | 3.435 arcmin     |
    | 2          | 192             | 14.66 deg    | 1.718 arcmin     |
    | 3          | 768             | 7.329 deg    | 51.53 arcsec     |
    | 4          | 3072            | 3.665 deg    | 25.77 arcsec     |
    | 5          | 12288           | 1.832 deg    | 12.88 arcsec     |
    | 6          | 49152           | 54.97 arcmin | 6.442 arcsec     |
    | 7          | 196608          | 27.48 arcmin | 3.221 arcsec     |
    | 8          | 786432          | 13.74 arcmin | 1.61 arcsec      |
    | 9          | 3145728         | 6.871 arcmin | 805.2 mas        |
    | 10         | 12582912        | 3.435 arcmin | 402.6 mas        |
    | 11         | 50331648        | 1.718 arcmin | 201.3 mas        |
    | 12         | 201326592       | 51.53 arcsec | 100.6 mas        |
    | 13         | 805306368       | 25.77 arcsec | 50.32 mas        |
    +------------+-----------------+--------------+------------------+
    """
    hips_order = pexConfig.Field(
        doc="HIPS image order.",
        dtype=int,
        default=11,
    )
    shift_order = pexConfig.Field(
        doc="HIPS shift order (such that each tile is 2**shift_order pixels on a side)",
        dtype=int,
        default=9,
    )
    warp = pexConfig.ConfigField(
        dtype=afwMath.Warper.ConfigClass,
        doc="Warper configuration",
    )

    def setDefaults(self):
        self.warp.warpingKernelName = "lanczos5"


class HipsTaskNameDescriptor:
    """Descriptor used to create a DefaultName that matches the order of
    the defined dimensions in the connections class.

    Parameters
    ----------
    prefix : `str`
        The prefix of the Default name, to which the order will be
        appended.
    """
    def __init__(self, prefix):
        # Create a defaultName template.
        self._defaultName = f"{prefix}{{}}"
        self._order = None

    def __get__(self, obj, klass=None):
        if klass is None:
            raise RuntimeError(
                "HipsTaskDescriptor was used in an unexpected context"
            )
        if self._order is None:
            klassDimensions = klass.ConfigClass.ConnectionsClass.dimensions
            for dim in klassDimensions:
                if (match := re.match(r"^healpix(\d*)$", dim)) is not None:
                    self._order = int(match.group(1))
                    break
            else:
                raise RuntimeError(
                    "Could not find healpix dimension in connections class"
                )
        return self._defaultName.format(self._order)


class HighResolutionHipsTask(pipeBase.PipelineTask):
    """Task for making high resolution HiPS images."""
    ConfigClass = HighResolutionHipsConfig
    _DefaultName = HipsTaskNameDescriptor("highResolutionHips")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.warper = afwMath.Warper.fromConfig(self.config.warp)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        healpix_dim = f"healpix{self.config.hips_order}"

        pixels = [hips_exposure.dataId[healpix_dim]
                  for hips_exposure in outputRefs.hips_exposures]

        outputs = self.run(pixels=pixels, coadd_exposure_handles=inputs["coadd_exposure_handles"])

        hips_exposure_ref_dict = {hips_exposure_ref.dataId[healpix_dim]: hips_exposure_ref
                                  for hips_exposure_ref in outputRefs.hips_exposures}
        for pixel, hips_exposure in outputs.hips_exposures.items():
            butlerQC.put(hips_exposure, hips_exposure_ref_dict[pixel])

    @timeMethod
    def run(self, pixels, coadd_exposure_handles):
        """Run the HighResolutionHipsTask.

        Parameters
        ----------
        pixels : `Iterable` [ `int` ]
            Iterable of healpix pixels (nest ordering) to warp to.
        coadd_exposure_handles : `list` [`lsst.daf.butler.DeferredDatasetHandle`]
            Handles for the coadd exposures.

        Returns
        -------
        outputs : `lsst.pipe.base.Struct`
            ``hips_exposures`` is a dict with pixel (key) and hips_exposure (value)
        """
        self.log.info("Generating HPX images for %d pixels at order %d", len(pixels), self.config.hips_order)

        npix = 2**self.config.shift_order
        bbox_hpx = geom.Box2I(corner=geom.Point2I(0, 0),
                              dimensions=geom.Extent2I(npix, npix))

        # Set up the output exposures and warp lists, keyed by output pixel.
        exp_hpx_dict = {}
        warp_dict = {}
        for pixel in pixels:
            wcs_hpx = afwGeom.makeHpxWcs(self.config.hips_order, pixel, shift_order=self.config.shift_order)
            exp_hpx = afwImage.ExposureF(bbox_hpx, wcs_hpx)
            exp_hpx_dict[pixel] = exp_hpx
            warp_dict[pixel] = []

        first_handle = True
        # Loop over the coadd exposures and warp each onto the HPX grids.
        for handle in coadd_exposure_handles:
            coadd_exp = handle.get()

            for pixel in pixels:
                warped = self.warper.warpExposure(exp_hpx_dict[pixel].getWcs(), coadd_exp, maxBBox=bbox_hpx)

                exp = afwImage.ExposureF(exp_hpx_dict[pixel].getBBox(), exp_hpx_dict[pixel].getWcs())
                exp.maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)

                if first_handle:
                    # Make sure the mask planes, filter, and photometric
                    # calibration of the output exposure match the input.
                    exp_hpx_dict[pixel].mask.conformMaskPlanes(coadd_exp.mask.getMaskPlaneDict())
                    exp_hpx_dict[pixel].setFilter(coadd_exp.getFilter())
                    exp_hpx_dict[pixel].setPhotoCalib(coadd_exp.getPhotoCalib())

                if warped.getBBox().getArea() == 0 or not np.any(np.isfinite(warped.image.array)):
                    # There is no overlap, skip this pixel.
                    self.log.debug(
                        "No overlap between output HPX %d and input exposure %s",
                        pixel,
                        handle.dataId,
                    )
                    continue

                exp.maskedImage.assign(warped.maskedImage, warped.getBBox())
                warp_dict[pixel].append(exp.maskedImage)

            first_handle = False

        stats_flags = afwMath.stringToStatisticsProperty("MEAN")
        stats_ctrl = afwMath.StatisticsControl()
        stats_ctrl.setNanSafe(True)
        stats_ctrl.setWeighted(True)
        stats_ctrl.setCalcErrorFromInputVariance(True)

        # Combine the warps for each pixel into a single output exposure.
        for pixel in pixels:
            exp_hpx_dict[pixel].maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)

            if not warp_dict[pixel]:
                # There is no data for this pixel; do not persist an
                # empty exposure.
                self.log.debug("No data in HPX pixel %d", pixel)
                exp_hpx_dict.pop(pixel)
                continue

            exp_hpx_dict[pixel].maskedImage = afwMath.statisticsStack(
                warp_dict[pixel],
                stats_flags,
                stats_ctrl,
                [1.0]*len(warp_dict[pixel]),
            )

        return pipeBase.Struct(hips_exposures=exp_hpx_dict)
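
    # Worked example of the per-tile geometry in ``run`` above (illustrative
    # numbers, assuming the default hips_order=11, shift_order=9): each output
    # tile is a 512x512 (2**9) ExposureF whose WCS comes from
    # afwGeom.makeHpxWcs(11, pixel, shift_order=9); every overlapping coadd is
    # warped onto that grid and the per-pixel warps are then combined with a
    # NaN-safe weighted mean via afwMath.statisticsStack.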

    @classmethod
    def build_quantum_graph_cli(cls, argv):
        """A command-line interface entry point to `build_quantum_graph`.

        This method provides the implementation for the
        ``build-high-resolution-hips-qg`` script.

        Parameters
        ----------
        argv : `Sequence` [ `str` ]
            Command-line arguments (e.g. ``sys.argv[1:]``).
        """
        parser = cls._make_cli_parser()

        args = parser.parse_args(argv)

        if args.subparser_name is None:
            parser.print_help()
            sys.exit(1)

        pipeline = pipeBase.Pipeline.from_uri(args.pipeline)
        pipeline_graph = pipeline.to_graph()

        if len(pipeline_graph.tasks) != 1:
            raise RuntimeError(f"Pipeline file {args.pipeline} may only contain one task.")

        (task_node,) = pipeline_graph.tasks.values()

        butler = Butler(args.butler_config, collections=args.input)

        if args.subparser_name == "segment":
            # Do the segmentation.
            hpix_pixelization = HealpixPixelization(level=args.hpix_build_order)
            dataset = task_node.inputs["coadd_exposure_handles"].dataset_type_name
            with butler.query() as q:
                data_ids = list(q.join_dataset_search(dataset).data_ids("tract").with_dimension_records())
            region_pixels = []
            for data_id in data_ids:
                region = data_id.region
                pixel_range = hpix_pixelization.envelope(region)
                for r in pixel_range.ranges():
                    region_pixels.extend(range(r[0], r[1]))
            indices = np.unique(region_pixels)

            print(f"Pixels to run at HEALPix order --hpix_build_order {args.hpix_build_order}:")
            for pixel in indices:
                print(pixel)

        elif args.subparser_name == "build":
            # Build the quantum graph.

            # Figure out collection names.
            if args.output_run is None:
                if args.output is None:
                    raise ValueError("At least one of --output or --output-run options is required.")
                args.output_run = "{}/{}".format(args.output, pipeBase.Instrument.makeCollectionTimestamp())

            build_ranges = RangeSet(sorted(args.pixels))

            # Metadata to attach to the quantum graph.
            metadata = {
                "butler_argument": args.butler_config,
                "output": args.output,
                "output_run": args.output_run,
                "data_query": args.where,
                "time": f"{datetime.now()}",
            }

            builder = HighResolutionHipsQuantumGraphBuilder(
                pipeline_graph,
                butler,
                input_collections=args.input,
                output_run=args.output_run,
                constraint_order=args.hpix_build_order,
                constraint_ranges=build_ranges,
                where=args.where,
            )
            qg = builder.build(metadata, attach_datastore_records=True)
            qg.saveUri(args.save_qgraph)

    @classmethod
    def _make_cli_parser(cls):
        """Make the command-line parser.

        Returns
        -------
        parser : `argparse.ArgumentParser`
        """
        parser = argparse.ArgumentParser(
            description="Build a QuantumGraph that runs HighResolutionHipsTask on existing coadd datasets."
        )
        subparsers = parser.add_subparsers(help="sub-command help", dest="subparser_name")

        parser_segment = subparsers.add_parser("segment",
                                               help="Determine survey segments for workflow.")
        parser_build = subparsers.add_parser("build",
                                             help="Build quantum graph for HighResolutionHipsTask")

        for sub in [parser_segment, parser_build]:
            # These arguments are in common.
            sub.add_argument(
                "--butler-config",
                type=str,
                required=True,
                help="Path to data repository or butler configuration.",
            )
            sub.add_argument(
                "--pipeline",
                type=str,
                required=True,
                help="Pipeline file, limited to one task.",
            )
            sub.add_argument(
                "--input",
                type=str,
                nargs="+",
                required=True,
                help="Input collection(s) to search for coadd exposures.",
            )
            sub.add_argument(
                "--hpix_build_order",
                type=int,
                help="HEALPix order to segment sky for building quantum graph files.",
            )
            sub.add_argument(
                "--where",
                type=str,
                default="",
                help="Data ID expression used when querying for input coadd datasets.",
            )

        parser_build.add_argument(
            "--output",
            type=str,
            default=None,
            help=(
                "Name of the output CHAINED collection. If this option is specified and "
                "--output-run is not, then a new RUN collection will be created by appending "
                "a timestamp to the value of this option."
            ),
        )
        parser_build.add_argument(
            "--output-run",
            type=str,
            default=None,
            help=(
                "Output RUN collection to write resulting images. If not provided "
                "then --output must be provided and a new RUN collection will be created "
                "by appending a timestamp to the value passed with --output."
            ),
        )
        parser_build.add_argument(
            "--save-qgraph",
            type=str,
            required=True,
            help="Output filename for QuantumGraph.",
        )
        parser_build.add_argument(
            "--pixels",
            type=int,
            nargs="+",
            required=True,
            help="Pixels at --hpix_build_order to generate quantum graph.",
        )

        return parser


class HighResolutionHipsQuantumGraphBuilder(QuantumGraphBuilder):
    """A custom `lsst.pipe.base.QuantumGraphBuilder` for running
    `HighResolutionHipsTask` only.

    This is a workaround for incomplete butler query support for HEALPix
    dimensions.

    Parameters
    ----------
    pipeline_graph : `lsst.pipe.base.PipelineGraph`
        Pipeline graph with exactly one task, which must be a configuration
        of `HighResolutionHipsTask`.
    butler : `lsst.daf.butler.Butler`
        Client for the butler data repository. May be read-only.
    input_collections : `str` or `Iterable` [ `str` ], optional
        Collection or collections to search for input datasets, in order.
        If not provided, ``butler.collections`` will be searched.
    output_run : `str`, optional
        Name of the output collection. If not provided, ``butler.run`` will
        be used.
    constraint_order : `int`
        HEALPix order used to constrain which quanta are generated, via
        ``constraint_ranges``. This should be a coarser grid (smaller
        order) than the order used for the task's quantum and output data
        IDs, and ideally something between the spatial scale of a patch and
        the data repository's "common skypix" system (usually ``htm7``).
    constraint_ranges : `lsst.sphgeom.RangeSet`
        RangeSet that describes constraint pixels (HEALPix NEST, with order
        ``constraint_order``) to constrain generated quanta.
    where : `str`, optional
        A boolean `str` expression of the form accepted by
        `lsst.daf.butler.Butler` to constrain input datasets. This may
        contain a constraint on tracts, patches, or bands, but not HEALPix
        indices. Constraints on tracts and patches should usually be
        unnecessary, however - existing coadds that overlap the given
        HEALPix indices will be selected without such a constraint, and
        providing one may reject some that should normally be included.
    """

    def __init__(
        self,
        pipeline_graph,
        butler,
        *,
        input_collections=None,
        output_run=None,
        constraint_order,
        constraint_ranges,
        where="",
    ):
        super().__init__(pipeline_graph, butler, input_collections=input_collections, output_run=output_run)
        self.constraint_order = constraint_order
        self.constraint_ranges = constraint_ranges
        self.where = where
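
    # Worked example of the constraint-order arithmetic used in
    # ``process_subgraph`` (illustrative numbers): a single pixel at
    # constraint_order=5 covers 4**(11 - 5) = 4096 pixels at quantum order 11,
    # so RangeSet(pixel).scaled(4**(11 - 5)) expands one order-5 pixel into the
    # corresponding contiguous block of order-11 pixels.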

    def process_subgraph(self, subgraph):
        # Docstring inherited.
        (task_node,) = subgraph.tasks.values()

        # Since we know this is the only task in the pipeline, we know there
        # is only one overall input and one regular output.
        (input_dataset_type_node,) = subgraph.inputs_of(task_node.label).values()
        assert input_dataset_type_node is not None, "PipelineGraph should be resolved by base class."
        (output_edge,) = task_node.outputs.values()
        output_dataset_type_node = subgraph.dataset_types[output_edge.parent_dataset_type_name]
        (hpx_output_dimension,) = (
            self.butler.dimensions.skypix_dimensions[d]
            for d in output_dataset_type_node.dimensions.skypix
        )
        constraint_hpx_pixelization = (
            self.butler.dimensions.skypix_dimensions[f"healpix{self.constraint_order}"].pixelization
        )
        common_skypix_pixelization = self.butler.dimensions.commonSkyPix.pixelization

        # We will be running one quantum per spatial output pixel.
        (hpx_dimension,) = (
            self.butler.dimensions.skypix_dimensions[d]
            for d in task_node.dimensions.names
            if d != "band"
        )
        hpx_pixelization = hpx_dimension.pixelization
        if hpx_pixelization.level < self.constraint_order:
            raise ValueError(
                f"Quantum order {hpx_pixelization.level} must not be less than "
                f"the constraint order {self.constraint_order}."
            )
        hpx_ranges = self.constraint_ranges.scaled(4**(hpx_pixelization.level - self.constraint_order))

        # We can be generous in looking for pixels here, because we constrain
        # by actual patch regions below.
        common_skypix_ranges = RangeSet()
        for begin, end in self.constraint_ranges:
            for hpx_index in range(begin, end):
                constraint_hpx_region = constraint_hpx_pixelization.pixel(hpx_index)
                common_skypix_ranges |= common_skypix_pixelization.envelope(constraint_hpx_region)

        # Simplify the ranges to keep the data ID query manageable.
        for simp in range(1, 10):
            if len(common_skypix_ranges) < 100:
                break
            common_skypix_ranges.simplify(simp)

        # Build a "where" expression that constrains the common skypix
        # dimension to the ranges computed above.
        xf = ExpressionFactory(self.butler.dimensions)
        common_skypix_proxy = getattr(xf, self.butler.dimensions.commonSkyPix.name)
        where_terms = []
        for begin, end in common_skypix_ranges:
            if begin == end - 1:
                where_terms.append(common_skypix_proxy == begin)
            else:
                where_terms.append(common_skypix_proxy.in_range(begin, end))
        where = xf.any(*where_terms)

        # Query for input datasets that overlap the constraint region, and
        # group them by patch so we can compute the output pixels they feed.
        with self.butler.query() as query:
            input_refs = query.datasets(
                input_dataset_type_node.dataset_type,
                collections=self.input_collections
            ).where(
                where,
                self.where,
            ).with_dimension_records()
            inputs_by_patch = defaultdict(set)
            patch_dimensions = self.butler.dimensions.conform(["patch"])
            skeleton = QuantumGraphSkeleton([task_node.label])
            for input_ref in input_refs:
                dataset_key = skeleton.add_dataset_node(input_ref.datasetType.name, input_ref.dataId)
                skeleton.set_dataset_ref(input_ref, dataset_key)
                inputs_by_patch[input_ref.dataId.subset(patch_dimensions)].add(dataset_key)
            if not inputs_by_patch:
                message_body = "\n".join(input_refs.explain_no_results())
                raise RuntimeError(f"No inputs found:\n{message_body}")

        # Iterate over patches and compute the set of output healpix pixels
        # that overlap each patch, restricted to the pixels in hpx_ranges.
        inputs_by_hpx = defaultdict(set)
        for patch_data_id, input_keys_for_patch in inputs_by_patch.items():
            patch_hpx_ranges = hpx_pixelization.envelope(patch_data_id.region)
            for begin, end in patch_hpx_ranges & hpx_ranges:
                for hpx_index in range(begin, end):
                    inputs_by_hpx[hpx_index].update(input_keys_for_patch)

        # Create one quantum per output pixel and band.
        for hpx_index, input_keys_for_hpx_index in inputs_by_hpx.items():
            # Group inputs by band.
            input_keys_by_band = defaultdict(list)
            for input_key in input_keys_for_hpx_index:
                input_ref = skeleton.get_dataset_ref(input_key)
                assert input_ref is not None, "Code above adds the same nodes to the graph with refs."
                input_keys_by_band[input_ref.dataId["band"]].append(input_key)

            for band, input_keys_for_band in input_keys_by_band.items():
                data_id = self.butler.registry.expandDataId({hpx_dimension.name: hpx_index, "band": band})
                quantum_key = skeleton.add_quantum_node(task_node.label, data_id)

                # Add inputs to the skeleton.
                skeleton.add_input_edges(quantum_key, input_keys_for_band)

                # Add the regular output datasets (one per output-order pixel).
                hpx_pixel_ranges = RangeSet(hpx_index)
                hpx_output_ranges = hpx_pixel_ranges.scaled(
                    4**(task_node.config.hips_order - hpx_pixelization.level)
                )
                for begin, end in hpx_output_ranges:
                    for hpx_output_index in range(begin, end):
                        dataset_key = skeleton.add_dataset_node(
                            output_dataset_type_node.name,
                            self.butler.registry.expandDataId(
                                {hpx_output_dimension: hpx_output_index, "band": band}
                            ),
                        )
                        skeleton.add_output_edge(quantum_key, dataset_key)

                # Add auxiliary outputs (e.g. log and metadata datasets).
                for write_edge in task_node.iter_all_outputs():
                    # Skip the regular output, since we already added it.
                    if write_edge.connection_name == output_edge.connection_name:
                        continue
                    dataset_key = skeleton.add_dataset_node(write_edge.parent_dataset_type_name, data_id)
                    skeleton.add_output_edge(quantum_key, dataset_key)

        return skeleton


class HipsPropertiesSpectralTerm(pexConfig.Config):
    lambda_min = pexConfig.Field(
        doc="Minimum wavelength (nm)",
        dtype=float,
    )
    lambda_max = pexConfig.Field(
        doc="Maximum wavelength (nm)",
        dtype=float,
    )


class HipsPropertiesConfig(pexConfig.Config):
    """Configuration parameters for writing a HiPS properties file."""
    creator_did_template = pexConfig.Field(
        doc=("Unique identifier of the HiPS - Format: IVOID. "
             "Use ``{band}`` to substitute the band name."),
        dtype=str,
    )
    obs_collection = pexConfig.Field(
        doc="Short name of original data set - Format: one word",
        dtype=str,
        optional=True,
    )
    obs_description_template = pexConfig.Field(
        doc=("Data set description - Format: free text, longer free text "
             "description of the dataset. Use ``{band}`` to substitute "
             "the band name."),
        dtype=str,
    )
    prov_progenitor = pexConfig.ListField(
        doc="Provenance of the original data - Format: free text",
        dtype=str,
        default=[],
    )
    obs_title_template = pexConfig.Field(
        doc=("Data set title format: free text, but should be short. "
             "Use ``{band}`` to substitute the band name."),
        dtype=str,
    )
    spectral_ranges = pexConfig.ConfigDictField(
        doc="Mapping from band to lambda_min, lambda_max (nm). May be approximate.",
        keytype=str,
        itemtype=HipsPropertiesSpectralTerm,
        default={},
    )
    initial_ra = pexConfig.Field(
        doc="Initial RA (deg) (default for HiPS viewer). If not set will use a point in MOC.",
        dtype=float,
        optional=True,
    )
    initial_dec = pexConfig.Field(
        doc="Initial Declination (deg) (default for HiPS viewer). If not set will use a point in MOC.",
        dtype=float,
        optional=True,
    )
    initial_fov = pexConfig.Field(
        doc="Initial field-of-view (deg). If not set will use ~1 healpix tile.",
        dtype=float,
        optional=True,
    )
    obs_ack = pexConfig.Field(
        doc="Observation acknowledgements (free text).",
        dtype=str,
        optional=True,
    )
    t_min = pexConfig.Field(
        doc="Time (MJD) of earliest observation included in HiPS",
        dtype=float,
        optional=True,
    )
    t_max = pexConfig.Field(
        doc="Time (MJD) of latest observation included in HiPS",
        dtype=float,
        optional=True,
    )

    def validate(self):
        super().validate()

        if self.obs_collection is not None:
            if re.search(r"\s", self.obs_collection):
                raise ValueError("obs_collection cannot contain any space characters.")

    def setDefaults(self):
        u_term = HipsPropertiesSpectralTerm()
        u_term.lambda_min = 330.
        u_term.lambda_max = 400.
        self.spectral_ranges["u"] = u_term
        g_term = HipsPropertiesSpectralTerm()
        g_term.lambda_min = 402.
        g_term.lambda_max = 552.
        self.spectral_ranges["g"] = g_term
        r_term = HipsPropertiesSpectralTerm()
        r_term.lambda_min = 552.
        r_term.lambda_max = 691.
        self.spectral_ranges["r"] = r_term
        i_term = HipsPropertiesSpectralTerm()
        i_term.lambda_min = 691.
        i_term.lambda_max = 818.
        self.spectral_ranges["i"] = i_term
        z_term = HipsPropertiesSpectralTerm()
        z_term.lambda_min = 818.
        z_term.lambda_max = 922.
        self.spectral_ranges["z"] = z_term
        y_term = HipsPropertiesSpectralTerm()
        y_term.lambda_min = 970.
        y_term.lambda_max = 1060.
        self.spectral_ranges["y"] = y_term


class GenerateHipsConnections(pipeBase.PipelineTaskConnections,
                              dimensions=("instrument", "band"),
                              defaultTemplates={"coaddName": "deep"}):
    hips_exposure_handles = pipeBase.connectionTypes.Input(
        doc="HiPS-compatible HPX images.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
        deferLoad=True,
    )


class GenerateHipsConfig(pipeBase.PipelineTaskConfig,
                         pipelineConnections=GenerateHipsConnections):
    """Configuration parameters for GenerateHipsTask."""
    hips_base_uri = pexConfig.Field(
        doc="URI to HiPS base for output.",
        dtype=str,
    )
    min_order = pexConfig.Field(
        doc="Minimum healpix order for HiPS tree.",
        dtype=int,
    )
    properties = pexConfig.ConfigField(
        dtype=HipsPropertiesConfig,
        doc="Configuration for properties file.",
    )
    allsky_tilesize = pexConfig.Field(
        dtype=int,
        doc="Allsky tile size; must be power of 2. HiPS standard recommends 64x64 tiles.",
        default=64,
        check=_is_power_of_two,
    )
    png_gray_asinh_minimum = pexConfig.Field(
        doc="AsinhMapping intensity to be mapped to black for grayscale png scaling.",
        dtype=float,
    )
    png_gray_asinh_stretch = pexConfig.Field(
        doc="AsinhMapping linear stretch for grayscale png scaling.",
        dtype=float,
    )
    png_gray_asinh_softening = pexConfig.Field(
        doc="AsinhMapping softening parameter (Q) for grayscale png scaling.",
        dtype=float,
    )


class GenerateHipsTask(pipeBase.PipelineTask):
    """Task for making a HiPS tree with FITS and grayscale PNGs."""
    ConfigClass = GenerateHipsConfig
    _DefaultName = "generateHips"
    color_task = False

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        dims = inputRefs.hips_exposure_handles[0].dataId.dimensions.names

        order = None
        for dim in dims:
            if "healpix" in dim:
                order = int(dim.split("healpix")[1])
                healpix_dim = dim
                break
        if order is None:
            raise RuntimeError("Could not determine healpix order for input exposures.")

        hips_exposure_handle_dict = {
            (hips_exposure_handle.dataId[healpix_dim],
             hips_exposure_handle.dataId["band"]): hips_exposure_handle
            for hips_exposure_handle in inputs["hips_exposure_handles"]
        }

        data_bands = {hips_exposure_handle.dataId["band"]
                      for hips_exposure_handle in inputs["hips_exposure_handles"]}
        bands = self._check_data_bands(data_bands)

        self.run(
            bands=bands,
            max_order=order,
            hips_exposure_handle_dict=hips_exposure_handle_dict,
            do_color=self.color_task,
        )

    def _check_data_bands(self, data_bands):
        """Check that the data has only a single band.

        Parameters
        ----------
        data_bands : `set` [`str`]
            Bands from the input data.

        Returns
        -------
        bands : `list` [`str`]
            List of single band to process.

        Raises
        ------
        RuntimeError if there is not exactly one band.
        """
        if len(data_bands) != 1:
            raise RuntimeError("GenerateHipsTask can only use data from a single band.")

        return list(data_bands)

    def run(self, bands, max_order, hips_exposure_handle_dict, do_color=False):
        """Run the GenerateHipsTask.

        Parameters
        ----------
        bands : `list` [ `str` ]
            List of bands to be processed (or single band).
        max_order : `int`
            HEALPix order of the maximum (native) HPX exposures.
        hips_exposure_handle_dict : `dict` {`int`: `lsst.daf.butler.DeferredDatasetHandle`}
            Dict of handles for the HiPS high-resolution exposures.
            Key is (pixel number, ``band``).
        do_color : `bool`, optional
            Do color pngs instead of per-band grayscale.
        """
        min_order = self.config.min_order

        if not do_color:
            png_grayscale_mapping = AsinhMapping(
                self.config.png_gray_asinh_minimum,
                self.config.png_gray_asinh_stretch,
                Q=self.config.png_gray_asinh_softening,
            )
        else:
            png_color_mapping = AsinhMapping(
                self.config.png_color_asinh_minimum,
                self.config.png_color_asinh_stretch,
                Q=self.config.png_color_asinh_softening,
            )

            bcb = self.config.blue_channel_band
            gcb = self.config.green_channel_band
            rcb = self.config.red_channel_band
            colorstr = f"{bcb}{gcb}{rcb}"

        # The base path is based on the hips_base_uri.
        hips_base_path = ResourcePath(self.config.hips_base_uri, forceDirectory=True)

        # Unique-ify the pixels, which appear once per band; the result is
        # a sorted array.
        pixels = np.unique(np.array([pixel for pixel, _ in hips_exposure_handle_dict.keys()]))

        # Add a "gap" pixel at the end so that the final real pixel is processed.
        pixels = np.append(pixels, [0])

        # Convert the pixels to each order that will be generated.
        pixels_shifted = {}
        pixels_shifted[max_order] = pixels
        for order in range(max_order - 1, min_order - 1, -1):
            pixels_shifted[order] = np.right_shift(pixels_shifted[order + 1], 2)

        # And set the gap pixel to an illegal pixel value.
        for order in range(min_order, max_order + 1):
            pixels_shifted[order][-1] = -1

        # Read in the first pixel for determining image properties.
        exp0 = list(hips_exposure_handle_dict.values())[0].get()
        bbox = exp0.getBBox()
        npix = bbox.getWidth()
        shift_order = int(np.round(np.log2(npix)))

        # Create blank exposures for each band and order.
        exposures = {}
        for band in bands:
            for order in range(min_order, max_order + 1):
                exp = exp0.Factory(bbox=bbox)
                exp.image.array[:, :] = np.nan
                exposures[(band, order)] = exp

        # Loop over all pixels, avoiding the gap pixel.
        for pixel_counter, pixel in enumerate(pixels[:-1]):
            self.log.debug("Working on high resolution pixel %d", pixel)
            for band in bands:
                # Read the exposures at the highest order.  For color images
                # not every band necessarily has coverage at this pixel.
                if (pixel, band) in hips_exposure_handle_dict:
                    exposures[(band, max_order)] = hips_exposure_handle_dict[(pixel, band)].get()

            # Go up the HiPS tree.  A level is only written (and rebinned into
            # its parent) once the next pixel has a different parent pixel.
            for order in range(max_order, min_order - 1, -1):
                if pixels_shifted[order][pixel_counter + 1] == pixels_shifted[order][pixel_counter]:
                    # This order is not done, and so none of the other orders will be.
                    break

                # Write out the images for each band (this always triggers at
                # the max order, where each pixel is unique).
                if not do_color:
                    for band in bands:
                        self._write_hips_image(
                            hips_base_path.join(f"band_{band}", forceDirectory=True),
                            order,
                            pixels_shifted[order][pixel_counter],
                            exposures[(band, order)].image,
                            png_grayscale_mapping,
                            shift_order=shift_order,
                        )
                else:
                    # Make a color png.
                    self._write_hips_color_png(
                        hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                        order,
                        pixels_shifted[order][pixel_counter],
                        exposures[(self.config.red_channel_band, order)].image,
                        exposures[(self.config.green_channel_band, order)].image,
                        exposures[(self.config.blue_channel_band, order)].image,
                        png_color_mapping,
                    )

                log_level = self.log.INFO if order == (max_order - 3) else self.log.DEBUG
                self.log.log(
                    log_level,
                    "Completed HiPS generation for %s, order %d, pixel %d (%d/%d)",
                    ",".join(bands),
                    order,
                    pixels_shifted[order][pixel_counter],
                    pixel_counter,
                    len(pixels) - 1,
                )

                # At the top of the tree, erase the top-level images and continue.
                if order == min_order:
                    for band in bands:
                        exposures[(band, order)].image.array[:, :] = np.nan
                    continue

                # Bin the image for each band into the parent pixel.
                for band in bands:
                    arr = exposures[(band, order)].image.array.reshape(npix//2, 2, npix//2, 2)
                    with warnings.catch_warnings():
                        warnings.simplefilter("ignore")
                        binned_image_arr = np.nanmean(arr, axis=(1, 3))

                    # Determine which of the four sub-pixels of the parent
                    # this pixel occupies.
                    sub_index = (pixels_shifted[order][pixel_counter]
                                 - np.left_shift(pixels_shifted[order - 1][pixel_counter], 2))

                    # Fill the exposure at the next level up.
                    exp = exposures[(band, order - 1)]

                    # Fill the correct quadrant.
                    if sub_index == 0:
                        exp.image.array[npix//2:, 0: npix//2] = binned_image_arr
                    elif sub_index == 1:
                        exp.image.array[0: npix//2, 0: npix//2] = binned_image_arr
                    elif sub_index == 2:
                        exp.image.array[npix//2:, npix//2:] = binned_image_arr
                    elif sub_index == 3:
                        exp.image.array[0: npix//2, npix//2:] = binned_image_arr
                    else:
                        # This should be impossible.
                        raise ValueError("Illegal pixel sub index")

                    # Erase the previous exposure.
                    if order < max_order:
                        exposures[(band, order)].image.array[:, :] = np.nan

        # Write the properties files and MOCs.
        if not do_color:
            for band in bands:
                band_pixels = np.array([pixel
                                        for pixel, band_ in hips_exposure_handle_dict.keys()
                                        if band_ == band])
                band_pixels = np.sort(band_pixels)

                self._write_properties_and_moc(
                    hips_base_path.join(f"band_{band}", forceDirectory=True),
                    max_order,
                    band_pixels,
                    exp0,
                    shift_order,
                    band,
                    False,
                )
                self._write_allsky_file(
                    hips_base_path.join(f"band_{band}", forceDirectory=True),
                    min_order,
                )
        else:
            self._write_properties_and_moc(
                hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                max_order,
                pixels[:-1],
                exp0,
                shift_order,
                colorstr,
                True,
            )
            self._write_allsky_file(
                hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                min_order,
            )
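
    # Worked example of the tree arithmetic in ``run`` above (illustrative
    # numbers): an order-11 pixel maps to its order-10 parent via
    # np.right_shift(pixel, 2), and its position inside the parent is
    # sub_index = pixel - 4*parent, i.e. one of the four quadrants (0-3) that
    # the 2x2 binned average is pasted into.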

    def _write_hips_image(self, hips_base_path, order, pixel, image, png_mapping, shift_order=9):
        """Write a HiPS image.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        order : `int`
            HEALPix order of the HiPS image to write.
        pixel : `int`
            HEALPix pixel of the HiPS image.
        image : `lsst.afw.image.Image`
            Image to write.
        png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
            Mapping to convert image to scaled png.
        shift_order : `int`, optional
            HPX shift order of the image.
        """
        # Make the filename.
        dir_number = self._get_dir_number(pixel)
        hips_dir = hips_base_path.join(
            f"Norder{order}",
            forceDirectory=True
        ).join(
            f"Dir{dir_number}",
            forceDirectory=True
        )

        wcs = makeHpxWcs(order, pixel, shift_order=shift_order)

        uri = hips_dir.join(f"Npix{pixel}.fits")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            image.writeFits(temporary_uri.ospath, metadata=wcs.getFitsMetadata())

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

        # And make a grayscale png as well.
        with np.errstate(invalid="ignore"):
            vals = 255 - png_mapping.map_intensity_to_uint8(image.array).astype(np.uint8)

        vals[~np.isfinite(image.array) | (image.array < 0)] = 0
        im = Image.fromarray(vals[::-1, :], "L")

        uri = hips_dir.join(f"Npix{pixel}.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            im.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
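
    # Note on the grayscale mapping above: AsinhMapping.map_intensity_to_uint8
    # scales intensities to 0-255, and the values are inverted (255 - x) so
    # that bright sources render dark on a light background; non-finite or
    # negative pixels are then forced to 0 in the output array.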

    def _write_hips_color_png(
        self,
        hips_base_path,
        order,
        pixel,
        image_red,
        image_green,
        image_blue,
        png_mapping,
    ):
        """Write a color png HiPS image.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        order : `int`
            HEALPix order of the HiPS image to write.
        pixel : `int`
            HEALPix pixel of the HiPS image.
        image_red : `lsst.afw.image.Image`
            Input for red channel of output png.
        image_green : `lsst.afw.image.Image`
            Input for green channel of output png.
        image_blue : `lsst.afw.image.Image`
            Input for blue channel of output png.
        png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
            Mapping to convert image to scaled png.
        """
        # Make the filename.
        dir_number = self._get_dir_number(pixel)
        hips_dir = hips_base_path.join(
            f"Norder{order}",
            forceDirectory=True
        ).join(
            f"Dir{dir_number}",
            forceDirectory=True
        )

        # Convert nans to the minimum values in the mapping.
        arr_red = image_red.array.copy()
        arr_red[np.isnan(arr_red)] = png_mapping.minimum[0]
        arr_green = image_green.array.copy()
        arr_green[np.isnan(arr_green)] = png_mapping.minimum[1]
        arr_blue = image_blue.array.copy()
        arr_blue[np.isnan(arr_blue)] = png_mapping.minimum[2]

        image_array = png_mapping.make_rgb_image(arr_red, arr_green, arr_blue)

        im = Image.fromarray(image_array[::-1, :, :], mode="RGB")

        uri = hips_dir.join(f"Npix{pixel}.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            im.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _write_properties_and_moc(
        self,
        hips_base_path,
        max_order,
        pixels,
        exposure,
        shift_order,
        band,
        multiband,
    ):
        """Write HiPS properties file and MOC.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        max_order : `int`
            Maximum HEALPix order.
        pixels : `np.ndarray` (N,)
            Array of pixels used.
        exposure : `lsst.afw.image.Exposure`
            Sample HPX exposure used for generating HiPS tiles.
        shift_order : `int`
            HPX shift order.
        band : `str`
            Band (or color string).
        multiband : `bool`
            Is band multiband / color?
        """
        area = hpg.nside_to_pixel_area(2**max_order, degrees=True)*len(pixels)

        initial_ra = self.config.properties.initial_ra
        initial_dec = self.config.properties.initial_dec
        initial_fov = self.config.properties.initial_fov

        if initial_ra is None or initial_dec is None or initial_fov is None:
            # We want to point to an arbitrary pixel in the footprint; take
            # the median pixel for simplicity.
            temp_pixels = pixels.copy()
            if temp_pixels.size % 2 == 0:
                temp_pixels = np.append(temp_pixels, [temp_pixels[0]])
            medpix = int(np.median(temp_pixels))
            _initial_ra, _initial_dec = hpg.pixel_to_angle(2**max_order, medpix)
            _initial_fov = hpg.nside_to_resolution(2**max_order, units='arcminutes')/60.

            if initial_ra is None or initial_dec is None:
                initial_ra = _initial_ra
                initial_dec = _initial_dec
            if initial_fov is None:
                initial_fov = _initial_fov

        self._write_hips_properties_file(
            hips_base_path,
            self.config.properties,
            band,
            multiband,
            exposure,
            max_order,
            shift_order,
            area,
            initial_ra,
            initial_dec,
            initial_fov,
        )

        # Write the MOC coverage map.
        self._write_hips_moc_file(
            hips_base_path,
            max_order,
            pixels,
        )

    def _write_hips_properties_file(
        self,
        hips_base_path,
        properties_config,
        band,
        multiband,
        exposure,
        max_order,
        shift_order,
        area,
        initial_ra,
        initial_dec,
        initial_fov,
    ):
        """Write HiPS properties file.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            ResourcePath at top of HiPS tree. File will be written
            to this path as ``properties``.
        properties_config : `lsst.pipe.tasks.hips.HipsPropertiesConfig`
            Configuration for properties values.
        band : `str`
            Name of band(s) for HiPS tree.
        multiband : `bool`
            Is multiband / color?
        exposure : `lsst.afw.image.Exposure`
            Sample HPX exposure used for generating HiPS tiles.
        max_order : `int`
            Maximum HEALPix order.
        shift_order : `int`
            HPX shift order.
        area : `float`
            Coverage area in square degrees.
        initial_ra : `float`
            Initial HiPS RA position (degrees).
        initial_dec : `float`
            Initial HiPS Dec position (degrees).
        initial_fov : `float`
            Initial HiPS display size (degrees).
        """
        def _write_property(fh, name, value):
            """Write a property name/value to a file handle.

            Parameters
            ----------
            fh : file object
                File handle to write to.
            name : `str`
                Name of the property.
            value : `str`
                Value of the property.
            """
            # The HiPS standard does not allow spaces or ``=`` in names.
            if re.search(r"\s", name):
                raise ValueError(f"``{name}`` cannot contain any space characters.")
            if "=" in name:
                raise ValueError(f"``{name}`` cannot contain an ``=``")

            fh.write(f"{name:25}= {value}\n")

        if exposure.image.array.dtype == np.dtype("float32"):
            bitpix = -32
        elif exposure.image.array.dtype == np.dtype("float64"):
            bitpix = -64
        elif exposure.image.array.dtype == np.dtype("int32"):
            bitpix = 32
        else:
            raise RuntimeError("Unsupported image dtype for HiPS properties file.")

        date_iso8601 = datetime.utcnow().isoformat(timespec="seconds") + "Z"
        pixel_scale = hpg.nside_to_resolution(2**(max_order + shift_order), units='degrees')

        uri = hips_base_path.join("properties")
        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            with open(temporary_uri.ospath, "w") as fh:
                _write_property(
                    fh,
                    "creator_did",
                    properties_config.creator_did_template.format(band=band),
                )
                if properties_config.obs_collection is not None:
                    _write_property(fh, "obs_collection", properties_config.obs_collection)
                _write_property(
                    fh,
                    "obs_title",
                    properties_config.obs_title_template.format(band=band),
                )
                if properties_config.obs_description_template is not None:
                    _write_property(
                        fh,
                        "obs_description",
                        properties_config.obs_description_template.format(band=band),
                    )
                if len(properties_config.prov_progenitor) > 0:
                    for prov_progenitor in properties_config.prov_progenitor:
                        _write_property(fh, "prov_progenitor", prov_progenitor)
                if properties_config.obs_ack is not None:
                    _write_property(fh, "obs_ack", properties_config.obs_ack)
                _write_property(fh, "obs_regime", "Optical")
                _write_property(fh, "data_pixel_bitpix", str(bitpix))
                _write_property(fh, "dataproduct_type", "image")
                _write_property(fh, "moc_sky_fraction", str(area/41253.))
                _write_property(fh, "data_ucd", "phot.flux")
                _write_property(fh, "hips_creation_date", date_iso8601)
                _write_property(fh, "hips_builder", "lsst.pipe.tasks.hips.GenerateHipsTask")
                _write_property(fh, "hips_creator", "Vera C. Rubin Observatory")
                _write_property(fh, "hips_version", "1.4")
                _write_property(fh, "hips_release_date", date_iso8601)
                _write_property(fh, "hips_frame", "equatorial")
                _write_property(fh, "hips_order", str(max_order))
                _write_property(fh, "hips_tile_width", str(exposure.getBBox().getWidth()))
                _write_property(fh, "hips_status", "private master clonableOnce")
                if multiband:
                    _write_property(fh, "hips_tile_format", "png")
                    _write_property(fh, "dataproduct_subtype", "color")
                else:
                    _write_property(fh, "hips_tile_format", "png fits")
                _write_property(fh, "hips_pixel_bitpix", str(bitpix))
                _write_property(fh, "hips_pixel_scale", str(pixel_scale))
                _write_property(fh, "hips_initial_ra", str(initial_ra))
                _write_property(fh, "hips_initial_dec", str(initial_dec))
                _write_property(fh, "hips_initial_fov", str(initial_fov))
                if multiband:
                    if self.config.blue_channel_band in properties_config.spectral_ranges:
                        em_min = properties_config.spectral_ranges[
                            self.config.blue_channel_band
                        ].lambda_min/1e9
                    else:
                        self.log.warning("blue band %s not in self.config.spectral_ranges.", band)
                        em_min = 3e-7
                    if self.config.red_channel_band in properties_config.spectral_ranges:
                        em_max = properties_config.spectral_ranges[
                            self.config.red_channel_band
                        ].lambda_max/1e9
                    else:
                        self.log.warning("red band %s not in self.config.spectral_ranges.", band)
                        em_max = 1e-6
                else:
                    if band in properties_config.spectral_ranges:
                        em_min = properties_config.spectral_ranges[band].lambda_min/1e9
                        em_max = properties_config.spectral_ranges[band].lambda_max/1e9
                    else:
                        self.log.warning("band %s not in self.config.spectral_ranges.", band)
                        em_min = 3e-7
                        em_max = 1e-6
                _write_property(fh, "em_min", str(em_min))
                _write_property(fh, "em_max", str(em_max))
                if properties_config.t_min is not None:
                    _write_property(fh, "t_min", properties_config.t_min)
                if properties_config.t_max is not None:
                    _write_property(fh, "t_max", properties_config.t_max)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _write_hips_moc_file(self, hips_base_path, max_order, pixels, min_uniq_order=1):
        """Write HiPS MOC file.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            ResourcePath to top of HiPS tree. File will be written
            to this path as ``Moc.fits``.
        max_order : `int`
            Maximum HEALPix order.
        pixels : `np.ndarray`
            Array of pixels covered.
        min_uniq_order : `int`, optional
            Minimum HEALPix order for looking for fully covered pixels.
        """
        # Make the initial list of UNIQ pixels.
        uniq = 4*(4**max_order) + pixels

        # Make a healsparse map for easy degrade/comparisons.
        hspmap = hsp.HealSparseMap.make_empty(2**min_uniq_order, 2**max_order, dtype=np.float32)
        hspmap[pixels] = 1.0

        # Loop over orders, degrading each time, and look for pixels with
        # full coverage that can be replaced by a single coarser pixel.
        for uniq_order in range(max_order - 1, min_uniq_order - 1, -1):
            hspmap = hspmap.degrade(2**uniq_order, reduction="sum")
            pix_shift = np.right_shift(pixels, 2*(max_order - uniq_order))
            # Check if any of the pixels at uniq_order have full coverage.
            covered, = np.isclose(hspmap[pix_shift], 4**(max_order - uniq_order)).nonzero()
            if covered.size == 0:
                # No pixels at this order are fully covered, we're done.
                break
            # Replace the UNIQ pixels that are fully covered.
            uniq[covered] = 4*(4**uniq_order) + pix_shift[covered]

        # Remove duplicate pixels.
        uniq = np.unique(uniq)

        # Output to fits.
        tbl = np.zeros(uniq.size, dtype=[("UNIQ", "i8")])
        tbl["UNIQ"] = uniq

        order = np.log2(tbl["UNIQ"]//4).astype(np.int32)//2
        moc_order = np.max(order)

        hdu = fits.BinTableHDU(tbl)
        hdu.header["PIXTYPE"] = "HEALPIX"
        hdu.header["ORDERING"] = "NUNIQ"
        hdu.header["COORDSYS"] = "C"
        hdu.header["MOCORDER"] = moc_order
        hdu.header["MOCTOOL"] = "lsst.pipe.tasks.hips.GenerateHipsTask"

        uri = hips_base_path.join("Moc.fits")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            hdu.writeto(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
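
    # Worked example of the NUNIQ encoding above (illustrative numbers):
    # uniq = 4*(4**order) + pixel, so order-11 pixel 0 encodes to
    # 4*4**11 = 16777216, and the order is recovered as
    # int(np.log2(uniq//4))//2 == 11.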

    def _write_allsky_file(self, hips_base_path, allsky_order):
        """Write an Allsky.png file.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        allsky_order : `int`
            HEALPix order of the minimum order to make allsky file.
        """
        tile_size = self.config.allsky_tilesize

        # The Allsky file is an array of (possibly downsampled) tiles, laid
        # out left to right; its width in tiles is the integer square root of
        # the number of tiles at allsky_order.
        n_tiles = hpg.nside_to_npixel(hpg.order_to_nside(allsky_order))
        n_tiles_wide = int(np.floor(np.sqrt(n_tiles)))
        n_tiles_high = int(np.ceil(n_tiles / n_tiles_wide))

        allsky_image = None

        allsky_order_uri = hips_base_path.join(f"Norder{allsky_order}", forceDirectory=True)
        pixel_regex = re.compile(r"Npix([0-9]+)\.png$")
        png_uris = list(
            ResourcePath.findFileResources(
                candidates=[allsky_order_uri],
                file_filter=pixel_regex,
            )
        )

        for png_uri in png_uris:
            matches = re.match(pixel_regex, png_uri.basename())
            pix_num = int(matches.group(1))
            tile_image = Image.open(io.BytesIO(png_uri.read()))
            row = math.floor(pix_num//n_tiles_wide)
            column = pix_num % n_tiles_wide
            box = (column*tile_size, row*tile_size, (column + 1)*tile_size, (row + 1)*tile_size)
            tile_image_shrunk = tile_image.resize((tile_size, tile_size))

            if allsky_image is None:
                allsky_image = Image.new(
                    tile_image.mode,
                    (n_tiles_wide*tile_size, n_tiles_high*tile_size),
                )
            allsky_image.paste(tile_image_shrunk, box)

        uri = allsky_order_uri.join("Allsky.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            allsky_image.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
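
    # Worked example of the Allsky layout arithmetic above (illustrative
    # numbers): at allsky_order=3 there are 768 tiles, so n_tiles_wide =
    # floor(sqrt(768)) = 27 and n_tiles_high = ceil(768/27) = 29; with the
    # default 64x64 tiles the resulting Allsky.png is 1728x1856 pixels.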

    def _get_dir_number(self, pixel):
        """Compute the directory number from a pixel.

        Parameters
        ----------
        pixel : `int`
            HEALPix pixel number.

        Returns
        -------
        dir_number : `int`
            HiPS directory number.
        """
        return (pixel//10000)*10000
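
    # Worked example (illustrative): pixel 123456 falls in directory
    # Dir120000 ((123456//10000)*10000), so its order-11 FITS tile lands at
    # Norder11/Dir120000/Npix123456.fits under the HiPS layout used by
    # _write_hips_image and _write_hips_color_png.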


class GenerateColorHipsConnections(pipeBase.PipelineTaskConnections,
                                   dimensions=("instrument",),
                                   defaultTemplates={"coaddName": "deep"}):
    hips_exposure_handles = pipeBase.connectionTypes.Input(
        doc="HiPS-compatible HPX images.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
        deferLoad=True,
    )


class GenerateColorHipsConfig(GenerateHipsConfig,
                              pipelineConnections=GenerateColorHipsConnections):
    """Configuration parameters for GenerateColorHipsTask."""
    blue_channel_band = pexConfig.Field(
        doc="Band to use for blue channel of color pngs.",
        dtype=str,
    )
    green_channel_band = pexConfig.Field(
        doc="Band to use for green channel of color pngs.",
        dtype=str,
    )
    red_channel_band = pexConfig.Field(
        doc="Band to use for red channel of color pngs.",
        dtype=str,
    )
    png_color_asinh_minimum = pexConfig.Field(
        doc="AsinhMapping intensity to be mapped to black for color png scaling.",
        dtype=float,
    )
    png_color_asinh_stretch = pexConfig.Field(
        doc="AsinhMapping linear stretch for color png scaling.",
        dtype=float,
    )
    png_color_asinh_softening = pexConfig.Field(
        doc="AsinhMapping softening parameter (Q) for color png scaling.",
        dtype=float,
    )


class GenerateColorHipsTask(GenerateHipsTask):
    """Task for making a HiPS tree with color pngs."""
    ConfigClass = GenerateColorHipsConfig
    _DefaultName = "generateColorHips"
    color_task = True

    def _check_data_bands(self, data_bands):
        """Check the data for configured bands.

        Warn if any color bands are missing data.

        Parameters
        ----------
        data_bands : `set` [`str`]
            Bands from the input data.

        Returns
        -------
        bands : `list` [`str`]
            List of bands in bgr color order.
        """
        if len(data_bands) == 0:
            raise RuntimeError("GenerateColorHipsTask must have data from at least one band.")

        if self.config.blue_channel_band not in data_bands:
            self.log.warning(
                "Color png blue_channel_band %s not in dataset.",
                self.config.blue_channel_band
            )
        if self.config.green_channel_band not in data_bands:
            self.log.warning(
                "Color png green_channel_band %s not in dataset.",
                self.config.green_channel_band
            )
        if self.config.red_channel_band not in data_bands:
            self.log.warning(
                "Color png red_channel_band %s not in dataset.",
                self.config.red_channel_band
            )

        bands = [
            self.config.blue_channel_band,
            self.config.green_channel_band,
            self.config.red_channel_band,
        ]

        return bands