hips.py
1# This file is part of pipe_tasks.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
21
22"""Tasks for making and manipulating HIPS images."""
23
24__all__ = ["HighResolutionHipsTask", "HighResolutionHipsConfig", "HighResolutionHipsConnections",
25 "GenerateHipsTask", "GenerateHipsConfig", "GenerateColorHipsTask", "GenerateColorHipsConfig"]
26
27from collections import defaultdict
28import numpy as np
29import argparse
30import io
31import sys
32import re
33import warnings
34import math
35from datetime import datetime
36import hpgeom as hpg
37import healsparse as hsp
38from astropy.io import fits
39from astropy.visualization.lupton_rgb import AsinhMapping
40from PIL import Image
41
42from lsst.sphgeom import RangeSet, HealpixPixelization
43from lsst.utils.timer import timeMethod
44from lsst.daf.butler import Butler, DatasetRef, Quantum, SkyPixDimension
45import lsst.pex.config as pexConfig
46import lsst.pipe.base as pipeBase
47import lsst.afw.geom as afwGeom
48import lsst.afw.math as afwMath
49import lsst.afw.image as afwImage
50import lsst.geom as geom
51from lsst.afw.geom import makeHpxWcs
52from lsst.resources import ResourcePath
53
54
55class HighResolutionHipsConnections(pipeBase.PipelineTaskConnections,
56 dimensions=("healpix9", "band"),
57 defaultTemplates={"coaddName": "deep"}):
58 coadd_exposure_handles = pipeBase.connectionTypes.Input(
59 doc="Coadded exposures to convert to HIPS format.",
60 name="{coaddName}Coadd_calexp",
61 storageClass="ExposureF",
62 dimensions=("tract", "patch", "skymap", "band"),
63 multiple=True,
64 deferLoad=True,
65 )
66 hips_exposures = pipeBase.connectionTypes.Output(
67 doc="HiPS-compatible HPX image.",
68 name="{coaddName}Coadd_hpx",
69 storageClass="ExposureF",
70 dimensions=("healpix11", "band"),
71 multiple=True,
72 )
73
74 def __init__(self, *, config=None):
75 super().__init__(config=config)
76
77 quantum_order = None
78 for dim in self.dimensions:
79 if "healpix" in dim:
80 if quantum_order is not None:
81 raise ValueError("Must not specify more than one quantum healpix dimension.")
82 quantum_order = int(dim.split("healpix")[1])
83 if quantum_order is None:
84 raise ValueError("Must specify a healpix dimension in quantum dimensions.")
85
86 if quantum_order > config.hips_order:
87 raise ValueError("Quantum healpix dimension order must not be greater than hips_order")
88
89 order = None
90 for dim in self.hips_exposures.dimensions:
91 if "healpix" in dim:
92 if order is not None:
93 raise ValueError("Must not specify more than one healpix dimension.")
94 order = int(dim.split("healpix")[1])
95 if order is None:
96 raise ValueError("Must specify a healpix dimension in hips_exposure dimensions.")
97
98 if order != config.hips_order:
99 raise ValueError("healpix dimension order must match config.hips_order.")
100
101
102class HighResolutionHipsConfig(pipeBase.PipelineTaskConfig,
103 pipelineConnections=HighResolutionHipsConnections):
104 """Configuration parameters for HighResolutionHipsTask.
105
106 Notes
107 -----
108 A HiPS image covers one HEALPix cell, with the HEALPix nside equal to
109 2**hips_order. Each cell is 'shift_order' orders deeper than the HEALPix
110 cell, with 2**shift_order sub-pixels on a side, which
111 defines the target resolution of the HiPS image. The IVOA recommends
112 shift_order=9, for 2**9=512 pixels on a side.
113
114 Table 5 from
115 https://www.ivoa.net/documents/HiPS/20170519/REC-HIPS-1.0-20170519.pdf
116 shows the relationship between hips_order, number of tiles (full
117 sky coverage), cell size, and sub-pixel size/image resolution (with
118 the default shift_order=9):
119 +------------+-----------------+--------------+------------------+
120 | hips_order | Number of Tiles | Cell Size | Image Resolution |
121 +============+=================+==============+==================+
122 | 0 | 12 | 58.63 deg | 6.871 arcmin |
123 | 1 | 48 | 29.32 deg | 3.435 arcmin |
124 | 2 | 192 | 14.66 deg | 1.718 arcmin |
125 | 3 | 768 | 7.329 deg | 51.53 arcsec |
126 | 4 | 3072 | 3.665 deg | 25.77 arcsec |
127 | 5 | 12288 | 1.832 deg | 12.88 arcsec |
128 | 6 | 49152 | 54.97 arcmin | 6.442 arcsec |
129 | 7 | 196608 | 27.48 arcmin | 3.221 arcsec |
130 | 8 | 786432 | 13.74 arcmin | 1.61 arcsec |
131 | 9 | 3145728 | 6.871 arcmin | 805.2mas |
132 | 10 | 12582912 | 3.435 arcmin | 402.6mas |
133 | 11 | 50331648 | 1.718 arcmin | 201.3mas |
134 | 12 | 201326592 | 51.53 arcsec | 100.6mas |
135 | 13 | 805306368 | 25.77 arcsec | 50.32mas |
136 +------------+-----------------+--------------+------------------+
137 """
138 hips_order = pexConfig.Field(
139 doc="HIPS image order.",
140 dtype=int,
141 default=11,
142 )
143 shift_order = pexConfig.Field(
144 doc="HIPS shift order (such that each tile is 2**shift_order pixels on a side)",
145 dtype=int,
146 default=9,
147 )
148 warp = pexConfig.ConfigField(
149 dtype=afwMath.Warper.ConfigClass,
150 doc="Warper configuration",
151 )
152
153 def setDefaults(self):
154 self.warp.warpingKernelName = "lanczos5"
155
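# Illustrative sketch (not part of the task): reproduce a row of the table in
# the HighResolutionHipsConfig docstring for a given hips_order, assuming the
# default shift_order of 9. The helper name is hypothetical; only hpgeom
# calls already used elsewhere in this module are relied on.
def _print_hips_geometry(hips_order=11, shift_order=9):
    nside_tile = hpg.order_to_nside(hips_order)
    nside_pixel = hpg.order_to_nside(hips_order + shift_order)
    print("number of tiles:", hpg.nside_to_npixel(nside_tile))
    print("cell size (arcmin):", hpg.nside_to_resolution(nside_tile, units="arcminutes"))
    print("image resolution (arcsec):",
          hpg.nside_to_resolution(nside_pixel, units="arcminutes")*60.)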
156
157class HipsTaskNameDescriptor:
158 """Descriptor used create a DefaultName that matches the order of
159 the defined dimensions in the connections class.
160
161 Parameters
162 ----------
163 prefix : `str`
164 The prefix of the Default name, to which the order will be
165 appended.
166 """
167 def __init__(self, prefix):
168 # create a defaultName template
169 self._defaultName = f"{prefix}{{}}"
170 self._order = None
171
172 def __get__(self, obj, klass=None):
173 if klass is None:
174 raise RuntimeError(
175 "HipsTaskDescriptor was used in an unexpected context"
176 )
177 if self._order is None:
178 klassDimensions = klass.ConfigClass.ConnectionsClass.dimensions
179 for dim in klassDimensions:
180 if (match := re.match(r"^healpix(\d*)$", dim)) is not None:
181 self._order = int(match.group(1))
182 break
183 else:
184 raise RuntimeError(
185 "Could not find healpix dimension in connections class"
186 )
187 return self._defaultName.format(self._order)
188
189
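# A minimal sketch of how the descriptor above resolves. The hypothetical
# _Toy* classes stand in for a PipelineTask whose connections declare a
# "healpix9" quantum dimension, so the derived default name gains that order.
class _ToyConnections:
    dimensions = ("healpix9", "band")


class _ToyConfig:
    ConnectionsClass = _ToyConnections


class _ToyTask:
    ConfigClass = _ToyConfig
    _DefaultName = HipsTaskNameDescriptor("highResolutionHips")


assert _ToyTask._DefaultName == "highResolutionHips9"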
190class HighResolutionHipsTask(pipeBase.PipelineTask):
191 """Task for making high resolution HiPS images."""
192 ConfigClass = HighResolutionHipsConfig
193 _DefaultName = HipsTaskNameDescriptor("highResolutionHips")
194
195 def __init__(self, **kwargs):
196 super().__init__(**kwargs)
197 self.warper = afwMath.Warper.fromConfig(self.config.warp)
198
199 @timeMethod
200 def runQuantum(self, butlerQC, inputRefs, outputRefs):
201 inputs = butlerQC.get(inputRefs)
202
203 healpix_dim = f"healpix{self.config.hips_order}"
204
205 pixels = [hips_exposure.dataId[healpix_dim]
206 for hips_exposure in outputRefs.hips_exposures]
207
208 outputs = self.run(pixels=pixels, coadd_exposure_handles=inputs["coadd_exposure_handles"])
209
210 hips_exposure_ref_dict = {hips_exposure_ref.dataId[healpix_dim]:
211 hips_exposure_ref for hips_exposure_ref in outputRefs.hips_exposures}
212 for pixel, hips_exposure in outputs.hips_exposures.items():
213 butlerQC.put(hips_exposure, hips_exposure_ref_dict[pixel])
214
215 def run(self, pixels, coadd_exposure_handles):
216 """Run the HighResolutionHipsTask.
217
218 Parameters
219 ----------
220 pixels : `Iterable` [ `int` ]
221 Iterable of healpix pixels (nest ordering) to warp to.
222 coadd_exposure_handles : `list` [`lsst.daf.butler.DeferredDatasetHandle`]
223 Handles for the coadd exposures.
224
225 Returns
226 -------
227 outputs : `lsst.pipe.base.Struct`
228 ``hips_exposures`` is a dict with pixel (key) and hips_exposure (value)
229 """
230 self.log.info("Generating HPX images for %d pixels at order %d", len(pixels), self.config.hips_order)
231
232 npix = 2**self.config.shift_order
233 bbox_hpx = geom.Box2I(corner=geom.Point2I(0, 0),
234 dimensions=geom.Extent2I(npix, npix))
235
236 # For each healpix pixel we will create an empty exposure with the
237 # correct HPX WCS. We furthermore create a dict to hold each of
238 # the warps that will go into each HPX exposure.
239 exp_hpx_dict = {}
240 warp_dict = {}
241 for pixel in pixels:
242 wcs_hpx = afwGeom.makeHpxWcs(self.config.hips_order, pixel, shift_order=self.config.shift_order)
243 exp_hpx = afwImage.ExposureF(bbox_hpx, wcs_hpx)
244 exp_hpx_dict[pixel] = exp_hpx
245 warp_dict[pixel] = []
246
247 first_handle = True
248 # Loop over input coadd exposures to minimize i/o (this speeds things
249 # up by ~8x to batch together pixels that overlap a given coadd).
250 for handle in coadd_exposure_handles:
251 coadd_exp = handle.get()
252
253 # For each pixel, warp the coadd to the HPX WCS for the pixel.
254 for pixel in pixels:
255 warped = self.warper.warpExposure(exp_hpx_dict[pixel].getWcs(), coadd_exp, maxBBox=bbox_hpx)
256
257 exp = afwImage.ExposureF(exp_hpx_dict[pixel].getBBox(), exp_hpx_dict[pixel].getWcs())
258 exp.maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)
259
260 if first_handle:
261 # Make sure the mask planes, filter, and photocalib of the output
262 # exposure match the (first) input exposure.
263 exp_hpx_dict[pixel].mask.conformMaskPlanes(coadd_exp.mask.getMaskPlaneDict())
264 exp_hpx_dict[pixel].setFilter(coadd_exp.getFilter())
265 exp_hpx_dict[pixel].setPhotoCalib(coadd_exp.getPhotoCalib())
266
267 if warped.getBBox().getArea() == 0 or not np.any(np.isfinite(warped.image.array)):
268 # There is no overlap, skip.
269 self.log.debug(
270 "No overlap between output HPX %d and input exposure %s",
271 pixel,
272 handle.dataId
273 )
274 continue
275
276 exp.maskedImage.assign(warped.maskedImage, warped.getBBox())
277 warp_dict[pixel].append(exp.maskedImage)
278
279 first_handle = False
280
281 stats_flags = afwMath.stringToStatisticsProperty("MEAN")
282 stats_ctrl = afwMath.StatisticsControl()
283 stats_ctrl.setNanSafe(True)
284 stats_ctrl.setWeighted(True)
285 stats_ctrl.setCalcErrorFromInputVariance(True)
286
287 # Loop over pixels and combine the warps for each pixel.
288 # The combination is done with a simple mean for pixels that
289 # overlap in neighboring patches.
290 for pixel in pixels:
291 exp_hpx_dict[pixel].maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)
292
293 if not warp_dict[pixel]:
294 # Nothing in this pixel
295 self.log.debug("No data in HPX pixel %d", pixel)
296 # Remove the pixel from the output, no need to persist an
297 # empty exposure.
298 exp_hpx_dict.pop(pixel)
299 continue
300
301 exp_hpx_dict[pixel].maskedImage = afwMath.statisticsStack(
302 warp_dict[pixel],
303 stats_flags,
304 stats_ctrl,
305 [1.0]*len(warp_dict[pixel]),
306 clipped=0,
307 maskMap=[]
308 )
309
310 return pipeBase.Struct(hips_exposures=exp_hpx_dict)
311
312 @classmethod
313 def build_quantum_graph_cli(cls, argv):
314 """A command-line interface entry point to `build_quantum_graph`.
315 This method provides the implementation for the
316 ``build-high-resolution-hips-qg`` script.
317
318 Parameters
319 ----------
320 argv : `Sequence` [ `str` ]
321 Command-line arguments (e.g. ``sys.argv[1:]``).
322 """
323 parser = cls._make_cli_parser()
324
325 args = parser.parse_args(argv)
326
327 if args.subparser_name is None:
328 parser.print_help()
329 sys.exit(1)
330
331 pipeline = pipeBase.Pipeline.from_uri(args.pipeline)
332 expanded_pipeline = list(pipeline.toExpandedPipeline())
333
334 if len(expanded_pipeline) != 1:
335 raise RuntimeError(f"Pipeline file {args.pipeline} may only contain one task.")
336
337 (task_def,) = expanded_pipeline
338
339 butler = Butler(args.butler_config, collections=args.input)
340
341 if args.subparser_name == "segment":
342 # Do the segmentation
343 hpix_pixelization = HealpixPixelization(level=args.hpix_build_order)
344 dataset = task_def.connections.coadd_exposure_handles.name
345 data_ids = set(butler.registry.queryDataIds("tract", datasets=dataset).expanded())
346 region_pixels = []
347 for data_id in data_ids:
348 region = data_id.region
349 pixel_range = hpix_pixelization.envelope(region)
350 for r in pixel_range.ranges():
351 region_pixels.extend(range(r[0], r[1]))
352 indices = np.unique(region_pixels)
353
354 print(f"Pixels to run at HEALPix order --hpix_build_order {args.hpix_build_order}:")
355 for pixel in indices:
356 print(pixel)
357
358 elif args.subparser_name == "build":
359 # Build the quantum graph.
360
361 build_ranges = RangeSet(sorted(args.pixels))
362
363 qg = cls.build_quantum_graph(
364 task_def,
365 butler.registry,
366 args.hpix_build_order,
367 build_ranges,
368 where=args.where,
369 collections=args.input
370 )
371 qg.saveUri(args.save_qgraph)
372
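# A hedged example of driving the entry point above directly from Python
# instead of the ``build-high-resolution-hips-qg`` wrapper script. The helper
# name, repository path, pipeline file, and collection name are hypothetical
# placeholders.
def _example_segment_invocation():
    example_argv = [
        "segment",
        "-b", "/repo/example",        # hypothetical butler repository
        "-p", "hips_pipeline.yaml",   # pipeline file containing only this task
        "-i", "Example/runs/coadds",  # hypothetical input collection
        "-o", "3",                    # HEALPix order used to segment the sky
    ]
    HighResolutionHipsTask.build_quantum_graph_cli(example_argv)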
373 @classmethod
374 def _make_cli_parser(cls):
375 """Make the command-line parser.
376
377 Returns
378 -------
379 parser : `argparse.ArgumentParser`
380 """
381 parser = argparse.ArgumentParser(
382 description=(
383 "Build a QuantumGraph that runs HighResolutionHipsTask on existing coadd datasets."
384 ),
385 )
386 subparsers = parser.add_subparsers(help="sub-command help", dest="subparser_name")
387
388 parser_segment = subparsers.add_parser("segment",
389 help="Determine survey segments for workflow.")
390 parser_build = subparsers.add_parser("build",
391 help="Build quantum graph for HighResolutionHipsTask")
392
393 for sub in [parser_segment, parser_build]:
394 # These arguments are in common.
395 sub.add_argument(
396 "-b",
397 "--butler-config",
398 type=str,
399 help="Path to data repository or butler configuration.",
400 required=True,
401 )
402 sub.add_argument(
403 "-p",
404 "--pipeline",
405 type=str,
406 help="Pipeline file, limited to one task.",
407 required=True,
408 )
409 sub.add_argument(
410 "-i",
411 "--input",
412 type=str,
413 nargs="+",
414 help="Input collection(s) to search for coadd exposures.",
415 required=True,
416 )
417 sub.add_argument(
418 "-o",
419 "--hpix_build_order",
420 type=int,
421 default=1,
422 help="HEALPix order to segment sky for building quantum graph files.",
423 )
424 sub.add_argument(
425 "-w",
426 "--where",
427 type=str,
428 default=None,
429 help="Data ID expression used when querying for input coadd datasets.",
430 )
431
432 parser_build.add_argument(
433 "-q",
434 "--save-qgraph",
435 type=str,
436 help="Output filename for QuantumGraph.",
437 required=True,
438 )
439 parser_build.add_argument(
440 "-P",
441 "--pixels",
442 type=int,
443 nargs="+",
444 help="Pixels at --hpix_build_order to generate quantum graph.",
445 required=True,
446 )
447
448 return parser
449
450 @classmethod
451 def build_quantum_graph(
452 cls,
453 task_def,
454 registry,
455 constraint_order,
456 constraint_ranges,
457 where=None,
458 collections=None,
459 ):
460 """Generate a `QuantumGraph` for running just this task.
461
462 This is a temporary workaround for incomplete butler query support for
463 HEALPix dimensions.
464
465 Parameters
466 ----------
467 task_def : `lsst.pipe.base.TaskDef`
468 Task definition.
469 registry : `lsst.daf.butler.Registry`
470 Client for the butler database. May be read-only.
471 constraint_order : `int`
472 HEALPix order used to constrain which quanta are generated, via
473 ``constraint_ranges``. This should be a coarser grid (smaller
474 order) than the order used for the task's quantum and output data
475 IDs, and ideally something between the spatial scale of a patch and
476 that of the data repository's "common skypix" system (usually ``htm7``).
477 constraint_ranges : `lsst.sphgeom.RangeSet`
478 RangeSet which describes constraint pixels (HEALPix NEST, with order
479 constraint_order) to constrain generated quanta.
480 where : `str`, optional
481 A boolean `str` expression of the form accepted by
482 `Registry.queryDatasets` to constrain input datasets. This may
483 contain a constraint on tracts, patches, or bands, but not HEALPix
484 indices. Constraints on tracts and patches should usually be
485 unnecessary, however; existing coadds that overlap the given
486 HEALPix indices will be selected without such a constraint, and
487 providing one may reject some that should normally be included.
488 collections : `str` or `Iterable` [ `str` ], optional
489 Collection or collections to search for input datasets, in order.
490 If not provided, ``registry.defaults.collections`` will be
491 searched.
492 """
493 config = task_def.config
494
495 dataset_types = pipeBase.PipelineDatasetTypes.fromPipeline(pipeline=[task_def], registry=registry)
496 # Since we know this is the only task in the pipeline, we know there
497 # is only one overall input and one overall output.
498 (input_dataset_type,) = dataset_types.inputs
499
500 # Extract the main output dataset type (which needs multiple
501 # DatasetRefs, and tells us the output HPX level), and make a set of
502 # what remains for more mechanical handling later.
503 output_dataset_type = dataset_types.outputs[task_def.connections.hips_exposures.name]
504 incidental_output_dataset_types = dataset_types.outputs.copy()
505 incidental_output_dataset_types.remove(output_dataset_type)
506 (hpx_output_dimension,) = (d for d in output_dataset_type.dimensions
507 if isinstance(d, SkyPixDimension))
508
509 constraint_hpx_pixelization = registry.dimensions[f"healpix{constraint_order}"].pixelization
510 common_skypix_name = registry.dimensions.commonSkyPix.name
511 common_skypix_pixelization = registry.dimensions.commonSkyPix.pixelization
512
513 # We will need all the pixels at the quantum resolution as well
514 task_dimensions = registry.dimensions.extract(task_def.connections.dimensions)
515 (hpx_dimension,) = (d for d in task_dimensions if d.name != "band")
516 hpx_pixelization = hpx_dimension.pixelization
517
518 if hpx_pixelization.level < constraint_order:
519 raise ValueError(f"Quantum order {hpx_pixelization.level} must be < {constraint_order}")
520 hpx_ranges = constraint_ranges.scaled(4**(hpx_pixelization.level - constraint_order))
521
522 # We can be generous in looking for pixels here, because we constrain by actual
523 # patch regions below.
524 common_skypix_ranges = RangeSet()
525 for begin, end in constraint_ranges:
526 for hpx_index in range(begin, end):
527 constraint_hpx_region = constraint_hpx_pixelization.pixel(hpx_index)
528 common_skypix_ranges |= common_skypix_pixelization.envelope(constraint_hpx_region)
529
530 # To keep the query from getting out of hand (and breaking) we simplify until we have fewer
531 # than 100 ranges, which seems to work fine.
532 for simp in range(1, 10):
533 if len(common_skypix_ranges) < 100:
534 break
535 common_skypix_ranges.simplify(simp)
536
537 # Use that RangeSet to assemble a WHERE constraint expression. This
538 # could definitely get too big if the "constraint healpix" order is too
539 # fine.
540 where_terms = []
541 bind = {}
542 for n, (begin, end) in enumerate(common_skypix_ranges):
543 stop = end - 1 # registry range syntax is inclusive
544 if begin == stop:
545 where_terms.append(f"{common_skypix_name} = cpx{n}")
546 bind[f"cpx{n}"] = begin
547 else:
548 where_terms.append(f"({common_skypix_name} >= cpx{n}a AND {common_skypix_name} <= cpx{n}b)")
549 bind[f"cpx{n}a"] = begin
550 bind[f"cpx{n}b"] = stop
551 if where is None:
552 where = " OR ".join(where_terms)
553 else:
554 where = f"({where}) AND ({' OR '.join(where_terms)})"
555 # Query for input datasets with this constraint, and ask for expanded
556 # data IDs because we want regions. Immediately group this by patch so
557 # we don't do later geometric stuff n_bands more times than we need to.
558 input_refs = registry.queryDatasets(
559 input_dataset_type,
560 where=where,
561 findFirst=True,
562 collections=collections,
563 bind=bind
564 ).expanded()
565 inputs_by_patch = defaultdict(set)
566 patch_dimensions = registry.dimensions.extract(["patch"])
567 for input_ref in input_refs:
568 inputs_by_patch[input_ref.dataId.subset(patch_dimensions)].add(input_ref)
569 if not inputs_by_patch:
570 message_body = "\n".join(input_refs.explain_no_results())
571 raise RuntimeError(f"No inputs found:\n{message_body}")
572
573 # Iterate over patches and compute the set of output healpix pixels
574 # that overlap each one. Use that to associate inputs with output
575 # pixels, but only for the output pixels we've already identified.
576 inputs_by_hpx = defaultdict(set)
577 for patch_data_id, input_refs_for_patch in inputs_by_patch.items():
578 patch_hpx_ranges = hpx_pixelization.envelope(patch_data_id.region)
579 for begin, end in patch_hpx_ranges & hpx_ranges:
580 for hpx_index in range(begin, end):
581 inputs_by_hpx[hpx_index].update(input_refs_for_patch)
582 # Iterate over the dict we just created and create the actual quanta.
583 quanta = []
584 for hpx_index, input_refs_for_hpx_index in inputs_by_hpx.items():
585 # Group inputs by band.
586 input_refs_by_band = defaultdict(list)
587 for input_ref in input_refs_for_hpx_index:
588 input_refs_by_band[input_ref.dataId["band"]].append(input_ref)
589 # Iterate over bands to make quanta.
590 for band, input_refs_for_band in input_refs_by_band.items():
591 data_id = registry.expandDataId({hpx_dimension: hpx_index, "band": band})
592
593 hpx_pixel_ranges = RangeSet(hpx_index)
594 hpx_output_ranges = hpx_pixel_ranges.scaled(4**(config.hips_order - hpx_pixelization.level))
595 output_data_ids = []
596 for begin, end in hpx_output_ranges:
597 for hpx_output_index in range(begin, end):
598 output_data_ids.append(
599 registry.expandDataId({hpx_output_dimension: hpx_output_index, "band": band})
600 )
601 outputs = {dt: [DatasetRef(dt, data_id)] for dt in incidental_output_dataset_types}
602 outputs[output_dataset_type] = [DatasetRef(output_dataset_type, data_id)
603 for data_id in output_data_ids]
604 quanta.append(
605 Quantum(
606 taskName=task_def.taskName,
607 taskClass=task_def.taskClass,
608 dataId=data_id,
609 initInputs={},
610 inputs={input_dataset_type: input_refs_for_band},
611 outputs=outputs,
612 )
613 )
614
615 if len(quanta) == 0:
616 raise RuntimeError("Given constraints yielded empty quantum graph.")
617
618 return pipeBase.QuantumGraph(quanta={task_def: quanta})
619
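# A small sketch of the range bookkeeping used in ``build_quantum_graph``: a
# single coarse constraint pixel expands to 4**(order difference) nested
# descendants at the quantum order. The helper name, pixel, and orders are
# example values.
def _expand_constraint_pixel(constraint_order=1, quantum_order=9, pixel=42):
    constraint_ranges = RangeSet(pixel)
    quantum_ranges = constraint_ranges.scaled(4**(quantum_order - constraint_order))
    for begin, end in quantum_ranges:
        print(f"order-{quantum_order} pixels {begin}..{end - 1}")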
620
621class HipsPropertiesSpectralTerm(pexConfig.Config):
622 lambda_min = pexConfig.Field(
623 doc="Minimum wavelength (nm)",
624 dtype=float,
625 )
626 lambda_max = pexConfig.Field(
627 doc="Maximum wavelength (nm)",
628 dtype=float,
629 )
630
631
632class HipsPropertiesConfig(pexConfig.Config):
633 """Configuration parameters for writing a HiPS properties file."""
634 creator_did_template = pexConfig.Field(
635 doc=("Unique identifier of the HiPS - Format: IVOID. "
636 "Use ``{band}`` to substitute the band name."),
637 dtype=str,
638 optional=False,
639 )
640 obs_collection = pexConfig.Field(
641 doc="Short name of original data set - Format: one word",
642 dtype=str,
643 optional=True,
644 )
645 obs_description_template = pexConfig.Field(
646 doc=("Data set description - Format: free text, longer free text "
647 "description of the dataset. Use ``{band}`` to substitute "
648 "the band name."),
649 dtype=str,
650 )
651 prov_progenitor = pexConfig.ListField(
652 doc="Provenance of the original data - Format: free text",
653 dtype=str,
654 default=[],
655 )
656 obs_title_template = pexConfig.Field(
657 doc=("Data set title format: free text, but should be short. "
658 "Use ``{band}`` to substitute the band name."),
659 dtype=str,
660 optional=False,
661 )
662 spectral_ranges = pexConfig.ConfigDictField(
663 doc=("Mapping from band to lambda_min, lamba_max (nm). May be approximate."),
664 keytype=str,
665 itemtype=HipsPropertiesSpectralTerm,
666 default={},
667 )
668 initial_ra = pexConfig.Field(
669 doc="Initial RA (deg) (default for HiPS viewer). If not set will use a point in MOC.",
670 dtype=float,
671 optional=True,
672 )
673 initial_dec = pexConfig.Field(
674 doc="Initial Declination (deg) (default for HiPS viewer). If not set will use a point in MOC.",
675 dtype=float,
676 optional=True,
677 )
678 initial_fov = pexConfig.Field(
679 doc="Initial field-of-view (deg). If not set will use ~1 healpix tile.",
680 dtype=float,
681 optional=True,
682 )
683 obs_ack = pexConfig.Field(
684 doc="Observation acknowledgements (free text).",
685 dtype=str,
686 optional=True,
687 )
688 t_min = pexConfig.Field(
689 doc="Time (MJD) of earliest observation included in HiPS",
690 dtype=float,
691 optional=True,
692 )
693 t_max = pexConfig.Field(
694 doc="Time (MJD) of latest observation included in HiPS",
695 dtype=float,
696 optional=True,
697 )
698
699 def validate(self):
700 super().validate()
701
702 if self.obs_collection is not None:
703 if re.search(r"\s", self.obs_collection):
704 raise ValueError("obs_collection cannot contain any space characters.")
705
706 def setDefaults(self):
707 # Values here taken from
708 # https://github.com/lsst-dm/dax_obscore/blob/44ac15029136e2ec15/configs/dp02.yaml#L46
709 u_term = HipsPropertiesSpectralTerm()
710 u_term.lambda_min = 330.
711 u_term.lambda_max = 400.
712 self.spectral_ranges["u"] = u_term
713 g_term = HipsPropertiesSpectralTerm()
714 g_term.lambda_min = 402.
715 g_term.lambda_max = 552.
716 self.spectral_ranges["g"] = g_term
717 r_term = HipsPropertiesSpectralTerm()
718 r_term.lambda_min = 552.
719 r_term.lambda_max = 691.
720 self.spectral_ranges["r"] = r_term
721 i_term = HipsPropertiesSpectralTerm()
722 i_term.lambda_min = 691.
723 i_term.lambda_max = 818.
724 self.spectral_ranges["i"] = i_term
725 z_term = HipsPropertiesSpectralTerm()
726 z_term.lambda_min = 818.
727 z_term.lambda_max = 922.
728 self.spectral_ranges["z"] = z_term
729 y_term = HipsPropertiesSpectralTerm()
730 y_term.lambda_min = 970.
731 y_term.lambda_max = 1060.
732 self.spectral_ranges["y"] = y_term
733
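# A hedged configuration example for the properties file; the IVOID, titles,
# and provenance strings below are hypothetical placeholders rather than
# project defaults.
def _example_properties_config():
    properties = HipsPropertiesConfig()
    properties.creator_did_template = "ivo://org.example/P/coadd/hips/{band}"
    properties.obs_collection = "ExampleSurvey_DR1"
    properties.obs_title_template = "Example survey {band}-band coadd"
    properties.obs_description_template = "Example deep coadd HiPS tree, {band} band."
    properties.prov_progenitor = ["Example coadds, processing run 1"]
    properties.validate()
    return properties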
734
735class GenerateHipsConnections(pipeBase.PipelineTaskConnections,
736 dimensions=("instrument", "band"),
737 defaultTemplates={"coaddName": "deep"}):
738 hips_exposure_handles = pipeBase.connectionTypes.Input(
739 doc="HiPS-compatible HPX images.",
740 name="{coaddName}Coadd_hpx",
741 storageClass="ExposureF",
742 dimensions=("healpix11", "band"),
743 multiple=True,
744 deferLoad=True,
745 )
746
747
748class GenerateHipsConfig(pipeBase.PipelineTaskConfig,
749 pipelineConnections=GenerateHipsConnections):
750 """Configuration parameters for GenerateHipsTask."""
751 # WARNING: In general PipelineTasks are not allowed to do any outputs
752 # outside of the butler. This task has been given (temporary)
753 # Special Dispensation because of the nature of HiPS outputs until
754 # a more controlled solution can be found.
755 hips_base_uri = pexConfig.Field(
756 doc="URI to HiPS base for output.",
757 dtype=str,
758 optional=False,
759 )
760 min_order = pexConfig.Field(
761 doc="Minimum healpix order for HiPS tree.",
762 dtype=int,
763 default=3,
764 )
765 properties = pexConfig.ConfigField(
766 dtype=HipsPropertiesConfig,
767 doc="Configuration for properties file.",
768 )
769 allsky_tilesize = pexConfig.Field(
770 dtype=int,
771 doc="Allsky.png tile size. Must be power of 2.",
772 default=512,
773 )
774 png_gray_asinh_minimum = pexConfig.Field(
775 doc="AsinhMapping intensity to be mapped to black for grayscale png scaling.",
776 dtype=float,
777 default=0.0,
778 )
779 png_gray_asinh_stretch = pexConfig.Field(
780 doc="AsinhMapping linear stretch for grayscale png scaling.",
781 dtype=float,
782 default=2.0,
783 )
784 png_gray_asinh_softening = pexConfig.Field(
785 doc="AsinhMapping softening parameter (Q) for grayscale png scaling.",
786 dtype=float,
787 default=8.0,
788 )
789
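# A minimal sketch of what the grayscale asinh parameters control, using the
# same AsinhMapping construction as GenerateHipsTask.run. The helper name and
# sample intensities are arbitrary.
def _example_gray_mapping():
    config = GenerateHipsConfig()
    mapping = AsinhMapping(
        config.png_gray_asinh_minimum,
        config.png_gray_asinh_stretch,
        Q=config.png_gray_asinh_softening,
    )
    sample = np.linspace(0.0, 10.0, num=5, dtype=np.float32)
    return mapping.map_intensity_to_uint8(sample)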
790
791class GenerateHipsTask(pipeBase.PipelineTask):
792 """Task for making a HiPS tree with FITS and grayscale PNGs."""
793 ConfigClass = GenerateHipsConfig
794 _DefaultName = "generateHips"
795 color_task = False
796
797 @timeMethod
798 def runQuantum(self, butlerQC, inputRefs, outputRefs):
799 inputs = butlerQC.get(inputRefs)
800
801 dims = inputRefs.hips_exposure_handles[0].dataId.names
802 order = None
803 for dim in dims:
804 if "healpix" in dim:
805 order = int(dim.split("healpix")[1])
806 healpix_dim = dim
807 break
808 else:
809 raise RuntimeError("Could not determine healpix order for input exposures.")
810
811 hips_exposure_handle_dict = {
812 (hips_exposure_handle.dataId[healpix_dim],
813 hips_exposure_handle.dataId["band"]): hips_exposure_handle
814 for hips_exposure_handle in inputs["hips_exposure_handles"]
815 }
816
817 data_bands = {hips_exposure_handle.dataId["band"]
818 for hips_exposure_handle in inputs["hips_exposure_handles"]}
819 bands = self._check_data_bands(data_bands)
820
821 self.run(
822 bands=bands,
823 max_order=order,
824 hips_exposure_handle_dict=hips_exposure_handle_dict,
825 do_color=self.color_task,
826 )
827
828 def _check_data_bands(self, data_bands):
829 """Check that the data has only a single band.
830
831 Parameters
832 ----------
833 data_bands : `set` [`str`]
834 Bands from the input data.
835
836 Returns
837 -------
838 bands : `list` [`str`]
839 List containing the single band to process.
840
841 Raises
842 ------
843 RuntimeError if there is not exactly one band.
844 """
845 if len(data_bands) != 1:
846 raise RuntimeError("GenerateHipsTask can only use data from a single band.")
847
848 return list(data_bands)
849
850 @timeMethod
851 def run(self, bands, max_order, hips_exposure_handle_dict, do_color=False):
852 """Run the GenerateHipsTask.
853
854 Parameters
855 ----------
856 bands : `list` [ `str` ]
857 List of bands to be processed (or single band).
858 max_order : `int`
859 HEALPix order of the maximum (native) HPX exposures.
860 hips_exposure_handle_dict : `dict` {(`int`, `str`): `lsst.daf.butler.DeferredDatasetHandle`}
861 Dict of handles for the HiPS high-resolution exposures.
862 Key is (pixel number, ``band``).
863 do_color : `bool`, optional
864 Do color pngs instead of per-band grayscale.
865 """
866 min_order = self.config.min_order
867
868 if not do_color:
869 png_grayscale_mapping = AsinhMapping(
870 self.config.png_gray_asinh_minimum,
871 self.config.png_gray_asinh_stretch,
872 Q=self.config.png_gray_asinh_softening,
873 )
874 else:
875 png_color_mapping = AsinhMapping(
876 self.config.png_color_asinh_minimum,
877 self.config.png_color_asinh_stretch,
878 Q=self.config.png_color_asinh_softening,
879 )
880
881 bcb = self.config.blue_channel_band
882 gcb = self.config.green_channel_band
883 rcb = self.config.red_channel_band
884 colorstr = f"{bcb}{gcb}{rcb}"
885
886 # The base path is based on the hips_base_uri.
887 hips_base_path = ResourcePath(self.config.hips_base_uri, forceDirectory=True)
888
889 # We need to unique-ify the pixels because they show up for multiple bands.
890 # The output of this is a sorted array.
891 pixels = np.unique(np.array([pixel for pixel, _ in hips_exposure_handle_dict.keys()]))
892
893 # Add a "gutter" pixel at the end. Start with 0 which maps to 0 always.
894 pixels = np.append(pixels, [0])
895
896 # Convert the pixels to each order that will be generated.
897 pixels_shifted = {}
898 pixels_shifted[max_order] = pixels
899 for order in range(max_order - 1, min_order - 1, -1):
900 pixels_shifted[order] = np.right_shift(pixels_shifted[order + 1], 2)
901
902 # And set the gutter to an illegal pixel value.
903 for order in range(min_order, max_order + 1):
904 pixels_shifted[order][-1] = -1
905
906 # Read in the first pixel for determining image properties.
907 exp0 = list(hips_exposure_handle_dict.values())[0].get()
908 bbox = exp0.getBBox()
909 npix = bbox.getWidth()
910 shift_order = int(np.round(np.log2(npix)))
911
912 # Create blank exposures for each level, including the highest order.
913 # We also make sure we create blank exposures for any bands used in the color
914 # PNGs, even if they aren't available.
915 exposures = {}
916 for band in bands:
917 for order in range(min_order, max_order + 1):
918 exp = exp0.Factory(bbox=bbox)
919 exp.image.array[:, :] = np.nan
920 exposures[(band, order)] = exp
921
922 # Loop over all pixels, avoiding the gutter.
923 for pixel_counter, pixel in enumerate(pixels[:-1]):
924 self.log.debug("Working on high resolution pixel %d", pixel)
925 for band in bands:
926 # Read all the exposures here for the highest order.
927 # There will always be at least one band with a HiPS image available
928 # at the highest order. However, for color images it is possible that
929 # not all bands have coverage so we require this check.
930 if (pixel, band) in hips_exposure_handle_dict:
931 exposures[(band, max_order)] = hips_exposure_handle_dict[(pixel, band)].get()
932
933 # Go up the HiPS tree.
934 # We only write pixels and rebin to fill the parent pixel when we are
935 # done with the current pixel, which we detect when the next pixel
936 # maps to a different parent pixel number at this order.
937 for order in range(max_order, min_order - 1, -1):
938 if pixels_shifted[order][pixel_counter + 1] == pixels_shifted[order][pixel_counter]:
939 # This order is not done, and so none of the other orders will be.
940 break
941
942 # We can now write out the images for each band.
943 # Note this will always trigger at the max order where each pixel is unique.
944 if not do_color:
945 for band in bands:
946 self._write_hips_image(
947 hips_base_path.join(f"band_{band}", forceDirectory=True),
948 order,
949 pixels_shifted[order][pixel_counter],
950 exposures[(band, order)].image,
951 png_grayscale_mapping,
952 shift_order=shift_order,
953 )
954 else:
955 # Make a color png.
956 self._write_hips_color_png(
957 hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
958 order,
959 pixels_shifted[order][pixel_counter],
960 exposures[(self.config.red_channel_band, order)].image,
961 exposures[(self.config.green_channel_band, order)].image,
962 exposures[(self.config.blue_channel_band, order)].image,
963 png_color_mapping,
964 )
965
966 log_level = self.log.INFO if order == (max_order - 3) else self.log.DEBUG
967 self.log.log(
968 log_level,
969 "Completed HiPS generation for %s, order %d, pixel %d (%d/%d)",
970 ",".join(bands),
971 order,
972 pixels_shifted[order][pixel_counter],
973 pixel_counter,
974 len(pixels) - 1,
975 )
976
977 # When we are at the top of the tree, erase top level images and continue.
978 if order == min_order:
979 for band in bands:
980 exposures[(band, order)].image.array[:, :] = np.nan
981 continue
982
983 # Now average the images for each band.
984 for band in bands:
985 arr = exposures[(band, order)].image.array.reshape(npix//2, 2, npix//2, 2)
986 with warnings.catch_warnings():
987 warnings.simplefilter("ignore")
988 binned_image_arr = np.nanmean(arr, axis=(1, 3))
989
990 # Fill the next level up. We figure out which of the four
991 # sub-pixels the current pixel occupies.
992 sub_index = (pixels_shifted[order][pixel_counter]
993 - np.left_shift(pixels_shifted[order - 1][pixel_counter], 2))
994
995 # Fill exposure at the next level up.
996 exp = exposures[(band, order - 1)]
997
998 # Fill the correct subregion.
999 if sub_index == 0:
1000 exp.image.array[npix//2:, 0: npix//2] = binned_image_arr
1001 elif sub_index == 1:
1002 exp.image.array[0: npix//2, 0: npix//2] = binned_image_arr
1003 elif sub_index == 2:
1004 exp.image.array[npix//2:, npix//2:] = binned_image_arr
1005 elif sub_index == 3:
1006 exp.image.array[0: npix//2, npix//2:] = binned_image_arr
1007 else:
1008 # This should be impossible.
1009 raise ValueError("Illegal pixel sub index")
1010
1011 # Erase the previous exposure.
1012 if order < max_order:
1013 exposures[(band, order)].image.array[:, :] = np.nan
1014
1015 # Write the properties files and MOCs.
1016 if not do_color:
1017 for band in bands:
1018 band_pixels = np.array([pixel
1019 for pixel, band_ in hips_exposure_handle_dict.keys()
1020 if band_ == band])
1021 band_pixels = np.sort(band_pixels)
1022
1023 self._write_properties_and_moc(
1024 hips_base_path.join(f"band_{band}", forceDirectory=True),
1025 max_order,
1026 band_pixels,
1027 exp0,
1028 shift_order,
1029 band,
1030 False,
1031 )
1032 self._write_allsky_file(
1033 hips_base_path.join(f"band_{band}", forceDirectory=True),
1034 min_order,
1035 )
1036 else:
1037 self._write_properties_and_moc(
1038 hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
1039 max_order,
1040 pixels[:-1],
1041 exp0,
1042 shift_order,
1043 colorstr,
1044 True,
1045 )
1046 self._write_allsky_file(
1047 hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
1048 min_order,
1049 )
1050
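# A small sketch of the parent/child bookkeeping in the tree walk above: in
# NEST ordering the four children of parent pixel p at the next finer order
# are 4*p .. 4*p + 3, so a right shift by two bits recovers the parent and
# the remainder gives the sub-quadrant used to place the binned image. The
# helper name and pixel values are illustrative.
def _example_parent_child():
    child_pixels = np.array([8, 9, 10, 11])
    parents = np.right_shift(child_pixels, 2)
    sub_indices = child_pixels - np.left_shift(parents, 2)
    return parents, sub_indices  # -> array([2, 2, 2, 2]), array([0, 1, 2, 3])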
1051 def _write_hips_image(self, hips_base_path, order, pixel, image, png_mapping, shift_order=9):
1052 """Write a HiPS image.
1053
1054 Parameters
1055 ----------
1056 hips_base_path : `lsst.resources.ResourcePath`
1057 Resource path to the base of the HiPS directory tree.
1058 order : `int`
1059 HEALPix order of the HiPS image to write.
1060 pixel : `int`
1061 HEALPix pixel of the HiPS image.
1062 image : `lsst.afw.image.Image`
1063 Image to write.
1064 png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
1065 Mapping to convert image to scaled png.
1066 shift_order : `int`, optional
1067 HPX shift_order.
1068 """
1069 # WARNING: In general PipelineTasks are not allowed to do any outputs
1070 # outside of the butler. This task has been given (temporary)
1071 # Special Dispensation because of the nature of HiPS outputs until
1072 # a more controlled solution can be found.
1073
1074 dir_number = self._get_dir_number(pixel)
1075 hips_dir = hips_base_path.join(
1076 f"Norder{order}",
1077 forceDirectory=True
1078 ).join(
1079 f"Dir{dir_number}",
1080 forceDirectory=True
1081 )
1082
1083 wcs = makeHpxWcs(order, pixel, shift_order=shift_order)
1084
1085 uri = hips_dir.join(f"Npix{pixel}.fits")
1086
1087 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1088 image.writeFits(temporary_uri.ospath, metadata=wcs.getFitsMetadata())
1089
1090 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1091
1092 # And make a grayscale png as well
1093
1094 vals = 255 - png_mapping.map_intensity_to_uint8(image.array).astype(np.uint8)
1095 vals[~np.isfinite(image.array) | (image.array < 0)] = 0
1096 im = Image.fromarray(vals[::-1, :], "L")
1097
1098 uri = hips_dir.join(f"Npix{pixel}.png")
1099
1100 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1101 im.save(temporary_uri.ospath)
1102
1103 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1104
1105 def _write_hips_color_png(
1106 self,
1107 hips_base_path,
1108 order,
1109 pixel,
1110 image_red,
1111 image_green,
1112 image_blue,
1113 png_mapping,
1114 ):
1115 """Write a color png HiPS image.
1116
1117 Parameters
1118 ----------
1119 hips_base_path : `lsst.resources.ResourcePath`
1120 Resource path to the base of the HiPS directory tree.
1121 order : `int`
1122 HEALPix order of the HiPS image to write.
1123 pixel : `int`
1124 HEALPix pixel of the HiPS image.
1125 image_red : `lsst.afw.image.Image`
1126 Input for red channel of output png.
1127 image_green : `lsst.afw.image.Image`
1128 Input for green channel of output png.
1129 image_blue : `lsst.afw.image.Image`
1130 Input for blue channel of output png.
1131 png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
1132 Mapping to convert image to scaled png.
1133 """
1134 # WARNING: In general PipelineTasks are not allowed to do any outputs
1135 # outside of the butler. This task has been given (temporary)
1136 # Special Dispensation because of the nature of HiPS outputs until
1137 # a more controlled solution can be found.
1138
1139 dir_number = self._get_dir_number(pixel)
1140 hips_dir = hips_base_path.join(
1141 f"Norder{order}",
1142 forceDirectory=True
1143 ).join(
1144 f"Dir{dir_number}",
1145 forceDirectory=True
1146 )
1147
1148 # We need to convert nans to the minimum values in the mapping.
1149 arr_red = image_red.array.copy()
1150 arr_red[np.isnan(arr_red)] = png_mapping.minimum[0]
1151 arr_green = image_green.array.copy()
1152 arr_green[np.isnan(arr_green)] = png_mapping.minimum[1]
1153 arr_blue = image_blue.array.copy()
1154 arr_blue[np.isnan(arr_blue)] = png_mapping.minimum[2]
1155
1156 image_array = png_mapping.make_rgb_image(arr_red, arr_green, arr_blue)
1157
1158 im = Image.fromarray(image_array[::-1, :, :], mode="RGB")
1159
1160 uri = hips_dir.join(f"Npix{pixel}.png")
1161
1162 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1163 im.save(temporary_uri.ospath)
1164
1165 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1166
1167 def _write_properties_and_moc(
1168 self,
1169 hips_base_path,
1170 max_order,
1171 pixels,
1172 exposure,
1173 shift_order,
1174 band,
1175 multiband
1176 ):
1177 """Write HiPS properties file and MOC.
1178
1179 Parameters
1180 ----------
1181 hips_base_path : `lsst.resources.ResourcePath`
1182 Resource path to the base of the HiPS directory tree.
1183 max_order : `int`
1184 Maximum HEALPix order.
1185 pixels : `np.ndarray` (N,)
1186 Array of pixels used.
1187 exposure : `lsst.afw.image.Exposure`
1188 Sample HPX exposure used for generating HiPS tiles.
1189 shift_order : `int`
1190 HPX shift order.
1191 band : `str`
1192 Band (or color).
1193 multiband : `bool`
1194 Is band multiband / color?
1195 """
1196 area = hpg.nside_to_pixel_area(2**max_order, degrees=True)*len(pixels)
1197
1198 initial_ra = self.config.properties.initial_ra
1199 initial_dec = self.config.properties.initial_dec
1200 initial_fov = self.config.properties.initial_fov
1201
1202 if initial_ra is None or initial_dec is None or initial_fov is None:
1203 # We want to point to an arbitrary pixel in the footprint.
1204 # Just take the median pixel value for simplicity.
1205 temp_pixels = pixels.copy()
1206 if temp_pixels.size % 2 == 0:
1207 temp_pixels = np.append(temp_pixels, [temp_pixels[0]])
1208 medpix = int(np.median(temp_pixels))
1209 _initial_ra, _initial_dec = hpg.pixel_to_angle(2**max_order, medpix)
1210 _initial_fov = hpg.nside_to_resolution(2**max_order, units='arcminutes')/60.
1211
1212 if initial_ra is None or initial_dec is None:
1213 initial_ra = _initial_ra
1214 initial_dec = _initial_dec
1215 if initial_fov is None:
1216 initial_fov = _initial_fov
1217
1218 self._write_hips_properties_file(
1219 hips_base_path,
1220 self.config.properties,
1221 band,
1222 multiband,
1223 exposure,
1224 max_order,
1225 shift_order,
1226 area,
1227 initial_ra,
1228 initial_dec,
1229 initial_fov,
1230 )
1231
1232 # Write the MOC coverage
1233 self._write_hips_moc_file(
1234 hips_base_path,
1235 max_order,
1236 pixels,
1237 )
1238
1239 def _write_hips_properties_file(
1240 self,
1241 hips_base_path,
1242 properties_config,
1243 band,
1244 multiband,
1245 exposure,
1246 max_order,
1247 shift_order,
1248 area,
1249 initial_ra,
1250 initial_dec,
1251 initial_fov
1252 ):
1253 """Write HiPS properties file.
1254
1255 Parameters
1256 ----------
1257 hips_base_path : `lsst.resources.ResourcePath`
1258 ResourcePath at top of HiPS tree. File will be written
1259 to this path as ``properties``.
1260 properties_config : `lsst.pipe.tasks.hips.HipsPropertiesConfig`
1261 Configuration for properties values.
1262 band : `str`
1263 Name of band(s) for HiPS tree.
1264 multiband : `bool`
1265 Is multiband / color?
1266 exposure : `lsst.afw.image.Exposure`
1267 Sample HPX exposure used for generating HiPS tiles.
1268 max_order : `int`
1269 Maximum HEALPix order.
1270 shift_order : `int`
1271 HPX shift order.
1272 area : `float`
1273 Coverage area in square degrees.
1274 initial_ra : `float`
1275 Initial HiPS RA position (degrees).
1276 initial_dec : `float`
1277 Initial HiPS Dec position (degrees).
1278 initial_fov : `float`
1279 Initial HiPS display size (degrees).
1280 """
1281 # WARNING: In general PipelineTasks are not allowed to do any outputs
1282 # outside of the butler. This task has been given (temporary)
1283 # Special Dispensation because of the nature of HiPS outputs until
1284 # a more controlled solution can be found.
1285 def _write_property(fh, name, value):
1286 """Write a property name/value to a file handle.
1287
1288 Parameters
1289 ----------
1290 fh : file object
1291 Open for writing.
1292 name : `str`
1293 Name of the property.
1294 value : `str`
1295 Value of the property.
1296 """
1297 # This ensures that the name has no spaces or space-like characters,
1298 # per the HiPS standard.
1299 if re.search(r"\s", name):
1300 raise ValueError(f"``{name}`` cannot contain any space characters.")
1301 if "=" in name:
1302 raise ValueError(f"``{name}`` cannot contain an ``=``")
1303
1304 fh.write(f"{name:25}= {value}\n")
1305
1306 if exposure.image.array.dtype == np.dtype("float32"):
1307 bitpix = -32
1308 elif exposure.image.array.dtype == np.dtype("float64"):
1309 bitpix = -64
1310 elif exposure.image.array.dtype == np.dtype("int32"):
1311 bitpix = 32
1312
1313 date_iso8601 = datetime.utcnow().isoformat(timespec="seconds") + "Z"
1314 pixel_scale = hpg.nside_to_resolution(2**(max_order + shift_order), units='degrees')
1315
1316 uri = hips_base_path.join("properties")
1317 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1318 with open(temporary_uri.ospath, "w") as fh:
1319 _write_property(
1320 fh,
1321 "creator_did",
1322 properties_config.creator_did_template.format(band=band),
1323 )
1324 if properties_config.obs_collection is not None:
1325 _write_property(fh, "obs_collection", properties_config.obs_collection)
1326 _write_property(
1327 fh,
1328 "obs_title",
1329 properties_config.obs_title_template.format(band=band),
1330 )
1331 if properties_config.obs_description_template is not None:
1332 _write_property(
1333 fh,
1334 "obs_description",
1335 properties_config.obs_description_template.format(band=band),
1336 )
1337 if len(properties_config.prov_progenitor) > 0:
1338 for prov_progenitor in properties_config.prov_progenitor:
1339 _write_property(fh, "prov_progenitor", prov_progenitor)
1340 if properties_config.obs_ack is not None:
1341 _write_property(fh, "obs_ack", properties_config.obs_ack)
1342 _write_property(fh, "obs_regime", "Optical")
1343 _write_property(fh, "data_pixel_bitpix", str(bitpix))
1344 _write_property(fh, "dataproduct_type", "image")
1345 _write_property(fh, "moc_sky_fraction", str(area/41253.))
1346 _write_property(fh, "data_ucd", "phot.flux")
1347 _write_property(fh, "hips_creation_date", date_iso8601)
1348 _write_property(fh, "hips_builder", "lsst.pipe.tasks.hips.GenerateHipsTask")
1349 _write_property(fh, "hips_creator", "Vera C. Rubin Observatory")
1350 _write_property(fh, "hips_version", "1.4")
1351 _write_property(fh, "hips_release_date", date_iso8601)
1352 _write_property(fh, "hips_frame", "equatorial")
1353 _write_property(fh, "hips_order", str(max_order))
1354 _write_property(fh, "hips_tile_width", str(exposure.getBBox().getWidth()))
1355 _write_property(fh, "hips_status", "private master clonableOnce")
1356 if multiband:
1357 _write_property(fh, "hips_tile_format", "png")
1358 _write_property(fh, "dataproduct_subtype", "color")
1359 else:
1360 _write_property(fh, "hips_tile_format", "png fits")
1361 _write_property(fh, "hips_pixel_bitpix", str(bitpix))
1362 _write_property(fh, "hips_pixel_scale", str(pixel_scale))
1363 _write_property(fh, "hips_initial_ra", str(initial_ra))
1364 _write_property(fh, "hips_initial_dec", str(initial_dec))
1365 _write_property(fh, "hips_initial_fov", str(initial_fov))
1366 if multiband:
1367 if self.config.blue_channel_band in properties_config.spectral_ranges:
1368 em_min = properties_config.spectral_ranges[
1369 self.config.blue_channel_band
1370 ].lambda_min/1e9
1371 else:
1372 self.log.warning("blue band %s not in self.config.spectral_ranges.", band)
1373 em_min = 3e-7
1374 if self.config.red_channel_band in properties_config.spectral_ranges:
1375 em_max = properties_config.spectral_ranges[
1376 self.config.red_channel_band
1377 ].lambda_max/1e9
1378 else:
1379 self.log.warning("red band %s not in self.config.spectral_ranges.", band)
1380 em_max = 1e-6
1381 else:
1382 if band in properties_config.spectral_ranges:
1383 em_min = properties_config.spectral_ranges[band].lambda_min/1e9
1384 em_max = properties_config.spectral_ranges[band].lambda_max/1e9
1385 else:
1386 self.log.warning("band %s not in self.config.spectral_ranges.", band)
1387 em_min = 3e-7
1388 em_max = 1e-6
1389 _write_property(fh, "em_min", str(em_min))
1390 _write_property(fh, "em_max", str(em_max))
1391 if properties_config.t_min is not None:
1392 _write_property(fh, "t_min", properties_config.t_min)
1393 if properties_config.t_max is not None:
1394 _write_property(fh, "t_max", properties_config.t_max)
1395
1396 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1397
1398 def _write_hips_moc_file(self, hips_base_path, max_order, pixels, min_uniq_order=1):
1399 """Write HiPS MOC file.
1400
1401 Parameters
1402 ----------
1403 hips_base_path : `lsst.resources.ResourcePath`
1404 ResourcePath to top of HiPS tree. File will be written
1405 to this path as ``Moc.fits``.
1406 max_order : `int`
1407 Maximum HEALPix order.
1408 pixels : `np.ndarray`
1409 Array of pixels covered.
1410 min_uniq_order : `int`, optional
1411 Minimum HEALPix order for looking for fully covered pixels.
1412 """
1413 # WARNING: In general PipelineTasks are not allowed to do any outputs
1414 # outside of the butler. This task has been given (temporary)
1415 # Special Dispensation because of the nature of HiPS outputs until
1416 # a more controlled solution can be found.
1417
1418 # Make the initial list of UNIQ pixels
1419 uniq = 4*(4**max_order) + pixels
1420
1421 # Make a healsparse map which provides easy degrade/comparisons.
1422 hspmap = hsp.HealSparseMap.make_empty(2**min_uniq_order, 2**max_order, dtype=np.float32)
1423 hspmap[pixels] = 1.0
1424
1425 # Loop over orders, degrade each time, and look for pixels with full coverage.
1426 for uniq_order in range(max_order - 1, min_uniq_order - 1, -1):
1427 hspmap = hspmap.degrade(2**uniq_order, reduction="sum")
1428 pix_shift = np.right_shift(pixels, 2*(max_order - uniq_order))
1429 # Check if any of the pixels at uniq_order have full coverage.
1430 covered, = np.isclose(hspmap[pix_shift], 4**(max_order - uniq_order)).nonzero()
1431 if covered.size == 0:
1432 # No pixels at uniq_order are fully covered, we're done.
1433 break
1434 # Replace the UNIQ pixels that are fully covered.
1435 uniq[covered] = 4*(4**uniq_order) + pix_shift[covered]
1436
1437 # Remove duplicate pixels.
1438 uniq = np.unique(uniq)
1439
1440 # Output to fits.
1441 tbl = np.zeros(uniq.size, dtype=[("UNIQ", "i8")])
1442 tbl["UNIQ"] = uniq
1443
1444 order = np.log2(tbl["UNIQ"]//4).astype(np.int32)//2
1445 moc_order = np.max(order)
1446
1447 hdu = fits.BinTableHDU(tbl)
1448 hdu.header["PIXTYPE"] = "HEALPIX"
1449 hdu.header["ORDERING"] = "NUNIQ"
1450 hdu.header["COORDSYS"] = "C"
1451 hdu.header["MOCORDER"] = moc_order
1452 hdu.header["MOCTOOL"] = "lsst.pipe.tasks.hips.GenerateHipsTask"
1453
1454 uri = hips_base_path.join("Moc.fits")
1455
1456 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1457 hdu.writeto(temporary_uri.ospath)
1458
1459 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1460
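# A worked example of the NUNIQ packing used above: order 11, pixel 100 packs
# to 4*4**11 + 100, and the order is recovered exactly as in the MOCORDER
# computation. The helper name and values are illustrative.
def _example_nuniq(order=11, pixel=100):
    uniq = 4*(4**order) + pixel
    recovered_order = int(np.log2(uniq//4))//2
    assert recovered_order == order
    return uniq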
1461 def _write_allsky_file(self, hips_base_path, allsky_order):
1462 """Write an Allsky.png file.
1463
1464 Parameters
1465 ----------
1466 hips_base_path : `lsst.resources.ResourcePath`
1467 Resource path to the base of the HiPS directory tree.
1468 allsky_order : `int`
1469 HEALPix order at which to make the Allsky.png file (the minimum HiPS order).
1470 """
1471 tile_size = self.config.allsky_tilesize
1472 n_tiles_per_side = int(np.sqrt(hpg.nside_to_npixel(hpg.order_to_nside(allsky_order))))
1473
1474 allsky_image = None
1475
1476 allsky_order_uri = hips_base_path.join(f"Norder{allsky_order}", forceDirectory=True)
1477 pixel_regex = re.compile(r"Npix([0-9]+)\.png$")
1478 png_uris = list(
1479 ResourcePath.findFileResources(
1480 candidates=[allsky_order_uri],
1481 file_filter=pixel_regex,
1482 )
1483 )
1484
1485 for png_uri in png_uris:
1486 matches = re.match(pixel_regex, png_uri.basename())
1487 pix_num = int(matches.group(1))
1488 tile_image = Image.open(io.BytesIO(png_uri.read()))
1489 row = math.floor(pix_num//n_tiles_per_side)
1490 column = pix_num % n_tiles_per_side
1491 box = (column*tile_size, row*tile_size, (column + 1)*tile_size, (row + 1)*tile_size)
1492 tile_image_shrunk = tile_image.resize((tile_size, tile_size))
1493
1494 if allsky_image is None:
1495 allsky_image = Image.new(
1496 tile_image.mode,
1497 (n_tiles_per_side*tile_size, n_tiles_per_side*tile_size),
1498 )
1499 allsky_image.paste(tile_image_shrunk, box)
1500
1501 uri = allsky_order_uri.join("Allsky.png")
1502
1503 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1504 allsky_image.save(temporary_uri.ospath)
1505
1506 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1507
1508 def _get_dir_number(self, pixel):
1509 """Compute the directory number from a pixel.
1510
1511 Parameters
1512 ----------
1513 pixel : `int`
1514 HEALPix pixel number.
1515
1516 Returns
1517 -------
1518 dir_number : `int`
1519 HiPS directory number.
1520 """
1521 return (pixel//10000)*10000
1522
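# For example, HiPS groups tiles into directories of 10000 pixels each, so a
# tile named Npix123456 is stored under Dir120000.
assert (123456//10000)*10000 == 120000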
1523
1524class GenerateColorHipsConnections(pipeBase.PipelineTaskConnections,
1525 dimensions=("instrument", ),
1526 defaultTemplates={"coaddName": "deep"}):
1527 hips_exposure_handles = pipeBase.connectionTypes.Input(
1528 doc="HiPS-compatible HPX images.",
1529 name="{coaddName}Coadd_hpx",
1530 storageClass="ExposureF",
1531 dimensions=("healpix11", "band"),
1532 multiple=True,
1533 deferLoad=True,
1534 )
1535
1536
1537class GenerateColorHipsConfig(GenerateHipsConfig,
1538 pipelineConnections=GenerateColorHipsConnections):
1539 """Configuration parameters for GenerateColorHipsTask."""
1540 blue_channel_band = pexConfig.Field(
1541 doc="Band to use for blue channel of color pngs.",
1542 dtype=str,
1543 default="g",
1544 )
1545 green_channel_band = pexConfig.Field(
1546 doc="Band to use for green channel of color pngs.",
1547 dtype=str,
1548 default="r",
1549 )
1550 red_channel_band = pexConfig.Field(
1551 doc="Band to use for red channel of color pngs.",
1552 dtype=str,
1553 default="i",
1554 )
1555 png_color_asinh_minimum = pexConfig.Field(
1556 doc="AsinhMapping intensity to be mapped to black for color png scaling.",
1557 dtype=float,
1558 default=0.0,
1559 )
1560 png_color_asinh_stretch = pexConfig.Field(
1561 doc="AsinhMapping linear stretch for color png scaling.",
1562 dtype=float,
1563 default=5.0,
1564 )
1565 png_color_asinh_softening = pexConfig.Field(
1566 doc="AsinhMapping softening parameter (Q) for color png scaling.",
1567 dtype=float,
1568 default=8.0,
1569 )
1570
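# With the channel-band defaults above, the color tree is written under a
# directory named from the bands in blue/green/red order, i.e. ``color_gri``
# (see the ``colorstr`` built in GenerateHipsTask.run).
def _example_colorstr():
    config = GenerateColorHipsConfig()
    return f"{config.blue_channel_band}{config.green_channel_band}{config.red_channel_band}"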
1571
1572class GenerateColorHipsTask(GenerateHipsTask):
1573 """Task for making a HiPS tree with color pngs."""
1574 ConfigClass = GenerateColorHipsConfig
1575 _DefaultName = "generateColorHips"
1576 color_task = True
1577
1578 def _check_data_bands(self, data_bands):
1579 """Check the data for configured bands.
1580
1581 Warn if any color bands are missing data.
1582
1583 Parameters
1584 ----------
1585 data_bands : `set` [`str`]
1586 Bands from the input data.
1587
1588 Returns
1589 -------
1590 bands : `list` [`str`]
1591 List of bands in bgr color order.
1592 """
1593 if len(data_bands) == 0:
1594 raise RuntimeError("GenerateColorHipsTask must have data from at least one band.")
1595
1596 if self.config.blue_channel_band not in data_bands:
1597 self.log.warning(
1598 "Color png blue_channel_band %s not in dataset.",
1599 self.config.blue_channel_band
1600 )
1601 if self.config.green_channel_band not in data_bands:
1602 self.log.warning(
1603 "Color png green_channel_band %s not in dataset.",
1604 self.config.green_channel_band
1605 )
1606 if self.config.red_channel_band not in data_bands:
1607 self.log.warning(
1608 "Color png red_channel_band %s not in dataset.",
1609 self.config.red_channel_band
1610 )
1611
1612 bands = [
1613 self.config.blue_channel_band,
1614 self.config.green_channel_band,
1615 self.config.red_channel_band,
1616 ]
1617
1618 return bands