LSST Applications 26.0.0,g0265f82a02+6660c170cc,g07994bdeae+30b05a742e,g0a0026dc87+17526d298f,g0a60f58ba1+17526d298f,g0e4bf8285c+96dd2c2ea9,g0ecae5effc+c266a536c8,g1e7d6db67d+6f7cb1f4bb,g26482f50c6+6346c0633c,g2bbee38e9b+6660c170cc,g2cc88a2952+0a4e78cd49,g3273194fdb+f6908454ef,g337abbeb29+6660c170cc,g337c41fc51+9a8f8f0815,g37c6e7c3d5+7bbafe9d37,g44018dc512+6660c170cc,g4a941329ef+4f7594a38e,g4c90b7bd52+5145c320d2,g58be5f913a+bea990ba40,g635b316a6c+8d6b3a3e56,g67924a670a+bfead8c487,g6ae5381d9b+81bc2a20b4,g93c4d6e787+26b17396bd,g98cecbdb62+ed2cb6d659,g98ffbb4407+81bc2a20b4,g9ddcbc5298+7f7571301f,ga1e77700b3+99e9273977,gae46bcf261+6660c170cc,gb2715bf1a1+17526d298f,gc86a011abf+17526d298f,gcf0d15dbbd+96dd2c2ea9,gdaeeff99f8+0d8dbea60f,gdb4ec4c597+6660c170cc,ge23793e450+96dd2c2ea9,gf041782ebf+171108ac67
LSST Data Management Base Package
Loading...
Searching...
No Matches
hips.py
Go to the documentation of this file.
1# This file is part of pipe_tasks.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
21
22"""Tasks for making and manipulating HIPS images."""
23
24__all__ = ["HighResolutionHipsTask", "HighResolutionHipsConfig", "HighResolutionHipsConnections",
25 "GenerateHipsTask", "GenerateHipsConfig", "GenerateColorHipsTask", "GenerateColorHipsConfig"]
26
27from collections import defaultdict
28import numpy as np
29import argparse
30import io
31import sys
32import re
33import warnings
34import math
35from datetime import datetime
36import hpgeom as hpg
37import healsparse as hsp
38from astropy.io import fits
39from astropy.visualization.lupton_rgb import AsinhMapping
40from PIL import Image
41
42from lsst.sphgeom import RangeSet, HealpixPixelization
43from lsst.utils.timer import timeMethod
44from lsst.daf.butler import Butler, DataCoordinate, DatasetRef, Quantum, SkyPixDimension
45import lsst.pex.config as pexConfig
46import lsst.pipe.base as pipeBase
47import lsst.afw.geom as afwGeom
48import lsst.afw.math as afwMath
49import lsst.afw.image as afwImage
50import lsst.geom as geom
51from lsst.afw.geom import makeHpxWcs
52from lsst.resources import ResourcePath
53
54
class HighResolutionHipsConnections(pipeBase.PipelineTaskConnections,
                                    dimensions=("healpix9", "band"),
                                    defaultTemplates={"coaddName": "deep"}):
    # Input coadds; deferred loading so each quantum only reads the coadds
    # it actually overlaps.
    coadd_exposure_handles = pipeBase.connectionTypes.Input(
        doc="Coadded exposures to convert to HIPS format.",
        name="{coaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    # Output exposures, one per healpix pixel at the output (hips) order.
    hips_exposures = pipeBase.connectionTypes.Output(
        doc="HiPS-compatible HPX image.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
    )

    def __init__(self, *, config=None):
        """Validate that the quantum and output healpix dimensions are
        mutually consistent and agree with ``config.hips_order``.
        """
        super().__init__(config=config)

        # Exactly one healpix dimension must appear in the quantum
        # dimensions, and its order may not exceed the output order.
        quantum_order = None
        for dim in self.dimensions:
            if "healpix" not in dim:
                continue
            if quantum_order is not None:
                raise ValueError("Must not specify more than one quantum healpix dimension.")
            quantum_order = int(dim.split("healpix")[1])
        if quantum_order is None:
            raise ValueError("Must specify a healpix dimension in quantum dimensions.")

        if quantum_order > config.hips_order:
            raise ValueError("Quantum healpix dimension order must not be greater than hips_order")

        # Exactly one healpix dimension must appear on the output
        # connection, and it must match config.hips_order exactly.
        order = None
        for dim in self.hips_exposures.dimensions:
            if "healpix" not in dim:
                continue
            if order is not None:
                raise ValueError("Must not specify more than one healpix dimension.")
            order = int(dim.split("healpix")[1])
        if order is None:
            raise ValueError("Must specify a healpix dimension in hips_exposure dimensions.")

        if order != config.hips_order:
            raise ValueError("healpix dimension order must match config.hips_order.")
100
101
class HighResolutionHipsConfig(pipeBase.PipelineTaskConfig,
                               pipelineConnections=HighResolutionHipsConnections):
    """Configuration parameters for HighResolutionHipsTask.

    Notes
    -----
    A HiPS image covers one HEALPix cell, with the HEALPix nside equal to
    2**hips_order. Each cell is 'shift_order' orders deeper than the HEALPix
    cell, with 2**shift_order x 2**shift_order sub-pixels on a side, which
    defines the target resolution of the HiPS image. The IVOA recommends
    shift_order=9, for 2**9=512 pixels on a side.

    Table 5 from
    https://www.ivoa.net/documents/HiPS/20170519/REC-HIPS-1.0-20170519.pdf
    shows the relationship between hips_order, number of tiles (full
    sky coverage), cell size, and sub-pixel size/image resolution (with
    the default shift_order=9):
    +------------+-----------------+--------------+------------------+
    | hips_order | Number of Tiles | Cell Size    | Image Resolution |
    +============+=================+==============+==================+
    | 0          | 12              | 58.63 deg    | 6.871 arcmin     |
    | 1          | 48              | 29.32 deg    | 3.435 arcmin     |
    | 2          | 192             | 14.66 deg    | 1.718 arcmin     |
    | 3          | 768             | 7.329 deg    | 51.53 arcsec     |
    | 4          | 3072            | 3.665 deg    | 25.77 arcsec     |
    | 5          | 12288           | 1.832 deg    | 12.88 arcsec     |
    | 6          | 49152           | 54.97 arcmin | 6.442 arcsec     |
    | 7          | 196608          | 27.48 arcmin | 3.221 arcsec     |
    | 8          | 786432          | 13.74 arcmin | 1.61 arcsec      |
    | 9          | 3145728         | 6.871 arcmin | 805.2mas         |
    | 10         | 12582912        | 3.435 arcmin | 402.6mas         |
    | 11         | 50331648        | 1.718 arcmin | 201.3mas         |
    | 12         | 201326592      | 51.53 arcsec | 100.6mas          |
    | 13         | 805306368      | 25.77 arcsec | 50.32mas          |
    +------------+-----------------+--------------+------------------+
    """
    # Order of the output HPX exposures; must match the healpix dimension
    # on the hips_exposures output connection (enforced in the
    # connections class __init__).
    hips_order = pexConfig.Field(
        doc="HIPS image order.",
        dtype=int,
        default=11,
    )
    # Each output tile is 2**shift_order pixels on a side (512 for the
    # IVOA-recommended default of 9).
    shift_order = pexConfig.Field(
        doc="HIPS shift order (such that each tile is 2**shift_order pixels on a side)",
        dtype=int,
        default=9,
    )
    # Configuration for warping coadds onto the HPX grid.
    warp = pexConfig.ConfigField(
        dtype=afwMath.Warper.ConfigClass,
        doc="Warper configuration",
    )

    def setDefaults(self):
        # A wide Lanczos kernel preserves resolution when resampling
        # the coadds onto the HPX projection.
        self.warp.warpingKernelName = "lanczos5"
155
156
class HipsTaskNameDescriptor:
    """Descriptor used create a DefaultName that matches the order of
    the defined dimensions in the connections class.

    Parameters
    ----------
    prefix : `str`
        The prefix of the Default name, to which the order will be
        appended.
    """
    def __init__(self, prefix):
        # Template string; the healpix order is substituted on first access.
        self._defaultName = f"{prefix}{{}}"
        self._order = None

    def __get__(self, obj, klass=None):
        if klass is None:
            raise RuntimeError(
                "HipsTaskDescriptor was used in an unexpected context"
            )
        if self._order is None:
            # Lazily determine (and cache) the healpix order from the
            # owning task's connections-class dimensions.
            dims = klass.ConfigClass.ConnectionsClass.dimensions
            match = next(
                (m for m in (re.match(r"^healpix(\d*)$", dim) for dim in dims)
                 if m is not None),
                None,
            )
            if match is None:
                raise RuntimeError(
                    "Could not find healpix dimension in connections class"
                )
            self._order = int(match.group(1))
        return self._defaultName.format(self._order)
188
189
class HighResolutionHipsTask(pipeBase.PipelineTask):
    """Task for making high resolution HiPS images."""
    ConfigClass = HighResolutionHipsConfig
    # Default name carries the quantum healpix order, e.g.
    # "highResolutionHips9" for the default connection dimensions.
    _DefaultName = HipsTaskNameDescriptor("highResolutionHips")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Warper used to resample input coadds onto each HPX grid.
        self.warper = afwMath.Warper.fromConfig(self.config.warp)

    @timeMethod
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        """Butler-driven entry point; delegates the work to `run`.

        Parameters
        ----------
        butlerQC : `lsst.pipe.base.QuantumContext`
            Quantum execution context used for input/output I/O.
        inputRefs, outputRefs : connection reference structs
            References matching the connections class.
        """
        inputs = butlerQC.get(inputRefs)

        healpix_dim = f"healpix{self.config.hips_order}"

        # The output refs determine which output-order pixels this
        # quantum is responsible for.
        pixels = [hips_exposure.dataId[healpix_dim]
                  for hips_exposure in outputRefs.hips_exposures]

        outputs = self.run(pixels=pixels, coadd_exposure_handles=inputs["coadd_exposure_handles"])

        # Map pixel number -> output ref so each produced exposure can be
        # persisted against the correct dataset ref.
        hips_exposure_ref_dict = {hips_exposure_ref.dataId[healpix_dim]:
                                  hips_exposure_ref for hips_exposure_ref in outputRefs.hips_exposures}
        for pixel, hips_exposure in outputs.hips_exposures.items():
            butlerQC.put(hips_exposure, hips_exposure_ref_dict[pixel])

    def run(self, pixels, coadd_exposure_handles):
        """Run the HighResolutionHipsTask.

        Parameters
        ----------
        pixels : `Iterable` [ `int` ]
            Iterable of healpix pixels (nest ordering) to warp to.
        coadd_exposure_handles : `list` [`lsst.daf.butler.DeferredDatasetHandle`]
            Handles for the coadd exposures.

        Returns
        -------
        outputs : `lsst.pipe.base.Struct`
            ``hips_exposures`` is a dict with pixel (key) and hips_exposure (value)
        """
        self.log.info("Generating HPX images for %d pixels at order %d", len(pixels), self.config.hips_order)

        # All output tiles share the same bounding box:
        # 2**shift_order pixels on a side.
        npix = 2**self.config.shift_order
        bbox_hpx = geom.Box2I(corner=geom.Point2I(0, 0),
                              dimensions=geom.Extent2I(npix, npix))

        # For each healpix pixel we will create an empty exposure with the
        # correct HPX WCS. We furthermore create a dict to hold each of
        # the warps that will go into each HPX exposure.
        exp_hpx_dict = {}
        warp_dict = {}
        for pixel in pixels:
            wcs_hpx = afwGeom.makeHpxWcs(self.config.hips_order, pixel, shift_order=self.config.shift_order)
            exp_hpx = afwImage.ExposureF(bbox_hpx, wcs_hpx)
            exp_hpx_dict[pixel] = exp_hpx
            warp_dict[pixel] = []

        first_handle = True
        # Loop over input coadd exposures to minimize i/o (this speeds things
        # up by ~8x to batch together pixels that overlap a given coadd).
        for handle in coadd_exposure_handles:
            coadd_exp = handle.get()

            # For each pixel, warp the coadd to the HPX WCS for the pixel.
            for pixel in pixels:
                warped = self.warper.warpExposure(exp_hpx_dict[pixel].getWcs(), coadd_exp, maxBBox=bbox_hpx)

                # Start from an all-NaN, NO_DATA-masked image so regions the
                # warp does not cover stay flagged.
                exp = afwImage.ExposureF(exp_hpx_dict[pixel].getBBox(), exp_hpx_dict[pixel].getWcs())
                exp.maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)

                if first_handle:
                    # Make sure the mask planes, filter, and photocalib of the output
                    # exposure match the (first) input exposure.
                    exp_hpx_dict[pixel].mask.conformMaskPlanes(coadd_exp.mask.getMaskPlaneDict())
                    exp_hpx_dict[pixel].setFilter(coadd_exp.getFilter())
                    exp_hpx_dict[pixel].setPhotoCalib(coadd_exp.getPhotoCalib())

                if warped.getBBox().getArea() == 0 or not np.any(np.isfinite(warped.image.array)):
                    # There is no overlap, skip.
                    self.log.debug(
                        "No overlap between output HPX %d and input exposure %s",
                        pixel,
                        handle.dataId
                    )
                    continue

                exp.maskedImage.assign(warped.maskedImage, warped.getBBox())
                warp_dict[pixel].append(exp.maskedImage)

            first_handle = False

        # Warps for a pixel are combined with a nan-safe, variance-weighted
        # mean.
        stats_flags = afwMath.stringToStatisticsProperty("MEAN")
        stats_ctrl = afwMath.StatisticsControl()
        stats_ctrl.setNanSafe(True)
        stats_ctrl.setWeighted(True)
        stats_ctrl.setCalcErrorFromInputVariance(True)

        # Loop over pixels and combine the warps for each pixel.
        # The combination is done with a simple mean for pixels that
        # overlap in neighboring patches.
        for pixel in pixels:
            exp_hpx_dict[pixel].maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)

            if not warp_dict[pixel]:
                # Nothing in this pixel
                self.log.debug("No data in HPX pixel %d", pixel)
                # Remove the pixel from the output, no need to persist an
                # empty exposure.
                exp_hpx_dict.pop(pixel)
                continue

            exp_hpx_dict[pixel].maskedImage = afwMath.statisticsStack(
                warp_dict[pixel],
                stats_flags,
                stats_ctrl,
                [1.0]*len(warp_dict[pixel]),
                clipped=0,
                maskMap=[]
            )

        return pipeBase.Struct(hips_exposures=exp_hpx_dict)

    @classmethod
    def build_quantum_graph_cli(cls, argv):
        """A command-line interface entry point to `build_quantum_graph`.
        This method provides the implementation for the
        ``build-high-resolution-hips-qg`` script.

        Parameters
        ----------
        argv : `Sequence` [ `str` ]
            Command-line arguments (e.g. ``sys.argv[1:]``).
        """
        parser = cls._make_cli_parser()

        args = parser.parse_args(argv)

        if args.subparser_name is None:
            parser.print_help()
            sys.exit(1)

        # The pipeline file must contain exactly one task (this one); it
        # supplies the task configuration and connections.
        pipeline = pipeBase.Pipeline.from_uri(args.pipeline)
        expanded_pipeline = list(pipeline.toExpandedPipeline())

        if len(expanded_pipeline) != 1:
            raise RuntimeError(f"Pipeline file {args.pipeline} may only contain one task.")

        (task_def,) = expanded_pipeline

        butler = Butler(args.butler_config, collections=args.input)

        if args.subparser_name == "segment":
            # Do the segmentation
            hpix_pixelization = HealpixPixelization(level=args.hpix_build_order)
            dataset = task_def.connections.coadd_exposure_handles.name
            data_ids = set(butler.registry.queryDataIds("tract", datasets=dataset).expanded())
            region_pixels = []
            for data_id in data_ids:
                region = data_id.region
                pixel_range = hpix_pixelization.envelope(region)
                for r in pixel_range.ranges():
                    region_pixels.extend(range(r[0], r[1]))
            # Unique, sorted pixels covering all tracts with data.
            indices = np.unique(region_pixels)

            print(f"Pixels to run at HEALPix order --hpix_build_order {args.hpix_build_order}:")
            for pixel in indices:
                print(pixel)

        elif args.subparser_name == "build":
            # Build the quantum graph.

            # Figure out collection names.
            if args.output_run is None:
                if args.output is None:
                    raise ValueError("At least one of --output or --output-run options is required.")
                args.output_run = "{}/{}".format(args.output, pipeBase.Instrument.makeCollectionTimestamp())

            build_ranges = RangeSet(sorted(args.pixels))

            # Metadata includes a subset of attributes defined in CmdLineFwk.
            metadata = {
                "input": args.input,
                "butler_argument": args.butler_config,
                "output": args.output,
                "output_run": args.output_run,
                "data_query": args.where,
                "time": f"{datetime.now()}",
            }

            qg = cls.build_quantum_graph(
                task_def,
                butler.registry,
                args.hpix_build_order,
                build_ranges,
                where=args.where,
                collections=args.input,
                metadata=metadata,
            )
            qg.saveUri(args.save_qgraph)

    @classmethod
    def _make_cli_parser(cls):
        """Make the command-line parser.

        Returns
        -------
        parser : `argparse.ArgumentParser`
            Parser with ``segment`` and ``build`` sub-commands.
        """
        parser = argparse.ArgumentParser(
            description=(
                "Build a QuantumGraph that runs HighResolutionHipsTask on existing coadd datasets."
            ),
        )
        subparsers = parser.add_subparsers(help="sub-command help", dest="subparser_name")

        parser_segment = subparsers.add_parser("segment",
                                               help="Determine survey segments for workflow.")
        parser_build = subparsers.add_parser("build",
                                             help="Build quantum graph for HighResolutionHipsTask")

        for sub in [parser_segment, parser_build]:
            # These arguments are in common.
            sub.add_argument(
                "-b",
                "--butler-config",
                type=str,
                help="Path to data repository or butler configuration.",
                required=True,
            )
            sub.add_argument(
                "-p",
                "--pipeline",
                type=str,
                help="Pipeline file, limited to one task.",
                required=True,
            )
            sub.add_argument(
                "-i",
                "--input",
                type=str,
                nargs="+",
                help="Input collection(s) to search for coadd exposures.",
                required=True,
            )
            sub.add_argument(
                "-o",
                "--hpix_build_order",
                type=int,
                default=1,
                help="HEALPix order to segment sky for building quantum graph files.",
            )
            sub.add_argument(
                "-w",
                "--where",
                type=str,
                default=None,
                help="Data ID expression used when querying for input coadd datasets.",
            )

        parser_build.add_argument(
            "--output",
            type=str,
            help=(
                "Name of the output CHAINED collection. If this options is specified and "
                "--output-run is not, then a new RUN collection will be created by appending "
                "a timestamp to the value of this option."
            ),
            default=None,
            metavar="COLL",
        )
        parser_build.add_argument(
            "--output-run",
            type=str,
            help=(
                "Output RUN collection to write resulting images. If not provided "
                "then --output must be provided and a new RUN collection will be created "
                "by appending a timestamp to the value passed with --output."
            ),
            default=None,
            metavar="RUN",
        )
        parser_build.add_argument(
            "-q",
            "--save-qgraph",
            type=str,
            help="Output filename for QuantumGraph.",
            required=True,
        )
        parser_build.add_argument(
            "-P",
            "--pixels",
            type=int,
            nargs="+",
            help="Pixels at --hpix_build_order to generate quantum graph.",
            required=True,
        )

        return parser

    @classmethod
    def build_quantum_graph(
        cls,
        task_def,
        registry,
        constraint_order,
        constraint_ranges,
        where=None,
        collections=None,
        metadata=None,
    ):
        """Generate a `QuantumGraph` for running just this task.

        This is a temporary workaround for incomplete butler query support for
        HEALPix dimensions.

        Parameters
        ----------
        task_def : `lsst.pipe.base.TaskDef`
            Task definition.
        registry : `lsst.daf.butler.Registry`
            Client for the butler database.  May be read-only.
        constraint_order : `int`
            HEALPix order used to contrain which quanta are generated, via
            ``constraint_indices``.  This should be a coarser grid (smaller
            order) than the order used for the task's quantum and output data
            IDs, and ideally something between the spatial scale of a patch or
            the data repository's "common skypix" system (usually ``htm7``).
        constraint_ranges : `lsst.sphgeom.RangeSet`
            RangeSet which describes constraint pixels (HEALPix NEST, with order
            constraint_order) to constrain generated quanta.
        where : `str`, optional
            A boolean `str` expression of the form accepted by
            `Registry.queryDatasets` to constrain input datasets.  This may
            contain a constraint on tracts, patches, or bands, but not HEALPix
            indices.  Constraints on tracts and patches should usually be
            unnecessary, however - existing coadds that overlap the given
            HEALpix indices will be selected without such a constraint, and
            providing one may reject some that should normally be included.
        collections : `str` or `Iterable` [ `str` ], optional
            Collection or collections to search for input datasets, in order.
            If not provided, ``registry.defaults.collections`` will be
            searched.
        metadata : `dict` [ `str`, `Any` ]
            Graph metadata. It is required to contain "output_run" key with the
            name of the output RUN collection.

        Returns
        -------
        graph : `lsst.pipe.base.QuantumGraph`
            Graph with one quantum per (healpix pixel, band) that has
            overlapping input coadds.
        """
        config = task_def.config

        dataset_types = pipeBase.PipelineDatasetTypes.fromPipeline(pipeline=[task_def], registry=registry)
        # Since we know this is the only task in the pipeline, we know there
        # is only one overall input and one overall output.
        (input_dataset_type,) = dataset_types.inputs

        # Extract the main output dataset type (which needs multiple
        # DatasetRefs, and tells us the output HPX level), and make a set of
        # what remains for more mechanical handling later.
        output_dataset_type = dataset_types.outputs[task_def.connections.hips_exposures.name]
        incidental_output_dataset_types = dataset_types.outputs.copy()
        incidental_output_dataset_types.remove(output_dataset_type)
        (hpx_output_dimension,) = (d for d in output_dataset_type.dimensions
                                   if isinstance(d, SkyPixDimension))

        constraint_hpx_pixelization = registry.dimensions[f"healpix{constraint_order}"].pixelization
        common_skypix_name = registry.dimensions.commonSkyPix.name
        common_skypix_pixelization = registry.dimensions.commonSkyPix.pixelization

        # We will need all the pixels at the quantum resolution as well
        task_dimensions = registry.dimensions.extract(task_def.connections.dimensions)
        (hpx_dimension,) = (d for d in task_dimensions if d.name != "band")
        hpx_pixelization = hpx_dimension.pixelization

        if hpx_pixelization.level < constraint_order:
            # NOTE(review): the message text appears inverted relative to the
            # check (it requires quantum order >= constraint order) — confirm
            # intended wording before relying on it.
            raise ValueError(f"Quantum order {hpx_pixelization.level} must be < {constraint_order}")
        # Scale the constraint ranges up to the quantum order (each step in
        # order quadruples the number of NEST pixels).
        hpx_ranges = constraint_ranges.scaled(4**(hpx_pixelization.level - constraint_order))

        # We can be generous in looking for pixels here, because we constraint by actual
        # patch regions below.
        common_skypix_ranges = RangeSet()
        for begin, end in constraint_ranges:
            for hpx_index in range(begin, end):
                constraint_hpx_region = constraint_hpx_pixelization.pixel(hpx_index)
                common_skypix_ranges |= common_skypix_pixelization.envelope(constraint_hpx_region)

        # To keep the query from getting out of hand (and breaking) we simplify until we have fewer
        # than 100 ranges which seems to work fine.
        for simp in range(1, 10):
            if len(common_skypix_ranges) < 100:
                break
            common_skypix_ranges.simplify(simp)

        # Use that RangeSet to assemble a WHERE constraint expression.  This
        # could definitely get too big if the "constraint healpix" order is too
        # fine.
        where_terms = []
        bind = {}
        for n, (begin, end) in enumerate(common_skypix_ranges):
            stop = end - 1  # registry range syntax is inclusive
            if begin == stop:
                where_terms.append(f"{common_skypix_name} = cpx{n}")
                bind[f"cpx{n}"] = begin
            else:
                where_terms.append(f"({common_skypix_name} >= cpx{n}a AND {common_skypix_name} <= cpx{n}b)")
                bind[f"cpx{n}a"] = begin
                bind[f"cpx{n}b"] = stop
        if where is None:
            where = " OR ".join(where_terms)
        else:
            where = f"({where}) AND ({' OR '.join(where_terms)})"
        # Query for input datasets with this constraint, and ask for expanded
        # data IDs because we want regions.  Immediately group this by patch so
        # we don't do later geometric stuff n_bands more times than we need to.
        input_refs = registry.queryDatasets(
            input_dataset_type,
            where=where,
            findFirst=True,
            collections=collections,
            bind=bind
        ).expanded()
        inputs_by_patch = defaultdict(set)
        patch_dimensions = registry.dimensions.extract(["patch"])
        for input_ref in input_refs:
            inputs_by_patch[input_ref.dataId.subset(patch_dimensions)].add(input_ref)
        if not inputs_by_patch:
            message_body = "\n".join(input_refs.explain_no_results())
            raise RuntimeError(f"No inputs found:\n{message_body}")

        # Iterate over patches and compute the set of output healpix pixels
        # that overlap each one.  Use that to associate inputs with output
        # pixels, but only for the output pixels we've already identified.
        inputs_by_hpx = defaultdict(set)
        for patch_data_id, input_refs_for_patch in inputs_by_patch.items():
            patch_hpx_ranges = hpx_pixelization.envelope(patch_data_id.region)
            for begin, end in patch_hpx_ranges & hpx_ranges:
                for hpx_index in range(begin, end):
                    inputs_by_hpx[hpx_index].update(input_refs_for_patch)
        # Iterate over the dict we just created and create the actual quanta.
        quanta = []
        output_run = metadata["output_run"]
        for hpx_index, input_refs_for_hpx_index in inputs_by_hpx.items():
            # Group inputs by band.
            input_refs_by_band = defaultdict(list)
            for input_ref in input_refs_for_hpx_index:
                input_refs_by_band[input_ref.dataId["band"]].append(input_ref)
            # Iterate over bands to make quanta.
            for band, input_refs_for_band in input_refs_by_band.items():
                data_id = registry.expandDataId({hpx_dimension: hpx_index, "band": band})

                # Expand the quantum pixel into the (finer) set of output
                # pixels it contains.
                hpx_pixel_ranges = RangeSet(hpx_index)
                hpx_output_ranges = hpx_pixel_ranges.scaled(4**(config.hips_order - hpx_pixelization.level))
                output_data_ids = []
                for begin, end in hpx_output_ranges:
                    for hpx_output_index in range(begin, end):
                        output_data_ids.append(
                            registry.expandDataId({hpx_output_dimension: hpx_output_index, "band": band})
                        )
                outputs = {
                    dt: [DatasetRef(dt, data_id, run=output_run)] for dt in incidental_output_dataset_types
                }
                outputs[output_dataset_type] = [DatasetRef(output_dataset_type, data_id, run=output_run)
                                                for data_id in output_data_ids]
                quanta.append(
                    Quantum(
                        taskName=task_def.taskName,
                        taskClass=task_def.taskClass,
                        dataId=data_id,
                        initInputs={},
                        inputs={input_dataset_type: input_refs_for_band},
                        outputs=outputs,
                    )
                )

        if len(quanta) == 0:
            raise RuntimeError("Given constraints yielded empty quantum graph.")

        # Define initOutputs refs.
        empty_data_id = DataCoordinate.makeEmpty(registry.dimensions)
        init_outputs = {}
        global_init_outputs = []
        if config_dataset_type := dataset_types.initOutputs.get(task_def.configDatasetName):
            init_outputs[task_def] = [DatasetRef(config_dataset_type, empty_data_id, run=output_run)]
        packages_dataset_name = pipeBase.PipelineDatasetTypes.packagesDatasetName
        if packages_dataset_type := dataset_types.initOutputs.get(packages_dataset_name):
            global_init_outputs.append(DatasetRef(packages_dataset_type, empty_data_id, run=output_run))

        return pipeBase.QuantumGraph(
            quanta={task_def: quanta},
            initOutputs=init_outputs,
            globalInitOutputs=global_init_outputs,
            metadata=metadata,
        )
680
681
682class HipsPropertiesSpectralTerm(pexConfig.Config):
683 lambda_min = pexConfig.Field(
684 doc="Minimum wavelength (nm)",
685 dtype=float,
686 )
687 lambda_max = pexConfig.Field(
688 doc="Maximum wavelength (nm)",
689 dtype=float,
690 )
691
692
class HipsPropertiesConfig(pexConfig.Config):
    """Configuration parameters for writing a HiPS properties file."""
    creator_did_template = pexConfig.Field(
        doc=("Unique identifier of the HiPS - Format: IVOID. "
             "Use ``{band}`` to substitute the band name."),
        dtype=str,
        optional=False,
    )
    obs_collection = pexConfig.Field(
        doc="Short name of original data set - Format: one word",
        dtype=str,
        optional=True,
    )
    obs_description_template = pexConfig.Field(
        doc=("Data set description - Format: free text, longer free text "
             "description of the dataset.  Use ``{band}`` to substitute "
             "the band name."),
        dtype=str,
    )
    prov_progenitor = pexConfig.ListField(
        doc="Provenance of the original data - Format: free text",
        dtype=str,
        default=[],
    )
    obs_title_template = pexConfig.Field(
        doc=("Data set title format: free text, but should be short. "
             "Use ``{band}`` to substitute the band name."),
        dtype=str,
        optional=False,
    )
    spectral_ranges = pexConfig.ConfigDictField(
        doc=("Mapping from band to lambda_min, lamba_max (nm).  May be approximate."),
        keytype=str,
        itemtype=HipsPropertiesSpectralTerm,
        default={},
    )
    initial_ra = pexConfig.Field(
        doc="Initial RA (deg) (default for HiPS viewer).  If not set will use a point in MOC.",
        dtype=float,
        optional=True,
    )
    initial_dec = pexConfig.Field(
        doc="Initial Declination (deg) (default for HiPS viewer).  If not set will use a point in MOC.",
        dtype=float,
        optional=True,
    )
    initial_fov = pexConfig.Field(
        doc="Initial field-of-view (deg). If not set will use ~1 healpix tile.",
        dtype=float,
        optional=True,
    )
    obs_ack = pexConfig.Field(
        doc="Observation acknowledgements (free text).",
        dtype=str,
        optional=True,
    )
    t_min = pexConfig.Field(
        doc="Time (MJD) of earliest observation included in HiPS",
        dtype=float,
        optional=True,
    )
    t_max = pexConfig.Field(
        doc="Time (MJD) of latest observation included in HiPS",
        dtype=float,
        optional=True,
    )

    def validate(self):
        super().validate()

        # The HiPS standard requires obs_collection to be a single word.
        collection = self.obs_collection
        if collection is not None and re.search(r"\s", collection):
            raise ValueError("obs_collection cannot contain any space characters.")

    def setDefaults(self):
        # Values here taken from
        # https://github.com/lsst-dm/dax_obscore/blob/44ac15029136e2ec15/configs/dp02.yaml#L46
        default_ranges = {
            "u": (330., 400.),
            "g": (402., 552.),
            "r": (552., 691.),
            "i": (691., 818.),
            "z": (818., 922.),
            "y": (970., 1060.),
        }
        for band, (lambda_min, lambda_max) in default_ranges.items():
            term = HipsPropertiesSpectralTerm()
            term.lambda_min = lambda_min
            term.lambda_max = lambda_max
            self.spectral_ranges[band] = term
794
795
class GenerateHipsConnections(pipeBase.PipelineTaskConnections,
                              dimensions=("instrument", "band"),
                              defaultTemplates={"coaddName": "deep"}):
    # High-resolution HPX exposures (output of HighResolutionHipsTask);
    # deferred loading since only one tile is needed in memory at a time.
    hips_exposure_handles = pipeBase.connectionTypes.Input(
        doc="HiPS-compatible HPX images.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
        deferLoad=True,
    )
807
808
class GenerateHipsConfig(pipeBase.PipelineTaskConfig,
                         pipelineConnections=GenerateHipsConnections):
    """Configuration parameters for GenerateHipsTask."""
    # WARNING: In general PipelineTasks are not allowed to do any outputs
    # outside of the butler. This task has been given (temporary)
    # Special Dispensation because of the nature of HiPS outputs until
    # a more controlled solution can be found.
    hips_base_uri = pexConfig.Field(
        doc="URI to HiPS base for output.",
        dtype=str,
        optional=False,
    )
    # Tiles are built down from the input (max) order to this order.
    min_order = pexConfig.Field(
        doc="Minimum healpix order for HiPS tree.",
        dtype=int,
        default=3,
    )
    properties = pexConfig.ConfigField(
        dtype=HipsPropertiesConfig,
        doc="Configuration for properties file.",
    )
    allsky_tilesize = pexConfig.Field(
        dtype=int,
        doc="Allsky.png tile size. Must be power of 2.",
        default=512,
    )
    # Asinh stretch parameters for the grayscale PNG previews.
    png_gray_asinh_minimum = pexConfig.Field(
        doc="AsinhMapping intensity to be mapped to black for grayscale png scaling.",
        dtype=float,
        default=0.0,
    )
    png_gray_asinh_stretch = pexConfig.Field(
        doc="AsinhMapping linear stretch for grayscale png scaling.",
        dtype=float,
        default=2.0,
    )
    png_gray_asinh_softening = pexConfig.Field(
        doc="AsinhMapping softening parameter (Q) for grayscale png scaling.",
        dtype=float,
        default=8.0,
    )
850
851
852class GenerateHipsTask(pipeBase.PipelineTask):
853 """Task for making a HiPS tree with FITS and grayscale PNGs."""
854 ConfigClass = GenerateHipsConfig
855 _DefaultName = "generateHips"
856 color_task = False
857
858 @timeMethod
859 def runQuantum(self, butlerQC, inputRefs, outputRefs):
860 inputs = butlerQC.get(inputRefs)
861
862 dims = inputRefs.hips_exposure_handles[0].dataId.names
863 order = None
864 for dim in dims:
865 if "healpix" in dim:
866 order = int(dim.split("healpix")[1])
867 healpix_dim = dim
868 break
869 else:
870 raise RuntimeError("Could not determine healpix order for input exposures.")
871
872 hips_exposure_handle_dict = {
873 (hips_exposure_handle.dataId[healpix_dim],
874 hips_exposure_handle.dataId["band"]): hips_exposure_handle
875 for hips_exposure_handle in inputs["hips_exposure_handles"]
876 }
877
878 data_bands = {hips_exposure_handle.dataId["band"]
879 for hips_exposure_handle in inputs["hips_exposure_handles"]}
880 bands = self._check_data_bands(data_bands)
881
882 self.run(
883 bands=bands,
884 max_order=order,
885 hips_exposure_handle_dict=hips_exposure_handle_dict,
886 do_color=self.color_task,
887 )
888
889 def _check_data_bands(self, data_bands):
890 """Check that the data has only a single band.
891
892 Parameters
893 ----------
894 data_bands : `set` [`str`]
895 Bands from the input data.
896
897 Returns
898 -------
899 bands : `list` [`str`]
900 List of single band to process.
901
902 Raises
903 ------
904 RuntimeError if there is not exactly one band.
905 """
906 if len(data_bands) != 1:
907 raise RuntimeError("GenerateHipsTask can only use data from a single band.")
908
909 return list(data_bands)
910
    @timeMethod
    def run(self, bands, max_order, hips_exposure_handle_dict, do_color=False):
        """Run the GenerateHipsTask.

        Walks all input pixels at the native (maximum) order, writing FITS
        and/or PNG tiles at every order from ``max_order`` down to the
        configured ``min_order`` by repeatedly 2x2-binning into parent
        pixels, then writes the HiPS properties, MOC, and Allsky files.

        Parameters
        ----------
        bands : `list` [`str`]
            List of bands to be processed (or single band).
        max_order : `int`
            HEALPix order of the maximum (native) HPX exposures.
        hips_exposure_handle_dict : `dict` [`tuple` [`int`, `str`],
                                    `lsst.daf.butler.DeferredDatasetHandle`]
            Dict of handles for the HiPS high-resolution exposures.
            Key is (pixel number, ``band``).
        do_color : `bool`, optional
            Do color pngs instead of per-band grayscale.
        """
        min_order = self.config.min_order

        # Build the asinh stretch appropriate to the output mode; the color
        # attributes only exist on the color config subclass.
        if not do_color:
            png_grayscale_mapping = AsinhMapping(
                self.config.png_gray_asinh_minimum,
                self.config.png_gray_asinh_stretch,
                Q=self.config.png_gray_asinh_softening,
            )
        else:
            png_color_mapping = AsinhMapping(
                self.config.png_color_asinh_minimum,
                self.config.png_color_asinh_stretch,
                Q=self.config.png_color_asinh_softening,
            )

            bcb = self.config.blue_channel_band
            gcb = self.config.green_channel_band
            rcb = self.config.red_channel_band
            colorstr = f"{bcb}{gcb}{rcb}"

        # The base path is based on the hips_base_uri.
        hips_base_path = ResourcePath(self.config.hips_base_uri, forceDirectory=True)

        # We need to unique-ify the pixels because they show up for multiple bands.
        # The output of this is a sorted array.
        pixels = np.unique(np.array([pixel for pixel, _ in hips_exposure_handle_dict.keys()]))

        # Add a "gutter" pixel at the end. Start with 0 which maps to 0 always.
        # The gutter lets the loop below look one element ahead to detect
        # when a parent pixel is finished, without a special case at the end.
        pixels = np.append(pixels, [0])

        # Convert the pixels to each order that will be generated.
        # Right-shifting by 2 maps a HEALPix nest pixel to its parent.
        pixels_shifted = {}
        pixels_shifted[max_order] = pixels
        for order in range(max_order - 1, min_order - 1, -1):
            pixels_shifted[order] = np.right_shift(pixels_shifted[order + 1], 2)

        # And set the gutter to an illegal pixel value.
        for order in range(min_order, max_order + 1):
            pixels_shifted[order][-1] = -1

        # Read in the first pixel for determining image properties.
        exp0 = list(hips_exposure_handle_dict.values())[0].get()
        bbox = exp0.getBBox()
        npix = bbox.getWidth()
        shift_order = int(np.round(np.log2(npix)))

        # Create blank exposures for each level, including the highest order.
        # We also make sure we create blank exposures for any bands used in the color
        # PNGs, even if they aren't available.
        exposures = {}
        for band in bands:
            for order in range(min_order, max_order + 1):
                exp = exp0.Factory(bbox=bbox)
                exp.image.array[:, :] = np.nan
                exposures[(band, order)] = exp

        # Loop over all pixels, avoiding the gutter.
        for pixel_counter, pixel in enumerate(pixels[:-1]):
            self.log.debug("Working on high resolution pixel %d", pixel)
            for band in bands:
                # Read all the exposures here for the highest order.
                # There will always be at least one band with a HiPS image available
                # at the highest order. However, for color images it is possible that
                # not all bands have coverage so we require this check.
                if (pixel, band) in hips_exposure_handle_dict:
                    exposures[(band, max_order)] = hips_exposure_handle_dict[(pixel, band)].get()

            # Go up the HiPS tree.
            # We only write pixels and rebin to fill the parent pixel when we are
            # done with a current pixel, which is determined if the next pixel
            # has a different pixel number.
            for order in range(max_order, min_order - 1, -1):
                if pixels_shifted[order][pixel_counter + 1] == pixels_shifted[order][pixel_counter]:
                    # This order is not done, and so none of the other orders will be.
                    break

                # We can now write out the images for each band.
                # Note this will always trigger at the max order where each pixel is unique.
                if not do_color:
                    for band in bands:
                        self._write_hips_image(
                            hips_base_path.join(f"band_{band}", forceDirectory=True),
                            order,
                            pixels_shifted[order][pixel_counter],
                            exposures[(band, order)].image,
                            png_grayscale_mapping,
                            shift_order=shift_order,
                        )
                else:
                    # Make a color png.
                    self._write_hips_color_png(
                        hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                        order,
                        pixels_shifted[order][pixel_counter],
                        exposures[(self.config.red_channel_band, order)].image,
                        exposures[(self.config.green_channel_band, order)].image,
                        exposures[(self.config.blue_channel_band, order)].image,
                        png_color_mapping,
                    )

                log_level = self.log.INFO if order == (max_order - 3) else self.log.DEBUG
                self.log.log(
                    log_level,
                    "Completed HiPS generation for %s, order %d, pixel %d (%d/%d)",
                    ",".join(bands),
                    order,
                    pixels_shifted[order][pixel_counter],
                    pixel_counter,
                    len(pixels) - 1,
                )

                # When we are at the top of the tree, erase top level images and continue.
                if order == min_order:
                    for band in bands:
                        exposures[(band, order)].image.array[:, :] = np.nan
                    continue

                # Now average the images for each band.
                for band in bands:
                    # Reshape into 2x2 blocks and take the NaN-aware mean;
                    # empty blocks warn from nanmean, which we suppress.
                    arr = exposures[(band, order)].image.array.reshape(npix//2, 2, npix//2, 2)
                    with warnings.catch_warnings():
                        warnings.simplefilter("ignore")
                        binned_image_arr = np.nanmean(arr, axis=(1, 3))

                    # Fill the next level up. We figure out which of the four
                    # sub-pixels the current pixel occupies.
                    sub_index = (pixels_shifted[order][pixel_counter]
                                 - np.left_shift(pixels_shifted[order - 1][pixel_counter], 2))

                    # Fill exposure at the next level up.
                    exp = exposures[(band, order - 1)]

                    # Fill the correct subregion.
                    if sub_index == 0:
                        exp.image.array[npix//2:, 0: npix//2] = binned_image_arr
                    elif sub_index == 1:
                        exp.image.array[0: npix//2, 0: npix//2] = binned_image_arr
                    elif sub_index == 2:
                        exp.image.array[npix//2:, npix//2:] = binned_image_arr
                    elif sub_index == 3:
                        exp.image.array[0: npix//2, npix//2:] = binned_image_arr
                    else:
                        # This should be impossible.
                        raise ValueError("Illegal pixel sub index")

                    # Erase the previous exposure.
                    if order < max_order:
                        exposures[(band, order)].image.array[:, :] = np.nan

        # Write the properties files and MOCs.
        if not do_color:
            for band in bands:
                band_pixels = np.array([pixel
                                        for pixel, band_ in hips_exposure_handle_dict.keys()
                                        if band_ == band])
                band_pixels = np.sort(band_pixels)

                self._write_properties_and_moc(
                    hips_base_path.join(f"band_{band}", forceDirectory=True),
                    max_order,
                    band_pixels,
                    exp0,
                    shift_order,
                    band,
                    False,
                )
                self._write_allsky_file(
                    hips_base_path.join(f"band_{band}", forceDirectory=True),
                    min_order,
                )
        else:
            self._write_properties_and_moc(
                hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                max_order,
                pixels[:-1],
                exp0,
                shift_order,
                colorstr,
                True,
            )
            self._write_allsky_file(
                hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                min_order,
            )
1111
    def _write_hips_image(self, hips_base_path, order, pixel, image, png_mapping, shift_order=9):
        """Write a HiPS image.

        Writes both a FITS tile (with an HPX WCS) and a grayscale PNG
        preview tile for the given HEALPix pixel.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        order : `int`
            HEALPix order of the HiPS image to write.
        pixel : `int`
            HEALPix pixel of the HiPS image.
        image : `lsst.afw.image.Image`
            Image to write.
        png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
            Mapping to convert image to scaled png.
        shift_order : `int`, optional
            HPX shift_order.
        """
        # WARNING: In general PipelineTasks are not allowed to do any outputs
        # outside of the butler. This task has been given (temporary)
        # Special Dispensation because of the nature of HiPS outputs until
        # a more controlled solution can be found.

        # Tiles live in Norder{order}/Dir{N}/Npix{pixel}.* with N grouping
        # 10000 pixels per directory.
        dir_number = self._get_dir_number(pixel)
        hips_dir = hips_base_path.join(
            f"Norder{order}",
            forceDirectory=True
        ).join(
            f"Dir{dir_number}",
            forceDirectory=True
        )

        wcs = makeHpxWcs(order, pixel, shift_order=shift_order)

        uri = hips_dir.join(f"Npix{pixel}.fits")

        # Write locally first, then transfer to the (possibly remote) URI.
        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            image.writeFits(temporary_uri.ospath, metadata=wcs.getFitsMetadata())

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

        # And make a grayscale png as well

        # Invert the mapped values (255 - value); non-finite or negative
        # input pixels are forced to 0.
        vals = 255 - png_mapping.map_intensity_to_uint8(image.array).astype(np.uint8)
        vals[~np.isfinite(image.array) | (image.array < 0)] = 0
        # Flip the row order for the PNG; presumably to convert the image
        # array's bottom-up row convention to PNG's top-down one.
        im = Image.fromarray(vals[::-1, :], "L")

        uri = hips_dir.join(f"Npix{pixel}.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            im.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1165
    def _write_hips_color_png(
        self,
        hips_base_path,
        order,
        pixel,
        image_red,
        image_green,
        image_blue,
        png_mapping,
    ):
        """Write a color png HiPS image.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        order : `int`
            HEALPix order of the HiPS image to write.
        pixel : `int`
            HEALPix pixel of the HiPS image.
        image_red : `lsst.afw.image.Image`
            Input for red channel of output png.
        image_green : `lsst.afw.image.Image`
            Input for green channel of output png.
        image_blue : `lsst.afw.image.Image`
            Input for blue channel of output png.
        png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
            Mapping to convert image to scaled png.
        """
        # WARNING: In general PipelineTasks are not allowed to do any outputs
        # outside of the butler. This task has been given (temporary)
        # Special Dispensation because of the nature of HiPS outputs until
        # a more controlled solution can be found.

        dir_number = self._get_dir_number(pixel)
        hips_dir = hips_base_path.join(
            f"Norder{order}",
            forceDirectory=True
        ).join(
            f"Dir{dir_number}",
            forceDirectory=True
        )

        # We need to convert nans to the minimum values in the mapping.
        # Copies are taken so the caller's arrays are not modified.
        arr_red = image_red.array.copy()
        arr_red[np.isnan(arr_red)] = png_mapping.minimum[0]
        arr_green = image_green.array.copy()
        arr_green[np.isnan(arr_green)] = png_mapping.minimum[1]
        arr_blue = image_blue.array.copy()
        arr_blue[np.isnan(arr_blue)] = png_mapping.minimum[2]

        image_array = png_mapping.make_rgb_image(arr_red, arr_green, arr_blue)

        # Flip the row order for the PNG, matching the grayscale tiles.
        im = Image.fromarray(image_array[::-1, :, :], mode="RGB")

        uri = hips_dir.join(f"Npix{pixel}.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            im.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1227
    def _write_properties_and_moc(
        self,
        hips_base_path,
        max_order,
        pixels,
        exposure,
        shift_order,
        band,
        multiband
    ):
        """Write HiPS properties file and MOC.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        max_order : `int`
            Maximum HEALPix order.
        pixels : `np.ndarray` (N,)
            Array of pixels used.
        exposure : `lsst.afw.image.Exposure`
            Sample HPX exposure used for generating HiPS tiles.
        shift_order : `int`
            HPX shift order.
        band : `str`
            Band (or color).
        multiband : `bool`
            Is band multiband / color?
        """
        # Coverage area of all used pixels, in square degrees.
        area = hpg.nside_to_pixel_area(2**max_order, degrees=True)*len(pixels)

        initial_ra = self.config.properties.initial_ra
        initial_dec = self.config.properties.initial_dec
        initial_fov = self.config.properties.initial_fov

        if initial_ra is None or initial_dec is None or initial_fov is None:
            # We want to point to an arbitrary pixel in the footprint.
            # Just take the median pixel value for simplicity.
            temp_pixels = pixels.copy()
            if temp_pixels.size % 2 == 0:
                # Pad to odd length so the median is an actual member pixel.
                temp_pixels = np.append(temp_pixels, [temp_pixels[0]])
            medpix = int(np.median(temp_pixels))
            _initial_ra, _initial_dec = hpg.pixel_to_angle(2**max_order, medpix)
            _initial_fov = hpg.nside_to_resolution(2**max_order, units='arcminutes')/60.

            if initial_ra is None or initial_dec is None:
                initial_ra = _initial_ra
                initial_dec = _initial_dec
            if initial_fov is None:
                initial_fov = _initial_fov

        self._write_hips_properties_file(
            hips_base_path,
            self.config.properties,
            band,
            multiband,
            exposure,
            max_order,
            shift_order,
            area,
            initial_ra,
            initial_dec,
            initial_fov,
        )

        # Write the MOC coverage
        self._write_hips_moc_file(
            hips_base_path,
            max_order,
            pixels,
        )
1299
1300 def _write_hips_properties_file(
1301 self,
1302 hips_base_path,
1303 properties_config,
1304 band,
1305 multiband,
1306 exposure,
1307 max_order,
1308 shift_order,
1309 area,
1310 initial_ra,
1311 initial_dec,
1312 initial_fov
1313 ):
1314 """Write HiPS properties file.
1315
1316 Parameters
1317 ----------
1318 hips_base_path : `lsst.resources.ResourcePath`
1319 ResourcePath at top of HiPS tree. File will be written
1320 to this path as ``properties``.
1321 properties_config : `lsst.pipe.tasks.hips.HipsPropertiesConfig`
1322 Configuration for properties values.
1323 band : `str`
1324 Name of band(s) for HiPS tree.
1325 multiband : `bool`
1326 Is multiband / color?
1327 exposure : `lsst.afw.image.Exposure`
1328 Sample HPX exposure used for generating HiPS tiles.
1329 max_order : `int`
1330 Maximum HEALPix order.
1331 shift_order : `int`
1332 HPX shift order.
1333 area : `float`
1334 Coverage area in square degrees.
1335 initial_ra : `float`
1336 Initial HiPS RA position (degrees).
1337 initial_dec : `float`
1338 Initial HiPS Dec position (degrees).
1339 initial_fov : `float`
1340 Initial HiPS display size (degrees).
1341 """
1342 # WARNING: In general PipelineTasks are not allowed to do any outputs
1343 # outside of the butler. This task has been given (temporary)
1344 # Special Dispensation because of the nature of HiPS outputs until
1345 # a more controlled solution can be found.
1346 def _write_property(fh, name, value):
1347 """Write a property name/value to a file handle.
1348
1349 Parameters
1350 ----------
1351 fh : file handle (blah)
1352 Open for writing.
1353 name : `str`
1354 Name of property
1355 value : `str`
1356 Value of property
1357 """
1358 # This ensures that the name has no spaces or space-like characters,
1359 # per the HiPS standard.
1360 if re.search(r"\s", name):
1361 raise ValueError(f"``{name}`` cannot contain any space characters.")
1362 if "=" in name:
1363 raise ValueError(f"``{name}`` cannot contain an ``=``")
1364
1365 fh.write(f"{name:25}= {value}\n")
1366
1367 if exposure.image.array.dtype == np.dtype("float32"):
1368 bitpix = -32
1369 elif exposure.image.array.dtype == np.dtype("float64"):
1370 bitpix = -64
1371 elif exposure.image.array.dtype == np.dtype("int32"):
1372 bitpix = 32
1373
1374 date_iso8601 = datetime.utcnow().isoformat(timespec="seconds") + "Z"
1375 pixel_scale = hpg.nside_to_resolution(2**(max_order + shift_order), units='degrees')
1376
1377 uri = hips_base_path.join("properties")
1378 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1379 with open(temporary_uri.ospath, "w") as fh:
1380 _write_property(
1381 fh,
1382 "creator_did",
1383 properties_config.creator_did_template.format(band=band),
1384 )
1385 if properties_config.obs_collection is not None:
1386 _write_property(fh, "obs_collection", properties_config.obs_collection)
1387 _write_property(
1388 fh,
1389 "obs_title",
1390 properties_config.obs_title_template.format(band=band),
1391 )
1392 if properties_config.obs_description_template is not None:
1393 _write_property(
1394 fh,
1395 "obs_description",
1396 properties_config.obs_description_template.format(band=band),
1397 )
1398 if len(properties_config.prov_progenitor) > 0:
1399 for prov_progenitor in properties_config.prov_progenitor:
1400 _write_property(fh, "prov_progenitor", prov_progenitor)
1401 if properties_config.obs_ack is not None:
1402 _write_property(fh, "obs_ack", properties_config.obs_ack)
1403 _write_property(fh, "obs_regime", "Optical")
1404 _write_property(fh, "data_pixel_bitpix", str(bitpix))
1405 _write_property(fh, "dataproduct_type", "image")
1406 _write_property(fh, "moc_sky_fraction", str(area/41253.))
1407 _write_property(fh, "data_ucd", "phot.flux")
1408 _write_property(fh, "hips_creation_date", date_iso8601)
1409 _write_property(fh, "hips_builder", "lsst.pipe.tasks.hips.GenerateHipsTask")
1410 _write_property(fh, "hips_creator", "Vera C. Rubin Observatory")
1411 _write_property(fh, "hips_version", "1.4")
1412 _write_property(fh, "hips_release_date", date_iso8601)
1413 _write_property(fh, "hips_frame", "equatorial")
1414 _write_property(fh, "hips_order", str(max_order))
1415 _write_property(fh, "hips_tile_width", str(exposure.getBBox().getWidth()))
1416 _write_property(fh, "hips_status", "private master clonableOnce")
1417 if multiband:
1418 _write_property(fh, "hips_tile_format", "png")
1419 _write_property(fh, "dataproduct_subtype", "color")
1420 else:
1421 _write_property(fh, "hips_tile_format", "png fits")
1422 _write_property(fh, "hips_pixel_bitpix", str(bitpix))
1423 _write_property(fh, "hips_pixel_scale", str(pixel_scale))
1424 _write_property(fh, "hips_initial_ra", str(initial_ra))
1425 _write_property(fh, "hips_initial_dec", str(initial_dec))
1426 _write_property(fh, "hips_initial_fov", str(initial_fov))
1427 if multiband:
1428 if self.config.blue_channel_band in properties_config.spectral_ranges:
1429 em_min = properties_config.spectral_ranges[
1430 self.config.blue_channel_band
1431 ].lambda_min/1e9
1432 else:
1433 self.log.warning("blue band %s not in self.config.spectral_ranges.", band)
1434 em_min = 3e-7
1435 if self.config.red_channel_band in properties_config.spectral_ranges:
1436 em_max = properties_config.spectral_ranges[
1437 self.config.red_channel_band
1438 ].lambda_max/1e9
1439 else:
1440 self.log.warning("red band %s not in self.config.spectral_ranges.", band)
1441 em_max = 1e-6
1442 else:
1443 if band in properties_config.spectral_ranges:
1444 em_min = properties_config.spectral_ranges[band].lambda_min/1e9
1445 em_max = properties_config.spectral_ranges[band].lambda_max/1e9
1446 else:
1447 self.log.warning("band %s not in self.config.spectral_ranges.", band)
1448 em_min = 3e-7
1449 em_max = 1e-6
1450 _write_property(fh, "em_min", str(em_min))
1451 _write_property(fh, "em_max", str(em_max))
1452 if properties_config.t_min is not None:
1453 _write_property(fh, "t_min", properties_config.t_min)
1454 if properties_config.t_max is not None:
1455 _write_property(fh, "t_max", properties_config.t_max)
1456
1457 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1458
1459 def _write_hips_moc_file(self, hips_base_path, max_order, pixels, min_uniq_order=1):
1460 """Write HiPS MOC file.
1461
1462 Parameters
1463 ----------
1464 hips_base_path : `lsst.resources.ResourcePath`
1465 ResourcePath to top of HiPS tree. File will be written as
1466 to this path as ``Moc.fits``.
1467 max_order : `int`
1468 Maximum HEALPix order.
1469 pixels : `np.ndarray`
1470 Array of pixels covered.
1471 min_uniq_order : `int`, optional
1472 Minimum HEALPix order for looking for fully covered pixels.
1473 """
1474 # WARNING: In general PipelineTasks are not allowed to do any outputs
1475 # outside of the butler. This task has been given (temporary)
1476 # Special Dispensation because of the nature of HiPS outputs until
1477 # a more controlled solution can be found.
1478
1479 # Make the initial list of UNIQ pixels
1480 uniq = 4*(4**max_order) + pixels
1481
1482 # Make a healsparse map which provides easy degrade/comparisons.
1483 hspmap = hsp.HealSparseMap.make_empty(2**min_uniq_order, 2**max_order, dtype=np.float32)
1484 hspmap[pixels] = 1.0
1485
1486 # Loop over orders, degrade each time, and look for pixels with full coverage.
1487 for uniq_order in range(max_order - 1, min_uniq_order - 1, -1):
1488 hspmap = hspmap.degrade(2**uniq_order, reduction="sum")
1489 pix_shift = np.right_shift(pixels, 2*(max_order - uniq_order))
1490 # Check if any of the pixels at uniq_order have full coverage.
1491 covered, = np.isclose(hspmap[pix_shift], 4**(max_order - uniq_order)).nonzero()
1492 if covered.size == 0:
1493 # No pixels at uniq_order are fully covered, we're done.
1494 break
1495 # Replace the UNIQ pixels that are fully covered.
1496 uniq[covered] = 4*(4**uniq_order) + pix_shift[covered]
1497
1498 # Remove duplicate pixels.
1499 uniq = np.unique(uniq)
1500
1501 # Output to fits.
1502 tbl = np.zeros(uniq.size, dtype=[("UNIQ", "i8")])
1503 tbl["UNIQ"] = uniq
1504
1505 order = np.log2(tbl["UNIQ"]//4).astype(np.int32)//2
1506 moc_order = np.max(order)
1507
1508 hdu = fits.BinTableHDU(tbl)
1509 hdu.header["PIXTYPE"] = "HEALPIX"
1510 hdu.header["ORDERING"] = "NUNIQ"
1511 hdu.header["COORDSYS"] = "C"
1512 hdu.header["MOCORDER"] = moc_order
1513 hdu.header["MOCTOOL"] = "lsst.pipe.tasks.hips.GenerateHipsTask"
1514
1515 uri = hips_base_path.join("Moc.fits")
1516
1517 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1518 hdu.writeto(temporary_uri.ospath)
1519
1520 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1521
1522 def _write_allsky_file(self, hips_base_path, allsky_order):
1523 """Write an Allsky.png file.
1524
1525 Parameters
1526 ----------
1527 hips_base_path : `lsst.resources.ResourcePath`
1528 Resource path to the base of the HiPS directory tree.
1529 allsky_order : `int`
1530 HEALPix order of the minimum order to make allsky file.
1531 """
1532 tile_size = self.config.allsky_tilesize
1533 n_tiles_per_side = int(np.sqrt(hpg.nside_to_npixel(hpg.order_to_nside(allsky_order))))
1534
1535 allsky_image = None
1536
1537 allsky_order_uri = hips_base_path.join(f"Norder{allsky_order}", forceDirectory=True)
1538 pixel_regex = re.compile(r"Npix([0-9]+)\.png$")
1539 png_uris = list(
1540 ResourcePath.findFileResources(
1541 candidates=[allsky_order_uri],
1542 file_filter=pixel_regex,
1543 )
1544 )
1545
1546 for png_uri in png_uris:
1547 matches = re.match(pixel_regex, png_uri.basename())
1548 pix_num = int(matches.group(1))
1549 tile_image = Image.open(io.BytesIO(png_uri.read()))
1550 row = math.floor(pix_num//n_tiles_per_side)
1551 column = pix_num % n_tiles_per_side
1552 box = (column*tile_size, row*tile_size, (column + 1)*tile_size, (row + 1)*tile_size)
1553 tile_image_shrunk = tile_image.resize((tile_size, tile_size))
1554
1555 if allsky_image is None:
1556 allsky_image = Image.new(
1557 tile_image.mode,
1558 (n_tiles_per_side*tile_size, n_tiles_per_side*tile_size),
1559 )
1560 allsky_image.paste(tile_image_shrunk, box)
1561
1562 uri = allsky_order_uri.join("Allsky.png")
1563
1564 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1565 allsky_image.save(temporary_uri.ospath)
1566
1567 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1568
1569 def _get_dir_number(self, pixel):
1570 """Compute the directory number from a pixel.
1571
1572 Parameters
1573 ----------
1574 pixel : `int`
1575 HEALPix pixel number.
1576
1577 Returns
1578 -------
1579 dir_number : `int`
1580 HiPS directory number.
1581 """
1582 return (pixel//10000)*10000
1583
1584
class GenerateColorHipsConnections(pipeBase.PipelineTaskConnections,
                                   dimensions=("instrument", ),
                                   defaultTemplates={"coaddName": "deep"}):
    """Connections for GenerateColorHipsTask."""
    # NOTE(review): the healpix11 dimension is hard-wired here; presumably
    # the pipeline machinery adjusts it for other orders -- confirm.
    hips_exposure_handles = pipeBase.connectionTypes.Input(
        doc="HiPS-compatible HPX images.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
        deferLoad=True,
    )
1596
1597
class GenerateColorHipsConfig(GenerateHipsConfig,
                              pipelineConnections=GenerateColorHipsConnections):
    """Configuration parameters for GenerateColorHipsTask."""
    # Bands assigned to each RGB channel of the output color pngs.
    blue_channel_band = pexConfig.Field(
        doc="Band to use for blue channel of color pngs.",
        dtype=str,
        default="g",
    )
    green_channel_band = pexConfig.Field(
        doc="Band to use for green channel of color pngs.",
        dtype=str,
        default="r",
    )
    red_channel_band = pexConfig.Field(
        doc="Band to use for red channel of color pngs.",
        dtype=str,
        default="i",
    )
    # Parameters of the asinh stretch used for the color pngs.
    png_color_asinh_minimum = pexConfig.Field(
        doc="AsinhMapping intensity to be mapped to black for color png scaling.",
        dtype=float,
        default=0.0,
    )
    png_color_asinh_stretch = pexConfig.Field(
        doc="AsinhMapping linear stretch for color png scaling.",
        dtype=float,
        default=5.0,
    )
    png_color_asinh_softening = pexConfig.Field(
        doc="AsinhMapping softening parameter (Q) for color png scaling.",
        dtype=float,
        default=8.0,
    )
1631
1632
class GenerateColorHipsTask(GenerateHipsTask):
    """Task for making a HiPS tree with color pngs."""
    ConfigClass = GenerateColorHipsConfig
    _DefaultName = "generateColorHips"
    color_task = True

    def _check_data_bands(self, data_bands):
        """Check the data for configured bands.

        Warn if any color bands are missing data.

        Parameters
        ----------
        data_bands : `set` [`str`]
            Bands from the input data.

        Returns
        -------
        bands : `list` [`str`]
            List of bands in bgr color order.
        """
        if not data_bands:
            raise RuntimeError("GenerateColorHipsTask must have data from at least one band.")

        # Channel names paired with the configured band for each,
        # in blue/green/red order.
        channel_bands = (
            ("blue", self.config.blue_channel_band),
            ("green", self.config.green_channel_band),
            ("red", self.config.red_channel_band),
        )

        # Warn about any configured channel band with no input coverage;
        # processing continues with blank tiles for those channels.
        for channel, channel_band in channel_bands:
            if channel_band not in data_bands:
                self.log.warning(
                    f"Color png {channel}_channel_band %s not in dataset.",
                    channel_band,
                )

        return [channel_band for _, channel_band in channel_bands]
A class to contain the data, WCS, and other information needed to describe an image of the sky.
Definition Exposure.h:72
A class to represent a 2-dimensional array of pixels.
Definition Image.h:51
Pass parameters to a Statistics object.
Definition Statistics.h:83
An integer coordinate rectangle.
Definition Box.h:55
A RangeSet is a set of unsigned 64 bit integers.
Definition RangeSet.h:99
daf::base::PropertyList * list
Definition fits.cc:928
daf::base::PropertySet * set
Definition fits.cc:927
std::shared_ptr< lsst::afw::image::Image< PixelT > > statisticsStack(std::vector< std::shared_ptr< lsst::afw::image::Image< PixelT > > > &images, Property flags, StatisticsControl const &sctrl=StatisticsControl(), std::vector< lsst::afw::image::VariancePixel > const &wvector=std::vector< lsst::afw::image::VariancePixel >(0))
A function to compute some statistics of a stack of Images.
Property stringToStatisticsProperty(std::string const property)
Conversion function to switch a string to a Property (see Statistics.h)