isolatedStarAssociation.py
# This file is part of pipe_tasks.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

__all__ = ['IsolatedStarAssociationConnections',
           'IsolatedStarAssociationConfig',
           'IsolatedStarAssociationTask']

import esutil
import hpgeom as hpg
import numpy as np
import pandas as pd
from smatch.matcher import Matcher

import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
from lsst.skymap import BaseSkyMap
from lsst.meas.algorithms.sourceSelector import sourceSelectorRegistry

from .healSparseMapping import _is_power_of_two


class IsolatedStarAssociationConnections(pipeBase.PipelineTaskConnections,
                                         dimensions=('instrument', 'tract', 'skymap',),
                                         defaultTemplates={}):
    source_table_visit = pipeBase.connectionTypes.Input(
        doc='Source table in parquet format, per visit',
        name='preSourceTable_visit',
        storageClass='DataFrame',
        dimensions=('instrument', 'visit'),
        deferLoad=True,
        multiple=True,
    )
    skymap = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for warped exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass='SkyMap',
        dimensions=('skymap',),
    )
    isolated_star_sources = pipeBase.connectionTypes.Output(
        doc='Catalog of individual sources for the isolated stars',
        name='isolated_star_presources',
        storageClass='DataFrame',
        dimensions=('instrument', 'tract', 'skymap'),
    )
    isolated_star_cat = pipeBase.connectionTypes.Output(
        doc='Catalog of isolated star positions',
        name='isolated_star_presource_associations',
        storageClass='DataFrame',
        dimensions=('instrument', 'tract', 'skymap'),
    )


class IsolatedStarAssociationConfig(pipeBase.PipelineTaskConfig,
                                    pipelineConnections=IsolatedStarAssociationConnections):
    """Configuration for IsolatedStarAssociationTask."""

    inst_flux_field = pexConfig.Field(
        doc=('Full name of instFlux field to use for s/n selection and persistence. '
             'The associated flag will be implicitly included in bad_flags. '
             'Note that this is expected to end in ``instFlux``.'),
        dtype=str,
        default='normCompTophatFlux_instFlux',
    )
    match_radius = pexConfig.Field(
        doc='Match radius (arcseconds)',
        dtype=float,
        default=1.0,
    )
    nside_split = pexConfig.Field(
        doc="HealPix nside to use for splitting tract for reduced memory. Must be power of 2.",
        dtype=int,
        default=32,
        check=_is_power_of_two,
    )
    isolation_radius = pexConfig.Field(
        doc=('Isolation radius (arcseconds). Any stars with average centroids '
             'within this radius of another star will be rejected from the final '
             'catalog. This radius should be at least 2x match_radius.'),
        dtype=float,
        default=2.0,
    )
    band_order = pexConfig.ListField(
        doc=('Ordered list of bands to use for matching/storage. '
             'Any bands not listed will not be matched.'),
        dtype=str,
        default=['i', 'z', 'r', 'g', 'y', 'u'],
    )
    id_column = pexConfig.Field(
        doc='Name of column with source id.',
        dtype=str,
        default='sourceId',
    )
    ra_column = pexConfig.Field(
        doc='Name of column with right ascension.',
        dtype=str,
        default='ra',
    )
    dec_column = pexConfig.Field(
        doc='Name of column with declination.',
        dtype=str,
        default='dec',
    )
    physical_filter_column = pexConfig.Field(
        doc='Name of column with physical filter name',
        dtype=str,
        default='physical_filter',
    )
    band_column = pexConfig.Field(
        doc='Name of column with band name',
        dtype=str,
        default='band',
    )
    extra_columns = pexConfig.ListField(
        doc='Extra names of columns to read and persist (beyond instFlux and error).',
        dtype=str,
        default=['x',
                 'y',
                 'xErr',
                 'yErr',
                 'apFlux_12_0_instFlux',
                 'apFlux_12_0_instFluxErr',
                 'apFlux_12_0_flag',
                 'apFlux_17_0_instFlux',
                 'apFlux_17_0_instFluxErr',
                 'apFlux_17_0_flag',
                 'localBackground_instFlux',
                 'localBackground_flag',
                 'ixx',
                 'iyy',
                 'ixy'],
    )
    source_selector = sourceSelectorRegistry.makeField(
        doc='How to select sources. Under normal usage this should not be changed.',
        default='science'
    )

    def setDefaults(self):
        super().setDefaults()

        source_selector = self.source_selector['science']
        source_selector.setDefaults()

        source_selector.doFlags = True
        source_selector.doUnresolved = True
        source_selector.doSignalToNoise = True
        source_selector.doIsolated = True
        source_selector.doRequireFiniteRaDec = True
        source_selector.doRequirePrimary = True

        source_selector.signalToNoise.minimum = 8.0
        source_selector.signalToNoise.maximum = 1000.0

        flux_flag_name = self.inst_flux_field.replace("instFlux", "flag")

        source_selector.flags.bad = ['pixelFlags_edge',
                                     'pixelFlags_interpolatedCenter',
                                     'pixelFlags_saturatedCenter',
                                     'pixelFlags_crCenter',
                                     'pixelFlags_bad',
                                     'pixelFlags_interpolated',
                                     'pixelFlags_saturated',
                                     'centroid_flag',
                                     flux_flag_name]

        source_selector.signalToNoise.fluxField = self.inst_flux_field
        source_selector.signalToNoise.errField = self.inst_flux_field + 'Err'

        source_selector.isolated.parentName = 'parentSourceId'
        source_selector.isolated.nChildName = 'deblend_nChild'

        source_selector.unresolved.name = 'sizeExtendedness'

        source_selector.requireFiniteRaDec.raColName = self.ra_column
        source_selector.requireFiniteRaDec.decColName = self.dec_column


class IsolatedStarAssociationTask(pipeBase.PipelineTask):
    """Associate sources into isolated star catalogs.
    """
    ConfigClass = IsolatedStarAssociationConfig
    _DefaultName = 'isolatedStarAssociation'

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        self.makeSubtask('source_selector')
        # Only log warning and fatal errors from the source_selector
        self.source_selector.log.setLevel(self.source_selector.log.WARN)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        input_ref_dict = butlerQC.get(inputRefs)

        tract = butlerQC.quantum.dataId['tract']

        source_table_refs = input_ref_dict['source_table_visit']

        self.log.info('Running with %d source_table_visit dataRefs',
                      len(source_table_refs))

        source_table_ref_dict_temp = {source_table_ref.dataId['visit']: source_table_ref for
                                      source_table_ref in source_table_refs}

        bands = {source_table_ref.dataId['band'] for source_table_ref in source_table_refs}
        for band in bands:
            if band not in self.config.band_order:
                self.log.warning('Input data has data from band %s but that band is not '
                                 'configured for matching', band)

        # TODO: Sort by visit until DM-31701 is done and we have deterministic
        # dataset ordering.
        source_table_ref_dict = {visit: source_table_ref_dict_temp[visit] for
                                 visit in sorted(source_table_ref_dict_temp.keys())}

        struct = self.run(input_ref_dict['skymap'], tract, source_table_ref_dict)

        butlerQC.put(pd.DataFrame(struct.star_source_cat),
                     outputRefs.isolated_star_sources)
        butlerQC.put(pd.DataFrame(struct.star_cat),
                     outputRefs.isolated_star_cat)

    def run(self, skymap, tract, source_table_ref_dict):
        """Run the isolated star association task.

        Parameters
        ----------
        skymap : `lsst.skymap.SkyMap`
            Skymap object.
        tract : `int`
            Tract number.
        source_table_ref_dict : `dict`
            Dictionary of source_table refs. Key is visit, value is dataref.

        Returns
        -------
        struct : `lsst.pipe.base.Struct`
            Struct with outputs for persistence.
        """
        star_source_cat = self._make_all_star_sources(skymap[tract], source_table_ref_dict)

        primary_bands = self.config.band_order

        # Do primary matching
        primary_star_cat = self._match_primary_stars(primary_bands, star_source_cat)

        if len(primary_star_cat) == 0:
            return pipeBase.Struct(star_source_cat=np.zeros(0, star_source_cat.dtype),
                                   star_cat=np.zeros(0, primary_star_cat.dtype))

        # Remove neighbors
        primary_star_cat = self._remove_neighbors(primary_star_cat)

        if len(primary_star_cat) == 0:
            return pipeBase.Struct(star_source_cat=np.zeros(0, star_source_cat.dtype),
                                   star_cat=np.zeros(0, primary_star_cat.dtype))

        # Crop to inner tract region
        inner_tract_ids = skymap.findTractIdArray(primary_star_cat[self.config.ra_column],
                                                  primary_star_cat[self.config.dec_column],
                                                  degrees=True)
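        # findTractIdArray returns, for each star, the id of the tract whose
        # inner region contains its position; keeping only stars assigned to
        # this tract means each isolated star is produced by exactly one
        # tract across the skymap.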
        use = (inner_tract_ids == tract)
        self.log.info('Total of %d isolated stars in inner tract.', use.sum())

        primary_star_cat = primary_star_cat[use]

        if len(primary_star_cat) == 0:
            return pipeBase.Struct(star_source_cat=np.zeros(0, star_source_cat.dtype),
                                   star_cat=np.zeros(0, primary_star_cat.dtype))

        # Set the unique ids.
        primary_star_cat['isolated_star_id'] = self._compute_unique_ids(skymap,
                                                                        tract,
                                                                        len(primary_star_cat))

        # Match to sources.
        star_source_cat, primary_star_cat = self._match_sources(primary_bands,
                                                                star_source_cat,
                                                                primary_star_cat)

        return pipeBase.Struct(star_source_cat=star_source_cat,
                               star_cat=primary_star_cat)

    def _make_all_star_sources(self, tract_info, source_table_ref_dict):
        """Make a catalog of all the star sources.

        Parameters
        ----------
        tract_info : `lsst.skymap.TractInfo`
            Information about the tract.
        source_table_ref_dict : `dict`
            Dictionary of source_table refs. Key is visit, value is dataref.

        Returns
        -------
        star_source_cat : `np.ndarray`
            Catalog of star sources.
        """
        # Internally, we use a numpy recarray; in testing it is by far the
        # fastest option for relatively narrow tables (wide tables have not
        # been tested).
        all_columns, persist_columns = self._get_source_table_visit_column_names()
        poly = tract_info.outer_sky_polygon

        tables = []
        for visit in source_table_ref_dict:
            source_table_ref = source_table_ref_dict[visit]
            df = source_table_ref.get(parameters={'columns': all_columns})
            df.reset_index(inplace=True)

            goodSrc = self.source_selector.selectSources(df)

            table = df[persist_columns][goodSrc.selected].to_records()

            # Append columns that include the row in the source table
            # and the matched object index (to be filled later).
            table = np.lib.recfunctions.append_fields(table,
                                                      ['source_row',
                                                       'obj_index'],
                                                      [np.where(goodSrc.selected)[0],
                                                       np.zeros(goodSrc.selected.sum(), dtype=np.int32)],
                                                      dtypes=['i4', 'i4'],
                                                      usemask=False)
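            # `source_row` records the row of each selected source in the
            # original per-visit table; `obj_index` is a placeholder that
            # _match_sources fills with the index of the associated star.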

            # We cut to the outer tract polygon to ensure consistent matching
            # from tract to tract.
            tract_use = poly.contains(np.deg2rad(table[self.config.ra_column]),
                                      np.deg2rad(table[self.config.dec_column]))

            tables.append(table[tract_use])

        # Combine tables
        star_source_cat = np.concatenate(tables)

        return star_source_cat

    def _get_source_table_visit_column_names(self):
        """Get the list of sourceTable_visit columns from the config.

        Returns
        -------
        all_columns : `list` [`str`]
            All columns to read.
        persist_columns : `list` [`str`]
            Columns to persist (excluding selection columns).
        """
        columns = [self.config.id_column,
                   'visit', 'detector',
                   self.config.ra_column, self.config.dec_column,
                   self.config.physical_filter_column, self.config.band_column,
                   self.config.inst_flux_field, self.config.inst_flux_field + 'Err']
        columns.extend(self.config.extra_columns)

        all_columns = columns.copy()
        if self.source_selector.config.doFlags:
            all_columns.extend(self.source_selector.config.flags.bad)
        if self.source_selector.config.doUnresolved:
            all_columns.append(self.source_selector.config.unresolved.name)
        if self.source_selector.config.doIsolated:
            all_columns.append(self.source_selector.config.isolated.parentName)
            all_columns.append(self.source_selector.config.isolated.nChildName)
        if self.source_selector.config.doRequirePrimary:
            all_columns.append(self.source_selector.config.requirePrimary.primaryColName)

        return all_columns, columns

    def _match_primary_stars(self, primary_bands, star_source_cat):
        """Match primary stars.

        Parameters
        ----------
        primary_bands : `list` [`str`]
            Ordered list of primary bands.
        star_source_cat : `np.ndarray`
            Catalog of star sources.

        Returns
        -------
        primary_star_cat : `np.ndarray`
            Catalog of primary star positions.
        """
        ra_col = self.config.ra_column
        dec_col = self.config.dec_column

        dtype = self._get_primary_dtype(primary_bands)

        # Figure out nside for computing boundary pixels.
        # We want to be 10x the match radius to be conservative.
        nside_split = self.config.nside_split
        nside_boundary = nside_split
        res = hpg.nside_to_resolution(nside_boundary)*3600.
        while res > 10.*self.config.match_radius:
            nside_boundary *= 2
            res = hpg.nside_to_resolution(nside_boundary)*3600.

        # Cancel out last jump.
        nside_boundary //= 2

        # Compute healpix nest bit shift between resolutions.
        bit_shift = 2*int(np.round(np.log2(nside_boundary / nside_split)))
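        # The loop exits at the first nside whose pixel scale drops below
        # 10x the match radius; backing off one step leaves nside_boundary
        # as the finest nside whose pixels are still at least 10x the match
        # radius across.  E.g., with the defaults (nside_split=32,
        # match_radius=1.0 arcsec) this should work out to
        # nside_boundary=16384 (~13 arcsec pixels) and bit_shift=18.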

        primary_star_cat = None
        for primary_band in primary_bands:
            use = (star_source_cat['band'] == primary_band)

            ra = star_source_cat[ra_col][use]
            dec = star_source_cat[dec_col][use]

            hpix_split = np.unique(hpg.angle_to_pixel(nside_split, ra, dec))
            hpix_highres = hpg.angle_to_pixel(nside_boundary, ra, dec)

            band_cats = []
            for pix in hpix_split:
                self.log.info("Matching primary stars in %s band in pixel %d", primary_band, pix)
                # We take all the high resolution pixels in this pixel, and
                # add in all the boundary pixels to ensure objects do not get split.
                pixels_highres = np.left_shift(pix, bit_shift) + np.arange(2**bit_shift)
                pixels_highres = np.unique(hpg.neighbors(nside_boundary, pixels_highres).ravel())
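                # In the nested healpix scheme the children of `pix` at
                # nside_boundary are the 2**bit_shift consecutive pixels
                # starting at pix << bit_shift; hpg.neighbors then adds the
                # pixels adjacent to each of these, so the final set covers
                # the split pixel plus a ring of boundary pixels.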
                a, b = esutil.numpy_util.match(pixels_highres, hpix_highres)

                _ra = ra[b]
                _dec = dec[b]

                with Matcher(_ra, _dec) as matcher:
                    idx = matcher.query_groups(self.config.match_radius/3600., min_match=1)
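                # idx is a list of groups, each a list of indices into
                # _ra/_dec that fall within the match radius of each other;
                # with min_match=1, unmatched sources come back as
                # single-element groups, so len(idx) is the number of
                # candidate stars in this pixel.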

                count = len(idx)

                if count == 0:
                    continue

                band_cat = np.zeros(count, dtype=dtype)
                band_cat['primary_band'] = primary_band

                # If the tract crosses ra=0 (that is, it has both low ra and high ra)
                # then we need to remap all ra values from [0, 360) to [-180, 180)
                # before doing any position averaging.
                remapped = False
                if _ra.min() < 60.0 and _ra.max() > 300.0:
                    _ra_temp = (_ra + 180.0) % 360. - 180.
                    remapped = True
                else:
                    _ra_temp = _ra
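                # For example, sources at ra=359.9 and ra=0.1 would naively
                # average to 180.0; after remapping to -0.1 and 0.1 they
                # average to 0.0 as intended.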

                # Compute mean position for each primary star
                for i, row in enumerate(idx):
                    row = np.array(row)
                    band_cat[ra_col][i] = np.mean(_ra_temp[row])
                    band_cat[dec_col][i] = np.mean(_dec[row])

                if remapped:
                    # Remap ra back to [0, 360)
                    band_cat[ra_col] %= 360.0

                # And cut down to objects that are not in the boundary region
                # after association.
                inpix = (hpg.angle_to_pixel(nside_split, band_cat[ra_col], band_cat[dec_col]) == pix)
                if inpix.sum() > 0:
                    band_cats.append(band_cat[inpix])

            if len(band_cats) == 0:
                self.log.info('Found 0 primary stars in %s band.', primary_band)
                continue

            band_cat = np.concatenate(band_cats)

            # Match to previous band catalog(s), and remove duplicates.
            if primary_star_cat is None or len(primary_star_cat) == 0:
                primary_star_cat = band_cat
            else:
                with Matcher(band_cat[ra_col], band_cat[dec_col]) as matcher:
                    idx = matcher.query_radius(primary_star_cat[ra_col],
                                               primary_star_cat[dec_col],
                                               self.config.match_radius/3600.)
                # Any object with a match should be removed.
                match_indices = np.array([i for i in range(len(idx)) if len(idx[i]) > 0])
                if len(match_indices) > 0:
                    band_cat = np.delete(band_cat, match_indices)

                primary_star_cat = np.append(primary_star_cat, band_cat)
            self.log.info('Found %d primary stars in %s band.', len(band_cat), primary_band)

        # If everything was cut, we still want the correct datatype.
        if primary_star_cat is None:
            primary_star_cat = np.zeros(0, dtype=dtype)

        return primary_star_cat

    def _remove_neighbors(self, primary_star_cat):
        """Remove neighbors from the primary star catalog.

        Parameters
        ----------
        primary_star_cat : `np.ndarray`
            Primary star catalog.

        Returns
        -------
        primary_star_cat_cut : `np.ndarray`
            Primary star cat with neighbors removed.
        """
        ra_col = self.config.ra_column
        dec_col = self.config.dec_column

        with Matcher(primary_star_cat[ra_col], primary_star_cat[dec_col]) as matcher:
            # By setting min_match=2 objects that only match to themselves
            # will not be recorded.
            try:
                # New smatch API
                idx = matcher.query_groups(self.config.isolation_radius/3600., min_match=2)
            except AttributeError:
                # Old smatch API
                idx = matcher.query_self(self.config.isolation_radius/3600., min_match=2)

        try:
            neighbor_indices = np.concatenate(idx)
        except ValueError:
            neighbor_indices = np.zeros(0, dtype=int)
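        # np.concatenate raises ValueError when idx is empty, i.e. when no
        # star has a neighbor within the isolation radius.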

        if len(neighbor_indices) > 0:
            neighbored = np.unique(neighbor_indices)
            self.log.info('Cutting %d objects with close neighbors.', len(neighbored))
            primary_star_cat = np.delete(primary_star_cat, neighbored)

        return primary_star_cat

    def _match_sources(self, bands, star_source_cat, primary_star_cat):
        """Match individual sources to primary stars.

        Parameters
        ----------
        bands : `list` [`str`]
            List of bands.
        star_source_cat : `np.ndarray`
            Array of star sources.
        primary_star_cat : `np.ndarray`
            Array of primary stars.

        Returns
        -------
        star_source_cat_sorted : `np.ndarray`
            Sorted and cropped array of star sources.
        primary_star_cat : `np.ndarray`
            Catalog of isolated stars, with indexes to star_source_cat_cut.
        """
        ra_col = self.config.ra_column
        dec_col = self.config.dec_column

        # We match sources per-band because it allows us to have sorted
        # sources for easy retrieval of per-band matches.
        n_source_per_band_per_obj = np.zeros((len(bands),
                                              len(primary_star_cat)),
                                             dtype=np.int32)
        band_uses = []
        idxs = []
        with Matcher(primary_star_cat[ra_col], primary_star_cat[dec_col]) as matcher:
            for b, band in enumerate(bands):
                band_use, = np.where(star_source_cat['band'] == band)

                idx = matcher.query_radius(star_source_cat[ra_col][band_use],
                                           star_source_cat[dec_col][band_use],
                                           self.config.match_radius/3600.)
                n_source_per_band_per_obj[b, :] = np.array([len(row) for row in idx])
                idxs.append(idx)
                band_uses.append(band_use)

        n_source_per_obj = np.sum(n_source_per_band_per_obj, axis=0)

        primary_star_cat['nsource'] = n_source_per_obj
        primary_star_cat['source_cat_index'][1:] = np.cumsum(n_source_per_obj)[:-1]
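        # After the cumulative sum, the sources associated with star i occupy
        # rows [source_cat_index[i], source_cat_index[i] + nsource[i]) of the
        # sorted source catalog built below.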

        n_tot_source = primary_star_cat['source_cat_index'][-1] + primary_star_cat['nsource'][-1]

        # Temporary arrays until we crop/sort the source catalog
        source_index = np.zeros(n_tot_source, dtype=np.int32)
        obj_index = np.zeros(n_tot_source, dtype=np.int32)

        ctr = 0
        for i in range(len(primary_star_cat)):
            obj_index[ctr: ctr + n_source_per_obj[i]] = i
            for b in range(len(bands)):
                source_index[ctr: ctr + n_source_per_band_per_obj[b, i]] = band_uses[b][idxs[b][i]]
                ctr += n_source_per_band_per_obj[b, i]

        source_cat_index_band_offset = np.cumsum(n_source_per_band_per_obj, axis=0)

        for b, band in enumerate(bands):
            primary_star_cat[f'nsource_{band}'] = n_source_per_band_per_obj[b, :]
            if b == 0:
                # The first band listed is the same as the overall star
                primary_star_cat[f'source_cat_index_{band}'] = primary_star_cat['source_cat_index']
            else:
                # Other band indices are offset from the previous band
                primary_star_cat[f'source_cat_index_{band}'] = (primary_star_cat['source_cat_index']
                                                                + source_cat_index_band_offset[b - 1, :])
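        # With this layout, the sources of star i observed in a given band
        # are the contiguous rows [source_cat_index_{band}[i],
        # source_cat_index_{band}[i] + nsource_{band}[i]) of the sorted
        # source catalog, since each star's sources are stored grouped by
        # band in band_order.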

        star_source_cat = star_source_cat[source_index]
        star_source_cat['obj_index'] = obj_index

        return star_source_cat, primary_star_cat

    def _compute_unique_ids(self, skymap, tract, nstar):
        """Compute unique star ids.

        This is a simple hash of the tract and star to provide an
        id that is unique for a given processing.

        Parameters
        ----------
        skymap : `lsst.skymap.SkyMap`
            Skymap object.
        tract : `int`
            Tract id number.
        nstar : `int`
            Number of stars.

        Returns
        -------
        ids : `np.ndarray`
            Array of unique star ids.
        """
        # The end of the id will be big enough to hold the tract number
        mult = 10**(int(np.log10(len(skymap))) + 1)
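        # For example, a skymap with between 10000 and 99999 tracts gives
        # mult = 100000, so the k-th star (1-indexed) in tract 1234 gets
        # id k*100000 + 1234; the trailing digits always encode the tract.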

        return (np.arange(nstar) + 1)*mult + tract

    def _get_primary_dtype(self, primary_bands):
        """Get the numpy datatype for the primary star catalog.

        Parameters
        ----------
        primary_bands : `list` [`str`]
            List of primary bands.

        Returns
        -------
        dtype : `numpy.dtype`
            Datatype of the primary catalog.
        """
        max_len = max([len(primary_band) for primary_band in primary_bands])

        dtype = [('isolated_star_id', 'i8'),
                 (self.config.ra_column, 'f8'),
                 (self.config.dec_column, 'f8'),
                 ('primary_band', f'U{max_len}'),
                 ('source_cat_index', 'i4'),
                 ('nsource', 'i4')]

        for band in primary_bands:
            dtype.append((f'source_cat_index_{band}', 'i4'))
            dtype.append((f'nsource_{band}', 'i4'))

        return dtype