walker.py
# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""High-level interface to the Gen2 repository-walking functionality defined
by this package.
"""
from __future__ import annotations

__all__ = ["RepoWalker"]

from collections import defaultdict
import re
from typing import (
    Callable,
    ClassVar,
    Dict,
    Iterable,
    List,
    Mapping,
    Optional,
    Union,
)

from lsst.log import Log
from lsst.daf.butler import (
    DataCoordinate,
    DatasetType,
    FileDataset,
)
from .builders import BuilderTargetInput, BuilderSkipInput, BuilderTree
from .scanner import DirectoryScanner


class RepoWalker:
    """An object that recursively walks a Gen2 data repository tree, extracting
    Gen3 `FileDataset` objects and warning about unrecognized or unconvertible
    Gen2 datasets.

    Parameters
    ----------
    inputs : `~collections.abc.Iterable` of `Target` or `Skip`
        Structs that indicate dataset types to be extracted (`Target`) or
        explicitly skipped (`Skip`).  Skips may include a warning message to
        log when matching entries are encountered.
    fileIgnoreRegEx : `re.Pattern`, optional
        A regular expression pattern that identifies non-dataset files that
        can be ignored, to be applied at all levels of the directory tree.
    dirIgnoreRegEx : `re.Pattern`, optional
        A regular expression pattern that identifies non-dataset subdirectories
        that can be ignored, to be applied at all levels of the directory tree.
    log : `Log`, optional
        Logger for warnings and diagnostic information.
    """
    def __init__(self, inputs: Iterable[Union[Target, Skip]], *,
                 fileIgnoreRegEx: Optional[re.Pattern] = None,
                 dirIgnoreRegEx: Optional[re.Pattern] = None,
                 log: Optional[Log] = None):
        super().__init__()
        if log is None:
            log = Log.getLogger("obs.base.gen2to3.TranslatorFactory")
        self.log = log
        tree = BuilderTree()
        allKeys: Dict[str, type] = {}
        for leaf in inputs:
            tree.insert(0, leaf)
            for key, dtype in leaf.keys.items():
                if allKeys.setdefault(key, dtype) != dtype:
                    raise ValueError(f"Multiple types for key '{key}': {dtype} "
                                     f"(from {leaf.template}) vs. {allKeys[key]}.")
        tree, messages, pruned = tree.prune()
        if not pruned:
            self._scanner = DirectoryScanner(log=self.log)
            tree.fill(self._scanner, allKeys, {}, fileIgnoreRegEx=fileIgnoreRegEx,
                      dirIgnoreRegEx=dirIgnoreRegEx)
        else:
            # Nothing to do; just remember this for later to avoid disturbing
            # higher-level code with the fact that walk() will be a no-op.
            self._scanner = None

    Target: ClassVar[type] = BuilderTargetInput
    """An input struct type whose instances represent a dataset type to be
    extracted (`type`).
    """

    Skip: ClassVar[type] = BuilderSkipInput
    """An input struct type whose instances represent a dataset type to be
    explicitly skipped.
    """

    def walk(self, root: str, *, predicate: Optional[Callable[[DataCoordinate], bool]]
             ) -> Mapping[DatasetType, Mapping[Optional[str], List[FileDataset]]]:
        """Walk a Gen2 repository root to extract Gen3 `FileDataset` instances
        from it.

        Parameters
        ----------
        root : `str`
            Absolute path to the repository root.
        predicate : `~collections.abc.Callable`, optional
            If not `None`, a callable that returns `True` if a `DataCoordinate`
            is consistent with what we want to extract.  If ``predicate``
            returns `False`, the file or directory that data ID was extracted
            from will not be processed, even if it includes target dataset
            types.

        Returns
        -------
        datasets : `defaultdict` [`DatasetType`, `defaultdict` ]
            Extracted datasets, grouped by Gen3 `DatasetType`.  Nested dict
            keys are "CALIBDATE" strings (for calibration datasets) or `None`
            (otherwise).  Nested dict values are lists of `FileDataset`.
        """
        if predicate is None:
            def predicate(dataId: DataCoordinate) -> bool:
                return True
        datasets = defaultdict(lambda: defaultdict(list))
        if self._scanner is not None:
            self._scanner.scan(root, datasets, predicate=predicate)
        return datasets
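
A minimal usage sketch follows, showing how the interface above fits together: construct a RepoWalker from Target/Skip input structs plus optional ignore patterns, call walk on a Gen2 repository root, and iterate the nested mapping it returns. This sketch is not part of walker.py; the import path, the repository path, and the regular expressions are assumptions, and the empty inputs list is a placeholder for the structs that instrument-specific conversion code would normally build (their constructors are defined in builders.py, not shown here).

# Usage sketch (not part of walker.py).  The import path, GEN2_ROOT, and the
# ignore patterns below are assumptions; ``inputs`` would normally be an
# iterable of RepoWalker.Target / RepoWalker.Skip structs built by
# instrument-specific conversion code.
import re

from lsst.obs.base.gen2to3.repoWalker import RepoWalker

GEN2_ROOT = "/path/to/gen2/repo"   # hypothetical Gen2 repository root

inputs = []   # placeholder for Target/Skip structs built elsewhere

walker = RepoWalker(
    inputs,
    fileIgnoreRegEx=re.compile(r".*\.log$"),    # ignore non-dataset files
    dirIgnoreRegEx=re.compile(r"^_parent$"),    # ignore non-dataset subdirectories
)

# ``predicate`` is keyword-only and may be None, in which case every data ID
# is accepted; pass a callable taking a DataCoordinate to filter extraction.
results = walker.walk(GEN2_ROOT, predicate=None)

# The return value is a nested mapping:
#   DatasetType -> {"CALIBDATE" string or None -> [FileDataset, ...]}
for datasetType, byCalibDate in results.items():
    for calibDate, fileDatasets in byCalibDate.items():
        print(datasetType.name, calibDate, len(fileDatasets))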