21 """Classes used in `RepoWalker` construction.
23 The objects here form a temporary tree that is pruned and then transformed
24 into a similar tree of `PathElementHandler` instances. See `BuilderNode`
25 method documentation for more information.
27 from __future__
import annotations
# Public API of this module; the remaining classes are construction details.
__all__ = ["BuilderSkipInput", "BuilderTargetInput", "BuilderTree"]
31 from abc
import ABC, abstractmethod
42 from lsst.daf.butler
import DatasetType, DimensionUniverse, StorageClass, FormatterParameter
43 from ..translators
import TranslatorFactory
44 from .parser
import PathElementParser
45 from .scanner
import PathElementHandler, DirectoryScanner
46 from .handlers
import (IgnoreHandler, SubdirectoryHandler, SkipHandler,
51 """Abstract interface for nodes in the temporary tree that is used to
52 construct a `RepoWalker`.
@abstractmethod
def prune(self) -> Tuple[BuilderNode, List[str], bool]:
    """Attempt to prune this node and its children from the tree.

    Returns
    -------
    replacement : `BuilderNode`
        The result of recursively pruning child nodes; often just ``self``.
    messages : `list` [`str`]
        Warning messages that should be logged by a parent node when a
        matching path element is encountered, if this node is pruned.
    prune : `bool`
        If `True`, this node may be pruned from the tree (but will not
        necessarily be - it may correspond to a path element that should
        be skipped with siblings that should not be).
    """
    raise NotImplementedError()
@abstractmethod
def build(self, parser: PathElementParser, allKeys: Dict[str, type], cumulativeKeys: Dict[str, type], *,
          fileIgnoreRegEx: Optional[re.Pattern], dirIgnoreRegEx: Optional[re.Pattern]
          ) -> PathElementHandler:
    """Transform this node in the build tree into a corresponding
    `PathElementHandler`, recursing to any children.

    Must be called after `prune`.

    Parameters
    ----------
    parser : `PathElementParser`
        An object that matches the path element the new handler is
        responsible for and extracts a (partial) Gen2 data ID from it.
    allKeys : `dict` [`str`, `type`]
        A mapping from Gen2 data ID key to the type of its value.  Will
        contain all keys that may be extracted by the given parser, and
        possibly others.
    cumulativeKeys : `dict` [`str`, `type`], optional
        A dictionary containing key strings and types for Gen2 data ID keys
        that have been extracted from previous path elements for this
        template, including those extracted by ``parser``.
    fileIgnoreRegEx : `re.Pattern`, optional
        A regular expression pattern that identifies non-dataset files that
        can be ignored, to be applied at all levels of the directory tree.
    dirIgnoreRegEx : `re.Pattern`, optional
        A regular expression pattern that identifies non-dataset
        subdirectories that can be ignored, to be applied at all levels of
        the directory tree.

    Returns
    -------
    handler : `PathElementHandler`
        A new handler instance.
    """
    raise NotImplementedError()
105 """An intermediate base for `BuilderNode` classes that are provided as
106 direct inputs to a `RepoWalker`, and generally correspond to exactly one
112 The complete Gen2 template to be matched (not just the template for
114 keys : `dict` [`str`, `type`]
115 A mapping from Gen2 data ID key to the type of its value.
117 def __init__(self, template: str, keys: Dict[str, type]):
123 """The complete Gen2 template to be matched (`str`).
126 keys: Dict[str, type]
127 """A mapping from Gen2 data ID key to the type of its value
128 (`dict` [`str`, `type`]).
132 """The path elements (file or directory levels) of `template`
138 """An input to a `RepoWalker` that indicates that matched files should be
139 skipped, possibly with a warning message.
141 BuilderSkipInputs can be pruned. When they are not pruned, they build
142 `SkipHandler` instances.
147 The complete Gen2 template to be matched (not just the template for
149 keys : `dict` [`str`, `type`]
150 A mapping from Gen2 data ID key to the type of its value.
151 message : `str`, optional
152 If not `None`, a warning message that should be printed either when a
153 matching file is enountered or a directory that may contain such files
155 isForFiles : `bool`, optional
156 If `True` (default), this handler should be run on files. Otherwise it
157 should be run on directories.
159 def __init__(self, template: str, keys: Dict[str, type], message: Optional[str] =
None, *,
160 isForFiles: bool =
True):
161 super().
__init__(template=template, keys=keys)
165 def build(self, parser: PathElementParser, allKeys: Dict[str, type], cumulativeKeys: Dict[str, type], *,
166 fileIgnoreRegEx: Optional[re.Pattern], dirIgnoreRegEx: Optional[re.Pattern]
167 ) -> PathElementHandler:
def prune(self) -> Tuple[BuilderNode, List[str], bool]:
    # Docstring inherited from BuilderNode.
    # Skip inputs are always prunable; propagate the warning message (if
    # any) so a parent can log it when a matching path is encountered.
    return self, [self._message] if self._message is not None else [], True
177 """An input to a `RepoWalker` that matches files that correspond to
178 datasets that we want to extract.
180 BuilderTargetInputs can never be pruned, and always build
181 `TargetFileHandler` instances.
185 datasetTypeName : `str`
186 Name of the dataset type.
188 Full Gen2 filename template.
189 keys : `dict` [`str`, `type`]
190 Dictionary that maps Gen2 data ID key to the type of its value.
191 storageClass : `StorageClass`
192 `StorageClass` for the Gen3 dataset type.
193 universe : `DimensionUniverse`
194 All candidate dimensions for the Gen3 dataset type.
195 formatter : `lsst.daf.butler.Formatter` or `str`, optional
196 A Gen 3 formatter class or fully-qualified name.
197 translatorFactory : `TranslatorFactory`
198 Object that can be used to construct data ID translators.
199 targetHandler : `PathElementHandler`, optional
200 Override target handler for this dataset type.
202 Additional keyword arguments are passed to `Translator.makeMatching`,
203 in along with ``datasetTypeName`` and ``keys``.
205 def __init__(self, *, datasetTypeName: str, template: str, keys: Dict[str, type],
206 storageClass: StorageClass, universe: DimensionUniverse,
207 formatter: FormatterParameter, translatorFactory: TranslatorFactory,
208 targetHandler: Optional[PathElementHandler] =
None,
212 template = template.split(
'[%(')[0]
213 super().
__init__(template=template, keys=keys)
214 self.
_translator_translator = translatorFactory.makeMatching(datasetTypeName, keys, **kwargs)
216 storageClass=storageClass, universe=universe,
217 isCalibration=(
"calibDate" in keys))
219 if targetHandler
is None:
220 targetHandler = TargetFileHandler
221 self.
_handler_handler = targetHandler
223 def build(self, parser: PathElementParser, allKeys: Dict[str, type], cumulativeKeys: Dict[str, type], *,
224 fileIgnoreRegEx: Optional[re.Pattern], dirIgnoreRegEx: Optional[re.Pattern]
225 ) -> PathElementHandler:
def prune(self) -> Tuple[BuilderNode, List[str], bool]:
    # Docstring inherited from BuilderNode.
    # Target inputs correspond to datasets we want to extract, so they can
    # never be pruned and carry no warning messages.
    return self, [], False
234 datasetType: DatasetType
235 """The Gen3 dataset type extracted by the handler this object builds
236 (`lsst.daf.butler.DatasetType`).
241 """A `BuilderNode` that represents a subdirectory to be skipped,
242 created by pruning `BuilderTree` that contained only `BuilderSkipInput`
245 BuilderPrunedTrees can be pruned. When they are not pruned, they
246 build `SkipHandler` instances.
250 messages : `list` [`str`]
251 A list of warning messages to be printed when the handler produced by
252 this builder matches a subdirectory.
258 def build(self, parser: PathElementParser, allKeys: Dict[str, type], cumulativeKeys: Dict[str, type], *,
259 fileIgnoreRegEx: Optional[re.Pattern], dirIgnoreRegEx: Optional[re.Pattern]
260 ) -> PathElementHandler:
263 return SkipHandler(parser=parser, isForFiles=
False, message=message)
def prune(self) -> Tuple[BuilderNode, List[str], bool]:
    # Docstring inherited from BuilderNode.
    # A pruned subtree is itself always prunable; forward the accumulated
    # warning messages from the children that were pruned into it.
    return self, self._messages, True
271 """A `BuilderNode` that represents a collection of `BuilderInput` instances
272 that all have the same template.
274 def __init__(self, old: BuilderInput, new: BuilderInput):
276 if isinstance(old, BuilderDuplicateInputs):
277 self.
_children_children.extend(old._children)
283 def build(self, parser: PathElementParser, allKeys: Dict[str, type], cumulativeKeys: Dict[str, type], *,
284 fileIgnoreRegEx: Optional[re.Pattern], dirIgnoreRegEx: Optional[re.Pattern]
285 ) -> PathElementHandler:
288 return SkipHandler(parser=parser, isForFiles=
False, message=message)
290 def prune(self) -> Tuple[BuilderNode, List[str], bool]:
295 newChild, childMessages, toPruneChild = child.prune()
297 self.
_messages_messages.extend(childMessages)
299 unprunable.append(newChild)
300 newChildren.append(newChildren)
302 if len(unprunable) == 0:
305 return self, self.
_messages_messages,
True
306 elif len(unprunable) == 1
and not self.
_messages_messages:
311 return unprunable[0], [],
False
317 nested = [f
"{c.datasetType.name} (target)" for c
in unprunable]
319 self.
_messages_messages = [f
"ambiguous match: [{', '.join(nested)}]"]
320 return self, self.
_messages_messages,
True
324 """A `BuilderNode` that represents a directory.
326 This is the only `BuilderNode` class that is not a leaf node. If all
327 of its children can be pruned, it is replaced by a `BuilderPrunedTree`
328 (which can then be pruned itself). It builds `SubdirectoryHandler`
329 instances when not pruned.
334 def insert(self, level: int, leaf: BuilderInput):
335 """Insert an input leaf node into the tree, recursively constructing
336 intermediate parents in order to put it at the right level.
341 The level ``self``is at in the larger tree, with zero the
342 repository root. The right level for the leaf is given by the
343 length of ``leaf.elements``.
344 leaf : `BuilderInput`
345 The leaf node to insert.
347 nextLevel = level + 1
348 element = leaf.elements[level]
349 if nextLevel == len(leaf.elements):
350 conflict = self.
_children_children.get(element)
351 if conflict
is not None:
358 child.insert(nextLevel, leaf)
360 def fill(self, scanner: DirectoryScanner, allKeys: Dict[str, type], previousKeys: Dict[str, type], *,
361 fileIgnoreRegEx: Optional[re.Pattern], dirIgnoreRegEx: Optional[re.Pattern]):
362 """Fill a `DirectoryScanner` instance by recursively building all
367 scanner : `DirectoryScanner`
369 allKeys : `dict` [`str`, `type`]
370 Mapping from Gen2 data ID key to its value type, covering all keys
371 that could be used in any child template.
372 previousKeys : `dict` [`str`, `type`], optional
373 A dictionary containing key strings and types for Gen2 data ID keys
374 that have been extracted from previous path elements of the same
376 fileIgnoreRegEx : `re.Pattern`, optional
377 A regular expression pattern that identifies non-dataset files that
378 can be ignored, to be applied at all levels of the directory tree.
379 dirIgnoreRegEx : `re.Pattern`, optional
380 A regular expression pattern that identifies non-dataset
381 subdirectories that can be ignored, to be applied at all levels of
384 if fileIgnoreRegEx
is not None:
386 if dirIgnoreRegEx
is not None:
390 cumulativeKeys = previousKeys.copy()
391 cumulativeKeys.update(parser.keys)
392 scanner.add(child.build(parser, allKeys, cumulativeKeys, fileIgnoreRegEx=fileIgnoreRegEx,
393 dirIgnoreRegEx=dirIgnoreRegEx))
395 def prune(self) -> Tuple[BuilderNode, List[str], bool]:
402 newChild, childMessages, toPruneChild = child.prune()
403 newChildren[template] = newChild
404 messages.extend(childMessages)
411 return self, [],
False
413 def build(self, parser: PathElementParser, allKeys: Dict[str, type], cumulativeKeys: Dict[str, type], *,
414 fileIgnoreRegEx: Optional[re.Pattern], dirIgnoreRegEx: Optional[re.Pattern]
415 ) -> PathElementHandler:
418 self.
fillfill(built.scanner, allKeys, cumulativeKeys, fileIgnoreRegEx=fileIgnoreRegEx,
419 dirIgnoreRegEx=dirIgnoreRegEx)
std::vector< SchemaItem< Flag > > * items
PathElementHandler build(self, PathElementParser parser, Dict[str, type] allKeys, Dict[str, type] cumulativeKeys, *Optional[re.Pattern] fileIgnoreRegEx, Optional[re.Pattern] dirIgnoreRegEx)
Tuple[BuilderNode, List[str], bool] prune(self)
def __init__(self, List[str] messages)
Tuple[BuilderNode, List[str], bool] prune(self)
PathElementHandler build(self, PathElementParser parser, Dict[str, type] allKeys, Dict[str, type] cumulativeKeys, *Optional[re.Pattern] fileIgnoreRegEx, Optional[re.Pattern] dirIgnoreRegEx)
PathElementHandler build(self, PathElementParser parser, Dict[str, type] allKeys, Dict[str, type] cumulativeKeys, *Optional[re.Pattern] fileIgnoreRegEx, Optional[re.Pattern] dirIgnoreRegEx)
def fill(self, DirectoryScanner scanner, Dict[str, type] allKeys, Dict[str, type] previousKeys, *Optional[re.Pattern] fileIgnoreRegEx, Optional[re.Pattern] dirIgnoreRegEx)
def insert(self, int level, BuilderInput leaf)
Tuple[BuilderNode, List[str], bool] prune(self)
std::shared_ptr< FrameSet > append(FrameSet const &first, FrameSet const &second)
Construct a FrameSet that performs two transformations in series.
daf::base::PropertyList * list