LSSTApplications  20.0.0
LSSTDataManagementBasePackage
connections.py
Go to the documentation of this file.
1 # This file is part of pipe_base.
2 #
3 # Developed for the LSST Data Management System.
4 # This product includes software developed by the LSST Project
5 # (http://www.lsst.org).
6 # See the COPYRIGHT file at the top-level directory of this distribution
7 # for details of code ownership.
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the GNU General Public License
20 # along with this program. If not, see <http://www.gnu.org/licenses/>.
21 
22 """Module defining connection classes for PipelineTask.
23 """
24 
25 __all__ = ["PipelineTaskConnections", "InputQuantizedConnection", "OutputQuantizedConnection",
26  "DeferredDatasetRef", "iterConnections"]
27 
28 from collections import UserDict, namedtuple
29 from types import SimpleNamespace
30 import typing
31 
32 import itertools
33 import string
34 
35 from . import config as configMod
36 from .connectionTypes import (InitInput, InitOutput, Input, PrerequisiteInput,
37  Output, BaseConnection)
38 from lsst.daf.butler import DatasetRef, DatasetType, NamedKeyDict, Quantum
39 
40 if typing.TYPE_CHECKING:
41  from .config import PipelineTaskConfig
42 
43 
class ScalarError(TypeError):
    """Exception raised when a connection is declared as scalar
    (``multiple`` set to `False`) but the Quantum supplies more than one
    data ID for that dataset type.
    """
48 
49 
class PipelineTaskConnectionDict(UserDict):
    """This is a special dict class used by PipelineTaskConnectionMetaclass

    This dict is used in PipelineTaskConnection class creation, as the
    dictionary that is initially used as __dict__. It exists to
    intercept connection fields declared in a PipelineTaskConnection, and
    what name is used to identify them. The names are then added to class
    level list according to the connection type of the class attribute. The
    names are also used as keys in a class level dictionary associated with
    the corresponding class attribute. This information is a duplicate of
    what exists in __dict__, but provides a simple place to lookup and
    iterate on only these variables.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Initialize class level variables used to track any declared
        # class level variables that are instances of
        # connectionTypes.BaseConnection
        self.data['inputs'] = []
        self.data['prerequisiteInputs'] = []
        self.data['outputs'] = []
        self.data['initInputs'] = []
        self.data['initOutputs'] = []
        self.data['allConnections'] = {}

    def __setitem__(self, name, value):
        # Record the attribute name in the container that matches the
        # declared connection type.
        if isinstance(value, Input):
            self.data['inputs'].append(name)
        elif isinstance(value, PrerequisiteInput):
            self.data['prerequisiteInputs'].append(name)
        elif isinstance(value, Output):
            self.data['outputs'].append(name)
        elif isinstance(value, InitInput):
            self.data['initInputs'].append(name)
        elif isinstance(value, InitOutput):
            self.data['initOutputs'].append(name)
        # This should not be an elif, as it needs to be tested for
        # everything that inherits from BaseConnection
        if isinstance(value, BaseConnection):
            object.__setattr__(value, 'varName', name)
            self.data['allConnections'][name] = value
        # defer to the default behavior
        super().__setitem__(name, value)
93 
94 
class PipelineTaskConnectionsMetaclass(type):
    """Metaclass used in the declaration of PipelineTaskConnections classes
    """
    def __prepare__(name, bases, **kwargs):  # noqa: N805
        # Create an instance of our special dict to catch and track all
        # variables that are instances of connectionTypes.BaseConnection
        dct = PipelineTaskConnectionDict()
        # Copy any existing connections from a parent class
        for base in bases:
            if isinstance(base, PipelineTaskConnectionsMetaclass):
                for name, value in base.allConnections.items():
                    dct[name] = value
        return dct

    def __new__(cls, name, bases, dct, **kwargs):
        dimensionsValueError = TypeError("PipelineTaskConnections class must be created with a dimensions "
                                         "attribute which is an iterable of dimension names")

        if name != 'PipelineTaskConnections':
            # Verify that dimensions are passed as a keyword in class
            # declaration; fall back to the first base that defines them.
            if 'dimensions' not in kwargs:
                for base in bases:
                    if hasattr(base, 'dimensions'):
                        kwargs['dimensions'] = base.dimensions
                        break
                if 'dimensions' not in kwargs:
                    raise dimensionsValueError
            try:
                dct['dimensions'] = set(kwargs['dimensions'])
            except TypeError as exc:
                raise dimensionsValueError from exc
            # Lookup any python string templates that may have been used in the
            # declaration of the name field of a class connection attribute
            allTemplates = set()
            stringFormatter = string.Formatter()
            # Loop over all connections
            for obj in dct['allConnections'].values():
                nameValue = obj.name
                # add all the parameters to the set of templates
                for param in stringFormatter.parse(nameValue):
                    if param[1] is not None:
                        allTemplates.add(param[1])

            # look up any template from base classes and merge them all
            # together
            mergeDict = {}
            for base in bases[::-1]:
                if hasattr(base, 'defaultTemplates'):
                    mergeDict.update(base.defaultTemplates)
            if 'defaultTemplates' in kwargs:
                mergeDict.update(kwargs['defaultTemplates'])

            if len(mergeDict) > 0:
                kwargs['defaultTemplates'] = mergeDict

            # Verify that if templated strings were used, defaults were
            # supplied as an argument in the declaration of the connection
            # class
            if len(allTemplates) > 0 and 'defaultTemplates' not in kwargs:
                raise TypeError("PipelineTaskConnection class contains templated attribute names, but no "
                                "default templates were provided, add a dictionary attribute named "
                                "defaultTemplates which contains the mapping between template key and value")
            if len(allTemplates) > 0:
                # Verify all templates have a default, and throw if they do not
                defaultTemplateKeys = set(kwargs['defaultTemplates'].keys())
                templateDifference = allTemplates.difference(defaultTemplateKeys)
                if templateDifference:
                    raise TypeError(f"Default template keys were not provided for {templateDifference}")
                # Verify that templates do not share names with variable names
                # used for a connection, this is needed because of how
                # templates are specified in an associated config class.
                nameTemplateIntersection = allTemplates.intersection(set(dct['allConnections'].keys()))
                if len(nameTemplateIntersection) > 0:
                    raise TypeError(f"Template parameters cannot share names with Class attributes"
                                    f" (conflicts are {nameTemplateIntersection}).")
            dct['defaultTemplates'] = kwargs.get('defaultTemplates', {})

        # Convert all the connection containers into frozensets so they cannot
        # be modified at the class scope
        for connectionName in ("inputs", "prerequisiteInputs", "outputs", "initInputs", "initOutputs"):
            dct[connectionName] = frozenset(dct[connectionName])
        # our custom dict type must be turned into an actual dict to be used in
        # type.__new__
        return super().__new__(cls, name, bases, dict(dct))

    def __init__(cls, name, bases, dct, **kwargs):
        # This overrides the default init to drop the kwargs argument. Python
        # metaclasses will have this argument set if any kwargs are passed at
        # class construction time, but it must be consumed before calling
        # __init__ on the type metaclass. This is in accordance with python
        # documentation on metaclasses
        super().__init__(name, bases, dct)
189 
190 class QuantizedConnection(SimpleNamespace):
191  """A Namespace to map defined variable names of connections to their
192  `lsst.daf.buter.DatasetRef`s
193 
194  This class maps the names used to define a connection on a
195  PipelineTaskConnectionsClass to the corresponding
196  `lsst.daf.butler.DatasetRef`s provided by a `lsst.daf.butler.Quantum`
197  instance. This will be a quantum of execution based on the graph created
198  by examining all the connections defined on the
199  `PipelineTaskConnectionsClass`.
200  """
201  def __init__(self, **kwargs):
202  # Create a variable to track what attributes are added. This is used
203  # later when iterating over this QuantizedConnection instance
204  object.__setattr__(self, "_attributes", set())
205 
206  def __setattr__(self, name: str, value: typing.Union[DatasetRef, typing.List[DatasetRef]]):
207  # Capture the attribute name as it is added to this object
208  self._attributes.add(name)
209  super().__setattr__(name, value)
210 
211  def __delattr__(self, name):
212  object.__delattr__(self, name)
213  self._attributes.remove(name)
214 
215  def __iter__(self) -> typing.Generator[typing.Tuple[str, typing.Union[DatasetRef,
216  typing.List[DatasetRef]]], None, None]:
217  """Make an Iterator for this QuantizedConnection
218 
219  Iterating over a QuantizedConnection will yield a tuple with the name
220  of an attribute and the value associated with that name. This is
221  similar to dict.items() but is on the namespace attributes rather than
222  dict keys.
223  """
224  yield from ((name, getattr(self, name)) for name in self._attributes)
225 
226  def keys(self) -> typing.Generator[str, None, None]:
227  """Returns an iterator over all the attributes added to a
228  QuantizedConnection class
229  """
230  yield from self._attributes
231 
232 
class InputQuantizedConnection(QuantizedConnection):
    # Namespace of input dataset references for a quantum; all behavior is
    # inherited from QuantizedConnection. (Class header restored — it was
    # lost in extraction; this name is instantiated in buildDatasetRefs and
    # exported via __all__.)
    pass
236 
class OutputQuantizedConnection(QuantizedConnection):
    """Namespace of output dataset references for a quantum; all behavior
    is inherited from `QuantizedConnection`.
    """
239 
240 
class DeferredDatasetRef(namedtuple("DeferredDatasetRefBase", "datasetRef")):
    """Wrapper marking a datasetRef to be treated as deferred when
    interacting with the butler.

    Parameters
    ----------
    datasetRef : `lsst.daf.butler.DatasetRef`
        The `lsst.daf.butler.DatasetRef` that will eventually be used to
        resolve a dataset
    """
    # No per-instance __dict__; instances stay as lightweight as the tuple.
    __slots__ = ()
253 
class PipelineTaskConnections(metaclass=PipelineTaskConnectionsMetaclass):
    """PipelineTaskConnections is a class used to declare desired IO when a
    PipelineTask is run by an activator

    Parameters
    ----------
    config : `PipelineTaskConfig`
        A `PipelineTaskConfig` class instance whose class has been configured
        to use this `PipelineTaskConnectionsClass`

    Notes
    -----
    ``PipelineTaskConnection`` classes are created by declaring class
    attributes of types defined in `lsst.pipe.base.connectionTypes` and are
    listed as follows:

    * ``InitInput`` - Defines connections in a quantum graph which are used as
      inputs to the ``__init__`` function of the `PipelineTask` corresponding
      to this class
    * ``InitOutput`` - Defines connections in a quantum graph which are to be
      persisted using a butler at the end of the ``__init__`` function of the
      `PipelineTask` corresponding to this class. The variable name used to
      define this connection should be the same as an attribute name on the
      `PipelineTask` instance. E.g. if an ``InitOutput`` is declared with
      the name ``outputSchema`` in a ``PipelineTaskConnections`` class, then
      a `PipelineTask` instance should have an attribute
      ``self.outputSchema`` defined. Its value is what will be saved by the
      activator framework.
    * ``PrerequisiteInput`` - An input connection type that defines a
      `lsst.daf.butler.DatasetType` that must be present at execution time,
      but that will not be used during the course of creating the quantum
      graph to be executed. These most often are things produced outside the
      processing pipeline, such as reference catalogs.
    * ``Input`` - Input `lsst.daf.butler.DatasetType` objects that will be used
      in the ``run`` method of a `PipelineTask`. The name used to declare
      class attribute must match a function argument name in the ``run``
      method of a `PipelineTask`. E.g. If the ``PipelineTaskConnections``
      defines an ``Input`` with the name ``calexp``, then the corresponding
      signature should be ``PipelineTask.run(calexp, ...)``
    * ``Output`` - A `lsst.daf.butler.DatasetType` that will be produced by an
      execution of a `PipelineTask`. The name used to declare the connection
      must correspond to an attribute of a `Struct` that is returned by a
      `PipelineTask` ``run`` method. E.g. if an output connection is
      defined with the name ``measCat``, then the corresponding
      ``PipelineTask.run`` method must return ``Struct(measCat=X,..)`` where
      X matches the ``storageClass`` type defined on the output connection.

    The process of declaring a ``PipelineTaskConnection`` class involves
    parameters passed in the declaration statement.

    The first parameter is ``dimensions`` which is an iterable of strings which
    defines the unit of processing the run method of a corresponding
    `PipelineTask` will operate on. These dimensions must match dimensions that
    exist in the butler registry which will be used in executing the
    corresponding `PipelineTask`.

    The second parameter is labeled ``defaultTemplates`` and is conditionally
    optional. The name attributes of connections can be specified as python
    format strings, with named format arguments. If any of the name parameters
    on connections defined in a `PipelineTaskConnections` class contain a
    template, then a default template value must be specified in the
    ``defaultTemplates`` argument. This is done by passing a dictionary with
    keys corresponding to a template identifier, and values corresponding to
    the value to use as a default when formatting the string. For example if
    ``ConnectionClass.calexp.name = '{input}Coadd_calexp'`` then
    ``defaultTemplates`` = {'input': 'deep'}.

    Once a `PipelineTaskConnections` class is created, it is used in the
    creation of a `PipelineTaskConfig`. This is further documented in the
    documentation of `PipelineTaskConfig`. For the purposes of this
    documentation, the relevant information is that the config class allows
    configuration of connection names by users when running a pipeline.

    Instances of a `PipelineTaskConnections` class are used by the pipeline
    task execution framework to introspect what a corresponding `PipelineTask`
    will require, and what it will produce.

    Examples
    --------
    >>> from lsst.pipe.base import connectionTypes as cT
    >>> from lsst.pipe.base import PipelineTaskConnections
    >>> from lsst.pipe.base import PipelineTaskConfig
    >>> class ExampleConnections(PipelineTaskConnections,
    ...                          dimensions=("A", "B"),
    ...                          defaultTemplates={"foo": "Example"}):
    ...     inputConnection = cT.Input(doc="Example input",
    ...                                dimensions=("A", "B"),
    ...                                storageClass="Exposure",
    ...                                name="{foo}Dataset")
    ...     outputConnection = cT.Output(doc="Example output",
    ...                                  dimensions=("A", "B"),
    ...                                  storageClass="Exposure",
    ...                                  name="{foo}output")
    >>> class ExampleConfig(PipelineTaskConfig,
    ...                     pipelineConnections=ExampleConnections):
    ...     pass
    >>> config = ExampleConfig()
    >>> config.connections.foo = "Modified"
    >>> config.connections.outputConnection = "TotallyDifferent"
    >>> connections = ExampleConnections(config=config)
    >>> assert(connections.inputConnection.name == "ModifiedDataset")
    >>> assert(connections.outputConnection.name == "TotallyDifferent")
    """

    def __init__(self, *, config: 'PipelineTaskConfig' = None):
        # Thaw the class-level frozensets (built by the metaclass) into
        # per-instance mutable sets.  All five containers must be converted;
        # prerequisiteInputs and initOutputs were dropped in extraction and
        # are restored here (they are required by buildDatasetRefs and by
        # consistency with the metaclass container list).
        self.inputs = set(self.inputs)
        self.prerequisiteInputs = set(self.prerequisiteInputs)
        self.outputs = set(self.outputs)
        self.initInputs = set(self.initInputs)
        self.initOutputs = set(self.initOutputs)

        if config is None or not isinstance(config, configMod.PipelineTaskConfig):
            raise ValueError("PipelineTaskConnections must be instantiated with"
                             " a PipelineTaskConfig instance")
        self.config = config
        # Extract the template names that were defined in the config instance
        # by looping over the keys of the defaultTemplates dict specified at
        # class declaration time
        templateValues = {name: getattr(config.connections, name) for name in getattr(self,
                          'defaultTemplates').keys()}
        # Extract the configured value corresponding to each connection
        # variable. I.e. for each connection identifier, populate a override
        # for the connection.name attribute
        self._nameOverrides = {name: getattr(config.connections, name).format(**templateValues)
                               for name in self.allConnections.keys()}

        # connections.name corresponds to a dataset type name, create a reverse
        # mapping that goes from dataset type name to attribute identifier name
        # (variable name) on the connection class
        self._typeNameToVarName = {v: k for k, v in self._nameOverrides.items()}

    def buildDatasetRefs(self, quantum: Quantum) -> typing.Tuple[InputQuantizedConnection,
                                                                 OutputQuantizedConnection]:
        """Builds QuantizedConnections corresponding to input Quantum

        Parameters
        ----------
        quantum : `lsst.daf.butler.Quantum`
            Quantum object which defines the inputs and outputs for a given
            unit of processing

        Returns
        -------
        retVal : `tuple` of (`InputQuantizedConnection`,
            `OutputQuantizedConnection`) Namespaces mapping attribute names
            (identifiers of connections) to butler references defined in the
            input `lsst.daf.butler.Quantum`
        """
        inputDatasetRefs = InputQuantizedConnection()
        outputDatasetRefs = OutputQuantizedConnection()
        # operate on a reference object and an iterable of names of class
        # connection attributes
        for refs, names in zip((inputDatasetRefs, outputDatasetRefs),
                               (itertools.chain(self.inputs, self.prerequisiteInputs), self.outputs)):
            # get a name of a class connection attribute
            for attributeName in names:
                # get the attribute identified by name
                attribute = getattr(self, attributeName)
                # Branch if the attribute dataset type is an input
                if attribute.name in quantum.predictedInputs:
                    # Get the DatasetRefs
                    quantumInputRefs = quantum.predictedInputs[attribute.name]
                    # if the dataset is marked to load deferred, wrap it in a
                    # DeferredDatasetRef
                    if attribute.deferLoad:
                        quantumInputRefs = [DeferredDatasetRef(datasetRef=ref) for ref in quantumInputRefs]
                    # Unpack arguments that are not marked multiples (list of
                    # length one)
                    if not attribute.multiple:
                        if len(quantumInputRefs) > 1:
                            raise ScalarError(
                                f"Received multiple datasets "
                                f"{', '.join(str(r.dataId) for r in quantumInputRefs)} "
                                f"for scalar connection {attributeName} "
                                f"({quantumInputRefs[0].datasetType.name}) "
                                f"of quantum for {quantum.taskName} with data ID {quantum.dataId}."
                            )
                        if len(quantumInputRefs) == 0:
                            continue
                        quantumInputRefs = quantumInputRefs[0]
                    # Add to the QuantizedConnection identifier
                    setattr(refs, attributeName, quantumInputRefs)
                # Branch if the attribute dataset type is an output
                elif attribute.name in quantum.outputs:
                    value = quantum.outputs[attribute.name]
                    # Unpack arguments that are not marked multiples (list of
                    # length one)
                    if not attribute.multiple:
                        value = value[0]
                    # Add to the QuantizedConnection identifier
                    setattr(refs, attributeName, value)
                # Specified attribute is not in inputs or outputs, don't know
                # how to handle, throw
                else:
                    raise ValueError(f"Attribute with name {attributeName} has no counterpoint "
                                     "in input quantum")
        return inputDatasetRefs, outputDatasetRefs

    def adjustQuantum(self, datasetRefMap: NamedKeyDict[DatasetType, typing.Set[DatasetRef]]
                      ) -> NamedKeyDict[DatasetType, typing.Set[DatasetRef]]:
        """Override to make adjustments to `lsst.daf.butler.DatasetRef` objects
        in the `lsst.daf.butler.core.Quantum` during the graph generation stage
        of the activator.

        The base class implementation simply checks that input connections with
        ``multiple`` set to `False` have no more than one dataset.

        Parameters
        ----------
        datasetRefMap : `NamedKeyDict`
            Mapping from dataset type to a `set` of
            `lsst.daf.butler.DatasetRef` objects

        Returns
        -------
        datasetRefMap : `NamedKeyDict`
            Modified mapping of input with possibly adjusted
            `lsst.daf.butler.DatasetRef` objects.

        Raises
        ------
        ScalarError
            Raised if any `Input` or `PrerequisiteInput` connection has
            ``multiple`` set to `False`, but multiple datasets.
        Exception
            Overrides of this function have the option of raising an Exception
            if a field in the input does not satisfy a need for a corresponding
            pipelineTask, i.e. no reference catalogs are found.
        """
        for connection in itertools.chain(iterConnections(self, "inputs"),
                                          iterConnections(self, "prerequisiteInputs")):
            refs = datasetRefMap[connection.name]
            if not connection.multiple and len(refs) > 1:
                raise ScalarError(
                    f"Found multiple datasets {', '.join(str(r.dataId) for r in refs)} "
                    f"for scalar connection {connection.name} ({refs[0].datasetType.name})."
                )
        return datasetRefMap
492 
493 
def iterConnections(connections: "PipelineTaskConnections", connectionType: str) -> typing.Generator:
    """Iterate over all defined connections of the selected type.

    Parameters
    ----------
    connections: `PipelineTaskConnections`
        An instance of a `PipelineTaskConnections` object that will be iterated
        over.
    connectionType: `str`
        The type of connections to iterate over, valid values are inputs,
        outputs, prerequisiteInputs, initInputs, initOutputs.

    Yields
    ------
    connection: `BaseConnection`
        A connection defined on the input connections object of the type
        supplied. The yielded value will be a derived type of
        `BaseConnection`.
    """
    # Each entry in the named container is the attribute name of a connection
    # declared on the connections object; resolve and yield the connection.
    yield from (getattr(connections, attrName)
                for attrName in getattr(connections, connectionType))
lsst.pipe.base.connections.PipelineTaskConnections.initOutputs
initOutputs
Definition: connections.py:363
lsst.pipe.base.connections.PipelineTaskConnections.config
config
Definition: connections.py:368
lsst.pipe.base.connections.PipelineTaskConnectionDict
Definition: connections.py:50
lsst.pipe.base.connections.QuantizedConnection.__delattr__
def __delattr__(self, name)
Definition: connections.py:211
lsst.pipe.base.connections.PipelineTaskConnections.outputs
outputs
Definition: connections.py:361
lsst.pipe.base.connections.PipelineTaskConnectionsMetaclass.__new__
def __new__(cls, name, bases, dct, **kwargs)
Definition: connections.py:109
lsst.pipe.base.connections.PipelineTaskConnectionsMetaclass
Definition: connections.py:95
lsst.pipe.base.connections.QuantizedConnection
Definition: connections.py:190
pex.config.history.format
def format(config, name=None, writeSourceLine=True, prefix="", verbose=False)
Definition: history.py:174
ast::append
std::shared_ptr< FrameSet > append(FrameSet const &first, FrameSet const &second)
Construct a FrameSet that performs two transformations in series.
Definition: functional.cc:33
lsst.pipe.base.connections.PipelineTaskConnections.buildDatasetRefs
typing.Tuple[InputQuantizedConnection, OutputQuantizedConnection] buildDatasetRefs(self, Quantum quantum)
Definition: connections.py:385
astshim.keyMap.keyMapContinued.keys
def keys(self)
Definition: keyMapContinued.py:6
lsst.pipe.base.connections.PipelineTaskConnections.inputs
inputs
Definition: connections.py:359
lsst.pipe.base.connections.ScalarError
Definition: connections.py:44
lsst.pipe.base.connections.QuantizedConnection.__setattr__
def __setattr__(self, str name, typing.Union[DatasetRef, typing.List[DatasetRef]] value)
Definition: connections.py:206
lsst.pipe.base.connections.PipelineTaskConnectionDict.__init__
def __init__(self, *args, **kwargs)
Definition: connections.py:63
lsst.pipe.base.connections.InputQuantizedConnection
Definition: connections.py:233
lsst.pipe.base.connections.QuantizedConnection.__init__
def __init__(self, **kwargs)
Definition: connections.py:201
lsst.pipe.base.connections.PipelineTaskConnections.prerequisiteInputs
prerequisiteInputs
Definition: connections.py:360
lsst.pipe.base.connections.DeferredDatasetRef
Definition: connections.py:241
lsst.pipe.base.connections.OutputQuantizedConnection
Definition: connections.py:237
lsst.pipe.base.connections.QuantizedConnection.__iter__
typing.Generator[typing.Tuple[str, typing.Union[DatasetRef, typing.List[DatasetRef]]], None, None] __iter__(self)
Definition: connections.py:215
lsst.pipe.base.connections.PipelineTaskConnectionDict.__setitem__
def __setitem__(self, name, value)
Definition: connections.py:75
lsst.pipe.base.connections.PipelineTaskConnections
Definition: connections.py:254
lsst.pipe.base.connections.PipelineTaskConnectionsMetaclass.__prepare__
def __prepare__(name, bases, **kwargs)
Definition: connections.py:98
lsst.pipe.base.connections.PipelineTaskConnections.adjustQuantum
NamedKeyDict[DatasetType, typing.Set[DatasetRef]] adjustQuantum(self, NamedKeyDict[DatasetType, typing.Set[DatasetRef]] datasetRefMap)
Definition: connections.py:452
lsst.pipe.base.connections.iterConnections
typing.Generator iterConnections(PipelineTaskConnections connections, str connectionType)
Definition: connections.py:494
lsst.pipe.base.connections.PipelineTaskConnections.initInputs
initInputs
Definition: connections.py:362
items
std::vector< SchemaItem< Flag > > * items
Definition: BaseColumnView.cc:142
lsst.pipe.base.connections.PipelineTaskConnections._nameOverrides
_nameOverrides
Definition: connections.py:377
type
table::Key< int > type
Definition: Detector.cc:163
lsst.pipe.base.connections.QuantizedConnection.keys
typing.Generator[str, None, None] keys(self)
Definition: connections.py:226
lsst.pipe.base.connections.PipelineTaskConnections._typeNameToVarName
_typeNameToVarName
Definition: connections.py:383
set
daf::base::PropertySet * set
Definition: fits.cc:912
lsst.pipe.base.connections.PipelineTaskConnections.__init__
def __init__(self, *'PipelineTaskConfig' config=None)
Definition: connections.py:358
lsst.pipe.base.connections.PipelineTaskConnectionsMetaclass.__init__
def __init__(cls, name, bases, dct, **kwargs)
Definition: connections.py:181