LSSTApplications  16.0-10-g0ee56ad+5,16.0-11-ga33d1f2+5,16.0-12-g3ef5c14+3,16.0-12-g71e5ef5+18,16.0-12-gbdf3636+3,16.0-13-g118c103+3,16.0-13-g8f68b0a+3,16.0-15-gbf5c1cb+4,16.0-16-gfd17674+3,16.0-17-g7c01f5c+3,16.0-18-g0a50484+1,16.0-20-ga20f992+8,16.0-21-g0e05fd4+6,16.0-21-g15e2d33+4,16.0-22-g62d8060+4,16.0-22-g847a80f+4,16.0-25-gf00d9b8+1,16.0-28-g3990c221+4,16.0-3-gf928089+3,16.0-32-g88a4f23+5,16.0-34-gd7987ad+3,16.0-37-gc7333cb+2,16.0-4-g10fc685+2,16.0-4-g18f3627+26,16.0-4-g5f3a788+26,16.0-5-gaf5c3d7+4,16.0-5-gcc1f4bb+1,16.0-6-g3b92700+4,16.0-6-g4412fcd+3,16.0-6-g7235603+4,16.0-69-g2562ce1b+2,16.0-8-g14ebd58+4,16.0-8-g2df868b+1,16.0-8-g4cec79c+6,16.0-8-gadf6c7a+1,16.0-8-gfc7ad86,16.0-82-g59ec2a54a+1,16.0-9-g5400cdc+2,16.0-9-ge6233d7+5,master-g2880f2d8cf+3,v17.0.rc1
LSSTDataManagementBasePackage
argumentParser.py
Go to the documentation of this file.
1 #
2 # LSST Data Management System
3 # Copyright 2008-2015 AURA/LSST.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <https://www.lsstcorp.org/LegalNotices/>.
21 #
22 __all__ = ["ArgumentParser", "ConfigFileAction", "ConfigValueAction", "DataIdContainer",
23  "DatasetArgument", "ConfigDatasetType", "InputOnlyArgumentParser"]
24 
25 import abc
26 import argparse
27 import collections
28 import fnmatch
29 import itertools
30 import logging
31 import os
32 import re
33 import shlex
34 import sys
35 import shutil
36 import textwrap
37 
38 import lsst.utils
39 import lsst.pex.config as pexConfig
41 import lsst.log as lsstLog
42 import lsst.daf.persistence as dafPersist
43 
44 DEFAULT_INPUT_NAME = "PIPE_INPUT_ROOT"
45 DEFAULT_CALIB_NAME = "PIPE_CALIB_ROOT"
46 DEFAULT_OUTPUT_NAME = "PIPE_OUTPUT_ROOT"
47 
48 
def _fixPath(defName, path):
    """Expand a path against an optional environment-variable root.

    Parameters
    ----------
    defName : `str`
        Name of an environment variable containing a default root path;
        if the environment variable does not exist then the path is
        relative to the current working directory.
    path : `str`
        Path relative to the default root path.

    Returns
    -------
    abspath : `str` or `None`
        Path that has been expanded, or `None` if the environment
        variable does not exist and ``path`` is `None`.
    """
    root = os.environ.get(defName)
    if root is None:
        return None if path is None else os.path.abspath(path)
    # ``path or ""`` guards against None; an absolute ``path`` simply
    # wins inside os.path.join, matching the intended override behavior.
    return os.path.abspath(os.path.join(root, path or ""))
73 
74 
76  """Container for data IDs and associated data references.
77 
78  Parameters
79  ----------
80  level : `str`
81  The lowest hierarchy level to descend to for this dataset type,
82  for example `"amp"` for `"raw"` or `"ccd"` for `"calexp"`.
83  Use `""` to use the mapper's default for the dataset type.
84  This class does not support `None`, but if it did, `None`
85  would mean the level should not be restricted.
86 
87  Notes
88  -----
89  Override this class for data IDs that require special handling to be
90  converted to ``data references``, and specify the override class
91  as ``ContainerClass`` for ``add_id_argument``.
92 
93  If you don't want the argument parser to compute data references,
94  specify ``doMakeDataRefList=False`` in ``add_id_argument``.
95  """
96 
    def __init__(self, level=None):
        """Create an empty container; ``datasetType`` must be set later
        via `setDatasetType` before data IDs can be validated.
        """
        self.datasetType = None
        """Dataset type of the data references (`str`).
        """
        self.level = level
        """See parameter ``level`` (`str`).
        """
        self.idList = []
        """List of data IDs specified on the command line for the
        appropriate data ID argument (`list` of `dict`).
        """
        self.refList = []
        """List of data references for the data IDs in ``idList``
        (`list` of `lsst.daf.persistence.ButlerDataRef`).
        Elements will be omitted if the corresponding data is not found.
        The list will be empty when returned by ``parse_args`` if
        ``doMakeDataRefList=False`` was specified in ``add_id_argument``.
        """
115 
116  def setDatasetType(self, datasetType):
117  """Set actual dataset type, once it is known.
118 
119  Parameters
120  ----------
121  datasetType : `str`
122  Dataset type.
123 
124  Notes
125  -----
126  The reason ``datasetType`` is not a constructor argument is that
127  some subclasses do not know the dataset type until the command
128  is parsed. Thus, to reduce special cases in the code,
129  ``datasetType`` is always set after the command is parsed.
130  """
131  self.datasetType = datasetType
132 
    def castDataIds(self, butler):
        """Validate data IDs and cast them to the correct type
        (modify idList in place).

        This code casts the values in the data IDs dicts in `dataIdList`
        to the type required by the butler. Data IDs are read from the
        command line as `str`, but the butler requires some values to be
        other types. For example "visit" values should be `int`.

        Parameters
        ----------
        butler : `lsst.daf.persistence.Butler`
            Data butler.

        Raises
        ------
        RuntimeError
            If `setDatasetType` was not called first.
        KeyError
            If the butler cannot supply keys for ``datasetType`` at
            ``level``.
        TypeError
            If a data ID value cannot be cast to the required type.
        """
        if self.datasetType is None:
            raise RuntimeError("Must call setDatasetType first")
        try:
            # Mapping of data ID key -> required value type, e.g. visit -> int.
            idKeyTypeDict = butler.getKeys(datasetType=self.datasetType, level=self.level)
        except KeyError as e:
            msg = "Cannot get keys for datasetType %s at level %s" % (self.datasetType, self.level)
            raise KeyError(msg) from e

        for dataDict in self.idList:
            for key, strVal in dataDict.items():
                try:
                    keyType = idKeyTypeDict[key]
                except KeyError:
                    # OK, assume that it's a valid key and guess that it's a string
                    keyType = str

                    log = lsstLog.Log.getDefaultLogger()
                    log.warn("Unexpected ID %s; guessing type is \"%s\"" %
                             (key, 'str' if keyType == str else keyType))
                    # Remember the guess so the warning fires only once per key.
                    idKeyTypeDict[key] = keyType

                if keyType != str:
                    try:
                        castVal = keyType(strVal)
                    except Exception:
                        raise TypeError("Cannot cast value %r to %s for ID key %r" % (strVal, keyType, key,))
                    dataDict[key] = castVal
174 
175  def makeDataRefList(self, namespace):
176  """Compute refList based on idList.
177 
178  Parameters
179  ----------
180  namespace : `argparse.Namespace`
181  Results of parsing command-line. The ``butler`` and ``log``
182  elements must be set.
183 
184  Notes
185  -----
186  Not called if ``add_id_argument`` was called with
187  ``doMakeDataRefList=False``.
188  """
189  if self.datasetType is None:
190  raise RuntimeError("Must call setDatasetType first")
191  butler = namespace.butler
192  for dataId in self.idList:
193  refList = dafPersist.searchDataRefs(butler, datasetType=self.datasetType,
194  level=self.level, dataId=dataId)
195  if not refList:
196  namespace.log.warn("No data found for dataId=%s", dataId)
197  continue
198  self.refList += refList
199 
200 
202  """data ID argument, used by `ArgumentParser.add_id_argument`.
203 
204  Parameters
205  ----------
206  name : `str`
207  Name of identifier (argument name without dashes).
208  datasetType : `str`
209  Type of dataset; specify a string for a fixed dataset type
210  or a `DatasetArgument` for a dynamic dataset type (e.g.
211  one specified by a command-line argument).
212  level : `str`
213  The lowest hierarchy level to descend to for this dataset type,
214  for example `"amp"` for `"raw"` or `"ccd"` for `"calexp"`.
215  Use `""` to use the mapper's default for the dataset type.
216  Some container classes may also support `None`, which means
217  the level should not be restricted; however the default class,
218  `DataIdContainer`, does not support `None`.
219  doMakeDataRefList : `bool`, optional
220  If `True` (default), construct data references.
221  ContainerClass : `class`, optional
222  Class to contain data IDs and data references; the default class
223  `DataIdContainer` will work for many, but not all, cases.
224  For example if the dataset type is specified on the command line
225  then use `DynamicDatasetType`.
226  """
227 
    def __init__(self, name, datasetType, level, doMakeDataRefList=True, ContainerClass=DataIdContainer):
        # Leading dashes belong to the argparse option spelling, not the
        # identifier itself; reject them up front.
        if name.startswith("-"):
            raise RuntimeError("Name %s must not start with -" % (name,))
        self.name = name
        self.datasetType = datasetType
        self.level = level
        self.doMakeDataRefList = bool(doMakeDataRefList)
        self.ContainerClass = ContainerClass
        # Argument name with leading dashes removed (a no-op here given the
        # check above; kept for safety).
        self.argName = name.lstrip("-")
237 
    @property
    def isDynamicDatasetType(self):
        """`True` if the dataset type is dynamic (that is, specified
        on the command line) (`bool`, read-only).
        """
        return isinstance(self.datasetType, DynamicDatasetType)
244 
245  def getDatasetType(self, namespace):
246  """Get the dataset type as a string.
247 
248  Parameters
249  ----------
250  namespace
251  Parsed command.
252 
253  Returns
254  -------
255  datasetType : `str`
256  Dataset type.
257  """
258  if self.isDynamicDatasetType:
259  return self.datasetType.getDatasetType(namespace)
260  else:
261  return self.datasetType
262 
263 
class DynamicDatasetType(metaclass=abc.ABCMeta):
    """Abstract base class for dataset types that are resolved from
    parsed command-line arguments rather than fixed up front.
    """

    def addArgument(self, parser, idName):
        """Optionally add a command-line argument that names the
        dataset type.

        Parameters
        ----------
        parser : `ArgumentParser`
            Argument parser to add the argument to.
        idName : `str`
            Name of data ID argument, without the leading ``"--"``,
            e.g. ``"id"``.

        Notes
        -----
        The default implementation adds nothing; subclasses that need a
        companion argument override this.
        """
        pass

    @abc.abstractmethod
    def getDatasetType(self, namespace):
        """Resolve the dataset type from parsed command-line arguments.

        Returns
        -------
        datasetType : `str`
            Dataset type.
        """
        raise NotImplementedError("Subclasses must override")
298 
299 
301  """Dataset type specified by a command-line argument.
302 
303  Parameters
304  ----------
305  name : `str`, optional
306  Name of command-line argument (including leading "--",
307  if appropriate) whose value is the dataset type.
308  If `None`, uses ``--idName_dstype`` where idName
309  is the name of the data ID argument (e.g. "id").
310  help : `str`, optional
311  Help string for the command-line argument.
312  default : `object`, optional
313  Default value. If `None`, then the command-line option is required.
314  This argument is ignored if the command-line argument is positional
315  (name does not start with "-") because positional arguments do
316  not support default values.
317  """
318 
319  def __init__(self,
320  name=None,
321  help="dataset type to process from input data repository",
322  default=None,
323  ):
324  DynamicDatasetType.__init__(self)
325  self.name = name
326  self.help = help
327  self.default = default
328 
329  def getDatasetType(self, namespace):
330  """Get the dataset type as a string, from the appropriate
331  command-line argument.
332 
333  Parameters
334  ----------
335  namespace :
336  Parsed command.
337 
338  Returns
339  -------
340  datasetType : `str`
341  Dataset type.
342  """
343  argName = self.name.lstrip("-")
344  return getattr(namespace, argName)
345 
346  def addArgument(self, parser, idName):
347  """Add a command-line argument to specify the dataset type name.
348 
349  Parameters
350  ----------
351  parser : `ArgumentParser`
352  Argument parser.
353  idName : `str`
354  Data ID.
355 
356  Notes
357  -----
358  Also sets the `name` attribute if it is currently `None`.
359  """
360  help = self.help if self.help else "dataset type for %s" % (idName,)
361  if self.name is None:
362  self.name = "--%s_dstype" % (idName,)
363  requiredDict = dict()
364  if self.name.startswith("-"):
365  requiredDict = dict(required=self.default is None)
366  parser.add_argument(
367  self.name,
368  default=self.default,
369  help=help,
370  **requiredDict)
371 
372 
374  """Dataset type specified by a config parameter.
375 
376  Parameters
377  ----------
378  name : `str`
379  Name of config option whose value is the dataset type.
380  """
381 
382  def __init__(self, name):
383  DynamicDatasetType.__init__(self)
384  self.name = name
385 
386  def getDatasetType(self, namespace):
387  """Return the dataset type as a string, from the appropriate
388  config field.
389 
390  Parameters
391  ----------
392  namespace : `argparse.Namespace`
393  Parsed command.
394  """
395  # getattr does not work reliably if the config field name is
396  # dotted, so step through one level at a time
397  keyList = self.name.split(".")
398  value = namespace.config
399  for key in keyList:
400  try:
401  value = getattr(value, key)
402  except KeyError:
403  raise RuntimeError("Cannot find config parameter %r" % (self.name,))
404  return value
405 
406 
407 class ArgumentParser(argparse.ArgumentParser):
408  """Argument parser for command-line tasks that is based on
409  `argparse.ArgumentParser`.
410 
411  Parameters
412  ----------
413  name : `str`
414  Name of top-level task; used to identify camera-specific override
415  files.
416  usage : `str`, optional
417  Command-line usage signature.
418  **kwargs
419  Additional keyword arguments for `argparse.ArgumentParser`.
420 
421  Notes
422  -----
423  Users may wish to add additional arguments before calling `parse_args`.
424  """
425  # I would prefer to check data ID keys and values as they are parsed,
426  # but the required information comes from the butler, so I have to
427  # construct a butler before I do this checking. Constructing a butler
428  # is slow, so I only want do it once, after parsing the command line,
429  # so as to catch syntax errors quickly.
430 
431  requireOutput = True
432  """Require an output directory to be specified (`bool`)."""
433 
    def __init__(self, name, usage="%(prog)s input [options]", **kwargs):
        self._name = name
        self._dataIdArgDict = {}  # Dict of data identifier specifications, by argument name
        argparse.ArgumentParser.__init__(self,
                                         usage=usage,
                                         fromfile_prefix_chars='@',
                                         epilog=textwrap.dedent("""Notes:
            * --config, --configfile, --id, --loglevel and @file may appear multiple times;
                all values are used, in order left to right
            * @file reads command-line options from the specified file:
                * data may be distributed among multiple lines (e.g. one option per line)
                * data after # is treated as a comment and ignored
                * blank lines and lines starting with # are ignored
            * To specify multiple values for an option, do not use = after the option name:
                * right: --configfile foo bar
                * wrong: --configfile=foo bar
            """),
                                         formatter_class=argparse.RawDescriptionHelpFormatter,
                                         **kwargs)
        # Repository paths are stored raw ("raw*" dests) and resolved
        # later by _parseDirectories, after --rerun is known.
        self.add_argument(metavar='input', dest="rawInput",
                          help="path to input data repository, relative to $%s" % (DEFAULT_INPUT_NAME,))
        self.add_argument("--calib", dest="rawCalib",
                          help="path to input calibration repository, relative to $%s" %
                          (DEFAULT_CALIB_NAME,))
        self.add_argument("--output", dest="rawOutput",
                          help="path to output data repository (need not exist), relative to $%s" %
                          (DEFAULT_OUTPUT_NAME,))
        self.add_argument("--rerun", dest="rawRerun", metavar="[INPUT:]OUTPUT",
                          help="rerun name: sets OUTPUT to ROOT/rerun/OUTPUT; "
                               "optionally sets ROOT to ROOT/rerun/INPUT")
        self.add_argument("-c", "--config", nargs="*", action=ConfigValueAction,
                          help="config override(s), e.g. -c foo=newfoo bar.baz=3", metavar="NAME=VALUE")
        self.add_argument("-C", "--configfile", dest="configfile", nargs="*", action=ConfigFileAction,
                          help="config override file(s)")
        self.add_argument("-L", "--loglevel", nargs="*", action=LogLevelAction,
                          help="logging level; supported levels are [trace|debug|info|warn|error|fatal]",
                          metavar="LEVEL|COMPONENT=LEVEL")
        self.add_argument("--longlog", action="store_true", help="use a more verbose format for the logging")
        self.add_argument("--debug", action="store_true", help="enable debugging output?")
        self.add_argument("--doraise", action="store_true",
                          help="raise an exception on error (else log a message and continue)?")
        self.add_argument("--noExit", action="store_true",
                          help="Do not exit even upon failure (i.e. return a struct to the calling script)")
        self.add_argument("--profile", help="Dump cProfile statistics to filename")
        self.add_argument("--show", nargs="+", default=(),
                          help="display the specified information to stdout and quit "
                               "(unless run is specified).")
        self.add_argument("-j", "--processes", type=int, default=1, help="Number of processes to use")
        self.add_argument("-t", "--timeout", type=float,
                          help="Timeout for multiprocessing; maximum wall time (sec)")
        self.add_argument("--clobber-output", action="store_true", dest="clobberOutput", default=False,
                          help=("remove and re-create the output directory if it already exists "
                                "(safe with -j, but not all other forms of parallel execution)"))
        self.add_argument("--clobber-config", action="store_true", dest="clobberConfig", default=False,
                          help=("backup and then overwrite existing config files instead of checking them "
                                "(safe with -j, but not all other forms of parallel execution)"))
        self.add_argument("--no-backup-config", action="store_true", dest="noBackupConfig", default=False,
                          help="Don't copy config to file~N backup.")
        self.add_argument("--clobber-versions", action="store_true", dest="clobberVersions", default=False,
                          help=("backup and then overwrite existing package versions instead of checking"
                                "them (safe with -j, but not all other forms of parallel execution)"))
        self.add_argument("--no-versions", action="store_true", dest="noVersions", default=False,
                          help="don't check package versions; useful for development")
        # Default log4cxx configuration: INFO to stdout, terse pattern.
        lsstLog.configure_prop("""
log4j.rootLogger=INFO, A1
log4j.appender.A1=ConsoleAppender
log4j.appender.A1.Target=System.out
log4j.appender.A1.layout=PatternLayout
log4j.appender.A1.layout.ConversionPattern=%c %p: %m%n
""")

        # Forward all Python logging to lsst.log
        lgr = logging.getLogger()
        lgr.setLevel(logging.INFO)  # same as in log4cxx config above
        lgr.addHandler(lsstLog.LogHandler())
509 
510  def add_id_argument(self, name, datasetType, help, level=None, doMakeDataRefList=True,
511  ContainerClass=DataIdContainer):
512  """Add a data ID argument.
513 
514 
515  Parameters
516  ----------
517  name : `str`
518  Data ID argument (including leading dashes, if wanted).
519  datasetType : `str` or `DynamicDatasetType`-type
520  Type of dataset. Supply a string for a fixed dataset type.
521  For a dynamically determined dataset type, supply
522  a `DynamicDatasetType`, such a `DatasetArgument`.
523  help : `str`
524  Help string for the argument.
525  level : `str`
526  The lowest hierarchy level to descend to for this dataset type,
527  for example `"amp"` for `"raw"` or `"ccd"` for `"calexp"`.
528  Use `""` to use the mapper's default for the dataset type.
529  Some container classes may also support `None`, which means
530  the level should not be restricted; however the default class,
531  `DataIdContainer`, does not support `None`.
532  doMakeDataRefList : bool, optional
533  If `True` (default), construct data references.
534  ContainerClass : `class`, optional
535  Class to contain data IDs and data references; the default class
536  `DataIdContainer` will work for many, but not all, cases.
537  For example if the dataset type is specified on the command line
538  then use `DynamicDatasetType`.
539 
540  Notes
541  -----
542  If ``datasetType`` is an instance of `DatasetArgument`,
543  then add a second argument to specify the dataset type.
544 
545  The associated data is put into ``namespace.<dataIdArgument.name>``
546  as an instance of `ContainerClass`; the container includes fields:
547 
548  - ``idList``: a list of data ID dicts.
549  - ``refList``: a list of `~lsst.daf.persistence.Butler`
550  data references (empty if ``doMakeDataRefList`` is `False`).
551  """
552  argName = name.lstrip("-")
553 
554  if argName in self._dataIdArgDict:
555  raise RuntimeError("Data ID argument %s already exists" % (name,))
556  if argName in set(("camera", "config", "butler", "log", "obsPkg")):
557  raise RuntimeError("Data ID argument %s is a reserved name" % (name,))
558 
559  self.add_argument(name, nargs="*", action=IdValueAction, help=help,
560  metavar="KEY=VALUE1[^VALUE2[^VALUE3...]")
561 
562  dataIdArgument = DataIdArgument(
563  name=argName,
564  datasetType=datasetType,
565  level=level,
566  doMakeDataRefList=doMakeDataRefList,
567  ContainerClass=ContainerClass,
568  )
569 
570  if dataIdArgument.isDynamicDatasetType:
571  datasetType.addArgument(parser=self, idName=argName)
572 
573  self._dataIdArgDict[argName] = dataIdArgument
574 
    def parse_args(self, config, args=None, log=None, override=None):
        """Parse arguments for a command-line task.

        Parameters
        ----------
        config : `lsst.pex.config.Config`
            Config for the task being run.
        args : `list`, optional
            Argument list; if `None` then ``sys.argv[1:]`` is used.
        log : `lsst.log.Log`, optional
            `~lsst.log.Log` instance; if `None` use the default log.
        override : callable, optional
            A config override function. It must take the root config object
            as its only argument and must modify the config in place.
            This function is called after camera-specific overrides files
            are applied, and before command-line config overrides
            are applied (thus allowing the user the final word).

        Returns
        -------
        namespace : `argparse.Namespace`
            A `~argparse.Namespace` instance containing fields:

            - ``camera``: camera name.
            - ``config``: the supplied config with all overrides applied,
              validated and frozen.
            - ``butler``: a `lsst.daf.persistence.Butler` for the data.
            - An entry for each of the data ID arguments registered by
              `add_id_argument`, of the type passed to its ``ContainerClass``
              keyword (`~lsst.pipe.base.DataIdContainer` by default). It
              includes public elements ``idList`` and ``refList``.
            - ``log``: a `lsst.log` Log.
            - An entry for each command-line argument,
              with the following exceptions:

              - config is the supplied config, suitably updated.
              - configfile, id and loglevel are all missing.
            - ``obsPkg``: name of the ``obs_`` package for this camera.
        """
        if args is None:
            args = sys.argv[1:]

        # The first positional argument must be the input repo; if it is
        # missing or looks like an option, print help and exit early.
        if len(args) < 1 or args[0].startswith("-") or args[0].startswith("@"):
            self.print_help()
            if len(args) == 1 and args[0] in ("-h", "--help"):
                self.exit()
            else:
                self.exit("%s: error: Must specify input as first argument" % self.prog)

        # Note that --rerun may change namespace.input, but if it does
        # we verify that the new input has the same mapper class.
        namespace = argparse.Namespace()
        namespace.input = _fixPath(DEFAULT_INPUT_NAME, args[0])
        if not os.path.isdir(namespace.input):
            self.error("Error: input=%r not found" % (namespace.input,))

        namespace.config = config
        namespace.log = log if log is not None else lsstLog.Log.getDefaultLogger()
        mapperClass = dafPersist.Butler.getMapperClass(namespace.input)
        namespace.camera = mapperClass.getCameraName()
        namespace.obsPkg = mapperClass.getPackageName()

        # Camera hook and override files run before command-line -c/-C
        # overrides, so the command line has the final word.
        self.handleCamera(namespace)

        self._applyInitialOverrides(namespace)
        if override is not None:
            override(namespace.config)

        # Add data ID containers to namespace
        for dataIdArgument in self._dataIdArgDict.values():
            setattr(namespace, dataIdArgument.name, dataIdArgument.ContainerClass(level=dataIdArgument.level))

        namespace = argparse.ArgumentParser.parse_args(self, args=args, namespace=namespace)
        del namespace.configfile

        self._parseDirectories(namespace)

        if namespace.clobberOutput:
            if namespace.output is None:
                self.error("--clobber-output is only valid with --output or --rerun")
            elif namespace.output == namespace.input:
                self.error("--clobber-output is not valid when the output and input repos are the same")
            if os.path.exists(namespace.output):
                namespace.log.info("Removing output repo %s for --clobber-output", namespace.output)
                shutil.rmtree(namespace.output)

        namespace.log.debug("input=%s", namespace.input)
        namespace.log.debug("calib=%s", namespace.calib)
        namespace.log.debug("output=%s", namespace.output)

        obeyShowArgument(namespace.show, namespace.config, exit=False)

        # No environment variable or --output or --rerun specified.
        if self.requireOutput and namespace.output is None and namespace.rerun is None:
            self.error("no output directory specified.\n"
                       "An output directory must be specified with the --output or --rerun\n"
                       "command-line arguments.\n")

        butlerArgs = {}  # common arguments for butler elements
        if namespace.calib:
            butlerArgs = {'mapperArgs': {'calibRoot': namespace.calib}}
        if namespace.output:
            outputs = {'root': namespace.output, 'mode': 'rw'}
            inputs = {'root': namespace.input}
            inputs.update(butlerArgs)
            outputs.update(butlerArgs)
            namespace.butler = dafPersist.Butler(inputs=inputs, outputs=outputs)
        else:
            outputs = {'root': namespace.input, 'mode': 'rw'}
            outputs.update(butlerArgs)
            namespace.butler = dafPersist.Butler(outputs=outputs)

        # convert data in each of the identifier lists to proper types
        # this is done after constructing the butler,
        # hence after parsing the command line,
        # because it takes a long time to construct a butler
        self._processDataIds(namespace)
        if "data" in namespace.show:
            for dataIdName in self._dataIdArgDict.keys():
                for dataRef in getattr(namespace, dataIdName).refList:
                    print("%s dataRef.dataId = %s" % (dataIdName, dataRef.dataId))

        if namespace.show and "run" not in namespace.show:
            sys.exit(0)

        if namespace.debug:
            try:
                import debug
                assert debug  # silence pyflakes
            except ImportError:
                sys.stderr.write("Warning: no 'debug' module found\n")
                namespace.debug = False

        del namespace.loglevel

        if namespace.longlog:
            # Switch to a verbose pattern that includes timestamps and
            # source locations.
            lsstLog.configure_prop("""
log4j.rootLogger=INFO, A1
log4j.appender.A1=ConsoleAppender
log4j.appender.A1.Target=System.out
log4j.appender.A1.layout=PatternLayout
log4j.appender.A1.layout.ConversionPattern=%-5p %d{yyyy-MM-ddThh:mm:ss.sss} %c (%X{LABEL})(%F:%L)- %m%n
""")
        del namespace.longlog

        namespace.config.validate()
        namespace.config.freeze()

        return namespace
724 
    def _parseDirectories(self, namespace):
        """Parse input, output and calib directories.

        This allows for hacking the directories, e.g., to include a
        "rerun".
        Modifications are made to the 'namespace' object in-place.

        Reads the raw ``rawInput``/``rawCalib``/``rawOutput``/``rawRerun``
        attributes set during argument parsing, resolves them into
        ``input``/``calib``/``output``/``rerun``, then deletes the raw
        attributes.
        """
        mapperClass = dafPersist.Butler.getMapperClass(_fixPath(DEFAULT_INPUT_NAME, namespace.rawInput))
        namespace.calib = _fixPath(DEFAULT_CALIB_NAME, namespace.rawCalib)

        # If an output directory is specified, process it and assign it to the namespace
        if namespace.rawOutput:
            namespace.output = _fixPath(DEFAULT_OUTPUT_NAME, namespace.rawOutput)
        else:
            namespace.output = None

        # This section processes the rerun argument.
        # If rerun is specified as a colon separated value,
        # it will be parsed as an input and output.
        # The input value will be overridden if previously specified
        # (but a check is made to make sure both inputs use
        # the same mapper)
        if namespace.rawRerun:
            if namespace.output:
                self.error("Error: cannot specify both --output and --rerun")
            namespace.rerun = namespace.rawRerun.split(":")
            rerunDir = [os.path.join(namespace.input, "rerun", dd) for dd in namespace.rerun]
            modifiedInput = False
            if len(rerunDir) == 2:
                namespace.input, namespace.output = rerunDir
                modifiedInput = True
            elif len(rerunDir) == 1:
                namespace.output = rerunDir[0]
                # A _parent link means the rerun output chains back to an
                # existing input repository; follow it.
                if os.path.exists(os.path.join(namespace.output, "_parent")):
                    namespace.input = os.path.realpath(os.path.join(namespace.output, "_parent"))
                    modifiedInput = True
            else:
                self.error("Error: invalid argument for --rerun: %s" % namespace.rerun)
            if modifiedInput and dafPersist.Butler.getMapperClass(namespace.input) != mapperClass:
                self.error("Error: input directory specified by --rerun must have the same mapper as INPUT")
        else:
            namespace.rerun = None
        del namespace.rawInput
        del namespace.rawCalib
        del namespace.rawOutput
        del namespace.rawRerun
771 
    def _processDataIds(self, namespace):
        """Process the parsed data for each data ID argument in an
        `~argparse.Namespace`.

        Processing includes:

        - Validate data ID keys.
        - Cast the data ID values to the correct type.
        - Compute data references from data IDs.

        Parameters
        ----------
        namespace : `argparse.Namespace`
            Parsed namespace. These attributes are read:

            - ``butler``
            - ``log``
            - ``config``, if any dynamic dataset types are set by
              a config parameter.
            - Dataset type arguments (e.g. ``id_dstype``), if any dynamic
              dataset types are specified by such

            These attributes are modified:

            - ``<name>`` for each data ID argument registered using
              `add_id_argument` with name ``<name>``.
        """
        for dataIdArgument in self._dataIdArgDict.values():
            dataIdContainer = getattr(namespace, dataIdArgument.name)
            dataIdContainer.setDatasetType(dataIdArgument.getDatasetType(namespace))
            if dataIdArgument.doMakeDataRefList:
                try:
                    dataIdContainer.castDataIds(butler=namespace.butler)
                except (KeyError, TypeError) as e:
                    # failure of castDataIds indicates invalid command args
                    self.error(e)

                # failure of makeDataRefList indicates a bug
                # that wants a traceback
                dataIdContainer.makeDataRefList(namespace)
812 
813  def _applyInitialOverrides(self, namespace):
814  """Apply obs-package-specific and camera-specific config
815  override files, if found
816 
817  Parameters
818  ----------
819  namespace : `argparse.Namespace`
820  Parsed namespace. These attributes are read:
821 
822  - ``obsPkg``
823 
824  Look in the package namespace.obsPkg for files:
825 
826  - ``config/<task_name>.py``
827  - ``config/<camera_name>/<task_name>.py`` and load if found.
828  """
829  obsPkgDir = lsst.utils.getPackageDir(namespace.obsPkg)
830  fileName = self._name + ".py"
831  for filePath in (
832  os.path.join(obsPkgDir, "config", fileName),
833  os.path.join(obsPkgDir, "config", namespace.camera, fileName),
834  ):
835  if os.path.exists(filePath):
836  namespace.log.info("Loading config overrride file %r", filePath)
837  namespace.config.load(filePath)
838  else:
839  namespace.log.debug("Config override file does not exist: %r", filePath)
840 
    def handleCamera(self, namespace):
        """Perform camera-specific operations before parsing the command-line.

        Parameters
        ----------
        namespace : `argparse.Namespace`
            Namespace (an `argparse.Namespace`) with the following fields:

            - ``camera``: the camera name.
            - ``config``: the config passed to parse_args, with no overrides applied.
            - ``obsPkg``: the ``obs_`` package for this camera.
            - ``log``: a `lsst.log` Log.

        Notes
        -----
        The default implementation does nothing.  Subclasses may override
        this hook to adjust the config or namespace for a specific camera.
        """
        pass
859 
860  def convert_arg_line_to_args(self, arg_line):
861  """Allow files of arguments referenced by ``@<path>`` to contain
862  multiple values on each line.
863 
864  Parameters
865  ----------
866  arg_line : `str`
867  Line of text read from an argument file.
868  """
869  arg_line = arg_line.strip()
870  if not arg_line or arg_line.startswith("#"):
871  return
872  for arg in shlex.split(arg_line, comments=True, posix=True):
873  if not arg.strip():
874  continue
875  yield arg
876 
877  def addReuseOption(self, choices):
878  """Add a "--reuse-outputs-from SUBTASK" option to the argument
879  parser.
880 
881  CmdLineTasks that can be restarted at an intermediate step using
882  outputs from earlier (but still internal) steps should use this
883  method to allow the user to control whether that happens when
884  outputs from earlier steps are present.
885 
886  Parameters
887  ----------
888  choices : sequence
889  A sequence of string names (by convention, top-level subtasks)
890  that identify the steps that could be skipped when their
891  outputs are already present. The list is ordered, so when the
892  user specifies one step on the command line, all previous steps
893  may be skipped as well. In addition to the choices provided,
894  users may pass "all" to indicate that all steps may be thus
895  skipped.
896 
897  When this method is called, the ``namespace`` object returned by
898  ``parse_args`` will contain a ``reuse`` attribute containing
899  a list of all steps that should be skipped if their outputs
900  are already present.
901  If no steps should be skipped, the ``reuse`` will be an empty list.
902  """
903  choices = list(choices)
904  choices.append("all")
905  self.add_argument("--reuse-outputs-from", dest="reuse", choices=choices,
906  default=[], action=ReuseAction,
907  help=("Skip the given subtask and its predecessors and reuse their outputs "
908  "if those outputs already exist. Use 'all' to specify all subtasks."))
909 
910 
912  """`ArgumentParser` for command-line tasks that don't write any output.
913  """
914 
915  requireOutput = False # We're not going to write anything
916 
917 
def getTaskDict(config, taskDict=None, baseName=""):
    """Get a dictionary of task info for all subtasks in a config.

    Parameters
    ----------
    config : `lsst.pex.config.Config`
        Configuration to process.
    taskDict : `dict`, optional
        Users should not specify this argument. Supports recursion.
        If provided, taskDict is updated in place, else a new `dict`
        is started.
    baseName : `str`, optional
        Users should not specify this argument. It is only used for
        recursion: if a non-empty string then a period is appended
        and the result is used as a prefix for additional entries
        in taskDict; otherwise no prefix is used.

    Returns
    -------
    taskDict : `dict`
        Keys are config field names, values are task names.

    Notes
    -----
    This function is designed to be called recursively.
    The user should call with only a config (leaving taskDict and baseName
    at their default values).
    """
    if taskDict is None:
        taskDict = {}
    for fieldName, field in config.items():
        # Only ConfigurableField-like entries expose both "value" and "target".
        if not (hasattr(field, "value") and hasattr(field, "target")):
            continue
        subConfig = field.value
        if not isinstance(subConfig, pexConfig.Config):
            continue
        subBaseName = "%s.%s" % (baseName, fieldName) if baseName else fieldName
        try:
            taskName = "%s.%s" % (field.target.__module__, field.target.__name__)
        except Exception:
            # Fall back to repr for targets without the usual attributes.
            taskName = repr(field.target)
        taskDict[subBaseName] = taskName
        getTaskDict(config=subConfig, taskDict=taskDict, baseName=subBaseName)
    return taskDict
960 
961 
def obeyShowArgument(showOpts, config=None, exit=False):
    """Process arguments specified with ``--show`` (but ignores
    ``"data"``).

    Parameters
    ----------
    showOpts : `list` of `str`
        List of options passed to ``--show``.
    config : optional
        The provided config.
    exit : bool, optional
        Exit if ``"run"`` isn't included in ``showOpts``.

    Notes
    -----
    Supports the following options in showOpts:

    - ``config[=PAT]``. Dump all the config entries, or just the ones that
      match the glob pattern.
    - ``history=PAT``. Show where the config entries that match the glob
      pattern were set.
    - ``tasks``. Show task hierarchy.
    - ``data``. Ignored; to be processed by caller.
    - ``run``. Keep going (the default behaviour is to exit if
      ``--show`` is specified).

    Calls ``sys.exit(1)`` if any other option found.
    """
    if not showOpts:
        return

    for what in showOpts:
        # Each option is either "command" or "command=args".
        showCommand, showArgs = what.split("=", 1) if "=" in what else (what, "")

        if showCommand == "config":
            # Accept an optional leading "config." on the pattern.
            matConfig = re.search(r"^(?:config.)?(.+)?", showArgs)
            pattern = matConfig.group(1)
            if pattern:
                class FilteredStream:
                    """A file object that only prints lines
                    that match the glob "pattern".

                    N.b. Newlines are silently discarded and reinserted;
                    crude but effective.
                    """

                    def __init__(self, pattern):
                        # obey case if pattern isn't lowercase or requests NOIGNORECASE
                        mat = re.search(r"(.*):NOIGNORECASE$", pattern)

                        if mat:
                            pattern = mat.group(1)
                            self._pattern = re.compile(fnmatch.translate(pattern))
                        else:
                            if pattern != pattern.lower():
                                print(u"Matching \"%s\" without regard to case "
                                      "(append :NOIGNORECASE to prevent this)" % (pattern,), file=sys.stdout)
                            self._pattern = re.compile(fnmatch.translate(pattern), re.IGNORECASE)

                    def write(self, showStr):
                        showStr = showStr.rstrip()
                        # Strip off doc string line(s) and cut off
                        # at "=" for string matching
                        matchStr = showStr.split("\n")[-1].split("=")[0]
                        if self._pattern.search(matchStr):
                            print(u"\n" + showStr)

                fd = FilteredStream(pattern)
            else:
                fd = sys.stdout

            config.saveToStream(fd, "config")
        elif showCommand == "history":
            matHistory = re.search(r"^(?:config.)?(.+)?", showArgs)
            globPattern = matHistory.group(1)
            if not globPattern:
                print("Please provide a value with --show history (e.g. history=*.doXXX)", file=sys.stderr)
                sys.exit(1)

            error = False
            for i, pattern in enumerate(fnmatch.filter(config.names(), globPattern)):
                if i > 0:
                    print("")

                pattern = pattern.split(".")
                cpath, cname = pattern[:-1], pattern[-1]
                hconfig = config  # the config that we're interested in
                # NOTE(review): this inner enumerate rebinds loop variable ``i``;
                # harmless because the outer ``i`` is only read at the top of
                # each outer iteration, but a different name would be clearer.
                for i, cpt in enumerate(cpath):
                    try:
                        hconfig = getattr(hconfig, cpt)
                    except AttributeError:
                        print("Error: configuration %s has no subconfig %s" %
                              (".".join(["config"] + cpath[:i]), cpt), file=sys.stderr)
                        error = True

                try:
                    print(pexConfig.history.format(hconfig, cname))
                except KeyError:
                    print("Error: %s has no field %s" % (".".join(["config"] + cpath), cname),
                          file=sys.stderr)
                    error = True

            if error:
                sys.exit(1)

        elif showCommand == "data":
            pass
        elif showCommand == "run":
            pass
        elif showCommand == "tasks":
            showTaskHierarchy(config)
        else:
            print(u"Unknown value for show: %s (choose from '%s')" %
                  (what, "', '".join("config[=XXX] data history=XXX tasks run".split())), file=sys.stderr)
            sys.exit(1)

    if exit and "run" not in showOpts:
        sys.exit(0)
1080 
1081 
def showTaskHierarchy(config):
    """Print task hierarchy to stdout.

    Parameters
    ----------
    config : `lsst.pex.config.Config`
        Configuration to process.
    """
    print(u"Subtasks:")
    # Sort by field name for a stable, readable listing.
    for fieldName, taskName in sorted(getTaskDict(config=config).items()):
        print(u"%s: %s" % (fieldName, taskName))
1097 
1098 
class ConfigValueAction(argparse.Action):
    """argparse action callback to override config parameters using
    name=value pairs from the command-line.
    """

    def __call__(self, parser, namespace, values, option_string):
        """Override one or more config name value pairs.

        Parameters
        ----------
        parser : `argparse.ArgumentParser`
            Argument parser.
        namespace : `argparse.Namespace`
            Parsed command. The ``namespace.config`` attribute is updated.
        values : `list`
            A list of ``configItemName=value`` pairs.
        option_string : `str`
            Option value specified by the user.
        """
        if namespace.config is None:
            return
        for nameValue in values:
            fieldName, _, rawValue = nameValue.partition("=")
            if not rawValue:
                parser.error("%s value %s must be in form name=value" % (option_string, nameValue))

            # First attempt: assign the raw string directly.
            try:
                setDottedAttr(namespace.config, fieldName, rawValue)
                continue
            except AttributeError:
                parser.error("no config field: %s" % (fieldName,))
            except Exception:
                pass

            # Second attempt: evaluate the string as a Python expression
            # for non-string fields.  NOTE: eval of a command-line value;
            # acceptable because the user already controls this process.
            try:
                value = eval(rawValue, {})
            except Exception:
                parser.error("cannot parse %r as a value for %s" % (rawValue, fieldName))
            try:
                setDottedAttr(namespace.config, fieldName, value)
            except Exception as e:
                parser.error("cannot set config.%s=%r: %s" % (fieldName, value, e))
1139 
1140 
class ConfigFileAction(argparse.Action):
    """argparse action to load config overrides from one or more files.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        """Load one or more files of config overrides.

        Parameters
        ----------
        parser : `argparse.ArgumentParser`
            Argument parser.
        namespace : `argparse.Namespace`
            Parsed command. The following attributes are updated by this
            method: ``namespace.config``.
        values : `list`
            A list of data config file paths.
        option_string : `str`, optional
            Option value specified by the user.
        """
        if namespace.config is None:
            return
        for path in values:
            try:
                namespace.config.load(path)
            except Exception as e:
                # Report a load failure as a command-line error.
                parser.error("cannot load config file %r: %s" % (path, e))
1167 
1168 
class IdValueAction(argparse.Action):
    """argparse action callback to process a data ID into a dict.
    """

    def __call__(self, parser, namespace, values, option_string):
        """Parse ``--id`` data and append results to
        ``namespace.<argument>.idList``.

        Parameters
        ----------
        parser : `ArgumentParser`
            Argument parser.
        namespace : `argparse.Namespace`
            Parsed command (an instance of argparse.Namespace).
            The following attributes are updated:

            - ``<idName>.idList``, where ``<idName>`` is the name of the
              ID argument, for instance ``"id"`` for ID argument ``--id``.
        values : `list`
            A list of data IDs; see Notes below.
        option_string : `str`
            Option value specified by the user.

        Notes
        -----
        The data format is::

            key1=value1_1[^value1_2[^value1_3...]
            key2=value2_1[^value2_2[^value2_3...]...

        The values (e.g. ``value1_1``) may either be a string,
        or of the form ``"int..int"`` (e.g. ``"1..3"``) which is
        interpreted as ``"1^2^3"`` (inclusive, unlike a python range).
        So ``"0^2..4^7..9"`` is equivalent to ``"0^2^3^4^7^8^9"``.
        You may also specify a stride: ``"1..5:2"`` is ``"1^3^5"``.

        The cross product is computed for keys with multiple values.
        For example::

            --id visit 1^2 ccd 1,1^2,2

        results in the following data ID dicts being appended to
        ``namespace.<argument>.idList``:

            {"visit":1, "ccd":"1,1"}
            {"visit":2, "ccd":"1,1"}
            {"visit":1, "ccd":"2,2"}
            {"visit":2, "ccd":"2,2"}
        """
        if namespace.config is None:
            return
        idDict = collections.OrderedDict()
        for nameValue in values:
            keyName, _, valueStr = nameValue.partition("=")
            if keyName in idDict:
                parser.error("%s appears multiple times in one ID argument: %s" % (keyName, option_string))
            expanded = []
            for item in valueStr.split("^"):
                mat = re.search(r"^(\d+)\.\.(\d+)(?::(\d+))?$", item)
                if mat is None:
                    # Plain value: keep as a string.
                    expanded.append(item)
                else:
                    start = int(mat.group(1))
                    stop = int(mat.group(2))
                    strideStr = mat.group(3)
                    stride = int(strideStr) if strideStr else 1
                    # Ranges are inclusive, unlike the builtin range.
                    expanded.extend(str(num) for num in range(start, stop + 1, stride))
            idDict[keyName] = expanded

        # Cross product of all key values, preserving key order.
        keys = list(idDict.keys())
        idDictList = [collections.OrderedDict(zip(keys, combo))
                      for combo in itertools.product(*idDict.values())]

        argName = option_string.lstrip("-")
        getattr(namespace, argName).idList += idDictList
1245 
1246 
class LogLevelAction(argparse.Action):
    """argparse action to set log level.
    """

    def __call__(self, parser, namespace, values, option_string):
        """Set trace level.

        Parameters
        ----------
        parser : `ArgumentParser`
            Argument parser.
        namespace : `argparse.Namespace`
            Parsed command. This argument is not used.
        values : `list`
            List of trace levels; each item must be of the form
            ``component_name=level`` or ``level``, where ``level``
            is a keyword (not case sensitive) or an integer.
        option_string : `str`
            Option value specified by the user.
        """
        permittedLevelList = ('TRACE', 'DEBUG', 'INFO', 'WARN', 'ERROR', 'FATAL')
        permittedLevelSet = set(permittedLevelList)
        for componentLevel in values:
            component, _, levelStr = componentLevel.partition("=")
            if not levelStr:
                # A bare level (no "=") applies to the task's root logger.
                component, levelStr = None, component
            levelName = levelStr.upper()
            if levelName not in permittedLevelSet:
                parser.error("loglevel=%r not one of %s" % (levelStr, permittedLevelList))
            logLevel = getattr(lsstLog.Log, levelName)
            if component is None:
                namespace.log.setLevel(logLevel)
            else:
                lsstLog.Log.getLogger(component).setLevel(logLevel)
            # Mirror the level onto the equivalent Python logger.
            pyLevel = lsstLog.LevelTranslator.lsstLog2logging(logLevel)
            logging.getLogger(component).setLevel(pyLevel)
1285 
1286 
class ReuseAction(argparse.Action):
    """argparse action associated with ArgumentParser.addReuseOption."""

    def __call__(self, parser, namespace, value, option_string):
        # "all" maps to the last real choice: addReuseOption appends "all"
        # as the final entry, so the real steps end at index -2.
        selected = self.choices[-2] if value == "all" else value
        # Reuse the named step and every step before it.
        namespace.reuse = self.choices[:self.choices.index(selected) + 1]
1295 
1296 
def setDottedAttr(item, name, value):
    """Set an instance attribute (like `setattr` but accepting
    hierarchical names such as ``foo.bar.baz``).

    Parameters
    ----------
    item : obj
        Object whose attribute is to be set.
    name : `str`
        Name of attribute to set.
    value : obj
        New value for the attribute.

    Notes
    -----
    For example if name is ``foo.bar.baz`` then ``item.foo.bar.baz``
    is set to the specified value.
    """
    *parentNames, attrName = name.split(".")
    target = item
    # Walk down to the object that owns the final attribute.
    for parentName in parentNames:
        target = getattr(target, parentName)
    setattr(target, attrName, value)
1320 
1321 
def getDottedAttr(item, name):
    """Get an attribute (like `getattr` but accepts hierarchical names
    such as ``foo.bar.baz``).

    Parameters
    ----------
    item : obj
        Object whose attribute is to be returned.
    name : `str`
        Name of the attribute to get.

    Returns
    -------
    itemAttr : obj
        If name is ``foo.bar.baz`` then the return value is
        ``item.foo.bar.baz``.
    """
    result = item
    parts = name.split(".")
    # Consume one attribute component at a time.
    while parts:
        result = getattr(result, parts.pop(0))
    return result
def getTaskDict(config, taskDict=None, baseName="")
def __init__(self, name=None, help="dataset type to process from input data repository", default=None)
def setDottedAttr(item, name, value)
def __call__(self, parser, namespace, values, option_string=None)
std::shared_ptr< FrameSet > append(FrameSet const &first, FrameSet const &second)
Construct a FrameSet that performs two transformations in series.
Definition: functional.cc:33
daf::base::PropertySet * set
Definition: fits.cc:832
def obeyShowArgument(showOpts, config=None, exit=False)
std::string getPackageDir(std::string const &packageName)
return the root directory of a setup package
Definition: packaging.cc:33
Definition: Log.h:691
def __init__(self, minimum, dataRange, Q)
def setLevel(loggername, level)
def __init__(self, name, datasetType, level, doMakeDataRefList=True, ContainerClass=DataIdContainer)
def __call__(self, parser, namespace, value, option_string)
def __init__(self, name, usage="%(prog)s input [options]", kwargs)
def __call__(self, parser, namespace, values, option_string)
def __call__(self, parser, namespace, values, option_string)
def __call__(self, parser, namespace, values, option_string)
def parse_args(self, config, args=None, log=None, override=None)
daf::base::PropertyList * list
Definition: fits.cc:833
def add_id_argument(self, name, datasetType, help, level=None, doMakeDataRefList=True, ContainerClass=DataIdContainer)