argumentParser.py
1 from __future__ import absolute_import, division
2 #
3 # LSST Data Management System
4 # Copyright 2008, 2009, 2010 LSST Corporation.
5 #
6 # This product includes software developed by the
7 # LSST Project (http://www.lsst.org/).
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the LSST License Statement and
20 # the GNU General Public License along with this program. If not,
21 # see <http://www.lsstcorp.org/LegalNotices/>.
22 #
23 import argparse
24 import collections
25 import fnmatch
26 import itertools
27 import os
28 import re
29 import shlex
30 import sys
31 import shutil
32 
33 import lsst.utils
34 import lsst.pex.config as pexConfig
35 import lsst.pex.logging as pexLog
36 import lsst.daf.persistence as dafPersist
37 
38 __all__ = ["ArgumentParser", "ConfigFileAction", "ConfigValueAction", "DataIdContainer", "DatasetArgument"]
39 
40 DEFAULT_INPUT_NAME = "PIPE_INPUT_ROOT"
41 DEFAULT_CALIB_NAME = "PIPE_CALIB_ROOT"
42 DEFAULT_OUTPUT_NAME = "PIPE_OUTPUT_ROOT"
43 
44 def _fixPath(defName, path):
45  """!Apply environment variable as default root, if present, and abspath
46 
47  @param[in] defName name of environment variable containing default root path;
48  if the environment variable does not exist then the path is relative
49  to the current working directory
50  @param[in] path path relative to default root path
51  @return abspath: path that has been expanded, or None if the environment variable does not exist
52  and path is None
53  """
54  defRoot = os.environ.get(defName)
55  if defRoot is None:
56  if path is None:
57  return None
58  return os.path.abspath(path)
59  return os.path.abspath(os.path.join(defRoot, path or ""))
60 
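# A minimal sketch of the intended behaviour, assuming $PIPE_INPUT_ROOT=/datasets
# (a hypothetical value, not something this module defines):
#   _fixPath(DEFAULT_INPUT_NAME, "rerun/test")  ->  "/datasets/rerun/test"
#   _fixPath(DEFAULT_INPUT_NAME, None)          ->  "/datasets"
# and with the environment variable unset:
#   _fixPath(DEFAULT_INPUT_NAME, "rerun/test")  ->  os.path.abspath("rerun/test")
#   _fixPath(DEFAULT_INPUT_NAME, None)          ->  None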
61 
62 class DataIdContainer(object):
63  """!A container for data IDs and associated data references
64 
65  Override for data IDs that require special handling to be converted to data references,
66  and specify the override class as ContainerClass for add_id_argument.
67  (If you don't want the argument parser to compute data references, you may use this class
68  and specify doMakeDataRefList=False in add_id_argument.)
69  """
70  def __init__(self, level=None):
71  """!Construct a DataIdContainer"""
72  self.datasetType = None # the actual dataset type, as specified on the command line (if dynamic)
73  self.level = level
74  self.idList = []
75  self.refList = []
76 
77  def setDatasetType(self, datasetType):
78  """!Set actual dataset type, once it is known"""
79  self.datasetType = datasetType
80 
81  def castDataIds(self, butler):
82  """!Validate data IDs and cast them to the correct type (modify idList in place).
83 
84  @param[in] butler data butler (a \ref lsst.daf.persistence.butler.Butler
85  "lsst.daf.persistence.Butler")
86  """
87  if self.datasetType is None:
88  raise RuntimeError("Must call setDatasetType first")
89  try:
90  idKeyTypeDict = butler.getKeys(datasetType=self.datasetType, level=self.level)
91  except KeyError:
92  raise KeyError("Cannot get keys for datasetType %s at level %s" % (self.datasetType, self.level))
93 
94  for dataDict in self.idList:
95  for key, strVal in dataDict.iteritems():
96  try:
97  keyType = idKeyTypeDict[key]
98  except KeyError:
99  validKeys = sorted(idKeyTypeDict.keys())
100  raise KeyError("Unrecognized ID key %r; valid keys are: %s" % (key, validKeys))
101  if keyType != str:
102  try:
103  castVal = keyType(strVal)
104  except Exception:
105  raise TypeError("Cannot cast value %r to %s for ID key %r" % (strVal, keyType, key,))
106  dataDict[key] = castVal
107 
108  def makeDataRefList(self, namespace):
109  """!Compute refList based on idList
110 
111  Not called if add_id_argument was called with doMakeDataRefList=False
112 
113  @param[in] namespace results of parsing command-line (with 'butler' and 'log' elements)
114  """
115  if self.datasetType is None:
116  raise RuntimeError("Must call setDatasetType first")
117  butler = namespace.butler
118  for dataId in self.idList:
119  refList = list(butler.subset(datasetType=self.datasetType, level=self.level, dataId=dataId))
120  # exclude nonexistent data
121  # this is a recursive test, e.g. for the sake of "raw" data
122  refList = [dr for dr in refList if dataExists(butler=butler, datasetType=self.datasetType,
123  dataRef=dr)]
124  if not refList:
125  namespace.log.warn("No data found for dataId=%s" % (dataId,))
126  continue
127  self.refList += refList
128 
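# A minimal sketch of a ContainerClass override for add_id_argument, assuming the
# caller wants data references sorted by visit before processing; the class name
# and the "visit" key are illustrative and not part of the original module.
class ExampleSortedDataIdContainer(DataIdContainer):
    """!Example container whose refList is sorted by visit (illustrative only)"""
    def makeDataRefList(self, namespace):
        DataIdContainer.makeDataRefList(self, namespace)  # build refList as usual
        self.refList.sort(key=lambda dataRef: dataRef.dataId.get("visit", 0))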
129 
130 class DataIdArgument(object):
131  """!Glorified struct for data about id arguments, used by ArgumentParser.add_id_argument"""
132  def __init__(self, name, datasetType, level, doMakeDataRefList=True, ContainerClass=DataIdContainer):
133  """!Constructor
134 
135  @param[in] name name of identifier (argument name without dashes)
136  @param[in] datasetType type of dataset; specify a string for a fixed dataset type
137  or a DatasetArgument for a dynamic dataset type (one specified on the command line),
138  in which case an argument is added by name --<name>_dstype
139  @param[in] level level of dataset, for butler
140  @param[in] doMakeDataRefList construct data references?
141  @param[in] ContainerClass class to contain data IDs and data references;
142  the default class will work for many kinds of data, but you may have to override
143  to compute some kinds of data references.
144  """
145  if name.startswith("-"):
146  raise RuntimeError("Name %s must not start with -" % (name,))
147  self.name = name
148  self.datasetType = datasetType
149  self.level = level
150  self.doMakeDataRefList = bool(doMakeDataRefList)
151  self.ContainerClass = ContainerClass
152  self.argName = name.lstrip("-")
153  if self.isDynamicDatasetType():
154  self.datasetTypeName = datasetType.name if datasetType.name else self.name + "_dstype"
155  else:
156  self.datasetTypeName = None
157 
158  def isDynamicDatasetType(self):
159  """!Is the dataset type dynamic (specified on the command line)?"""
160  return isinstance(self.datasetType, DatasetArgument)
161 
162  def getDatasetType(self, namespace):
163  """!Get the dataset type
164 
165  @param[in] namespace parsed command created by argparse parse_args;
166  if the dataset type is dynamic then it is read from namespace.<name>_dstype
167  else namespace is ignored
168  """
169  return getattr(namespace, self.datasetTypeName) if self.isDynamicDatasetType() else self.datasetType
170 
171 class DatasetArgument(object):
172  """!Specify that the dataset type should be a command-line option.
173 
174  Somewhat more heavyweight than just using, e.g., None as a signal, but
175  provides the ability to have more informative help and a default. Also
176  more extensible in the future.
177 
178  @param[in] name name of command-line argument (including leading "--", if wanted);
179  if omitted a suitable default is chosen
180  @param[in] help help string for the command-line option
181  @param[in] default default value; if None, then the option is required
182  """
183  def __init__(self,
184  name = None,
185  help="dataset type to process from input data repository",
186  default=None,
187  ):
188  self.name = name
189  self.help = help
190  self.default = default
191 
192  @property
193  def required(self):
194  return self.default is None
195 
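# A sketch of the intended use of DatasetArgument (the help text and default are
# illustrative): passing DatasetArgument(help="dataset type to process", default="src")
# as the datasetType of add_id_argument("--id", ...) adds a --id_dstype option with
# that help string and default; with default=None the new option becomes required.
# A fuller parser example follows the ArgumentParser class below.
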
196 class ArgumentParser(argparse.ArgumentParser):
197  """!An argument parser for pipeline tasks that is based on argparse.ArgumentParser
198 
199  Users may wish to add additional arguments before calling parse_args.
200 
201  @note
202  - I would prefer to check data ID keys and values as they are parsed,
203  but the required information comes from the butler, so I have to construct a butler
204  before I do this checking. Constructing a butler is slow, so I only want to do it once,
205  after parsing the command line, so as to catch syntax errors quickly.
206  """
207  def __init__(self, name, usage = "%(prog)s input [options]", **kwargs):
208  """!Construct an ArgumentParser
209 
210  @param[in] name name of top-level task; used to identify camera-specific override files
211  @param[in] usage usage string
212  @param[in] **kwargs additional keyword arguments for argparse.ArgumentParser
213  """
214  self._name = name
215  self._dataIdArgDict = {} # Dict of data identifier specifications, by argument name
216  argparse.ArgumentParser.__init__(self,
217  usage = usage,
218  fromfile_prefix_chars = '@',
219  epilog = """Notes:
220 * --config, --configfile, --id, --loglevel and @file may appear multiple times;
221  all values are used, in order left to right
222 * @file reads command-line options from the specified file:
223  * data may be distributed among multiple lines (e.g. one option per line)
224  * data after # is treated as a comment and ignored
225  * blank lines and lines starting with # are ignored
226 * To specify multiple values for an option, do not use = after the option name:
227  * right: --configfile foo bar
228  * wrong: --configfile=foo bar
229 """,
230  formatter_class = argparse.RawDescriptionHelpFormatter,
231  **kwargs)
232  self.add_argument("input",
233  help="path to input data repository, relative to $%s" % (DEFAULT_INPUT_NAME,))
234  self.add_argument("--calib",
235  help="path to input calibration repository, relative to $%s" % (DEFAULT_CALIB_NAME,))
236  self.add_argument("--output",
237  help="path to output data repository (need not exist), relative to $%s" % (DEFAULT_OUTPUT_NAME,))
238  self.add_argument("-c", "--config", nargs="*", action=ConfigValueAction,
239  help="config override(s), e.g. -c foo=newfoo bar.baz=3", metavar="NAME=VALUE")
240  self.add_argument("-C", "--configfile", dest="configfile", nargs="*", action=ConfigFileAction,
241  help="config override file(s)")
242  self.add_argument("-L", "--loglevel", nargs="*", action=LogLevelAction,
243  help="logging level; supported levels are [debug|warn|info|fatal] or an integer; "
244  "trace level is negative log level, e.g. use level -3 for trace level 3",
245  metavar="LEVEL|COMPONENT=LEVEL")
246  self.add_argument("--debug", action="store_true", help="enable debugging output?")
247  self.add_argument("--doraise", action="store_true",
248  help="raise an exception on error (else log a message and continue)?")
249  self.add_argument("--profile", help="Dump cProfile statistics to filename")
250  self.add_argument("--logdest", help="logging destination")
251  self.add_argument("--show", nargs="+", default=(),
252  help="display the specified information to stdout and quit (unless run is specified).")
253  self.add_argument("-j", "--processes", type=int, default=1, help="Number of processes to use")
254  self.add_argument("-t", "--timeout", type=float,
255  help="Timeout for multiprocessing; maximum wall time (sec)")
256  self.add_argument("--clobber-output", action="store_true", dest="clobberOutput", default=False,
257  help=("remove and re-create the output directory if it already exists "
258  "(safe with -j, but not all other forms of parallel execution)"))
259  self.add_argument("--clobber-config", action="store_true", dest="clobberConfig", default=False,
260  help=("backup and then overwrite existing config files instead of checking them "
261  "(safe with -j, but not all other forms of parallel execution)"))
262 
263  def add_id_argument(self, name, datasetType, help, level=None, doMakeDataRefList=True,
264  ContainerClass=DataIdContainer):
265  """!Add a data ID argument
266 
267  Add an argument to specify data IDs. If datasetType is an instance of DatasetArgument,
268  then add a second argument to specify the dataset type.
269 
270  @param[in] name name of the argument (including leading dashes, if wanted)
271  @param[in] datasetType type of dataset; supply a string for a fixed dataset type,
272  or a DatasetArgument for a dynamically determined dataset type
273  @param[in] help help string for the argument
274  @param[in] level level of dataset, for butler
275  @param[in] doMakeDataRefList construct data references?
276  @param[in] ContainerClass data ID container class to use to contain results;
277  override the default if you need a special means of computing data references from data IDs
278 
279  The associated data is put into namespace.<dataIdArgument.name> as an instance of ContainerClass;
280  the container includes fields:
281  - idList: a list of data ID dicts
282  - refList: a list of butler data references (empty if doMakeDataRefList false)
283  """
284  argName = name.lstrip("-")
285 
286  if argName in self._dataIdArgDict:
287  raise RuntimeError("Data ID argument %s already exists" % (name,))
288  if argName in set(("camera", "config", "butler", "log", "obsPkg")):
289  raise RuntimeError("Data ID argument %s is a reserved name" % (name,))
290 
291  self.add_argument(name, nargs="*", action=IdValueAction, help=help,
292  metavar="KEY=VALUE1[^VALUE2[^VALUE3...]]")
293 
294  dataIdArgument = DataIdArgument(
295  name = argName,
296  datasetType = datasetType,
297  level = level,
298  doMakeDataRefList = doMakeDataRefList,
299  ContainerClass = ContainerClass,
300  )
301 
302  if dataIdArgument.isDynamicDatasetType():
303  datasetType = dataIdArgument.datasetType
304  help = datasetType.help if datasetType.help else "dataset type for %s" % (name,)
305  self.add_argument(
306  "--" + dataIdArgument.datasetTypeName,
307  default = datasetType.default,
308  required = datasetType.required,
309  help = help,
310  )
311  self._dataIdArgDict[argName] = dataIdArgument
312 
313  def parse_args(self, config, args=None, log=None, override=None):
314  """!Parse arguments for a pipeline task
315 
316  @param[in,out] config config for the task being run
317  @param[in] args argument list; if None use sys.argv[1:]
318  @param[in] log log (an instance of pex_logging Log); if None, use the default log
319  @param[in] override a config override function; it must take the root config object
320  as its only argument and must modify the config in place.
321  This function is called after camera-specific override files are applied, and before
322  command-line config overrides are applied (thus allowing the user the final word).
323 
324  @return namespace: an argparse.Namespace containing many useful fields including:
325  - camera: camera name
326  - config: the supplied config with all overrides applied, validated and frozen
327  - butler: a butler for the data
328  - an entry for each of the data ID arguments registered by add_id_argument(),
329  the value of which is an instance of ContainerClass (by default a DataIdContainer) with public elements 'idList' and 'refList'
330  - log: a pex_logging log
331  - an entry for each command-line argument, with the following exceptions:
332  - config is the supplied config, suitably updated
333  - configfile, id and loglevel are all missing
334  - obsPkg: name of obs_ package for this camera
335  """
336  if args is None:
337  args = sys.argv[1:]
338 
339  if len(args) < 1 or args[0].startswith("-") or args[0].startswith("@"):
340  self.print_help()
341  if len(args) == 1 and args[0] in ("-h", "--help"):
342  self.exit()
343  else:
344  self.exit("%s: error: Must specify input as first argument" % self.prog)
345 
346  # note: don't set namespace.input until after running parse_args, else it will get overwritten
347  inputRoot = _fixPath(DEFAULT_INPUT_NAME, args[0])
348  if not os.path.isdir(inputRoot):
349  self.error("Error: input=%r not found" % (inputRoot,))
350 
351  namespace = argparse.Namespace()
352  namespace.config = config
353  namespace.log = log if log is not None else pexLog.Log.getDefaultLog()
354  mapperClass = dafPersist.Butler.getMapperClass(inputRoot)
355  namespace.camera = mapperClass.getCameraName()
356  namespace.obsPkg = mapperClass.getPackageName()
357 
358  self.handleCamera(namespace)
359 
360  self._applyInitialOverrides(namespace)
361  if override is not None:
362  override(namespace.config)
363 
364  # Add data ID containers to namespace
365  for dataIdArgument in self._dataIdArgDict.itervalues():
366  setattr(namespace, dataIdArgument.name, dataIdArgument.ContainerClass(level=dataIdArgument.level))
367 
368  namespace = argparse.ArgumentParser.parse_args(self, args=args, namespace=namespace)
369  namespace.input = inputRoot
370  del namespace.configfile
371 
372  namespace.calib = _fixPath(DEFAULT_CALIB_NAME, namespace.calib)
373  namespace.output = _fixPath(DEFAULT_OUTPUT_NAME, namespace.output)
374 
375  if namespace.clobberOutput:
376  if namespace.output is None:
377  self.error("--clobber-output is only valid with --output")
378  elif namespace.output == namespace.input:
379  self.error("--clobber-output is not valid when the output and input repos are the same")
380  if os.path.exists(namespace.output):
381  namespace.log.info("Removing output repo %s for --clobber-output" % namespace.output)
382  shutil.rmtree(namespace.output)
383 
384  namespace.log.info("input=%s" % (namespace.input,))
385  namespace.log.info("calib=%s" % (namespace.calib,))
386  namespace.log.info("output=%s" % (namespace.output,))
387 
388  obeyShowArgument(namespace.show, namespace.config, exit=False)
389 
390  namespace.butler = dafPersist.Butler(
391  root = namespace.input,
392  calibRoot = namespace.calib,
393  outputRoot = namespace.output,
394  )
395 
396  # convert data in each of the identifier lists to proper types
397  # this is done after constructing the butler, hence after parsing the command line,
398  # because it takes a long time to construct a butler
399  self._processDataIds(namespace)
400  if "data" in namespace.show:
401  for dataIdName in self._dataIdArgDict.iterkeys():
402  for dataRef in getattr(namespace, dataIdName).refList:
403  print dataIdName + " dataRef.dataId =", dataRef.dataId
404 
405  if namespace.show and "run" not in namespace.show:
406  sys.exit(0)
407 
408  if namespace.debug:
409  try:
410  import debug
411  assert debug # silence pyflakes
412  except ImportError:
413  sys.stderr.write("Warning: no 'debug' module found\n")
414  namespace.debug = False
415 
416  if namespace.logdest:
417  namespace.log.addDestination(namespace.logdest)
418  del namespace.logdest
419  del namespace.loglevel
420 
421  namespace.config.validate()
422  namespace.config.freeze()
423 
424  return namespace
425 
426  def _processDataIds(self, namespace):
427  """!Process the parsed data for each data ID argument
428 
429  Processing includes:
430  - Validate data ID keys
431  - Cast the data ID values to the correct type
432  - Compute data references from data IDs
433 
434  @param[in,out] namespace parsed namespace (an argparse.Namespace);
435  reads these attributes:
436  - butler
437  - log
438  - <name_dstype> for each data ID argument with a dynamic dataset type registered using
439  add_id_argument
440  and modifies these attributes:
441  - <name> for each data ID argument registered using add_id_argument
442  """
443  for dataIdArgument in self._dataIdArgDict.itervalues():
444  dataIdContainer = getattr(namespace, dataIdArgument.name)
445  dataIdContainer.setDatasetType(dataIdArgument.getDatasetType(namespace))
446  try:
447  dataIdContainer.castDataIds(butler = namespace.butler)
448  except (KeyError, TypeError) as e:
449  # failure of castDataIds indicates invalid command args
450  self.error(e)
451  # failure of makeDataRefList indicates a bug that wants a traceback
452  if dataIdArgument.doMakeDataRefList:
453  dataIdContainer.makeDataRefList(namespace)
454 
455  def _applyInitialOverrides(self, namespace):
456  """!Apply obs-package-specific and camera-specific config override files, if found
457 
458  @param[in] namespace parsed namespace (an argparse.Namespace);
459  reads these attributes:
460  - obsPkg
461 
462  Look in the package namespace.obsPkg for files:
463  - config/<task_name>.py
464  - config/<camera_name>/<task_name>.py
465  and load if found
466  """
467  obsPkgDir = lsst.utils.getPackageDir(namespace.obsPkg)
468  fileName = self._name + ".py"
469  for filePath in (
470  os.path.join(obsPkgDir, "config", fileName),
471  os.path.join(obsPkgDir, "config", namespace.camera, fileName),
472  ):
473  if os.path.exists(filePath):
474  namespace.log.info("Loading config override file %r" % (filePath,))
475  namespace.config.load(filePath)
476  else:
477  namespace.log.info("Config override file does not exist: %r" % (filePath,))
478 
479  def handleCamera(self, namespace):
480  """!Perform camera-specific operations before parsing the command line.
481 
482  The default implementation does nothing.
483 
484  @param[in,out] namespace namespace (an argparse.Namespace) with the following fields:
485  - camera: the camera name
486  - config: the config passed to parse_args, with no overrides applied
487  - obsPkg: the obs_ package for this camera
488  - log: a pex_logging log
489  """
490  pass
491 
492  def convert_arg_line_to_args(self, arg_line):
493  """!Allow files of arguments referenced by `@<path>` to contain multiple values on each line
494 
495  @param[in] arg_line line of text read from an argument file
496  """
497  arg_line = arg_line.strip()
498  if not arg_line or arg_line.startswith("#"):
499  return
500  for arg in shlex.split(arg_line, comments=True, posix=True):
501  if not arg.strip():
502  continue
503  yield arg
504 
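# A minimal usage sketch, e.g. from a command-line task runner script; the task
# name "exampleTask", the "calexp" dataset type and the supplied config are
# assumptions for illustration, not requirements of this module.
def _exampleParseCommandLine(config, args=None):
    """!Build a parser with one fixed-type ID argument and parse the command line (illustrative only)"""
    parser = ArgumentParser(name="exampleTask")
    parser.add_id_argument("--id", "calexp", help="data ID, e.g. --id visit=12345 ccd=1,2")
    # after parsing, namespace.id is a DataIdContainer with idList (data ID dicts)
    # and refList (butler data references)
    return parser.parse_args(config=config, args=args)
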
505 def getTaskDict(config, taskDict=None, baseName=""):
506  """!Get a dictionary of task info for all subtasks in a config
507 
508  Designed to be called recursively; the user should call with only a config
509  (leaving taskDict and baseName at their default values).
510 
511  @param[in] config configuration to process, an instance of lsst.pex.config.Config
512  @param[in,out] taskDict users should not specify this argument;
513  (supports recursion; if provided, taskDict is updated in place, else a new dict is started)
514  @param[in] baseName users should not specify this argument.
515  (supports recursion: if a non-empty string then a period is appended and the result is used
516  as a prefix for additional entries in taskDict; otherwise no prefix is used)
517  @return taskDict: a dict of config field name: task name
518  """
519  if taskDict is None:
520  taskDict = dict()
521  for fieldName, field in config.iteritems():
522  if hasattr(field, "value") and hasattr(field, "target"):
523  subConfig = field.value
524  if isinstance(subConfig, pexConfig.Config):
525  subBaseName = "%s.%s" % (baseName, fieldName) if baseName else fieldName
526  try:
527  taskName = "%s.%s" % (field.target.__module__, field.target.__name__)
528  except Exception:
529  taskName = repr(field.target)
530  taskDict[subBaseName] = taskName
531  getTaskDict(config=subConfig, taskDict=taskDict, baseName=subBaseName)
532  return taskDict
533 
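# A sketch of the kind of mapping getTaskDict returns, assuming a config with a
# single retargetable subtask field (the field and task names are illustrative):
#   {"calibrate": "lsst.pipe.tasks.calibrate.CalibrateTask"}
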
534 def obeyShowArgument(showOpts, config=None, exit=False):
535  """!Process arguments specified with --show (but ignores "data")
536 
537  @param showOpts List of options passed to --show
538  @param config The provided config
539  @param exit Exit if "run" isn't included in showOpts
540 
541  Supports the following options in showOpts:
542  - config[=PAT] Dump all the config entries, or just the ones that match the glob pattern
543  - tasks Show task hierarchy
544  - data Ignored; to be processed by caller
545  - run Keep going (the default behaviour is to exit if --show is specified)
546 
547  Calls sys.exit(1) if any other option found.
548  """
549  if not showOpts:
550  return
551 
552  for what in showOpts:
553  mat = re.search(r"^config(?:=(.+))?", what)
554  if mat:
555  pattern = mat.group(1)
556  if pattern:
557  class FilteredStream(object):
558  """A file object that only prints lines that match the glob "pattern"
559 
560  N.b. Newlines are silently discarded and reinserted; crude but effective.
561  """
562  def __init__(self, pattern):
563  self._pattern = pattern
564 
565  def write(self, str):
566  str = str.rstrip()
567  if str and fnmatch.fnmatch(str, self._pattern):
568  print str
569 
570  fd = FilteredStream(pattern)
571  else:
572  fd = sys.stdout
573 
574  config.saveToStream(fd, "config")
575  elif what == "data":
576  pass
577  elif what == "run":
578  pass
579  elif what == "tasks":
580  showTaskHierarchy(config)
581  else:
582  print >> sys.stderr, "Unknown value for show: %s (choose from '%s')" % \
583  (what, "', '".join("config[=XXX] data tasks run".split()))
584  sys.exit(1)
585 
586  if exit and "run" not in showOpts:
587  sys.exit(0)
588 
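# A sketch of typical use from the command line of a task built with ArgumentParser
# (the glob pattern is illustrative):
#   --show config=*doWrite* tasks run
# dumps only config entries whose lines match *doWrite*, prints the task hierarchy,
# and then continues with the run instead of exiting.
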
589 def showTaskHierarchy(config):
590  """!Print task hierarchy to stdout
591 
592  @param[in] config: configuration to process (an lsst.pex.config.Config)
593  """
594  print "Subtasks:"
595  taskDict = getTaskDict(config=config)
596 
597  fieldNameList = sorted(taskDict.keys())
598  for fieldName in fieldNameList:
599  taskName = taskDict[fieldName]
600  print "%s: %s" % (fieldName, taskName)
601 
602 class ConfigValueAction(argparse.Action):
603  """!argparse action callback to override config parameters using name=value pairs from the command line
604  """
605  def __call__(self, parser, namespace, values, option_string):
606  """!Override one or more config name value pairs
607 
608  @param[in] parser argument parser (instance of ArgumentParser)
609  @param[in,out] namespace parsed command (an instance of argparse.Namespace);
610  updated values:
611  - namespace.config
612  @param[in] values a list of configItemName=value pairs
613  @param[in] option_string option value specified by the user (a str)
614  """
615  if namespace.config is None:
616  return
617  for nameValue in values:
618  name, sep, valueStr = nameValue.partition("=")
619  if not valueStr:
620  parser.error("%s value %s must be in form name=value" % (option_string, nameValue))
621 
622  # see if setting the string value works; if not, try eval
623  try:
624  setDottedAttr(namespace.config, name, valueStr)
625  except AttributeError:
626  parser.error("no config field: %s" % (name,))
627  except Exception:
628  try:
629  value = eval(valueStr, {})
630  except Exception:
631  parser.error("cannot parse %r as a value for %s" % (valueStr, name))
632  try:
633  setDottedAttr(namespace.config, name, value)
634  except Exception, e:
635  parser.error("cannot set config.%s=%r: %s" % (name, value, e))
636 
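# A sketch of how -c/--config values are applied (the field names are illustrative,
# taken from the help string above): for "-c foo=newfoo bar.baz=3" the parser first
# tries to set each field to the raw string ("newfoo", "3"); if that fails, the
# value is eval'd (so bar.baz receives the int 3) and set again.
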
637 class ConfigFileAction(argparse.Action):
638  """!argparse action to load config overrides from one or more files
639  """
640  def __call__(self, parser, namespace, values, option_string=None):
641  """!Load one or more files of config overrides
642 
643  @param[in] parser argument parser (instance of ArgumentParser)
644  @param[in,out] namespace parsed command (an instance of argparse.Namespace);
645  updated values:
646  - namespace.config
647  @param[in] values a list of config file paths
648  @param[in] option_string option value specified by the user (a str)
649  """
650  if namespace.config is None:
651  return
652  for configfile in values:
653  try:
654  namespace.config.load(configfile)
655  except Exception, e:
656  parser.error("cannot load config file %r: %s" % (configfile, e))
657 
658 
659 class IdValueAction(argparse.Action):
660  """!argparse action callback to process a data ID into a dict
661  """
662  def __call__(self, parser, namespace, values, option_string):
663  """!Parse --id data and append results to namespace.<argument>.idList
664 
665  @param[in] parser argument parser (instance of ArgumentParser)
666  @param[in,out] namespace parsed command (an instance of argparse.Namespace);
667  updated values:
668  - <idName>.idList, where <idName> is the name of the ID argument,
669  for instance "id" for ID argument --id
670  @param[in] values a list of data IDs; see data format below
671  @param[in] option_string option value specified by the user (a str)
672 
673  The data format is:
674  key1=value1_1[^value1_2[^value1_3...] key2=value2_1[^value2_2[^value2_3...]...
675 
676  The values (e.g. value1_1) may either be a string, or of the form "int..int" (e.g. "1..3")
677  which is interpreted as "1^2^3" (inclusive, unlike a python range). So "0^2..4^7..9" is
678  equivalent to "0^2^3^4^7^8^9". You may also specify a stride: "1..5:2" is "1^3^5"
679 
680  The cross product is computed for keys with multiple values. For example:
681  --id visit=1^2 ccd=1,1^2,2
682  results in the following data ID dicts being appended to namespace.<argument>.idList:
683  {"visit":1, "ccd":"1,1"}
684  {"visit":2, "ccd":"1,1"}
685  {"visit":1, "ccd":"2,2"}
686  {"visit":2, "ccd":"2,2"}
687  """
688  if namespace.config is None:
689  return
690  idDict = collections.OrderedDict()
691  for nameValue in values:
692  name, sep, valueStr = nameValue.partition("=")
693  if name in idDict:
694  parser.error("%s appears multiple times in one ID argument: %s" % (name, option_string))
695  idDict[name] = []
696  for v in valueStr.split("^"):
697  mat = re.search(r"^(\d+)\.\.(\d+)(?::(\d+))?$", v)
698  if mat:
699  v1 = int(mat.group(1))
700  v2 = int(mat.group(2))
701  v3 = mat.group(3); v3 = int(v3) if v3 else 1
702  for v in range(v1, v2 + 1, v3):
703  idDict[name].append(str(v))
704  else:
705  idDict[name].append(v)
706 
707  keyList = idDict.keys()
708  iterList = [idDict[key] for key in keyList]
709  idDictList = [collections.OrderedDict(zip(keyList, valList))
710  for valList in itertools.product(*iterList)]
711 
712  argName = option_string.lstrip("-")
713  ident = getattr(namespace, argName)
714  ident.idList += idDictList
715 
716 
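# A sketch of how ID values expand (the data ID keys are illustrative):
#   --id visit=1..5:2^9 ccd=1,1^2,2
# expands visit to 1^3^5^9, then takes the cross product with ccd, appending eight
# data ID dicts such as {"visit": "1", "ccd": "1,1"} to namespace.id.idList
# (values remain strings here; DataIdContainer.castDataIds casts them later).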
717 
718 class LogLevelAction(argparse.Action):
719  """!argparse action to set log level
720  """
721  def __call__(self, parser, namespace, values, option_string):
722  """!Set log level
723 
724  @param[in] parser argument parser (instance of ArgumentParser)
725  @param[in] namespace parsed command (an instance of argparse.Namespace); uses namespace.log to set thresholds
726  @param[in] values a list of trace levels;
727  each item must be of the form 'component_name=level' or 'level',
728  where level is a keyword (not case sensitive) or an integer
729  @param[in] option_string option value specified by the user (a str)
730  """
731  permittedLevelList = ('DEBUG', 'INFO', 'WARN', 'FATAL')
732  permittedLevelSet = set(permittedLevelList)
733  for componentLevel in values:
734  component, sep, levelStr = componentLevel.partition("=")
735  if not levelStr:
736  levelStr, component = component, None
737  logLevelUpr = levelStr.upper()
738  if logLevelUpr in permittedLevelSet:
739  logLevel = getattr(namespace.log, logLevelUpr)
740  else:
741  try:
742  logLevel = int(levelStr)
743  except Exception:
744  parser.error("loglevel=%r not int or one of %s" % (levelStr, permittedLevelList))
745  if component is None:
746  namespace.log.setThreshold(logLevel)
747  else:
748  namespace.log.setThresholdFor(component, logLevel)
749 
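# A sketch of typical use (the component name is illustrative):
#   --loglevel warn exampleTask.calibrate=debug
# sets the global logging threshold to WARN and the "exampleTask.calibrate"
# component to DEBUG; per the --loglevel help above, a negative integer such as
# -3 selects trace level 3.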
750 
751 def setDottedAttr(item, name, value):
752  """!Like setattr, but accepts hierarchical names, e.g. foo.bar.baz
753 
754  @param[in,out] item object whose attribute is to be set
755  @param[in] name name of item to set
756  @param[in] value new value for the item
757 
758  For example if name is foo.bar.baz then item.foo.bar.baz is set to the specified value.
759  """
760  subitem = item
761  subnameList = name.split(".")
762  for subname in subnameList[:-1]:
763  subitem = getattr(subitem, subname)
764  setattr(subitem, subnameList[-1], value)
765 
766 def getDottedAttr(item, name):
767  """!Like getattr, but accepts hierarchical names, e.g. foo.bar.baz
768 
769  @param[in] item object whose attribute is to be returned
770  @param[in] name name of item to get
771 
772  For example if name is foo.bar.baz then returns item.foo.bar.baz
773  """
774  subitem = item
775  for subname in name.split("."):
776  subitem = getattr(subitem, subname)
777  return subitem
778 
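# A minimal sketch of the dotted-name helpers (the config field name is illustrative):
#   setDottedAttr(config, "calibrate.doBackground", False)
# is equivalent to config.calibrate.doBackground = False, and
#   getDottedAttr(config, "calibrate.doBackground")
# reads the same nested field back.
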
779 def dataExists(butler, datasetType, dataRef):
780  """!Return True if data exists at the current level or any data exists at a deeper level, False otherwise
781 
782  @param[in] butler data butler (a \ref lsst.daf.persistence.butler.Butler
783  "lsst.daf.persistence.Butler")
784  @param[in] datasetType dataset type (a str)
785  @param[in] dataRef butler data reference (a \ref lsst.daf.persistence.butlerSubset.ButlerDataRef
786  "lsst.daf.persistence.ButlerDataRef")
787  """
788  subDRList = dataRef.subItems()
789  if subDRList:
790  for subDR in subDRList:
791  if dataExists(butler, datasetType, subDR):
792  return True
793  return False
794  else:
795  return butler.datasetExists(datasetType = datasetType, dataId = dataRef.dataId)