from __future__ import absolute_import, division
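# Imports the code below appears to rely on (a reconstructed import block; the
# lsst.* modules are assumed to be the standard LSST stack packages used by
# pipe_base at this time):
import argparse
import collections
import fnmatch
import itertools
import os
import re
import shlex
import shutil
import sys
import textwrap

import lsst.utils
import lsst.pex.config as pexConfig
import lsst.pex.logging as pexLog
import lsst.daf.persistence as dafPersist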
__all__ = ["ArgumentParser", "ConfigFileAction", "ConfigValueAction", "DataIdContainer", "DatasetArgument"]
DEFAULT_INPUT_NAME = "PIPE_INPUT_ROOT"
DEFAULT_CALIB_NAME = "PIPE_CALIB_ROOT"
DEFAULT_OUTPUT_NAME = "PIPE_OUTPUT_ROOT"
45 """!Apply environment variable as default root, if present, and abspath
47 @param[in] defName name of environment variable containing default root path;
48 if the environment variable does not exist then the path is relative
49 to the current working directory
50 @param[in] path path relative to default root path
51 @return abspath: path that has been expanded, or None if the environment variable does not exist
54 defRoot = os.environ.get(defName)
58 return os.path.abspath(path)
59 return os.path.abspath(os.path.join(defRoot, path
or ""))
63 """!A container for data IDs and associated data references
65 Override for data IDs that require special handling to be converted to data references,
66 and specify the override class as ContainerClass for add_id_argument.
67 (If you don't want the argument parser to compute data references, you may use this class
68 and specify doMakeDataRefList=False in add_id_argument.)
71 """!Construct a DataIdContainer"""
78 """!Set actual dataset type, once it is known"""
82 """!Validate data IDs and cast them to the correct type (modify idList in place).
84 @param[in] butler data butler (a \ref lsst.daf.persistence.butler.Butler
85 "lsst.daf.persistence.Butler")
88 raise RuntimeError(
"Must call setDatasetType first")
90 idKeyTypeDict = butler.getKeys(datasetType=self.
datasetType, level=self.
level)
92 raise KeyError(
"Cannot get keys for datasetType %s at level %s" % (self.
datasetType, self.
level))
94 for dataDict
in self.
idList:
95 for key, strVal
in dataDict.iteritems():
97 keyType = idKeyTypeDict[key]
99 validKeys = sorted(idKeyTypeDict.keys())
100 raise KeyError(
"Unrecognized ID key %r; valid keys are: %s" % (key, validKeys))
103 castVal = keyType(strVal)
105 raise TypeError(
"Cannot cast value %r to %s for ID key %r" % (strVal, keyType, key,))
106 dataDict[key] = castVal
109 """!Compute refList based on idList
111 Not called if add_id_argument called with doMakeDataRef=False
113 @param[in] namespace results of parsing command-line (with 'butler' and 'log' elements)
116 raise RuntimeError(
"Must call setDatasetType first")
117 butler = namespace.butler
118 for dataId
in self.
idList:
119 refList = list(butler.subset(datasetType=self.
datasetType, level=self.
level, dataId=dataId))
125 namespace.log.warn(
"No data found for dataId=%s" % (dataId,))
131 """!Glorified struct for data about id arguments, used by ArgumentParser.add_id_argument"""
132 def __init__(self, name, datasetType, level, doMakeDataRefList=True, ContainerClass=DataIdContainer):
135 @param[in] name name of identifier (argument name without dashes)
136 @param[in] datasetType type of dataset; specify a string for a fixed dataset type
137 or a DatasetArgument for a dynamic dataset type (one specified on the command line),
138 in which case an argument is added by name --<name>_dstype
139 @param[in] level level of dataset, for butler
140 @param[in] doMakeDataRefList construct data references?
141 @param[in] ContainerClass class to contain data IDs and data references;
142 the default class will work for many kinds of data, but you may have to override
143 to compute some kinds of data references.
145 if name.startswith(
"-"):
146 raise RuntimeError(
"Name %s must not start with -" % (name,))
159 """!Is the dataset type dynamic (specified on the command line)?"""
160 return isinstance(self.
datasetType, DatasetArgument)
163 """!Get the dataset type
165 @param[in] namespace parsed command created by argparse parse_args;
166 if the dataset type is dynamic then it is read from namespace.<name>_dstype
167 else namespace is ignored
172 """!Specify that the dataset type should be a command-line option.
174 Somewhat more heavyweight than just using, e.g., None as a signal, but
175 provides the ability to have more informative help and a default. Also
176 more extensible in the future.
178 @param[in] name name of command-line argument (including leading "--", if wanted);
179 if omitted a suitable default is chosen
180 @param[in] help help string for the command-line option
181 @param[in] default default value; if None, then the option is required
185 help=
"dataset type to process from input data repository",
197 """!An argument parser for pipeline tasks that is based on argparse.ArgumentParser
199 Users may wish to add additional arguments before calling parse_args.
202 - I would prefer to check data ID keys and values as they are parsed,
203 but the required information comes from the butler, so I have to construct a butler
204 before I do this checking. Constructing a butler is slow, so I only want do it once,
205 after parsing the command line, so as to catch syntax errors quickly.
    def __init__(self, name, usage="%(prog)s input [options]", **kwargs):
        """!Construct an ArgumentParser

        @param[in] name  name of top-level task; used to identify camera-specific override files
        @param[in] usage  usage string
        @param[in] **kwargs  additional keyword arguments for argparse.ArgumentParser
        """
        self._name = name
        self._dataIdArgDict = {}  # dict of data identifier specifications, by argument name
        argparse.ArgumentParser.__init__(self,
            usage=usage,
            fromfile_prefix_chars='@',
            epilog=textwrap.dedent("""Notes:
            * --config, --configfile, --id, --loglevel and @file may appear multiple times;
                all values are used, in order left to right
            * @file reads command-line options from the specified file:
                * data may be distributed among multiple lines (e.g. one option per line)
                * data after # is treated as a comment and ignored
                * blank lines and lines starting with # are ignored
            * To specify multiple values for an option, do not use = after the option name:
                * right: --configfile foo bar
                * wrong: --configfile=foo bar
            """),
            formatter_class=argparse.RawDescriptionHelpFormatter,
            **kwargs)
        self.add_argument("input",
            help="path to input data repository, relative to $%s" % (DEFAULT_INPUT_NAME,))
        self.add_argument("--calib",
            help="path to input calibration repository, relative to $%s" % (DEFAULT_CALIB_NAME,))
        self.add_argument("--output",
            help="path to output data repository (need not exist), relative to $%s" % (DEFAULT_OUTPUT_NAME,))
        self.add_argument("-c", "--config", nargs="*", action=ConfigValueAction,
            help="config override(s), e.g. -c foo=newfoo bar.baz=3", metavar="NAME=VALUE")
        self.add_argument("-C", "--configfile", dest="configfile", nargs="*", action=ConfigFileAction,
            help="config override file(s)")
        self.add_argument("-L", "--loglevel", nargs="*", action=LogLevelAction,
            help="logging level; supported levels are [debug|warn|info|fatal] or an integer; "
                "trace level is negative log level, e.g. use level -3 for trace level 3",
            metavar="LEVEL|COMPONENT=LEVEL")
        self.add_argument("--debug", action="store_true", help="enable debugging output?")
        self.add_argument("--doraise", action="store_true",
            help="raise an exception on error (else log a message and continue)?")
        self.add_argument("--profile", help="Dump cProfile statistics to filename")
        self.add_argument("--logdest", help="logging destination")
        self.add_argument("--show", nargs="+", default=(),
            help="display the specified information to stdout and quit (unless run is specified).")
        self.add_argument("-j", "--processes", type=int, default=1, help="Number of processes to use")
        self.add_argument("-t", "--timeout", type=float,
            help="Timeout for multiprocessing; maximum wall time (sec)")
        self.add_argument("--clobber-output", action="store_true", dest="clobberOutput", default=False,
            help=("remove and re-create the output directory if it already exists "
                "(safe with -j, but not all other forms of parallel execution)"))
        self.add_argument("--clobber-config", action="store_true", dest="clobberConfig", default=False,
            help=("backup and then overwrite existing config files instead of checking them "
                "(safe with -j, but not all other forms of parallel execution)"))
    def add_id_argument(self, name, datasetType, help, level=None, doMakeDataRefList=True,
                        ContainerClass=DataIdContainer):
        """!Add a data ID argument

        Add an argument to specify data IDs. If datasetType is an instance of DatasetArgument,
        then add a second argument to specify the dataset type.

        @param[in] name  name of the argument (including leading dashes, if wanted)
        @param[in] datasetType  type of dataset; supply a string for a fixed dataset type,
            or a DatasetArgument for a dynamically determined dataset type
        @param[in] help  help string for the argument
        @param[in] level  level of dataset, for butler
        @param[in] doMakeDataRefList  construct data references?
        @param[in] ContainerClass  data ID container class to use to contain results;
            override the default if you need a special means of computing data references from data IDs

        The associated data is put into namespace.<dataIdArgument.name> as an instance of ContainerClass;
        the container includes fields:
        - idList: a list of data ID dicts
        - refList: a list of butler data references (empty if doMakeDataRefList false)
        """
        argName = name.lstrip("-")
        if argName in self._dataIdArgDict:
            raise RuntimeError("Data ID argument %s already exists" % (name,))
        if argName in set(("camera", "config", "butler", "log", "obsPkg")):
            raise RuntimeError("Data ID argument %s is a reserved name" % (name,))

        self.add_argument(name, nargs="*", action=IdValueAction, help=help,
                          metavar="KEY=VALUE1[^VALUE2[^VALUE3...]")

        dataIdArgument = DataIdArgument(
            name=argName,
            datasetType=datasetType,
            level=level,
            doMakeDataRefList=doMakeDataRefList,
            ContainerClass=ContainerClass,
        )

        if dataIdArgument.isDynamicDatasetType():
            datasetType = dataIdArgument.datasetType
            help = datasetType.help if datasetType.help else "dataset type for %s" % (name,)
            self.add_argument(
                "--" + dataIdArgument.datasetTypeName,
                default=datasetType.default,
                required=datasetType.required,
                help=help,
            )
        self._dataIdArgDict[argName] = dataIdArgument
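    # A sketch of typical use with a fixed dataset type (the task, config class
    # and paths are illustrative; ExampleConfig stands in for the task's
    # pex_config Config):
    #
    #   parser = ArgumentParser(name="exampleTask")
    #   parser.add_id_argument("--id", "raw", help="data ID, e.g. --id visit=12345 ccd=1,2")
    #   namespace = parser.parse_args(config=ExampleConfig(),
    #                                 args=["/path/to/repo", "--id", "visit=12345"])
    #   print namespace.id.idList   # list of data ID dicts
    #   print namespace.id.refList  # butler data references (doMakeDataRefList is True by default)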
    def parse_args(self, config, args=None, log=None, override=None):
        """!Parse arguments for a pipeline task

        @param[in,out] config  config for the task being run
        @param[in] args  argument list; if None use sys.argv[1:]
        @param[in] log  log (an instance of pex_logging Log); if None use the default log
        @param[in] override  a config override function; it must take the root config object
            as its only argument and must modify the config in place.
            This function is called after camera-specific override files are applied, and before
            command-line config overrides are applied (thus allowing the user the final word).

        @return namespace: an argparse.Namespace containing many useful fields including:
        - camera: camera name
        - config: the supplied config with all overrides applied, validated and frozen
        - butler: a butler for the data
        - an entry for each of the data ID arguments registered by add_id_argument(),
          the value of which is a data ID container (an instance of ContainerClass)
          with public elements 'idList' and 'refList'
        - log: a pex_logging log
        - an entry for each command-line argument, with the following exceptions:
          - config is the supplied config, suitably updated
          - configfile, id and loglevel are all missing
        - obsPkg: name of obs_ package for this camera
        """
        if args is None:
            args = sys.argv[1:]

        if len(args) < 1 or args[0].startswith("-") or args[0].startswith("@"):
            self.print_help()
            if len(args) == 1 and args[0] in ("-h", "--help"):
                self.exit()
            else:
                self.exit("%s: error: Must specify input as first argument" % self.prog)

        inputRoot = _fixPath(DEFAULT_INPUT_NAME, args[0])
        if not os.path.isdir(inputRoot):
            self.error("Error: input=%r not found" % (inputRoot,))

        namespace = argparse.Namespace()
        namespace.config = config
        namespace.log = log if log is not None else pexLog.Log.getDefaultLog()
        mapperClass = dafPersist.Butler.getMapperClass(inputRoot)
        namespace.camera = mapperClass.getCameraName()
        namespace.obsPkg = mapperClass.getPackageName()

        self.handleCamera(namespace)
        self._applyInitialOverrides(namespace)
        if override is not None:
            override(namespace.config)

        # add data ID containers to the namespace
        for dataIdArgument in self._dataIdArgDict.itervalues():
            setattr(namespace, dataIdArgument.name, dataIdArgument.ContainerClass(level=dataIdArgument.level))
        namespace = argparse.ArgumentParser.parse_args(self, args=args, namespace=namespace)
        namespace.input = inputRoot
        del namespace.configfile

        namespace.calib = _fixPath(DEFAULT_CALIB_NAME, namespace.calib)
        namespace.output = _fixPath(DEFAULT_OUTPUT_NAME, namespace.output)

        if namespace.clobberOutput:
            if namespace.output is None:
                self.error("--clobber-output is only valid with --output")
            elif namespace.output == namespace.input:
                self.error("--clobber-output is not valid when the output and input repos are the same")
            if os.path.exists(namespace.output):
                namespace.log.info("Removing output repo %s for --clobber-output" % namespace.output)
                shutil.rmtree(namespace.output)

        namespace.log.info("input=%s" % (namespace.input,))
        namespace.log.info("calib=%s" % (namespace.calib,))
        namespace.log.info("output=%s" % (namespace.output,))

        namespace.butler = dafPersist.Butler(
            root=namespace.input,
            calibRoot=namespace.calib,
            outputRoot=namespace.output,
        )

        # convert the data in each data ID argument to proper types and data references;
        # this is done after parsing the command line because it requires the butler
        self._processDataIds(namespace)
400 if "data" in namespace.show:
401 for dataIdName
in self._dataIdArgDict.iterkeys():
402 for dataRef
in getattr(namespace, dataIdName).refList:
403 print dataIdName +
" dataRef.dataId =", dataRef.dataId
405 if namespace.show
and "run" not in namespace.show:
413 sys.stderr.write(
"Warning: no 'debug' module found\n")
414 namespace.debug =
False
416 if namespace.logdest:
417 namespace.log.addDestination(namespace.logdest)
418 del namespace.logdest
419 del namespace.loglevel
421 namespace.config.validate()
422 namespace.config.freeze()
427 """!Process the parsed data for each data ID argument
430 - Validate data ID keys
431 - Cast the data ID values to the correct type
432 - Compute data references from data IDs
434 @param[in,out] namespace parsed namespace (an argparse.Namespace);
435 reads these attributes:
438 - <name_dstype> for each data ID argument with a dynamic dataset type registered using
440 and modifies these attributes:
441 - <name> for each data ID argument registered using add_id_argument
443 for dataIdArgument
in self._dataIdArgDict.itervalues():
444 dataIdContainer = getattr(namespace, dataIdArgument.name)
445 dataIdContainer.setDatasetType(dataIdArgument.getDatasetType(namespace))
447 dataIdContainer.castDataIds(butler = namespace.butler)
448 except (KeyError, TypeError)
as e:
452 if dataIdArgument.doMakeDataRefList:
453 dataIdContainer.makeDataRefList(namespace)
456 """!Apply obs-package-specific and camera-specific config override files, if found
458 @param[in] namespace parsed namespace (an argparse.Namespace);
459 reads these attributes:
462 Look in the package namespace.obsPkg for files:
463 - config/<task_name>.py
464 - config/<camera_name>/<task_name>.py
468 fileName = self.
_name +
".py"
470 os.path.join(obsPkgDir,
"config", fileName),
471 os.path.join(obsPkgDir,
"config", namespace.camera, fileName),
473 if os.path.exists(filePath):
474 namespace.log.info(
"Loading config overrride file %r" % (filePath,))
475 namespace.config.load(filePath)
477 namespace.log.info(
"Config override file does not exist: %r" % (filePath,))
480 """!Perform camera-specific operations before parsing the command line.
482 The default implementation does nothing.
484 @param[in,out] namespace namespace (an argparse.Namespace) with the following fields:
485 - camera: the camera name
486 - config: the config passed to parse_args, with no overrides applied
487 - obsPkg: the obs_ package for this camera
488 - log: a pex_logging log
493 """!Allow files of arguments referenced by `@<path>` to contain multiple values on each line
495 @param[in] arg_line line of text read from an argument file
497 arg_line = arg_line.strip()
498 if not arg_line
or arg_line.startswith(
"#"):
500 for arg
in shlex.split(arg_line, comments=
True, posix=
True):
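    # A sketch of an argument file used via the @<path> syntax handled above
    # (the file name and its contents are illustrative):
    #
    #   $ cat myArgs.txt
    #   --output /path/to/outputRepo    # text after '#' is ignored
    #   --id visit=12345 ccd=1,2
    #
    #   $ exampleTask.py /path/to/inputRepo @myArgs.txt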
506 """!Get a dictionary of task info for all subtasks in a config
508 Designed to be called recursively; the user should call with only a config
509 (leaving taskDict and baseName at their default values).
511 @param[in] config configuration to process, an instance of lsst.pex.config.Config
512 @param[in,out] taskDict users should not specify this argument;
513 (supports recursion; if provided, taskDict is updated in place, else a new dict is started)
514 @param[in] baseName users should not specify this argument.
515 (supports recursion: if a non-empty string then a period is appended and the result is used
516 as a prefix for additional entries in taskDict; otherwise no prefix is used)
517 @return taskDict: a dict of config field name: task name
521 for fieldName, field
in config.iteritems():
522 if hasattr(field,
"value")
and hasattr(field,
"target"):
523 subConfig = field.value
524 if isinstance(subConfig, pexConfig.Config):
525 subBaseName =
"%s.%s" % (baseName, fieldName)
if baseName
else fieldName
527 taskName =
"%s.%s" % (field.target.__module__, field.target.__name__)
529 taskName = repr(field.target)
530 taskDict[subBaseName] = taskName
531 getTaskDict(config=subConfig, taskDict=taskDict, baseName=subBaseName)
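# A minimal sketch of the result, assuming a config with a single retargetable
# subtask field named "calibrate" (the names are illustrative):
#
#   taskDict = getTaskDict(config=ExampleConfig())
#   # -> {"calibrate": "lsst.pipe.tasks.calibrate.CalibrateTask"}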
535 """!Process arguments specified with --show (but ignores "data")
537 @param showOpts List of options passed to --show
538 @param config The provided config
539 @param exit Exit if "run" isn't included in showOpts
541 Supports the following options in showOpts:
542 - config[=PAT] Dump all the config entries, or just the ones that match the glob pattern
543 - tasks Show task hierarchy
544 - data Ignored; to be processed by caller
545 - run Keep going (the default behaviour is to exit if --show is specified)
547 Calls sys.exit(1) if any other option found.
552 for what
in showOpts:
553 mat = re.search(
r"^config(?:=(.+))?", what)
555 pattern = mat.group(1)
557 class FilteredStream(object):
558 """A file object that only prints lines that match the glob "pattern"
560 N.b. Newlines are silently discarded and reinserted; crude but effective.
562 def __init__(self, pattern):
563 self._pattern = pattern
565 def write(self, str):
567 if str
and fnmatch.fnmatch(str, self._pattern):
570 fd = FilteredStream(pattern)
574 config.saveToStream(fd,
"config")
579 elif what ==
"tasks":
582 print >> sys.stderr,
"Unknown value for show: %s (choose from '%s')" % \
583 (what,
"', '".join(
"config[=XXX] data tasks run".split()))
586 if exit
and "run" not in showOpts:
590 """!Print task hierarchy to stdout
592 @param[in] config: configuration to process (an lsst.pex.config.Config)
597 fieldNameList = sorted(taskDict.keys())
598 for fieldName
in fieldNameList:
599 taskName = taskDict[fieldName]
600 print "%s: %s" % (fieldName, taskName)
603 """!argparse action callback to override config parameters using name=value pairs from the command line
605 def __call__(self, parser, namespace, values, option_string):
606 """!Override one or more config name value pairs
608 @param[in] parser argument parser (instance of ArgumentParser)
609 @param[in,out] namespace parsed command (an instance of argparse.Namespace);
612 @param[in] values a list of configItemName=value pairs
613 @param[in] option_string option value specified by the user (a str)
615 if namespace.config
is None:
617 for nameValue
in values:
618 name, sep, valueStr = nameValue.partition(
"=")
620 parser.error(
"%s value %s must be in form name=value" % (option_string, nameValue))
625 except AttributeError:
626 parser.error(
"no config field: %s" % (name,))
629 value = eval(valueStr, {})
631 parser.error(
"cannot parse %r as a value for %s" % (valueStr, name))
635 parser.error(
"cannot set config.%s=%r: %s" % (name, value, e))
638 """!argparse action to load config overrides from one or more files
640 def __call__(self, parser, namespace, values, option_string=None):
641 """!Load one or more files of config overrides
643 @param[in] parser argument parser (instance of ArgumentParser)
644 @param[in,out] namespace parsed command (an instance of argparse.Namespace);
647 @param[in] values a list of data config file paths
648 @param[in] option_string option value specified by the user (a str)
650 if namespace.config
is None:
652 for configfile
in values:
654 namespace.config.load(configfile)
656 parser.error(
"cannot load config file %r: %s" % (configfile, e))
660 """!argparse action callback to process a data ID into a dict
662 def __call__(self, parser, namespace, values, option_string):
663 """!Parse --id data and append results to namespace.<argument>.idList
665 @param[in] parser argument parser (instance of ArgumentParser)
666 @param[in,out] namespace parsed command (an instance of argparse.Namespace);
668 - <idName>.idList, where <idName> is the name of the ID argument,
669 for instance "id" for ID argument --id
670 @param[in] values a list of data IDs; see data format below
671 @param[in] option_string option value specified by the user (a str)
674 key1=value1_1[^value1_2[^value1_3...] key2=value2_1[^value2_2[^value2_3...]...
676 The values (e.g. value1_1) may either be a string, or of the form "int..int" (e.g. "1..3")
677 which is interpreted as "1^2^3" (inclusive, unlike a python range). So "0^2..4^7..9" is
678 equivalent to "0^2^3^4^7^8^9". You may also specify a stride: "1..5:2" is "1^3^5"
680 The cross product is computed for keys with multiple values. For example:
681 --id visit 1^2 ccd 1,1^2,2
682 results in the following data ID dicts being appended to namespace.<argument>.idList:
683 {"visit":1, "ccd":"1,1"}
684 {"visit":2, "ccd":"1,1"}
685 {"visit":1, "ccd":"2,2"}
686 {"visit":2, "ccd":"2,2"}
688 if namespace.config
is None:
690 idDict = collections.OrderedDict()
691 for nameValue
in values:
692 name, sep, valueStr = nameValue.partition(
"=")
694 parser.error(
"%s appears multiple times in one ID argument: %s" % (name, option_string))
696 for v
in valueStr.split(
"^"):
697 mat = re.search(
r"^(\d+)\.\.(\d+)(?::(\d+))?$", v)
699 v1 = int(mat.group(1))
700 v2 = int(mat.group(2))
701 v3 = mat.group(3); v3 = int(v3)
if v3
else 1
702 for v
in range(v1, v2 + 1, v3):
703 idDict[name].append(str(v))
705 idDict[name].append(v)
707 keyList = idDict.keys()
708 iterList = [idDict[key]
for key
in keyList]
709 idDictList = [collections.OrderedDict(zip(keyList, valList))
710 for valList
in itertools.product(*iterList)]
712 argName = option_string.lstrip(
"-")
713 ident = getattr(namespace, argName)
714 ident.idList += idDictList
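# A short sketch of the range syntax handled above (the values are illustrative):
# "--id visit=0^2..4^7..9" appends data IDs with visit in ("0", "2", "3", "4", "7", "8", "9"),
# and "--id visit=1..5:2 ccd=1,1" expands to the cross product
#
#   {"visit": "1", "ccd": "1,1"}
#   {"visit": "3", "ccd": "1,1"}
#   {"visit": "5", "ccd": "1,1"}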
719 """!argparse action to set log level
721 def __call__(self, parser, namespace, values, option_string):
724 @param[in] parser argument parser (instance of ArgumentParser)
725 @param[in] namespace parsed command (an instance of argparse.Namespace); ignored
726 @param[in] values a list of trace levels;
727 each item must be of the form 'component_name=level' or 'level',
728 where level is a keyword (not case sensitive) or an integer
729 @param[in] option_string option value specified by the user (a str)
731 permittedLevelList = (
'DEBUG',
'INFO',
'WARN',
'FATAL')
732 permittedLevelSet = set(permittedLevelList)
733 for componentLevel
in values:
734 component, sep, levelStr = componentLevel.partition(
"=")
736 levelStr, component = component,
None
737 logLevelUpr = levelStr.upper()
738 if logLevelUpr
in permittedLevelSet:
739 logLevel = getattr(namespace.log, logLevelUpr)
742 logLevel = int(levelStr)
744 parser.error(
"loglevel=%r not int or one of %s" % (namespace.loglevel, permittedLevelList))
745 if component
is None:
746 namespace.log.setThreshold(logLevel)
748 namespace.log.setThresholdFor(component, logLevel)
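# A usage sketch (the component name is illustrative): "-L info" sets the overall
# threshold, "-L processCcd.isr=debug" sets the threshold for one component, and an
# integer such as "-L -3" selects trace level 3.  The resulting calls are:
#
#   namespace.log.setThreshold(namespace.log.INFO)                        # -L info
#   namespace.log.setThresholdFor("processCcd.isr", namespace.log.DEBUG)  # -L processCcd.isr=debug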
752 """!Like setattr, but accepts hierarchical names, e.g. foo.bar.baz
754 @param[in,out] item object whose attribute is to be set
755 @param[in] name name of item to set
756 @param[in] value new value for the item
758 For example if name is foo.bar.baz then item.foo.bar.baz is set to the specified value.
761 subnameList = name.split(
".")
762 for subname
in subnameList[:-1]:
763 subitem = getattr(subitem, subname)
764 setattr(subitem, subnameList[-1], value)
767 """!Like getattr, but accepts hierarchical names, e.g. foo.bar.baz
769 @param[in] item object whose attribute is to be returned
770 @param[in] name name of item to get
772 For example if name is foo.bar.baz then returns item.foo.bar.baz
775 for subname
in name.split(
"."):
776 subitem = getattr(subitem, subname)
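# A minimal sketch of the dotted-attribute helpers (the field names are illustrative):
#
#   setDottedAttr(config, "foo.bar.baz", 5)   # same as config.foo.bar.baz = 5
#   getDottedAttr(config, "foo.bar.baz")      # returns config.foo.bar.baz, i.e. 5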
780 """!Return True if data exists at the current level or any data exists at a deeper level, False otherwise
782 @param[in] butler data butler (a \ref lsst.daf.persistence.butler.Butler
783 "lsst.daf.persistence.Butler")
784 @param[in] datasetType dataset type (a str)
785 @param[in] dataRef butler data reference (a \ref lsst.daf.persistence.butlerSubset.ButlerDataRef
786 "lsst.daf.persistence.ButlerDataRef")
788 subDRList = dataRef.subItems()
790 for subDR
in subDRList:
795 return butler.datasetExists(datasetType = datasetType, dataId = dataRef.dataId)