from __future__ import absolute_import, division

import argparse
import collections
import fnmatch
import itertools
import os
import re
import shlex
import shutil
import sys

import eups
import lsst.pex.config as pexConfig
import lsst.pex.logging as pexLog
import lsst.daf.persistence as dafPersist
__all__ = ["ArgumentParser", "ConfigFileAction", "ConfigValueAction", "DataIdContainer", "DatasetArgument"]
DEFAULT_INPUT_NAME = "PIPE_INPUT_ROOT"
DEFAULT_CALIB_NAME = "PIPE_CALIB_ROOT"
DEFAULT_OUTPUT_NAME = "PIPE_OUTPUT_ROOT"
45 """!Apply environment variable as default root, if present, and abspath
47 @param[in] defName name of environment variable containing default root path;
48 if the environment variable does not exist then the path is relative
49 to the current working directory
50 @param[in] path path relative to default root path
51 @return abspath: path that has been expanded, or None if the environment variable does not exist
54 defRoot = os.environ.get(defName)
58 return os.path.abspath(path)
59 return os.path.abspath(os.path.join(defRoot, path
or ""))
63 """!A container for data IDs and associated data references
65 Override for data IDs that require special handling to be converted to data references,
66 and specify the override class as ContainerClass for add_id_argument.
67 (If you don't want the argument parser to compute data references, you may use this class
68 and specify doMakeDataRefList=False in add_id_argument.)
71 """!Construct a DataIdContainer"""
78 """!Set actual dataset type, once it is known"""
82 """!Validate data IDs and cast them to the correct type (modify idList in place).
84 @param[in] butler data butler (a \ref lsst.daf.persistence.butler.Butler
85 "lsst.daf.persistence.Butler")
88 raise RuntimeError(
"Must call setDatasetType first")
90 idKeyTypeDict = butler.getKeys(datasetType=self.
datasetType, level=self.
level)
92 raise KeyError(
"Cannot get keys for datasetType %s at level %s" % (self.
datasetType, self.
level))
94 for dataDict
in self.
idList:
95 for key, strVal
in dataDict.iteritems():
97 keyType = idKeyTypeDict[key]
99 validKeys = sorted(idKeyTypeDict.keys())
100 raise KeyError(
"Unrecognized ID key %r; valid keys are: %s" % (key, validKeys))
103 castVal = keyType(strVal)
105 raise TypeError(
"Cannot cast value %r to %s for ID key %r" % (strVal, keyType, key,))
106 dataDict[key] = castVal
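    # Example (illustrative): if the butler reports ID key types such as
    # {"visit": int, "ccd": str}, then an idList entry parsed from the command
    # line as {"visit": "903334", "ccd": "1,1"} becomes
    # {"visit": 903334, "ccd": "1,1"} after castDataIds.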
109 """!Compute refList based on idList
111 Not called if add_id_argument called with doMakeDataRef=False
113 @param[in] namespace results of parsing command-line (with 'butler' and 'log' elements)
116 raise RuntimeError(
"Must call setDatasetType first")
117 butler = namespace.butler
118 for dataId
in self.
idList:
119 refList = list(butler.subset(datasetType=self.
datasetType, level=self.
level, dataId=dataId))
125 namespace.log.warn(
"No data found for dataId=%s" % (dataId,))
131 """!Glorified struct for data about id arguments, used by ArgumentParser.add_id_argument"""
132 def __init__(self, name, datasetType, level, doMakeDataRefList=True, ContainerClass=DataIdContainer):
135 @param[in] name name of identifier (argument name without dashes)
136 @param[in] datasetType type of dataset; specify a string for a fixed dataset type
137 or a DatasetArgument for a dynamic dataset type (one specified on the command line),
138 in which case an argument is added by name --<name>_dstype
139 @param[in] level level of dataset, for butler
140 @param[in] doMakeDataRefList construct data references?
141 @param[in] ContainerClass class to contain data IDs and data references;
142 the default class will work for many kinds of data, but you may have to override
143 to compute some kinds of data references.
145 if name.startswith(
"-"):
146 raise RuntimeError(
"Name %s must not start with -" % (name,))
159 """!Is the dataset type dynamic (specified on the command line)?"""
160 return isinstance(self.
datasetType, DatasetArgument)
163 """!Get the dataset type
165 @param[in] namespace parsed command created by argparse parse_args;
166 if the dataset type is dynamic then it is read from namespace.<name>_dstype
167 else namespace is ignored
172 """!Specify that the dataset type should be a command-line option.
174 Somewhat more heavyweight than just using, e.g., None as a signal, but
175 provides the ability to have more informative help and a default. Also
176 more extensible in the future.
178 @param[in] name name of command-line argument (including leading "--", if wanted);
179 if omitted a suitable default is chosen
180 @param[in] help help string for the command-line option
181 @param[in] default default value; if None, then the option is required
185 help=
"dataset type to process from input data repository",
197 """!An argument parser for pipeline tasks that is based on argparse.ArgumentParser
199 Users may wish to add additional arguments before calling parse_args.
202 - I would prefer to check data ID keys and values as they are parsed,
203 but the required information comes from the butler, so I have to construct a butler
204 before I do this checking. Constructing a butler is slow, so I only want do it once,
205 after parsing the command line, so as to catch syntax errors quickly.
207 def __init__(self, name, usage = "%(prog)s input [options]
", **kwargs):
208 """!Construct an ArgumentParser
210 @param[in] name name of top-level task; used to identify camera-specific override files
211 @param[in] usage usage string
212 @param[in] **kwargs additional keyword arguments for argparse.ArgumentParser
216 argparse.ArgumentParser.__init__(self,
218 fromfile_prefix_chars =
'@',
220 * --config, --configfile, --id, --trace and @file may appear multiple times;
221 all values are used, in order left to right
222 * @file reads command-line options from the specified file:
223 * data may be distributed among multiple lines (e.g. one option per line)
224 * data after # is treated as a comment and ignored
225 * blank lines and lines starting with # are ignored
226 * To specify multiple values for an option, do not use = after the option name:
227 * right: --configfile foo bar
228 * wrong: --configfile=foo bar
            formatter_class = argparse.RawDescriptionHelpFormatter,
            **kwargs)
        self.add_argument("input",
            help="path to input data repository, relative to $%s" % (DEFAULT_INPUT_NAME,))
        self.add_argument("--calib",
            help="path to input calibration repository, relative to $%s" % (DEFAULT_CALIB_NAME,))
        self.add_argument("--output",
            help="path to output data repository (need not exist), relative to $%s" % (DEFAULT_OUTPUT_NAME,))
        self.add_argument("-c", "--config", nargs="*", action=ConfigValueAction,
            help="config override(s), e.g. -c foo=newfoo bar.baz=3", metavar="NAME=VALUE")
        self.add_argument("-C", "--configfile", dest="configfile", nargs="*", action=ConfigFileAction,
            help="config override file(s)")
        self.add_argument("-L", "--loglevel", help="logging level")
        self.add_argument("-T", "--trace", nargs="*", action=TraceLevelAction,
            help="trace level for component", metavar="COMPONENT=LEVEL")
        self.add_argument("--debug", action="store_true", help="enable debugging output?")
        self.add_argument("--doraise", action="store_true",
            help="raise an exception on error (else log a message and continue)?")
        self.add_argument("--profile", help="Dump cProfile statistics to filename")
        self.add_argument("--logdest", help="logging destination")
        self.add_argument("--show", nargs="+", default=(),
            help="display the specified information to stdout and quit (unless run is specified).")
        self.add_argument("-j", "--processes", type=int, default=1, help="Number of processes to use")
        self.add_argument("-t", "--timeout", type=float,
            help="Timeout for multiprocessing; maximum wall time (sec)")
        self.add_argument("--clobber-output", action="store_true", dest="clobberOutput", default=False,
            help=("remove and re-create the output directory if it already exists "
                "(safe with -j, but not all other forms of parallel execution)"))
        self.add_argument("--clobber-config", action="store_true", dest="clobberConfig", default=False,
            help=("backup and then overwrite existing config files instead of checking them "
                "(safe with -j, but not all other forms of parallel execution)"))
    def add_id_argument(self, name, datasetType, help, level=None, doMakeDataRefList=True,
        ContainerClass=DataIdContainer):
        """!Add a data ID argument

        Add an argument to specify data IDs. If datasetType is an instance of DatasetArgument,
        then add a second argument to specify the dataset type.

        @param[in] name  name of the argument (including leading dashes, if wanted)
        @param[in] datasetType  type of dataset; supply a string for a fixed dataset type,
            or a DatasetArgument for a dynamically determined dataset type
        @param[in] help  help string for the argument
        @param[in] level  level of dataset, for butler
        @param[in] doMakeDataRefList  construct data references?
        @param[in] ContainerClass  data ID container class to use to contain results;
            override the default if you need a special means of computing data references from data IDs

        The associated data is put into namespace.<dataIdArgument.name> as an instance of ContainerClass;
        the container includes fields:
        - idList: a list of data ID dicts
        - refList: a list of butler data references (empty if doMakeDataRefList false)
        """
        argName = name.lstrip("-")

        if argName in self._dataIdArgDict:
            raise RuntimeError("Data ID argument %s already exists" % (name,))
        if argName in set(("camera", "config", "butler", "log", "obsPkg")):
            raise RuntimeError("Data ID argument %s is a reserved name" % (name,))

        self.add_argument(name, nargs="*", action=IdValueAction, help=help,
            metavar="KEY=VALUE1[^VALUE2[^VALUE3...]")

        dataIdArgument = DataIdArgument(
            name = argName,
            datasetType = datasetType,
            level = level,
            doMakeDataRefList = doMakeDataRefList,
            ContainerClass = ContainerClass,
        )

        if dataIdArgument.isDynamicDatasetType():
            datasetType = dataIdArgument.datasetType
            help = datasetType.help if datasetType.help else "dataset type for %s" % (name,)
            self.add_argument(
                "--" + dataIdArgument.datasetTypeName,
                default = datasetType.default,
                required = datasetType.required,
                help = help)
        self._dataIdArgDict[argName] = dataIdArgument
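    # Example usage (illustrative; the task name and dataset type are hypothetical):
    #     parser = ArgumentParser(name="exampleTask")
    #     parser.add_id_argument("--id", "raw", help="data ID, e.g. --id visit=12345 ccd=1,2")
    #     # with a dynamic dataset type, a second option (--id_dstype) is added:
    #     parser.add_id_argument("--id", DatasetArgument(), help="data ID and dataset type")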
    def parse_args(self, config, args=None, log=None, override=None):
        """!Parse arguments for a pipeline task

        @param[in,out] config  config for the task being run
        @param[in] args  argument list; if None use sys.argv[1:]
        @param[in] log  log (an instance of pex_logging Log); if None use the default log
        @param[in] override  a config override function; it must take the root config object
            as its only argument and must modify the config in place.
            This function is called after camera-specific override files are applied, and before
            command-line config overrides are applied (thus allowing the user the final word).

        @return namespace: an argparse.Namespace containing many useful fields including:
        - camera: camera name
        - config: the supplied config with all overrides applied, validated and frozen
        - butler: a butler for the data
        - an entry for each of the data ID arguments registered by add_id_argument(),
          the value of which is a data ID container (ContainerClass) with public elements 'idList' and 'refList'
        - log: a pex_logging log
        - an entry for each command-line argument, with the following exceptions:
          - config is the supplied config, not an override
          - configfile, id, logdest, loglevel are all missing
        - obsPkg: name of obs_ package for this camera
        """
        if args is None:
            args = sys.argv[1:]

        if len(args) < 1 or args[0].startswith("-") or args[0].startswith("@"):
            self.print_help()
            if len(args) == 1 and args[0] in ("-h", "--help"):
                self.exit()
            else:
                self.exit("%s: error: Must specify input as first argument" % self.prog)

        inputRoot = _fixPath(DEFAULT_INPUT_NAME, args[0])
        if not os.path.isdir(inputRoot):
            self.error("Error: input=%r not found" % (inputRoot,))

        namespace = argparse.Namespace()
        namespace.config = config
        namespace.log = log if log is not None else pexLog.Log.getDefaultLog()
        mapperClass = dafPersist.Butler.getMapperClass(inputRoot)
        namespace.camera = mapperClass.getCameraName()
        namespace.obsPkg = mapperClass.getEupsProductName()

        self.handleCamera(namespace)

        self._applyInitialOverrides(namespace)
        if override is not None:
            override(namespace.config)

        # add data ID containers to namespace
        for dataIdArgument in self._dataIdArgDict.itervalues():
            setattr(namespace, dataIdArgument.name, dataIdArgument.ContainerClass(level=dataIdArgument.level))

        namespace = argparse.ArgumentParser.parse_args(self, args=args, namespace=namespace)
        namespace.input = inputRoot
        del namespace.configfile

        namespace.calib = _fixPath(DEFAULT_CALIB_NAME, namespace.calib)
        namespace.output = _fixPath(DEFAULT_OUTPUT_NAME, namespace.output)

        if namespace.clobberOutput:
            if namespace.output is None:
                self.error("--clobber-output is only valid with --output")
            elif namespace.output == namespace.input:
                self.error("--clobber-output is not valid when the output and input repos are the same")
            if os.path.exists(namespace.output):
                namespace.log.info("Removing output repo %s for --clobber-output" % namespace.output)
                shutil.rmtree(namespace.output)

        namespace.log.info("input=%s" % (namespace.input,))
        namespace.log.info("calib=%s" % (namespace.calib,))
        namespace.log.info("output=%s" % (namespace.output,))

        namespace.butler = dafPersist.Butler(
            root = namespace.input,
            calibRoot = namespace.calib,
            outputRoot = namespace.output,
        )

        # convert the data in each of the data ID argument lists to proper types and compute data
        # references; this requires the butler, so it must happen after parsing the command line
        self._processDataIds(namespace)
        if "data" in namespace.show:
            for dataIdName in self._dataIdArgDict.iterkeys():
                for dataRef in getattr(namespace, dataIdName).refList:
                    print dataIdName + " dataRef.dataId =", dataRef.dataId

        if namespace.show and "run" not in namespace.show:
            sys.exit(0)

        if namespace.debug:
            try:
                import debug
                assert debug  # silence pyflakes
            except ImportError:
                sys.stderr.write("Warning: no 'debug' module found\n")
                namespace.debug = False

        if namespace.logdest:
            namespace.log.addDestination(namespace.logdest)
        del namespace.logdest

        if namespace.loglevel:
            permitted = ('DEBUG', 'INFO', 'WARN', 'FATAL')
            if namespace.loglevel.upper() in permitted:
                value = getattr(pexLog.Log, namespace.loglevel.upper())
            else:
                try:
                    value = int(namespace.loglevel)
                except ValueError:
                    self.error("log-level=%s not int or one of %s" % (namespace.loglevel, permitted))
            namespace.log.setThreshold(value)
        del namespace.loglevel

        namespace.config.validate()
        namespace.config.freeze()

        return namespace
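    # Example usage (illustrative; ExampleTask and its config class are hypothetical):
    #     parser = ArgumentParser(name="exampleTask")
    #     parser.add_id_argument("--id", "raw", help="data ID")
    #     namespace = parser.parse_args(config=ExampleTask.ConfigClass())
    #     for dataRef in namespace.id.refList:
    #         ...  # process each butler data reference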
437 """!Process the parsed data for each data ID argument
440 - Validate data ID keys
441 - Cast the data ID values to the correct type
442 - Compute data references from data IDs
444 @param[in,out] namespace parsed namespace (an argparse.Namespace);
445 reads these attributes:
448 - <name_dstype> for each data ID argument with a dynamic dataset type registered using
450 and modifies these attributes:
451 - <name> for each data ID argument registered using add_id_argument
453 for dataIdArgument
in self._dataIdArgDict.itervalues():
454 dataIdContainer = getattr(namespace, dataIdArgument.name)
455 dataIdContainer.setDatasetType(dataIdArgument.getDatasetType(namespace))
457 dataIdContainer.castDataIds(butler = namespace.butler)
458 except (KeyError, TypeError)
as e:
462 if dataIdArgument.doMakeDataRefList:
463 dataIdContainer.makeDataRefList(namespace)
466 """!Apply obs-package-specific and camera-specific config override files, if found
468 @param[in] namespace parsed namespace (an argparse.Namespace);
469 reads these attributes:
472 Look in the package namespace.obsPkg for files:
473 - config/<task_name>.py
474 - config/<camera_name>/<task_name>.py
477 obsPkgDir = eups.productDir(namespace.obsPkg)
478 fileName = self.
_name +
".py"
480 raise RuntimeError(
"Must set up %r" % (namespace.obsPkg,))
482 os.path.join(obsPkgDir,
"config", fileName),
483 os.path.join(obsPkgDir,
"config", namespace.camera, fileName),
485 if os.path.exists(filePath):
486 namespace.log.info(
"Loading config overrride file %r" % (filePath,))
487 namespace.config.load(filePath)
489 namespace.log.info(
"Config override file does not exist: %r" % (filePath,))
492 """!Perform camera-specific operations before parsing the command line.
494 The default implementation does nothing.
496 @param[in,out] namespace namespace (an argparse.Namespace) with the following fields:
497 - camera: the camera name
498 - config: the config passed to parse_args, with no overrides applied
499 - obsPkg: the obs_ package for this camera
500 - log: a pex_logging log
505 """!Allow files of arguments referenced by `@<path>` to contain multiple values on each line
507 @param[in] arg_line line of text read from an argument file
509 arg_line = arg_line.strip()
510 if not arg_line
or arg_line.startswith(
"#"):
512 for arg
in shlex.split(arg_line, comments=
True, posix=
True):
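# Illustrative sketch (not part of the original module): how lines of an @file
# argument file are split by convert_arg_line_to_args. The parser name, config
# field and data ID below are hypothetical.
def _exampleConvertArgLines():
    parser = ArgumentParser(name="exampleTask")
    lines = [
        "--config calibrate.doAstrometry=False  # trailing comment is ignored",
        "",                                     # blank lines are skipped
        "# full-line comments are skipped too",
        "--id visit=1^2",
    ]
    args = []
    for line in lines:
        args += list(parser.convert_arg_line_to_args(line))
    return args  # ["--config", "calibrate.doAstrometry=False", "--id", "visit=1^2"]
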
518 """!Get a dictionary of task info for all subtasks in a config
520 Designed to be called recursively; the user should call with only a config
521 (leaving taskDict and baseName at their default values).
523 @param[in] config configuration to process, an instance of lsst.pex.config.Config
524 @param[in,out] taskDict users should not specify this argument;
525 (supports recursion; if provided, taskDict is updated in place, else a new dict is started)
526 @param[in] baseName users should not specify this argument.
527 (supports recursion: if a non-empty string then a period is appended and the result is used
528 as a prefix for additional entries in taskDict; otherwise no prefix is used)
529 @return taskDict: a dict of config field name: task name
533 for fieldName, field
in config.iteritems():
534 if hasattr(field,
"value")
and hasattr(field,
"target"):
535 subConfig = field.value
536 if isinstance(subConfig, pexConfig.Config):
537 subBaseName =
"%s.%s" % (baseName, fieldName)
if baseName
else fieldName
539 taskName =
"%s.%s" % (field.target.__module__, field.target.__name__)
541 taskName = repr(field.target)
542 taskDict[subBaseName] = taskName
543 getTaskDict(config=subConfig, taskDict=taskDict, baseName=subBaseName)
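# Example (illustrative; the field and task names are hypothetical): for a config
# whose "calibrate" field is a configurable subtask, getTaskDict(config) returns
# entries such as
#     {"calibrate": "lsst.pipe.tasks.calibrate.CalibrateTask",
#      "calibrate.repair": "lsst.pipe.tasks.repair.RepairTask"}
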
547 """!Process arguments specified with --show (but ignores "data")
549 @param showOpts List of options passed to --show
550 @param config The provided config
551 @param exit Exit if "run" isn't included in showOpts
553 Supports the following options in showOpts:
554 - config[=PAT] Dump all the config entries, or just the ones that match the glob pattern
555 - tasks Show task hierarchy
556 - data Ignored; to be processed by caller
557 - run Keep going (the default behaviour is to exit if --show is specified)
559 Calls sys.exit(1) if any other option found.
564 for what
in showOpts:
565 mat = re.search(
r"^config(?:=(.+))?", what)
567 pattern = mat.group(1)
569 class FilteredStream(object):
570 """A file object that only prints lines that match the glob "pattern"
572 N.b. Newlines are silently discarded and reinserted; crude but effective.
574 def __init__(self, pattern):
575 self._pattern = pattern
577 def write(self, str):
579 if str
and fnmatch.fnmatch(str, self._pattern):
582 fd = FilteredStream(pattern)
586 config.saveToStream(fd,
"config")
591 elif what ==
"tasks":
594 print >> sys.stderr,
"Unknown value for show: %s (choose from '%s')" % \
595 (what,
"', '".join(
"config[=XXX] data tasks run".split()))
598 if exit
and "run" not in showOpts:
602 """!Print task hierarchy to stdout
604 @param[in] config: configuration to process (an lsst.pex.config.Config)
609 fieldNameList = sorted(taskDict.keys())
610 for fieldName
in fieldNameList:
611 taskName = taskDict[fieldName]
612 print "%s: %s" % (fieldName, taskName)
615 """!argparse action callback to override config parameters using name=value pairs from the command line
617 def __call__(self, parser, namespace, values, option_string):
618 """!Override one or more config name value pairs
620 @param[in] parser argument parser (instance of ArgumentParser)
621 @param[in,out] namespace parsed command (an instance of argparse.Namespace);
624 @param[in] values a list of configItemName=value pairs
625 @param[in] option_string option value specified by the user (a str)
627 if namespace.config
is None:
629 for nameValue
in values:
630 name, sep, valueStr = nameValue.partition(
"=")
632 parser.error(
"%s value %s must be in form name=value" % (option_string, nameValue))
637 except AttributeError:
638 parser.error(
"no config field: %s" % (name,))
641 value = eval(valueStr, {})
643 parser.error(
"cannot parse %r as a value for %s" % (valueStr, name))
647 parser.error(
"cannot set config.%s=%r: %s" % (name, value, e))
650 """!argparse action to load config overrides from one or more files
652 def __call__(self, parser, namespace, values, option_string=None):
653 """!Load one or more files of config overrides
655 @param[in] parser argument parser (instance of ArgumentParser)
656 @param[in,out] namespace parsed command (an instance of argparse.Namespace);
659 @param[in] values a list of data config file paths
660 @param[in] option_string option value specified by the user (a str)
662 if namespace.config
is None:
664 for configfile
in values:
666 namespace.config.load(configfile)
668 parser.error(
"cannot load config file %r: %s" % (configfile, e))
672 """!argparse action callback to process a data ID into a dict
674 def __call__(self, parser, namespace, values, option_string):
675 """!Parse --id data and append results to namespace.<argument>.idList
677 @param[in] parser argument parser (instance of ArgumentParser)
678 @param[in,out] namespace parsed command (an instance of argparse.Namespace);
680 - <idName>.idList, where <idName> is the name of the ID argument,
681 for instance "id" for ID argument --id
682 @param[in] values a list of data IDs; see data format below
683 @param[in] option_string option value specified by the user (a str)
686 key1=value1_1[^value1_2[^value1_3...] key2=value2_1[^value2_2[^value2_3...]...
688 The values (e.g. value1_1) may either be a string, or of the form "int..int" (e.g. "1..3")
689 which is interpreted as "1^2^3" (inclusive, unlike a python range). So "0^2..4^7..9" is
690 equivalent to "0^2^3^4^7^8^9". You may also specify a stride: "1..5:2" is "1^3^5"
692 The cross product is computed for keys with multiple values. For example:
693 --id visit 1^2 ccd 1,1^2,2
694 results in the following data ID dicts being appended to namespace.<argument>.idList:
695 {"visit":1, "ccd":"1,1"}
696 {"visit":2, "ccd":"1,1"}
697 {"visit":1, "ccd":"2,2"}
698 {"visit":2, "ccd":"2,2"}
700 if namespace.config
is None:
702 idDict = collections.OrderedDict()
703 for nameValue
in values:
704 name, sep, valueStr = nameValue.partition(
"=")
706 parser.error(
"%s appears multiple times in one ID argument: %s" % (name, option_string))
708 for v
in valueStr.split(
"^"):
709 mat = re.search(
r"^(\d+)\.\.(\d+)(?::(\d+))?$", v)
711 v1 = int(mat.group(1))
712 v2 = int(mat.group(2))
713 v3 = mat.group(3); v3 = int(v3)
if v3
else 1
714 for v
in range(v1, v2 + 1, v3):
715 idDict[name].append(str(v))
717 idDict[name].append(v)
719 keyList = idDict.keys()
720 iterList = [idDict[key]
for key
in keyList]
721 idDictList = [collections.OrderedDict(zip(keyList, valList))
722 for valList
in itertools.product(*iterList)]
724 argName = option_string.lstrip(
"-")
725 ident = getattr(namespace, argName)
726 ident.idList += idDictList
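# Illustrative sketch (not part of the original module): the value expansion and
# cross product that IdValueAction performs for an argument such as
#     --id visit=1^2 ccd=1,1^2,2
# The helper below is hypothetical and only mirrors the logic documented above.
def _exampleExpandDataId():
    idDict = collections.OrderedDict()
    for nameValue in ["visit=1^2", "ccd=1,1^2,2"]:
        name, sep, valueStr = nameValue.partition("=")
        idDict[name] = []
        for v in valueStr.split("^"):
            mat = re.search(r"^(\d+)\.\.(\d+)(?::(\d+))?$", v)
            if mat:
                v1, v2 = int(mat.group(1)), int(mat.group(2))
                v3 = int(mat.group(3)) if mat.group(3) else 1
                idDict[name] += [str(i) for i in range(v1, v2 + 1, v3)]
            else:
                idDict[name].append(v)
    keyList = idDict.keys()
    # cross product: four dicts, e.g. {"visit": "1", "ccd": "1,1"}, {"visit": "2", "ccd": "1,1"}, ...
    return [collections.OrderedDict(zip(keyList, valList))
            for valList in itertools.product(*[idDict[key] for key in keyList])]
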
729 """!argparse action to set trace level
731 def __call__(self, parser, namespace, values, option_string):
734 @param[in] parser argument parser (instance of ArgumentParser)
735 @param[in] namespace parsed command (an instance of argparse.Namespace); ignored
736 @param[in] values a list of trace levels;
737 each item must be of the form component_name=level
738 @param[in] option_string option value specified by the user (a str)
740 for componentLevel
in values:
741 component, sep, levelStr = componentLevel.partition(
"=")
743 parser.error(
"%s level %s must be in form component=level" % (option_string, componentLevel))
745 level = int(levelStr)
747 parser.error(
"cannot parse %r as an integer level for %s" % (levelStr, component))
748 pexLog.Trace.setVerbosity(component, level)
753 """!Like setattr, but accepts hierarchical names, e.g. foo.bar.baz
755 @param[in,out] item object whose attribute is to be set
756 @param[in] name name of item to set
757 @param[in] value new value for the item
759 For example if name is foo.bar.baz then item.foo.bar.baz is set to the specified value.
762 subnameList = name.split(
".")
763 for subname
in subnameList[:-1]:
764 subitem = getattr(subitem, subname)
765 setattr(subitem, subnameList[-1], value)
768 """!Like getattr, but accepts hierarchical names, e.g. foo.bar.baz
770 @param[in] item object whose attribute is to be returned
771 @param[in] name name of item to get
773 For example if name is foo.bar.baz then returns item.foo.bar.baz
776 for subname
in name.split(
"."):
777 subitem = getattr(subitem, subname)
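# Illustrative sketch (not part of the original module): how setDottedAttr and
# getDottedAttr walk hierarchical names. The `_Namespace` helper is a
# hypothetical stand-in for a nested config object.
def _exampleDottedAttr():
    class _Namespace(object):
        pass
    root = _Namespace()
    root.foo = _Namespace()
    root.foo.bar = _Namespace()
    setDottedAttr(root, "foo.bar.baz", 3)       # equivalent to root.foo.bar.baz = 3
    return getDottedAttr(root, "foo.bar.baz")   # returns 3
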
781 """!Return True if data exists at the current level or any data exists at a deeper level, False otherwise
783 @param[in] butler data butler (a \ref lsst.daf.persistence.butler.Butler
784 "lsst.daf.persistence.Butler")
785 @param[in] datasetType dataset type (a str)
786 @param[in] dataRef butler data reference (a \ref lsst.daf.persistence.butlerSubset.ButlerDataRef
787 "lsst.daf.persistence.ButlerDataRef")
789 subDRList = dataRef.subItems()
791 for subDR
in subDRList:
796 return butler.datasetExists(datasetType = datasetType, dataId = dataRef.dataId)