demoTask.py
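This example defines DemoTask, a minimal BatchPoolTask. The master node runs runDataRef, which collects the per-CCD data IDs for a visit and uses a Pool to scatter self.run over the slave nodes; each slave reads one raw image and returns its pixel count, and the master reduces the counts with operator.add and logs the total. batchWallTime estimates the walltime to request from the batch system from the number of CCDs in each visit and the number of cores available.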
import math
import collections
import operator
from lsst.ctrl.pool.parallel import BatchPoolTask
from lsst.ctrl.pool.pool import Pool
from lsst.pipe.base import ArgumentParser
from lsst.pex.config import Config

__all__ = ["DemoTask", ]

13  """Task for demonstrating the BatchPoolTask functionality"""
14  ConfigClass = Config
15  _DefaultName = "demo"
16 
17  @classmethod
18  def _makeArgumentParser(cls, *args, **kwargs):
19  kwargs.pop('doBatch', False) # Unused
20  parser = ArgumentParser(name="demo", *args, **kwargs)
21  parser.add_id_argument("--id", datasetType="raw", level="visit",
22  help="data ID, e.g. --id visit=12345")
23  return parser
24 
25  @classmethod
26  def batchWallTime(cls, time, parsedCmd, numCores):
27  """Return walltime request for batch job
28 
29  Subclasses should override if the walltime should be calculated
30  differently (e.g., addition of some serial time).
31 
32  @param time: Requested time per iteration
33  @param parsedCmd: Results of argument parsing
34  @param numCores: Number of cores
35  """
36  numTargets = [sum(1 for ccdRef in visitRef.subItems("ccd") if ccdRef.datasetExists("raw")) for
37  visitRef in parsedCmd.id.refList]
38  return time*sum(math.ceil(tt/numCores) for tt in numTargets)
39 
40  def runDataRef(self, visitRef):
41  """Main entry-point
42 
43  Only the master node runs this method. It will dispatch jobs to the
44  slave nodes.
45  """
46  pool = Pool("test")
47 
48  # Less overhead to transfer the butler once rather than in each dataRef
49  dataIdList = dict([(ccdRef.get("ccdExposureId"), ccdRef.dataId)
50  for ccdRef in visitRef.subItems("ccd") if ccdRef.datasetExists("raw")])
51  dataIdList = collections.OrderedDict(sorted(dataIdList.items()))
52 
53  with self.logOperation("master"):
54  total = pool.reduce(operator.add, self.run, list(dataIdList.values()),
55  butler=visitRef.getButler())
56  self.log.info("Total number of pixels read: %d" % (total,))
57 
58  def run(self, cache, dataId, butler=None):
59  """Read image and return number of pixels
60 
61  Only the slave nodes run this method.
62  """
63  assert butler is not None
64  with self.logOperation("read %s" % (dataId,)):
65  raw = butler.get("raw", dataId, immediate=True)
66  dims = raw.getDimensions()
67  num = dims.getX()*dims.getY()
68  self.log.info("Read %d pixels for %s" % (num, dataId,))
69  return num
70 
71  def _getConfigName(self):
72  return None
73 
74  def _getMetadataName(self):
75  return None
76 
77  def _getEupsVersionsName(self):
78  return None
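The task is intended to be launched like other batch-capable command-line tasks, through a small driver script. Below is a minimal sketch of such a driver; the script name demoDriver.py is hypothetical, and it assumes the parseAndSubmit() entry point that lsst.ctrl.pool.parallel provides for batch command-line tasks (check your stack version for the exact interface).

    #!/usr/bin/env python
    # demoDriver.py -- hypothetical driver script: parses the data IDs plus
    # batch options and submits the job to the configured batch system.
    from demoTask import DemoTask

    DemoTask.parseAndSubmit()

An invocation might then look roughly like the following, where the repository path, visit number, and batch option values are purely illustrative and the flag names may differ between stack versions:

    demoDriver.py /path/to/repo --id visit=12345 --nodes 2 --procs 8 --time 60 --batch-type slurm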