Loading [MathJax]/extensions/tex2jax.js
LSSTApplications  18.0.0+106,18.0.0+50,19.0.0,19.0.0+1,19.0.0+10,19.0.0+11,19.0.0+13,19.0.0+17,19.0.0+2,19.0.0-1-g20d9b18+6,19.0.0-1-g425ff20,19.0.0-1-g5549ca4,19.0.0-1-g580fafe+6,19.0.0-1-g6fe20d0+1,19.0.0-1-g7011481+9,19.0.0-1-g8c57eb9+6,19.0.0-1-gb5175dc+11,19.0.0-1-gdc0e4a7+9,19.0.0-1-ge272bc4+6,19.0.0-1-ge3aa853,19.0.0-10-g448f008b,19.0.0-12-g6990b2c,19.0.0-2-g0d9f9cd+11,19.0.0-2-g3d9e4fb2+11,19.0.0-2-g5037de4,19.0.0-2-gb96a1c4+3,19.0.0-2-gd955cfd+15,19.0.0-3-g2d13df8,19.0.0-3-g6f3c7dc,19.0.0-4-g725f80e+11,19.0.0-4-ga671dab3b+1,19.0.0-4-gad373c5+3,19.0.0-5-ga2acb9c+2,19.0.0-5-gfe96e6c+2,w.2020.01
LSSTDataManagementBasePackage
All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Modules Pages
ptc.py
Go to the documentation of this file.
1 # This file is part of cp_pipe.
2 #
3 # Developed for the LSST Data Management System.
4 # This product includes software developed by the LSST Project
5 # (https://www.lsst.org).
6 # See the COPYRIGHT file at the top-level directory of this distribution
7 # for details of code ownership.
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the GNU General Public License
20 # along with this program. If not, see <https://www.gnu.org/licenses/>.
21 #
22 
23 __all__ = ['MeasurePhotonTransferCurveTask',
24  'MeasurePhotonTransferCurveTaskConfig',
25  'PhotonTransferCurveDataset']
26 
27 import numpy as np
28 import matplotlib.pyplot as plt
29 import os
30 from matplotlib.backends.backend_pdf import PdfPages
31 from sqlite3 import OperationalError
32 from collections import Counter
33 
34 import lsst.afw.math as afwMath
35 import lsst.pex.config as pexConfig
36 import lsst.pipe.base as pipeBase
37 from lsst.ip.isr import IsrTask
38 from .utils import (NonexistentDatasetTaskDataIdContainer, PairedVisitListTaskRunner,
39  checkExpLengthEqual, validateIsrConfig)
40 from scipy.optimize import leastsq, least_squares
41 import numpy.polynomial.polynomial as poly
42 
43 
class MeasurePhotonTransferCurveTaskConfig(pexConfig.Config):
    """Config class for photon transfer curve measurement task"""
    isr = pexConfig.ConfigurableField(
        target=IsrTask,
        doc="""Task to perform instrumental signature removal.""",
    )
    # The four ISR step lists below are consumed by validateIsrConfig(),
    # which is called at task construction time, so a bad ISR configuration
    # fails fast rather than producing invalid PTC results.
    isrMandatorySteps = pexConfig.ListField(
        dtype=str,
        doc="isr operations that must be performed for valid results. Raises if any of these are False.",
        default=['doAssembleCcd']
    )
    isrForbiddenSteps = pexConfig.ListField(
        dtype=str,
        doc="isr operations that must NOT be performed for valid results. Raises if any of these are True",
        default=['doFlat', 'doFringe', 'doAddDistortionModel', 'doBrighterFatter', 'doUseOpticsTransmission',
                 'doUseFilterTransmission', 'doUseSensorTransmission', 'doUseAtmosphereTransmission']
    )
    isrDesirableSteps = pexConfig.ListField(
        dtype=str,
        doc="isr operations that it is advisable to perform, but are not mission-critical." +
            " WARNs are logged for any of these found to be False.",
        default=['doBias', 'doDark', 'doCrosstalk', 'doDefect']
    )
    isrUndesirableSteps = pexConfig.ListField(
        dtype=str,
        doc="isr operations that it is *not* advisable to perform in the general case, but are not" +
            " forbidden as some use-cases might warrant them." +
            " WARNs are logged for any of these found to be True.",
        default=['doLinearize']
    )
    ccdKey = pexConfig.Field(
        dtype=str,
        doc="The key by which to pull a detector from a dataId, e.g. 'ccd' or 'detector'.",
        default='ccd',
    )
    makePlots = pexConfig.Field(
        dtype=bool,
        doc="Plot the PTC curves?",
        default=False,
    )
    # PTC model selection and fitting options (used by fitPtcAndNl).
    ptcFitType = pexConfig.ChoiceField(
        dtype=str,
        doc="Fit PTC to approximation in Astier+19 (Equation 16) or to a polynomial.",
        default="POLYNOMIAL",
        allowed={
            "POLYNOMIAL": "n-degree polynomial (use 'polynomialFitDegree' to set 'n').",
            "ASTIERAPPROXIMATION": "Approximation in Astier+19 (Eq. 16)."
        }
    )
    polynomialFitDegree = pexConfig.Field(
        dtype=int,
        doc="Degree of polynomial to fit the PTC, when 'ptcFitType'=POLYNOMIAL.",
        default=2,
    )
    binSize = pexConfig.Field(
        dtype=int,
        doc="Bin the image by this factor in both dimensions.",
        default=1,
    )
    # Signal-range selection and outlier-rejection controls.
    minMeanSignal = pexConfig.Field(
        dtype=float,
        doc="Minimum value (inclusive) of mean signal (in ADU) above which to consider.",
        default=0,
    )
    maxMeanSignal = pexConfig.Field(
        dtype=float,
        doc="Maximum value (inclusive) of mean signal (in ADU) below which to consider.",
        default=9e6,
    )
    initialNonLinearityExclusionThresholdPositive = pexConfig.RangeField(
        dtype=float,
        doc="Initially exclude data points with a variance that are more than a factor of this from being"
            " linear in the positive direction, from the PTC fit. Note that these points will also be"
            " excluded from the non-linearity fit. This is done before the iterative outlier rejection,"
            " to allow an accurate determination of the sigmas for said iterative fit.",
        default=0.12,
        min=0.0,
        max=1.0,
    )
    initialNonLinearityExclusionThresholdNegative = pexConfig.RangeField(
        dtype=float,
        doc="Initially exclude data points with a variance that are more than a factor of this from being"
            " linear in the negative direction, from the PTC fit. Note that these points will also be"
            " excluded from the non-linearity fit. This is done before the iterative outlier rejection,"
            " to allow an accurate determination of the sigmas for said iterative fit.",
        default=0.25,
        min=0.0,
        max=1.0,
    )
    sigmaCutPtcOutliers = pexConfig.Field(
        dtype=float,
        doc="Sigma cut for outlier rejection in PTC.",
        default=5.0,
    )
    maxIterationsPtcOutliers = pexConfig.Field(
        dtype=int,
        doc="Maximum number of iterations for outlier rejection in PTC.",
        default=2,
    )
    doFitBootstrap = pexConfig.Field(
        dtype=bool,
        doc="Use bootstrap for the PTC fit parameters and errors?.",
        default=False,
    )
    linResidualTimeIndex = pexConfig.Field(
        dtype=int,
        doc="Index position in time array for reference time in linearity residual calculation.",
        default=2,
    )
153 
154 
class PhotonTransferCurveDataset:
    """A simple class to hold the output data from the PTC task.

    The dataset is made up of a dictionary for each item, keyed by the
    amplifiers' names, which must be supplied at construction time.

    New items cannot be added to the class to prevent accidentally saving to
    the wrong property, and the class can be frozen if desired.

    inputVisitPairs records the visits used to produce the data.
    When fitPtcAndNl() is run, a mask is built up, which is by definition
    always the same length as inputVisitPairs, rawExpTimes, rawMeans
    and rawVars, and is a list of bools, which are incrementally set to False
    as points are discarded from the fits.

    PTC fit parameters for polynomials are stored in a list in ascending order
    of polynomial term, i.e. par[0]*x^0 + par[1]*x + par[2]*x^2 etc
    with the length of the list corresponding to the order of the polynomial
    plus one.
    """
    def __init__(self, ampNames):
        # add items to __dict__ directly because __setattr__ is overridden

        # instance variables
        self.__dict__["ampNames"] = ampNames
        self.__dict__["badAmps"] = []

        # raw data variables
        self.__dict__["inputVisitPairs"] = {ampName: [] for ampName in ampNames}
        self.__dict__["visitMask"] = {ampName: [] for ampName in ampNames}
        self.__dict__["rawExpTimes"] = {ampName: [] for ampName in ampNames}
        self.__dict__["rawMeans"] = {ampName: [] for ampName in ampNames}
        self.__dict__["rawVars"] = {ampName: [] for ampName in ampNames}

        # fit information
        self.__dict__["ptcFitType"] = {ampName: "" for ampName in ampNames}
        self.__dict__["ptcFitPars"] = {ampName: [] for ampName in ampNames}
        self.__dict__["ptcFitParsError"] = {ampName: [] for ampName in ampNames}
        self.__dict__["nonLinearity"] = {ampName: [] for ampName in ampNames}
        self.__dict__["nonLinearityError"] = {ampName: [] for ampName in ampNames}
        self.__dict__["nonLinearityResiduals"] = {ampName: [] for ampName in ampNames}

        # final results
        self.__dict__["gain"] = {ampName: -1. for ampName in ampNames}
        self.__dict__["gainErr"] = {ampName: -1. for ampName in ampNames}
        self.__dict__["noise"] = {ampName: -1. for ampName in ampNames}
        self.__dict__["noiseErr"] = {ampName: -1. for ampName in ampNames}

    def __setattr__(self, attribute, value):
        """Protect class attributes"""
        if attribute not in self.__dict__:
            raise AttributeError(f"{attribute} is not already a member of PhotonTransferCurveDataset, which"
                                 " does not support setting of new attributes.")
        else:
            self.__dict__[attribute] = value

    def getVisitsUsed(self, ampName):
        """Get the visits used, i.e. not discarded, for a given amp.

        If no mask has been created yet, all visits are returned.
        """
        # Use len() rather than comparing with [] so this also works when the
        # mask is a numpy bool array (as stored by fitPtcAndNl), for which
        # "== []" does not evaluate to a plain bool.
        if len(self.visitMask[ampName]) == 0:
            return self.inputVisitPairs[ampName]

        # if the mask exists it had better be the same length as the visitPairs
        assert len(self.visitMask[ampName]) == len(self.inputVisitPairs[ampName])

        pairs = self.inputVisitPairs[ampName]
        mask = self.visitMask[ampName]
        # cast to bool required because numpy
        return [(v1, v2) for ((v1, v2), m) in zip(pairs, mask) if bool(m) is True]

    def getGoodAmps(self):
        """Return the amp names not flagged as bad by the fit."""
        return [amp for amp in self.ampNames if amp not in self.badAmps]
230 
231 class MeasurePhotonTransferCurveTask(pipeBase.CmdLineTask):
232  """A class to calculate, fit, and plot a PTC from a set of flat pairs.
233 
234  The Photon Transfer Curve (var(signal) vs mean(signal)) is a standard tool
235  used in astronomical detectors characterization (e.g., Janesick 2001,
236  Janesick 2007). This task calculates the PTC from a series of pairs of
237  flat-field images; each pair taken at identical exposure times. The
238  difference image of each pair is formed to eliminate fixed pattern noise,
239  and then the variance of the difference image and the mean of the average image
240  are used to produce the PTC. An n-degree polynomial or the approximation in Equation
241  16 of Astier+19 ("The Shape of the Photon Transfer Curve of CCD sensors",
242  arXiv:1905.08677) can be fitted to the PTC curve. These models include
243  parameters such as the gain (e/ADU) and readout noise.
244 
245  Parameters
246  ----------
247 
248  *args: `list`
249  Positional arguments passed to the Task constructor. None used at this
250  time.
251  **kwargs: `dict`
252  Keyword arguments passed on to the Task constructor. None used at this
253  time.
254 
255  """
256 
257  RunnerClass = PairedVisitListTaskRunner
258  ConfigClass = MeasurePhotonTransferCurveTaskConfig
259  _DefaultName = "measurePhotonTransferCurve"
260 
    def __init__(self, *args, **kwargs):
        """Construct the task: make the ISR subtask and validate the config.

        All positional and keyword arguments are passed straight through to
        `pipeBase.CmdLineTask.__init__`.
        """
        pipeBase.CmdLineTask.__init__(self, *args, **kwargs)
        self.makeSubtask("isr")
        plt.interactive(False)  # stop windows popping up when plotting. When headless, use 'agg' backend too
        # Fail early if the ISR configuration would invalidate the PTC
        # measurement (see the isr*Steps config fields).
        validateIsrConfig(self.isr, self.config.isrMandatorySteps,
                          self.config.isrForbiddenSteps, self.config.isrDesirableSteps, checkTrim=False)
        self.config.validate()
        self.config.freeze()
269 
    @classmethod
    def _makeArgumentParser(cls):
        """Augment argument parser for the MeasurePhotonTransferCurveTask."""
        parser = pipeBase.ArgumentParser(name=cls._DefaultName)
        # Visit pairs are parsed by the task runner (PairedVisitListTaskRunner).
        parser.add_argument("--visit-pairs", dest="visitPairs", nargs="*",
                            help="Visit pairs to use. Each pair must be of the form INT,INT e.g. 123,456")
        # The output dataset does not exist yet, hence the custom container class.
        parser.add_id_argument("--id", datasetType="photonTransferCurveDataset",
                               ContainerClass=NonexistentDatasetTaskDataIdContainer,
                               help="The ccds to use, e.g. --id ccd=0..100")
        return parser
280 
281  @pipeBase.timeMethod
282  def runDataRef(self, dataRef, visitPairs):
283  """Run the Photon Transfer Curve (PTC) measurement task.
284 
285  For a dataRef (which is each detector here),
286  and given a list of visit pairs at different exposure times,
287  measure the PTC.
288 
289  Parameters
290  ----------
291  dataRef : list of lsst.daf.persistence.ButlerDataRef
292  dataRef for the detector for the visits to be fit.
293  visitPairs : `iterable` of `tuple` of `int`
294  Pairs of visit numbers to be processed together
295  """
296 
297  # setup necessary objects
298  detNum = dataRef.dataId[self.config.ccdKey]
299  detector = dataRef.get('camera')[dataRef.dataId[self.config.ccdKey]]
300  # expand some missing fields that we need for lsstCam. This is a work-around
301  # for Gen2 problems that I (RHL) don't feel like solving. The calibs pipelines
302  # (which inherit from CalibTask) use addMissingKeys() to do basically the same thing
303  #
304  # Basically, the butler's trying to look up the fields in `raw_visit` which won't work
305  for name in dataRef.getButler().getKeys('bias'):
306  if name not in dataRef.dataId:
307  try:
308  dataRef.dataId[name] = \
309  dataRef.getButler().queryMetadata('raw', [name], detector=detNum)[0]
310  except OperationalError:
311  pass
312 
313  amps = detector.getAmplifiers()
314  ampNames = [amp.getName() for amp in amps]
315  dataset = PhotonTransferCurveDataset(ampNames)
316 
317  self.log.info('Measuring PTC using %s visits for detector %s' % (visitPairs, detNum))
318 
319  for (v1, v2) in visitPairs:
320  # Perform ISR on each exposure
321  dataRef.dataId['visit'] = v1
322  exp1 = self.isr.runDataRef(dataRef).exposure
323  dataRef.dataId['visit'] = v2
324  exp2 = self.isr.runDataRef(dataRef).exposure
325  del dataRef.dataId['visit']
326 
327  checkExpLengthEqual(exp1, exp2, v1, v2, raiseWithMessage=True)
328  expTime = exp1.getInfo().getVisitInfo().getExposureTime()
329 
330  for amp in detector:
331  mu, varDiff = self.measureMeanVarPair(exp1, exp2, region=amp.getBBox())
332  ampName = amp.getName()
333 
334  dataset.rawExpTimes[ampName].append(expTime)
335  dataset.rawMeans[ampName].append(mu)
336  dataset.rawVars[ampName].append(varDiff)
337  dataset.inputVisitPairs[ampName].append((v1, v2))
338 
339  # Fit PTC and (non)linearity of signal vs time curve
340  # dataset is modified in place but also returned for external code
341  dataset = self.fitPtcAndNl(dataset, ptcFitType=self.config.ptcFitType)
342 
343  if self.config.makePlots:
344  self.plot(dataRef, dataset, ptcFitType=self.config.ptcFitType)
345 
346  # Save data, PTC fit, and NL fit dictionaries
347  self.log.info(f"Writing PTC and NL data to {dataRef.getUri(write=True)}")
348  dataRef.put(dataset, datasetType="photonTransferCurveDataset")
349 
350  self.log.info('Finished measuring PTC for in detector %s' % detNum)
351 
352  return pipeBase.Struct(exitStatus=0)
353 
    def measureMeanVarPair(self, exposure1, exposure2, region=None):
        """Calculate the mean signal of two exposures and the variance of their difference.

        Parameters
        ----------
        exposure1 : `lsst.afw.image.exposure.exposure.ExposureF`
            First exposure of flat field pair.

        exposure2 : `lsst.afw.image.exposure.exposure.ExposureF`
            Second exposure of flat field pair.

        region : `lsst.geom.Box2I`, optional
            Region of each exposure where to perform the calculations (e.g, an amplifier).
            If None, the full maskedImage of each exposure is used.

        Return
        ------
        mu : `np.float`
            0.5*(mu1 + mu2), where mu1, and mu2 are the clipped means of the regions in
            both exposures.

        varDiff : `np.float`
            Half of the clipped variance of the difference of the regions in the two input
            exposures.
        """

        if region is not None:
            im1Area = exposure1.maskedImage[region]
            im2Area = exposure2.maskedImage[region]
        else:
            im1Area = exposure1.maskedImage
            im2Area = exposure2.maskedImage

        # Optional binning (config.binSize=1 is effectively a no-op) before statistics.
        im1Area = afwMath.binImage(im1Area, self.config.binSize)
        im2Area = afwMath.binImage(im2Area, self.config.binSize)

        # Clipped mean of images; then average of mean.
        mu1 = afwMath.makeStatistics(im1Area, afwMath.MEANCLIP).getValue()
        mu2 = afwMath.makeStatistics(im2Area, afwMath.MEANCLIP).getValue()
        mu = 0.5*(mu1 + mu2)

        # Take difference of pairs
        # symmetric formula: diff = (mu2*im1-mu1*im2)/(0.5*(mu1+mu2))
        # Built with in-place image arithmetic; the statement order matters:
        # 'temp' must hold mu1*im2 before it is subtracted from mu2*im1.
        temp = im2Area.clone()
        temp *= mu1
        diffIm = im1Area.clone()
        diffIm *= mu2
        diffIm -= temp
        diffIm /= mu

        # Half the clipped variance of the (normalised) difference image.
        varDiff = 0.5*(afwMath.makeStatistics(diffIm, afwMath.VARIANCECLIP).getValue())

        return mu, varDiff
407 
408  def _fitLeastSq(self, initialParams, dataX, dataY, function):
409  """Do a fit and estimate the parameter errors using using scipy.optimize.leastq.
410 
411  optimize.leastsq returns the fractional covariance matrix. To estimate the
412  standard deviation of the fit parameters, multiply the entries of this matrix
413  by the reduced chi squared and take the square root of the diagonal elements.
414 
415  Parameters
416  ----------
417  initialParams : list of np.float
418  initial values for fit parameters. For ptcFitType=POLYNOMIAL, its length
419  determines the degree of the polynomial.
420 
421  dataX : np.array of np.float
422  Data in the abscissa axis.
423 
424  dataY : np.array of np.float
425  Data in the ordinate axis.
426 
427  function : callable object (function)
428  Function to fit the data with.
429 
430  Return
431  ------
432  pFitSingleLeastSquares : list of np.float
433  List with fitted parameters.
434 
435  pErrSingleLeastSquares : list of np.float
436  List with errors for fitted parameters.
437  """
438 
439  def errFunc(p, x, y):
440  return function(p, x) - y
441 
442  pFit, pCov, infoDict, errMessage, success = leastsq(errFunc, initialParams,
443  args=(dataX, dataY), full_output=1, epsfcn=0.0001)
444 
445  if (len(dataY) > len(initialParams)) and pCov is not None:
446  reducedChiSq = (errFunc(pFit, dataX, dataY)**2).sum()/(len(dataY)-len(initialParams))
447  pCov *= reducedChiSq
448  else:
449  pCov[:, :] = np.inf
450 
451  errorVec = []
452  for i in range(len(pFit)):
453  errorVec.append(np.fabs(pCov[i][i])**0.5)
454 
455  pFitSingleLeastSquares = pFit
456  pErrSingleLeastSquares = np.array(errorVec)
457 
458  return pFitSingleLeastSquares, pErrSingleLeastSquares
459 
460  def _fitBootstrap(self, initialParams, dataX, dataY, function, confidenceSigma=1.):
461  """Do a fit using least squares and bootstrap to estimate parameter errors.
462 
463  The bootstrap error bars are calculated by fitting 100 random data sets.
464 
465  Parameters
466  ----------
467  initialParams : list of np.float
468  initial values for fit parameters. For ptcFitType=POLYNOMIAL, its length
469  determines the degree of the polynomial.
470 
471  dataX : np.array of np.float
472  Data in the abscissa axis.
473 
474  dataY : np.array of np.float
475  Data in the ordinate axis.
476 
477  function : callable object (function)
478  Function to fit the data with.
479 
480  confidenceSigma : np.float
481  Number of sigmas that determine confidence interval for the bootstrap errors.
482 
483  Return
484  ------
485  pFitBootstrap : list of np.float
486  List with fitted parameters.
487 
488  pErrBootstrap : list of np.float
489  List with errors for fitted parameters.
490  """
491 
492  def errFunc(p, x, y):
493  return function(p, x) - y
494 
495  # Fit first time
496  pFit, _ = leastsq(errFunc, initialParams, args=(dataX, dataY), full_output=0)
497 
498  # Get the stdev of the residuals
499  residuals = errFunc(pFit, dataX, dataY)
500  sigmaErrTotal = np.std(residuals)
501 
502  # 100 random data sets are generated and fitted
503  pars = []
504  for i in range(100):
505  randomDelta = np.random.normal(0., sigmaErrTotal, len(dataY))
506  randomDataY = dataY + randomDelta
507  randomFit, _ = leastsq(errFunc, initialParams,
508  args=(dataX, randomDataY), full_output=0)
509  pars.append(randomFit)
510  pars = np.array(pars)
511  meanPfit = np.mean(pars, 0)
512 
513  # confidence interval for parameter estimates
514  nSigma = confidenceSigma
515  errPfit = nSigma*np.std(pars, 0)
516  pFitBootstrap = meanPfit
517  pErrBootstrap = errPfit
518  return pFitBootstrap, pErrBootstrap
519 
520  def funcPolynomial(self, pars, x):
521  """Polynomial function definition"""
522  return poly.polyval(x, [*pars])
523 
524  def funcAstier(self, pars, x):
525  """Single brighter-fatter parameter model for PTC; Equation 16 of Astier+19"""
526  a00, gain, noise = pars
527  return 0.5/(a00*gain*gain)*(np.exp(2*a00*x*gain)-1) + noise/(gain*gain)
528 
529  @staticmethod
530  def _initialParsForPolynomial(order):
531  assert(order >= 2)
532  pars = np.zeros(order, dtype=np.float)
533  pars[0] = 10
534  pars[1] = 1
535  pars[2:] = 0.0001
536  return pars
537 
538  @staticmethod
539  def _boundsForPolynomial(initialPars):
540  lowers = [np.NINF for p in initialPars]
541  uppers = [np.inf for p in initialPars]
542  lowers[1] = 0 # no negative gains
543  return (lowers, uppers)
544 
545  @staticmethod
546  def _boundsForAstier(initialPars):
547  lowers = [np.NINF for p in initialPars]
548  uppers = [np.inf for p in initialPars]
549  return (lowers, uppers)
550 
551  @staticmethod
552  def _getInitialGoodPoints(means, variances, maxDeviationPositive, maxDeviationNegative):
553  """Return a boolean array to mask bad points.
554 
555  A linear function has a constant ratio, so find the median
556  value of the ratios, and exclude the points that deviate
557  from that by more than a factor of maxDeviationPositive/negative.
558  Asymmetric deviations are supported as we expect the PTC to turn
559  down as the flux increases, but sometimes it anomalously turns
560  upwards just before turning over, which ruins the fits, so it
561  is wise to be stricter about restricting positive outliers than
562  negative ones.
563 
564  Too high and points that are so bad that fit will fail will be included
565  Too low and the non-linear points will be excluded, biasing the NL fit."""
566  ratios = [b/a for (a, b) in zip(means, variances)]
567  medianRatio = np.median(ratios)
568  ratioDeviations = [(r/medianRatio)-1 for r in ratios]
569 
570  # so that it doesn't matter if the deviation is expressed as positive or negative
571  maxDeviationPositive = abs(maxDeviationPositive)
572  maxDeviationNegative = -1. * abs(maxDeviationNegative)
573 
574  goodPoints = np.array([True if (r < maxDeviationPositive and r > maxDeviationNegative)
575  else False for r in ratioDeviations])
576  return goodPoints
577 
578  def _makeZeroSafe(self, array, warn=True, substituteValue=1e-9):
579  """"""
580  nBad = Counter(array)[0]
581  if nBad == 0:
582  return array
583 
584  if warn:
585  msg = f"Found {nBad} zeros in array at elements {[x for x in np.where(array==0)[0]]}"
586  self.log.warn(msg)
587 
588  array[array == 0] = substituteValue
589  return array
590 
    def fitPtcAndNl(self, dataset, ptcFitType):
        """Fit the photon transfer curve and calculate linearity and residuals.

        Fit the photon transfer curve with either a polynomial of the order
        specified in the task config, or using the Astier approximation.

        Sigma clipping is performed iteratively for the fit, as well as an
        initial clipping of data points that are more than
        config.initialNonLinearityExclusionThreshold away from lying on a
        straight line. This other step is necessary because the photon transfer
        curve turns over catastrophically at very high flux (because saturation
        drops the variance to ~0) and these far outliers cause the initial fit
        to fail, meaning the sigma cannot be calculated to perform the
        sigma-clipping.

        Parameters
        ----------
        dataset : `lsst.cp.pipe.ptc.PhotonTransferCurveDataset`
            The dataset containing the means, variances and exposure times
        ptcFitType : `str`
            Fit a 'POLYNOMIAL' (degree: 'polynomialFitDegree') or
            'ASTIERAPPROXIMATION' to the PTC

        Returns
        -------
        dataset : `lsst.cp.pipe.ptc.PhotonTransferCurveDataset`
            The same dataset, modified in place and returned for convenience.
        """

        def errFunc(p, x, y):
            # Residuals of the current PTC model; ptcFunc is rebound per amp below.
            return ptcFunc(p, x) - y

        sigmaCutPtcOutliers = self.config.sigmaCutPtcOutliers
        maxIterationsPtcOutliers = self.config.maxIterationsPtcOutliers
        # NOTE(review): 'pars' is only assigned inside the while loop below, so
        # this presumably requires maxIterationsPtcOutliers >= 1 — confirm the
        # config cannot legitimately be 0.

        for ampName in dataset.ampNames:
            timeVecOriginal = np.array(dataset.rawExpTimes[ampName])
            meanVecOriginal = np.array(dataset.rawMeans[ampName])
            varVecOriginal = np.array(dataset.rawVars[ampName])
            # Zero variances would break the sqrt(var) normalisation below.
            varVecOriginal = self._makeZeroSafe(varVecOriginal)

            # Keep only points inside the configured mean-signal window.
            mask = ((meanVecOriginal >= self.config.minMeanSignal) &
                    (meanVecOriginal <= self.config.maxMeanSignal))

            # Initial coarse rejection of points far from linearity (see docstring).
            goodPoints = self._getInitialGoodPoints(meanVecOriginal, varVecOriginal,
                                                    self.config.initialNonLinearityExclusionThresholdPositive,
                                                    self.config.initialNonLinearityExclusionThresholdNegative)
            mask = mask & goodPoints

            # Select the PTC model; ptcFitType comes from a ChoiceField so
            # exactly one of these two branches is taken.
            if ptcFitType == 'ASTIERAPPROXIMATION':
                ptcFunc = self.funcAstier
                parsIniPtc = [-1e-9, 1.0, 10.]  # a00, gain, noise
                bounds = self._boundsForAstier(parsIniPtc)
            if ptcFitType == 'POLYNOMIAL':
                ptcFunc = self.funcPolynomial
                parsIniPtc = self._initialParsForPolynomial(self.config.polynomialFitDegree + 1)
                bounds = self._boundsForPolynomial(parsIniPtc)

            # Before bootstrap fit, do an iterative fit to get rid of outliers
            count = 1
            while count <= maxIterationsPtcOutliers:
                # Note that application of the mask actually shrinks the array
                # to size rather than setting elements to zero (as we want) so
                # always update mask itself and re-apply to the original data
                meanTempVec = meanVecOriginal[mask]
                varTempVec = varVecOriginal[mask]
                res = least_squares(errFunc, parsIniPtc, bounds=bounds, args=(meanTempVec, varTempVec))
                pars = res.x

                # change this to the original from the temp because the masks are ANDed
                # meaning once a point is masked it's always masked, and the masks must
                # always be the same length for broadcasting
                sigResids = (varVecOriginal - ptcFunc(pars, meanVecOriginal))/np.sqrt(varVecOriginal)
                newMask = np.array([True if np.abs(r) < sigmaCutPtcOutliers else False for r in sigResids])
                mask = mask & newMask

                nDroppedTotal = Counter(mask)[False]
                self.log.debug(f"Iteration {count}: discarded {nDroppedTotal} points in total for {ampName}")
                count += 1
                # objects should never shrink
                assert (len(mask) == len(timeVecOriginal) == len(meanVecOriginal) == len(varVecOriginal))

            dataset.visitMask[ampName] = mask  # store the final mask

            # Use the last outlier-rejected parameters to seed the final fit.
            parsIniPtc = pars
            timeVecFinal = timeVecOriginal[mask]
            meanVecFinal = meanVecOriginal[mask]
            varVecFinal = varVecOriginal[mask]

            if Counter(mask)[False] > 0:
                self.log.info((f"Number of points discarded in PTC of amplifier {ampName}:" +
                               f" {Counter(mask)[False]} out of {len(meanVecOriginal)}"))

            # Too few points left to constrain the model: flag the amp as bad
            # and fill all of its results with NaN.
            if (len(meanVecFinal) < len(parsIniPtc)):
                msg = (f"\nSERIOUS: Not enough data points ({len(meanVecFinal)}) compared to the number of"
                       f"parameters of the PTC model({len(parsIniPtc)}). Setting {ampName} to BAD.")
                self.log.warn(msg)
                dataset.badAmps.append(ampName)
                dataset.gain[ampName] = np.nan
                dataset.gainErr[ampName] = np.nan
                dataset.noise[ampName] = np.nan
                dataset.noiseErr[ampName] = np.nan
                dataset.nonLinearity[ampName] = np.nan
                dataset.nonLinearityError[ampName] = np.nan
                dataset.nonLinearityResiduals[ampName] = np.nan
                continue

            # Fit the PTC
            if self.config.doFitBootstrap:
                parsFit, parsFitErr = self._fitBootstrap(parsIniPtc, meanVecFinal, varVecFinal, ptcFunc)
            else:
                parsFit, parsFitErr = self._fitLeastSq(parsIniPtc, meanVecFinal, varVecFinal, ptcFunc)

            dataset.ptcFitPars[ampName] = parsFit
            dataset.ptcFitParsError[ampName] = parsFitErr

            # Translate fitted parameters into gain and noise; the mapping
            # depends on the model (Astier+19 Eq. 16 vs polynomial).
            if ptcFitType == 'ASTIERAPPROXIMATION':
                ptcGain = parsFit[1]
                ptcGainErr = parsFitErr[1]
                ptcNoise = np.sqrt(np.fabs(parsFit[2]))
                ptcNoiseErr = 0.5*(parsFitErr[2]/np.fabs(parsFit[2]))*np.sqrt(np.fabs(parsFit[2]))
            if ptcFitType == 'POLYNOMIAL':
                ptcGain = 1./parsFit[1]
                ptcGainErr = np.fabs(1./parsFit[1])*(parsFitErr[1]/parsFit[1])
                ptcNoise = np.sqrt(np.fabs(parsFit[0]))*ptcGain
                ptcNoiseErr = (0.5*(parsFitErr[0]/np.fabs(parsFit[0]))*(np.sqrt(np.fabs(parsFit[0]))))*ptcGain

            dataset.gain[ampName] = ptcGain
            dataset.gainErr[ampName] = ptcGainErr
            dataset.noise[ampName] = ptcNoise
            dataset.noiseErr[ampName] = ptcNoiseErr

            # Non-linearity residuals (NL of mean vs time curve): percentage, and fit to a quadratic function
            # In this case, len(parsIniNl) = 3 indicates that we want a quadratic fit
            parsIniNl = [1., 1., 1.]
            if self.config.doFitBootstrap:
                parsFit, parsFitErr = self._fitBootstrap(parsIniNl, timeVecFinal, meanVecFinal,
                                                         self.funcPolynomial)
            else:
                parsFit, parsFitErr = self._fitLeastSq(parsIniNl, timeVecFinal, meanVecFinal,
                                                       self.funcPolynomial)
            linResidualTimeIndex = self.config.linResidualTimeIndex
            if timeVecFinal[linResidualTimeIndex] == 0.0:
                raise RuntimeError("Reference time for linearity residual can't be 0.0")
            # Percentage residual of signal/time relative to the reference point.
            linResidual = 100*(1 - ((meanVecFinal[linResidualTimeIndex] /
                               timeVecFinal[linResidualTimeIndex]) / (meanVecFinal/timeVecFinal)))

            dataset.nonLinearity[ampName] = parsFit
            dataset.nonLinearityError[ampName] = parsFitErr
            dataset.nonLinearityResiduals[ampName] = linResidual

        return dataset
738 
    def plot(self, dataRef, dataset, ptcFitType):
        """Write the PTC plots for one detector to a multi-page PDF.

        The PDF is written to the butler's 'cpPipePlotRoot' location as
        PTC_det<N>.pdf, creating the directory if necessary.
        """
        dirname = dataRef.getUri(datasetType='cpPipePlotRoot', write=True)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        detNum = dataRef.dataId[self.config.ccdKey]
        filename = f"PTC_det{detNum}.pdf"
        filenameFull = os.path.join(dirname, filename)
        # All figures produced by _plotPtc are collected into a single PDF.
        with PdfPages(filenameFull) as pdfPages:
            self._plotPtc(dataset, ptcFitType, pdfPages)
749 
750  def _plotPtc(self, dataset, ptcFitType, pdfPages):
751  """Plot PTC, linearity, and linearity residual per amplifier"""
752 
753  if ptcFitType == 'ASTIERAPPROXIMATION':
754  ptcFunc = self.funcAstier
755  stringTitle = r"Var = $\frac{1}{2g^2a_{00}}(\exp (2a_{00} \mu g) - 1) + \frac{n_{00}}{g^2}$"
756 
757  if ptcFitType == 'POLYNOMIAL':
758  ptcFunc = self.funcPolynomial
759  stringTitle = f"Polynomial (degree: {self.config.polynomialFitDegree})"
760 
761  legendFontSize = 7.5
762  labelFontSize = 8
763  titleFontSize = 10
764  supTitleFontSize = 18
765  markerSize = 25
766 
767  # General determination of the size of the plot grid
768  nAmps = len(dataset.ampNames)
769  if nAmps == 2:
770  nRows, nCols = 2, 1
771  nRows = np.sqrt(nAmps)
772  mantissa, _ = np.modf(nRows)
773  if mantissa > 0:
774  nRows = int(nRows) + 1
775  nCols = nRows
776  else:
777  nRows = int(nRows)
778  nCols = nRows
779 
780  f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
781  f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
782 
783  for i, (amp, a, a2) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten())):
784  meanVecOriginal = np.array(dataset.rawMeans[amp])
785  varVecOriginal = np.array(dataset.rawVars[amp])
786  mask = dataset.visitMask[amp]
787  meanVecFinal = meanVecOriginal[mask]
788  varVecFinal = varVecOriginal[mask]
789  meanVecOutliers = meanVecOriginal[np.invert(mask)]
790  varVecOutliers = varVecOriginal[np.invert(mask)]
791  pars, parsErr = dataset.ptcFitPars[amp], dataset.ptcFitParsError[amp]
792 
793  if ptcFitType == 'ASTIERAPPROXIMATION':
794  ptcA00, ptcA00error = pars[0], parsErr[0]
795  ptcGain, ptcGainError = pars[1], parsErr[1]
796  ptcNoise = np.sqrt(np.fabs(pars[2]))
797  ptcNoiseError = 0.5*(parsErr[2]/np.fabs(pars[2]))*np.sqrt(np.fabs(pars[2]))
798  stringLegend = (f"a00: {ptcA00:.2e}+/-{ptcA00error:.2e}"
799  f"\n Gain: {ptcGain:.4}+/-{ptcGainError:.2e}"
800  f"\n Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e}")
801 
802  if ptcFitType == 'POLYNOMIAL':
803  ptcGain, ptcGainError = 1./pars[1], np.fabs(1./pars[1])*(parsErr[1]/pars[1])
804  ptcNoise = np.sqrt(np.fabs(pars[0]))*ptcGain
805  ptcNoiseError = (0.5*(parsErr[0]/np.fabs(pars[0]))*(np.sqrt(np.fabs(pars[0]))))*ptcGain
806  stringLegend = (f"Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} \n"
807  f"Gain: {ptcGain:.4}+/-{ptcGainError:.2e}")
808 
809  minMeanVecFinal = np.min(meanVecFinal)
810  maxMeanVecFinal = np.max(meanVecFinal)
811  meanVecFit = np.linspace(minMeanVecFinal, maxMeanVecFinal, 100*len(meanVecFinal))
812  minMeanVecOriginal = np.min(meanVecOriginal)
813  maxMeanVecOriginal = np.max(meanVecOriginal)
814  deltaXlim = maxMeanVecOriginal - minMeanVecOriginal
815 
816  a.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
817  a.plot(meanVecFinal, pars[0] + pars[1]*meanVecFinal, color='green', linestyle='--')
818  a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
819  a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
820  a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
821  a.set_xticks(meanVecOriginal)
822  a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
823  a.tick_params(labelsize=11)
824  a.text(0.03, 0.8, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
825  a.set_xscale('linear', fontsize=labelFontSize)
826  a.set_yscale('linear', fontsize=labelFontSize)
827  a.set_title(amp, fontsize=titleFontSize)
828  a.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])
829 
830  # Same, but in log-scale
831  a2.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
832  a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
833  a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
834  a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
835  a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
836  a2.tick_params(labelsize=11)
837  a2.text(0.03, 0.8, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
838  a2.set_xscale('log')
839  a2.set_yscale('log')
840  a2.set_title(amp, fontsize=titleFontSize)
841  a2.set_xlim([minMeanVecOriginal, maxMeanVecOriginal])
842 
843  f.suptitle(f"PTC \n Fit: " + stringTitle, fontsize=20)
844  pdfPages.savefig(f)
845  f2.suptitle(f"PTC (log-log)", fontsize=20)
846  pdfPages.savefig(f2)
847 
848  # Plot mean vs time
849  f, ax = plt.subplots(nrows=4, ncols=4, sharex='col', sharey='row', figsize=(13, 10))
850  for i, (amp, a) in enumerate(zip(dataset.ampNames, ax.flatten())):
851  meanVecFinal = np.array(dataset.rawMeans[amp])[dataset.visitMask[amp]]
852  timeVecFinal = np.array(dataset.rawExpTimes[amp])[dataset.visitMask[amp]]
853 
854  pars, parsErr = dataset.nonLinearity[amp], dataset.nonLinearityError[amp]
855  c0, c0Error = pars[0], parsErr[0]
856  c1, c1Error = pars[1], parsErr[1]
857  c2, c2Error = pars[2], parsErr[2]
858  stringLegend = f"c0: {c0:.4}+/-{c0Error:.2e}\n c1: {c1:.4}+/-{c1Error:.2e}" \
859  + f"\n c2(NL): {c2:.2e}+/-{c2Error:.2e}"
860  a.scatter(timeVecFinal, meanVecFinal)
861  a.plot(timeVecFinal, self.funcPolynomial(pars, timeVecFinal), color='red')
862  a.set_xlabel('Time (sec)', fontsize=labelFontSize)
863  a.set_xticks(timeVecFinal)
864  a.set_ylabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
865  a.tick_params(labelsize=labelFontSize)
866  a.text(0.03, 0.75, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
867  a.set_xscale('linear', fontsize=labelFontSize)
868  a.set_yscale('linear', fontsize=labelFontSize)
869  a.set_title(amp, fontsize=titleFontSize)
870 
871  f.suptitle("Linearity \n Fit: " + r"$\mu = c_0 + c_1 t + c_2 t^2$", fontsize=supTitleFontSize)
872  pdfPages.savefig()
873 
874  # Plot linearity residual
875  f, ax = plt.subplots(nrows=4, ncols=4, sharex='col', sharey='row', figsize=(13, 10))
876  for i, (amp, a) in enumerate(zip(dataset.ampNames, ax.flatten())):
877  meanVecFinal = np.array(dataset.rawMeans[amp])[dataset.visitMask[amp]]
878  linRes = np.array(dataset.nonLinearityResiduals[amp])
879 
880  a.scatter(meanVecFinal, linRes)
881  a.axhline(y=0, color='k')
882  a.axvline(x=timeVecFinal[self.config.linResidualTimeIndex], color='g', linestyle='--')
883  a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
884  a.set_xticks(meanVecFinal)
885  a.set_ylabel('LR (%)', fontsize=labelFontSize)
886  a.tick_params(labelsize=labelFontSize)
887  a.set_xscale('linear', fontsize=labelFontSize)
888  a.set_yscale('linear', fontsize=labelFontSize)
889  a.set_title(amp, fontsize=titleFontSize)
890 
891  f.suptitle(r"Linearity Residual: $100(1 - \mu_{\rm{ref}}/t_{\rm{ref}})/(\mu / t))$" + "\n" +
892  r"$t_{\rm{ref}}$: " + f"{timeVecFinal[2]} s", fontsize=supTitleFontSize)
893  pdfPages.savefig()
894 
895  return
def _plotPtc(self, dataset, ptcFitType, pdfPages)
Definition: ptc.py:750
Angle abs(Angle const &a)
Definition: Angle.h:106
def runDataRef(self, dataRef, visitPairs)
Definition: ptc.py:282
def __setattr__(self, attribute, value)
Definition: ptc.py:203
def fitPtcAndNl(self, dataset, ptcFitType)
Definition: ptc.py:591
std::shared_ptr< FrameSet > append(FrameSet const &first, FrameSet const &second)
Construct a FrameSet that performs two transformations in series.
Definition: functional.cc:33
def checkExpLengthEqual(exp1, exp2, v1=None, v2=None, raiseWithMessage=False)
Definition: utils.py:162
Statistics makeStatistics(lsst::afw::math::MaskedVector< EntryT > const &mv, std::vector< WeightPixel > const &vweights, int const flags, StatisticsControl const &sctrl=StatisticsControl())
The makeStatistics() overload to handle lsst::afw::math::MaskedVector<>
Definition: Statistics.h:520
def validateIsrConfig(isrTask, mandatory=None, forbidden=None, desirable=None, undesirable=None, checkTrim=True, logName=None)
Definition: utils.py:200
def _getInitialGoodPoints(means, variances, maxDeviationPositive, maxDeviationNegative)
Definition: ptc.py:552
def _fitBootstrap(self, initialParams, dataX, dataY, function, confidenceSigma=1.)
Definition: ptc.py:460
def __init__(self, args, kwargs)
Definition: ptc.py:261
def measureMeanVarPair(self, exposure1, exposure2, region=None)
Definition: ptc.py:354
def _makeZeroSafe(self, array, warn=True, substituteValue=1e-9)
Definition: ptc.py:578
std::shared_ptr< ImageT > binImage(ImageT const &inImage, int const binsize, lsst::afw::math::Property const flags=lsst::afw::math::MEAN)
Definition: binImage.cc:38
def _fitLeastSq(self, initialParams, dataX, dataY, function)
Definition: ptc.py:408
def plot(self, dataRef, dataset, ptcFitType)
Definition: ptc.py:739