ptc.py
# This file is part of cp_pipe.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#

__all__ = ['MeasurePhotonTransferCurveTask',
           'MeasurePhotonTransferCurveTaskConfig', ]

import numpy as np
import matplotlib.pyplot as plt
import os
from matplotlib.backends.backend_pdf import PdfPages

import lsst.afw.math as afwMath
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
from lsst.ip.isr import IsrTask
from .utils import (NonexistentDatasetTaskDataIdContainer, PairedVisitListTaskRunner,
                    checkExpLengthEqual, validateIsrConfig)
from scipy.optimize import leastsq
import numpy.polynomial.polynomial as poly


class MeasurePhotonTransferCurveTaskConfig(pexConfig.Config):
    """Config class for photon transfer curve measurement task"""
    isr = pexConfig.ConfigurableField(
        target=IsrTask,
        doc="""Task to perform instrumental signature removal.""",
    )
    isrMandatorySteps = pexConfig.ListField(
        dtype=str,
        doc="isr operations that must be performed for valid results. Raises if any of these are False.",
        default=['doAssembleCcd']
    )
    isrForbiddenSteps = pexConfig.ListField(
        dtype=str,
        doc="isr operations that must NOT be performed for valid results. Raises if any of these are True.",
        default=['doFlat', 'doFringe', 'doAddDistortionModel', 'doBrighterFatter', 'doUseOpticsTransmission',
                 'doUseFilterTransmission', 'doUseSensorTransmission', 'doUseAtmosphereTransmission']
    )
    isrDesirableSteps = pexConfig.ListField(
        dtype=str,
        doc="isr operations that it is advisable to perform, but are not mission-critical." +
            " WARNs are logged for any of these found to be False.",
        default=['doBias', 'doDark', 'doCrosstalk', 'doDefect']
    )
    isrUndesirableSteps = pexConfig.ListField(
        dtype=str,
        doc="isr operations that it is *not* advisable to perform in the general case, but are not" +
            " forbidden as some use-cases might warrant them." +
            " WARNs are logged for any of these found to be True.",
        default=['doLinearize']
    )
    ccdKey = pexConfig.Field(
        dtype=str,
        doc="The key by which to pull a detector from a dataId, e.g. 'ccd' or 'detector'.",
        default='ccd',
    )
    makePlots = pexConfig.Field(
        dtype=bool,
        doc="Plot the PTC curves?",
        default=False,
    )
    ptcFitType = pexConfig.ChoiceField(
        dtype=str,
        doc="Fit PTC to approximation in Astier+19 (Equation 16) or to a polynomial.",
        default="POLYNOMIAL",
        allowed={
            "POLYNOMIAL": "n-degree polynomial (use 'polynomialFitDegree' to set 'n').",
            "ASTIERAPPROXIMATION": "Approximation in Astier+19 (Eq. 16)."
        }
    )
    polynomialFitDegree = pexConfig.Field(
        dtype=int,
        doc="Degree of polynomial to fit the PTC, when 'ptcFitType'=POLYNOMIAL.",
        default=2,
    )
    binSize = pexConfig.Field(
        dtype=int,
        doc="Bin the image by this factor in both dimensions.",
        default=1,
    )
    minMeanSignal = pexConfig.Field(
        dtype=float,
        doc="Minimum value of mean signal (in ADU) to consider.",
        default=0,
    )
    maxMeanSignal = pexConfig.Field(
        dtype=float,
        doc="Maximum value of mean signal (in ADU) to consider.",
        default=9e6,
    )
    sigmaCutPtcOutliers = pexConfig.Field(
        dtype=float,
        doc="Sigma cut for outlier rejection in PTC.",
        default=4.0,
    )
    maxIterationsPtcOutliers = pexConfig.Field(
        dtype=int,
        doc="Maximum number of iterations for outlier rejection in PTC.",
        default=2,
    )
    doFitBootstrap = pexConfig.Field(
        dtype=bool,
        doc="Use bootstrap for the PTC fit parameters and errors?",
        default=False,
    )
    linResidualTimeIndex = pexConfig.Field(
        dtype=int,
        doc="Index position in time array for reference time in linearity residual calculation.",
        default=2,
    )


class MeasurePhotonTransferCurveTask(pipeBase.CmdLineTask):
    """A class to calculate, fit, and plot a PTC from a set of flat pairs.

    The Photon Transfer Curve (var(signal) vs mean(signal)) is a standard tool
    used in the characterization of astronomical detectors (e.g., Janesick 2001,
    Janesick 2007). This task calculates the PTC from a series of pairs of
    flat-field images, each pair taken at the same exposure time. The
    difference image of each pair is formed to eliminate fixed pattern noise,
    and then the variance of the difference image and the mean of the average image
    are used to produce the PTC. An n-degree polynomial or the approximation in
    Equation 16 of Astier+19 ("The Shape of the Photon Transfer Curve of CCD
    sensors", arXiv:1905.08677) can be fitted to the PTC. These models include
    parameters such as the gain (e/ADU) and readout noise.

    Parameters
    ----------
    *args: `list`
        Positional arguments passed to the Task constructor. None used at this
        time.
    **kwargs: `dict`
        Keyword arguments passed on to the Task constructor. None used at this
        time.
    """
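
    # Example command-line invocation (a sketch only; the driver script name,
    # repository path, and dataId values are illustrative assumptions, not
    # taken from this file):
    #
    #   measurePhotonTransferCurve.py /path/to/repo --rerun ptc \
    #       --id ccd=0 --visit-pairs 123,124 567,568 \
    #       --config ptcFitType=POLYNOMIAL makePlots=True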

    RunnerClass = PairedVisitListTaskRunner
    ConfigClass = MeasurePhotonTransferCurveTaskConfig
    _DefaultName = "measurePhotonTransferCurve"

    def __init__(self, *args, **kwargs):
        pipeBase.CmdLineTask.__init__(self, *args, **kwargs)
        self.makeSubtask("isr")
        plt.interactive(False)  # stop windows popping up when plotting. When headless, use 'agg' backend too
        validateIsrConfig(self.isr, self.config.isrMandatorySteps,
                          self.config.isrForbiddenSteps, self.config.isrDesirableSteps, checkTrim=False)
        self.config.validate()
        self.config.freeze()

    @classmethod
    def _makeArgumentParser(cls):
        """Augment argument parser for the MeasurePhotonTransferCurveTask."""
        parser = pipeBase.ArgumentParser(name=cls._DefaultName)
        parser.add_argument("--visit-pairs", dest="visitPairs", nargs="*",
                            help="Visit pairs to use. Each pair must be of the form INT,INT e.g. 123,456")
        parser.add_id_argument("--id", datasetType="measurePhotonTransferCurveGainAndNoise",
                               ContainerClass=NonexistentDatasetTaskDataIdContainer,
                               help="The ccds to use, e.g. --id ccd=0..100")
        return parser

    @pipeBase.timeMethod
    def runDataRef(self, dataRef, visitPairs):
        """Run the Photon Transfer Curve (PTC) measurement task.

        For a dataRef (which is each detector here),
        and given a list of visit pairs at different exposure times,
        measure the PTC.

        Parameters
        ----------
        dataRef : `lsst.daf.persistence.ButlerDataRef`
            dataRef for the detector for the visits to be fit.
        visitPairs : `iterable` of `tuple` of `int`
            Pairs of visit numbers to be processed together.
        """

        # setup necessary objects
        detNum = dataRef.dataId[self.config.ccdKey]
        detector = dataRef.get('camera')[dataRef.dataId[self.config.ccdKey]]
        amps = detector.getAmplifiers()
        ampNames = [amp.getName() for amp in amps]
        dataDict = {key: {} for key in ampNames}
        fitVectorsDict = {key: ([], [], []) for key in ampNames}

        self.log.info('Measuring PTC using %s visits for detector %s' % (visitPairs, detNum))

        for (v1, v2) in visitPairs:
            # Perform ISR on each exposure
            dataRef.dataId['visit'] = v1
            exp1 = self.isr.runDataRef(dataRef).exposure
            dataRef.dataId['visit'] = v2
            exp2 = self.isr.runDataRef(dataRef).exposure
            del dataRef.dataId['visit']

            checkExpLengthEqual(exp1, exp2, v1, v2, raiseWithMessage=True)
            expTime = exp1.getInfo().getVisitInfo().getExposureTime()

            for amp in detector:
                mu, varDiff = self.measureMeanVarPair(exp1, exp2, region=amp.getBBox())
                data = dict(expTime=expTime, meanClip=mu, varClip=varDiff)
                ampName = amp.getName()
                dataDict[ampName][(v1, v2)] = data
                fitVectorsDict[ampName][0].append(expTime)
                fitVectorsDict[ampName][1].append(mu)
                fitVectorsDict[ampName][2].append(varDiff)

        # Fit PTC and (non)linearity of signal vs time curve
        fitPtcDict, nlDict, gainDict, noiseDict = self.fitPtcAndNl(fitVectorsDict,
                                                                   ptcFitType=self.config.ptcFitType)
        allDict = {"data": dataDict, "ptc": fitPtcDict, "nl": nlDict}
        gainNoiseNlDict = {"gain": gainDict, "noise": noiseDict, "nl": nlDict}

        if self.config.makePlots:
            self.plot(dataRef, fitPtcDict, nlDict, ptcFitType=self.config.ptcFitType)

        # Save data, PTC fit, and NL fit dictionaries
        self.log.info(f"Writing PTC and NL data to {dataRef.getUri(write=True)}")
        dataRef.put(gainNoiseNlDict, datasetType="measurePhotonTransferCurveGainAndNoise")
        dataRef.put(allDict, datasetType="measurePhotonTransferCurveDatasetAll")

        self.log.info('Finished measuring PTC for detector %s' % detNum)

        return pipeBase.Struct(exitStatus=0)

    def measureMeanVarPair(self, exposure1, exposure2, region=None):
        """Calculate the mean signal of two exposures and the variance of their difference.

        Parameters
        ----------
        exposure1 : `lsst.afw.image.exposure.exposure.ExposureF`
            First exposure of flat field pair.

        exposure2 : `lsst.afw.image.exposure.exposure.ExposureF`
            Second exposure of flat field pair.

        region : `lsst.geom.Box2I`
            Region of each exposure where to perform the calculations (e.g., an amplifier).

        Returns
        -------
        mu : `np.float`
            0.5*(mu1 + mu2), where mu1 and mu2 are the clipped means of the regions in
            both exposures.

        varDiff : `np.float`
            Half of the clipped variance of the difference of the regions in the two input
            exposures.
        """
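
        # Rationale (assuming independent shot and read noise in each flat):
        # the mean-scaled difference formed below removes fixed-pattern structure
        # common to both flats, while Var(diff) ~ 2 x Var(single image), so
        # 0.5*Var(diff) is the per-image variance plotted against mu in the PTC.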

        if region is not None:
            im1Area = exposure1.maskedImage[region]
            im2Area = exposure2.maskedImage[region]
        else:
            im1Area = exposure1.maskedImage
            im2Area = exposure2.maskedImage

        im1Area = afwMath.binImage(im1Area, self.config.binSize)
        im2Area = afwMath.binImage(im2Area, self.config.binSize)

        # Clipped mean of images; then average of mean.
        mu1 = afwMath.makeStatistics(im1Area, afwMath.MEANCLIP).getValue()
        mu2 = afwMath.makeStatistics(im2Area, afwMath.MEANCLIP).getValue()
        mu = 0.5*(mu1 + mu2)

        # Take difference of pairs
        # symmetric formula: diff = (mu2*im1-mu1*im2)/(0.5*(mu1+mu2))
        temp = im2Area.clone()
        temp *= mu1
        diffIm = im1Area.clone()
        diffIm *= mu2
        diffIm -= temp
        diffIm /= mu

        varDiff = 0.5*(afwMath.makeStatistics(diffIm, afwMath.VARIANCECLIP).getValue())

        return mu, varDiff

    def _fitLeastSq(self, initialParams, dataX, dataY, function):
        """Do a fit and estimate the parameter errors using scipy.optimize.leastsq.

        optimize.leastsq returns the fractional covariance matrix. To estimate the
        standard deviation of the fit parameters, multiply the entries of this matrix
        by the reduced chi squared and take the square root of the diagonal elements.

        Parameters
        ----------
        initialParams : list of np.float
            initial values for fit parameters. For ptcFitType=POLYNOMIAL, its length
            determines the degree of the polynomial.

        dataX : np.array of np.float
            Data in the abscissa axis.

        dataY : np.array of np.float
            Data in the ordinate axis.

        function : callable object (function)
            Function to fit the data with.

        Returns
        -------
        pFitSingleLeastSquares : list of np.float
            List with fitted parameters.

        pErrSingleLeastSquares : list of np.float
            List with errors for fitted parameters.
        """
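
        # In symbols: sigma(p_i) = sqrt(chi2_red * Cov_ii), with
        # chi2_red = sum(residuals**2) / (nData - nParameters).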

        def errFunc(p, x, y):
            return function(p, x) - y

        pFit, pCov, infoDict, errMessage, success = leastsq(errFunc, initialParams,
                                                            args=(dataX, dataY), full_output=1, epsfcn=0.0001)

        if (len(dataY) > len(initialParams)) and pCov is not None:
            reducedChiSq = (errFunc(pFit, dataX, dataY)**2).sum()/(len(dataY)-len(initialParams))
            pCov *= reducedChiSq
        else:
            # Not enough points (or a singular fit) to estimate the covariance;
            # report infinite errors rather than failing when indexing pCov below.
            pCov = np.full((len(pFit), len(pFit)), np.inf)

        errorVec = []
        for i in range(len(pFit)):
            errorVec.append(np.fabs(pCov[i][i])**0.5)

        pFitSingleLeastSquares = pFit
        pErrSingleLeastSquares = np.array(errorVec)

        return pFitSingleLeastSquares, pErrSingleLeastSquares

    def _fitBootstrap(self, initialParams, dataX, dataY, function, confidenceSigma=1.):
        """Do a fit using least squares and bootstrap to estimate parameter errors.

        The bootstrap error bars are calculated by fitting 100 random data sets.

        Parameters
        ----------
        initialParams : list of np.float
            initial values for fit parameters. For ptcFitType=POLYNOMIAL, its length
            determines the degree of the polynomial.

        dataX : np.array of np.float
            Data in the abscissa axis.

        dataY : np.array of np.float
            Data in the ordinate axis.

        function : callable object (function)
            Function to fit the data with.

        confidenceSigma : np.float
            Number of sigmas that determine confidence interval for the bootstrap errors.

        Returns
        -------
        pFitBootstrap : list of np.float
            List with fitted parameters.

        pErrBootstrap : list of np.float
            List with errors for fitted parameters.
        """
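
        # Residual-based bootstrap: fit once, take the scatter of the residuals,
        # then repeatedly perturb dataY with Gaussian noise of that scatter and
        # refit; the spread of the refitted parameters sets the error bars.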

        def errFunc(p, x, y):
            return function(p, x) - y

        # Fit first time
        pFit, _ = leastsq(errFunc, initialParams, args=(dataX, dataY), full_output=0)

        # Get the stdev of the residuals
        residuals = errFunc(pFit, dataX, dataY)
        sigmaErrTotal = np.std(residuals)

        # 100 random data sets are generated and fitted
        pars = []
        for i in range(100):
            randomDelta = np.random.normal(0., sigmaErrTotal, len(dataY))
            randomDataY = dataY + randomDelta
            randomFit, _ = leastsq(errFunc, initialParams,
                                   args=(dataX, randomDataY), full_output=0)
            pars.append(randomFit)
        pars = np.array(pars)
        meanPfit = np.mean(pars, 0)

        # confidence interval for parameter estimates
        nSigma = confidenceSigma
        errPfit = nSigma*np.std(pars, 0)
        pFitBootstrap = meanPfit
        pErrBootstrap = errPfit
        return pFitBootstrap, pErrBootstrap

    def funcPolynomial(self, pars, x):
        """Polynomial function definition"""
        return poly.polyval(x, [*pars])

    def funcAstier(self, pars, x):
        """Single brighter-fatter parameter model for PTC; Equation 16 of Astier+19"""
        a00, gain, noise = pars
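        # In the limit a00 -> 0 this reduces to the classic linear PTC,
        # Var = x/gain + noise/gain**2 (the 'noise' parameter is the squared
        # read noise in electrons; the fitting code takes its square root).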
        return 0.5/(a00*gain*gain)*(np.exp(2*a00*x*gain)-1) + noise/(gain*gain)

    def fitPtcAndNl(self, fitVectorsDict, ptcFitType='POLYNOMIAL'):
        """Function to fit PTC, and calculate linearity and linearity residual

        Parameters
        ----------
        fitVectorsDict : `dict`
            Dictionary with exposure time, mean, and variance vectors in a tuple
        ptcFitType : `str`
            Fit a 'POLYNOMIAL' (degree: 'polynomialFitDegree') or
            'ASTIERAPPROXIMATION' to the PTC

        Returns
        -------
        fitPtcDict : `dict`
            Dictionary of the form fitPtcDict[amp] =
            (timeVecOriginal, meanVecOriginal, varVecOriginal, timeVecFinal,
            meanVecFinal, varVecFinal, parsFit, parsFitErr)
        nlDict : `dict`
            Dictionary of the form nlDict[amp] =
            (timeVecFinal, meanVecFinal, linResidual, parsFit, parsFitErr)
        gainDict : `dict`
            Dictionary of the form gainDict[amp] = (gain, gainErr)
        noiseDict : `dict`
            Dictionary of the form noiseDict[amp] = (noise, noiseErr)
        """
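
        # Gain and read noise from the fits below:
        #   'POLYNOMIAL' (Var = c0 + c1*mu + ...): gain = 1/c1, noise = sqrt(|c0|)*gain
        #   'ASTIERAPPROXIMATION': gain = pars[1], noise = sqrt(|pars[2]|) (electrons)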
        if ptcFitType == 'ASTIERAPPROXIMATION':
            ptcFunc = self.funcAstier
            parsIniPtc = [-1e-9, 1.0, 10.]  # a00, gain, noise
        if ptcFitType == 'POLYNOMIAL':
            ptcFunc = self.funcPolynomial
            parsIniPtc = np.repeat(1., self.config.polynomialFitDegree + 1)

        parsIniNl = [1., 1., 1.]
        fitPtcDict = {key: {} for key in fitVectorsDict}
        nlDict = {key: {} for key in fitVectorsDict}
        gainDict = {key: {} for key in fitVectorsDict}
        noiseDict = {key: {} for key in fitVectorsDict}

        def errFunc(p, x, y):
            return ptcFunc(p, x) - y

        for amp in fitVectorsDict:
            timeVec, meanVec, varVec = fitVectorsDict[amp]
            timeVecOriginal = np.array(timeVec)
            meanVecOriginal = np.array(meanVec)
            varVecOriginal = np.array(varVec)
            index0 = ((meanVecOriginal > self.config.minMeanSignal) &
                      (meanVecOriginal <= self.config.maxMeanSignal))
            # Before bootstrap fit, do an iterative fit to get rid of outliers in PTC
            count = 1
            sigmaCutPtcOutliers = self.config.sigmaCutPtcOutliers
            maxIterationsPtcOutliers = self.config.maxIterationsPtcOutliers
            timeTempVec = timeVecOriginal[index0]
            meanTempVec = meanVecOriginal[index0]
            varTempVec = varVecOriginal[index0]
            while count <= maxIterationsPtcOutliers:
                pars, cov = leastsq(errFunc, parsIniPtc, args=(meanTempVec, varTempVec),
                                    full_output=0)
                sigResids = (varTempVec - ptcFunc(pars, meanTempVec))/np.sqrt(varTempVec)
                index = list(np.where(np.abs(sigResids) < sigmaCutPtcOutliers)[0])
                timeTempVec = timeTempVec[index]
                meanTempVec = meanTempVec[index]
                varTempVec = varTempVec[index]
                count += 1

            parsIniPtc = pars
            timeVecFinal, meanVecFinal, varVecFinal = timeTempVec, meanTempVec, varTempVec
            if (len(meanVecOriginal) - len(meanVecFinal)) > 0:
                self.log.info(f"Number of points discarded in PTC of amplifier {amp}: "
                              f"{len(meanVecOriginal) - len(meanVecFinal)} out of {len(meanVecOriginal)}")

            if (len(meanVecFinal) < len(parsIniPtc)):
                raise RuntimeError(f"Not enough data points ({len(meanVecFinal)}) compared to the number of "
                                   f"parameters of the PTC model ({len(parsIniPtc)}).")
            # Fit the PTC
            if self.config.doFitBootstrap:
                parsFit, parsFitErr = self._fitBootstrap(parsIniPtc, meanVecFinal, varVecFinal, ptcFunc)
            else:
                parsFit, parsFitErr = self._fitLeastSq(parsIniPtc, meanVecFinal, varVecFinal, ptcFunc)

            fitPtcDict[amp] = (timeVecOriginal, meanVecOriginal, varVecOriginal, timeVecFinal,
                               meanVecFinal, varVecFinal, parsFit, parsFitErr)

            if ptcFitType == 'ASTIERAPPROXIMATION':
                ptcGain = parsFit[1]
                ptcGainErr = parsFitErr[1]
                ptcNoise = np.sqrt(np.fabs(parsFit[2]))
                ptcNoiseErr = 0.5*(parsFitErr[2]/np.fabs(parsFit[2]))*np.sqrt(np.fabs(parsFit[2]))
            if ptcFitType == 'POLYNOMIAL':
                ptcGain = 1./parsFit[1]
                ptcGainErr = np.fabs(1./parsFit[1])*(parsFitErr[1]/parsFit[1])
                ptcNoise = np.sqrt(np.fabs(parsFit[0]))*ptcGain
                ptcNoiseErr = (0.5*(parsFitErr[0]/np.fabs(parsFit[0]))*(np.sqrt(np.fabs(parsFit[0]))))*ptcGain

            gainDict[amp] = (ptcGain, ptcGainErr)
            noiseDict[amp] = (ptcNoise, ptcNoiseErr)

            # Non-linearity residuals (NL of mean vs time curve): percentage, and fit to a quadratic function
            # In this case, len(parsIniNl) = 3 indicates that we want a quadratic fit
            if self.config.doFitBootstrap:
                parsFit, parsFitErr = self._fitBootstrap(parsIniNl, timeVecFinal, meanVecFinal,
                                                         self.funcPolynomial)
            else:
                parsFit, parsFitErr = self._fitLeastSq(parsIniNl, timeVecFinal, meanVecFinal,
                                                       self.funcPolynomial)
            linResidualTimeIndex = self.config.linResidualTimeIndex
            if timeVecFinal[linResidualTimeIndex] == 0.0:
                raise RuntimeError("Reference time for linearity residual can't be 0.0")
            linResidual = 100*(1 - ((meanVecFinal[linResidualTimeIndex] /
                                     timeVecFinal[linResidualTimeIndex]) / (meanVecFinal/timeVecFinal)))
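            # Illustrative example (made-up numbers): if mu/t is 1000 ADU/s at the
            # reference exposure and 990 ADU/s at another point, the residual at
            # that point is 100*(1 - 1000/990), i.e. about -1 per cent.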
            nlDict[amp] = (timeVecFinal, meanVecFinal, linResidual, parsFit, parsFitErr)

        return fitPtcDict, nlDict, gainDict, noiseDict

    def plot(self, dataRef, fitPtcDict, nlDict, ptcFitType='POLYNOMIAL'):
        """Plot the PTC and linearity fits and write them to a single PDF file."""
        dirname = dataRef.getUri(datasetType='cpPipePlotRoot', write=True)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        detNum = dataRef.dataId[self.config.ccdKey]
        filename = f"PTC_det{detNum}.pdf"
        filenameFull = os.path.join(dirname, filename)
        with PdfPages(filenameFull) as pdfPages:
            self._plotPtc(fitPtcDict, nlDict, ptcFitType, pdfPages)

    def _plotPtc(self, fitPtcDict, nlDict, ptcFitType, pdfPages):
        """Plot PTC, linearity, and linearity residual per amplifier"""

        if ptcFitType == 'ASTIERAPPROXIMATION':
            ptcFunc = self.funcAstier
            stringTitle = r"Var = $\frac{1}{2g^2a_{00}}(\exp (2a_{00} \mu g) - 1) + \frac{n_{00}}{g^2}$"

        if ptcFitType == 'POLYNOMIAL':
            ptcFunc = self.funcPolynomial
            stringTitle = f"Polynomial (degree: {self.config.polynomialFitDegree})"

        legendFontSize = 7.5
        labelFontSize = 8
        titleFontSize = 10
        supTitleFontSize = 18

        # General determination of the size of the plot grid
        nAmps = len(fitPtcDict)
        if nAmps == 2:
            nRows, nCols = 2, 1
        else:
            nRows = np.sqrt(nAmps)
            mantissa, _ = np.modf(nRows)
            if mantissa > 0:
                nRows = int(nRows) + 1
                nCols = nRows
            else:
                nRows = int(nRows)
                nCols = nRows

        f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))

        # fitPtcDict[amp] = (timeVecOriginal, meanVecOriginal, varVecOriginal, timeVecFinal,
        #                    meanVecFinal, varVecFinal, parsFit, parsFitErr)
        for i, (amp, a, a2) in enumerate(zip(fitPtcDict, ax.flatten(), ax2.flatten())):
            meanVecOriginal, varVecOriginal = fitPtcDict[amp][1], fitPtcDict[amp][2]
            meanVecFinal, varVecFinal = fitPtcDict[amp][4], fitPtcDict[amp][5]
            meanVecOutliers = np.setdiff1d(meanVecOriginal, meanVecFinal)
            varVecOutliers = np.setdiff1d(varVecOriginal, varVecFinal)
            pars, parsErr = fitPtcDict[amp][6], fitPtcDict[amp][7]

            if ptcFitType == 'ASTIERAPPROXIMATION':
                ptcA00, ptcA00error = pars[0], parsErr[0]
                ptcGain, ptcGainError = pars[1], parsErr[1]
                ptcNoise = np.sqrt(np.fabs(pars[2]))
                ptcNoiseError = 0.5*(parsErr[2]/np.fabs(pars[2]))*np.sqrt(np.fabs(pars[2]))
                stringLegend = (f"a00: {ptcA00:.2e}+/-{ptcA00error:.2e}"
                                f"\n Gain: {ptcGain:.4}+/-{ptcGainError:.2e}"
                                f"\n Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e}")

            if ptcFitType == 'POLYNOMIAL':
                ptcGain, ptcGainError = 1./pars[1], np.fabs(1./pars[1])*(parsErr[1]/pars[1])
                ptcNoise = np.sqrt(np.fabs(pars[0]))*ptcGain
                ptcNoiseError = (0.5*(parsErr[0]/np.fabs(pars[0]))*(np.sqrt(np.fabs(pars[0]))))*ptcGain
                stringLegend = (f"Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} \n"
                                f"Gain: {ptcGain:.4}+/-{ptcGainError:.2e}")

            minMeanVecFinal = np.min(meanVecFinal)
            maxMeanVecFinal = np.max(meanVecFinal)
            meanVecFit = np.linspace(minMeanVecFinal, maxMeanVecFinal, 100*len(meanVecFinal))
            minMeanVecOriginal = np.min(meanVecOriginal)
            maxMeanVecOriginal = np.max(meanVecOriginal)
            deltaXlim = maxMeanVecOriginal - minMeanVecOriginal

            a.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
            a.plot(meanVecFinal, pars[0] + pars[1]*meanVecFinal, color='green', linestyle='--')
            a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o')
            a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s')
            a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a.set_xticks(meanVecOriginal)
            a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
            a.tick_params(labelsize=11)
            a.text(0.03, 0.8, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
            a.set_xscale('linear')
            a.set_yscale('linear')
            a.set_title(amp, fontsize=titleFontSize)
            a.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])

            # Same, but in log-scale
            a2.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
            a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o')
            a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s')
            a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
            a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
            a2.tick_params(labelsize=11)
            a2.text(0.03, 0.8, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
            a2.set_xscale('log')
            a2.set_yscale('log')
            a2.set_title(amp, fontsize=titleFontSize)
            a2.set_xlim([minMeanVecOriginal, maxMeanVecOriginal])

        f.suptitle("PTC \n Fit: " + stringTitle, fontsize=20)
        pdfPages.savefig(f)
        f2.suptitle("PTC (log-log)", fontsize=20)
        pdfPages.savefig(f2)

        # Plot mean vs time
        f, ax = plt.subplots(nrows=4, ncols=4, sharex='col', sharey='row', figsize=(13, 10))
        for i, (amp, a) in enumerate(zip(fitPtcDict, ax.flatten())):
            timeVecFinal, meanVecFinal = nlDict[amp][0], nlDict[amp][1]
            pars, parsErr = nlDict[amp][3], nlDict[amp][4]
            c0, c0Error = pars[0], parsErr[0]
            c1, c1Error = pars[1], parsErr[1]
            c2, c2Error = pars[2], parsErr[2]
            stringLegend = f"c0: {c0:.4}+/-{c0Error:.2e}\n c1: {c1:.4}+/-{c1Error:.2e}" \
                + f"\n c2(NL): {c2:.2e}+/-{c2Error:.2e}"
            a.scatter(timeVecFinal, meanVecFinal)
            a.plot(timeVecFinal, self.funcPolynomial(pars, timeVecFinal), color='red')
            a.set_xlabel('Time (sec)', fontsize=labelFontSize)
            a.set_xticks(timeVecFinal)
            a.set_ylabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a.tick_params(labelsize=labelFontSize)
            a.text(0.03, 0.75, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
            a.set_xscale('linear')
            a.set_yscale('linear')
            a.set_title(amp, fontsize=titleFontSize)

        f.suptitle("Linearity \n Fit: " + r"$\mu = c_0 + c_1 t + c_2 t^2$", fontsize=supTitleFontSize)
        pdfPages.savefig()

        # Plot linearity residual
        f, ax = plt.subplots(nrows=4, ncols=4, sharex='col', sharey='row', figsize=(13, 10))
        for i, (amp, a) in enumerate(zip(fitPtcDict, ax.flatten())):
            timeVecFinal, meanVecFinal, linRes = nlDict[amp][0], nlDict[amp][1], nlDict[amp][2]
            a.scatter(meanVecFinal, linRes)
            a.axhline(y=0, color='k')
            a.axvline(x=timeVecFinal[self.config.linResidualTimeIndex], color='g', linestyle='--')
            a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a.set_xticks(meanVecFinal)
            a.set_ylabel('LR (%)', fontsize=labelFontSize)
            a.tick_params(labelsize=labelFontSize)
            a.set_xscale('linear')
            a.set_yscale('linear')
            a.set_title(amp, fontsize=titleFontSize)

        f.suptitle(r"Linearity Residual: $100(1 - (\mu_{\rm{ref}}/t_{\rm{ref}})/(\mu/t))$" + "\n" +
                   r"$t_{\rm{ref}}$: " + f"{timeVecFinal[self.config.linResidualTimeIndex]} s",
                   fontsize=supTitleFontSize)
        pdfPages.savefig()

        return