plotPtc.py
1 # This file is part of cp_pipe.
2 #
3 # Developed for the LSST Data Management System.
4 # This product includes software developed by the LSST Project
5 # (https://www.lsst.org).
6 # See the COPYRIGHT file at the top-level directory of this distribution
7 # for details of code ownership.
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the GNU General Public License
20 # along with this program. If not, see <https://www.gnu.org/licenses/>.
21 #
22 
23 __all__ = ['PlotPhotonTransferCurveTask']
24 
25 import numpy as np
26 import matplotlib.pyplot as plt
27 import matplotlib as mpl
28 from matplotlib import gridspec
29 import os
30 from matplotlib.backends.backend_pdf import PdfPages
31 
32 import lsst.ip.isr as isr
33 import lsst.pex.config as pexConfig
34 import lsst.pipe.base as pipeBase
35 
36 from .utils import (funcAstier, funcPolynomial, NonexistentDatasetTaskDataIdContainer,
37  calculateWeightedReducedChi2)
38 from matplotlib.ticker import MaxNLocator
39 
40 from .astierCovPtcFit import computeApproximateAcoeffs
41 from .astierCovPtcUtils import getFitDataFromCovariances
42 
43 from lsst.ip.isr import PhotonTransferCurveDataset
44 
45 
46 class PlotPhotonTransferCurveTaskConfig(pexConfig.Config):
47  """Config class for the photon transfer curve (PTC) plotting task."""
48  datasetFileName = pexConfig.Field(
49  dtype=str,
50  doc="datasetPtc file name (fits)",
51  default="",
52  )
53  linearizerFileName = pexConfig.Field(
54  dtype=str,
55  doc="linearizer file name (fits)",
56  default="",
57  )
58  ccdKey = pexConfig.Field(
59  dtype=str,
60  doc="The key by which to pull a detector from a dataId, e.g. 'ccd' or 'detector'.",
61  default='detector',
62  )
63  signalElectronsRelativeA = pexConfig.Field(
64  dtype=float,
65  doc="Signal value for relative systematic bias between different methods of estimating a_ij "
66  "(Fig. 15 of Astier+19).",
67  default=75000,
68  )
69  plotNormalizedCovariancesNumberOfBins = pexConfig.Field(
70  dtype=int,
71  doc="Number of bins in `plotNormalizedCovariances` function "
72  "(Figs. 8 and 10 of Astier+19).",
73  default=10,
74  )
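# Illustrative example (not part of the original source): these options would
# typically be overridden via a config file or --config on the command line,
# e.g. (the file name below is hypothetical):
#   config.datasetFileName = "ptcDataset-det000.fits"
#   config.plotNormalizedCovariancesNumberOfBins = 20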
75 
76 
77 class PlotPhotonTransferCurveTask(pipeBase.CmdLineTask):
78  """A class to plot the dataset from MeasurePhotonTransferCurveTask.
79 
80  Parameters
81  ----------
82 
83  *args: `list`
84  Positional arguments passed to the Task constructor. None used at this
85  time.
86  **kwargs: `dict`
87  Keyword arguments passed on to the Task constructor. None used at this
88  time.
89 
90  """
91 
92  ConfigClass = PlotPhotonTransferCurveTaskConfig
93  _DefaultName = "plotPhotonTransferCurve"
94 
95  def __init__(self, *args, **kwargs):
96  pipeBase.CmdLineTask.__init__(self, *args, **kwargs)
97  plt.interactive(False) # stop windows popping up when plotting. When headless, use 'agg' backend too
98  self.config.validate()
99  self.config.freeze()
100 
101  @classmethod
102  def _makeArgumentParser(cls):
103  """Augment argument parser for the PlotPhotonTransferCurveTask."""
104  parser = pipeBase.ArgumentParser(name=cls._DefaultName)
105  parser.add_id_argument("--id", datasetType="photonTransferCurveDataset",
106  ContainerClass=NonexistentDatasetTaskDataIdContainer,
107  help="The ccds to use, e.g. --id ccd=0..100")
108  return parser
109 
110  @pipeBase.timeMethod
111  def runDataRef(self, dataRef):
112  """Run the Photon Transfer Curve (PTC) plotting task.
113 
114  Parameters
115  ----------
116  dataRef : `lsst.daf.persistence.ButlerDataRef`
117  Data reference for the detector whose PTC dataset is to be plotted.
118  """
119 
120  datasetFile = self.config.datasetFileName
121  datasetPtc = PhotonTransferCurveDataset.readFits(datasetFile)
122 
123  dirname = dataRef.getUri(datasetType='cpPipePlotRoot', write=True)
124  if not os.path.exists(dirname):
125  os.makedirs(dirname)
126 
127  detNum = dataRef.dataId[self.config.ccdKey]
128  filename = f"PTC_det{detNum}.pdf"
129  filenameFull = os.path.join(dirname, filename)
130 
131  if self.config.linearizerFileName:
132  linearizer = isr.linearize.Linearizer.readFits(self.config.linearizerFileName)
133  else:
134  linearizer = None
135  self.run(filenameFull, datasetPtc, linearizer=linearizer, log=self.log)
136 
137  return pipeBase.Struct(exitStatus=0)
138 
139  def run(self, filenameFull, datasetPtc, linearizer=None, log=None):
140  """Make the plots for the PTC task"""
141  ptcFitType = datasetPtc.ptcFitType
142  with PdfPages(filenameFull) as pdfPages:
143  if ptcFitType in ["FULLCOVARIANCE", ]:
144  self.covAstierMakeAllPlots(datasetPtc, pdfPages, log=log)
145  elif ptcFitType in ["EXPAPPROXIMATION", "POLYNOMIAL"]:
146  self._plotStandardPtc(datasetPtc, ptcFitType, pdfPages)
147  else:
148  raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n" +
149  "Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.")
150  if linearizer:
151  self._plotLinearizer(datasetPtc, linearizer, pdfPages)
152 
153  return
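# Illustrative sketch (not in the original source) of calling `run` directly,
# bypassing the command-line driver; the file names here are hypothetical:
#   datasetPtc = PhotonTransferCurveDataset.readFits("ptcDataset-det000.fits")
#   task = PlotPhotonTransferCurveTask()
#   task.run("PTC_det000.pdf", datasetPtc, linearizer=None)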
154 
155  def covAstierMakeAllPlots(self, dataset, pdfPages,
156  log=None):
157  """Make plots for MeasurePhotonTransferCurveTask when ptcFitType is 'FULLCOVARIANCE'.
158 
159  This function calls other functions that mostly reproduce the plots in Astier+19.
160  Most of the code is ported from Pierre Astier's repository: https://github.com/PierreAstier/bfptc
161 
162  Parameters
163  ----------
164  dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
165  The dataset containing the necessary information to produce the plots.
166 
167  pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
168  PDF file where the plots will be saved.
169 
170  log : `lsst.log.Log`, optional
171  Logger to handle messages
172  """
173  mu = dataset.rawMeans
174  # dictionaries with ampNames as keys
175  fullCovs = dataset.covariances
176  fullCovsModel = dataset.covariancesModel
177  fullCovWeights = dataset.covariancesSqrtWeights
178  aDict = dataset.aMatrix
179  bDict = dataset.bMatrix
180  fullCovsNoB = dataset.covariancesNoB
181  fullCovsModelNoB = dataset.covariancesModelNoB
182  fullCovWeightsNoB = dataset.covariancesSqrtWeightsNoB
183  aDictNoB = dataset.aMatrixNoB
184  gainDict = dataset.gain
185  noiseDict = dataset.noise
186 
187  self.plotCovariances(mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB, fullCovsModelNoB,
188  fullCovWeightsNoB, gainDict, noiseDict, aDict, bDict, pdfPages)
189  self.plotNormalizedCovariances(0, 0, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
190  fullCovsModelNoB, fullCovWeightsNoB, pdfPages, offset=0.01,
191  topPlot=True,
192  numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
193  log=log)
194  self.plotNormalizedCovariances(0, 1, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
195  fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
196  numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
197  log=log)
198  self.plotNormalizedCovariances(1, 0, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
199  fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
200  numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
201  log=log)
202  self.plot_a_b(aDict, bDict, pdfPages)
203  self.ab_vs_dist(aDict, bDict, pdfPages, bRange=4)
204  self.plotAcoeffsSum(aDict, bDict, pdfPages)
205  self.plotRelativeBiasACoeffs(aDict, aDictNoB, fullCovsModel, fullCovsModelNoB,
206  self.config.signalElectronsRelativeA, gainDict, pdfPages, maxr=4)
207 
208  return
209 
210  @staticmethod
211  def plotCovariances(mu, covs, covsModel, covsWeights, covsNoB, covsModelNoB, covsWeightsNoB,
212  gainDict, noiseDict, aDict, bDict, pdfPages):
213  """Plot covariances and models: Cov00, Cov10, Cov01.
214 
215  Figs. 6 and 7 of Astier+19
216 
217  Parameters
218  ----------
219  mu : `dict`, [`str`, `list`]
220  Dictionary keyed by amp name with mean signal values.
221 
222  covs : `dict`, [`str`, `list`]
223  Dictionary keyed by amp names containing a list of measured covariances per mean flux.
224 
225  covsModel : `dict`, [`str`, `list`]
226  Dictionary keyed by amp names containing the covariance model (Eq. 20 of Astier+19) per mean flux.
227 
228  covsWeights : `dict`, [`str`, `list`]
229  Dictionary keyed by amp names containing the sqrt. of the covariance weights.
230 
231  covsNoB : `dict`, [`str`, `list`]
232  Dictionary keyed by amp names containing a list of measured covariances per mean flux ('b'=0 in
233  Astier+19).
234 
235  covsModelNoB : `dict`, [`str`, `list`]
236  Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of Astier+19)
237  per mean flux.
238 
239  covsWeightsNoB : `dict`, [`str`, `list`]
240  Dictionary keyed by amp names containing sqrt. of covariances weights ('b' = 0 in Eq. 20 of
241  Astier+19).
242 
243  gainDict : `dict`, [`str`, `float`]
244  Dictionary keyed by amp names containing the gains in e-/ADU.
245 
246  noiseDict : `dict`, [`str`, `float`]
247  Dictionary keyed by amp names containing the rms readout noise in e-.
248 
249  aDict : `dict`, [`str`, `numpy.array`]
250  Dictionary keyed by amp names containing 'a' coefficients (Eq. 20 of Astier+19).
251 
252  bDict : `dict`, [`str`, `numpy.array`]
253  Dictionary keyed by amp names containing 'b' coefficients (Eq. 20 of Astier+19).
254 
255  pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
256  PDF file where the plots will be saved.
257  """
258 
259  legendFontSize = 6.5
260  labelFontSize = 7
261  titleFontSize = 9
262  supTitleFontSize = 18
263  markerSize = 25
264 
265  nAmps = len(covs)
266  if nAmps == 2:
267  nRows, nCols = 2, 1
268  nRows = np.sqrt(nAmps)
269  mantissa, _ = np.modf(nRows)
270  if mantissa > 0:
271  nRows = int(nRows) + 1
272  nCols = nRows
273  else:
274  nRows = int(nRows)
275  nCols = nRows
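# The panel grid is square: nRows = nCols = ceil(sqrt(nAmps)), so e.g. a
# 16-amplifier detector gets a 4x4 grid of panels.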
276 
277  f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
278  f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
279  fResCov00, axResCov00 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row',
280  figsize=(13, 10))
281  fCov01, axCov01 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
282  fCov10, axCov10 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
283 
284  assert(len(covsModel) == nAmps)
285  assert(len(covsWeights) == nAmps)
286 
287  assert(len(covsNoB) == nAmps)
288  assert(len(covsModelNoB) == nAmps)
289  assert(len(covsWeightsNoB) == nAmps)
290 
291  for i, (amp, a, a2, aResVar, a3, a4) in enumerate(zip(covs, ax.flatten(),
292  ax2.flatten(), axResCov00.flatten(),
293  axCov01.flatten(), axCov10.flatten())):
294 
295  muAmp, cov, model, weight = mu[amp], covs[amp], covsModel[amp], covsWeights[amp]
296  if not np.isnan(np.array(cov)).all(): # If all the entries are np.nan, this is a bad amp.
297  aCoeffs, bCoeffs = np.array(aDict[amp]), np.array(bDict[amp])
298  gain, noise = gainDict[amp], noiseDict[amp]
299  (meanVecOriginal, varVecOriginal, varVecModelOriginal,
300  weightsOriginal, varMask) = getFitDataFromCovariances(0, 0, muAmp, cov, model, weight)
301  meanVecFinal, varVecFinal = meanVecOriginal[varMask], varVecOriginal[varMask]
302  varVecModelFinal = varVecModelOriginal[varMask]
303  meanVecOutliers = meanVecOriginal[np.invert(varMask)]
304  varVecOutliers = varVecOriginal[np.invert(varMask)]
305  varWeightsFinal = weightsOriginal[varMask]
306  # Get weighted reduced chi2
307  chi2FullModelVar = calculateWeightedReducedChi2(varVecFinal, varVecModelFinal,
308  varWeightsFinal, len(meanVecFinal), 4)
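# calculateWeightedReducedChi2 divides the weighted chi2 by (nData - nParsModel);
# the full-covariance variance model is counted here as having 4 free parameters.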
309 
310  (meanVecOrigCov01, varVecOrigCov01, varVecModelOrigCov01,
311  _, maskCov01) = getFitDataFromCovariances(0, 1, muAmp, cov, model, weight)
312  meanVecFinalCov01, varVecFinalCov01 = meanVecOrigCov01[maskCov01], varVecOrigCov01[maskCov01]
313  varVecModelFinalCov01 = varVecModelOrigCov01[maskCov01]
314  meanVecOutliersCov01 = meanVecOrigCov01[np.invert(maskCov01)]
315  varVecOutliersCov01 = varVecOrigCov01[np.invert(maskCov01)]
316 
317  (meanVecOrigCov10, varVecOrigCov10, varVecModelOrigCov10,
318  _, maskCov10) = getFitDataFromCovariances(1, 0, muAmp, cov, model, weight)
319  meanVecFinalCov10, varVecFinalCov10 = meanVecOrigCov10[maskCov10], varVecOrigCov10[maskCov10]
320  varVecModelFinalCov10 = varVecModelOrigCov10[maskCov10]
321  meanVecOutliersCov10 = meanVecOrigCov10[np.invert(maskCov10)]
322  varVecOutliersCov10 = varVecOrigCov10[np.invert(maskCov10)]
323 
324  # Quadratic fit for residuals below
325  par2 = np.polyfit(meanVecFinal, varVecFinal, 2, w=varWeightsFinal)
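# Note: np.polyfit applies the weights to the unsquared residuals, so the
# sqrt-weights (~1/sigma) stored in the dataset are the appropriate input here.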
326  varModelFinalQuadratic = np.polyval(par2, meanVecFinal)
327  chi2QuadModelVar = calculateWeightedReducedChi2(varVecFinal, varModelFinalQuadratic,
328  varWeightsFinal, len(meanVecFinal), 3)
329 
330  # fit with no 'b' coefficient (c = a*b in Eq. 20 of Astier+19)
331  covNoB, modelNoB, weightNoB = covsNoB[amp], covsModelNoB[amp], covsWeightsNoB[amp]
332  (meanVecFinalNoB, varVecFinalNoB, varVecModelFinalNoB,
333  varWeightsFinalNoB, maskNoB) = getFitDataFromCovariances(0, 0, muAmp, covNoB, modelNoB,
334  weightNoB, returnMasked=True)
335  chi2FullModelNoBVar = calculateWeightedReducedChi2(varVecFinalNoB, varVecModelFinalNoB,
336  varWeightsFinalNoB, len(meanVecFinalNoB),
337  3)
338  stringLegend = (f"Gain: {gain:.4} e/ADU \n" +
339  f"Noise: {noise:.4} e \n" +
340  r"$a_{00}$: %.3e 1/e"%aCoeffs[0, 0] +
341  "\n" + r"$b_{00}$: %.3e 1/e"%bCoeffs[0, 0] +
342  f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
343  minMeanVecFinal = np.nanmin(meanVecFinal)
344  maxMeanVecFinal = np.nanmax(meanVecFinal)
345  deltaXlim = maxMeanVecFinal - minMeanVecFinal
346 
347  a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
348  a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
349  a.tick_params(labelsize=11)
350  a.set_xscale('linear', fontsize=labelFontSize)
351  a.set_yscale('linear', fontsize=labelFontSize)
352  a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
353  a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
354  a.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
355  a.text(0.03, 0.7, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
356  a.set_title(amp, fontsize=titleFontSize)
357  a.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
358 
359  # Same as above, but in log-scale
360  a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
361  a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
362  a2.tick_params(labelsize=11)
363  a2.set_xscale('log')
364  a2.set_yscale('log')
365  a2.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
366  a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
367  a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
368  a2.text(0.03, 0.7, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
369  a2.set_title(amp, fontsize=titleFontSize)
370  a2.set_xlim([minMeanVecFinal, maxMeanVecFinal])
371 
372  # Residuals var - model
373  aResVar.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
374  aResVar.set_ylabel(r'Residuals (ADU$^2$)', fontsize=labelFontSize)
375  aResVar.tick_params(labelsize=11)
376  aResVar.set_xscale('linear', fontsize=labelFontSize)
377  aResVar.set_yscale('linear', fontsize=labelFontSize)
378  aResVar.plot(meanVecFinal, varVecFinal - varVecModelFinal, color='blue', linestyle='-',
379  label=r'Full fit ($\chi_{\rm{red}}^2$: %g)'%chi2FullModelVar)
380  aResVar.plot(meanVecFinal, varVecFinal - varModelFinalQuadratic, color='red', linestyle='-',
381  label=r'Quadratic fit ($\chi_{\rm{red}}^2$: %g)'%chi2QuadModelVar)
382  aResVar.plot(meanVecFinalNoB, varVecFinalNoB - varVecModelFinalNoB, color='green',
383  linestyle='-',
384  label=r'Full fit (b=0) ($\chi_{\rm{red}}^2$: %g)'%chi2FullModelNoBVar)
385  aResVar.axhline(color='black')
386  aResVar.set_title(amp, fontsize=titleFontSize)
387  aResVar.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
388  aResVar.legend(fontsize=7)
389 
390  a3.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
391  a3.set_ylabel(r'Cov01 (ADU$^2$)', fontsize=labelFontSize)
392  a3.tick_params(labelsize=11)
393  a3.set_xscale('linear', fontsize=labelFontSize)
394  a3.set_yscale('linear', fontsize=labelFontSize)
395  a3.scatter(meanVecFinalCov01, varVecFinalCov01, c='blue', marker='o', s=markerSize)
396  a3.scatter(meanVecOutliersCov01, varVecOutliersCov01, c='magenta', marker='s', s=markerSize)
397  a3.plot(meanVecFinalCov01, varVecModelFinalCov01, color='red', linestyle='-')
398  a3.set_title(amp, fontsize=titleFontSize)
399  a3.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
400 
401  a4.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
402  a4.set_ylabel(r'Cov10 (ADU$^2$)', fontsize=labelFontSize)
403  a4.tick_params(labelsize=11)
404  a4.set_xscale('linear', fontsize=labelFontSize)
405  a4.set_yscale('linear', fontsize=labelFontSize)
406  a4.scatter(meanVecFinalCov10, varVecFinalCov10, c='blue', marker='o', s=markerSize)
407  a4.scatter(meanVecOutliersCov10, varVecOutliersCov10, c='magenta', marker='s', s=markerSize)
408  a4.plot(meanVecFinalCov10, varVecModelFinalCov10, color='red', linestyle='-')
409  a4.set_title(amp, fontsize=titleFontSize)
410  a4.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
411 
412  else:
413  a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
414  a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
415  a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
416  a4.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
417 
418  f.suptitle("PTC from covariances as in Astier+19 \n Fit: Eq. 20, Astier+19",
419  fontsize=supTitleFontSize)
420  pdfPages.savefig(f)
421  f2.suptitle("PTC from covariances as in Astier+19 (log-log) \n Fit: Eq. 20, Astier+19",
422  fontsize=supTitleFontSize)
423  pdfPages.savefig(f2)
424  fResCov00.suptitle("Residuals (data-model) for Cov00 (Var)", fontsize=supTitleFontSize)
425  pdfPages.savefig(fResCov00)
426  fCov01.suptitle("Cov01 as in Astier+19 (nearest parallel neighbor covariance) \n" +
427  " Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
428  pdfPages.savefig(fCov01)
429  fCov10.suptitle("Cov10 as in Astier+19 (nearest serial neighbor covariance) \n" +
430  "Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
431  pdfPages.savefig(fCov10)
432 
433  return
434 
435  def plotNormalizedCovariances(self, i, j, inputMu, covs, covsModel, covsWeights, covsNoB, covsModelNoB,
436  covsWeightsNoB, pdfPages, offset=0.004,
437  numberOfBins=10, plotData=True, topPlot=False, log=None):
438  """Plot C_ij/mu vs mu.
439 
440  Figs. 8, 10, and 11 of Astier+19
441 
442  Parameters
443  ----------
444  i : `int`
445  Covariance lag.
446 
447  j : `int`
448  Covariance lag
449 
450  inputMu : `dict`, [`str`, `list`]
451  Dictionary keyed by amp name with mean signal values.
452 
453  covs : `dict`, [`str`, `list`]
454  Dictionary keyed by amp names containing a list of measured covariances per mean flux.
455 
456  covsModel : `dict`, [`str`, `list`]
457  Dictionary keyed by amp names containing the covariance model (Eq. 20 of Astier+19) per mean flux.
458 
459  covsWeights : `dict`, [`str`, `list`]
460  Dictionary keyed by amp names containing the sqrt. of the covariance weights.
461 
462  covsNoB : `dict`, [`str`, `list`]
463  Dictionary keyed by amp names containing a list of measured covariances per mean flux ('b'=0 in
464  Astier+19).
465 
466  covsModelNoB : `dict`, [`str`, `list`]
467  Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of Astier+19)
468  per mean flux.
469 
470  covsWeightsNoB : `dict`, [`str`, `list`]
471  Dictionary keyed by amp names containing sqrt. of covariances weights ('b' = 0 in Eq. 20 of
472  Astier+19).
473 
474  pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
475  PDF file where the plots will be saved.
476 
477  offset : `float`, optional
478  Constant offset factor to plot covariances in same panel (so they don't overlap).
479 
480  numberOfBins : `int`, optional
481  Number of bins for top and bottom plot.
482 
483  plotData : `bool`, optional
484  Plot the data points?
485 
486  topPlot : `bool`, optional
487  Plot the top plot with the covariances, and the bottom plot with the model residuals?
488 
489  log : `lsst.log.Log`, optional
490  Logger to handle messages.
491  """
492  if (not topPlot):
493  fig = plt.figure(figsize=(8, 10))
494  gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
495  gs.update(hspace=0)
496  ax0 = plt.subplot(gs[0])
497  plt.setp(ax0.get_xticklabels(), visible=False)
498  else:
499  fig = plt.figure(figsize=(8, 8))
500  ax0 = plt.subplot(111)
501  ax0.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
502  ax0.tick_params(axis='both', labelsize='x-large')
503  mue, rese, wce = [], [], []
504  mueNoB, reseNoB, wceNoB = [], [], []
505  for counter, amp in enumerate(covs):
506  muAmp, fullCov, fullCovModel, fullCovWeight = (inputMu[amp], covs[amp], covsModel[amp],
507  covsWeights[amp])
508  if len(fullCov) == 0:
509  continue
510  mu, cov, model, weightCov, _ = getFitDataFromCovariances(i, j, muAmp, fullCov, fullCovModel,
511  fullCovWeight, divideByMu=True,
512  returnMasked=True)
513  mue += list(mu)
514  rese += list(cov - model)
515  wce += list(weightCov)
516 
517  fullCovNoB, fullCovModelNoB, fullCovWeightNoB = (covsNoB[amp], covsModelNoB[amp],
518  covsWeightsNoB[amp])
519  if len(fullCovNoB) == 0:
520  continue
521  (muNoB, covNoB, modelNoB,
522  weightCovNoB, _) = getFitDataFromCovariances(i, j, muAmp, fullCovNoB, fullCovModelNoB,
523  fullCovWeightNoB, divideByMu=True,
524  returnMasked=True)
525  mueNoB += list(muNoB)
526  reseNoB += list(covNoB - modelNoB)
527  wceNoB += list(weightCovNoB)
528 
529  # the corresponding fit
530  fit_curve, = plt.plot(mu, model + counter*offset, '-', linewidth=4.0)
531  # Binned plot; numberOfBins = len(mu) would mean no binning.
532  gind = self.indexForBins(mu, numberOfBins)
533 
534  xb, yb, wyb, sigyb = self.binData(mu, cov, gind, weightCov)
535  plt.errorbar(xb, yb+counter*offset, yerr=sigyb, marker='o', linestyle='none', markersize=6.5,
536  color=fit_curve.get_color(), label=f"{amp} (N: {len(mu)})")
537  # plot the data
538  if plotData:
539  points, = plt.plot(mu, cov + counter*offset, '.', color=fit_curve.get_color())
540  plt.legend(loc='upper right', fontsize=8)
541  # end loop on amps
542  mue = np.array(mue)
543  rese = np.array(rese)
544  wce = np.array(wce)
545  mueNoB = np.array(mueNoB)
546  reseNoB = np.array(reseNoB)
547  wceNoB = np.array(wceNoB)
548 
549  plt.xlabel(r"$\mu (el)$", fontsize='x-large')
550  plt.ylabel(r"$Cov{%d%d}/\mu + Cst (el)$"%(i, j), fontsize='x-large')
551  if (not topPlot):
552  gind = self.indexForBins(mue, numberOfBins)
553  xb, yb, wyb, sigyb = self.binData(mue, rese, gind, wce)
554 
555  ax1 = plt.subplot(gs[1], sharex=ax0)
556  ax1.errorbar(xb, yb, yerr=sigyb, marker='o', linestyle='none', label='Full fit')
557  gindNoB = self.indexForBins(mueNoB, numberOfBins)
558  xb2, yb2, wyb2, sigyb2 = self.binData(mueNoB, reseNoB, gindNoB, wceNoB)
559 
560  ax1.errorbar(xb2, yb2, yerr=sigyb2, marker='o', linestyle='none', label='b = 0')
561  ax1.tick_params(axis='both', labelsize='x-large')
562  plt.legend(loc='upper left', fontsize='large')
563  # horizontal line at zero
564  plt.plot(xb, [0]*len(xb), '--', color='k')
565  plt.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
566  plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
567  plt.xlabel(r'$\mu (el)$', fontsize='x-large')
568  plt.ylabel(r'$Cov{%d%d}/\mu$ -model (el)'%(i, j), fontsize='x-large')
569  plt.tight_layout()
570  plt.suptitle(f"Nbins: {numberOfBins}")
571  # overlapping y labels:
572  fig.canvas.draw()
573  labels0 = [item.get_text() for item in ax0.get_yticklabels()]
574  labels0[0] = u''
575  ax0.set_yticklabels(labels0)
576  pdfPages.savefig(fig)
577 
578  return
579 
580  @staticmethod
581  def plot_a_b(aDict, bDict, pdfPages, bRange=3):
582  """Fig. 12 of Astier+19
583 
584  Color display of the fitted 'a' and 'b' arrays, averaged over channels.
585 
586  Parameters
587  ----------
588  aDict : `dict`, [`numpy.array`]
589  Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
590  in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
591 
592  bDict : `dict`, [`numpy.array`]
593  Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
594  in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
595 
596  pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
597  PDF file where the plots will be saved.
598 
599  bRange : `int`
600  Maximum lag for b arrays.
601  """
602  a, b = [], []
603  for amp in aDict:
604  if np.isnan(aDict[amp]).all():
605  continue
606  a.append(aDict[amp])
607  b.append(bDict[amp])
608  a = np.array(a).mean(axis=0)
609  b = np.array(b).mean(axis=0)
610  fig = plt.figure(figsize=(7, 11))
611  ax0 = fig.add_subplot(2, 1, 1)
612  im0 = ax0.imshow(np.abs(a.transpose()), origin='lower', norm=mpl.colors.LogNorm())
613  ax0.tick_params(axis='both', labelsize='x-large')
614  ax0.set_title(r'$|a|$', fontsize='x-large')
615  ax0.xaxis.set_ticks_position('bottom')
616  cb0 = plt.colorbar(im0)
617  cb0.ax.tick_params(labelsize='x-large')
618 
619  ax1 = fig.add_subplot(2, 1, 2)
620  ax1.tick_params(axis='both', labelsize='x-large')
621  ax1.yaxis.set_major_locator(MaxNLocator(integer=True))
622  ax1.xaxis.set_major_locator(MaxNLocator(integer=True))
623  im1 = ax1.imshow(1e6*b[:bRange, :bRange].transpose(), origin='lower')
624  cb1 = plt.colorbar(im1)
625  cb1.ax.tick_params(labelsize='x-large')
626  ax1.set_title(r'$b \times 10^6$', fontsize='x-large')
627  ax1.xaxis.set_ticks_position('bottom')
628  plt.tight_layout()
629  pdfPages.savefig(fig)
630 
631  return
632 
633  @staticmethod
634  def ab_vs_dist(aDict, bDict, pdfPages, bRange=4):
635  """Fig. 13 of Astier+19.
636 
637  Values of the fitted 'a' and 'b' arrays, averaged over amplifiers, as a function of lag distance.
638 
639  Parameters
640  ----------
641  aDict : `dict`, [`numpy.array`]
642  Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
643  in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
644 
645  bDict : `dict`, [`numpy.array`]
646  Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
647  in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
648 
649  pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
650  PDF file where the plots will be saved.
651 
652  bRange : `int`
653  Maximum lag for b arrays.
654  """
655  assert (len(aDict) == len(bDict))
656  a = []
657  for amp in aDict:
658  if np.isnan(aDict[amp]).all():
659  continue
660  a.append(aDict[amp])
661  a = np.array(a)
662  y = a.mean(axis=0)
663  sy = a.std(axis=0)/np.sqrt(len(aDict))
664  i, j = np.indices(y.shape)
665  upper = (i >= j).ravel()
666  r = np.sqrt(i**2 + j**2).ravel()
667  y = y.ravel()
668  sy = sy.ravel()
669  fig = plt.figure(figsize=(6, 9))
670  ax = fig.add_subplot(211)
671  ax.set_xlim([0.5, r.max()+1])
672  ax.errorbar(r[upper], y[upper], yerr=sy[upper], marker='o', linestyle='none', color='b',
673  label='$i>=j$')
674  ax.errorbar(r[~upper], y[~upper], yerr=sy[~upper], marker='o', linestyle='none', color='r',
675  label='$i<j$')
676  ax.legend(loc='upper center', fontsize='x-large')
677  ax.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
678  ax.set_ylabel(r'$a_{ij}$', fontsize='x-large')
679  ax.set_yscale('log')
680  ax.tick_params(axis='both', labelsize='x-large')
681 
682  #
683  axb = fig.add_subplot(212)
684  b = []
685  for amp in bDict:
686  if np.isnan(bDict[amp]).all():
687  continue
688  b.append(bDict[amp])
689  b = np.array(b)
690  yb = b.mean(axis=0)
691  syb = b.std(axis=0)/np.sqrt(len(bDict))
692  ib, jb = np.indices(yb.shape)
693  upper = (ib > jb).ravel()
694  rb = np.sqrt(ib**2 + jb**2).ravel()
695  yb = yb.ravel()
696  syb = syb.ravel()
697  xmin = -0.2
698  xmax = bRange
699  axb.set_xlim([xmin, xmax+0.2])
700  cutu = (r > xmin) & (r < xmax) & (upper)
701  cutl = (r > xmin) & (r < xmax) & (~upper)
702  axb.errorbar(rb[cutu], yb[cutu], yerr=syb[cutu], marker='o', linestyle='none', color='b',
703  label='$i>=j$')
704  axb.errorbar(rb[cutl], yb[cutl], yerr=syb[cutl], marker='o', linestyle='none', color='r',
705  label='$i<j$')
706  plt.legend(loc='upper center', fontsize='x-large')
707  axb.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
708  axb.set_ylabel(r'$b_{ij}$', fontsize='x-large')
709  axb.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
710  axb.tick_params(axis='both', labelsize='x-large')
711  plt.tight_layout()
712  pdfPages.savefig(fig)
713 
714  return
715 
716  @staticmethod
717  def plotAcoeffsSum(aDict, bDict, pdfPages):
718  """Fig. 14 of Astier+19.
719 
720  Cumulative sum of a_ij as a function of maximum separation. This plot displays the average over
721  channels.
722 
723  Parameters
724  ----------
725  aDict : `dict`, [`numpy.array`]
726  Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
727  in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
728 
729  bDict : `dict`, [`numpy.array`]
730  Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
731  in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
732 
733  pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
734  PDF file where the plots will be saved.
735  """
736  assert (len(aDict) == len(bDict))
737  a, b = [], []
738  for amp in aDict:
739  if np.isnan(aDict[amp]).all() or np.isnan(bDict[amp]).all():
740  continue
741  a.append(aDict[amp])
742  b.append(bDict[amp])
743  a = np.array(a).mean(axis=0)
744  b = np.array(b).mean(axis=0)
745  fig = plt.figure(figsize=(7, 6))
746  w = 4*np.ones_like(a)
747  w[0, 1:] = 2
748  w[1:, 0] = 2
749  w[0, 0] = 1
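# The weights reflect the symmetry of the a_ij kernel, which is only measured
# for lags i, j >= 0: off-axis terms appear 4 times in the full plane, terms
# on either axis twice, and a_00 once.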
750  wa = w*a
751  indices = range(1, a.shape[0]+1)
752  sums = np.array([wa[0:n, 0:n].sum() for n in indices])
753  ax = plt.subplot(111)
754  ax.plot(indices, sums/sums[0], 'o', color='b')
755  ax.set_yscale('log')
756  ax.set_xlim(indices[0]-0.5, indices[-1]+0.5)
757  ax.set_ylim(None, 1.2)
758  ax.set_ylabel(r'$[\sum_{|i|<n\ &\ |j|<n} a_{ij}] / |a_{00}|$', fontsize='x-large')
759  ax.set_xlabel('n', fontsize='x-large')
760  ax.tick_params(axis='both', labelsize='x-large')
761  plt.tight_layout()
762  pdfPages.savefig(fig)
763 
764  return
765 
766  @staticmethod
767  def plotRelativeBiasACoeffs(aDict, aDictNoB, fullCovsModel, fullCovsModelNoB, signalElectrons,
768  gainDict, pdfPages, maxr=None):
769  """Fig. 15 in Astier+19.
770 
771  Illustrates systematic bias from estimating 'a'
772  coefficients from the slope of correlations as opposed to the
773  full model in Astier+19.
774 
775  Parameters
776  ----------
777  aDict: `dict`
778  Dictionary of 'a' matrices (Eq. 20, Astier+19), with amp names as keys.
779 
780  aDictNoB: `dict`
781  Dictionary of 'a' matrices ('b'= 0 in Eq. 20, Astier+19), with amp names as keys.
782 
783  fullCovsModel : `dict`, [`str`, `list`]
784  Dictionary keyed by amp names containing covariances model per mean flux.
785 
786  fullCovsModelNoB : `dict`, [`str`, `list`]
787  Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of
788  Astier+19) per mean flux.
789 
790  signalElectrons : `float`
791  Signal at which to evaluate the a_ij coefficients.
792 
793  gainDict : `dict`, [`str`, `float`]
794  Dictionary keyed by amp names with the gains in e-/ADU.
795 
796  pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
797  PDF file where the plots will be saved.
798 
799  maxr : `int`, optional
800  Maximum lag.
801  """
802 
803  fig = plt.figure(figsize=(7, 11))
804  title = [f"'a' relative bias at {signalElectrons} e", "'a' relative bias (b=0)"]
805  data = [(aDict, fullCovsModel), (aDictNoB, fullCovsModelNoB)]
806 
807  for k, pair in enumerate(data):
808  diffs = []
809  amean = []
810  for amp in pair[0]:
811  covModel = pair[1][amp]
812  if np.isnan(covModel).all():
813  continue
814  aOld = computeApproximateAcoeffs(covModel, signalElectrons, gainDict[amp])
815  a = pair[0][amp]
816  amean.append(a)
817  diffs.append((aOld-a))
818  amean = np.array(amean).mean(axis=0)
819  diff = np.array(diffs).mean(axis=0)
820  diff = diff/amean
821  diff = diff[:]
822  # The difference should be close to zero
823  diff[0, 0] = 0
824  if maxr is None:
825  maxr = diff.shape[0]
826  diff = diff[:maxr, :maxr]
827  ax0 = fig.add_subplot(2, 1, k+1)
828  im0 = ax0.imshow(diff.transpose(), origin='lower')
829  ax0.yaxis.set_major_locator(MaxNLocator(integer=True))
830  ax0.xaxis.set_major_locator(MaxNLocator(integer=True))
831  ax0.tick_params(axis='both', labelsize='x-large')
832  plt.colorbar(im0)
833  ax0.set_title(title[k])
834 
835  plt.tight_layout()
836  pdfPages.savefig(fig)
837 
838  return
839 
840  def _plotStandardPtc(self, dataset, ptcFitType, pdfPages):
841  """Plot the PTC (linear and log-log) and variance/signal vs signal, per amplifier.
842 
843  Parameters
844  ----------
845  dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
846  The dataset containing the means, variances, exposure times, and mask.
847 
848  ptcFitType : `str`
849  Type of the model fit to the PTC. Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.
850 
851  pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
852  PDF file where the plots will be saved.
853  """
854 
855  if ptcFitType == 'EXPAPPROXIMATION':
856  ptcFunc = funcAstier
857  stringTitle = (r"Var = $\frac{1}{2g^2a_{00}}(\exp (2a_{00} \mu g) - 1) + \frac{n_{00}}{g^2}$ ")
858  elif ptcFitType == 'POLYNOMIAL':
859  ptcFunc = funcPolynomial
860  for key in dataset.ptcFitPars:
861  deg = len(dataset.ptcFitPars[key]) - 1
862  break
863  stringTitle = r"Polynomial (degree: %g)" % (deg)
864  else:
865  raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n" +
866  "Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.")
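# For 'EXPAPPROXIMATION', pars = [a00, gain, noise^2] (the exponential
# approximation of Astier+19); for 'POLYNOMIAL', pars are the polynomial
# coefficients [c0, c1, ...] of Var(mu), so that gain = 1/c1 and the read
# noise in ADU is sqrt(c0).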
867 
868  legendFontSize = 6.5
869  labelFontSize = 8
870  titleFontSize = 9
871  supTitleFontSize = 18
872  markerSize = 25
873 
874  # General determination of the size of the plot grid
875  nAmps = len(dataset.ampNames)
876  if nAmps == 2:
877  nRows, nCols = 2, 1
878  nRows = np.sqrt(nAmps)
879  mantissa, _ = np.modf(nRows)
880  if mantissa > 0:
881  nRows = int(nRows) + 1
882  nCols = nRows
883  else:
884  nRows = int(nRows)
885  nCols = nRows
886 
887  f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
888  f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
889  f3, ax3 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
890 
891  for i, (amp, a, a2, a3) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten(),
892  ax3.flatten())):
893  meanVecOriginal = np.array(dataset.rawMeans[amp])
894  varVecOriginal = np.array(dataset.rawVars[amp])
895  mask = np.array(dataset.expIdMask[amp])
896  if np.isnan(mask[0]):  # If all entries are NaN, the whole amp is bad.
897  a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
898  a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
899  a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
900  continue
901  else:
902  mask = mask.astype(bool)
903  meanVecFinal = meanVecOriginal[mask]
904  varVecFinal = varVecOriginal[mask]
905  meanVecOutliers = meanVecOriginal[np.invert(mask)]
906  varVecOutliers = varVecOriginal[np.invert(mask)]
907  pars, parsErr = np.array(dataset.ptcFitPars[amp]), np.array(dataset.ptcFitParsError[amp])
908  ptcRedChi2 = np.array(dataset.ptcFitChiSq[amp])
909  if ptcFitType == 'EXPAPPROXIMATION':
910  if len(meanVecFinal):
911  ptcA00, ptcA00error = pars[0], parsErr[0]
912  ptcGain, ptcGainError = pars[1], parsErr[1]
913  ptcNoise = np.sqrt((pars[2])) # pars[2] is in (e-)^2
914  ptcNoiseAdu = ptcNoise*(1./ptcGain)
915  ptcNoiseError = 0.5*(parsErr[2]/np.fabs(pars[2]))*np.sqrt(np.fabs(pars[2]))
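# Standard error propagation for noise = sqrt(pars[2]):
# sigma(noise) ~ 0.5*sigma(pars[2])/sqrt(|pars[2]|).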
916  stringLegend = (f"a00: {ptcA00:.2e}+/-{ptcA00error:.2e} 1/e"
917  f"\nGain: {ptcGain:.4}+/-{ptcGainError:.2e} e/ADU"
918  f"\nNoise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
919  r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}"
920  f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
921 
922  if ptcFitType == 'POLYNOMIAL':
923  if len(meanVecFinal):
924  ptcGain, ptcGainError = 1./pars[1], np.fabs(1./pars[1])*(parsErr[1]/pars[1])
925  ptcNoiseAdu = np.sqrt((pars[0]))  # pars[0] is in ADU^2
926  ptcNoise = ptcNoiseAdu*ptcGain
927  ptcNoiseError = (0.5*(parsErr[0]/np.fabs(pars[0]))*(np.sqrt(np.fabs(pars[0]))))*ptcGain
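# For the polynomial model Var = c0 + c1*mu + ..., the gain is 1/c1 (e/ADU)
# and the read noise in ADU is sqrt(c0); the errors above follow from
# standard propagation of parsErr.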
928  stringLegend = (f"Gain: {ptcGain:.4}+/-{ptcGainError:.2e} e/ADU\n"
929  f"Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
930  r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}"
931  f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
932 
933  a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
934  a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
935  a.tick_params(labelsize=11)
936  a.set_xscale('linear', fontsize=labelFontSize)
937  a.set_yscale('linear', fontsize=labelFontSize)
938 
939  a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
940  a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
941  a2.tick_params(labelsize=11)
942  a2.set_xscale('log')
943  a2.set_yscale('log')
944 
945  a3.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
946  a3.set_ylabel(r'Variance/$\mu$ (ADU)', fontsize=labelFontSize)
947  a3.tick_params(labelsize=11)
948  a3.set_xscale('log')
949  a3.set_yscale('linear', fontsize=labelFontSize)
950 
951  minMeanVecFinal = np.nanmin(meanVecFinal)
952  maxMeanVecFinal = np.nanmax(meanVecFinal)
953  meanVecFit = np.linspace(minMeanVecFinal, maxMeanVecFinal, 100*len(meanVecFinal))
954  minMeanVecOriginal = np.nanmin(meanVecOriginal)
955  maxMeanVecOriginal = np.nanmax(meanVecOriginal)
956  deltaXlim = maxMeanVecOriginal - minMeanVecOriginal
957  a.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
958  a.plot(meanVecFinal, ptcNoiseAdu**2 + (1./ptcGain)*meanVecFinal, color='green',
959  linestyle='--')
960  a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
961  a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
962  a.text(0.03, 0.66, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
963  a.set_title(amp, fontsize=titleFontSize)
964  a.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])
965 
966  # Same, but in log-scale
967  a2.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
968  a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
969  a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
970  a2.text(0.03, 0.66, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
971  a2.set_title(amp, fontsize=titleFontSize)
972  a2.set_xlim([minMeanVecOriginal, maxMeanVecOriginal])
973 
974  # Var/mu vs mu
975  a3.plot(meanVecFit, ptcFunc(pars, meanVecFit)/meanVecFit, color='red')
976  a3.scatter(meanVecFinal, varVecFinal/meanVecFinal, c='blue', marker='o', s=markerSize)
977  a3.scatter(meanVecOutliers, varVecOutliers/meanVecOutliers, c='magenta', marker='s',
978  s=markerSize)
979  a3.text(0.05, 0.1, stringLegend, transform=a3.transAxes, fontsize=legendFontSize)
980  a3.set_title(amp, fontsize=titleFontSize)
981  a3.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])
982 
983  f.suptitle("PTC \n Fit: " + stringTitle, fontsize=supTitleFontSize)
984  pdfPages.savefig(f)
985  f2.suptitle("PTC (log-log)", fontsize=supTitleFontSize)
986  pdfPages.savefig(f2)
987  f3.suptitle(r"Var/$\mu$", fontsize=supTitleFontSize)
988  pdfPages.savefig(f3)
989 
990  return
991 
992  def _plotLinearizer(self, dataset, linearizer, pdfPages):
993  """Plot linearity and linearity residual per amplifier
994 
995  Parameters
996  ----------
997  dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
998  The dataset containing the means, variances, exposure times, and mask.
999 
1000  linearizer : `lsst.ip.isr.Linearizer`
1001  Linearizer object
1002  """
1003  legendFontSize = 7
1004  labelFontSize = 7
1005  titleFontSize = 9
1006  supTitleFontSize = 18
1007 
1008  # General determination of the size of the plot grid
1009  nAmps = len(dataset.ampNames)
1010  if nAmps == 2:
1011  nRows, nCols = 2, 1
1012  nRows = np.sqrt(nAmps)
1013  mantissa, _ = np.modf(nRows)
1014  if mantissa > 0:
1015  nRows = int(nRows) + 1
1016  nCols = nRows
1017  else:
1018  nRows = int(nRows)
1019  nCols = nRows
1020 
1021  # Plot mean vs time (f1), and fractional residuals (f2)
1022  f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
1023  f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
1024  for i, (amp, a, a2) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten())):
1025  mask = dataset.expIdMask[amp]
1026  if np.isnan(mask[0]):
1027  a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
1028  a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
1029  continue
1030  else:
1031  mask = mask.astype(bool)
1032  meanVecFinal = np.array(dataset.rawMeans[amp])[mask]
1033  timeVecFinal = np.array(dataset.rawExpTimes[amp])[mask]
1034 
1035  a.set_xlabel('Time (sec)', fontsize=labelFontSize)
1036  a.set_ylabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
1037  a.tick_params(labelsize=labelFontSize)
1038  a.set_xscale('linear', fontsize=labelFontSize)
1039  a.set_yscale('linear', fontsize=labelFontSize)
1040 
1041  a2.axhline(y=0, color='k')
1042  a2.axvline(x=0, color='k', linestyle='-')
1043  a2.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
1044  a2.set_ylabel('Fractional nonlinearity (%)', fontsize=labelFontSize)
1045  a2.tick_params(labelsize=labelFontSize)
1046  a2.set_xscale('linear', fontsize=labelFontSize)
1047  a2.set_yscale('linear', fontsize=labelFontSize)
1048 
1049  pars, parsErr = linearizer.fitParams[amp], linearizer.fitParamsErr[amp]
1050  k0, k0Error = pars[0], parsErr[0]
1051  k1, k1Error = pars[1], parsErr[1]
1052  k2, k2Error = pars[2], parsErr[2]
1053  linRedChi2 = linearizer.fitChiSq[amp]
1054  stringLegend = (f"k0: {k0:.4}+/-{k0Error:.2e} ADU\nk1: {k1:.4}+/-{k1Error:.2e} ADU/t"
1055  f"\nk2: {k2:.2e}+/-{k2Error:.2e} ADU/t^2\n"
1056  r"$\chi^2_{\rm{red}}$: " + f"{linRedChi2:.4}")
1057  a.scatter(timeVecFinal, meanVecFinal)
1058  a.plot(timeVecFinal, funcPolynomial(pars, timeVecFinal), color='red')
1059  a.text(0.03, 0.75, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
1060  a.set_title(f"{amp}", fontsize=titleFontSize)
1061 
1062  linearPart = k0 + k1*timeVecFinal
1063  fracLinRes = 100*(linearPart - meanVecFinal)/linearPart
1064  a2.plot(meanVecFinal, fracLinRes, c='g')
1065  a2.set_title(f"{amp}", fontsize=titleFontSize)
1066 
1067  f.suptitle("Linearity \n Fit: Polynomial (degree: %g)"
1068  % (len(pars)-1),
1069  fontsize=supTitleFontSize)
1070  f2.suptitle(r"Fractional NL residual" + "\n" +
1071  r"$100\times \frac{(k_0 + k_1*Time-\mu)}{k_0+k_1*Time}$",
1072  fontsize=supTitleFontSize)
1073  pdfPages.savefig(f)
1074  pdfPages.savefig(f2)
1075 
1076  @staticmethod
1077  def findGroups(x, maxDiff):
1078  """Group data into bins, allowing at most maxDiff distance between values in the same group.
1079 
1080  Parameters
1081  ----------
1082  x: `list`
1083  Data to bin.
1084 
1085  maxDiff: `int`
1086  Maximum distance between a value and the running mean of its group.
1087 
1088  Returns
1089  -------
1090  index: `list`
1091  Bin indices.
1092  """
1093  ix = np.argsort(x)
1094  xsort = np.sort(x)
1095  index = np.zeros_like(x, dtype=np.int32)
1096  xc = xsort[0]
1097  group = 0
1098  ng = 1
1099 
1100  for i in range(1, len(ix)):
1101  xval = xsort[i]
1102  if (xval - xc < maxDiff):
1103  xc = (ng*xc + xval)/(ng+1)
1104  ng += 1
1105  index[ix[i]] = group
1106  else:
1107  group += 1
1108  ng = 1
1109  index[ix[i]] = group
1110  xc = xval
1111 
1112  return index
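# Illustrative example (not in the original source):
#   findGroups([1., 1.2, 5., 5.1], maxDiff=1.) returns array([0, 0, 1, 1]),
# i.e. the first two and the last two values are grouped together.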
1113 
1114  @staticmethod
1115  def indexForBins(x, nBins):
1116  """Builds an index with regular binning. The result can be fed into binData.
1117 
1118  Parameters
1119  ----------
1120  x: `numpy.array`
1121  Data to bin.
1122  nBins: `int`
1123  Number of bins.
1124 
1125  Returns
1126  -------
1127  np.digitize(x, bins): `numpy.array`
1128  Bin indices.
1129  """
1130 
1131  bins = np.linspace(x.min(), x.max() + abs(x.max() * 1e-7), nBins + 1)
1132  return np.digitize(x, bins)
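# Illustrative example (not in the original source):
#   indexForBins(np.array([0., 1., 2., 3.]), nBins=2) returns array([1, 1, 2, 2]);
# bin labels run from 1 to nBins.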
1133 
1134  @staticmethod
1135  def binData(x, y, binIndex, wy=None):
1136  """Bin data (usually for display purposes).
1137 
1138  Parameters
1139  ----------
1140  x: `numpy.array`
1141  Data to bin.
1142 
1143  y: `numpy.array`
1144  Data to bin.
1145 
1146  binIndex: `list`
1147  Bin number of each datum.
1148 
1149  wy: `numpy.array`
1150  Inverse rms of each datum to use when averaging (the actual weight is wy**2).
1151 
1152  Returns
1153  -------
1154 
1155  xbin: `numpy.array`
1156  Binned data in x.
1157 
1158  ybin: `numpy.array`
1159  Binned data in y.
1160 
1161  wybin: `numpy.array`
1162  Binned weights in y, computed from wy's in each bin.
1163 
1164  sybin: `numpy.array`
1165  Uncertainty on the bin average, considering actual scatter, and ignoring weights.
1166  """
1167 
1168  if wy is None:
1169  wy = np.ones_like(x)
1170  binIndexSet = set(binIndex)
1171  w2 = wy*wy
1172  xw2 = x*(w2)
1173  xbin = np.array([xw2[binIndex == i].sum()/w2[binIndex == i].sum() for i in binIndexSet])
1174 
1175  yw2 = y*w2
1176  ybin = np.array([yw2[binIndex == i].sum()/w2[binIndex == i].sum() for i in binIndexSet])
1177 
1178  wybin = np.sqrt(np.array([w2[binIndex == i].sum() for i in binIndexSet]))
1179  sybin = np.array([y[binIndex == i].std()/np.sqrt(np.array([binIndex == i]).sum())
1180  for i in binIndexSet])
1181 
1182  return xbin, ybin, wybin, sybin
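# Illustrative example (not in the original source) of binning four points
# into two bins with the static helpers above:
#   x = np.array([0., 1., 2., 3.])
#   y = np.array([0., 2., 4., 6.])
#   gind = PlotPhotonTransferCurveTask.indexForBins(x, 2)
#   xb, yb, wyb, sigyb = PlotPhotonTransferCurveTask.binData(x, y, gind)
#   # xb ~ [0.5, 2.5], yb ~ [1., 5.]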