__all__ = ['PlotPhotonTransferCurveTask']

import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
from matplotlib import gridspec
from matplotlib.backends.backend_pdf import PdfPages
from matplotlib.ticker import MaxNLocator

import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.ip.isr as isr
from lsst.ip.isr import PhotonTransferCurveDataset

from .utils import (funcAstier, funcPolynomial, NonexistentDatasetTaskDataIdContainer,
                    calculateWeightedReducedChi2)
from .astierCovPtcFit import computeApproximateAcoeffs
from .astierCovPtcUtils import getFitDataFromCovariances
class PlotPhotonTransferCurveTaskConfig(pexConfig.Config):
    """Config class for photon transfer curve measurement task"""
    datasetFileName = pexConfig.Field(
        dtype=str,
        doc="datasetPtc file name (fits)",
    )
    linearizerFileName = pexConfig.Field(
        dtype=str,
        doc="linearizer file name (fits)",
    )
    ccdKey = pexConfig.Field(
        dtype=str,
        doc="The key by which to pull a detector from a dataId, e.g. 'ccd' or 'detector'.",
    )
    signalElectronsRelativeA = pexConfig.Field(
        dtype=float,
        doc="Signal value for relative systematic bias between different methods of estimating a_ij "
            "(Fig. 15 of Astier+19).",
    )
    plotNormalizedCovariancesNumberOfBins = pexConfig.Field(
        dtype=int,
        doc="Number of bins in `plotNormalizedCovariances` function "
            "(Figs. 8, 10, and 11 of Astier+19).",
    )
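
# Example of overriding this config from Python (illustrative only; the file names
# and values below are hypothetical):
#
#     config = PlotPhotonTransferCurveTaskConfig()
#     config.datasetFileName = "ptcDataset-det000.fits"
#     config.linearizerFileName = ""        # empty string: linearizer plots are skipped
#     config.ccdKey = "detector"
#     config.plotNormalizedCovariancesNumberOfBins = 10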
class PlotPhotonTransferCurveTask(pipeBase.CmdLineTask):
    """A class to plot the dataset from MeasurePhotonTransferCurveTask.

    Parameters
    ----------
    *args
        Positional arguments passed to the Task constructor. None used at this time.
    **kwargs
        Keyword arguments passed on to the Task constructor. None used at this time.
    """

    ConfigClass = PlotPhotonTransferCurveTaskConfig
    _DefaultName = "plotPhotonTransferCurve"

    def __init__(self, *args, **kwargs):
        pipeBase.CmdLineTask.__init__(self, *args, **kwargs)
        plt.interactive(False)

    @classmethod
    def _makeArgumentParser(cls):
        """Augment argument parser for the MeasurePhotonTransferCurveTask."""
        parser = pipeBase.ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", datasetType="photonTransferCurveDataset",
                               ContainerClass=NonexistentDatasetTaskDataIdContainer,
                               help="The ccds to use, e.g. --id ccd=0..100")
        return parser
    def runDataRef(self, dataRef):
        """Run the Photon Transfer Curve (PTC) plotting measurement task.

        dataRef : `list` of `lsst.daf.persistence.ButlerDataRef`
            dataRef for the detector for the expIds to be fit.
        """
        datasetFile = self.config.datasetFileName
        datasetPtc = PhotonTransferCurveDataset.readFits(datasetFile)

        dirname = dataRef.getUri(datasetType='cpPipePlotRoot', write=True)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        detNum = dataRef.dataId[self.config.ccdKey]
        filename = f"PTC_det{detNum}.pdf"
        filenameFull = os.path.join(dirname, filename)

        if self.config.linearizerFileName:
            linearizer = isr.linearize.Linearizer.readFits(self.config.linearizerFileName)
        else:
            linearizer = None
        self.run(filenameFull, datasetPtc, linearizer=linearizer, log=self.log)

        return pipeBase.Struct(exitStatus=0)
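
    # Typical Gen2 command-line invocation (illustrative; the executable name and the
    # repository layout are assumptions, only the --id syntax comes from the parser
    # help string above):
    #
    #     plotPhotonTransferCurve.py REPO --rerun RERUN --id ccd=0 \
    #         --config datasetFileName=ptcDataset-det000.fits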
    def run(self, filenameFull, datasetPtc, linearizer=None, log=None):
        """Make the plots for the PTC task"""
        ptcFitType = datasetPtc.ptcFitType
        with PdfPages(filenameFull) as pdfPages:
            if ptcFitType in ["FULLCOVARIANCE", ]:
                self.covAstierMakeAllPlots(datasetPtc, pdfPages, log=log)
            elif ptcFitType in ["EXPAPPROXIMATION", "POLYNOMIAL"]:
                self._plotStandardPtc(datasetPtc, ptcFitType, pdfPages)
            else:
                raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n"
                                   "Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.")
    def covAstierMakeAllPlots(self, dataset, pdfPages, log=None):
        """Make plots for MeasurePhotonTransferCurve task when doCovariancesAstier=True.

        This function calls other functions that mostly reproduce the plots in Astier+19.
        Most of the code is ported from Pierre Astier's repository https://github.com/PierreAstier/bfptc

        dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
            The dataset containing the necessary information to produce the plots.

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        log : `lsst.log.Log`, optional
            Logger to handle messages.
        """
        mu = dataset.rawMeans
        fullCovs = dataset.covariances
        fullCovsModel = dataset.covariancesModel
        fullCovWeights = dataset.covariancesSqrtWeights
        aDict = dataset.aMatrix
        bDict = dataset.bMatrix
        fullCovsNoB = dataset.covariancesNoB
        fullCovsModelNoB = dataset.covariancesModelNoB
        fullCovWeightsNoB = dataset.covariancesSqrtWeightsNoB
        aDictNoB = dataset.aMatrixNoB
        gainDict = dataset.gain
        noiseDict = dataset.noise

        self.plotCovariances(mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB, fullCovsModelNoB,
                             fullCovWeightsNoB, gainDict, noiseDict, aDict, bDict, pdfPages)
        self.plotNormalizedCovariances(0, 0, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
                                       fullCovsModelNoB, fullCovWeightsNoB, pdfPages, offset=0.01,
                                       numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
                                       topPlot=True, log=log)
        self.plotNormalizedCovariances(0, 1, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
                                       fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
                                       numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
                                       log=log)
        self.plotNormalizedCovariances(1, 0, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
                                       fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
                                       numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
                                       log=log)
        self.plot_a_b(aDict, bDict, pdfPages)

        self.ab_vs_dist(aDict, bDict, pdfPages, bRange=4)
        self.plotRelativeBiasACoeffs(aDict, aDictNoB, fullCovsModel, fullCovsModelNoB,
                                     self.config.signalElectronsRelativeA, gainDict, pdfPages, maxr=4)
    @staticmethod
    def plotCovariances(mu, covs, covsModel, covsWeights, covsNoB, covsModelNoB, covsWeightsNoB,
                        gainDict, noiseDict, aDict, bDict, pdfPages):
        """Plot covariances and models: Cov00, Cov10, Cov01.

        Figs. 6 and 7 of Astier+19

        mu : `dict`, [`str`, `list`]
            Dictionary keyed by amp name with mean signal values.

        covs : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing a list of measured covariances per mean flux.

        covsModel : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing covariances model (Eq. 20 of Astier+19) per mean flux.

        covsWeights : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing sqrt. of covariances weights.

        covsNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing a list of measured covariances per mean flux
            ('b'=0 in Eq. 20 of Astier+19).

        covsModelNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of Astier+19)
            per mean flux.

        covsWeightsNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing sqrt. of covariances weights ('b' = 0 in Eq. 20 of
            Astier+19).

        gainDict : `dict`, [`str`, `float`]
            Dictionary keyed by amp names containing the gains in e-/ADU.

        noiseDict : `dict`, [`str`, `float`]
            Dictionary keyed by amp names containing the rms readout noise in e-.

        aDict : `dict`, [`str`, `numpy.array`]
            Dictionary keyed by amp names containing 'a' coefficients (Eq. 20 of Astier+19).

        bDict : `dict`, [`str`, `numpy.array`]
            Dictionary keyed by amp names containing 'b' coefficients (Eq. 20 of Astier+19).

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        supTitleFontSize = 18

        nAmps = len(covs)
        nRows = np.sqrt(nAmps)
        mantissa, _ = np.modf(nRows)
        if mantissa > 0:
            nRows = int(nRows) + 1
            nCols = nRows
        else:
            nRows = int(nRows)
            nCols = nRows

        f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        fResCov00, axResCov00 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row',
                                             figsize=(13, 10))
        fCov01, axCov01 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        fCov10, axCov10 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))

        assert(len(covsModel) == nAmps)
        assert(len(covsWeights) == nAmps)
        assert(len(covsNoB) == nAmps)
        assert(len(covsModelNoB) == nAmps)
        assert(len(covsWeightsNoB) == nAmps)

        for i, (amp, a, a2, aResVar, a3, a4) in enumerate(zip(covs, ax.flatten(),
                                                              ax2.flatten(), axResCov00.flatten(),
                                                              axCov01.flatten(), axCov10.flatten())):
            muAmp, cov, model, weight = mu[amp], covs[amp], covsModel[amp], covsWeights[amp]
            if not np.isnan(np.array(cov)).all():
                aCoeffs, bCoeffs = np.array(aDict[amp]), np.array(bDict[amp])
                gain, noise = gainDict[amp], noiseDict[amp]
                (meanVecOriginal, varVecOriginal, varVecModelOriginal,
                    weightsOriginal, varMask) = getFitDataFromCovariances(0, 0, muAmp, cov, model, weight)
                meanVecFinal, varVecFinal = meanVecOriginal[varMask], varVecOriginal[varMask]
                varVecModelFinal = varVecModelOriginal[varMask]
                meanVecOutliers = meanVecOriginal[np.invert(varMask)]
                varVecOutliers = varVecOriginal[np.invert(varMask)]
                varWeightsFinal = weightsOriginal[varMask]

                chi2FullModelVar = calculateWeightedReducedChi2(varVecFinal, varVecModelFinal,
                                                                varWeightsFinal, len(meanVecFinal), 4)
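
                # Note on the statistic above: calculateWeightedReducedChi2 is assumed
                # (not shown here) to implement the standard weighted reduced chi-square,
                #     chi2_red = sum([w_i*(data_i - model_i)]**2) / (nData - nPars),
                # with the w_i being the sqrt-weights (1/sigma_i) and the last argument
                # taken to be the number of fitted parameters.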
                (meanVecOrigCov01, varVecOrigCov01, varVecModelOrigCov01,
                    weightsOrigCov01, maskCov01) = getFitDataFromCovariances(0, 1, muAmp, cov, model, weight)
                meanVecFinalCov01, varVecFinalCov01 = meanVecOrigCov01[maskCov01], varVecOrigCov01[maskCov01]
                varVecModelFinalCov01 = varVecModelOrigCov01[maskCov01]
                meanVecOutliersCov01 = meanVecOrigCov01[np.invert(maskCov01)]
                varVecOutliersCov01 = varVecOrigCov01[np.invert(maskCov01)]

                (meanVecOrigCov10, varVecOrigCov10, varVecModelOrigCov10,
                    weightsOrigCov10, maskCov10) = getFitDataFromCovariances(1, 0, muAmp, cov, model, weight)
                meanVecFinalCov10, varVecFinalCov10 = meanVecOrigCov10[maskCov10], varVecOrigCov10[maskCov10]
                varVecModelFinalCov10 = varVecModelOrigCov10[maskCov10]
                meanVecOutliersCov10 = meanVecOrigCov10[np.invert(maskCov10)]
                varVecOutliersCov10 = varVecOrigCov10[np.invert(maskCov10)]

                # Quadratic fit to the variance, for comparison with the full model.
                par2 = np.polyfit(meanVecFinal, varVecFinal, 2, w=varWeightsFinal)
                varModelFinalQuadratic = np.polyval(par2, meanVecFinal)
                chi2QuadModelVar = calculateWeightedReducedChi2(varVecFinal, varModelFinalQuadratic,
                                                                varWeightsFinal, len(meanVecFinal), 3)

                # Fit with b = 0 in Eq. 20 of Astier+19.
                covNoB, modelNoB, weightNoB = covsNoB[amp], covsModelNoB[amp], covsWeightsNoB[amp]
                (meanVecFinalNoB, varVecFinalNoB, varVecModelFinalNoB,
                    varWeightsFinalNoB) = getFitDataFromCovariances(0, 0, muAmp, covNoB, modelNoB,
                                                                    weightNoB, returnMasked=True)
                chi2FullModelNoBVar = calculateWeightedReducedChi2(varVecFinalNoB, varVecModelFinalNoB,
                                                                   varWeightsFinalNoB, len(meanVecFinalNoB),
                                                                   3)
                stringLegend = (f"Gain: {gain:.4} e/ADU \n" +
                                f"Noise: {noise:.4} e \n" +
                                r"$a_{00}$: %.3e 1/e" % aCoeffs[0, 0] + "\n" +
                                r"$b_{00}$: %.3e 1/e" % bCoeffs[0, 0] +
                                f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
                minMeanVecFinal = np.nanmin(meanVecFinal)
                maxMeanVecFinal = np.nanmax(meanVecFinal)
                deltaXlim = maxMeanVecFinal - minMeanVecFinal

                a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
                a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
                a.tick_params(labelsize=11)
                a.set_xscale('linear', fontsize=labelFontSize)
                a.set_yscale('linear', fontsize=labelFontSize)
                a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
                a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
                a.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
                a.text(0.03, 0.7, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
                a.set_title(amp, fontsize=titleFontSize)
                a.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
                a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
                a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
                a2.tick_params(labelsize=11)
                a2.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
                a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
                a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
                a2.text(0.03, 0.7, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
                a2.set_title(amp, fontsize=titleFontSize)
                a2.set_xlim([minMeanVecFinal, maxMeanVecFinal])

                aResVar.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
                aResVar.set_ylabel(r'Residuals (ADU$^2$)', fontsize=labelFontSize)
                aResVar.tick_params(labelsize=11)
                aResVar.set_xscale('linear', fontsize=labelFontSize)
                aResVar.set_yscale('linear', fontsize=labelFontSize)
                aResVar.plot(meanVecFinal, varVecFinal - varVecModelFinal, color='blue', linestyle='-',
                             label=r'Full fit ($\chi_{\rm{red}}^2$: %g)' % chi2FullModelVar)
                aResVar.plot(meanVecFinal, varVecFinal - varModelFinalQuadratic, color='red', linestyle='-',
                             label=r'Quadratic fit ($\chi_{\rm{red}}^2$: %g)' % chi2QuadModelVar)
                aResVar.plot(meanVecFinalNoB, varVecFinalNoB - varVecModelFinalNoB, color='green',
                             linestyle='-',
                             label=r'Full fit (b=0) ($\chi_{\rm{red}}^2$: %g)' % chi2FullModelNoBVar)
                aResVar.axhline(color='black')
                aResVar.set_title(amp, fontsize=titleFontSize)
                aResVar.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
                aResVar.legend(fontsize=7)
                a3.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
                a3.set_ylabel(r'Cov01 (ADU$^2$)', fontsize=labelFontSize)
                a3.tick_params(labelsize=11)
                a3.set_xscale('linear', fontsize=labelFontSize)
                a3.set_yscale('linear', fontsize=labelFontSize)
                a3.scatter(meanVecFinalCov01, varVecFinalCov01, c='blue', marker='o', s=markerSize)
                a3.scatter(meanVecOutliersCov01, varVecOutliersCov01, c='magenta', marker='s', s=markerSize)
                a3.plot(meanVecFinalCov01, varVecModelFinalCov01, color='red', linestyle='-')
                a3.set_title(amp, fontsize=titleFontSize)
                a3.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])

                a4.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
                a4.set_ylabel(r'Cov10 (ADU$^2$)', fontsize=labelFontSize)
                a4.tick_params(labelsize=11)
                a4.set_xscale('linear', fontsize=labelFontSize)
                a4.set_yscale('linear', fontsize=labelFontSize)
                a4.scatter(meanVecFinalCov10, varVecFinalCov10, c='blue', marker='o', s=markerSize)
                a4.scatter(meanVecOutliersCov10, varVecOutliersCov10, c='magenta', marker='s', s=markerSize)
                a4.plot(meanVecFinalCov10, varVecModelFinalCov10, color='red', linestyle='-')
                a4.set_title(amp, fontsize=titleFontSize)
                a4.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
            else:
                a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a4.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
        f.suptitle("PTC from covariances as in Astier+19 \n Fit: Eq. 20, Astier+19",
                   fontsize=supTitleFontSize)
        pdfPages.savefig(f)
        f2.suptitle("PTC from covariances as in Astier+19 (log-log) \n Fit: Eq. 20, Astier+19",
                    fontsize=supTitleFontSize)
        pdfPages.savefig(f2)
        fResCov00.suptitle("Residuals (data-model) for Cov00 (Var)", fontsize=supTitleFontSize)
        pdfPages.savefig(fResCov00)
        fCov01.suptitle("Cov01 as in Astier+19 (nearest parallel neighbor covariance) \n"
                        " Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
        pdfPages.savefig(fCov01)
        fCov10.suptitle("Cov10 as in Astier+19 (nearest serial neighbor covariance) \n"
                        "Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
        pdfPages.savefig(fCov10)
    def plotNormalizedCovariances(self, i, j, inputMu, covs, covsModel, covsWeights, covsNoB, covsModelNoB,
                                  covsWeightsNoB, pdfPages, offset=0.004,
                                  numberOfBins=10, plotData=True, topPlot=False, log=None):
        """Plot C_ij/mu vs mu.

        Figs. 8, 10, and 11 of Astier+19

        inputMu : `dict`, [`str`, `list`]
            Dictionary keyed by amp name with mean signal values.

        covs : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing a list of measured covariances per mean flux.

        covsModel : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing covariances model (Eq. 20 of Astier+19) per mean flux.

        covsWeights : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing sqrt. of covariances weights.

        covsNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing a list of measured covariances per mean flux
            ('b'=0 in Eq. 20 of Astier+19).

        covsModelNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of Astier+19)
            per mean flux.

        covsWeightsNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing sqrt. of covariances weights ('b' = 0 in Eq. 20 of
            Astier+19).

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        offset : `float`, optional
            Constant offset factor to plot covariances in same panel (so they don't overlap).

        numberOfBins : `int`, optional
            Number of bins for top and bottom plot.

        plotData : `bool`, optional
            Plot the data points?

        topPlot : `bool`, optional
            Plot the top plot with the covariances, and the bottom plot with the model residuals?

        log : `lsst.log.Log`, optional
            Logger to handle messages.
        """
        if topPlot:
            fig = plt.figure(figsize=(8, 10))
            gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
            ax0 = plt.subplot(gs[0])
            plt.setp(ax0.get_xticklabels(), visible=False)
        else:
            fig = plt.figure(figsize=(8, 8))
            ax0 = plt.subplot(111)
            ax0.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
        ax0.tick_params(axis='both', labelsize='x-large')
        mue, rese, wce = [], [], []
        mueNoB, reseNoB, wceNoB = [], [], []
        for counter, amp in enumerate(covs):
            muAmp, fullCov, fullCovModel, fullCovWeight = (inputMu[amp], covs[amp], covsModel[amp],
                                                           covsWeights[amp])
            if len(fullCov) == 0:
                continue
            (mu, cov, model, weightCov) = getFitDataFromCovariances(i, j, muAmp, fullCov, fullCovModel,
                                                                    fullCovWeight, divideByMu=True,
                                                                    returnMasked=True)
            mue += list(mu)
            rese += list(cov - model)
            wce += list(weightCov)

            fullCovNoB, fullCovModelNoB, fullCovWeightNoB = (covsNoB[amp], covsModelNoB[amp],
                                                             covsWeightsNoB[amp])
            if len(fullCovNoB) == 0:
                continue
            (muNoB, covNoB, modelNoB,
                weightCovNoB) = getFitDataFromCovariances(i, j, muAmp, fullCovNoB, fullCovModelNoB,
                                                          fullCovWeightNoB, divideByMu=True,
                                                          returnMasked=True)
            mueNoB += list(muNoB)
            reseNoB += list(covNoB - modelNoB)
            wceNoB += list(weightCovNoB)

            fit_curve, = plt.plot(mu, model + counter*offset, '-', linewidth=4.0)
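            # Each amplifier's curve (and its binned points below) is shifted vertically
            # by counter*offset so that all amps share one panel without overlapping,
            # as described for the `offset` parameter in the docstring.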
            gind = self.indexForBins(mu, numberOfBins)
            xb, yb, wyb, sigyb = self.binData(mu, cov, gind, weightCov)
            plt.errorbar(xb, yb + counter*offset, yerr=sigyb, marker='o', linestyle='none', markersize=6.5,
                         color=fit_curve.get_color(), label=f"{amp} (N: {len(mu)})")
            if plotData:
                points, = plt.plot(mu, cov + counter*offset, '.', color=fit_curve.get_color())
            plt.legend(loc='upper right', fontsize=8)

        mue = np.array(mue)
        rese = np.array(rese)
        wce = np.array(wce)
        mueNoB = np.array(mueNoB)
        reseNoB = np.array(reseNoB)
        wceNoB = np.array(wceNoB)
        plt.xlabel(r"$\mu (el)$", fontsize='x-large')
        plt.ylabel(r"$Cov{%d%d}/\mu + Cst (el)$" % (i, j), fontsize='x-large')
        if topPlot:
            gind = self.indexForBins(mue, numberOfBins)
            xb, yb, wyb, sigyb = self.binData(mue, rese, gind, wce)

            ax1 = plt.subplot(gs[1], sharex=ax0)
            ax1.errorbar(xb, yb, yerr=sigyb, marker='o', linestyle='none', label='Full fit')
            gindNoB = self.indexForBins(mueNoB, numberOfBins)
            xb2, yb2, wyb2, sigyb2 = self.binData(mueNoB, reseNoB, gindNoB, wceNoB)

            ax1.errorbar(xb2, yb2, yerr=sigyb2, marker='o', linestyle='none', label='b = 0')
            ax1.tick_params(axis='both', labelsize='x-large')
            plt.legend(loc='upper left', fontsize='large')
            # Horizontal line at zero for the residuals.
            plt.plot(xb, [0]*len(xb), '--', color='k')
            plt.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
            plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
            plt.xlabel(r'$\mu (el)$', fontsize='x-large')
            plt.ylabel(r'$Cov{%d%d}/\mu$ - model (el)' % (i, j), fontsize='x-large')

        plt.suptitle(f"Nbins: {numberOfBins}")
        labels0 = [item.get_text() for item in ax0.get_yticklabels()]
        ax0.set_yticklabels(labels0)
        pdfPages.savefig(fig)
    def plot_a_b(self, aDict, bDict, pdfPages, bRange=3):
        """Fig. 12 of Astier+19

        Color display of a and b arrays fits, averaged over channels.

        aDict : `dict`, [`numpy.array`]
            Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
            in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).

        bDict : `dict`, [`numpy.array`]
            Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
            in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        bRange : `int`, optional
            Maximum lag for b arrays.
        """
        a, b = [], []
        for amp in aDict:
            if np.isnan(aDict[amp]).all():
                continue
            a.append(aDict[amp])
            b.append(bDict[amp])
        a = np.array(a).mean(axis=0)
        b = np.array(b).mean(axis=0)
        fig = plt.figure(figsize=(7, 11))
        ax0 = fig.add_subplot(2, 1, 1)
        im0 = ax0.imshow(np.abs(a.transpose()), origin='lower', norm=mpl.colors.LogNorm())
        ax0.tick_params(axis='both', labelsize='x-large')
        ax0.set_title(r'$|a|$', fontsize='x-large')
        ax0.xaxis.set_ticks_position('bottom')
        cb0 = plt.colorbar(im0)
        cb0.ax.tick_params(labelsize='x-large')

        ax1 = fig.add_subplot(2, 1, 2)
        ax1.tick_params(axis='both', labelsize='x-large')
        ax1.yaxis.set_major_locator(MaxNLocator(integer=True))
        ax1.xaxis.set_major_locator(MaxNLocator(integer=True))
        im1 = ax1.imshow(1e6*b[:bRange, :bRange].transpose(), origin='lower')
        cb1 = plt.colorbar(im1)
        cb1.ax.tick_params(labelsize='x-large')
        ax1.set_title(r'$b \times 10^6$', fontsize='x-large')
        ax1.xaxis.set_ticks_position('bottom')
        pdfPages.savefig(fig)
    def ab_vs_dist(self, aDict, bDict, pdfPages, bRange=4):
        """Fig. 13 of Astier+19.

        Values of a and b arrays fits, averaged over amplifiers, as a function of distance.

        aDict : `dict`, [`numpy.array`]
            Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
            in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).

        bDict : `dict`, [`numpy.array`]
            Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
            in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        bRange : `int`, optional
            Maximum lag for b arrays.
        """
        assert (len(aDict) == len(bDict))
        a = []
        for amp in aDict:
            if np.isnan(aDict[amp]).all():
                continue
            a.append(aDict[amp])
        a = np.array(a)
        y = a.mean(axis=0)
        sy = a.std(axis=0)/np.sqrt(len(aDict))
        i, j = np.indices(y.shape)
        upper = (i >= j).ravel()
        r = np.sqrt(i**2 + j**2).ravel()
        y = y.ravel()
        sy = sy.ravel()
        fig = plt.figure(figsize=(6, 9))
        ax = fig.add_subplot(211)
        ax.set_xlim([0.5, r.max()+1])
        ax.errorbar(r[upper], y[upper], yerr=sy[upper], marker='o', linestyle='none', color='b',
                    label='$i>=j$')
        ax.errorbar(r[~upper], y[~upper], yerr=sy[~upper], marker='o', linestyle='none', color='r',
                    label='$i<j$')
        ax.legend(loc='upper center', fontsize='x-large')
        ax.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
        ax.set_ylabel(r'$a_{ij}$', fontsize='x-large')
        ax.tick_params(axis='both', labelsize='x-large')

        axb = fig.add_subplot(212)
        b = []
        for amp in bDict:
            if np.isnan(bDict[amp]).all():
                continue
            b.append(bDict[amp])
        b = np.array(b)
        yb = b.mean(axis=0)
        syb = b.std(axis=0)/np.sqrt(len(bDict))
        ib, jb = np.indices(yb.shape)
        upper = (ib > jb).ravel()
        rb = np.sqrt(i**2 + j**2).ravel()
        yb = yb.ravel()
        syb = syb.ravel()
        axb.set_xlim([xmin, xmax+0.2])
        cutu = (r > xmin) & (r < xmax) & (upper)
        cutl = (r > xmin) & (r < xmax) & (~upper)
        axb.errorbar(rb[cutu], yb[cutu], yerr=syb[cutu], marker='o', linestyle='none', color='b',
                     label='$i>=j$')
        axb.errorbar(rb[cutl], yb[cutl], yerr=syb[cutl], marker='o', linestyle='none', color='r',
                     label='$i<j$')
        plt.legend(loc='upper center', fontsize='x-large')
        axb.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
        axb.set_ylabel(r'$b_{ij}$', fontsize='x-large')
        axb.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
        axb.tick_params(axis='both', labelsize='x-large')
        pdfPages.savefig(fig)
    def plotAcoeffsSum(self, aDict, bDict, pdfPages):
        """Fig. 14 of Astier+19

        Cumulative sum of a_ij as a function of maximum separation. This plot displays the average over
        channels.

        aDict : `dict`, [`numpy.array`]
            Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
            in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).

        bDict : `dict`, [`numpy.array`]
            Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
            in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        assert (len(aDict) == len(bDict))
        a, b = [], []
        for amp in aDict:
            if np.isnan(aDict[amp]).all() or np.isnan(bDict[amp]).all():
                continue
            a.append(aDict[amp])
            b.append(bDict[amp])
        a = np.array(a).mean(axis=0)
        b = np.array(b).mean(axis=0)
        fig = plt.figure(figsize=(7, 6))
        w = 4*np.ones_like(a)
        w[0, :] = 2
        w[:, 0] = 2
        w[0, 0] = 1
        wa = w*a
        indices = range(1, a.shape[0]+1)
        sums = np.array([wa[0:n, 0:n].sum() for n in indices])
        ax = plt.subplot(111)
        ax.plot(indices, sums/sums[0], 'o', color='b')
        ax.set_xlim(indices[0]-0.5, indices[-1]+0.5)
        ax.set_ylim(None, 1.2)
        ax.set_ylabel(r'$[\sum_{|i|<n\ &\ |j|<n} a_{ij}] / |a_{00}|$', fontsize='x-large')
        ax.set_xlabel('n', fontsize='x-large')
        ax.tick_params(axis='both', labelsize='x-large')
        pdfPages.savefig(fig)
    def plotRelativeBiasACoeffs(self, aDict, aDictNoB, fullCovsModel, fullCovsModelNoB, signalElectrons,
                                gainDict, pdfPages, maxr=None):
        """Fig. 15 in Astier+19.

        Illustrates systematic bias from estimating 'a'
        coefficients from the slope of correlations as opposed to the
        full model in Astier+19.

        aDict : `dict`, [`str`, `numpy.array`]
            Dictionary of 'a' matrices (Eq. 20, Astier+19), with amp names as keys.

        aDictNoB : `dict`, [`str`, `numpy.array`]
            Dictionary of 'a' matrices ('b' = 0 in Eq. 20, Astier+19), with amp names as keys.

        fullCovsModel : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing covariances model per mean flux.

        fullCovsModelNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of
            Astier+19) per mean flux.

        signalElectrons : `float`
            Signal at which to evaluate the a_ij coefficients.

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        gainDict : `dict`, [`str`, `float`]
            Dictionary keyed by amp names with the gains in e-/ADU.

        maxr : `int`, optional
            Maximum lag.
        """
        fig = plt.figure(figsize=(7, 11))
        title = [f"'a' relative bias at {signalElectrons} e",
                 "'a' relative bias (b=0)"]
        data = [(aDict, fullCovsModel), (aDictNoB, fullCovsModelNoB)]

        for k, pair in enumerate(data):
            diffs = []
            amean = []
            for amp in pair[0]:
                covModel = pair[1][amp]
                if np.isnan(covModel).all():
                    continue
                aOld = computeApproximateAcoeffs(covModel, signalElectrons, gainDict[amp])
                a = pair[0][amp]
                amean.append(a)
                diffs.append((aOld - a))
            amean = np.array(amean).mean(axis=0)
            diff = np.array(diffs).mean(axis=0)
            diff = diff/amean
            diff = diff[:maxr, :maxr]
            ax0 = fig.add_subplot(2, 1, k+1)
            im0 = ax0.imshow(diff.transpose(), origin='lower')
            ax0.yaxis.set_major_locator(MaxNLocator(integer=True))
            ax0.xaxis.set_major_locator(MaxNLocator(integer=True))
            ax0.tick_params(axis='both', labelsize='x-large')
            ax0.set_title(title[k])

        pdfPages.savefig(fig)
    def _plotStandardPtc(self, dataset, ptcFitType, pdfPages):
        """Plot PTC, var/signal vs signal, linearity, and linearity residual per amplifier.

        dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
            The dataset containing the means, variances, exposure times, and mask.

        ptcFitType : `str`
            Type of the model fit to the PTC. Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION',
            or 'POLYNOMIAL'.

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        if ptcFitType == 'EXPAPPROXIMATION':
            ptcFunc = funcAstier
            stringTitle = (r"Var = $\frac{1}{2g^2a_{00}}(\exp (2a_{00} \mu g) - 1) + \frac{n_{00}}{g^2}$ ")
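            # Illustrative sketch only: funcAstier (imported from .utils) is assumed to
            # implement the expression shown in stringTitle above, with a parameter
            # ordering [a00, gain, noiseSquared] implied by how pars[0..2] are unpacked
            # in the EXPAPPROXIMATION legend further down:
            #
            #     def funcAstierSketch(pars, mu):
            #         a00, gain, noiseSq = pars
            #         return (0.5/(a00*gain*gain))*(np.exp(2.0*a00*mu*gain) - 1.0) + noiseSq/(gain*gain)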
        elif ptcFitType == 'POLYNOMIAL':
            ptcFunc = funcPolynomial
            for key in dataset.ptcFitPars:
                deg = len(dataset.ptcFitPars[key]) - 1
            stringTitle = r"Polynomial (degree: %g)" % (deg)
        else:
            raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n"
                               "Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.")
        supTitleFontSize = 18

        nAmps = len(dataset.ampNames)
        nRows = np.sqrt(nAmps)
        mantissa, _ = np.modf(nRows)
        if mantissa > 0:
            nRows = int(nRows) + 1
            nCols = nRows
        else:
            nRows = int(nRows)
            nCols = nRows

        f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f3, ax3 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        for i, (amp, a, a2, a3) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten(),
                                                 ax3.flatten())):
            meanVecOriginal = np.array(dataset.rawMeans[amp])
            varVecOriginal = np.array(dataset.rawVars[amp])
            mask = np.array(dataset.expIdMask[amp])
            if np.isnan(mask[0]):
                a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                continue

            mask = mask.astype(bool)
            meanVecFinal = meanVecOriginal[mask]
            varVecFinal = varVecOriginal[mask]
            meanVecOutliers = meanVecOriginal[np.invert(mask)]
            varVecOutliers = varVecOriginal[np.invert(mask)]
            pars, parsErr = np.array(dataset.ptcFitPars[amp]), np.array(dataset.ptcFitParsError[amp])
            ptcRedChi2 = np.array(dataset.ptcFitChiSq[amp])
            if ptcFitType == 'EXPAPPROXIMATION':
                if len(meanVecFinal):
                    ptcA00, ptcA00error = pars[0], parsErr[0]
                    ptcGain, ptcGainError = pars[1], parsErr[1]
                    ptcNoise = np.sqrt(pars[2])
                    ptcNoiseAdu = ptcNoise*(1./ptcGain)
                    ptcNoiseError = 0.5*(parsErr[2]/np.fabs(pars[2]))*np.sqrt(np.fabs(pars[2]))
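                    # The noise error above is the delta-method propagation of the error
                    # on pars[2] through the square root: sigma(sqrt(x)) = sigma(x)/(2*sqrt(x)).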
                    stringLegend = (f"a00: {ptcA00:.2e}+/-{ptcA00error:.2e} 1/e"
                                    f"\nGain: {ptcGain:.4}+/-{ptcGainError:.2e} e/ADU"
                                    f"\nNoise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
                                    r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}"
                                    f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
            if ptcFitType == 'POLYNOMIAL':
                if len(meanVecFinal):
                    ptcGain, ptcGainError = 1./pars[1], np.fabs(1./pars[1])*(parsErr[1]/pars[1])
                    ptcNoiseAdu = np.sqrt(pars[0])
                    ptcNoise = ptcNoiseAdu*ptcGain
                    ptcNoiseError = (0.5*(parsErr[0]/np.fabs(pars[0]))*(np.sqrt(np.fabs(pars[0]))))*ptcGain
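                    # For a polynomial PTC the gain is 1/c1 (c1 being the linear coefficient)
                    # and the readout noise in ADU is sqrt(c0); the errors above follow from
                    # standard relative-error propagation of c0 and c1.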
                    stringLegend = (f"Gain: {ptcGain:.4}+/-{ptcGainError:.2e} e/ADU\n"
                                    f"Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
                                    r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}"
                                    f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
            a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
            a.tick_params(labelsize=11)
            a.set_xscale('linear', fontsize=labelFontSize)
            a.set_yscale('linear', fontsize=labelFontSize)

            a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
            a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
            a2.tick_params(labelsize=11)

            a3.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a3.set_ylabel(r'Variance/$\mu$ (ADU)', fontsize=labelFontSize)
            a3.tick_params(labelsize=11)
            a3.set_yscale('linear', fontsize=labelFontSize)

            minMeanVecFinal = np.nanmin(meanVecFinal)
            maxMeanVecFinal = np.nanmax(meanVecFinal)
            meanVecFit = np.linspace(minMeanVecFinal, maxMeanVecFinal, 100*len(meanVecFinal))
            minMeanVecOriginal = np.nanmin(meanVecOriginal)
            maxMeanVecOriginal = np.nanmax(meanVecOriginal)
            deltaXlim = maxMeanVecOriginal - minMeanVecOriginal

            a.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
            a.plot(meanVecFinal, ptcNoiseAdu**2 + (1./ptcGain)*meanVecFinal, color='green',
                   linestyle='--')
            a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
            a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
            a.text(0.03, 0.66, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
            a.set_title(amp, fontsize=titleFontSize)
            a.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])

            a2.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
            a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
            a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
            a2.text(0.03, 0.66, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
            a2.set_title(amp, fontsize=titleFontSize)
            a2.set_xlim([minMeanVecOriginal, maxMeanVecOriginal])

            a3.plot(meanVecFit, ptcFunc(pars, meanVecFit)/meanVecFit, color='red')
            a3.scatter(meanVecFinal, varVecFinal/meanVecFinal, c='blue', marker='o', s=markerSize)
            a3.scatter(meanVecOutliers, varVecOutliers/meanVecOutliers, c='magenta', marker='s',
                       s=markerSize)
            a3.text(0.05, 0.1, stringLegend, transform=a3.transAxes, fontsize=legendFontSize)
            a3.set_title(amp, fontsize=titleFontSize)
            a3.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])

        f.suptitle("PTC \n Fit: " + stringTitle, fontsize=supTitleFontSize)
        pdfPages.savefig(f)
        f2.suptitle("PTC (log-log)", fontsize=supTitleFontSize)
        pdfPages.savefig(f2)
        f3.suptitle(r"Var/$\mu$", fontsize=supTitleFontSize)
        pdfPages.savefig(f3)
    def _plotLinearizer(self, dataset, linearizer, pdfPages):
        """Plot linearity and linearity residual per amplifier

        dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
            The dataset containing the means, variances, exposure times, and mask.

        linearizer : `lsst.ip.isr.Linearizer`
            Linearizer to be plotted.

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        supTitleFontSize = 18

        nAmps = len(dataset.ampNames)
        nRows = np.sqrt(nAmps)
        mantissa, _ = np.modf(nRows)
        if mantissa > 0:
            nRows = int(nRows) + 1
            nCols = nRows
        else:
            nRows = int(nRows)
            nCols = nRows

        f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        for i, (amp, a, a2) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten())):
            mask = dataset.expIdMask[amp]
            if np.isnan(mask[0]):
                a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                continue

            mask = mask.astype(bool)
            meanVecFinal = np.array(dataset.rawMeans[amp])[mask]
            timeVecFinal = np.array(dataset.rawExpTimes[amp])[mask]

            a.set_xlabel('Time (sec)', fontsize=labelFontSize)
            a.set_ylabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a.tick_params(labelsize=labelFontSize)
            a.set_xscale('linear', fontsize=labelFontSize)
            a.set_yscale('linear', fontsize=labelFontSize)

            a2.axhline(y=0, color='k')
            a2.axvline(x=0, color='k', linestyle='-')
            a2.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a2.set_ylabel('Fractional nonlinearity (%)', fontsize=labelFontSize)
            a2.tick_params(labelsize=labelFontSize)
            a2.set_xscale('linear', fontsize=labelFontSize)
            a2.set_yscale('linear', fontsize=labelFontSize)

            pars, parsErr = linearizer.fitParams[amp], linearizer.fitParamsErr[amp]
            k0, k0Error = pars[0], parsErr[0]
            k1, k1Error = pars[1], parsErr[1]
            k2, k2Error = pars[2], parsErr[2]
            linRedChi2 = linearizer.fitChiSq[amp]
            stringLegend = (f"k0: {k0:.4}+/-{k0Error:.2e} ADU\nk1: {k1:.4}+/-{k1Error:.2e} ADU/t"
                            f"\nk2: {k2:.2e}+/-{k2Error:.2e} ADU/t^2\n"
                            r"$\chi^2_{\rm{red}}$: " + f"{linRedChi2:.4}")
            a.scatter(timeVecFinal, meanVecFinal)
            a.plot(timeVecFinal, funcPolynomial(pars, timeVecFinal), color='red')
            a.text(0.03, 0.75, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
            a.set_title(f"{amp}", fontsize=titleFontSize)

            linearPart = k0 + k1*timeVecFinal
            fracLinRes = 100*(linearPart - meanVecFinal)/linearPart
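            # Fractional nonlinearity residual, in percent, relative to the linear part of
            # the fit: 100*(k0 + k1*t - mu)/(k0 + k1*t); this is the quantity quoted in the
            # title of the f2 figure below.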
            a2.plot(meanVecFinal, fracLinRes, c='g')
            a2.set_title(f"{amp}", fontsize=titleFontSize)

        f.suptitle("Linearity \n Fit: Polynomial (degree: %g)" % (len(pars) - 1),
                   fontsize=supTitleFontSize)
        pdfPages.savefig(f)
        f2.suptitle(r"Fractional NL residual" + "\n" +
                    r"$100\times \frac{(k_0 + k_1*Time-\mu)}{k_0+k_1*Time}$",
                    fontsize=supTitleFontSize)
        pdfPages.savefig(f2)
    @staticmethod
    def findGroups(x, maxDiff):
        """Group data into bins, with at most maxDiff distance between bins.

        maxDiff : `int`
            Maximum distance between bins.

        Returns
        -------
        index : `list`
            Bin indices.
        """
        ix = np.argsort(x)
        xsort = np.sort(x)
        index = np.zeros_like(x, dtype=np.int32)
        xc = xsort[0]
        group = 0
        ng = 1
        for i in range(1, len(ix)):
            xval = xsort[i]
            if (xval - xc < maxDiff):
                xc = (ng*xc + xval)/(ng+1)
                ng += 1
                index[ix[i]] = group
            else:
                group += 1
                ng = 1
                index[ix[i]] = group
                xc = xval
        return index
    @staticmethod
    def indexForBins(x, nBins):
        """Builds an index with regular binning. The result can be fed into binData.

        Returns
        -------
        np.digitize(x, bins) : `numpy.array`
            Bin indices.
        """
        bins = np.linspace(x.min(), x.max() + abs(x.max() * 1e-7), nBins + 1)
        return np.digitize(x, bins)
    @staticmethod
    def binData(x, y, binIndex, wy=None):
        """Bin data (usually for display purposes).

        binIndex : `list`
            Bin number of each datum.

        wy : `list`, optional
            Inverse rms of each datum to use when averaging (the actual weight is wy**2).

        Returns
        -------
        wybin : `numpy.array`
            Binned weights in y, computed from wy's in each bin.

        sybin : `numpy.array`
            Uncertainty on the bin average, considering actual scatter, and ignoring weights.
        """
        if wy is None:
            wy = np.ones_like(x)
        binIndexSet = set(binIndex)
        w2 = wy*wy
        xw2 = x*w2
        xbin = np.array([xw2[binIndex == i].sum()/w2[binIndex == i].sum() for i in binIndexSet])
        yw2 = y*w2
        ybin = np.array([yw2[binIndex == i].sum()/w2[binIndex == i].sum() for i in binIndexSet])
        wybin = np.sqrt(np.array([w2[binIndex == i].sum() for i in binIndexSet]))
        sybin = np.array([y[binIndex == i].std()/np.sqrt(np.array([binIndex == i]).sum())
                          for i in binIndexSet])

        return xbin, ybin, wybin, sybin
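
# Minimal usage sketch for the two binning helpers above (illustrative, not part of the
# module; constructing the task with its default config is an assumption):
#
#     import numpy as np
#     task = PlotPhotonTransferCurveTask()
#     x = np.linspace(100., 1.e5, 500)                      # mean-signal-like values
#     y = 0.7*x + np.random.normal(0., 50., size=x.shape)   # noisy linear relation
#     gind = task.indexForBins(x, nBins=10)                 # regular bins over [min, max]
#     xbin, ybin, wybin, sybin = task.binData(x, y, gind)   # per-bin weighted means/scatter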