__all__ = ['PlotPhotonTransferCurveTaskGen2']

import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
from matplotlib import gridspec
import os
from matplotlib.backends.backend_pdf import PdfPages

import lsst.ip.isr as isr
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase

from lsst.ip.isr import PhotonTransferCurveDataset
from lsst.cp.pipe.utils import (funcAstier, funcPolynomial, NonexistentDatasetTaskDataIdContainer,
                                calculateWeightedReducedChi2)
from matplotlib.ticker import MaxNLocator
class PlotPhotonTransferCurveTaskConfigGen2(pexConfig.Config):
    """Config class for photon transfer curve measurement task"""
    datasetFileName = pexConfig.Field(
        doc="datasetPtc file name (fits)",
    )
    linearizerFileName = pexConfig.Field(
        doc="linearizer file name (fits)",
    )
    ccdKey = pexConfig.Field(
        doc="The key by which to pull a detector from a dataId, e.g. 'ccd' or 'detector'.",
    )
    signalElectronsRelativeA = pexConfig.Field(
        doc="Signal value for relative systematic bias between different methods of estimating a_ij "
            "(Fig. 15 of Astier+19).",
    )
    plotNormalizedCovariancesNumberOfBins = pexConfig.Field(
        doc="Number of bins in `plotNormalizedCovariances` function "
            "(Fig. 8, 10., of Astier+19).",
    )
class PlotPhotonTransferCurveTaskGen2(pipeBase.CmdLineTask):
    """A class to plot the dataset from MeasurePhotonTransferCurveTask.

    Parameters
    ----------
    *args
        Positional arguments passed to the Task constructor. None used at this
        moment.
    **kwargs
        Keyword arguments passed on to the Task constructor. None used at this
        moment.

    Notes
    -----
    The plotting code in this file is almost identical to the code in
    `plotPtc.py`. If further changes are implemented in this file,
    `plotPtc.py` needs to be updated accordingly, and vice versa.
    This file (`plotPtcGen2.py`) helps with maintaining backwards
    compatibility with gen2 as we transition to gen3; the code
    duplication is meant to last for only a few months from now
    (Jan, 2021). At that point, only the `plotPtc.py` file will
    remain.
    """

    ConfigClass = PlotPhotonTransferCurveTaskConfigGen2
    _DefaultName = "plotPhotonTransferCurve"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        plt.interactive(False)
    @classmethod
    def _makeArgumentParser(cls):
        """Augment argument parser for the MeasurePhotonTransferCurveTask."""
        parser = pipeBase.ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", datasetType="photonTransferCurveDataset",
                               ContainerClass=NonexistentDatasetTaskDataIdContainer,
                               help="The ccds to use, e.g. --id ccd=0..100")
        return parser
    def runDataRef(self, dataRef):
        """Run the Photon Transfer Curve (PTC) plotting measurement task.

        Parameters
        ----------
        dataRef : list of lsst.daf.persistence.ButlerDataRef
            dataRef for the detector for the expIds to be fit.
        """
        datasetFile = self.config.datasetFileName
        datasetPtc = PhotonTransferCurveDataset.readFits(datasetFile)

        dirname = dataRef.getUri(datasetType='cpPipePlotRoot', write=True)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        detNum = dataRef.dataId[self.config.ccdKey]
        filename = f"PTC_det{detNum}.pdf"
        filenameFull = os.path.join(dirname, filename)

        if self.config.linearizerFileName:
            linearizer = isr.linearize.Linearizer.readFits(self.config.linearizerFileName)
        else:
            linearizer = None
        self.run(filenameFull, datasetPtc, linearizer=linearizer, log=self.log)

        return pipeBase.Struct(exitStatus=0)
    def run(self, filenameFull, datasetPtc, linearizer=None, log=None):
        """Make the plots for the PTC task"""
        ptcFitType = datasetPtc.ptcFitType
        with PdfPages(filenameFull) as pdfPages:
            if ptcFitType in ["FULLCOVARIANCE", ]:
                self.covAstierMakeAllPlots(datasetPtc, pdfPages, log=log)
            elif ptcFitType in ["EXPAPPROXIMATION", "POLYNOMIAL"]:
                self._plotStandardPtc(datasetPtc, ptcFitType, pdfPages)
            else:
                raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n"
                                   "Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.")
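    # Illustrative usage sketch (not part of the task): read a PTC dataset from
    # disk and write the plot PDF by calling `run` directly, bypassing the Gen2
    # butler plumbing in `runDataRef`. The file names below are hypothetical.
    #
    #   datasetPtc = PhotonTransferCurveDataset.readFits("ptcDataset-det000.fits")
    #   task = PlotPhotonTransferCurveTaskGen2()
    #   task.run("PTC_det000.pdf", datasetPtc)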
    def covAstierMakeAllPlots(self, dataset, pdfPages, log=None):
        """Make plots for MeasurePhotonTransferCurve task when doCovariancesAstier=True.

        This function calls other functions that mostly reproduce the plots in Astier+19.
        Most of the code is ported from Pierre Astier's repository https://github.com/PierreAstier/bfptc

        Parameters
        ----------
        dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
            The dataset containing the necessary information to produce the plots.

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        log : `lsst.log.Log`, optional
            Logger to handle messages.
        """
        mu = dataset.finalMeans

        fullCovs = dataset.covariances
        fullCovsModel = dataset.covariancesModel
        fullCovWeights = dataset.covariancesSqrtWeights
        aDict = dataset.aMatrix
        bDict = dataset.bMatrix
        fullCovsNoB = dataset.covariances
        fullCovsModelNoB = dataset.covariancesModelNoB
        fullCovWeightsNoB = dataset.covariancesSqrtWeights
        aDictNoB = dataset.aMatrixNoB
        gainDict = dataset.gain
        noiseDict = dataset.noise

        self.plotCovariances(mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB, fullCovsModelNoB,
                             fullCovWeightsNoB, gainDict, noiseDict, aDict, bDict, pdfPages)
        self.plotNormalizedCovariances(0, 0, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
                                       fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
                                       offset=0.01, topPlot=True,
                                       numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
                                       log=log)
        self.plotNormalizedCovariances(0, 1, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
                                       fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
                                       numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
                                       log=log)
        self.plotNormalizedCovariances(1, 0, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
                                       fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
                                       numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
                                       log=log)
        self.plot_a_b(aDict, bDict, pdfPages)
        self.ab_vs_dist(aDict, bDict, pdfPages, bRange=4)
        self.plotAcoeffsSum(aDict, bDict, pdfPages)
        self.plotRelativeBiasACoeffs(aDict, aDictNoB, fullCovsModel, fullCovsModelNoB,
                                     self.config.signalElectronsRelativeA, gainDict, pdfPages, maxr=4)
    @staticmethod
    def plotCovariances(mu, covs, covsModel, covsWeights, covsNoB, covsModelNoB, covsWeightsNoB,
                        gainDict, noiseDict, aDict, bDict, pdfPages):
        """Plot covariances and models: Cov00, Cov10, Cov01.

        Figs. 6 and 7 of Astier+19

        Parameters
        ----------
        mu : `dict`, [`str`, `list`]
            Dictionary keyed by amp name with mean signal values.

        covs : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing a list of measured covariances per mean flux.

        covsModel : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing covariances model (Eq. 20 of Astier+19) per mean flux.

        covsWeights : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing sqrt. of covariances weights.

        covsNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing a list of measured covariances per mean flux ('b'=0 in
            Eq. 20 of Astier+19).

        covsModelNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of Astier+19)
            per mean flux.

        covsWeightsNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing sqrt. of covariances weights ('b' = 0 in Eq. 20 of
            Astier+19).

        gainDict : `dict`, [`str`, `float`]
            Dictionary keyed by amp names containing the gains in e-/ADU.

        noiseDict : `dict`, [`str`, `float`]
            Dictionary keyed by amp names containing the rms readout noise in e-.

        aDict : `dict`, [`str`, `numpy.array`]
            Dictionary keyed by amp names containing 'a' coefficients (Eq. 20 of Astier+19).

        bDict : `dict`, [`str`, `numpy.array`]
            Dictionary keyed by amp names containing 'b' coefficients (Eq. 20 of Astier+19).

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        supTitleFontSize = 18

        nAmps = len(covs)
        nRows = np.sqrt(nAmps)
        mantissa, _ = np.modf(nRows)
        if mantissa > 0:
            nRows = int(nRows) + 1
            nCols = nRows
        else:
            nRows = int(nRows)
            nCols = nRows

        f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        fResCov00, axResCov00 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row',
                                             figsize=(13, 10))
        fCov01, axCov01 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        fCov10, axCov10 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))

        assert(len(covsModel) == nAmps)
        assert(len(covsWeights) == nAmps)

        assert(len(covsNoB) == nAmps)
        assert(len(covsModelNoB) == nAmps)
        assert(len(covsWeightsNoB) == nAmps)
        for i, (amp, a, a2, aResVar, a3, a4) in enumerate(zip(covs, ax.flatten(),
                                                              ax2.flatten(), axResCov00.flatten(),
                                                              axCov01.flatten(), axCov10.flatten())):
            muAmp, cov, model, weight = mu[amp], covs[amp], covsModel[amp], covsWeights[amp]
            if not np.isnan(np.array(cov)).all():
                aCoeffs, bCoeffs = np.array(aDict[amp]), np.array(bDict[amp])
                gain, noise = gainDict[amp], noiseDict[amp]
                (meanVecFinal, varVecFinal, varVecModelFinal,
                 varWeightsFinal) = getFitDataFromCovariances(0, 0, muAmp, cov, model, weight,
                                                              returnMasked=True)

                # Weighted reduced chi^2 of the full-model fit to C_00 (the variance).
                chi2FullModelVar = calculateWeightedReducedChi2(varVecFinal, varVecModelFinal,
                                                                varWeightsFinal, len(meanVecFinal), 4)

                (meanVecFinalCov01, varVecFinalCov01, varVecModelFinalCov01,
                 varWeightsFinalCov01) = getFitDataFromCovariances(0, 1, muAmp, cov, model, weight,
                                                                   returnMasked=True)

                (meanVecFinalCov10, varVecFinalCov10, varVecModelFinalCov10,
                 varWeightsFinalCov10) = getFitDataFromCovariances(1, 0, muAmp, cov, model, weight,
                                                                   returnMasked=True)

                # Quadratic fit to the variance, for comparison with the full model.
                par2 = np.polyfit(meanVecFinal, varVecFinal, 2, w=varWeightsFinal)
                varModelFinalQuadratic = np.polyval(par2, meanVecFinal)
                chi2QuadModelVar = calculateWeightedReducedChi2(varVecFinal, varModelFinalQuadratic,
                                                                varWeightsFinal, len(meanVecFinal), 3)

                # Fit with no 'b' coefficient (b = 0 in Eq. 20 of Astier+19).
                covNoB, modelNoB, weightNoB = covsNoB[amp], covsModelNoB[amp], covsWeightsNoB[amp]
                (meanVecFinalNoB, varVecFinalNoB, varVecModelFinalNoB,
                 varWeightsFinalNoB) = getFitDataFromCovariances(0, 0, muAmp, covNoB, modelNoB,
                                                                 weightNoB, returnMasked=True)
                chi2FullModelNoBVar = calculateWeightedReducedChi2(varVecFinalNoB, varVecModelFinalNoB,
                                                                   varWeightsFinalNoB, len(meanVecFinalNoB),
                                                                   3)
                stringLegend = (f"Gain: {gain:.4} e/ADU \n"
                                f"Noise: {noise:.4} e \n"
                                + r"$a_{00}$: %.3e 1/e"%aCoeffs[0, 0] + "\n"
                                + r"$b_{00}$: %.3e 1/e"%bCoeffs[0, 0]
                                + f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
                minMeanVecFinal = np.nanmin(meanVecFinal)
                maxMeanVecFinal = np.nanmax(meanVecFinal)
                deltaXlim = maxMeanVecFinal - minMeanVecFinal
                a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
                a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
                a.tick_params(labelsize=11)
                a.set_xscale('linear')
                a.set_yscale('linear')
                a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
                a.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
                a.text(0.03, 0.7, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
                a.set_title(amp, fontsize=titleFontSize)
                a.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])

                a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
                a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
                a2.tick_params(labelsize=11)
                a2.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
                a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
                a2.text(0.03, 0.7, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
                a2.set_title(amp, fontsize=titleFontSize)
                a2.set_xlim([minMeanVecFinal, maxMeanVecFinal])

                aResVar.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
                aResVar.set_ylabel(r'Residuals (ADU$^2$)', fontsize=labelFontSize)
                aResVar.tick_params(labelsize=11)
                aResVar.set_xscale('linear')
                aResVar.set_yscale('linear')
                aResVar.plot(meanVecFinal, varVecFinal - varVecModelFinal, color='blue', linestyle='-',
                             label=r'Full fit ($\chi_{\rm{red}}^2$: %g)'%chi2FullModelVar)
                aResVar.plot(meanVecFinal, varVecFinal - varModelFinalQuadratic, color='red', linestyle='-',
                             label=r'Quadratic fit ($\chi_{\rm{red}}^2$: %g)'%chi2QuadModelVar)
                aResVar.plot(meanVecFinalNoB, varVecFinalNoB - varVecModelFinalNoB, color='green',
                             label=r'Full fit (b=0) ($\chi_{\rm{red}}^2$: %g)'%chi2FullModelNoBVar)
                aResVar.axhline(color='black')
                aResVar.set_title(amp, fontsize=titleFontSize)
                aResVar.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
                aResVar.legend(fontsize=7)
                a3.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
                a3.set_ylabel(r'Cov01 (ADU$^2$)', fontsize=labelFontSize)
                a3.tick_params(labelsize=11)
                a3.set_xscale('linear')
                a3.set_yscale('linear')
                a3.scatter(meanVecFinalCov01, varVecFinalCov01, c='blue', marker='o', s=markerSize)
                a3.plot(meanVecFinalCov01, varVecModelFinalCov01, color='red', linestyle='-')
                a3.set_title(amp, fontsize=titleFontSize)
                a3.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])

                a4.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
                a4.set_ylabel(r'Cov10 (ADU$^2$)', fontsize=labelFontSize)
                a4.tick_params(labelsize=11)
                a4.set_xscale('linear')
                a4.set_yscale('linear')
                a4.scatter(meanVecFinalCov10, varVecFinalCov10, c='blue', marker='o', s=markerSize)
                a4.plot(meanVecFinalCov10, varVecModelFinalCov10, color='red', linestyle='-')
                a4.set_title(amp, fontsize=titleFontSize)
                a4.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
            else:
                a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a4.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
        f.suptitle("PTC from covariances as in Astier+19 \n Fit: Eq. 20, Astier+19",
                   fontsize=supTitleFontSize)
        pdfPages.savefig(f)
        f2.suptitle("PTC from covariances as in Astier+19 (log-log) \n Fit: Eq. 20, Astier+19",
                    fontsize=supTitleFontSize)
        pdfPages.savefig(f2)
        fResCov00.suptitle("Residuals (data-model) for Cov00 (Var)", fontsize=supTitleFontSize)
        pdfPages.savefig(fResCov00)
        fCov01.suptitle("Cov01 as in Astier+19 (nearest parallel neighbor covariance) \n"
                        " Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
        pdfPages.savefig(fCov01)
        fCov10.suptitle("Cov10 as in Astier+19 (nearest serial neighbor covariance) \n"
                        "Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
        pdfPages.savefig(fCov10)
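    # The reduced chi-square values quoted in the legends above come from
    # `calculateWeightedReducedChi2` (imported from `lsst.cp.pipe.utils`). A minimal
    # sketch of a weighted reduced chi-square, assuming the weights are inverse
    # uncertainties (1/sigma); this is only an illustration of the assumed form,
    # not the library implementation:
    #
    #   def weightedReducedChi2(measured, model, weights, nData, nParsModel):
    #       chi2 = np.sum(weights**2 * (measured - model)**2)
    #       return chi2 / (nData - nParsModel)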
    def plotNormalizedCovariances(self, i, j, inputMu, covs, covsModel, covsWeights, covsNoB, covsModelNoB,
                                  covsWeightsNoB, pdfPages, offset=0.004,
                                  numberOfBins=10, plotData=True, topPlot=False, log=None):
        """Plot C_ij/mu vs mu.

        Figs. 8, 10, and 11 of Astier+19

        Parameters
        ----------
        inputMu : `dict`, [`str`, `list`]
            Dictionary keyed by amp name with mean signal values.

        covs : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing a list of measured covariances per mean flux.

        covsModel : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing covariances model (Eq. 20 of Astier+19) per mean flux.

        covsWeights : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing sqrt. of covariances weights.

        covsNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing a list of measured covariances per mean flux ('b'=0 in
            Eq. 20 of Astier+19).

        covsModelNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of Astier+19)
            per mean flux.

        covsWeightsNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing sqrt. of covariances weights ('b' = 0 in Eq. 20 of
            Astier+19).

        expIdMask : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing the masked exposure pairs.

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        offset : `float`, optional
            Constant offset factor to plot covariances in same panel (so they don't overlap).

        numberOfBins : `int`, optional
            Number of bins for top and bottom plot.

        plotData : `bool`, optional
            Plot the data points?

        topPlot : `bool`, optional
            Plot the top plot with the covariances, and the bottom plot with the model residuals?

        log : `lsst.log.Log`, optional
            Logger to handle messages.
        """
        if topPlot:
            fig = plt.figure(figsize=(8, 10))
            gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
            ax0 = plt.subplot(gs[0])
            plt.setp(ax0.get_xticklabels(), visible=False)
        else:
            fig = plt.figure(figsize=(8, 8))
            ax0 = plt.subplot(111)
        ax0.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
        ax0.tick_params(axis='both', labelsize='x-large')
        mue, rese, wce = [], [], []
        mueNoB, reseNoB, wceNoB = [], [], []
        for counter, amp in enumerate(covs):
            muAmp, fullCov, fullCovModel, fullCovWeight = (inputMu[amp], covs[amp], covsModel[amp],
                                                           covsWeights[amp])
            if len(fullCov) == 0:
                continue
            (mu, cov, model, weightCov) = getFitDataFromCovariances(i, j, muAmp, fullCov, fullCovModel,
                                                                    fullCovWeight, divideByMu=True,
                                                                    returnMasked=True)
            mue += list(mu)
            rese += list(cov - model)
            wce += list(weightCov)

            fullCovNoB, fullCovModelNoB, fullCovWeightNoB = (covsNoB[amp], covsModelNoB[amp],
                                                             covsWeightsNoB[amp])
            if len(fullCovNoB) == 0:
                continue
            (muNoB, covNoB, modelNoB,
             weightCovNoB) = getFitDataFromCovariances(i, j, muAmp, fullCovNoB, fullCovModelNoB,
                                                       fullCovWeightNoB, divideByMu=True,
                                                       returnMasked=True)
            mueNoB += list(muNoB)
            reseNoB += list(covNoB - modelNoB)
            wceNoB += list(weightCovNoB)

            # Plot the model, the binned data, and (optionally) the individual points.
            fit_curve, = plt.plot(mu, model + counter*offset, '-', linewidth=4.0)
            gind = self.indexForBins(mu, numberOfBins)
            xb, yb, wyb, sigyb = self.binData(mu, cov, gind, weightCov)
            plt.errorbar(xb, yb+counter*offset, yerr=sigyb, marker='o', linestyle='none', markersize=6.5,
                         color=fit_curve.get_color(), label=f"{amp} (N: {len(mu)})")
            if plotData:
                points, = plt.plot(mu, cov + counter*offset, '.', color=fit_curve.get_color())
            plt.legend(loc='upper right', fontsize=8)

        mue = np.array(mue)
        rese = np.array(rese)
        wce = np.array(wce)
        mueNoB = np.array(mueNoB)
        reseNoB = np.array(reseNoB)
        wceNoB = np.array(wceNoB)

        plt.xlabel(r"$\mu (el)$", fontsize='x-large')
        plt.ylabel(r"$Cov{%d%d}/\mu + Cst (el)$"%(i, j), fontsize='x-large')
        if topPlot:
            gind = self.indexForBins(mue, numberOfBins)
            xb, yb, wyb, sigyb = self.binData(mue, rese, gind, wce)
            ax1 = plt.subplot(gs[1], sharex=ax0)
            ax1.errorbar(xb, yb, yerr=sigyb, marker='o', linestyle='none', label='Full fit')
            gindNoB = self.indexForBins(mueNoB, numberOfBins)
            xb2, yb2, wyb2, sigyb2 = self.binData(mueNoB, reseNoB, gindNoB, wceNoB)
            ax1.errorbar(xb2, yb2, yerr=sigyb2, marker='o', linestyle='none', label='b = 0')
            ax1.tick_params(axis='both', labelsize='x-large')
            plt.legend(loc='upper left', fontsize='large')
            # Horizontal reference line at zero residual.
            plt.plot(xb, [0]*len(xb), '--', color='k')
            plt.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
            plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
            plt.xlabel(r'$\mu (el)$', fontsize='x-large')
            plt.ylabel(r'$Cov{%d%d}/\mu$ -model (el)'%(i, j), fontsize='x-large')

        plt.suptitle(f"Nbins: {numberOfBins}")
        labels0 = [item.get_text() for item in ax0.get_yticklabels()]
        ax0.set_yticklabels(labels0)
        pdfPages.savefig(fig)
    @staticmethod
    def plot_a_b(aDict, bDict, pdfPages, bRange=3):
        """Fig. 12 of Astier+19

        Color display of a and b arrays fits, averaged over channels.

        Parameters
        ----------
        aDict : `dict`, [`numpy.array`]
            Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
            in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).

        bDict : `dict`, [`numpy.array`]
            Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
            in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        bRange : `int`
            Maximum lag for b arrays.
        """
        a, b = [], []
        for amp in aDict:
            if np.isnan(aDict[amp]).all():
                continue
            a.append(aDict[amp])
            b.append(bDict[amp])
        a = np.array(a).mean(axis=0)
        b = np.array(b).mean(axis=0)
        fig = plt.figure(figsize=(7, 11))

        ax0 = fig.add_subplot(2, 1, 1)
        im0 = ax0.imshow(np.abs(a.transpose()), origin='lower', norm=mpl.colors.LogNorm())
        ax0.tick_params(axis='both', labelsize='x-large')
        ax0.set_title(r'$|a|$', fontsize='x-large')
        ax0.xaxis.set_ticks_position('bottom')
        cb0 = plt.colorbar(im0)
        cb0.ax.tick_params(labelsize='x-large')

        ax1 = fig.add_subplot(2, 1, 2)
        ax1.tick_params(axis='both', labelsize='x-large')
        ax1.yaxis.set_major_locator(MaxNLocator(integer=True))
        ax1.xaxis.set_major_locator(MaxNLocator(integer=True))
        im1 = ax1.imshow(1e6*b[:bRange, :bRange].transpose(), origin='lower')
        cb1 = plt.colorbar(im1)
        cb1.ax.tick_params(labelsize='x-large')
        ax1.set_title(r'$b \times 10^6$', fontsize='x-large')
        ax1.xaxis.set_ticks_position('bottom')

        pdfPages.savefig(fig)
    @staticmethod
    def ab_vs_dist(aDict, bDict, pdfPages, bRange=4):
        """Fig. 13 of Astier+19.

        Values of a and b arrays fits, averaged over amplifiers, as a function of distance.

        Parameters
        ----------
        aDict : `dict`, [`numpy.array`]
            Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
            in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).

        bDict : `dict`, [`numpy.array`]
            Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
            in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        bRange : `int`
            Maximum lag for b arrays.
        """
        assert (len(aDict) == len(bDict))
        a = []
        for amp in aDict:
            if np.isnan(aDict[amp]).all():
                continue
            a.append(np.array(aDict[amp]))
        a = np.array(a)
        y = a.mean(axis=0)
        sy = a.std(axis=0)/np.sqrt(len(aDict))
        i, j = np.indices(y.shape)
        upper = (i >= j).ravel()
        r = np.sqrt(i**2 + j**2).ravel()

        fig = plt.figure(figsize=(6, 9))
        ax = fig.add_subplot(211)
        ax.set_xlim([0.5, r.max()+1])
        ax.errorbar(r[upper], y[upper], yerr=sy[upper], marker='o', linestyle='none', color='b')
        ax.errorbar(r[~upper], y[~upper], yerr=sy[~upper], marker='o', linestyle='none', color='r')
        ax.legend(loc='upper center', fontsize='x-large')
        ax.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
        ax.set_ylabel(r'$a_{ij}$', fontsize='x-large')
        ax.tick_params(axis='both', labelsize='x-large')

        axb = fig.add_subplot(212)
        b = []
        for amp in bDict:
            if np.isnan(bDict[amp]).all():
                continue
            b.append(np.array(bDict[amp]))
        b = np.array(b)
        yb = b.mean(axis=0)
        syb = b.std(axis=0)/np.sqrt(len(bDict))
        ib, jb = np.indices(yb.shape)
        upper = (ib > jb).ravel()
        rb = np.sqrt(i**2 + j**2).ravel()

        axb.set_xlim([xmin, xmax+0.2])
        cutu = (r > xmin) & (r < xmax) & (upper)
        cutl = (r > xmin) & (r < xmax) & (~upper)
        axb.errorbar(rb[cutu], yb[cutu], yerr=syb[cutu], marker='o', linestyle='none', color='b')
        axb.errorbar(rb[cutl], yb[cutl], yerr=syb[cutl], marker='o', linestyle='none', color='r')
        plt.legend(loc='upper center', fontsize='x-large')
        axb.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
        axb.set_ylabel(r'$b_{ij}$', fontsize='x-large')
        axb.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
        axb.tick_params(axis='both', labelsize='x-large')

        pdfPages.savefig(fig)
    @staticmethod
    def plotAcoeffsSum(aDict, bDict, pdfPages):
        """Fig. 14. of Astier+19

        Cumulative sum of a_ij as a function of maximum separation. This plot displays the average over
        the amplifiers.

        Parameters
        ----------
        aDict : `dict`, [`numpy.array`]
            Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
            in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).

        bDict : `dict`, [`numpy.array`]
            Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
            in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        assert (len(aDict) == len(bDict))
        a, b = [], []
        for amp in aDict:
            if np.isnan(aDict[amp]).all() or np.isnan(bDict[amp]).all():
                continue
            a.append(aDict[amp])
            b.append(bDict[amp])
        a = np.array(a).mean(axis=0)
        b = np.array(b).mean(axis=0)
        fig = plt.figure(figsize=(7, 6))
        w = 4*np.ones_like(a)
        wa = w*a

        indices = range(1, a.shape[0]+1)
        sums = [wa[0:n, 0:n].sum() for n in indices]
        ax = plt.subplot(111)
        ax.plot(indices, sums/sums[0], 'o', color='b')
        ax.set_xlim(indices[0]-0.5, indices[-1]+0.5)
        ax.set_ylim(None, 1.2)
        ax.set_ylabel(r'$[\sum_{|i|<n\ &\ |j|<n} a_{ij}] / |a_{00}|$', fontsize='x-large')
        ax.set_xlabel('n', fontsize='x-large')
        ax.tick_params(axis='both', labelsize='x-large')

        pdfPages.savefig(fig)
    @staticmethod
    def plotRelativeBiasACoeffs(aDict, aDictNoB, fullCovsModel, fullCovsModelNoB, signalElectrons,
                                gainDict, pdfPages, maxr=None):
        """Fig. 15 in Astier+19.

        Illustrates systematic bias from estimating 'a'
        coefficients from the slope of correlations as opposed to the
        full model in Astier+19.

        Parameters
        ----------
        aDict : `dict`
            Dictionary of 'a' matrices (Eq. 20, Astier+19), with amp names as keys.

        aDictNoB : `dict`
            Dictionary of 'a' matrices ('b'= 0 in Eq. 20, Astier+19), with amp names as keys.

        fullCovsModel : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing covariances model per mean flux.

        fullCovsModelNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of
            Astier+19) per mean flux.

        signalElectrons : `float`
            Signal at which to evaluate the a_ij coefficients.

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        gainDict : `dict`, [`str`, `float`]
            Dictionary keyed by amp names with the gains in e-/ADU.

        maxr : `int`, optional
            Maximum lag.
        """
        fig = plt.figure(figsize=(7, 11))
        title = [f"'a' relative bias at {signalElectrons} e", "'a' relative bias (b=0)"]
        data = [(aDict, fullCovsModel), (aDictNoB, fullCovsModelNoB)]

        for k, pair in enumerate(data):
            amean, diffs = [], []
            for amp in pair[0]:
                covModel = np.array(pair[1][amp])
                if np.isnan(covModel).all():
                    continue
                # Approximate 'a' estimated from the covariance model, normalized by the
                # variance and the evaluation signal (slope-of-correlations estimate).
                var = covModel[0, 0, 0]
                aOld = covModel[0, :, :]/(var*signalElectrons)
                a = np.array(pair[0][amp])
                amean.append(a)
                diffs.append((aOld-a))
            amean = np.array(amean).mean(axis=0)
            diff = np.array(diffs).mean(axis=0)
            diff = diff/amean
            diff = diff[:maxr, :maxr]

            ax0 = fig.add_subplot(2, 1, k+1)
            im0 = ax0.imshow(diff.transpose(), origin='lower')
            ax0.yaxis.set_major_locator(MaxNLocator(integer=True))
            ax0.xaxis.set_major_locator(MaxNLocator(integer=True))
            ax0.tick_params(axis='both', labelsize='x-large')
            ax0.set_title(title[k])

        pdfPages.savefig(fig)
    def _plotStandardPtc(self, dataset, ptcFitType, pdfPages):
        """Plot PTC, var/signal vs signal, linearity, and linearity residual per amplifier.

        Parameters
        ----------
        dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
            The dataset containing the means, variances, exposure times, and mask.

        ptcFitType : `str`
            Type of the model fit to the PTC. Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        if ptcFitType == 'EXPAPPROXIMATION':
            ptcFunc = funcAstier
            stringTitle = (r"Var = $\frac{1}{2g^2a_{00}}(\exp (2a_{00} \mu g) - 1) + \frac{n_{00}}{g^2}$ ")
        elif ptcFitType == 'POLYNOMIAL':
            ptcFunc = funcPolynomial
            for key in dataset.ptcFitPars:
                deg = len(dataset.ptcFitPars[key]) - 1
            stringTitle = r"Polynomial (degree: %g)" % (deg)
        else:
            raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n"
                               "Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.")
        supTitleFontSize = 18

        nAmps = len(dataset.ampNames)
        nRows = np.sqrt(nAmps)
        mantissa, _ = np.modf(nRows)
        if mantissa > 0:
            nRows = int(nRows) + 1
            nCols = nRows
        else:
            nRows = int(nRows)
            nCols = nRows

        f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f3, ax3 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        for i, (amp, a, a2, a3) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten(),
                                                 ax3.flatten())):
            meanVecOriginal = np.ravel(np.array(dataset.rawMeans[amp]))
            varVecOriginal = np.ravel(np.array(dataset.rawVars[amp]))
            mask = np.ravel(np.array(dataset.expIdMask[amp]))
            if np.isnan(mask[0]):
                a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                continue

            mask = mask.astype(bool)
            meanVecFinal = meanVecOriginal[mask]
            varVecFinal = varVecOriginal[mask]
            meanVecOutliers = meanVecOriginal[np.invert(mask)]
            varVecOutliers = varVecOriginal[np.invert(mask)]
            pars, parsErr = np.array(dataset.ptcFitPars[amp]), np.array(dataset.ptcFitParsError[amp])
            ptcRedChi2 = dataset.ptcFitChiSq[amp]
            if ptcFitType == 'EXPAPPROXIMATION':
                if len(meanVecFinal):
                    ptcA00, ptcA00error = pars[0], parsErr[0]
                    ptcGain, ptcGainError = pars[1], parsErr[1]
                    ptcNoise = np.sqrt((pars[2]))
                    ptcNoiseAdu = ptcNoise*(1./ptcGain)
                    ptcNoiseError = 0.5*(parsErr[2]/np.fabs(pars[2]))*np.sqrt(np.fabs(pars[2]))
                    stringLegend = (f"a00: {ptcA00:.2e}+/-{ptcA00error:.2e} 1/e"
                                    f"\nGain: {ptcGain:.4}+/-{ptcGainError:.2e} e/ADU"
                                    f"\nNoise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
                                    r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}"
                                    f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")

            if ptcFitType == 'POLYNOMIAL':
                if len(meanVecFinal):
                    ptcGain, ptcGainError = 1./pars[1], np.fabs(1./pars[1])*(parsErr[1]/pars[1])
                    ptcNoiseAdu = np.sqrt((pars[0]))
                    ptcNoise = ptcNoiseAdu*ptcGain
                    ptcNoiseError = (0.5*(parsErr[0]/np.fabs(pars[0]))*(np.sqrt(np.fabs(pars[0]))))*ptcGain
                    stringLegend = (f"Gain: {ptcGain:.4}+/-{ptcGainError:.2e} e/ADU\n"
                                    f"Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
                                    r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}"
                                    f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
            a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
            a.tick_params(labelsize=11)
            a.set_xscale('linear')
            a.set_yscale('linear')

            a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
            a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
            a2.tick_params(labelsize=11)

            a3.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a3.set_ylabel(r'Variance/$\mu$ (ADU)', fontsize=labelFontSize)
            a3.tick_params(labelsize=11)
            a3.set_yscale('linear')

            minMeanVecFinal = np.nanmin(meanVecFinal)
            maxMeanVecFinal = np.nanmax(meanVecFinal)
            meanVecFit = np.linspace(minMeanVecFinal, maxMeanVecFinal, 100*len(meanVecFinal))
            minMeanVecOriginal = np.nanmin(meanVecOriginal)
            maxMeanVecOriginal = np.nanmax(meanVecOriginal)
            deltaXlim = maxMeanVecOriginal - minMeanVecOriginal

            a.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
            a.plot(meanVecFinal, ptcNoiseAdu**2 + (1./ptcGain)*meanVecFinal, color='green')
            a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
            a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
            a.text(0.03, 0.66, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
            a.set_title(amp, fontsize=titleFontSize)
            a.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])

            a2.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
            a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
            a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
            a2.text(0.03, 0.66, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
            a2.set_title(amp, fontsize=titleFontSize)
            a2.set_xlim([minMeanVecOriginal, maxMeanVecOriginal])

            a3.plot(meanVecFit, ptcFunc(pars, meanVecFit)/meanVecFit, color='red')
            a3.scatter(meanVecFinal, varVecFinal/meanVecFinal, c='blue', marker='o', s=markerSize)
            a3.scatter(meanVecOutliers, varVecOutliers/meanVecOutliers, c='magenta', marker='s',
                       s=markerSize)
            a3.text(0.05, 0.1, stringLegend, transform=a3.transAxes, fontsize=legendFontSize)
            a3.set_title(amp, fontsize=titleFontSize)
            a3.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])

        f.suptitle("PTC \n Fit: " + stringTitle, fontsize=supTitleFontSize)
        pdfPages.savefig(f)
        f2.suptitle("PTC (log-log)", fontsize=supTitleFontSize)
        pdfPages.savefig(f2)
        f3.suptitle(r"Var/$\mu$", fontsize=supTitleFontSize)
        pdfPages.savefig(f3)
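    # The EXPAPPROXIMATION curve drawn above evaluates `funcAstier` (imported from
    # `lsst.cp.pipe.utils`) over `meanVecFit`. A minimal sketch of the Astier+19 PTC
    # model quoted in the figure title, written only to make the assumed parameter
    # ordering [a00, gain, noise^2] explicit; the library function is the
    # authoritative implementation:
    #
    #   def ptcModelAstier(pars, mu):
    #       a00, gain, noiseSq = pars
    #       return (1.0/(2.0*gain*gain*a00))*(np.exp(2.0*a00*mu*gain) - 1.0) + noiseSq/(gain*gain)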
    def _plotLinearizer(self, dataset, linearizer, pdfPages):
        """Plot linearity and linearity residual per amplifier

        Parameters
        ----------
        dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
            The dataset containing the means, variances, exposure times, and mask.

        linearizer : `lsst.ip.isr.Linearizer`
            Linearizer with the fit parameters to be plotted.

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        supTitleFontSize = 18

        nAmps = len(dataset.ampNames)
        nRows = np.sqrt(nAmps)
        mantissa, _ = np.modf(nRows)
        if mantissa > 0:
            nRows = int(nRows) + 1
            nCols = nRows
        else:
            nRows = int(nRows)
            nCols = nRows

        # Plot mean vs time (f), and fractional nonlinearity residual (f2).
        f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        for i, (amp, a, a2) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten())):
            mask = dataset.expIdMask[amp]
            if np.isnan(mask[0]):
                a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                continue

            mask = mask.astype(bool)
            meanVecFinal = np.array(dataset.rawMeans[amp])[mask]
            timeVecFinal = np.array(dataset.rawExpTimes[amp])[mask]

            a.set_xlabel('Time (sec)', fontsize=labelFontSize)
            a.set_ylabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a.tick_params(labelsize=labelFontSize)
            a.set_xscale('linear')
            a.set_yscale('linear')

            a2.axhline(y=0, color='k')
            a2.axvline(x=0, color='k', linestyle='-')
            a2.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a2.set_ylabel('Fractional nonlinearity (%)', fontsize=labelFontSize)
            a2.tick_params(labelsize=labelFontSize)
            a2.set_xscale('linear')
            a2.set_yscale('linear')

            pars, parsErr = linearizer.fitParams[amp], linearizer.fitParamsErr[amp]
            k0, k0Error = pars[0], parsErr[0]
            k1, k1Error = pars[1], parsErr[1]
            k2, k2Error = pars[2], parsErr[2]
            linRedChi2 = linearizer.fitChiSq[amp]
            stringLegend = (f"k0: {k0:.4}+/-{k0Error:.2e} ADU\nk1: {k1:.4}+/-{k1Error:.2e} ADU/t"
                            f"\nk2: {k2:.2e}+/-{k2Error:.2e} ADU/t^2\n"
                            r"$\chi^2_{\rm{red}}$: " + f"{linRedChi2:.4}")
            a.scatter(timeVecFinal, meanVecFinal)
            a.plot(timeVecFinal, funcPolynomial(pars, timeVecFinal), color='red')
            a.text(0.03, 0.75, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
            a.set_title(f"{amp}", fontsize=titleFontSize)

            linearPart = k0 + k1*timeVecFinal
            fracLinRes = 100*(linearPart - meanVecFinal)/linearPart
            a2.plot(meanVecFinal, fracLinRes, c='g')
            a2.set_title(f"{amp}", fontsize=titleFontSize)

        f.suptitle("Linearity \n Fit: Polynomial (degree: %g)"
                   % (len(pars) - 1),
                   fontsize=supTitleFontSize)
        pdfPages.savefig(f)
        f2.suptitle(r"Fractional NL residual" "\n"
                    r"$100\times \frac{(k_0 + k_1*Time-\mu)}{k_0+k_1*Time}$",
                    fontsize=supTitleFontSize)
        pdfPages.savefig(f2)
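    # Worked example of the fractional-nonlinearity residual plotted above
    # (illustrative numbers only): with k0 = 0 ADU and k1 = 1000 ADU/s, a 10 s
    # exposure gives a linear prediction of 10000 ADU; a measured mean of 9950 ADU
    # then yields 100*(10000 - 9950)/10000 = 0.5 per cent.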
    @staticmethod
    def findGroups(x, maxDiff):
        """Group data into bins, with at most maxDiff distance between bins.

        Parameters
        ----------
        x : `list`
            Data to bin.

        maxDiff : `int`
            Maximum distance between bins.

        Returns
        -------
        index : `list`
            Bin indices.
        """
        # Walk the data in sorted order, starting a new group whenever a point lies
        # more than maxDiff above the running mean of the current group.
        ix = np.argsort(x)
        index = np.zeros_like(x, dtype=np.int32)
        xc = x[ix[0]]
        group = 0
        ng = 1
        for i in range(1, len(ix)):
            xval = x[ix[i]]
            if (xval - xc < maxDiff):
                xc = (ng*xc + xval)/(ng+1)
                ng += 1
                index[ix[i]] = group
            else:
                group += 1
                ng = 1
                index[ix[i]] = group
                xc = xval
        return index
    @staticmethod
    def indexForBins(x, nBins):
        """Builds an index with regular binning. The result can be fed into binData.

        Parameters
        ----------
        x : `numpy.array`
            Data to bin.

        nBins : `int`
            Number of bins.

        Returns
        -------
        np.digitize(x, bins) : `numpy.array`
            Bin indices.
        """
        bins = np.linspace(x.min(), x.max() + abs(x.max() * 1e-7), nBins + 1)
        return np.digitize(x, bins)
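    # Illustrative usage (not from the original file): for ten points spread over
    # [0, 90] and nBins=3, `indexForBins` returns the 1-based bin index of each point:
    #
    #   x = np.linspace(0., 90., 10)
    #   PlotPhotonTransferCurveTaskGen2.indexForBins(x, 3)
    #   # -> array([1, 1, 1, 1, 2, 2, 2, 3, 3, 3])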
    @staticmethod
    def binData(x, y, binIndex, wy=None):
        """Bin data (usually for display purposes).

        Parameters
        ----------
        x : `numpy.array`
            Data to bin.

        y : `numpy.array`
            Data to bin.

        binIndex : `list`
            Bin number of each datum.

        wy : `list`, optional
            Inverse rms of each datum to use when averaging (the actual weight is wy**2).

        Returns
        -------
        xbin : `numpy.array`
            Binned data in x.

        ybin : `numpy.array`
            Binned data in y.

        wybin : `numpy.array`
            Binned weights in y, computed from wy's in each bin.

        sybin : `numpy.array`
            Uncertainty on the bin average, considering actual scatter, and ignoring weights.
        """
        if wy is None:
            wy = np.ones_like(x)
        binIndexSet = set(binIndex)
        w2 = wy*wy
        xw2 = x*(w2)
        xbin = np.array([xw2[binIndex == i].sum()/w2[binIndex == i].sum() for i in binIndexSet])

        yw2 = y*w2
        ybin = np.array([yw2[binIndex == i].sum()/w2[binIndex == i].sum() for i in binIndexSet])

        wybin = np.sqrt(np.array([w2[binIndex == i].sum() for i in binIndexSet]))
        sybin = np.array([y[binIndex == i].std()/np.sqrt(np.array([binIndex == i]).sum())
                          for i in binIndexSet])

        return xbin, ybin, wybin, sybin
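    # Illustrative usage of the two helpers together (not from the original file):
    # bin a noisy curve into 5 regular bins and recover weighted bin averages with
    # scatter-based uncertainties, as done in `plotNormalizedCovariances` above.
    #
    #   rng = np.random.default_rng(seed=0)
    #   x = np.linspace(100., 1000., 50)
    #   y = 0.01*x + rng.normal(scale=0.5, size=x.size)
    #   gind = PlotPhotonTransferCurveTaskGen2.indexForBins(x, 5)
    #   xb, yb, wyb, syb = PlotPhotonTransferCurveTaskGen2.binData(x, y, gind)
    #   # xb, yb: weighted bin centers and means; syb: error bars on the bin means.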