DM-44889: Investigate feasibility of resurrecting matchBackgrounds.py background fitting algorithms #956

Draft · wants to merge 12 commits into base: main
533 changes: 435 additions & 98 deletions python/lsst/pipe/tasks/background.py
@@ -30,22 +30,21 @@
"SkyStatsConfig",
]

import sys
import numpy
import importlib
import itertools
from scipy.ndimage import gaussian_filter
import sys

import lsst.afw.math as afwMath
import lsst.afw.image as afwImage
import lsst.afw.geom as afwGeom
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.math as afwMath
import lsst.afw.table as afwTable
import lsst.geom as geom
import lsst.meas.algorithms as measAlg
import lsst.afw.table as afwTable

from lsst.pex.config import Config, Field, ListField, ChoiceField, ConfigField, RangeField, ConfigurableField
import numpy
from lsst.pex.config import ChoiceField, Config, ConfigField, ConfigurableField, Field, ListField, RangeField
from lsst.pipe.base import Task
from scipy.ndimage import gaussian_filter


def robustMean(array, rej=3.0):
@@ -64,47 +63,62 @@ def robustMean(array, rej=3.0):
Robust mean of `array`.
"""
q1, median, q3 = numpy.percentile(array, [25.0, 50.0, 100.0])
good = numpy.abs(array - median) < rej*0.74*(q3 - q1)
good = numpy.abs(array - median) < rej * 0.74 * (q3 - q1)
return array[good].mean()


class BackgroundConfig(Config):
"""Configuration for background measurement"""
statistic = ChoiceField(dtype=str, default="MEANCLIP", doc="type of statistic to use for grid points",
allowed={"MEANCLIP": "clipped mean",
"MEAN": "unclipped mean",
"MEDIAN": "median"})

statistic = ChoiceField(
dtype=str,
default="MEANCLIP",
doc="type of statistic to use for grid points",
allowed={"MEANCLIP": "clipped mean", "MEAN": "unclipped mean", "MEDIAN": "median"},
)
xBinSize = RangeField(dtype=int, default=32, min=1, doc="Superpixel size in x")
yBinSize = RangeField(dtype=int, default=32, min=1, doc="Superpixel size in y")
algorithm = ChoiceField(dtype=str, default="NATURAL_SPLINE", optional=True,
doc="How to interpolate the background values. "
"This maps to an enum; see afw::math::Background",
allowed={
"CONSTANT": "Use a single constant value",
"LINEAR": "Use linear interpolation",
"NATURAL_SPLINE": "cubic spline with zero second derivative at endpoints",
"AKIMA_SPLINE": "higher-level nonlinear spline that is more robust"
" to outliers",
"NONE": "No background estimation is to be attempted",
})
mask = ListField(dtype=str, default=["SAT", "BAD", "EDGE", "DETECTED", "DETECTED_NEGATIVE", "NO_DATA"],
doc="Names of mask planes to ignore while estimating the background")
algorithm = ChoiceField(
dtype=str,
default="NATURAL_SPLINE",
optional=True,
doc="How to interpolate the background values. " "This maps to an enum; see afw::math::Background",
allowed={
"CONSTANT": "Use a single constant value",
"LINEAR": "Use linear interpolation",
"NATURAL_SPLINE": "cubic spline with zero second derivative at endpoints",
"AKIMA_SPLINE": "higher-level nonlinear spline that is more robust" " to outliers",
"NONE": "No background estimation is to be attempted",
},
)
mask = ListField(
dtype=str,
default=["SAT", "BAD", "EDGE", "DETECTED", "DETECTED_NEGATIVE", "NO_DATA"],
doc="Names of mask planes to ignore while estimating the background",
)


class SkyStatsConfig(Config):
"""Parameters controlling the measurement of sky statistics"""
statistic = ChoiceField(dtype=str, default="MEANCLIP", doc="type of statistic to use for grid points",
allowed={"MEANCLIP": "clipped mean",
"MEAN": "unclipped mean",
"MEDIAN": "median"})

statistic = ChoiceField(
dtype=str,
default="MEANCLIP",
doc="type of statistic to use for grid points",
allowed={"MEANCLIP": "clipped mean", "MEAN": "unclipped mean", "MEDIAN": "median"},
)
clip = Field(doc="Clipping threshold for background", dtype=float, default=3.0)
nIter = Field(doc="Clipping iterations for background", dtype=int, default=3)
mask = ListField(doc="Mask planes to reject", dtype=str,
default=["SAT", "DETECTED", "DETECTED_NEGATIVE", "BAD", "NO_DATA"])
mask = ListField(
doc="Mask planes to reject",
dtype=str,
default=["SAT", "DETECTED", "DETECTED_NEGATIVE", "BAD", "NO_DATA"],
)


class SkyMeasurementConfig(Config):
"""Configuration for SkyMeasurementTask"""

skyIter = Field(dtype=int, default=3, doc="k-sigma rejection iterations for sky scale")
skyRej = Field(dtype=float, default=3.0, doc="k-sigma rejection threshold for sky scale")
background = ConfigField(dtype=BackgroundConfig, doc="Background measurement")
@@ -122,6 +136,7 @@ class SkyMeasurementTask(Task):
background model (a `lsst.afw.math.BackgroundMI`). The sky frame represents
the dominant response of the camera to the sky background.
"""

ConfigClass = SkyMeasurementConfig

@staticmethod
@@ -149,11 +164,16 @@ def exposureToBackground(bgExp):
algorithm = header.getScalar("ALGORITHM")
bbox = geom.Box2I(geom.Point2I(xMin, yMin), geom.Point2I(xMax, yMax))
return afwMath.BackgroundList(
(afwMath.BackgroundMI(bbox, bgExp.getMaskedImage()),
afwMath.stringToInterpStyle(algorithm),
afwMath.stringToUndersampleStyle("REDUCE_INTERP_ORDER"),
afwMath.ApproximateControl.UNKNOWN,
0, 0, False))
(
afwMath.BackgroundMI(bbox, bgExp.getMaskedImage()),
afwMath.stringToInterpStyle(algorithm),
afwMath.stringToUndersampleStyle("REDUCE_INTERP_ORDER"),
afwMath.ApproximateControl.UNKNOWN,
0,
0,
False,
)
)

def backgroundToExposure(self, statsImage, bbox):
"""Convert a background model to an exposure
@@ -207,22 +227,26 @@ def measureBackground(self, image):
stats.setNanSafe(True)
ctrl = afwMath.BackgroundControl(
self.config.background.algorithm,
max(int(image.getWidth()/self.config.background.xBinSize + 0.5), 1),
max(int(image.getHeight()/self.config.background.yBinSize + 0.5), 1),
max(int(image.getWidth() / self.config.background.xBinSize + 0.5), 1),
max(int(image.getHeight() / self.config.background.yBinSize + 0.5), 1),
"REDUCE_INTERP_ORDER",
stats,
self.config.background.statistic
self.config.background.statistic,
)

bg = afwMath.makeBackground(image, ctrl)

return afwMath.BackgroundList((
bg,
afwMath.stringToInterpStyle(self.config.background.algorithm),
afwMath.stringToUndersampleStyle("REDUCE_INTERP_ORDER"),
afwMath.ApproximateControl.UNKNOWN,
0, 0, False
))
return afwMath.BackgroundList(
(
bg,
afwMath.stringToInterpStyle(self.config.background.algorithm),
afwMath.stringToUndersampleStyle("REDUCE_INTERP_ORDER"),
afwMath.ApproximateControl.UNKNOWN,
0,
0,
False,
)
)

def averageBackgrounds(self, bgList):
"""Average multiple background models
@@ -243,8 +267,9 @@ def averageBackgrounds(self, bgList):
assert all(len(bg) == 1 for bg in bgList), "Mixed bgList: %s" % ([len(bg) for bg in bgList],)
images = [bg[0][0].getStatsImage() for bg in bgList]
boxes = [bg[0][0].getImageBBox() for bg in bgList]
assert len(set((box.getMinX(), box.getMinY(), box.getMaxX(), box.getMaxY()) for box in boxes)) == 1, \
"Bounding boxes not all equal"
assert (
len(set((box.getMinX(), box.getMinY(), box.getMaxX(), box.getMaxY()) for box in boxes)) == 1
), "Bounding boxes not all equal"
bbox = boxes.pop(0)

# Ensure bad pixels are masked
@@ -343,18 +368,18 @@ def solveScales(self, scales):
skySamples = numpy.array(skySamples)

def solve(mask):
return afwMath.LeastSquares.fromDesignMatrix(skySamples[mask].reshape(mask.sum(), 1),
imageSamples[mask],
afwMath.LeastSquares.DIRECT_SVD).getSolution()
return afwMath.LeastSquares.fromDesignMatrix(
skySamples[mask].reshape(mask.sum(), 1), imageSamples[mask], afwMath.LeastSquares.DIRECT_SVD
).getSolution()

mask = numpy.isfinite(imageSamples) & numpy.isfinite(skySamples)
for ii in range(self.config.skyIter):
solution = solve(mask)
residuals = imageSamples - solution*skySamples
residuals = imageSamples - solution * skySamples
lq, uq = numpy.percentile(residuals[mask], [25, 75])
stdev = 0.741*(uq - lq) # Robust stdev from IQR
stdev = 0.741 * (uq - lq) # Robust stdev from IQR
with numpy.errstate(invalid="ignore"): # suppress NAN warnings
bad = numpy.abs(residuals) > self.config.skyRej*stdev
bad = numpy.abs(residuals) > self.config.skyRej * stdev
mask[bad] = False

return solve(mask)
@@ -414,14 +439,15 @@ def interpolate1D(method, xSample, ySample, xInterp):
"""
if len(xSample) == 0:
return numpy.ones_like(xInterp)*numpy.nan
return numpy.ones_like(xInterp) * numpy.nan
try:
return afwMath.makeInterpolate(xSample.astype(float), ySample.astype(float),
method).interpolate(xInterp.astype(float))
return afwMath.makeInterpolate(xSample.astype(float), ySample.astype(float), method).interpolate(
xInterp.astype(float)
)
except Exception:
if method == afwMath.Interpolate.CONSTANT:
# We've already tried the most basic interpolation and it failed
return numpy.ones_like(xInterp)*numpy.nan
return numpy.ones_like(xInterp) * numpy.nan
newMethod = afwMath.lookupMaxInterpStyle(len(xSample))
if newMethod == method:
newMethod = afwMath.Interpolate.CONSTANT
@@ -453,15 +479,17 @@ def interpolateBadPixels(array, isBad, interpolationStyle):
isGood = ~isBad
for y in range(height):
if numpy.any(isBad[y, :]) and numpy.any(isGood[y, :]):
array[y][isBad[y]] = interpolate1D(method, xIndices[isGood[y]], array[y][isGood[y]],
xIndices[isBad[y]])
array[y][isBad[y]] = interpolate1D(
method, xIndices[isGood[y]], array[y][isGood[y]], xIndices[isBad[y]]
)

isBad = numpy.isnan(array)
isGood = ~isBad
for x in range(width):
if numpy.any(isBad[:, x]) and numpy.any(isGood[:, x]):
array[:, x][isBad[:, x]] = interpolate1D(method, yIndices[isGood[:, x]],
array[:, x][isGood[:, x]], yIndices[isBad[:, x]])
array[:, x][isBad[:, x]] = interpolate1D(
method, yIndices[isGood[:, x]], array[:, x][isGood[:, x]], yIndices[isBad[:, x]]
)


class FocalPlaneBackgroundConfig(Config):
@@ -473,15 +501,21 @@ class FocalPlaneBackgroundConfig(Config):
need to be revised according to each particular camera. For
this reason, no defaults are set for those.
"""

xSize = Field(dtype=float, doc="Bin size in x")
ySize = Field(dtype=float, doc="Bin size in y")
pixelSize = Field(dtype=float, default=1.0, doc="Pixel size in same units as xSize/ySize")
minFrac = Field(dtype=float, default=0.1, doc="Minimum fraction of bin size for good measurement")
mask = ListField(dtype=str, doc="Mask planes to treat as bad",
default=["BAD", "SAT", "INTRP", "DETECTED", "DETECTED_NEGATIVE", "EDGE", "NO_DATA"])
mask = ListField(
dtype=str,
doc="Mask planes to treat as bad",
default=["BAD", "SAT", "INTRP", "DETECTED", "DETECTED_NEGATIVE", "EDGE", "NO_DATA"],
)
interpolation = ChoiceField(
doc="how to interpolate the background values. This maps to an enum; see afw::math::Background",
dtype=str, default="AKIMA_SPLINE", optional=True,
dtype=str,
default="AKIMA_SPLINE",
optional=True,
allowed={
"CONSTANT": "Use a single constant value",
"LINEAR": "Use linear interpolation",
@@ -520,6 +554,7 @@ class FocalPlaneBackground:
Once you've built the background model, you can apply it to individual
CCDs with the `toCcdBackground` method.
"""

@classmethod
def fromCamera(cls, config, camera):
"""Construct from a camera object
@@ -538,14 +573,17 @@ def fromCamera(cls, config, camera):

width, height = cameraBox.getDimensions()
# Offset so that we run from zero
offset = geom.Extent2D(cameraBox.getMin())*-1
offset = geom.Extent2D(cameraBox.getMin()) * -1
# Add an extra pixel buffer on either side
dims = geom.Extent2I(int(numpy.ceil(width/config.xSize)) + 2,
int(numpy.ceil(height/config.ySize)) + 2)
dims = geom.Extent2I(
int(numpy.ceil(width / config.xSize)) + 2, int(numpy.ceil(height / config.ySize)) + 2
)
# Transform takes us from focal plane coordinates --> sample coordinates
transform = (geom.AffineTransform.makeTranslation(geom.Extent2D(1, 1))
* geom.AffineTransform.makeScaling(1.0/config.xSize, 1.0/config.ySize)
* geom.AffineTransform.makeTranslation(offset))
transform = (
geom.AffineTransform.makeTranslation(geom.Extent2D(1, 1))
* geom.AffineTransform.makeScaling(1.0 / config.xSize, 1.0 / config.ySize)
* geom.AffineTransform.makeTranslation(offset)
)

return cls(config, dims, afwGeom.makeTransform(transform))

@@ -626,8 +664,9 @@ def addCcd(self, exposure):
CCD exposure to measure
"""
detector = exposure.getDetector()
transform = detector.getTransformMap().getTransform(detector.makeCameraSys(afwCameraGeom.PIXELS),
detector.makeCameraSys(afwCameraGeom.FOCAL_PLANE))
transform = detector.getTransformMap().getTransform(
detector.makeCameraSys(afwCameraGeom.PIXELS), detector.makeCameraSys(afwCameraGeom.FOCAL_PLANE)
)
image = exposure.getMaskedImage()
maskVal = image.getMask().getPlaneBitMask(self.config.mask)

@@ -656,7 +695,7 @@ def addCcd(self, exposure):
num = result.getValue(afwMath.NPOINT)
if not numpy.isfinite(mean) or not numpy.isfinite(num):
continue
warped[xx, yy, afwImage.LOCAL] = mean*num
warped[xx, yy, afwImage.LOCAL] = mean * num
warpedCounts[xx, yy, afwImage.LOCAL] = num

self._values += warped
@@ -680,10 +719,12 @@ def toCcdBackground(self, detector, bbox):
bg : `lsst.afw.math.BackgroundList`
Background model for CCD.
"""
transform = detector.getTransformMap().getTransform(detector.makeCameraSys(afwCameraGeom.PIXELS),
detector.makeCameraSys(afwCameraGeom.FOCAL_PLANE))
binTransform = (geom.AffineTransform.makeScaling(self.config.binning)
* geom.AffineTransform.makeTranslation(geom.Extent2D(0.5, 0.5)))
transform = detector.getTransformMap().getTransform(
detector.makeCameraSys(afwCameraGeom.PIXELS), detector.makeCameraSys(afwCameraGeom.FOCAL_PLANE)
)
binTransform = geom.AffineTransform.makeScaling(
self.config.binning
) * geom.AffineTransform.makeTranslation(geom.Extent2D(0.5, 0.5))

# Binned image on CCD --> unbinned image on CCD --> focal plane --> binned focal plane
toSample = afwGeom.makeTransform(binTransform).then(transform).then(self.transform)
@@ -692,7 +733,7 @@ def toCcdBackground(self, detector, bbox):
fpNorm = afwImage.ImageF(focalPlane.getBBox())
fpNorm.set(1.0)

image = afwImage.ImageF(bbox.getDimensions()//self.config.binning)
image = afwImage.ImageF(bbox.getDimensions() // self.config.binning)
norm = afwImage.ImageF(image.getBBox())
ctrl = afwMath.WarpingControl("bilinear")
afwMath.warpImage(image, focalPlane, toSample.inverted(), ctrl)
@@ -705,11 +746,15 @@ def toCcdBackground(self, detector, bbox):
image.getArray()[isBad] = image.getArray()[~isBad].mean()

return afwMath.BackgroundList(
(afwMath.BackgroundMI(bbox, afwImage.makeMaskedImage(image, mask)),
afwMath.stringToInterpStyle(self.config.interpolation),
afwMath.stringToUndersampleStyle("REDUCE_INTERP_ORDER"),
afwMath.ApproximateControl.UNKNOWN,
0, 0, False)
(
afwMath.BackgroundMI(bbox, afwImage.makeMaskedImage(image, mask)),
afwMath.stringToInterpStyle(self.config.interpolation),
afwMath.stringToUndersampleStyle("REDUCE_INTERP_ORDER"),
afwMath.ApproximateControl.UNKNOWN,
0,
0,
False,
)
)

def merge(self, other):
@@ -730,8 +775,10 @@ def merge(self, other):
The merged background model.
"""
if (self.config.xSize, self.config.ySize) != (other.config.xSize, other.config.ySize):
raise RuntimeError("Size mismatch: %s vs %s" % ((self.config.xSize, self.config.ySize),
(other.config.xSize, other.config.ySize)))
raise RuntimeError(
"Size mismatch: %s vs %s"
% ((self.config.xSize, self.config.ySize), (other.config.xSize, other.config.ySize))
)
if self.dims != other.dims:
raise RuntimeError("Dimensions mismatch: %s vs %s" % (self.dims, other.dims))
self._values += other._values
@@ -760,8 +807,11 @@ def getStatsImage(self):
"""
values = self._values.clone()
values /= self._numbers
thresh = (self.config.minFrac
* (self.config.xSize/self.config.pixelSize)*(self.config.ySize/self.config.pixelSize))
thresh = (
self.config.minFrac
* (self.config.xSize / self.config.pixelSize)
* (self.config.ySize / self.config.pixelSize)
)
isBad = self._numbers.getArray() < thresh
if self.config.doSmooth:
array = values.getArray()
@@ -772,11 +822,293 @@ def getStatsImage(self):
return values


class TractBackgroundConfig(Config):
"""Configuration for TractBackground
Note that `xBin` and `yBin` are in pixels, as unlike FocalPlaneBackground,
translation from warps to tract and back only requires geometric
transformations in the warped pixel plane.
"""

xBin = Field(dtype=float, default=500, doc="Bin size in x")
yBin = Field(dtype=float, default=500, doc="Bin size in y")
minFrac = Field(dtype=float, default=0.1, doc="Minimum fraction of bin size for good measurement")
mask = ListField(
dtype=str,
doc="Mask planes to treat as bad",
default=["BAD", "SAT", "INTRP", "DETECTED", "DETECTED_NEGATIVE", "EDGE", "NO_DATA"],
)
interpolation = ChoiceField(
doc="how to interpolate the background values. This maps to an enum; see afw::math::Background",
dtype=str,
default="AKIMA_SPLINE",
optional=True,
allowed={
"CONSTANT": "Use a single constant value",
"LINEAR": "Use linear interpolation",
"NATURAL_SPLINE": "cubic spline with zero second derivative at endpoints",
"AKIMA_SPLINE": "higher-level nonlinear spline that is more robust to outliers",
"NONE": "No background estimation is to be attempted",
},
)
doSmooth = Field(dtype=bool, default=False, doc="Do smoothing?")
smoothScale = Field(dtype=float, default=2.0, doc="Smoothing scale, as a multiple of the bin size")
binning = Field(dtype=int, default=64, doc="Binning to use for warp background model (pixels)")


class TractBackground:
"""
As `FocalPlaneBackground`, but works in warped tract coordinates.
"""

@classmethod
def fromSimilar(cls, other):
"""Construct from an object that has the same interface.
Parameters
----------
other : `TractBackground`-like
An object that matches the interface of `TractBackground`
but which may be different.
Returns
-------
background : `TractBackground`
Something guaranteed to be a `TractBackground`.
"""
return cls(other.config, other._values, other._numbers)

def __init__(self, config, values=None, numbers=None):
"""Constructor
Developers should note that changes to the signature of this method
require coordinated changes to the `__reduce__` and `clone` methods.
Parameters
----------
config : `TractBackgroundConfig`
Configuration for measuring tract backgrounds.
values : `lsst.afw.image.ImageF`
Measured background values.
numbers : `lsst.afw.image.ImageF`
Number of pixels in each background measurement.
"""
self.config = config
# TODO: dynamic tract dimensions?
self.dims = geom.Extent2I(36000 / self.config.xBin, 36000 / self.config.yBin)

if values is None:
values = afwImage.ImageF(self.dims)
values.set(0.0)
else:
values = values.clone()
assert values.getDimensions() == self.dims
self._values = values
if numbers is None:
numbers = afwImage.ImageF(self.dims) # float for dynamic range and convenience
numbers.set(0.0)
else:
numbers = numbers.clone()
assert numbers.getDimensions() == self.dims
self._numbers = numbers

def __reduce__(self):
return self.__class__, (self.config, self._values, self._numbers)

def clone(self):
return self.__class__(self.config, self._values, self._numbers)

def addWarp(self, warp):
"""
Equivalent to FocalPlaneBackground.addCcd(), but operating on warps.
Bins the masked image of a warp and adds the binned values into a blank
image with the binned tract dimensions, at the location of the warp
within the tract.
Parameters
----------
warp : `lsst.afw.image.ExposureF`
Warped image corresponding to a single patch in a single visit
"""
image = warp.getMaskedImage()
maskVal = image.getMask().getPlaneBitMask(self.config.mask)
# Photometric scaling necessary for contiguous background across tract
image.image.array *= warp.getPhotoCalib().instFluxToNanojansky(1)

warped = afwImage.ImageF(self._values.getBBox())
warpedCounts = afwImage.ImageF(self._numbers.getBBox())

stats = afwMath.StatisticsControl()
stats.setAndMask(maskVal)
stats.setNanSafe(True)

# Pixel locations in binned tract-scale image
pixels = itertools.product(
numpy.arange(warped.getBBox().getMinX(), warped.getBBox().getMaxX() + 1),
numpy.arange(warped.getBBox().getMinY(), warped.getBBox().getMaxY() + 1),
)
for xx, yy in pixels:
llc = geom.Point2D((xx - 0.5) * self.config.xBin, (yy - 0.5) * self.config.yBin)
urc = geom.Point2D(
(xx + 0.5) * self.config.xBin + self.config.xBin - 1,
(yy + 0.5) * self.config.yBin + self.config.yBin - 1,
)
bbox = geom.Box2I(geom.Point2I(llc), geom.Point2I(urc))
bbox.clip(image.getBBox()) # Works in tract coordinates
if bbox.isEmpty():
continue
subImage = image.Factory(image, bbox)
result = afwMath.makeStatistics(subImage, afwMath.MEANCLIP | afwMath.NPOINT, stats)
mean = result.getValue(afwMath.MEANCLIP)
num = result.getValue(afwMath.NPOINT)
if not numpy.isfinite(mean) or not numpy.isfinite(num):
continue
warped[xx, yy, afwImage.LOCAL] = mean * num
warpedCounts[xx, yy, afwImage.LOCAL] = num

self._values += warped
self._numbers += warpedCounts
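
The accumulation above keeps mean*npoints and npoints in separate planes so that contributions from overlapping warps combine as a count-weighted mean once `getStatsImage` divides the two. A toy check with made-up numbers:

valuesPix, numbersPix = 0.0, 0.0
for mean, num in [(10.0, 400), (12.0, 100)]:  # hypothetical (clipped mean, pixel count) from two warps hitting one superpixel
    valuesPix += mean * num  # what addWarp adds to self._values
    numbersPix += num  # what addWarp adds to self._numbers
print(valuesPix / numbersPix)  # 10.4: the count-weighted mean that getStatsImage recovers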

def merge(self, other):
"""Merge with another TractBackground
This allows multiple background models to be constructed from
different warps, and then merged to form a single consistent
background model for the entire tract.
Parameters
----------
other : `TractBackground`
Another background model to merge.
Returns
-------
self : `TractBackground`
The merged background model.
"""
if (self.config.xBin, self.config.yBin) != (other.config.xBin, other.config.yBin):
raise RuntimeError(
"Size mismatch: %s vs %s"
% ((self.config.xBin, self.config.yBin), (other.config.xBin, other.config.yBin))
)
if self.dims != other.dims:
raise RuntimeError("Dimensions mismatch: %s vs %s" % (self.dims, other.dims))
self._values += other._values
self._numbers += other._numbers
return self

def __iadd__(self, other):
"""Merge with another TractBackground
Parameters
----------
other : `TractBackground`
Another background model to merge.
Returns
-------
self : `TractBackground`
The merged background model.
"""
return self.merge(other)

def toWarpBackground(self, warp):
"""
Equivalent of FocalPlaneBackground.toCcdBackground(), but creates a
background model for a warp using a full tract model.
Parameters
----------
warp : `lsst.afw.image.ExposureF`
Warped image corresponding to a single patch in a single visit
Returns
-------
bg : `lsst.afw.math.BackgroundList`
Background model for warp
"""
# Transform to binned warp plane
binTransform = geom.AffineTransform.makeScaling(self.config.binning)

# Transform from binned tract plane to tract plane
# Start at the patch corner, not the warp corner overlap region
corner = warp.getBBox().getMin()
if corner[0] % 4000 != 0: # TODO: hard-coded patch dimensions are bad
corner[0] += 100
corner[1] += 100
offset = geom.Extent2D(corner[0], corner[1])
tractTransform = (
geom.AffineTransform.makeTranslation(geom.Extent2D(-0.5, -0.5))
* geom.AffineTransform.makeScaling(1.0 / self.config.xBin, 1.0 / self.config.yBin)
* geom.AffineTransform.makeTranslation(offset)
)
transform = afwGeom.makeTransform(tractTransform)

# Full transform
toSample = afwGeom.makeTransform(binTransform).then(transform)

# Full tract sky model and normalization array
tractPlane = self.getStatsImage()
tpNorm = afwImage.ImageF(tractPlane.getBBox())
tpNorm.set(1.0)

# Binned warp image and normalization array
bbox = warp.getBBox()
image = afwImage.ImageF(bbox.getDimensions() // self.config.binning)
norm = afwImage.ImageF(image.getBBox())

ctrl = afwMath.WarpingControl("bilinear")
afwMath.warpImage(image, tractPlane, toSample.inverted(), ctrl)
afwMath.warpImage(norm, tpNorm, toSample.inverted(), ctrl)
image /= norm
# Convert back to counts so the model can be subtracted w/o conversion
image /= warp.getPhotoCalib().instFluxToNanojansky(1)

mask = afwImage.Mask(image.getBBox())
isBad = numpy.isnan(image.getArray())
mask.getArray()[isBad] = mask.getPlaneBitMask("BAD")
image.getArray()[isBad] = image.getArray()[~isBad].mean()

return afwMath.BackgroundList(
(
afwMath.BackgroundMI(warp.getBBox(), afwImage.makeMaskedImage(image, mask)),
afwMath.stringToInterpStyle(self.config.interpolation),
afwMath.stringToUndersampleStyle("REDUCE_INTERP_ORDER"),
afwMath.ApproximateControl.UNKNOWN,
0,
0,
False,
)
)

def getStatsImage(self):
"""Return the background model data
This is the measurement of the background for each of the superpixels.
"""
values = self._values.clone()
values /= self._numbers
# This old logic doesn't work because everything outside the FP is bad
# thresh = self.config.minFrac * (self.config.xBin) * (self.config.yBin)
# isBad = self._numbers.getArray() < thresh
# if self.config.doSmooth:
# array = values.getArray()
# array[:] = smoothArray(array, isBad, self.config.smoothScale)
# isBad = numpy.isnan(values.array)
# if numpy.any(isBad):
# interpolateBadPixels(values.getArray(), isBad, self.config.interpolation)
return values
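
A rough usage sketch for the class as a whole, under the assumption that `visitWarps` is a list of warped `ExposureF` objects from a single visit (hypothetical name); it mirrors the accumulate/merge pattern used by MatchBackgroundsTask below:

config = TractBackgroundConfig()
tractBg = TractBackground(config)  # per-visit model, initially empty
for warp in visitWarps:
    perWarp = TractBackground(config)  # empty model with the same binned-tract geometry
    perWarp.addWarp(warp)  # bin this warp into tract superpixels (in nJy)
    tractBg.merge(perWarp)  # accumulate into the per-visit tract model
# Per-warp background, converted back to counts, ready for subtraction
bgList = tractBg.toWarpBackground(visitWarps[0])
visitWarps[0].image -= bgList.getImage()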


class MaskObjectsConfig(Config):
"""Configuration for MaskObjectsTask"""

nIter = Field(dtype=int, default=3, doc="Number of iterations")
subtractBackground = ConfigurableField(target=measAlg.SubtractBackgroundTask,
doc="Background subtraction")
subtractBackground = ConfigurableField(
target=measAlg.SubtractBackgroundTask, doc="Background subtraction"
)
detection = ConfigurableField(target=measAlg.SourceDetectionTask, doc="Source detection")
detectSigma = Field(dtype=float, default=5.0, doc="Detection threshold (standard deviations)")
doInterpolate = Field(dtype=bool, default=True, doc="Interpolate when removing objects?")
@@ -793,11 +1125,15 @@ def setDefaults(self):
self.interpolate.useApprox = False

def validate(self):
if (self.detection.reEstimateBackground
or self.detection.doTempLocalBackground
or self.detection.doTempWideBackground):
raise RuntimeError("Incorrect settings for object masking: reEstimateBackground, "
"doTempLocalBackground and doTempWideBackground must be False")
if (
self.detection.reEstimateBackground
or self.detection.doTempLocalBackground
or self.detection.doTempWideBackground
):
raise RuntimeError(
"Incorrect settings for object masking: reEstimateBackground, "
"doTempLocalBackground and doTempWideBackground must be False"
)


class MaskObjectsTask(Task):
@@ -811,6 +1147,7 @@ class MaskObjectsTask(Task):
We deliberately use the specified ``detectSigma`` instead of the PSF,
in order to better pick up the faint wings of objects.
"""

ConfigClass = MaskObjectsConfig

def __init__(self, *args, **kwargs):
@@ -902,7 +1239,7 @@ def smoothArray(array, bad, sigma):
"""
convolved = gaussian_filter(numpy.where(bad, 0.0, array), sigma, mode="constant", cval=0.0)
denominator = gaussian_filter(numpy.where(bad, 0.0, 1.0), sigma, mode="constant", cval=0.0)
return convolved/denominator
return convolved / denominator


def _create_module_child(name):
1,084 changes: 555 additions & 529 deletions python/lsst/pipe/tasks/matchBackgrounds.py
@@ -19,358 +19,574 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

__all__ = ["MatchBackgroundsConfig", "MatchBackgroundsTask"]

import numpy
import lsst.afw.image as afwImage
import lsst.afw.math as afwMath
import lsst.geom as geom
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsstDebug
__all__ = ["MatchBackgroundsConnections", "MatchBackgroundsConfig", "MatchBackgroundsTask"]

import numpy as np
from lsst.afw.image import LOCAL, ImageF, Mask, MaskedImageF
from lsst.afw.math import (
MEANCLIP,
MEANSQUARE,
NPOINT,
VARIANCE,
ApproximateControl,
BackgroundControl,
BackgroundList,
BackgroundMI,
StatisticsControl,
makeBackground,
makeStatistics,
stringToInterpStyle,
stringToStatisticsProperty,
stringToUndersampleStyle,
)
from lsst.pex.config import ChoiceField, ConfigField, Field, ListField, RangeField
from lsst.pipe.base import PipelineTask, PipelineTaskConfig, PipelineTaskConnections, Struct, TaskError
from lsst.pipe.base.connectionTypes import Input, Output
from lsst.pipe.tasks.background import TractBackground, TractBackgroundConfig
from lsst.utils.timer import timeMethod


class MatchBackgroundsConfig(pexConfig.Config):
class MatchBackgroundsConnections(
PipelineTaskConnections,
# How to get it to do visit, warp?
# Need to kill all collections w/new dataset types to try other combos here...
# https://pipelines.lsst.io/v/weekly/modules/lsst.pipe.base/creating-a-pipelinetask.html#pipelinetask-processing-multiple-datasets
# dimensions=("skymap", "tract", "patch", "band"),
dimensions=("skymap", "tract", "band"), # Don't want to mix bands...
defaultTemplates={
"inputCoaddName": "deep",
"outputCoaddName": "deep",
"warpType": "psfMatched",
"warpTypeSuffix": "",
},
):

warps = Input(
doc=("Warps used to construct a list of matched backgrounds."),
name="{inputCoaddName}Coadd_{warpType}Warp",
storageClass="ExposureF",
dimensions=("skymap", "tract", "patch", "visit"),
deferLoad=True,
multiple=True,
)
backgroundInfoList = Output(
doc="List of differential backgrounds, with goodness of fit params.",
name="psfMatchedWarpBackground_diff", # This needs to change
# dimensions=("skymap", "tract", "patch", "visit"),
dimensions=("skymap", "tract", "visit", "patch"),
storageClass="Background",
multiple=True,
)
matchedImageList = Output(
doc="List of background-matched warps.",
name="{inputCoaddName}Coadd_{warpType}Warp_bgMatched",
storageClass="ExposureF",
# dimensions=("skymap", "tract", "patch", "visit"),
dimensions=("skymap", "tract", "visit", "patch"),
multiple=True,
)


class MatchBackgroundsConfig(PipelineTaskConfig, pipelineConnections=MatchBackgroundsConnections):

# Reference warp selection
refWarpVisit = Field[int](
doc="Visit ID of the reference warp. If None, the best warp is chosen from the list of warps.",
optional=True,
)
bestRefWeightChi2 = RangeField(
dtype=float,
doc="Mean background goodness of fit statistic weight when calculating the best reference exposure. "
"Higher weights prefer exposures with flatter backgrounds. Ignored when ref visit supplied.",
default=0.2,
min=0.0,
max=1.0,
)
bestRefWeightVariance = RangeField(
dtype=float,
doc="Image variance weight when calculating the best reference exposure. "
"Higher weights prefers exposures with low image variances. Ignored when ref visit supplied",
default=0.2,
min=0.0,
max=1.0,
)
bestRefWeightGlobalCoverage = RangeField(
dtype=float,
doc="Global coverage weight (total number of valid pixels) when calculating the best reference "
"exposure. Higher weights prefer exposures with high coverage. Ignored when a ref visit supplied.",
default=0.2,
min=0.0,
max=1.0,
)
bestRefWeightEdgeCoverage = RangeField(
dtype=float,
doc="Edge coverage weight (number of valid edge pixels) when calculating the best reference "
"exposure. Higher weights prefer exposures with high coverage. Ignored when a ref visit supplied.",
default=0.4,
min=0.0,
max=1.0,
)

usePolynomial = pexConfig.Field(
dtype=bool,
doc="Fit background difference with Chebychev polynomial interpolation "
"(using afw.math.Approximate)? If False, fit with spline interpolation using afw.math.Background",
default=False
# Background matching
tractBgModel = ConfigField(
dtype=TractBackgroundConfig,
doc="Background model for the entire tract",
)
usePolynomial = Field[bool](
doc="Fit background difference with a Chebychev polynomial interpolation? "
"If False, fit with spline interpolation instead.",
default=False,
)
order = pexConfig.Field(
dtype=int,
doc="Order of Chebyshev polynomial background model. Ignored if usePolynomial False",
default=8
order = Field[int](
doc="Order of Chebyshev polynomial background model. Ignored if ``usePolynomial=False``.",
default=8,
)
badMaskPlanes = pexConfig.ListField(
doc="Names of mask planes to ignore while estimating the background",
dtype=str, default=["NO_DATA", "DETECTED", "DETECTED_NEGATIVE", "SAT", "BAD", "INTRP", "CR"],
itemCheck=lambda x: x in afwImage.Mask().getMaskPlaneDict(),
badMaskPlanes = ListField[str](
doc="Names of mask planes to ignore while estimating the background.",
default=[
"NO_DATA",
"DETECTED",
"DETECTED_NEGATIVE",
"SAT",
"BAD",
"INTRP",
"CR",
],
itemCheck=lambda x: x in Mask().getMaskPlaneDict(),
)
gridStatistic = pexConfig.ChoiceField(
gridStatistic = ChoiceField(
dtype=str,
doc="Type of statistic to estimate pixel value for the grid points",
default="MEAN",
allowed={
"MEAN": "mean",
"MEDIAN": "median",
"MEANCLIP": "clipped mean"
}
doc="Type of statistic to estimate pixel value for the grid points.",
default="MEANCLIP",
allowed={"MEAN": "mean", "MEDIAN": "median", "MEANCLIP": "clipped mean"},
)
undersampleStyle = pexConfig.ChoiceField(
doc="Behaviour if there are too few points in grid for requested interpolation style. "
"Note: INCREASE_NXNYSAMPLE only allowed for usePolynomial=True.",
undersampleStyle = ChoiceField(
dtype=str,
doc="Behaviour if there are too few points in the grid for requested interpolation style. "
"Note: choice ``INCREASE_NXNYSAMPLE`` only allowed for ``usePolynomial=True``.",
default="REDUCE_INTERP_ORDER",
allowed={
"THROW_EXCEPTION": "throw an exception if there are too few points",
"THROW_EXCEPTION": "throw an exception if there are too few points.",
"REDUCE_INTERP_ORDER": "use an interpolation style with a lower order.",
"INCREASE_NXNYSAMPLE": "Increase the number of samples used to make the interpolation grid.",
}
},
)
binSize = Field[int](
doc="Bin size for gridding the difference image and fitting a spatial model.",
default=256,
)
binSize = pexConfig.Field(
doc="Bin size for gridding the difference image and fitting a spatial model",
dtype=int,
default=256
chi2BinSize = Field[int](
doc="Bin size for gridding images when choosing best reference exposure.",
default=1024,
)
interpStyle = pexConfig.ChoiceField(
interpStyle = ChoiceField(
dtype=str,
doc="Algorithm to interpolate the background values; ignored if usePolynomial is True"
"Maps to an enum; see afw.math.Background",
doc="Algorithm to interpolate the background values; ignored if ``usePolynomial=True``."
"Maps to an enum; see afw.math.Background for more information.",
default="AKIMA_SPLINE",
allowed={
"CONSTANT": "Use a single constant value",
"LINEAR": "Use linear interpolation",
"NATURAL_SPLINE": "cubic spline with zero second derivative at endpoints",
"AKIMA_SPLINE": "higher-level nonlinear spline that is more robust to outliers",
"NONE": "No background estimation is to be attempted",
}
"CONSTANT": "Use a single constant value.",
"LINEAR": "Use linear interpolation.",
"NATURAL_SPLINE": "A cubic spline with zero second derivative at endpoints.",
"AKIMA_SPLINE": "A higher-level non-linear spline that is more robust to outliers.",
"NONE": "No background estimation is to be attempted.",
},
)
numSigmaClip = pexConfig.Field(
dtype=int,
doc="Sigma for outlier rejection; ignored if gridStatistic != 'MEANCLIP'.",
default=3
numSigmaClip = Field[int](
doc="Sigma for outlier rejection. Ignored if ``gridStatistic != 'MEANCLIP'``.",
default=3,
)
numIter = pexConfig.Field(
dtype=int,
doc="Number of iterations of outlier rejection; ignored if gridStatistic != 'MEANCLIP'.",
default=2
numIter = Field[int](
doc="Number of iterations of outlier rejection. Ignored if ``gridStatistic != 'MEANCLIP'``.",
default=3,
)
bestRefWeightCoverage = pexConfig.RangeField(
dtype=float,
doc="Weight given to coverage (number of pixels that overlap with patch), "
"when calculating best reference exposure. Higher weight prefers exposures with high coverage."
"Ignored when reference visit is supplied",
default=0.4,
min=0., max=1.
)
bestRefWeightVariance = pexConfig.RangeField(
dtype=float,
doc="Weight given to image variance when calculating best reference exposure. "
"Higher weight prefers exposures with low image variance. Ignored when reference visit is supplied",
default=0.4,
min=0., max=1.
)
bestRefWeightLevel = pexConfig.RangeField(
dtype=float,
doc="Weight given to mean background level when calculating best reference exposure. "
"Higher weight prefers exposures with low mean background level. "
"Ignored when reference visit is supplied.",
default=0.2,
min=0., max=1.
)
approxWeighting = pexConfig.Field(
dtype=bool,
doc=("Use inverse-variance weighting when approximating background offset model? "
"This will fail when the background offset is constant "
"(this is usually only the case in testing with artificial images)."
"(usePolynomial=True)"),

approxWeighting = Field[bool](
doc="Use inverse-variance weighting when approximating the background offset model? This will fail "
"when the background offset is constant (usually only the case in testing with artificial images)."
"Only applied if ``usePolynomial=True``.",
default=True,
)
gridStdevEpsilon = pexConfig.RangeField(
gridStdevEpsilon = RangeField(
dtype=float,
doc="Tolerance on almost zero standard deviation in a background-offset grid bin. "
"If all bins have a standard deviation below this value, the background offset model "
"is approximated without inverse-variance weighting. (usePolynomial=True)",
doc="Tolerance on almost zero standard deviation in a background-offset grid bin. If all bins have a "
"standard deviation below this value, the background offset model is approximated without "
"inverse-variance weighting. Only applied if ``usePolynomial=True``.",
default=1e-8,
min=0.
min=0.0,
)


class MatchBackgroundsTask(pipeBase.Task):
class MatchBackgroundsTask(PipelineTask):
"""Match the backgrounds of a list of warped exposures to a reference.
This task is part of the background subtraction pipeline.
It matches the backgrounds of a list of science exposures to a reference
science exposure.
The reference exposure is chosen from the list of science exposures by
minimizing a cost function that penalizes high background complexity
(divergence from a plane), high variance, low global coverage, and low
coverage along image edges.
The cost function is a weighted sum of these four metrics.
The weights are set by the config parameters:
- ``bestRefWeightChi2``
- ``bestRefWeightVariance``
- ``bestRefWeightGlobalCoverage``
- ``bestRefWeightEdgeCoverage``
Attributes
----------
config : `MatchBackgroundsConfig`
Configuration for this task.
statsCtrl : `~lsst.afw.math.StatisticsControl`
Statistics control object.
"""

ConfigClass = MatchBackgroundsConfig
config: MatchBackgroundsConfig
_DefaultName = "matchBackgrounds"

def __init__(self, *args, **kwargs):
pipeBase.Task.__init__(self, *args, **kwargs)

self.sctrl = afwMath.StatisticsControl()
self.sctrl.setAndMask(afwImage.Mask.getPlaneBitMask(self.config.badMaskPlanes))
self.sctrl.setNanSafe(True)
super().__init__(**kwargs)
self.statsFlag = stringToStatisticsProperty(self.config.gridStatistic)
self.statsCtrl = StatisticsControl()
self.statsCtrl.setAndMask(Mask.getPlaneBitMask(self.config.badMaskPlanes))
self.statsCtrl.setNanSafe(True)
self.statsCtrl.setNumSigmaClip(self.config.numSigmaClip)
self.statsCtrl.setNumIter(self.config.numIter)
self.stringToInterpStyle = stringToInterpStyle(self.config.interpStyle)
self.undersampleStyle = stringToUndersampleStyle(self.config.undersampleStyle)

def runQuantum(self, butlerQC, inputRefs, outputRefs):
inputs = butlerQC.get(inputRefs)
outputs = self.run(**inputs)
butlerQC.put(outputs, outputRefs)

@timeMethod
def run(self, expRefList, expDatasetType, imageScalerList=None, refExpDataRef=None, refImageScaler=None):
"""Match the backgrounds of a list of coadd temp exposures to a reference coadd temp exposure.
def run(self, warps):
"""Match the backgrounds of a list of warped exposures to a reference.
Choose a refExpDataRef automatically if none supplied.
A reference warp will be chosen automatically if none is supplied.
Parameters
----------
expRefList : `list`
List of data references to science exposures to be background-matched;
all exposures must exist.
expDatasetType : `str`
Dataset type of exposures, e.g. 'goodSeeingCoadd_tempExp'.
imageScalerList : `list`, optional
List of image scalers (coaddUtils.ImageScaler);
if None then the images are not scaled.
refExpDataRef : `Unknown`, optional
Data reference for the reference exposure.
If None, then this task selects the best exposures from expRefList.
If not None then must be one of the exposures in expRefList.
refImageScaler : `Unknown`, optional
Image scaler for reference image;
ignored if refExpDataRef is None, else scaling is not performed if None.
warps : `list` [`~lsst.daf.butler.DeferredDatasetHandle`]
List of handles to warped science exposures to be background-matched.
Returns
-------
result : `lsst.pipe.base.Struct`
Results as a struct with attributes:
``backgroundInfoList``
A `list` of `pipeBase.Struct`, one per exposure in expRefList,
each of which contains these fields:
- ``isReference``: This is the reference exposure (only one
returned Struct will contain True for this
value, unless the ref exposure is listed multiple times).
- ``backgroundModel``: Differential background model
(afw.Math.Background or afw.Math.Approximate).
Add this to the science exposure to match the reference exposure.
- ``fitRMS``: The RMS of the fit. This is the sqrt(mean(residuals**2)).
- ``matchedMSE``: The MSE of the reference and matched images:
mean((refImage - matchedSciImage)**2);
should be comparable to difference image's mean variance.
- ``diffImVar``: The mean variance of the difference image.
All fields except isReference will be None if isReference True or the fit failed.
result : `~lsst.pipe.base.Struct`
Results as a struct with attributes:
``backgroundInfoList``
List of differential background models (`~lsst.afw.math.BackgroundList`),
one per input warp; add each to its science exposure to match the reference.
``matchedImageList``
List of background-matched warped science exposures.
Raises
------
RuntimeError
Raised if an exposure does not exist on disk.
"""
numExp = len(expRefList)
if numExp < 1:
raise pipeBase.TaskError("No exposures to match")

if expDatasetType is None:
raise pipeBase.TaskError("Must specify expDatasetType")

if imageScalerList is None:
self.log.info("imageScalerList is None; no scaling will be performed")
imageScalerList = [None] * numExp

if len(expRefList) != len(imageScalerList):
raise RuntimeError("len(expRefList) = %s != %s = len(imageScalerList)" %
(len(expRefList), len(imageScalerList)))

refInd = None
if refExpDataRef is None:
# select the best reference exposure from expRefList
refInd = self.selectRefExposure(
expRefList=expRefList,
imageScalerList=imageScalerList,
expDatasetType=expDatasetType,
)
refExpDataRef = expRefList[refInd]
refImageScaler = imageScalerList[refInd]

# refIndSet is the index of all exposures in expDataList that match the reference.
# It is used to avoid background-matching an exposure to itself. It is a list
# because it is possible (though unlikely) that expDataList will contain duplicates.
expKeyList = refExpDataRef.butlerSubset.butler.getKeys(expDatasetType)
refMatcher = DataRefMatcher(refExpDataRef.butlerSubset.butler, expDatasetType)
refIndSet = set(refMatcher.matchList(ref0=refExpDataRef, refList=expRefList))
if (numExp := len(warps)) < 1:
raise TaskError("No exposures to match")
# TODO: store ref visit ID between runs, to skip selection process?

if refInd is not None and refInd not in refIndSet:
raise RuntimeError("Internal error: selected reference %s not found in expRefList")
# First, build FFP BG models of each visit
visitTractBgs = self._makeTractBackgrounds(warps)

refExposure = refExpDataRef.get()
if refImageScaler is not None:
refMI = refExposure.getMaskedImage()
refImageScaler.scaleMaskedImage(refMI)
# Define a reference visit; 'visitTractBgs' is modified in-place to exclude it
refBg, refVis, bkgd = self._defineWarps(visitTractBgs, refVisitId=self.config.refWarpVisit)

debugIdKeyList = tuple(set(expKeyList) - set(['tract', 'patch']))
# Images must be scaled to a common ZP
# Converting everything to nJy to accomplish this
refExposure = refWarp.get()

Check failure (GitHub Actions / call-workflow / lint) on line 300 in python/lsst/pipe/tasks/matchBackgrounds.py: F821 undefined name 'refWarp'
instFluxToNanojanskyRef = self._fluxScale(refExposure)

self.log.info("Matching %d Exposures", numExp)

# Blank ref warp background as reference background
bkgdIm = bkgd.getImageF()
bkgdStatsIm = bkgd.getStatsImage()
bkgdIm *= 0
bkgdStatsIm *= 0
blank = BackgroundList(
(
bkgd,
stringToInterpStyle(self.config.interpStyle),
stringToUndersampleStyle(self.config.undersampleStyle),
ApproximateControl.UNKNOWN,
0,
0,
False,
)
)

backgroundInfoList = []
for ind, (toMatchRef, imageScaler) in enumerate(zip(expRefList, imageScalerList)):
if ind in refIndSet:
backgroundInfoStruct = pipeBase.Struct(
isReference=True,
backgroundModel=None,
fitRMS=0.0,
matchedMSE=None,
diffImVar=None,
matchedImageList = []
for exp in warps:
self.log.info(
"Matching background of %s to %s",
exp.dataId,
refWarp.dataId,

Check failure (GitHub Actions / call-workflow / lint) on line 328 in python/lsst/pipe/tasks/matchBackgrounds.py: F821 undefined name 'refWarp'
)
toMatchExposure = exp.get()
instFluxToNanojansky = self._fluxScale(toMatchExposure)
try:
backgroundInfoStruct = self.matchBackgrounds(
refExposure=refExposure,
sciExposure=toMatchExposure,
)
else:
self.log.info("Matching background of %s to %s", toMatchRef.dataId, refExpDataRef.dataId)
try:
toMatchExposure = toMatchRef.get()
if imageScaler is not None:
toMatchMI = toMatchExposure.getMaskedImage()
imageScaler.scaleMaskedImage(toMatchMI)
# store a string specifying the visit to label debug plot
self.debugDataIdString = ''.join([str(toMatchRef.dataId[vk]) for vk in debugIdKeyList])
backgroundInfoStruct = self.matchBackgrounds(
refExposure=refExposure,
sciExposure=toMatchExposure,
)
backgroundInfoStruct.isReference = False
except Exception as e:
self.log.warning("Failed to fit background %s: %s", toMatchRef.dataId, e)
backgroundInfoStruct = pipeBase.Struct(
isReference=False,
backgroundModel=None,
fitRMS=None,
matchedMSE=None,
diffImVar=None,
)
backgroundInfoStruct.isReference = False
except Exception as e:
self.log.warning("Failed to fit background %s: %s", exp.dataId, e)
backgroundInfoStruct = blank

backgroundInfoList.append(backgroundInfoStruct)
toMatchExposure.image /= instFluxToNanojansky # Back to cts
matchedImageList.append(toMatchExposure)

return pipeBase.Struct(
backgroundInfoList=backgroundInfoList)
backgroundInfoList.insert(refInd, blank)

Check failure (GitHub Actions / call-workflow / lint) on line 346 in python/lsst/pipe/tasks/matchBackgrounds.py: F821 undefined name 'refInd'
refExposure.image /= instFluxToNanojanskyRef # Back to cts
matchedImageList.insert(refInd, refExposure)

Check failure (GitHub Actions / call-workflow / lint) on line 348 in python/lsst/pipe/tasks/matchBackgrounds.py: F821 undefined name 'refInd'
return Struct(backgroundInfoList=backgroundInfoList, matchedImageList=matchedImageList)

@timeMethod
def selectRefExposure(self, expRefList, imageScalerList, expDatasetType):
"""Find best exposure to use as the reference exposure.
def _makeTractBackgrounds(self, warps, refWarpDDF=None):
"""Create full tract model of the backgrounds of all visits.
Calculate an appropriate reference exposure by minimizing a cost function that penalizes
high variance, high background level, and low coverage. Use the following config parameters:
- bestRefWeightCoverage
- bestRefWeightVariance
- bestRefWeightLevel
Parameters
----------
warps : `list`[`~lsst.daf.butler.DeferredDatasetHandle`]
List of warped exposures (of type `~lsst.afw.image.ExposureF`).
This is ordered by patch ID, then by visit ID.
refWarpDDF : `~lsst.daf.butler.DeferredDatasetHandle`, optional
Chosen reference warp to match to, if supplied.
Returns
-------
visitTractBackgrounds : `dict` [`int`, `TractBackground`]
Models of full tract backgrounds for all visits, keyed by visit ID.
"""
# First, separate warps by visit
visits = np.unique([i.dataId["visit"] for i in warps])

# Then build background models for each visit, and store
visitTractBackgrounds = {}
for i in range(len(visits)):
# Find all the warps for the visit
visitWarps = [j for j in warps if j.dataId["visit"] == visits[i]]
# Set up empty full tract background model object
bgModelBase = TractBackground(self.config.tractBgModel)

bgModels = []
for warp in visitWarps:
msg = "Constructing FFP background model for visit %d using %d patches"
self.log.debug(
msg,
visits[i],
len(visitWarps),
)
if refWarpDDF is not None:
msg = "Doing difference imaging: reference warp visit ID: %d"
self.log.debug(msg, refWarpDDF.dataId["visit"])
visitWarp = warp.get()
# If a reference image is supplied, make a BG of the difference
if refWarpDDF is not None:
refWarp = refWarpDDF.get()
visitWarp = refWarp - visitWarp
bgModel = bgModelBase.clone()
bgModel.addWarp(visitWarp)
bgModels.append(bgModel)

# Merge warp models to make a single full tract background model
for bgModel, warp in zip(bgModels, visitWarps):
msg = (
"Patch %d: Merging %d unmasked pixels (%.1f%s of detector area) into tract plane BG "
"model"
)
self.log.debug(
msg,
warp.dataId["patch"],
bgModel._numbers.getArray().sum(),
100 * bgModel._numbers.getArray().sum() / visitWarp.getBBox().getArea(),
"%",
)
bgModelBase.merge(bgModel)
visitTractBackgrounds[visits[i]] = bgModelBase
return visitTractBackgrounds

@timeMethod
def _defineWarps(self, visitTractBackgrounds, refVisitId=None):
"""Define the reference visit and list of comparison visits.
If no reference visit ID is supplied, this method calculates an
appropriate reference exposure from the supplied list of visit
backgrounds by minimizing a cost function that penalizes high
background complexity (divergence from a plane), high variance, and low
global coverage.
Parameters
----------
expRefList : `list`
List of data references to exposures.
Retrieves dataset type specified by expDatasetType.
If an exposure is not found, it is skipped with a warning.
imageScalerList : `list`
List of image scalers (coaddUtils.ImageScaler);
must be the same length as expRefList.
expDatasetType : `str`
Dataset type of exposure: e.g. 'goodSeeingCoadd_tempExp'.
visitTractBackgrounds : `dict` [`int`, `TractBackground`]
Models of full tract backgrounds for all visits, keyed by visit ID.
refVisitId : `int`, optional
ID of the reference visit.
If None, the best visit is chosen using the dictionary of existing
backgrounds.
Returns
-------
bestIdx : `int`
Index of best exposure.
refBg : `~lsst.afw.math.BackgroundMI`
Reference background to match to.
refVis : `int`
Visit ID of the reference visit removed from the dictionary.
fitBg : `~lsst.afw.math.BackgroundMI`
Temporary background model, used to make a blank BG for the ref
Notes
-----
This method modifies the input dictionary of visit backgrounds in place
by removing the entry for the reference visit.
Raises
------
RuntimeError
Raised if none of the exposures in expRefList are found.
"""
self.log.info("Calculating best reference visit")
varList = []
meanBkgdLevelList = []
coverageList = []

if len(expRefList) != len(imageScalerList):
raise RuntimeError("len(expRefList) = %s != %s = len(imageScalerList)" %
(len(expRefList), len(imageScalerList)))

for expRef, imageScaler in zip(expRefList, imageScalerList):
exposure = expRef.get()
maskedImage = exposure.getMaskedImage()
if imageScaler is not None:
try:
imageScaler.scaleMaskedImage(maskedImage)
except Exception:
# need to put a place holder in Arr
varList.append(numpy.nan)
meanBkgdLevelList.append(numpy.nan)
coverageList.append(numpy.nan)
continue
statObjIm = afwMath.makeStatistics(maskedImage.getImage(), maskedImage.getMask(),
afwMath.MEAN | afwMath.NPOINT | afwMath.VARIANCE, self.sctrl)
meanVar, meanVarErr = statObjIm.getResult(afwMath.VARIANCE)
meanBkgdLevel, meanBkgdLevelErr = statObjIm.getResult(afwMath.MEAN)
npoints, npointsErr = statObjIm.getResult(afwMath.NPOINT)
varList.append(meanVar)
meanBkgdLevelList.append(meanBkgdLevel)
coverageList.append(npoints)
if not coverageList:
raise pipeBase.TaskError(
"None of the candidate %s exist; cannot select best reference exposure" % (expDatasetType,))

# Normalize metrics to range from 0 to 1
varArr = numpy.array(varList)/numpy.nanmax(varList)
meanBkgdLevelArr = numpy.array(meanBkgdLevelList)/numpy.nanmax(meanBkgdLevelList)
coverageArr = numpy.nanmin(coverageList)/numpy.array(coverageList)

costFunctionArr = self.config.bestRefWeightVariance * varArr
costFunctionArr += self.config.bestRefWeightLevel * meanBkgdLevelArr
costFunctionArr += self.config.bestRefWeightCoverage * coverageArr
return numpy.nanargmin(costFunctionArr)
# User-defined reference visit, if one has been supplied
if refVisitId:
visits = [visId for visId in visitTractBackgrounds.keys()]
try:
refTractBackground = visitTractBackgrounds.pop(refVisitId)
self.log.info("Using user-supplied reference visit %d", refVisitId)
# TODO: need to return a background object here!
return refTractBackground, refVisitId
except KeyError:
raise TaskError(f"Reference visit {refVisitId} is not found in the list of warps.")

# Extract mean/var/npoints for each warp
fitChi2s = [] # Background goodness of fit
# warpVars = [] # Variance
fitNPointsGlobal = [] # Global coverage
# warpNPointsEdge = [] # Edge coverage
visits = [] # To ensure dictionary key order is correct
for vis in visitTractBackgrounds:
visits.append(vis)
# Fit a model to the FFP
# TODO: need a variance plane in the tractBg as well
tractBg = visitTractBackgrounds[vis].getStatsImage()
fitBg, _ = self._makeBackground(tractBg, binSize=1)
fitBg.getStatsImage().variance = ImageF(np.sqrt(fitBg.getStatsImage().image.array))

# Return an approximation to the background
approxCtrl = ApproximateControl(ApproximateControl.CHEBYSHEV, 1, 1, self.config.approxWeighting)
fitApprox = fitBg.getApproximate(approxCtrl, self.undersampleStyle)

fitBgSub = MaskedImageF(ImageF(tractBg.array - fitApprox.getImage().array))
bad_mask_bit_mask = fitBgSub.mask.getPlaneBitMask("BAD")
fitBgSub.mask.array[np.isnan(fitBgSub.image.array)] = bad_mask_bit_mask

fitStats = makeStatistics(fitBgSub.image, fitBgSub.mask, VARIANCE | NPOINT, self.statsCtrl)

good = (fitBgSub.mask.array.astype(int) & bad_mask_bit_mask) == 0
dof = len(good[good]) - 6 # Assuming eq. of plane
fitChi2 = (
np.nansum(fitBgSub.image.array[good] ** 2 / fitBg.getStatsImage().variance.array[good]) / dof
)
# fitVar, _ = fitStats.getResult(VARIANCE) # Will add this back later
fitNPointGlobal, _ = fitStats.getResult(NPOINT)
# This becomes pointless: it's a circle within a square. Coverage is what's needed for that.
# warpNPointEdge = (
# np.sum(~np.isnan(warp.image.array[:, 0])) # Left edge
# + np.sum(~np.isnan(warp.image.array[:, -1])) # Right edge
# + np.sum(~np.isnan(warp.image.array[0, :])) # Bottom edge
# + np.sum(~np.isnan(warp.image.array[-1, :])) # Top edge
# )
fitChi2s.append(fitChi2)
# warpVars.append(warpVar)
fitNPointsGlobal.append(int(fitNPointGlobal))
# warpNPointsEdge.append(warpNPointEdge)

self.log.info(
# "Sci exp. visit %d; BG fit Chi^2=%.1f, var=%.1f nJy, nPoints global=%d, nPoints edge=%d",
"Sci exp. visit %d; BG fit Chi^2=%.1f, nPoints global=%d",
vis,
fitChi2,
# warpVar,
fitNPointGlobal,
# warpNPointEdge,
)
# Normalize mean/var/npoints to range from 0 to 1
fitChi2sFrac = np.array(fitChi2s) / np.nanmax(fitChi2s)
# warpVarsFrac = np.array(warpVars) / np.nanmax(warpVars)
fitNPointsGlobalFrac = np.nanmin(fitNPointsGlobal) / np.array(fitNPointsGlobal)
# warpNPointsEdgeFrac = np.nanmin(warpNPointsEdge) / np.array(warpNPointsEdge)

# Calculate cost function values
costFunctionVals = self.config.bestRefWeightChi2 * fitChi2sFrac
# costFunctionVals += self.config.bestRefWeightVariance * warpVarsFrac
costFunctionVals += self.config.bestRefWeightGlobalCoverage * fitNPointsGlobalFrac
# costFunctionVals += self.config.bestRefWeightEdgeCoverage * warpNPointsEdgeFrac

ind = np.nanargmin(costFunctionVals)
refVis = visits[ind]
refBg = visitTractBackgrounds.pop(refVis)
self.log.info("Using best reference visit %d", refVis)
return refBg, refVis, fitBg
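
For clarity, the reference selection above reduces to the following normalization and argmin; a sketch with made-up numbers, noting that only the chi-squared and global-coverage terms are currently active (the variance and edge-coverage terms are commented out):

import numpy as np

fitChi2s = np.array([12.0, 4.5, 9.0])  # background-fit chi^2 per visit
fitNPointsGlobal = np.array([8000.0, 7500.0, 9000.0])  # valid points per visit
cost = (
    0.2 * fitChi2s / np.nanmax(fitChi2s)  # bestRefWeightChi2
    + 0.2 * np.nanmin(fitNPointsGlobal) / fitNPointsGlobal  # bestRefWeightGlobalCoverage
)
bestIdx = int(np.nanargmin(cost))  # 1: the visit with the flattest, best-covered background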

def _makeBackground(self, warp: MaskedImageF, binSize) -> tuple[BackgroundMI, BackgroundControl]:
"""Generate a simple binned background masked image for warped data.
Parameters
----------
warp : `~lsst.afw.image.MaskedImageF`
Warped exposure for which to estimate the background.
binSize : `int`
Bin size, in pixels, for the background grid.
Returns
-------
bkgd: `~lsst.afw.math.BackgroundMI`
Background model of masked warp.
bgCtrl: `~lsst.afw.math.BackgroundControl`
Background control object.
"""
nx = warp.getWidth() // binSize
ny = warp.getHeight() // binSize

bgCtrl = BackgroundControl(nx, ny, self.statsCtrl, self.statsFlag)
bgCtrl.setUndersampleStyle(self.config.undersampleStyle)
bkgd = makeBackground(warp, bgCtrl)

return bkgd, bgCtrl
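
A hedged example of how such a model might be applied, assuming `task` is a configured MatchBackgroundsTask, `diffMI` a `MaskedImageF` holding the reference-minus-science difference, and `sciExposure` the science exposure being matched (all hypothetical names here):

bkgd, bgCtrl = task._makeBackground(diffMI, binSize=task.config.binSize)
bkgdImage = bkgd.getImageF(task.config.interpStyle, task.config.undersampleStyle)
sciExposure.maskedImage += bkgdImage  # add the offset model so the science exposure matches the reference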

def _fluxScale(self, exposure):
"""Scales image to nJy flux using photometric calibration.
Parameters
----------
exposure: `lsst.afw.image._exposure.ExposureF`
Exposure to scale.
Returns
-------
fluxZp: `float`
Counts to nanojanskies conversion factor
"""
fluxZp = exposure.getPhotoCalib().instFluxToNanojansky(1)
exposure.image *= fluxZp
return fluxZp
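
A minimal sketch of the intended round trip, mirroring how `run` scales warps to a common zeropoint in nJy and back; `exp` is a hypothetical `ExposureF` with a photometric calibration and `task` as above:

scale = task._fluxScale(exp)  # exp.image is now in nJy
# ... background matching is done on the nJy-scaled image ...
exp.image /= scale  # convert back to counts before writing out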

@timeMethod
def matchBackgrounds(self, refExposure, sciExposure):
"""Match science exposure's background level to that of reference exposure.
"""Match science exposure's background level to that of reference
exposure.
        Process creates a difference image of the reference exposure minus the
        science exposure, and then generates an afw.math.Background object. It
        assumes (but does not require/check) that the mask plane already has
        detections set. If detections have not been set/masked, sources will
        bias the background estimation.
        The 'background' of the difference image is smoothed by spline
        interpolation (by the Background class) or by polynomial interpolation
        by the Approximate class. This model of difference image is added to
        the science exposure in memory.
        Fit diagnostics are also calculated and logged.
        Parameters
        ----------
refExposure : `lsst.afw.image.Exposure`
Reference exposure.
sciExposure : `lsst.afw.image.Exposure`
            Science exposure; ultimately modified by changing the background
            level to match that of the reference exposure.
Returns
-------
        model : `~lsst.afw.math.BackgroundList`
            Background model of the difference image (reference minus
            science), converted back into counts and wrapped in a
            background list.
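        Notes
        -----
        Usage sketch (names illustrative): ``model = task.matchBackgrounds(refExp, sciExp)``
        adds the fitted difference background to ``sciExp`` in place and
        returns the fitted model, converted back into counts and wrapped in a
        background list.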
"""
if lsstDebug.Info(__name__).savefits:
refExposure.writeFits(lsstDebug.Info(__name__).figpath + 'refExposure.fits')
sciExposure.writeFits(lsstDebug.Info(__name__).figpath + 'sciExposure.fits')

# Check Configs for polynomials:
if self.config.usePolynomial:
x, y = sciExposure.getDimensions()
shortSideLength = min(x, y)
if shortSideLength < self.config.binSize:
raise ValueError("%d = config.binSize > shorter dimension = %d" % (self.config.binSize,
shortSideLength))
raise ValueError(
"%d = config.binSize > shorter dimension = %d" % (self.config.binSize, shortSideLength)
)
npoints = shortSideLength // self.config.binSize
if shortSideLength % self.config.binSize != 0:
npoints += 1

# If order of polynomial to be fit > number of bins to fit, error
if self.config.order > npoints - 1:
raise ValueError("%d = config.order > npoints - 1 = %d" % (self.config.order, npoints - 1))

# Check that exposures are same shape
        if sciExposure.getDimensions() != refExposure.getDimensions():
wSci, hSci = sciExposure.getDimensions()
wRef, hRef = refExposure.getDimensions()
raise RuntimeError(
"Exposures are different dimensions. sci:(%i, %i) vs. ref:(%i, %i)" %
(wSci, hSci, wRef, hRef))

statsFlag = getattr(afwMath, self.config.gridStatistic)
self.sctrl.setNumSigmaClip(self.config.numSigmaClip)
self.sctrl.setNumIter(self.config.numIter)
"Exposures are different dimensions. sci:(%i, %i) vs. ref:(%i, %i)" % (wSci, hSci, wRef, hRef)
)

im = refExposure.getMaskedImage()
        diffMI = im.clone()
diffMI -= sciExposure.getMaskedImage()

        bkgd, bctrl = self._makeBackground(diffMI, binSize=self.config.binSize)

# Some config and input checks if config.usePolynomial:
# 1) Check that order/bin size make sense:
# 2) Change binsize or order if underconstrained.
if self.config.usePolynomial:
order = self.config.order
            bgX, bgY, bgZ, bgdZ = self._gridImage(diffMI, self.config.binSize, self.statsFlag)
minNumberGridPoints = min(len(set(bgX)), len(set(bgY)))
if len(bgZ) == 0:
raise ValueError("No overlap with reference. Nothing to match")
elif minNumberGridPoints <= self.config.order:
                # must lower order or raise number of bins, or throw exception
if self.config.undersampleStyle == "THROW_EXCEPTION":
raise ValueError("Image does not cover enough of ref image for order and binsize")
elif self.config.undersampleStyle == "REDUCE_INTERP_ORDER":
self.log.warning("Reducing order to %d", (minNumberGridPoints - 1))
order = minNumberGridPoints - 1
elif self.config.undersampleStyle == "INCREASE_NXNYSAMPLE":
                    newBinSize = (minNumberGridPoints * self.config.binSize) // (self.config.order + 1)
bctrl.setNxSample(newBinSize)
bctrl.setNySample(newBinSize)
                    bkgd = makeBackground(diffMI, bctrl)  # do over
self.log.warning("Decreasing binsize to %d", newBinSize)

            # If there is no variance in any image pixels,
            # do not weight bins by inverse variance
            isUniformImageDiff = not np.any(bgdZ > self.config.gridStdevEpsilon)
weightByInverseVariance = False if isUniformImageDiff else self.config.approxWeighting

# Add offset to sciExposure
try:
if self.config.usePolynomial:
                actrl = ApproximateControl(
                    ApproximateControl.CHEBYSHEV, order, order, weightByInverseVariance
                )
                undersampleStyle = stringToUndersampleStyle(self.config.undersampleStyle)
approx = bkgd.getApproximate(actrl, undersampleStyle)
bkgdImage = approx.getImage()
else:
bkgdImage = bkgd.getImageF(self.config.interpStyle, self.config.undersampleStyle)
except Exception as e:
raise RuntimeError("Background/Approximation failed to interp image %s: %s" % (
self.debugDataIdString, e))
raise RuntimeError(
"Background/Approximation failed to interp image %s: %s" % (sciExposure.dataId, e)
)

instFluxToNanojansky = sciExposure.getPhotoCalib().instFluxToNanojansky(1)
sciMI = sciExposure.getMaskedImage()
sciMI += bkgdImage
        del sciMI  # sciExposure is now a BG-matched image

# Need RMS from fit: 2895 will replace this:
rms = 0.0
        bgX, bgY, bgZ, bgdZ = self._gridImage(diffMI, self.config.binSize, self.statsFlag)
        x0, y0 = diffMI.getXY0()
        modelValueArr = np.empty(len(bgZ))
        for i in range(len(bgX)):
            modelValueArr[i] = bkgdImage[int(bgX[i] - x0), int(bgY[i] - y0), LOCAL]
        resids = bgZ - modelValueArr
        rms = np.sqrt(np.mean(resids[~np.isnan(resids)] ** 2))

if lsstDebug.Info(__name__).savefits:
sciExposure.writeFits(lsstDebug.Info(__name__).figpath + 'sciMatchedExposure.fits')

if lsstDebug.Info(__name__).savefig:
bbox = geom.Box2D(refExposure.getMaskedImage().getBBox())
try:
                self._debugPlot(bgX, bgY, bgZ, bgdZ, bkgdImage, bbox, modelValueArr, resids)
except Exception as e:
self.log.warning('Debug plot not generated: %s', e)

        meanVar = makeStatistics(diffMI.getVariance(), diffMI.getMask(), MEANCLIP, self.statsCtrl).getValue()

diffIm = diffMI.getImage()
diffIm -= bkgdImage # diffMI should now have a mean ~ 0
del diffIm
        mse = makeStatistics(diffMI, MEANSQUARE, self.statsCtrl).getValue()

outBkgd = approx if self.config.usePolynomial else bkgd

        # Convert this back into counts
        statsIm = outBkgd.getStatsImage()
        statsIm /= instFluxToNanojansky
        bkgdIm = outBkgd.getImageF()
        bkgdIm /= instFluxToNanojansky

        self.log.info(
            "Visit %d; difference BG fit RMS=%.1f cts, matched MSE=%.1f cts, mean variance=%.1f cts",
            sciExposure.getInfo().getVisitInfo().id,
            rms,
            mse,
            meanVar,
        )
        # TODO: verify this is correct (borrowed from background.py)
        return BackgroundList(
            (
                outBkgd,
                stringToInterpStyle(self.config.interpStyle),
                stringToUndersampleStyle(self.config.undersampleStyle),
                ApproximateControl.UNKNOWN,
                0,
                0,
                False,
            )
        )

def _debugPlot(self, X, Y, Z, dZ, modelImage, bbox, model, resids):
"""Generate a plot showing the background fit and residuals.
It is called when lsstDebug.Info(__name__).savefig = True.
Saves the fig to lsstDebug.Info(__name__).figpath.
Displays on screen if lsstDebug.Info(__name__).display = True.
Parameters
----------
X : `numpy.ndarray`, (N,)
Array of x positions.
Y : `numpy.ndarray`, (N,)
Array of y positions.
Z : `numpy.ndarray`
Array of the grid values that were interpolated.
dZ : `numpy.ndarray`, (len(Z),)
Array of the error on the grid values.
        modelImage : `lsst.afw.image.ImageF`
            Image of the model of the fit.
        bbox : `lsst.geom.Box2D`
            Bounding box of the fitted region, used to set the plot extent.
        model : `numpy.ndarray`, (len(Z),)
            Array of len(Z) containing the grid values predicted by the model.
        resids : `numpy.ndarray`, (len(Z),)
            Residuals of the fit, Z - model.
"""
import matplotlib.pyplot as plt
import matplotlib.colors
from mpl_toolkits.axes_grid1 import ImageGrid
zeroIm = afwImage.MaskedImageF(geom.Box2I(bbox))
zeroIm += modelImage
x0, y0 = zeroIm.getXY0()
dx, dy = zeroIm.getDimensions()
if len(Z) == 0:
self.log.warning("No grid. Skipping plot generation.")
else:
            zmax, zmin = numpy.max(Z), numpy.min(Z)
            norm = matplotlib.colors.Normalize(vmax=zmax, vmin=zmin)
            maxdiff = numpy.max(numpy.abs(resids))
            diffnorm = matplotlib.colors.Normalize(vmax=maxdiff, vmin=-maxdiff)
rms = numpy.sqrt(numpy.mean(resids**2))
fig = plt.figure(1, (8, 6))
meanDz = numpy.mean(dZ)
grid = ImageGrid(fig, 111, nrows_ncols=(1, 2), axes_pad=0.1,
share_all=True, label_mode="L", cbar_mode="each",
cbar_size="7%", cbar_pad="2%", cbar_location="top")
im = grid[0].imshow(zeroIm.getImage().getArray(),
extent=(x0, x0+dx, y0+dy, y0), norm=norm,
cmap='Spectral')
im = grid[0].scatter(X, Y, c=Z, s=15.*meanDz/dZ, edgecolor='none', norm=norm,
marker='o', cmap='Spectral')
im2 = grid[1].scatter(X, Y, c=resids, edgecolor='none', norm=diffnorm,
marker='s', cmap='seismic')
grid.cbar_axes[0].colorbar(im)
grid.cbar_axes[1].colorbar(im2)
grid[0].axis([x0, x0+dx, y0+dy, y0])
grid[1].axis([x0, x0+dx, y0+dy, y0])
grid[0].set_xlabel("model and grid")
grid[1].set_xlabel("residuals. rms = %0.3f"%(rms))
if lsstDebug.Info(__name__).savefig:
fig.savefig(lsstDebug.Info(__name__).figpath + self.debugDataIdString + '.png')
if lsstDebug.Info(__name__).display:
plt.show()
plt.clf()

def _gridImage(self, maskedImage, binsize, statsFlag):
"""Private method to grid an image for debugging."""
width, height = maskedImage.getDimensions()
x0, y0 = maskedImage.getXY0()
xedges = numpy.arange(0, width, binsize)
yedges = numpy.arange(0, height, binsize)
xedges = numpy.hstack((xedges, width)) # add final edge
yedges = numpy.hstack((yedges, height)) # add final edge

# Use lists/append to protect against the case where
# a bin has no valid pixels and should not be included in the fit
bgX = []
bgY = []
bgZ = []
bgdZ = []

for ymin, ymax in zip(yedges[0:-1], yedges[1:]):
for xmin, xmax in zip(xedges[0:-1], xedges[1:]):
subBBox = geom.Box2I(geom.PointI(int(x0 + xmin), int(y0 + ymin)),
geom.PointI(int(x0 + xmax-1), int(y0 + ymax-1)))
subIm = afwImage.MaskedImageF(maskedImage, subBBox, afwImage.PARENT, False)
stats = afwMath.makeStatistics(subIm,
afwMath.MEAN | afwMath.MEANCLIP | afwMath.MEDIAN
| afwMath.NPOINT | afwMath.STDEV,
self.sctrl)
npoints, _ = stats.getResult(afwMath.NPOINT)
if npoints >= 2:
stdev, _ = stats.getResult(afwMath.STDEV)
if stdev < self.config.gridStdevEpsilon:
stdev = self.config.gridStdevEpsilon
bgX.append(0.5 * (x0 + xmin + x0 + xmax))
bgY.append(0.5 * (y0 + ymin + y0 + ymax))
bgdZ.append(stdev/numpy.sqrt(npoints))
est, _ = stats.getResult(statsFlag)
bgZ.append(est)

return numpy.array(bgX), numpy.array(bgY), numpy.array(bgZ), numpy.array(bgdZ)


class DataRefMatcher:
"""Match data references for a specified dataset type.
Note that this is not exact, but should suffice for this task
until there is better support for this kind of thing in the butler.
Parameters
----------
butler : `lsst.daf.butler.Butler`
        Butler to search for matches in.
datasetType : `str`
Dataset type to match.
"""

def __init__(self, butler, datasetType):
self._datasetType = datasetType # for diagnostics
self._keyNames = butler.getKeys(datasetType)

def _makeKey(self, ref):
"""Return a tuple of values for the specified keyNames.
Parameters
----------
ref : `Unknown`
Data reference.
Raises
------
KeyError
Raised if ref.dataId is missing a key in keyNames.
"""
return tuple(ref.dataId[key] for key in self._keyNames)

def isMatch(self, ref0, ref1):
"""Return True if ref0 == ref1.
Parameters
----------
ref0 : `Unknown`
Data for ref 0.
ref1 : `Unknown`
Data for ref 1.
Raises
------
KeyError
Raised if either ID is missing a key in keyNames.
"""
return self._makeKey(ref0) == self._makeKey(ref1)

def matchList(self, ref0, refList):
"""Return a list of indices of matches.
Parameters
----------
ref0 : `Unknown`
Data for ref 0.
        refList : `list`
            List of data references to match against ref0.
Returns
-------
matches : `tuple`
Tuple of indices of matches.
Raises
------
KeyError
Raised if any ID is missing a key in keyNames.
"""
key0 = self._makeKey(ref0)
return tuple(ind for ind, ref in enumerate(refList) if self._makeKey(ref) == key0)