Skip to content

Commit

Permalink
Still working on cleaning up tests for new decam image
Browse files Browse the repository at this point in the history
  • Loading branch information
rknop committed Jun 26, 2024
1 parent 9e3687f commit 54a8476
Show file tree
Hide file tree
Showing 3 changed files with 22 additions and 23 deletions.
2 changes: 1 addition & 1 deletion tests/pipeline/test_backgrounding.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ def test_measuring_background(decam_processed_image, backgrounder):
# is the background subtracted image a good representation?
mu, sig = sigma_clipping(ds.image.nandata_bgsub) # also checks that nandata_bgsub exists
assert mu == pytest.approx(0, abs=sig)
assert sig < 10
assert sig < 25

# most of the pixels are inside a 3 sigma range
assert np.sum(np.abs(ds.image.nandata_bgsub) < 3 * sig) > 0.9 * ds.image.nandata.size
Expand Down
24 changes: 12 additions & 12 deletions tests/pipeline/test_photo_cal.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,18 +51,18 @@ def test_decam_photo_cal( decam_datastore, photometor, blocking_plots ):
plt.show(block=blocking_plots)
fig.savefig( ofpath )

# WORRY : zp + apercor (for the first aperture) is off from the
# aperture-specific zeropoint that the lensgrinder pipeline
# calculated for this image by 0.13 mags. That was calibrated to
# either DECaPS or PanSTARRS (investigate this), and it's
# entirely possible that it's the lensgrinder zeropoint that is
# off.
assert ds.zp.zp == pytest.approx( 30.168, abs=0.01 )
assert ds.zp.dzp == pytest.approx( 1.38e-7, rel=0.1 ) # That number is absurd, but oh well
assert ds.zp.aper_cor_radii == pytest.approx( [ 2.915, 4.331, 8.661, 12.992,
17.323, 21.653, 30.315, 43.307 ], abs=0.01 )
assert ds.zp.aper_cors == pytest.approx( [-0.457, -0.177, -0.028, -0.007,
0.0, 0.003, 0.005, 0.006 ], abs=0.01 )
# WORRY : zp + apercor (for the first aperture) is off from the
# aperture-specific zeropoint that the lensgrinder pipeline
# calculated for this image by 0.13 mags. That was calibrated to
# either DECaPS or PanSTARRS (investigate this), and it's
# entirely possible that it's the lensgrinder zeropoint that is
# off.
# NOTE: the WORRY comment above was written for a different image;
# TODO: investigate whether it still applies to the image we are
# looking at now.
assert ds.zp.zp == pytest.approx( 30.128, abs=0.01 )
assert ds.zp.dzp == pytest.approx( 2.15e-6, rel=0.1 ) # That number is absurd, but oh well
assert ds.zp.aper_cor_radii == pytest.approx( [ 4.164, 8.328, 12.492, 20.819 ], abs=0.01 )
assert ds.zp.aper_cors == pytest.approx( [ -0.205, -0.035, -0.006, 0. ], abs=0.01 )


def test_warnings_and_exceptions(decam_datastore, photometor):
Expand Down
19 changes: 9 additions & 10 deletions tests/pipeline/test_preprocessing.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ def test_preprocessing(
# _get_default_calibrators won't be called as a side effect of calls
# to Preprocessor.run(). (To avoid committing.)
preprocessor.pars.test_parameter = uuid.uuid4().hex # make a new Provenance for this temporary image
ds = preprocessor.run( decam_exposure, 'N1' )
ds = preprocessor.run( decam_exposure, 'S3' )
assert preprocessor.has_recalculated

# TODO: this might not work, because for some filters (g) the fringe correction doesn't happen
Expand All @@ -29,8 +29,8 @@ def test_preprocessing(
assert preprocessor.pars.calibset == 'externally_supplied'
assert preprocessor.pars.flattype == 'externally_supplied'
assert preprocessor.pars.steps_required == [ 'overscan', 'linearity', 'flat', 'fringe' ]
ds.exposure.filter[:1] == 'g'
ds.section_id == 'N1'
ds.exposure.filter[:1] == 'r'
ds.section_id == 'S3'
assert set( preprocessor.stepfiles.keys() ) == { 'flat', 'linearity' }

# Make sure that the BSCALE and BZERO keywords got stripped
Expand All @@ -42,23 +42,22 @@ def test_preprocessing(

# Flatfielding should have improved the sky noise, though for DECam
# it looks like this is a really small effect. I've picked out a
# section that's all sky (though it may be in the wings of a bright
# star, but, whatever).
# section that's all sky.

# 56 is how much got trimmed from this image
rawsec = ds.image.raw_data[ 2226:2267, 267+56:308+56 ]
flatsec = ds.image.data[ 2226:2267, 267:308 ]
rawsec = ds.image.raw_data[ 1780:1820, 830+56:870+56 ]
flatsec = ds.image.data[ 1780:1820, 830:870 ]
assert flatsec.std() < rawsec.std()

# Make sure that some bad pixels got masked, but not too many
assert np.all( ds.image._flags[ 1390:1400, 1430:1440 ] == 4 )
assert np.all( ds.image._flags[ 4085:4093, 1080:1100 ] == 1 )
assert np.all( ds.image._flags[ 2756:2773, 991:996 ] == 4 )
assert np.all( ds.image._flags[ 0:4095, 57:60 ] == 1 )
assert ( ds.image._flags != 0 ).sum() / ds.image.data.size < 0.03

# Make sure that the weight is reasonable
assert not np.any( ds.image._weight < 0 )
assert ( ds.image.data[3959:3980, 653:662].std() ==
pytest.approx( 1./np.sqrt(ds.image._weight[3959:3980, 653:662]), rel=0.2 ) )
pytest.approx( 1./np.sqrt(ds.image._weight[3959:3980, 653:662]), rel=0.05 ) )

# Make sure that the expected files get written
try:
Expand Down

0 comments on commit 54a8476

Please sign in to comment.