From 54a8476ebbf6201ec8c31d4b9870ee97907c4fc8 Mon Sep 17 00:00:00 2001
From: Rob Knop
Date: Wed, 26 Jun 2024 13:34:26 -0700
Subject: [PATCH] Still working on cleaning up tests for new decam image

---
 tests/pipeline/test_backgrounding.py |  2 +-
 tests/pipeline/test_photo_cal.py     | 24 ++++++++++++------------
 tests/pipeline/test_preprocessing.py | 19 +++++++++----------
 3 files changed, 22 insertions(+), 23 deletions(-)

diff --git a/tests/pipeline/test_backgrounding.py b/tests/pipeline/test_backgrounding.py
index 0d995ad5..b45847b7 100644
--- a/tests/pipeline/test_backgrounding.py
+++ b/tests/pipeline/test_backgrounding.py
@@ -18,7 +18,7 @@ def test_measuring_background(decam_processed_image, backgrounder):
     # is the background subtracted image a good representation?
     mu, sig = sigma_clipping(ds.image.nandata_bgsub) # also checks that nandata_bgsub exists
     assert mu == pytest.approx(0, abs=sig)
-    assert sig < 10
+    assert sig < 25

     # most of the pixels are inside a 3 sigma range
     assert np.sum(np.abs(ds.image.nandata_bgsub) < 3 * sig) > 0.9 * ds.image.nandata.size
diff --git a/tests/pipeline/test_photo_cal.py b/tests/pipeline/test_photo_cal.py
index ee274a61..891a719e 100644
--- a/tests/pipeline/test_photo_cal.py
+++ b/tests/pipeline/test_photo_cal.py
@@ -51,18 +51,18 @@ def test_decam_photo_cal( decam_datastore, photometor, blocking_plots ):
     plt.show(block=blocking_plots)
     fig.savefig( ofpath )

-    # WORRY : zp + apercor (for the first aperture) is off from the
-    # aperture-specific zeropoint that the lensgrinder pipeline
-    # calculated for this image by 0.13 mags. That was calibrated to
-    # either DECaPS or PanSTARRS (investigate this), and it's
-    # entirely possible that it's the lensgrinder zeropoint that is
-    # off.
-    assert ds.zp.zp == pytest.approx( 30.168, abs=0.01 )
-    assert ds.zp.dzp == pytest.approx( 1.38e-7, rel=0.1 ) # That number is absurd, but oh well
-    assert ds.zp.aper_cor_radii == pytest.approx( [ 2.915, 4.331, 8.661, 12.992,
-                                                    17.323, 21.653, 30.315, 43.307 ], abs=0.01 )
-    assert ds.zp.aper_cors == pytest.approx( [-0.457, -0.177, -0.028, -0.007,
-                                              0.0, 0.003, 0.005, 0.006 ], abs=0.01 )
+    # WORRY : zp + apercor (for the first aperture) is off from the
+    # aperture-specific zeropoint that the lensgrinder pipeline
+    # calculated for this image by 0.13 mags. That was calibrated to
+    # either DECaPS or PanSTARRS (investigate this), and it's
+    # entirely possible that it's the lensgrinder zeropoint that is
+    # off. <--- that comment was written for a different image.
+    # investigate if it's still true for the image we're looking
+    # at now.
+    assert ds.zp.zp == pytest.approx( 30.128, abs=0.01 )
+    assert ds.zp.dzp == pytest.approx( 2.15e-6, rel=0.1 ) # That number is absurd, but oh well
+    assert ds.zp.aper_cor_radii == pytest.approx( [ 4.164, 8.328, 12.492, 20.819 ], abs=0.01 )
+    assert ds.zp.aper_cors == pytest.approx( [ -0.205, -0.035, -0.006, 0. ], abs=0.01 )


 def test_warnings_and_exceptions(decam_datastore, photometor):
diff --git a/tests/pipeline/test_preprocessing.py b/tests/pipeline/test_preprocessing.py
index 0095042d..5e6613f8 100644
--- a/tests/pipeline/test_preprocessing.py
+++ b/tests/pipeline/test_preprocessing.py
@@ -17,7 +17,7 @@ def test_preprocessing(
     # _get_default_calibrators won't be called as a side effect of calls
     # to Preprocessor.run(). (To avoid committing.)
     preprocessor.pars.test_parameter = uuid.uuid4().hex # make a new Provenance for this temporary image
-    ds = preprocessor.run( decam_exposure, 'N1' )
+    ds = preprocessor.run( decam_exposure, 'S3' )
     assert preprocessor.has_recalculated

     # TODO: this might not work, because for some filters (g) the fringe correction doesn't happen
@@ -29,8 +29,8 @@
     assert preprocessor.pars.calibset == 'externally_supplied'
     assert preprocessor.pars.flattype == 'externally_supplied'
     assert preprocessor.pars.steps_required == [ 'overscan', 'linearity', 'flat', 'fringe' ]
-    ds.exposure.filter[:1] == 'g'
-    ds.section_id == 'N1'
+    assert ds.exposure.filter[:1] == 'r'
+    assert ds.section_id == 'S3'
     assert set( preprocessor.stepfiles.keys() ) == { 'flat', 'linearity' }

     # Make sure that the BSCALE and BZERO keywords got stripped
@@ -42,23 +42,22 @@

     # Flatfielding should have improved the sky noise, though for DECam
     # it looks like this is a really small effect. I've picked out a
-    # section that's all sky (though it may be in the wings of a bright
-    # star, but, whatever).
+    # section that's all sky.
     # 56 is how much got trimmed from this image
-    rawsec = ds.image.raw_data[ 2226:2267, 267+56:308+56 ]
-    flatsec = ds.image.data[ 2226:2267, 267:308 ]
+    rawsec = ds.image.raw_data[ 1780:1820, 830+56:870+56 ]
+    flatsec = ds.image.data[ 1780:1820, 830:870 ]
     assert flatsec.std() < rawsec.std()

     # Make sure that some bad pixels got masked, but not too many
-    assert np.all( ds.image._flags[ 1390:1400, 1430:1440 ] == 4 )
-    assert np.all( ds.image._flags[ 4085:4093, 1080:1100 ] == 1 )
+    assert np.all( ds.image._flags[ 2756:2773, 991:996 ] == 4 )
+    assert np.all( ds.image._flags[ 0:4095, 57:60 ] == 1 )
     assert ( ds.image._flags != 0 ).sum() / ds.image.data.size < 0.03


     # Make sure that the weight is reasonable
     assert not np.any( ds.image._weight < 0 )
     assert ( ds.image.data[3959:3980, 653:662].std() ==
-             pytest.approx( 1./np.sqrt(ds.image._weight[3959:3980, 653:662]), rel=0.2 ) )
+             pytest.approx( 1./np.sqrt(ds.image._weight[3959:3980, 653:662]), rel=0.05 ) )

     # Make sure that the expected files get written
     try:
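
A note on the zeropoint check touched in test_photo_cal.py: under the usual convention, the magnitude measured in aperture i is mag_i = -2.5*log10(flux_i) + zp + aper_cor[i], which is why the aperture corrections asserted above are negative for small apertures and approach zero for large ones. The WORRY comment is about comparing zp + aper_cors[0] against an independently calibrated aperture zeropoint. A minimal sketch of that comparison, reusing the values asserted in the patch plus a made-up external zeropoint and flux (neither comes from the pipeline):

    import numpy as np

    # Values asserted in the patch for the new DECam image.
    zp = 30.128
    aper_cors = np.array( [ -0.205, -0.035, -0.006, 0. ] )

    # Hypothetical external calibration of the smallest aperture's zeropoint
    # (e.g. a lensgrinder-style calibration against PanSTARRS or DECaPS);
    # this number is made up purely for illustration.
    external_zp_aper0 = 29.95

    # Magnitude in the smallest aperture for a made-up flux, assuming
    # mag = -2.5*log10(flux) + zp + aper_cor.
    flux_aper0 = 12345.0
    mag_aper0 = -2.5 * np.log10( flux_aper0 ) + zp + aper_cors[0]

    # The consistency check the WORRY comment describes: the offset between
    # zp + aper_cor for the first aperture and the external aperture zeropoint.
    offset = ( zp + aper_cors[0] ) - external_zp_aper0
    print( f"zp + aper_cor[0] = {zp + aper_cors[0]:.3f}; offset = {offset:.3f} mag" )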
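
Similarly, the backgrounding and preprocessing hunks lean on two relations: a background-subtracted sky image should sigma-clip to a mean consistent with zero (the patch loosens the allowed scatter from 10 to 25), and in blank sky the pixel scatter should match 1/sqrt(weight) when the weight map stores inverse variance (the patch tightens that tolerance to rel=0.05). A self-contained sketch of both checks on synthetic data; the array names, image size, and noise level are placeholders, not the pipeline's:

    import numpy as np

    rng = np.random.default_rng( 42 )
    sky_sigma = 20.                                     # synthetic sky noise level
    bgsub = rng.normal( 0., sky_sigma, size=(2048, 1024) ).astype( 'f4' )
    weight = np.full_like( bgsub, 1. / sky_sigma**2 )   # inverse-variance weight map

    def sigma_clipped_stats( data, nsigma=3., niter=5 ):
        """Iteratively clip outliers; return (mean, std) of the surviving pixels."""
        good = np.isfinite( data )
        for _ in range( niter ):
            mu, sig = data[good].mean(), data[good].std()
            good &= np.abs( data - mu ) < nsigma * sig
        return data[good].mean(), data[good].std()

    # The residual background should be consistent with zero, with modest scatter.
    mu, sig = sigma_clipped_stats( bgsub )
    assert abs( mu ) < sig
    assert sig < 25.

    # In an empty sky region the pixel scatter should match 1/sqrt(weight).
    region = np.s_[ 1000:1100, 600:700 ]
    assert np.isclose( bgsub[region].std(), 1. / np.sqrt( weight[region] ).mean(), rtol=0.05 )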