diff --git a/models/background.py b/models/background.py
index d3a28463..ee9ab001 100644
--- a/models/background.py
+++ b/models/background.py
@@ -294,8 +294,9 @@ def save( self, filename=None, **kwargs ):
                     f"Variance shape {self.variance.shape} does not match image shape {self.image_shape}"
                 )
-            bggrp.create_dataset( 'counts', data=self.counts )
-            bggrp.create_dataset( 'variance', data=self.variance )
+            opts = dict(compression='gzip', compression_opts=1, chunks=(128, 128))
+            bggrp.create_dataset( 'counts', data=self.counts, **opts )
+            bggrp.create_dataset( 'variance', data=self.variance, **opts )
         elif self.format == 'polynomial':
             raise NotImplementedError('Currently we do not support a polynomial background model. ')
             bggrp.create_dataset( 'coeffs', data=self.counts )
diff --git a/models/base.py b/models/base.py
index ac20b483..fc894f4b 100644
--- a/models/base.py
+++ b/models/base.py
@@ -1,13 +1,10 @@
 import warnings
-import sys
 import os
 import math
 import types
 import hashlib
 import pathlib
-import logging
 import json
-import shutil
 import datetime
 
 from uuid import UUID
diff --git a/tests/conftest.py b/tests/conftest.py
index a3cd2bff..8a614d21 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -111,7 +111,7 @@ def pytest_sessionfinish(session, exitstatus):
                 dbsession.commit()
 
-    verify_archive_database_empty = False  # set to False to avoid spurious errors at end of tests (when debugging)
+    verify_archive_database_empty = True  # set to False to avoid spurious errors at end of tests (when debugging)
 
     if any_objects and verify_archive_database_empty:
         raise RuntimeError('There are objects in the database. Some tests are not properly cleaning up!')
diff --git a/tests/fixtures/datastore_factory.py b/tests/fixtures/datastore_factory.py
index fef12f45..60f241ec 100644
--- a/tests/fixtures/datastore_factory.py
+++ b/tests/fixtures/datastore_factory.py
@@ -168,7 +168,7 @@ def make_datastore(
             if image_cache_path is not None and output_path != image_cache_path:
                 warnings.warn(f'cache path {image_cache_path} does not match output path {output_path}')
         else:
-            cache_base_name = output_path[:-10]  # remove the '.image.fits' part
+            cache_base_name = output_path[:-16]  # remove the '.image.fits.json' part
         ds.cache_base_name = output_path
         SCLogger.debug(f'Saving image to cache at: {output_path}')
         use_cache = True  # the two other conditions are true to even get to this part...
diff --git a/tests/models/test_background.py b/tests/models/test_background.py
index fc58d3b6..dbd441c2 100644
--- a/tests/models/test_background.py
+++ b/tests/models/test_background.py
@@ -1,3 +1,8 @@
+import os
+
+import sep
+import time
+
 import pytest
 import numpy as np
 import h5py
@@ -72,9 +77,15 @@ def test_save_load_backgrounds(decam_raw_image, code_version):
         with pytest.raises(RuntimeError, match='Counts shape .* does not match image shape .*'):
             b2.save()
 
-        b2.counts = np.random.normal(bg_mean, 1, size=image.data.shape)
-        b2.variance = np.random.normal(bg_var, 1, size=image.data.shape)
+        # use actual background measurements so we can get a realistic estimate of the compression
+        back = sep.Background(image.data)
+        b2.counts = back.back()
+        b2.variance = back.rms() ** 2
+
+        t0 = time.perf_counter()
         b2.save()
+        # print(f'Background save time: {time.perf_counter() - t0:.3f} s')
+        # print(f'Background file size: {os.path.getsize(b2.get_fullpath()) / 1024 ** 2:.3f} MB')
 
         # check the filename contains the provenance hash
         assert prov.id[:6] in b2.get_fullpath()
diff --git a/util/cache.py b/util/cache.py
index 4f4611f4..d93de5be 100644
--- a/util/cache.py
+++ b/util/cache.py
@@ -8,6 +8,7 @@
 # ======================================================================
 # Functions for copying FileOnDisk objects to/from cache
 
+
 def copy_to_cache(FoD, cache_dir, filepath=None):
     """Save a copy of the object (and, potentially, associated files) into a cache directory.
@@ -79,6 +80,7 @@
     return json_filepath
 
+
 def copy_list_to_cache(obj_list, cache_dir, filepath=None):
     """Copy a correlated list of objects to the cache directory.
@@ -137,6 +139,7 @@
     return json_filepath
 
+
 def copy_from_cache(cls, cache_dir, filepath):
     """Copy and reconstruct an object from the cache directory.
@@ -200,6 +203,7 @@
     return output
 
+
 def copy_list_from_cache(cls, cache_dir, filepath):
     """Copy and reconstruct a list of objects from the cache directory.