Respond to PR comments and clean up rebase mistakes.
DinoBektesevic committed Jan 23, 2024
1 parent c2ec4dc commit 3da0f39
Showing 9 changed files with 55 additions and 165 deletions.
4 changes: 2 additions & 2 deletions src/kbmod/image_collection.py
@@ -180,7 +180,7 @@ def read(cls, *args, format=None, units=None, descriptions=None, **kwargs):
Image Collection
"""
metadata = Table.read(*args, format=format, units=units, descriptions=descriptions, **kwargs)
metadata["wcs"] = [WCS(w) for w in metadata["wcs"] if w is not None]
metadata["wcs"] = [WCS(w) if w is not None else None for w in metadata["wcs"]]
metadata["bbox"] = [json.loads(b) for b in metadata["bbox"]]
metadata["config"] = [json.loads(c) for c in metadata["config"]]
meta = json.loads(
@@ -400,7 +400,7 @@ def get_standardizers(self, idxs, **kwargs):
Parameters
----------
idx : `int` or `iterable`
idxs : `int` or `iterable`
Index of the row for which to retrieve the Standardizer.
**kwargs : `dict`
Keyword arguments are passed onto the constructors of the retrieved
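The wcs fix above is about row alignment: the old comprehension silently dropped None entries and shortened the column, while the new one keeps a None placeholder for every row. A minimal, self-contained illustration of the difference (the input list is made up, not taken from the repository):

from astropy.wcs import WCS

raw = [{}, None, {}]  # stand-ins for serialized WCS headers; one row has no WCS

old = [WCS(w) for w in raw if w is not None]            # 2 entries: rows no longer line up
new = [WCS(w) if w is not None else None for w in raw]  # 3 entries: None kept as a placeholder

assert len(old) == 2 and len(new) == 3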
2 changes: 1 addition & 1 deletion src/kbmod/standardizers/butler_standardizer.py
@@ -20,7 +20,7 @@


def deferred_import(module, name=None):
"""Imports module/class/function/name as ``name`` into global modules.
"""Imports module/class/function/name as ``name`` into global modules.
If module/class/function/name already exists does nothing.
Intended for importing a large module or a library only when needed, as to
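Only the docstring of ``deferred_import`` appears in this hunk; as a rough sketch of what such a helper typically looks like (an assumption, not the repository's implementation):

import importlib
import sys


def deferred_import(module, name=None):
    # Import ``module`` and expose it under ``name``; do nothing if it is already imported.
    name = module if name is None else name
    if name in sys.modules:
        return
    sys.modules[name] = importlib.import_module(module)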
11 changes: 6 additions & 5 deletions src/kbmod/standardizers/fits_standardizers/fits_standardizer.py
@@ -65,7 +65,7 @@ class FitsStandardizer(Standardizer):
The primary HDU.
processable : `list`
Any additional extensions marked by the standardizer for further
processing. Does not include the primary HDU if it doesn't contain any
processing. Does not include the primary HDU if it doesn't contain any
image data. Contains at least 1 entry.
"""

@@ -76,7 +76,7 @@ class FitsStandardizer(Standardizer):
configClass = FitsStandardizerConfig
"""The default standardizer configuration."""

extensions = [".fit", ".fits", ".fits.fz"]
valid_extensions = [".fit", ".fits", ".fits.fz"]
"""File extensions this processor can handle."""

@classmethod
@@ -111,7 +111,7 @@ def resolveFromPath(cls, tgt):
if not extensions:
return False, {}

if extensions[-1] in cls.extensions:
if extensions[-1] in cls.valid_extensions:
try:
hdulist = fits.open(tgt)
except OSError:
@@ -169,13 +169,14 @@ def __init__(self, location=None, hdulist=None, config=None, **kwargs):
raise ValueError("Expected location or HDUList, got neither.")

if hdulist is None and (location == ":memory:" or not isfile(location)):
raise FileNotFoundError("Given location is not a file, but no " "hdulist is given.")
raise FileNotFoundError("Given location is not a file, but no hdulist is given.")

# Otherwise it's pretty normal
# - if location is ok and no HDUList exists, read location
# - if HDUList exists and location doesn't, try to get loc from it, put
# :memory: otherwise
# - if hdulist and location exist - nothing to do.
# The object will attempt to close the hdulist when it gets GC'd
if location is not None and hdulist is None:
hdulist = fits.open(location)
elif location is None and hdulist is not None:
@@ -360,7 +361,7 @@ def translateHeader(self, *args, **kwargs):
metadata : `dict`
Required and optional metadata.
"""
raise NotImplementedError("This FitsStandardizer doesn't implement a " "header standardizer.")
raise NotImplementedError("This FitsStandardizer doesn't implement a header standardizer.")

def standardizeMetadata(self):
metadata = self.translateHeader()
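The comment block in the ``__init__`` hunk above spells out how ``location`` and ``hdulist`` are reconciled; the tail of that logic is elided by the hunk, so here is a standalone sketch of the decision table it describes (an illustration under those stated rules, not the literal implementation):

from os.path import isfile

from astropy.io import fits


def resolve_inputs(location=None, hdulist=None):
    # Sketch of the rules spelled out in the comment above.
    if location is None and hdulist is None:
        raise ValueError("Expected location or HDUList, got neither.")
    if hdulist is None and (location == ":memory:" or not isfile(location)):
        raise FileNotFoundError("Given location is not a file, but no hdulist is given.")
    if location is not None and hdulist is None:
        hdulist = fits.open(location)                 # read the file from disk
    elif location is None and hdulist is not None:
        location = hdulist.filename() or ":memory:"   # fall back to the in-memory marker
    return location, hdulist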
84 changes: 0 additions & 84 deletions src/kbmod/standardizers/fits_standardizers/rubin_scipip_fits.py

This file was deleted.

30 changes: 19 additions & 11 deletions src/kbmod/standardizers/standardizer.py
@@ -22,8 +22,6 @@
class ConfigurationError(Exception):
"""Error that is raised when configuration parameters contain a logical error."""

pass


class StandardizerConfig:
"""Base class for Standardizer configuration.
@@ -212,7 +210,7 @@ def get(cls, tgt, force=None, config=None, **kwargs):
See ``self.registry`` for a list of registered Standardizers.
When the correct standardizer is not known, the target can be
provided. The Standardizer with the highest priority, that also marks
provided. The Standardizer with the highest priority that marks
the target as processable, will be returned.
At least one of either the target or the standardizer parameters have
@@ -222,15 +220,25 @@
----------
tgt : any
The target to be standardized.
standardizer : `str` or `cls`
Force the use of the given Standardizer. The given name must be a
part of the registered Standardizers. If a class reference is
given, returns it (no-op).
force : `str` or `cls`, optional
Force the use of the given `Standardizer`. The given name must be a
part of the registered `Standardizers` or a callable. When no
`Standardizer` is forced, all registered standardizers are tested
and the highest priority `Standardizer` is selected.
config : `~StandardizerConfig`, `dict` or `None`, optional
Standardizer configuration or dictionary containing the config
parameters for standardization. When `None` default values for the
appropriate `Standardizer` will be used.
**kwargs : `dict`, optional
Any additional, optional, keyword arguments are passed into the
`Standardizer`. See relevant `Standardizer` documentation for
details.
Returns
-------
standardizer : `cls`
Standardizer class that can process the given upload.
standardizer : `object`
Standardizer instance forced, or selected as the most appropriate
one, to process the given target.
Raises
------
@@ -369,8 +377,8 @@ def __init_subclass__(cls, **kwargs):
# parameters as available standardizers. Note the specific
# standardizer has to be imported before this class since the
# mechanism is triggered at definition time.
name = getattr(cls, "name", False)
if name and name is not None:
name = getattr(cls, "name", None)
if name is not None:
super().__init_subclass__(**kwargs)
Standardizer.registry[cls.name] = cls

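Taken together, the rewritten ``get`` docstring and the ``__init_subclass__`` tweak above describe the factory pattern: ``get`` either auto-selects the highest-priority registered standardizer that marks the target processable, or returns an instance of the one forced by name or class reference, and subclasses register themselves simply by defining a ``name``. A hypothetical usage sketch (the import path, target path, and ``KBMODV1`` class name are assumptions for illustration):

from kbmod.standardizers import Standardizer, KBMODV1

# Auto-resolve: every registered standardizer is tested, highest priority wins.
std = Standardizer.get("path/to/some_image.fits.fz")

# Force a particular standardizer by registered name or by class reference.
std = Standardizer.get("path/to/some_image.fits.fz", force=KBMODV1)

# Subclasses self-register at definition time because they define ``name``:
print(Standardizer.registry)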
32 changes: 12 additions & 20 deletions tests/test_butlerstd.py
@@ -65,7 +65,7 @@ class MockButler:
def __init__(self, root, ref=None, mock_images_f=None):
self.datastore = Datastore(root)
self.registry = Registry()
self.current_ref = None
self.current_ref = ref
self.mockImages = mock_images_f

def getURI(self, ref, collections=None):
@@ -225,24 +225,11 @@ def test_standardize(self):
self.assertAlmostEqual(standardized["meta"]["dec"][0], fits[1].header["CRVAL2"], 1)

# compare standardized images
np.testing.assert_equal(
[
fits["IMAGE"].data,
],
standardized["science"],
)
np.testing.assert_equal(
[
fits["VARIANCE"].data,
],
standardized["variance"],
)
np.testing.assert_equal(
[
fits["MASK"].data,
],
standardized["mask"],
)
# fmt: off
np.testing.assert_equal([fits["IMAGE"].data,], standardized["science"])
np.testing.assert_equal([fits["VARIANCE"].data,], standardized["variance"])
np.testing.assert_equal([fits["MASK"].data,], standardized["mask"])
# fmt: on

# these are not easily comparable so just assert they exist
self.assertTrue(standardized["meta"]["wcs"])
@@ -255,7 +242,12 @@ def test_roundtrip(self):
standardized = std.standardize()

std2 = ButlerStandardizer(**standardized["meta"], butler=self.butler)
self.assertIsInstance(std, ButlerStandardizer)
self.assertIsInstance(std2, ButlerStandardizer)

standardized2 = std2.standardize()
# TODO: I got to come up with some reasonable way of comparing this
for k in ['location', 'bbox', 'mjd', 'filter', 'id', 'exp_id', 'ra', 'dec']:
self.assertEqual(standardized["meta"][k], standardized2["meta"][k])

def mock_kbmodv1like_bitmasking(self, mockedexp):
"""Assign each flag that exists to a pixel, standardize, then expect
10 changes: 0 additions & 10 deletions tests/test_collection.py

This file was deleted.

12 changes: 2 additions & 10 deletions tests/test_standardizer.py
@@ -53,11 +53,9 @@ def __init__(self, *args, required_flag, optional_flag=False, **kwargs):
def translateHeader(self):
# invoke the parent functionality to standardize the default values
metadata = super().translateHeader()
metadata["required_flag"] = False
if self.required_flag:
metadata["required_flag"] = True
metadata["required_flag"] = self.required_flag
if self.optional_flag:
metadata["optional_flag"] = True
metadata["optional_flag"] = self.optional_flag
return metadata


@@ -76,8 +74,6 @@ def tearDown(self):
MyStd.priority = 3
warnings.resetwarnings()

# self.fits.close(output_verify="ignore")

def test_kwargs_to_init(self):
"""Test kwargs are correctly passed from top-level Standardizer to the
underlying standardizer implementation."""
@@ -159,10 +155,6 @@ def setUp(self):
self.fits = FitsFactory.mock_fits(spoof_data=True)

def tearDown(self):
# Note that np.int32 in setUp is necessary because astropy will raise
# throw a RuntimeError here otherwise. If we gave it a CompImageHDU, it
# expects an 32bit int as data and can't handle getting anything else
# See: https://docs.astropy.org/en/stable/io/fits/usage/image.html
self.fits.close(output_verify="ignore")

def test_init_direct(self):
35 changes: 13 additions & 22 deletions tests/utils/mock_fits.py
@@ -49,7 +49,7 @@ class MockFitsFileFactory(abc.ABC):
Parameters
----------
archive_name : `str`
Name of the TAR archive in ``data`` dir ontaining raw data.
Name of the TAR archive in ``data`` dir containing raw data.
fname : `str`
Filename within the archive that contains the wanted raw data.
compression : `str`, optional
@@ -235,21 +235,12 @@ class DECamImdiffFactory(MockFitsFileFactory):
def __init__(self, archive_name="decam_imdiff_headers.ecsv.tar.bz2", fname="decam_imdiff_headers.ecsv"):
super().__init__(archive_name, fname)

self.hdus = [
PrimaryHDU,
]
self.hdus.extend(
[
CompImageHDU,
]
* 3
)
self.hdus.extend(
[
BinTableHDU,
]
* 12
)
# don't let Black have its way with these lines because it's a massacre
# fmt: off
self.hdus = [PrimaryHDU, ]
self.hdus.extend([CompImageHDU, ] * 3)
self.hdus.extend([BinTableHDU, ] * 12)
# fmt: on

@property
def hdu_types(self):
@@ -266,13 +257,13 @@ def spoof_data(self, hdul):
# error. Eventually it is possible that one of them could want to read
# the PSF HDU, which we will then have to spoof correctly.
# On top of that AstroPy will, rightfully, be very opinionated about
# checking metadata of the data matches the data itself (f.e. NAIXS
# kwargs) and will throw warnings and errors. We only have the header
# raw data because storing data is too much, so now we have to reverse
# engineer, on a per-standardizer level, the data, taking care we don't
# checking metadata of the data matches the data itself and will throw
# warnings and errors. We have to match data type using dtypes because
# they default to 64bit representation in pure Python as well as number
# of columns and rows - even if we leave the HDU data itself empty!
# Of course, storing data takes too much space, so now we have to reverse
# engineer the data, on a per-standardizer level, taking care we don't
# hit any of these roadblocks.
# For example, it does not appear possible to write out a fits file
# where NAXIS keys do not match the data layout.
empty_array = np.zeros((5, 5), np.float32)
hdul["IMAGE"].data = empty_array
hdul["VARIANCE"].data = empty_array
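A quick illustration of the dtype point made in the rewritten comment above: without an explicit dtype NumPy allocates 64-bit data, which would disagree with a header describing 32-bit image pixels, hence the explicit ``np.float32`` in the spoofed arrays.

import numpy as np

print(np.zeros((5, 5)).dtype)              # float64 - the Python/NumPy default
print(np.zeros((5, 5), np.float32).dtype)  # float32 - matches the spoofed image HDUs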
