Migrate for formatter v2
timj committed Jun 28, 2024
1 parent 2c6965c commit 67af938
Showing 4 changed files with 94 additions and 118 deletions.
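This commit migrates the changed formatters from the original Formatter interface to FormatterV2. For orientation, the sketch below shows the rough shape of a migrated formatter, built only from names that appear in this diff (FormatterV2, can_read_from_local_file, default_extension, supported_extensions, read_from_local_file, write_local_file); anything beyond those names, including the behavior of the surrounding butler machinery, is an assumption rather than a definitive description of the API.

from typing import Any

import lsst.afw.image
from lsst.daf.butler import FormatterV2
from lsst.resources import ResourcePath


class SketchFitsFormatter(FormatterV2):
    # Declarative class attributes replace the v1 camelCase ones
    # (extension -> default_extension, supportedExtensions -> supported_extensions).
    can_read_from_local_file = True
    default_extension = ".fits"
    supported_extensions = frozenset({".fits", ".fits.gz", ".fits.fz", ".fz", ".fit"})

    def read_from_local_file(self, local_uri: ResourcePath, component: str | None = None) -> Any:
        # FormatterV2 hands the formatter a URI that is guaranteed to be local,
        # instead of the v1 reliance on self.fileDescriptor.location.path.
        return lsst.afw.image.ImageF(local_uri.ospath)

    def write_local_file(self, in_memory_dataset: Any, uri: ResourcePath) -> None:
        # Replaces the v1 write(); the caller is assumed to handle any transfer
        # of the written local file to its final location.
        in_memory_dataset.writeFits(uri.ospath)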
39 changes: 16 additions & 23 deletions python/lsst/obs/base/_fitsRawFormatterBase.py
@@ -23,12 +23,14 @@

import logging
from abc import abstractmethod
from typing import Any

import lsst.afw.fits
import lsst.afw.geom
import lsst.afw.image
from astro_metadata_translator import ObservationInfo, fix_header
from lsst.daf.butler import FileDescriptor
from lsst.daf.butler import FileDescriptor, FormatterNotImplementedError
from lsst.resources import ResourcePath
from lsst.utils.classes import cached_getter

from .formatters.fitsExposure import FitsImageFormatterBase, standardizeAmplifierParameters
@@ -43,6 +45,8 @@ class FitsRawFormatterBase(FitsImageFormatterBase):
FITS files.
"""

ReaderClass = lsst.afw.image.ImageFitsReader

# This has to be explicit until we fix camera geometry in DM-20746
wcsFlipX = False
"""Control whether the WCS is flipped in the X-direction (`bool`)"""
@@ -120,7 +124,7 @@ def readImage(self):
image : `~lsst.afw.image.Image`
In-memory image component.
"""
return lsst.afw.image.ImageU(self.fileDescriptor.location.path)
return lsst.afw.image.ImageU(self._reader_uri.ospath)

def isOnSky(self):
"""Boolean to determine if the exposure is thought to be on the sky.
Expand Down Expand Up @@ -160,7 +164,7 @@ def readMetadata(self):
metadata : `~lsst.daf.base.PropertyList`
Header metadata.
"""
md = lsst.afw.fits.readMetadata(self.fileDescriptor.location.path)
md = self.reader.readMetadata()
fix_header(md, translator_class=self.translatorClass)
return md

@@ -180,12 +184,12 @@ def makeVisitInfo(self):
Structured metadata about the observation.
"""
visit_info = MakeRawVisitInfoViaObsInfo.observationInfo2visitInfo(self.observationInfo)
if self.dataId and "exposure" in self.dataId:
if self.data_id and "exposure" in self.data_id:
# Special case exposure existing for this dataset type.
# Want to ensure that the id stored in the visitInfo matches that
# from the dataId from butler, regardless of what may have come
# from the ObservationInfo. In some edge cases they might differ.
exposure_id = self.dataId["exposure"]
exposure_id = self.data_id["exposure"]
if exposure_id != visit_info.id:
visit_info = visit_info.copyWith(id=exposure_id)

@@ -330,14 +334,13 @@ def readFull(self):
self.getDetector(self.observationInfo.detector_num),
)
if amplifier is not None:
reader = lsst.afw.image.ImageFitsReader(self.fileDescriptor.location.path)
amplifier_isolator = lsst.afw.cameraGeom.AmplifierIsolator(
amplifier,
reader.readBBox(),
self.reader.readBBox(),
detector,
)
subimage = amplifier_isolator.transform_subimage(
reader.read(bbox=amplifier_isolator.subimage_bbox)
self.reader.read(bbox=amplifier_isolator.subimage_bbox)
)
exposure = lsst.afw.image.makeExposure(lsst.afw.image.makeMaskedImage(subimage))
exposure.setDetector(amplifier_isolator.make_detector())
@@ -347,20 +350,8 @@
self.attachComponentsFromMetadata(exposure)
return exposure

def write(self, inMemoryDataset):
"""Write a Python object to a file.
Parameters
----------
inMemoryDataset : `object`
The Python object to store.
Returns
-------
path : `str`
The `URI` where the primary file is stored.
"""
raise NotImplementedError("Raw data cannot be `put`.")
def write_local_file(self, in_memory_dataset: Any, uri: ResourcePath) -> None:
raise FormatterNotImplementedError("Raw data cannot be `put`.")

@property
def observationInfo(self):
@@ -369,7 +360,9 @@
read-only).
"""
if self._observationInfo is None:
location = self.fileDescriptor.location
# Use the primary path rather than any local variant that the
# formatter might be using.
location = self.file_descriptor.location
path = location.path if location is not None else None
self._observationInfo = ObservationInfo(
self.metadata, translator_class=self.translatorClass, filename=path
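The readImage and readMetadata changes above rely on a reader object that the shared base class (fitsExposure.py, below) now builds lazily from ReaderClass and caches against the current local file. A minimal, self-contained sketch of that caching pattern, using a toy stand-in rather than the real lsst.afw.image reader classes:

class _FakeReader:
    """Stand-in for an afw FITS reader; it only records the path it would read."""

    def __init__(self, path):
        self.path = path

    def readMetadata(self):
        return {"PATH": self.path}


class CachedReaderSketch:
    # Real formatters point this at e.g. lsst.afw.image.ImageFitsReader.
    ReaderClass = _FakeReader

    _reader = None
    _reader_uri = None

    @property
    def reader(self):
        # Build the reader on first access and reuse it for later reads
        # of the same file.
        if self._reader is None:
            self._reader = self.ReaderClass(self._reader_uri)
        return self._reader

    def set_local_file(self, path):
        # A new local file invalidates any previously cached reader.
        self._reader_uri = path
        self._reader = None


sketch = CachedReaderSketch()
sketch.set_local_file("/tmp/raw.fits")
print(sketch.reader.readMetadata())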
106 changes: 54 additions & 52 deletions python/lsst/obs/base/formatters/fitsExposure.py
@@ -30,7 +30,7 @@
import warnings
from abc import abstractmethod
from collections.abc import Set
from typing import ClassVar
from typing import Any, ClassVar

from lsst.afw.cameraGeom import AmplifierGeometryComparison, AmplifierIsolator
from lsst.afw.image import (
@@ -45,12 +45,13 @@
# Needed for ApCorrMap to resolve properly
from lsst.afw.math import BoundedField # noqa: F401
from lsst.daf.base import PropertySet
from lsst.daf.butler import Formatter
from lsst.daf.butler import FormatterV2
from lsst.resources import ResourcePath
from lsst.utils.classes import cached_getter
from lsst.utils.introspection import find_outside_stacklevel


class FitsImageFormatterBase(Formatter):
class FitsImageFormatterBase(FormatterV2):
"""Base class formatter for image-like storage classes stored via FITS.
Notes
@@ -64,12 +65,29 @@ class FitsImageFormatterBase(Formatter):
(even if just to disable them by raising an exception).
"""

extension = ".fits"
supportedExtensions: ClassVar[Set[str]] = frozenset({".fits", ".fits.gz", ".fits.fz", ".fz", ".fit"})
can_read_from_local_file = True
default_extension = ".fits"
supported_extensions: ClassVar[Set[str]] = frozenset({".fits", ".fits.gz", ".fits.fz", ".fz", ".fit"})

unsupportedParameters: ClassVar[Set[str]] = frozenset()
unsupported_parameters: ClassVar[Set[str]] = frozenset()
"""Support all parameters."""

_reader = None
_reader_uri: ResourcePath | None = None

ReaderClass: type # must be set by concrete subclasses

@property
def reader(self):
"""The reader object that backs this formatter's read operations.
This is computed on first use and then cached. It should never be
accessed when writing. Currently assumes a local file.
"""
if self._reader is None:
self._reader = self.ReaderClass(self._reader_uri.ospath)
return self._reader

@property
@cached_getter
def checked_parameters(self):
@@ -81,14 +99,20 @@ def checked_parameters(self):
accessed when writing. Subclasses that need additional checking should
delegate to `super` and then check the result before returning it.
"""
parameters = self.fileDescriptor.parameters
parameters = self.file_descriptor.parameters
if parameters is None:
parameters = {}
self.fileDescriptor.storageClass.validateParameters(parameters)
self.file_descriptor.storageClass.validateParameters(parameters)
return parameters

def read(self, component=None):
def read_from_local_file(self, local_uri: ResourcePath, component: str | None = None) -> Any:
# Docstring inherited.
# The methods that do the reading currently assume a local file and
# that the file descriptor refers to one. With FormatterV2 the file
# descriptor does not necessarily refer to a local file, so record the
# local URI we were handed here.
self._reader_uri = local_uri
self._reader = None # Reset any cache.
if component is not None:
return self.readComponent(component)
return self.readFull()
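With this split, the base class owns the local-file bookkeeping and the component dispatch, while subclasses keep implementing readComponent and readFull. A compact stand-alone sketch of that division of labor (simplified stand-ins, not the actual butler or afw classes):

class SketchImageFormatterBase:
    def read_from_local_file(self, local_uri, component=None):
        # Remember which local file we are reading, drop any reader cached
        # for a previous file, then dispatch.
        self._reader_uri = local_uri
        self._reader = None
        if component is not None:
            return self.readComponent(component)
        return self.readFull()


class SketchImageFormatter(SketchImageFormatterBase):
    def readComponent(self, component):
        return f"{component} read from {self._reader_uri}"

    def readFull(self):
        return f"full image read from {self._reader_uri}"


fmt = SketchImageFormatter()
print(fmt.read_from_local_file("/tmp/image.fits", component="wcs"))
print(fmt.read_from_local_file("/tmp/image.fits"))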
@@ -219,18 +243,7 @@ class StandardFitsImageFormatterBase(ReaderFitsImageFormatterBase):
"""

supportedWriteParameters = frozenset({"recipe"})
ReaderClass: type # must be set by concrete subclasses

@property
@cached_getter
def reader(self):
"""The reader object that backs this formatter's read operations.
This is computed on first use and then cached. It should never be
accessed when writing.
"""
return self.ReaderClass(self.fileDescriptor.location.path)
supported_write_parameters = frozenset({"recipe"})

def readComponent(self, component):
# Docstring inherited.
@@ -249,31 +262,20 @@ def readFull(self):
# Docstring inherited.
return self.reader.read(**self.checked_parameters)

def write(self, inMemoryDataset):
"""Write a Python object to a file.
Parameters
----------
inMemoryDataset : `object`
The Python object to store.
"""
# Update the location with the formatter-preferred file extension
self.fileDescriptor.location.updateExtension(self.extension)
outputPath = self.fileDescriptor.location.path

def write_local_file(self, in_memory_dataset: Any, uri: ResourcePath) -> None:
# check to see if we have a recipe requested
recipeName = self.writeParameters.get("recipe")
recipe = self.getImageCompressionSettings(recipeName)
recipeName = self.write_parameters.get("recipe")
recipe = self.get_image_compression_settings(recipeName)
if recipe:
# Cannot construct a PropertySet from a hierarchical dict,
# but we can update one.
ps = PropertySet()
ps.update(recipe)
inMemoryDataset.writeFitsWithOptions(outputPath, options=ps)
in_memory_dataset.writeFitsWithOptions(uri.ospath, options=ps)
else:
inMemoryDataset.writeFits(outputPath)
in_memory_dataset.writeFits(uri.ospath)

def getImageCompressionSettings(self, recipeName):
def get_image_compression_settings(self, recipeName):
"""Retrieve the relevant compression settings for this recipe.
Parameters
@@ -290,17 +292,17 @@ def getImageCompressionSettings(self, recipeName):
# if no recipe has been provided and there is no default
# return immediately
if not recipeName:
if "default" not in self.writeRecipes:
if "default" not in self.write_recipes:
return {}
recipeName = "default"

if recipeName not in self.writeRecipes:
if recipeName not in self.write_recipes:
raise RuntimeError(f"Unrecognized recipe option given for compression: {recipeName}")

recipe = self.writeRecipes[recipeName]
recipe = self.write_recipes[recipeName]

# Set the seed based on dataId
seed = hash(tuple(self.dataId.required.items())) % 2**31
seed = hash(tuple(self.data_id.required.items())) % 2**31
for plane in ("image", "mask", "variance"):
if plane in recipe and "scaling" in recipe[plane]:
scaling = recipe[plane]["scaling"]
@@ -310,7 +312,7 @@
return recipe

@classmethod
def validateWriteRecipes(cls, recipes):
def validate_write_recipes(cls, recipes):
"""Validate supplied recipes for this formatter.
The recipes are supplemented with default values where appropriate.
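The write path above looks a recipe up by name from write_parameters and write_recipes, then fills in a per-data-ID seed for each plane's scaling section before converting the dict to a PropertySet. The snippet below illustrates what such a recipe mapping and the seed computation might look like; only the image/mask/variance planes and the scaling/seed keys are taken from this diff, while the algorithm entries, the data ID values, and the exact conditions under which the seed is overwritten are assumptions.

write_recipes = {
    "default": {
        # One entry per plane; only the "scaling" section is consulted by
        # the seed-setting loop in get_image_compression_settings.
        "image": {"scaling": {"algorithm": "NONE", "seed": 0}},
        "mask": {"scaling": {"algorithm": "NONE", "seed": 0}},
        "variance": {"scaling": {"algorithm": "NONE", "seed": 0}},
    },
}

# A per-data-ID seed, mirroring
# seed = hash(tuple(self.data_id.required.items())) % 2**31 above,
# using illustrative data ID values.
data_id_items = (("instrument", "HSC"), ("exposure", 903334), ("detector", 42))
seed = hash(data_id_items) % 2**31
for plane in ("image", "mask", "variance"):
    write_recipes["default"][plane]["scaling"]["seed"] = seed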
@@ -599,20 +601,20 @@ def _fixFilterLabels(self, file_filter_label, should_be_standardized=None):
missing = []
band = None
physical_filter = None
if "band" in self.dataId.dimensions.names:
band = self.dataId.get("band")
if "band" in self.data_id.dimensions.names:
band = self.data_id.get("band")
# band isn't in the data ID; is that just because this data ID
# hasn't been filled in with everything the Registry knows, or
# because this dataset is never associated with a band?
if band is None and not self.dataId.hasFull() and "band" in self.dataId.dimensions.implied:
if band is None and not self.data_id.hasFull() and "band" in self.data_id.dimensions.implied:
missing.append("band")
if "physical_filter" in self.dataId.dimensions.names:
physical_filter = self.dataId.get("physical_filter")
if "physical_filter" in self.data_id.dimensions.names:
physical_filter = self.data_id.get("physical_filter")
# Same check as above for band, but for physical_filter.
if (
physical_filter is None
and not self.dataId.hasFull()
and "physical_filter" in self.dataId.dimensions.implied
and not self.data_id.hasFull()
and "physical_filter" in self.data_id.dimensions.implied
):
missing.append("physical_filter")
if should_be_standardized is None:
@@ -626,7 +628,7 @@ def _fixFilterLabels(self, file_filter_label, should_be_standardized=None):
# predates filter standardization.
if not should_be_standardized:
warnings.warn(
f"Data ID {self.dataId} is missing (implied) value(s) for {missing}; "
f"Data ID {self.data_id} is missing (implied) value(s) for {missing}; "
"the correctness of this Exposure's FilterLabel cannot be guaranteed. "
"Call Registry.expandDataId before Butler.get to avoid this.",
# Report the warning from outside of middleware or the
@@ -646,7 +648,7 @@ def _fixFilterLabels(self, file_filter_label, should_be_standardized=None):
# in whatever code produced the Exposure (though it may be one that
# has been fixed since the file was written).
warnings.warn(
f"Reading {self.fileDescriptor.location} with data ID {self.dataId}: "
f"Reading {self.file_descriptor.location} with data ID {self.data_id}: "
f"filter label mismatch (file is {file_filter_label}, data ID is "
f"{data_id_filter_label}). This is probably a bug in the code that produced it.",
stacklevel=find_outside_stacklevel(
54 changes: 14 additions & 40 deletions python/lsst/obs/base/formatters/fitsGeneric.py
@@ -21,56 +21,30 @@

__all__ = ("FitsGenericFormatter",)

import os.path
from typing import Any

from lsst.daf.butler.formatters.file import FileFormatter
from lsst.daf.butler import FormatterV2
from lsst.resources import ResourcePath


class FitsGenericFormatter(FileFormatter):
class FitsGenericFormatter(FormatterV2):
"""Interface for reading and writing objects that support the standard
afw I/O readFits/writeFits methods.
"""

supportedExtensions = frozenset({".fits", ".fits.gz", ".fits.fz", ".fz", ".fit"})
extension = ".fits"
supported_extensions = frozenset({".fits", ".fits.gz", ".fits.fz", ".fz", ".fit"})
default_extension = ".fits"
can_read_from_local_file = True

def _readFile(self, path, pytype):
"""Read a file from the path in FITS format.
Parameters
----------
path : `str`
Path to use to open the file.
pytype : `class`
Class to use to read the FITS file.
Returns
-------
data : `object`
Instance of class `pytype` read from FITS file. None
if the file could not be opened.
"""
if not os.path.exists(path):
return None
if self.fileDescriptor.parameters:
def read_from_local_file(self, local_uri: ResourcePath, component: str | None = None) -> Any:
pytype = self.file_descriptor.storageClass.pytype
if self.file_descriptor.parameters:
try:
return pytype.readFitsWithOptions(path, options=self.fileDescriptor.parameters)
return pytype.readFitsWithOptions(local_uri.ospath, options=self.file_descriptor.parameters)
except AttributeError:
pass

return pytype.readFits(path)

def _writeFile(self, inMemoryDataset):
"""Write the in memory dataset to file on disk.
Parameters
----------
inMemoryDataset : `object`
Object to serialize.
return pytype.readFits(local_uri.ospath)

Raises
------
Exception
The file could not be written.
"""
inMemoryDataset.writeFits(self.fileDescriptor.location.path)
def write_local_file(self, in_memory_dataset: Any, uri: ResourcePath) -> None:
in_memory_dataset.writeFits(uri.ospath)
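FitsGenericFormatter now delegates entirely to the Python type registered with the storage class: readFitsWithOptions(path, options=...) when read parameters are present (falling back if the type does not provide it), readFits(path) otherwise, and writeFits(path) on write. A toy sketch of a hypothetical type satisfying this duck-typed interface:

class MyFitsSerializable:
    """Toy object exposing the afw-style FITS I/O hooks the formatter expects."""

    def __init__(self, payload):
        self.payload = payload

    @classmethod
    def readFits(cls, path):
        # A real implementation would parse the FITS file; here we just
        # record where the data came from.
        return cls(payload=f"contents of {path}")

    @classmethod
    def readFitsWithOptions(cls, path, options):
        # `options` carries the butler read parameters.
        obj = cls.readFits(path)
        obj.options = dict(options)
        return obj

    def writeFits(self, path):
        # A real implementation would write FITS; this toy writes text.
        with open(path, "w") as out:
            out.write(self.payload)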
