diff --git a/src/kbmod/__init__.py b/src/kbmod/__init__.py
index c816ba79e..b3bc7c267 100644
--- a/src/kbmod/__init__.py
+++ b/src/kbmod/__init__.py
@@ -5,7 +5,6 @@
 except ImportError:
     warnings.warn("Unable to determine the package version. "
                   "This is likely a broken installation.")
-from .standardizers import *
 from . import (
     analysis,
     analysis_utils,
@@ -15,7 +14,8 @@
     jointfit_functions,
     result_list,
     run_search,
-    standardizer,
 )
-
+from .standardizers import *
+from .standardizer import Standardizer
+from .image_collection import ImageCollection
diff --git a/src/kbmod/image_collection.py b/src/kbmod/image_collection.py
index 140fcc1f5..94e90189d 100644
--- a/src/kbmod/image_collection.py
+++ b/src/kbmod/image_collection.py
@@ -6,18 +6,24 @@
 import os
 import glob
 import json
+import time
 
+import astropy.units as u
 from astropy.io import fits
 from astropy.time import Time
 from astropy.table import Table
 from astropy.wcs import WCS
 from astropy.utils import isiterable
+from astropy.coordinates import SkyCoord
 
 import numpy as np
 
-from kbmod.file_utils import FileUtils
-from kbmod.search import pixel_pos, layered_image
+from kbmod.search import image_stack, stack_search
 from kbmod.standardizer import Standardizer
+from kbmod.analysis_utils import PostProcess
+
+
+__all__ = ["ImageCollection", ]
 
 
 class ImageCollection:
@@ -134,14 +140,14 @@ def __init__(self, metadata, standardizers=None):
 
         if standardizers is not None:
             self.data.meta["n_entries"] = len(standardizers)
-            self._standardizers = standardizers
+            self._standardizers = np.array(standardizers)
         elif metadata.meta and "n_entries" in metadata.meta:
             n_entries = metadata.meta["n_entries"]
-            self._standardizers = [None]*n_entries
+            self._standardizers = np.full((n_entries, ), None)
         else:
             n_entries = len(np.unique(metadata["location"]))
             self.data.meta["n_entries"] = n_entries
-            self._standardizers = [None]*n_entries
+            self._standardizers = np.full((n_entries, ), None)
 
         # hidden indices that track the unravelled lookup to standardizer
         # extension index. I should imagine there's a better than double-loop
@@ -157,7 +164,9 @@
             no_std_map = True
             self.data["ext_idx"] = [None]*len(self.data)
 
-        if standardizers and no_std_map:
+        # make sure standardizers is not falsy - an empty list, None, an
+        # empty tuple, an empty array etc...
+        if (standardizers is not None and any(standardizers)) and no_std_map:
            std_idxs, ext_idxs = [], []
            for i, stdFits in enumerate(standardizers):
                for j, ext in enumerate(stdFits.exts):
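Worth spelling out why the truthiness check was replaced: `_standardizers` is now a numpy object array, and calling `bool()` on a multi-element array raises instead of answering. A minimal sketch of the pitfall (values invented):

```python
import numpy as np

stds = np.full((3, ), None)  # the new lazy _standardizers container

try:
    if stds:  # the old `if standardizers` style check
        pass
except ValueError as e:
    # "The truth value of an array with more than one element is ambiguous..."
    print(e)

# the explicit guard from the patch behaves the same for lists, tuples
# and object arrays alike
if stds is not None and any(stds):
    print("at least one standardizer is already instantiated")
```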
@@ -195,7 +204,7 @@ def read(cls, *args, format=None, units=None, descriptions=None, **kwargs):
         return cls(metadata)
 
     @classmethod
-    def _fromStandardizers(cls, standardizers, meta=None):
+    def fromStandardizers(cls, standardizers, meta=None):
         """Create ImageCollection from a collection `Standardizers`.
 
         The `Standardizer` is "unravelled", i.e. the shared metadata is
@@ -224,7 +233,7 @@
             # ButlerStd.stdMeta. Everything that is an iterable, except for a
             # string because that could be a location key?
             unravelColumns = [key for key, val in stdMeta.items() if isiterable(val) and not isinstance(val, str)]
-            for j, ext in enumerate(stdFits.exts):
+            for j, ext in enumerate(stdFits.processable):
                 row = {}
                 for key in stdMeta.keys():
                     if key in unravelColumns:
@@ -263,7 +272,7 @@ def _fromFilepaths(cls, filepaths, forceStandardizer, **kwargs):
             Standardizer.fromFile(path=path, forceStandardizer=forceStandardizer, **kwargs)
             for path in filepaths
         ]
-        return cls._fromStandardizers(standardizers)
+        return cls.fromStandardizers(standardizers)
 
     @classmethod
     def _fromDir(cls, path, recursive, forceStandardizer, **kwargs):
@@ -357,7 +366,7 @@ def fromDatasetRefs(cls, butler, refs, **kwargs):
         standardizer_cls = Standardizer.get(standardizer="ButlerStandardizer")
         standardizer = standardizer_cls(butler, refs, **kwargs)
         meta = {"root": butler.datastore.root.geturl(), "n_entries": len(list(refs))}
-        return cls._fromStandardizers([standardizer, ], meta=meta)
+        return cls.fromStandardizers([standardizer, ], meta=meta)
 
     def fromAQueryTable(self):
         # ? TBD
         pass
@@ -372,7 +381,12 @@ def __repr__(self):
         return repr(self.data).replace("Table", "ImageInfoSet")
 
     def __getitem__(self, key):
-        return self.data[key]
+        if isinstance(key, (int, str, np.integer)):
+            return self.data[self._userColumns][key]
+        elif isinstance(key, (list, np.ndarray, slice)):
+            return self.__class__(self.data[key], standardizers=self._standardizers[key])
+        else:
+            return self.data[key]
 
     def __setitem__(self, key, val):
         self.data[key] = val
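The new `__getitem__` dispatches on key type: scalar keys return a single row restricted to the user-visible columns (assuming `_userColumns`, not shown in this diff, holds that column list), while sequences and slices return a new `ImageCollection` whose lazily-instantiated standardizers stay aligned with the selected rows. A hypothetical usage sketch (paths invented):

```python
from kbmod import ImageCollection, Standardizer

paths = ("/data/night1/img1.fits", "/data/night1/img2.fits")  # made up
stds = [Standardizer.fromFile(path=p) for p in paths]
ic = ImageCollection.fromStandardizers(stds)

row = ic[0]          # a single row; hidden std_idx/ext_idx columns dropped
subset = ic[[0, 1]]  # a new ImageCollection, standardizers sliced along
half = ic[:1]        # slices behave the same way
mjds = ic["mjd"]     # string keys still return the underlying column
```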
""" if isinstance(idxs, int): - return self._get_standardizer(idxs) + return [self.get_standardizer(idxs, **kwargs), ] else: - return [self._get_standardizer(idx) for idx in idxs] + return [self.get_standardizer(idx, **kwargs) for idx in idxs] ######################## # FUNCTIONALITY (object operations, transformative functionality) ######################## - def toImageStack(self): - pass - - def plot_onsky(self): - pass - def write(self, *args, format=None, serialize_method=None, **kwargs): tmpdata = self.data.copy() @@ -522,3 +535,193 @@ def get_duration(self): """ # maybe timespan? return self.data["mjd"][-1] - self.data["mjd"][0] + + def toImageStack(self): + """Return an `~kbmod.search.image_stack` object for processing with + KBMOD. + + Returns + ------- + imageStack : `~kbmod.search.image_stack` + Image stack for processing with KBMOD. + """ + # unpack the layred image list to flatten the array + # this is so stupidly costly because we have an internal array + # representation that doesn't interface with numpy via ndarray it makes + # a copy every time + layeredImages = [img for std in self._standardizers for img in std.toLayeredImage()] + return image_stack(layeredImages) + + def _calc_suggested_angle(self, wcs, center_pixel=(1000, 2000), step=12): + """Projects an unit-vector parallel with the ecliptic onto the image + and calculates the angle of the projected unit-vector in the pixel + space. + + Parameters + ---------- + wcs : ``astropy.wcs.WCS`` + World Coordinate System object. + center_pixel : tuple, array-like + Pixel coordinates of image center. + step : ``float`` or ``int`` + Size of step, in arcseconds, used to find the pixel coordinates of + the second pixel in the image parallel to the ecliptic. + + Returns + ------- + suggested_angle : ``float`` + Angle the projected unit-vector parallel to the ecliptic + closes with the image axes. Used to transform the specified + search angles, with respect to the ecliptic, to search angles + within the image. + + Note + ---- + It is not neccessary to calculate this angle for each image in an + image set if they have all been warped to a common WCS. + + See Also + -------- + run_search.do_gpu_search + """ + # pick a starting pixel approximately near the center of the image + # convert it to ecliptic coordinates + start_pixel = np.array(center_pixel) + start_pixel_coord = SkyCoord.from_pixel(start_pixel[0], start_pixel[1], wcs) + start_ecliptic_coord = start_pixel_coord.geocentrictrueecliptic + + # pick a guess pixel by moving parallel to the ecliptic + # convert it to pixel coordinates for the given WCS + guess_ecliptic_coord = SkyCoord( + start_ecliptic_coord.lon + step * u.arcsec, + start_ecliptic_coord.lat, + frame="geocentrictrueecliptic", + ) + guess_pixel_coord = guess_ecliptic_coord.to_pixel(wcs) + + # calculate the distance, in pixel coordinates, between the guess and + # the start pixel. Calculate the angle that represents in the image. + x_dist, y_dist = np.array(guess_pixel_coord) - start_pixel + return np.arctan2(y_dist, x_dist) + + def run(self, config): + """Run KBMOD on the images in collection. + + Parameters + ---------- + config : `~kbmod.configuration.KBMODConfig` + Processing configuration + + Returns + ------- + results : `kbmod.results.ResultList` + KBMOD search results. + + Notes + ----- + Requires WCS. + """ + imageStack = self.toImageStack() + + # Compute the ecliptic angle for the images. Assume they are all the + # same size? 
+
+    def run(self, config):
+        """Run KBMOD on the images in collection.
+
+        Parameters
+        ----------
+        config : `~kbmod.configuration.KBMODConfig`
+            Processing configuration
+
+        Returns
+        -------
+        results : `kbmod.results.ResultList`
+            KBMOD search results.
+
+        Notes
+        -----
+        Requires WCS.
+        """
+        imageStack = self.toImageStack()
+
+        # Compute the ecliptic angle for the images. Assume they are all the
+        # same size? Technically that is currently a requirement, although
+        # it's not explicit (can this be in C++ code?)
+        center_pixel = (imageStack.get_width()/2, imageStack.get_height()/2)
+        suggested_angle = self._calc_suggested_angle(self.wcs[0], center_pixel)
+
+        # Set up the post processing data structure.
+        kb_post_process = PostProcess(config, self.data["mjd"].data)
+
+        # Perform the actual search.
+        search = stack_search(imageStack)
+        # search, search_params = self.do_gpu_search(search, img_info,
+        #                                            suggested_angle, kb_post_process)
+        # not sure why these were separated, I guess it made it look neater?
+        # definitely doesn't feel like everything is in place if there are so
+        # many ifs for a config - feels like that should be a config job?
+        # Anyhow, I'll be lazy and just unravel this here.
+        search_params = {}
+
+        # Run the grid search
+        # Set min and max values for angle and velocity
+        if config["average_angle"] is None:
+            average_angle = suggested_angle
+        else:
+            average_angle = config["average_angle"]
+        ang_min = average_angle - config["ang_arr"][0]
+        ang_max = average_angle + config["ang_arr"][1]
+        vel_min = config["v_arr"][0]
+        vel_max = config["v_arr"][1]
+        search_params["ang_lims"] = [ang_min, ang_max]
+        search_params["vel_lims"] = [vel_min, vel_max]
+
+        # Set the search bounds.
+        if config["x_pixel_bounds"] and len(config["x_pixel_bounds"]) == 2:
+            search.set_start_bounds_x(config["x_pixel_bounds"][0], config["x_pixel_bounds"][1])
+        elif config["x_pixel_buffer"] and config["x_pixel_buffer"] > 0:
+            width = search.get_image_stack().get_width()
+            search.set_start_bounds_x(-config["x_pixel_buffer"], width + config["x_pixel_buffer"])
+
+        if config["y_pixel_bounds"] and len(config["y_pixel_bounds"]) == 2:
+            search.set_start_bounds_y(config["y_pixel_bounds"][0], config["y_pixel_bounds"][1])
+        elif config["y_pixel_buffer"] and config["y_pixel_buffer"] > 0:
+            height = search.get_image_stack().get_height()
+            search.set_start_bounds_y(-config["y_pixel_buffer"], height + config["y_pixel_buffer"])
+
+        # If we are using barycentric corrections, compute the parameters and
+        # enable it in the search function. This can't be not-none atm because
+        # I hadn't copied bary_corr over....
+        if config["bary_dist"] is not None:
+            bary_corr = self._calc_barycentric_corr(img_info, config["bary_dist"])
+            # print average barycentric velocity for debugging
+
+            mjd_range = img_info.get_duration()
+            bary_vx = bary_corr[-1, 0] / mjd_range
+            bary_vy = bary_corr[-1, 3] / mjd_range
+            bary_v = np.sqrt(bary_vx * bary_vx + bary_vy * bary_vy)
+            bary_ang = np.arctan2(bary_vy, bary_vx)
+            print("Average Velocity from Barycentric Correction", bary_v, "pix/day", bary_ang, "angle")
+            search.enable_corr(bary_corr.flatten())
+
+        search_start = time.time()
+        print("Starting Search")
+        print("---------------------------------------")
+        param_headers = (
+            "Ecliptic Angle",
+            "Min. Search Angle",
+            "Max Search Angle",
+            "Min Velocity",
+            "Max Velocity",
+        )
+        param_values = (suggested_angle, *search_params["ang_lims"], *search_params["vel_lims"])
+        for header, val in zip(param_headers, param_values):
+            print("%s = %.4f" % (header, val))
+
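The angle/velocity limit arithmetic above is easy to sanity-check by hand; a worked example with made-up config values:

```python
suggested_angle = 1.18  # rad, as derived from the WCS

# invented values; real ones come from the KBMODConfig
config = {"average_angle": None, "ang_arr": [0.5, 0.5, 128], "v_arr": [92.0, 526.0, 256]}

average_angle = config["average_angle"] if config["average_angle"] is not None else suggested_angle
ang_lims = [average_angle - config["ang_arr"][0], average_angle + config["ang_arr"][1]]
vel_lims = [config["v_arr"][0], config["v_arr"][1]]

print(ang_lims)  # [0.68, 1.68] - search angles bracket the ecliptic angle
print(vel_lims)  # [92.0, 526.0] - straight from v_arr, in pixels per day
```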
+ if config["gpu_filter"]: + print("Using in-line GPU sigmaG filtering methods", flush=True) + coeff = post_process._find_sigmaG_coeff(config["sigmaG_lims"]) + search.enable_gpu_sigmag_filter( + np.array(config["sigmaG_lims"]) / 100.0, + coeff, + config["lh_level"], + ) + + # If we are using an encoded image representation on GPU, enable it and + # set the parameters. + if config["encode_psi_bytes"] > 0 or config["encode_phi_bytes"] > 0: + search.enable_gpu_encoding(config["encode_psi_bytes"], config["encode_phi_bytes"]) + + # Enable debugging. + if config["debug"]: + search.set_debug(config["debug"]) + + search.search( + int(config["ang_arr"][2]), + int(config["v_arr"][2]), + *search_params["ang_lims"], + *search_params["vel_lims"], + int(config["num_obs"]), + ) + print("Search finished in {0:.3f}s".format(time.time() - search_start), flush=True) + return search, search_params diff --git a/src/kbmod/standardizer.py b/src/kbmod/standardizer.py index 9a7a06662..c24a0e0a6 100644 --- a/src/kbmod/standardizer.py +++ b/src/kbmod/standardizer.py @@ -1,5 +1,7 @@ import abc +from kbmod.search import layered_image, raw_image, psf + __all__ = ["Standardizer",] @@ -41,6 +43,9 @@ class Standardizer(abc.ABC): multiple science exposures is expected to unravel that metadata and associate it with each of the science exposures individualy. + The data for which this standardization and unravelling will occur needs to + be appended to the items in ``processable`` attribute of theclass. + This class is an abstract base class to serve as a recipe for implementing a Standardizer specialized for processing an particular dataset. Do not attempt to instantiate this class directly. @@ -52,8 +57,15 @@ class Standardizer(abc.ABC): Attributes ---------- - loc : `str` + location : `str` Location of the file being standardized. + processable : `list` + List of standardizers internal units that can be processed by KBMOD. + For example, for FITS files on local filesystem these could be the + AstroPy header units (`~astropy.io.fits.HDUList` elements), for Vera C. + Rubin their internal `Exposure*` objects etc. The number of processable + units will match the number of returned standardized images, variances, + masks and PSFs. wcs : `list` WCSs associated with the processable image data. bbox : `list` @@ -71,26 +83,6 @@ class Standardizer(abc.ABC): standardizers with low priority when processing an upload. """ - @abc.abstractmethod - def __init__(self, location, *args, **kwargs): - self.location = location - - def __init_subclass__(cls, **kwargs): - # Registers subclasses of this class with set `name` class - # parameters as availible standardizers. Note the specific - # standardizer has to be imported before this class since the - # mechanism is triggered at definition time. - name = getattr(cls, "name", False) - if name and name is not None: - super().__init_subclass__(**kwargs) - Standardizer.registry[cls.name] = cls - - def __str__(self): - return f"{self.name}({self.location}, {self.exts})" - - def __repr__(self): - return f"{self.__class__.__name__}({self.location})" - @classmethod def get(cls, tgt=None, standardizer=None): """Get the standardizer class that can handle given file. @@ -229,6 +221,70 @@ def canStandardize(self, tgt): """ raise NotImplementedError() + def __init_subclass__(cls, **kwargs): + # Registers subclasses of this class with set `name` class + # parameters as availible standardizers. 
diff --git a/src/kbmod/standardizer.py b/src/kbmod/standardizer.py
index 9a7a06662..c24a0e0a6 100644
--- a/src/kbmod/standardizer.py
+++ b/src/kbmod/standardizer.py
@@ -1,5 +1,7 @@
 import abc
 
+from kbmod.search import layered_image, raw_image, psf
+
 
 __all__ = ["Standardizer",]
 
@@ -41,6 +43,9 @@ class Standardizer(abc.ABC):
     multiple science exposures is expected to unravel that metadata and
     associate it with each of the science exposures individualy.
 
+    The data for which this standardization and unravelling will occur needs
+    to be appended to the ``processable`` attribute of the class.
+
     This class is an abstract base class to serve as a recipe for implementing
     a Standardizer specialized for processing an particular dataset. Do not
     attempt to instantiate this class directly.
@@ -52,8 +57,15 @@
 
     Attributes
     ----------
-    loc : `str`
+    location : `str`
         Location of the file being standardized.
+    processable : `list`
+        List of the standardizer's internal units that can be processed by
+        KBMOD. For example, for FITS files on local filesystem these could be
+        the AstroPy header units (`~astropy.io.fits.HDUList` elements), for
+        Vera C. Rubin their internal `Exposure*` objects etc. The number of
+        processable units will match the number of returned standardized
+        images, variances, masks and PSFs.
     wcs : `list`
         WCSs associated with the processable image data.
     bbox : `list`
@@ -71,26 +83,6 @@
         standardizers with low priority when processing an upload.
     """
 
-    @abc.abstractmethod
-    def __init__(self, location, *args, **kwargs):
-        self.location = location
-
-    def __init_subclass__(cls, **kwargs):
-        # Registers subclasses of this class with set `name` class
-        # parameters as availible standardizers. Note the specific
-        # standardizer has to be imported before this class since the
-        # mechanism is triggered at definition time.
-        name = getattr(cls, "name", False)
-        if name and name is not None:
-            super().__init_subclass__(**kwargs)
-            Standardizer.registry[cls.name] = cls
-
-    def __str__(self):
-        return f"{self.name}({self.location}, {self.exts})"
-
-    def __repr__(self):
-        return f"{self.__class__.__name__}({self.location})"
-
     @classmethod
     def get(cls, tgt=None, standardizer=None):
         """Get the standardizer class that can handle given file.
@@ -229,6 +221,70 @@ def canStandardize(self, tgt):
         """
         raise NotImplementedError()
 
+    def __init_subclass__(cls, **kwargs):
+        # Registers subclasses of this class with set `name` class
+        # parameters as available standardizers. Note the specific
+        # standardizer has to be imported before this class since the
+        # mechanism is triggered at definition time.
+        name = getattr(cls, "name", False)
+        if name and name is not None:
+            super().__init_subclass__(**kwargs)
+            Standardizer.registry[cls.name] = cls
+
+    @abc.abstractmethod
+    def __init__(self, location, *args, **kwargs):
+        self.location = location
+
+    def __str__(self):
+        return f"{self.name}({self.location}, {self.exts})"
+
+    def __repr__(self):
+        return f"{self.__class__.__name__}({self.location})"
+
+    # I was a bit lazy and didn't feel like rekajiggering the whole code just
+    # to replace .exts with .processable - although it would save us some
+    # overhead.
+    @abc.abstractproperty
+    def processable(self):
+        """A list of processable units (i.e. images that can be processed by
+        KBMOD) extracted from the given target data source."""
+        raise NotImplementedError()
+
+    # all these should probably be named in plural - but lord allmighty that's
+    # a lot of references to update and it feels so unnatural - help?
+    @abc.abstractproperty
+    def wcs(self):
+        """A list of WCS's or `None` for each entry marked as processable.
+
+        Expected to be a WCS object, but when it is not possible to construct
+        one `None` will be returned in its place.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractproperty
+    def bbox(self):
+        """A list of bounding boxes or `None` for each entry marked as
+        processable.
+
+        ========== ============================================================
+        Key        Description
+        ========== ============================================================
+        center_ra  On-sky (ICRS) right ascension coordinate, expressed in
+                   decimal degrees, of the center of a rectangular bounding
+                   box.
+        center_dec On-sky (ICRS) declination coordinate, expressed in decimal
+                   degrees, of the center of a rectangular bounding box.
+        corner_ra  On-sky (ICRS) right ascension coordinate, expressed in
+                   decimal degrees, of a corner of a rectangular bounding box.
+        corner_dec On-sky (ICRS) declination coordinate, expressed in decimal
+                   degrees, of a corner of a rectangular bounding box.
+        ========== ============================================================
+
+        Often, when WCS can not be constructed, the BBOX can not be
+        constructed. Then `None` is expected to be returned in its place.
+        """
+        raise NotImplementedError()
+
     @abc.abstractmethod
     def standardizeWCS(self):
         """Creates an WCS for every processable science image.
@@ -263,7 +319,7 @@ def standardizeMetadata(self):
         """Standardizes required and optional metadata from the given data.
 
         The standardized metadata is expected to be a dict containing the
-        following keys in data:
+        following keys and values:
 
         ======== ==============================================================
         Key      Description
@@ -286,6 +342,19 @@ def standardizeMetadata(self):
         -------
         metadata : `dict`
             Standardized metadata.
+
+
+        Notes
+        -----
+        If a target being standardized contains only 1 image that can be
+        standardized and processed by KBMOD, the values in the dictionary can
+        be strings, integers, floats, and other such single-valued,
+        non-iterable types, or an iterable type containing only 1 element.
+        When the target being standardized contains multiple images/extensions
+        that are processable by KBMOD the single-valued dict entries will be
+        considered as shared by all the processable images/extensions/units of
+        the target. The iterable values in the dictionary must match the
+        number and order of the units marked as processable.
         """
         raise NotImplementedError()
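A sketch of a return value conforming to the Notes above, for a target with two processable extensions; the key names follow the ones used elsewhere in the diff (`location`, `mjd`, `filter`, `wcs`, `bbox`), all values are invented:

```python
from astropy.wcs import WCS

wcs1, wcs2 = WCS(naxis=2), WCS(naxis=2)  # placeholders
bbox1 = bbox2 = None                     # bbox may be None when WCS is missing

metadata = {
    "location": "/data/night1/img1.fits",  # scalar -> shared by both extensions
    "mjd": [58914.1, 58914.1],             # iterable -> one value per unit,
    "filter": ["g", "g"],                  # matching `processable` in length
    "wcs": [wcs1, wcs2],                   # and order
    "bbox": [bbox1, bbox2],
}
```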
""" raise NotImplementedError() @@ -299,8 +368,8 @@ def standardizeScienceImage(self): Returns ------- - image : `np.array` - Science image. + image : `list[~np.array]` + Science images. """ raise NotImplemented() @@ -320,8 +389,8 @@ def standardizeVarianceImage(self): Returns ------- - variance : `np.array` - Variance image. + variance : `list[`np.array]` + Variance images. """ raise NotImplementedError() @@ -337,50 +406,50 @@ def standardizeMaskImage(self): Returns ------- - mask : `np.array` - Mask image. + mask : `list[~np.array]` + Mask images. """ raise NotImplementedError() # no idea really what to do bout this one? AFAIK almost no images come # with PSFs in them - #@abc.abstractmethod - #def standardizePSF(self): - # raise NotImplementedError() - - def standardize(self): - """Invokes standardize metadata, image, variance, mask and PSF and - returns the results as a dictionary. + @abc.abstractmethod + def standardizePSF(self): + """Returns PSF for each extension marked as processable. Returns ------- - standardizedData : `dict` - Dictionary with standardized ``meta``, ``science``, ``variance``, - ``mask`` and ``psf`` values. + psfs : `list[~kbmod.search.psf]` + List of `~kbmod.search.psf` objects. """ - std = {"meta": self.standardizeStddata()} - std.update({"science": self.standardizeScienceImage()}) - std.update({"variance": self.standardizeVarianceImage()}) - std.update({"mask": self.standardizeMaskImage()}) - std.update({"psf": self.standardizePSF()}) + raise NotImplementedError() - return std + @abc.abstractmethod def toLayeredImage(self): """Run metadata standardization methods. These include header and bounding box standardization. Notes ----- - Implementation is processor-specific. + Implementation is standardizer-specific. + """ + raise NotImplementedError() + + def standardize(self): + """Invokes standardize metadata, image, variance, mask and PSF and + returns the results as a dictionary. + + Returns + ------- + standardizedData : `dict` + Dictionary with standardized ``meta``, ``science``, ``variance``, + ``mask`` and ``psf`` values. """ - # note that these will be lists so we need a new constructor - # They will be equal length(maybe?) and each is a different - # detector - i.e. not an image stack. Because we don't use - # ndarray this will be a copy operation and not very fast. 
diff --git a/src/kbmod/standardizers/butler_standardizer.py b/src/kbmod/standardizers/butler_standardizer.py
index d81e608c4..17b2ce8eb 100644
--- a/src/kbmod/standardizers/butler_standardizer.py
+++ b/src/kbmod/standardizers/butler_standardizer.py
@@ -1,6 +1,12 @@
+import astropy.nddata as bitmask
 from astropy.wcs import WCS
 
+import numpy as np
+
+from scipy.signal import convolve2d
+
 from kbmod.standardizer import Standardizer
+from kbmod.search import layered_image, raw_image, psf
 
 
 __all__ = ["ButlerStandardizer"]
@@ -28,16 +34,57 @@ def canStandardize(self, tgt):
             return True, tgt
         return False, []
 
-    def __init__(self, butler, datasetRefs):
+    def __init__(self, butler, datasetRefs=None, id=None, **kwargs):
+        # this requires an involved logic sequence where
+        # 1) we need to check if butler is Butler, datasetRefs a list of
+        #    DatasetRef - proceed as is
+        # 2) if butler is a string - instantiate butler
+        # 3) if the datRefs are a list of DatasetRefs - great
+        # 4) else construct DatasetRef's
+        #    4.1) They are DataId objects - butler.registry.getDataset(id)
+        #    4.2) if the datRefs are strings and UUIDs - b.r.getDataset(id)
+        #    4.3) mappings must specify datasetType, instrument, detector,
+        #         visit, and collection, then:
+        #         butler.registry.queryDatasets(datasetType='goodSeeingDiff_differenceTempExp',
+        #             dataId={"instrument": "DECam", "detector": 35, "visit": 974895},
+        #             collections=["collection"/datref.run, ])
+        # I guess majority of these we can put in our table by default,
+        # certainly the number of columns we need is growing compared to
+        # metadata columns for the user - maybe separate into two tables
+        # and then flatten before writing and serialize the metadata
+        # about which columns go where into `meta`, but this is getting
+        # a bit too far atm, let's just raise an error in _get_std for now
+        # Another question to answer here is the construction and lazy loading
+        # of standardizers. FitsStandardizers have 1 input init param - location
+        # so in _get_std in image collection we just pass that. A more generic
+        # way would be to pass the whole row. The issue here is that __init__
+        # is now having to deal with handling a mapping and that makes it less
+        # obvious for the end user. Optionally, we can give a mandatory
+        # **kwarg to every __init__ and rely on column naming and unpacking.
+        # this is not the best, explicit better than implicit, but also now
+        # the __init__ method kwargs need to be named the same as columns and
+        # be optional too. For example does this init make sense to you?
+        # I don't think I would be able to write this without knowing how the
+        # whole thing works...
         self.butler = butler
-        self.refs = datasetRefs
+        self.location = butler.datastore.root
+        if datasetRefs is None:
+            # import this here for now, for optimization
+            from lsst.daf.butler.core import DatasetId
+            self.refs = [butler.registry.getDataset(DatasetId(id)), ]
+        else:
+            self.refs = datasetRefs
 
         # wth? why is it a data reference if it doesn't reference data without
         # being explicit about it? Why `collection` and `collections` don't
         # behave the same way? Why is this a thing! AAARRRGGGHH!
-        self.exts = [butler.get(ref, collections=[ref.run, ]) for ref in datasetRefs]
+        self.exts = [butler.get(ref, collections=[ref.run, ]) for ref in self.refs]
         self._wcs = []
         self._bbox = []
 
+    @property
+    def processable(self):
+        return self.exts
+
     @property
     def wcs(self):
         if not self._wcs:
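The two construction paths the comment above wrestles with, sketched with made-up repo, collection and UUID values; `queryDatasets` is the same registry call quoted in the comment:

```python
from lsst.daf.butler import Butler

butler = Butler("/repo/main")  # made-up repo path

# 1) hand over resolved DatasetRefs directly
refs = list(butler.registry.queryDatasets(
    datasetType="goodSeeingDiff_differenceTempExp",
    dataId={"instrument": "DECam", "detector": 35, "visit": 974895},
    collections=["DECam/defaultCollection", ],  # made-up collection
))
std = ButlerStandardizer(butler, datasetRefs=refs)

# 2) hand over a bare dataset UUID and let __init__ resolve it through
#    registry.getDataset (UUID invented)
std = ButlerStandardizer(butler, id="00000000-0000-0000-0000-000000000000")
```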
@@ -113,7 +161,7 @@ def _maskSingleExp(self, exp):
         )
 
         brigthness_threshold = exp.image.array.mean() - exp.image.array.std()
-        threshold_mask = exp.image.array > brightness_threshold
+        threshold_mask = exp.image.array > brigthness_threshold
 
         net_mask = bit_mask & threshold_mask
 
@@ -160,6 +208,7 @@ def standardizeMetadata(self):
 
         metadata["mjd"] = [e.visitInfo.date.toAstropy().mjd for e in self.exts]
         metadata["filter"] = [e.info.getFilter().physicalLabel for e in self.exts]
+        metadata["id"] = [str(r.id) for r in self.refs]
         metadata["exp_id"] = [e.info.id for e in self.exts]
 
         # it's also like super dificult to extract any information out of the
@@ -196,3 +245,21 @@ def standardizeVarianceImage(self):
 
     def standardizeMaskImage(self):
         return (self._maskSingleExp(exp) for exp in self.exts)
+
+    def standardizePSF(self):
+        return (psf(1) for e in self.exts)
+
+    def toLayeredImage(self):
+        meta = self.standardizeMetadata()
+        sciences = self.standardizeScienceImage()
+        variances = self.standardizeVarianceImage()
+        masks = self.standardizeMaskImage()
+
+        psfs = self.standardizePSF()
+
+        # guaranteed to exist, i.e. safe to access
+        mjds = meta["mjd"]
+        imgs = []
+        for sci, var, mask, psf, t in zip(sciences, variances, masks, psfs, mjds):
+            imgs.append(layered_image(raw_image(sci), raw_image(var), raw_image(mask), t, psf))
+        return imgs
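One thing to keep in mind about the `standardize*` implementations here: they return generator expressions, so each product can be consumed exactly once. `toLayeredImage` relies on that by zipping everything in a single pass; calling code that wants to iterate twice has to re-invoke the method. A small illustration, reusing `std` from the sketch above:

```python
sciences = std.standardizeScienceImage()
first_pass = list(sciences)   # drains the generator
second_pass = list(sciences)  # [] - generators are single-use

# a fresh call produces a fresh generator
sciences = std.standardizeScienceImage()
```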
diff --git a/src/kbmod/standardizers/decam_community_pipe_fits.py b/src/kbmod/standardizers/decam_community_pipe_fits.py
index a6d23631f..69778d15e 100644
--- a/src/kbmod/standardizers/decam_community_pipe_fits.py
+++ b/src/kbmod/standardizers/decam_community_pipe_fits.py
@@ -3,6 +3,11 @@
 
 from astropy.stats import sigma_clipped_stats
 from astropy.io import fits
+from astropy.nddata import bitmask
+
+import numpy as np
+
+from scipy.signal import convolve2d
 
 from .multi_extension_fits import MultiExtensionFits
 from astro_metadata_translator import ObservationInfo
@@ -84,8 +89,8 @@ def _getScienceImages(cls, hdulist):
 
         return exts
 
-    def __init__(self, location):
-        super().__init__(location)
+    def __init__(self, location, **kwargs):
+        super().__init__(location, **kwargs)
 
         # self.exts is filled in super() but that could include tracking and
         # focus chips too (as they fall under image-like conditions). We want
diff --git a/src/kbmod/standardizers/fits_standardizer.py b/src/kbmod/standardizers/fits_standardizer.py
index b7c04ad56..61261f763 100644
--- a/src/kbmod/standardizers/fits_standardizer.py
+++ b/src/kbmod/standardizers/fits_standardizer.py
@@ -6,6 +6,7 @@
 from astropy.wcs import WCS
 
 from kbmod.standardizer import Standardizer
+from kbmod.search import layered_image, raw_image, psf
 
 
 __all__ = ["FitsStandardizer",]
@@ -50,7 +51,6 @@ class FitsStandardizer(Standardizer):
     """File extensions this processor can handle."""
 
     @classmethod
-    @abstractmethod
     def canStandardize(cls, tgt):
         # docstring inherited from Standardizer; TODO: check it's True
         canProcess, hdulist = False, None
@@ -85,6 +85,10 @@ def __init__(self, location):
         self._wcs = []
         self._bbox = []
 
+    @property
+    def processable(self):
+        return self.exts
+
     @property
     def wcs(self):
         if not self._wcs:
@@ -237,7 +241,7 @@ def translateHeader(self, *args, **kwargs):
 
     def standardizeMetadata(self):
         metadata = self.translateHeader()
-        metadata.update({"location": self.location})
+        metadata["location"] = self.location
         metadata.update({"wcs": self.wcs, "bbox": self.bbox})
 
         # calculate the pointing from the bbox or wcs if they exist?
@@ -262,4 +266,37 @@
         return metadata
 
     def standardizeScienceImage(self):
+        # the assumption here is that all exts are AstroPy HDU objects
         return (ext.data for ext in self.exts)
+
+    def standardizePSF(self):
+        return (psf(1) for e in self.exts)
+
+    def toLayeredImage(self):
+        """Returns a list of `~kbmod.search.layered_image` objects for each
+        entry marked as processable.
+
+        Returns
+        -------
+        layeredImage : `list`
+            Layered image objects.
+        """
+        meta = self.standardizeMetadata()
+        sciences = self.standardizeScienceImage()
+        variances = self.standardizeVarianceImage()
+        masks = self.standardizeMaskImage()
+
+        psfs = self.standardizePSF()
+
+        # guaranteed to exist, i.e. safe to access, but isn't unravelled here
+        # or potentially it is - we can't tell?
+        if isinstance(meta["mjd"], (list, tuple)):
+            mjds = meta["mjd"]
+        else:
+            mjds = (meta["mjd"] for e in self.exts)
+
+        imgs = []
+        for sci, var, mask, psf, t in zip(sciences, variances, masks, psfs, mjds):
+            imgs.append(layered_image(raw_image(sci), raw_image(var), raw_image(mask), t, psf))
+
+        return imgs
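Both `toLayeredImage` implementations reduce to the same per-extension construction; stripped of the generators it is just this (arrays and timestamp invented for illustration):

```python
import numpy as np
from kbmod.search import layered_image, raw_image, psf

sci = np.zeros((128, 128), dtype=np.float32)  # science pixels
var = np.ones_like(sci)                       # per-pixel variance
mask = np.zeros_like(sci)                     # nothing masked
mjd = 58914.1                                 # invented observation time

img = layered_image(raw_image(sci), raw_image(var), raw_image(mask), mjd, psf(1))
```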
diff --git a/src/kbmod/standardizers/multi_extension_fits.py b/src/kbmod/standardizers/multi_extension_fits.py
index 50927199e..9d47867bb 100644
--- a/src/kbmod/standardizers/multi_extension_fits.py
+++ b/src/kbmod/standardizers/multi_extension_fits.py
@@ -95,7 +95,7 @@ def canStandardize(cls, location):
         canStandardize = parentCanStandardize and len(hdulist) > 1
         return canStandardize, hdulist
 
-    def __init__(self, location, set_exts=False):
+    def __init__(self, location, set_exts=False, **kwargs):
         super().__init__(location)
 
         if set_exts:
diff --git a/src/kbmod/standardizers/rubin_scipipe_fits.py b/src/kbmod/standardizers/rubin_scipipe_fits.py
index 8b7014cc5..7ca674401 100644
--- a/src/kbmod/standardizers/rubin_scipipe_fits.py
+++ b/src/kbmod/standardizers/rubin_scipipe_fits.py
@@ -7,6 +7,7 @@
 from astropy.time import Time
 from astropy.nddata import bitmask
 
+import numpy as np
 from scipy.signal import convolve2d
 
 from kbmod.standardizers import MultiExtensionFits
@@ -64,8 +65,8 @@ def canStandardize(cls, location):
         canStandardize = parentCanStandardize and isRubin
         return canStandardize, hdulist
 
-    def __init__(self, location):
-        super().__init__(location)
+    def __init__(self, location, **kwargs):
+        super().__init__(location, **kwargs)
 
         # this is the only science-image header for Rubin
         self.exts = [self.hdulist[1], ]
@@ -86,7 +87,6 @@ def translateHeader(self):
         return standardizedHeader
 
     def standardizeMaskImage(self):
-
         idx = self.hdulist.index_of("MASK")
         bitfield = self.hdulist[idx].data
         bit_mask = bitmask.bitfield_to_boolean_mask(
@@ -97,7 +97,7 @@
         idx = self.hdulist.index_of("IMAGE")
         image = self.hdulist[idx].data
 
-        brigthness_threshold = image.mean() - image.std()
+        brightness_threshold = image.mean() - image.std()
         threshold_mask = image > brightness_threshold
 
         net_mask = threshold_mask & bit_mask
@@ -106,8 +106,7 @@
         grow_kernel = np.ones((11, 11))
         grown_mask = convolve2d(net_mask, grow_kernel, mode="same")
 
-        return grown_mask
+        return [grown_mask, ]
 
     def standardizeVarianceImage(self):
-        idx = self.hdulist.index_of("VARIANCE")
-        return self.hdulist[idx].data
+        return [self.hdulist["VARIANCE"].data, ]
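For reference, the masking recipe shared by the Rubin and Butler standardizers, as a self-contained sketch; the bitfield, image and ignored flags are invented here, the real flag names come from the instrument's mask plane definitions:

```python
import numpy as np
from astropy.nddata import bitmask
from scipy.signal import convolve2d

rng = np.random.default_rng(42)
bitfield = rng.integers(0, 2**5, size=(64, 64))  # stand-in MASK plane
image = rng.normal(100.0, 5.0, size=(64, 64))    # stand-in IMAGE plane

# flags -> boolean mask, ignoring bits deemed harmless (values invented)
bit_mask = bitmask.bitfield_to_boolean_mask(bitfield, ignore_flags=[1, 4])

# combine with the brightness cut, as in standardizeMaskImage above
brightness_threshold = image.mean() - image.std()
net_mask = bit_mask & (image > brightness_threshold)

# grow the mask by an 11x11 footprint so neighbouring pixels get masked too
grown_mask = convolve2d(net_mask.astype(int), np.ones((11, 11)), mode="same") > 0
```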