From cbc670be3543978f936d1311dfc1bdee08ac0950 Mon Sep 17 00:00:00 2001
From: temuller
Date: Wed, 30 Oct 2024 12:37:54 +0000
Subject: [PATCH] fix problems with HST and include drizzlepac images

---
 README.md               |   1 +
 src/hostphot/cutouts.py | 126 +++-------------------------------------
 src/hostphot/utils.py   |  13 ++++-
 3 files changed, 22 insertions(+), 118 deletions(-)

diff --git a/README.md b/README.md
index 75ec3ff..4726c4b 100644
--- a/README.md
+++ b/README.md
@@ -168,6 +168,7 @@ If you make use of HostPhot, please cite the following [paper](https://joss.theo
 ## What's new!
 v2.10.0
 * SkyMapper upgrade from DR2 to DR4 (note that there is still a problem with the photometric calibration of extended sources)
+* Fix HST functions, which includes working with Drizzlepac images
 * Change path handling package (os --> pathlib)
 * Interactive aperture and download of RGB images have been removed and are no longer available
 * Close some fits images that were previously kept opened
diff --git a/src/hostphot/cutouts.py b/src/hostphot/cutouts.py
index 9f04523..ad8c46b 100644
--- a/src/hostphot/cutouts.py
+++ b/src/hostphot/cutouts.py
@@ -1084,18 +1084,17 @@ def update_HST_header(hdu):
     hdu : Header Data Unit.
         HST FITS image.
     """
-    # get WCS
+    # Drizzlepac images have the info on hdu[0]
+    # MAST-archive images have the info on hdu[1]
     with warnings.catch_warnings():
         warnings.simplefilter("ignore", AstropyWarning)
-        for i in range(1, len(hdu) - 1):
-            try:
-                img_wcs = wcs.WCS(hdu[i].header)
-            except:
-                continue
-        hdu[0].header.update(img_wcs.to_header())
-        hdu[0].header["PHOTFLAM"] = hdu[1].header["PHOTFLAM"]
-        hdu[0].header["PHOTPLAM"] = hdu[1].header["PHOTPLAM"]
-        hdu[0].data = hdu[1].data
+        if "PHOTFLAM" not in hdu[0].header:
+            # MAST image: move things to hdu[0] to homogenise
+            img_wcs = wcs.WCS(hdu[1].header)
+            hdu[0].header.update(img_wcs.to_header())
+            hdu[0].header["PHOTFLAM"] = hdu[1].header["PHOTFLAM"]
+            hdu[0].header["PHOTPLAM"] = hdu[1].header["PHOTPLAM"]
+            hdu[0].data = hdu[1].data
 
     # add zeropoints
     # https://www.stsci.edu/hst/instrumentation/acs/data-analysis/zeropoints
@@ -1260,113 +1259,6 @@ def get_HST_images(ra, dec, size=3, filt=None):
     return hdu_list
 
-
-def _get_HST_images_OLD(ra, dec, size=3, filt=None, instrument=None):
-    """Downloads a set of HST fits images for a given set
-    of coordinates and filters using the MAST archive.
-
-    Parameters
-    ----------
-    ra: str or float
-        Right ascension in degrees.
-    dec: str or float
-        Declination in degrees.
-    size: float or ~astropy.units.Quantity, default ``3``
-        Image size. If a float is given, the units are assumed to be arcmin.
-    filt: str, default ``None``
-        Filter to use.
-    instrument: str, default ``None``
-        Instrument to use.
-
-    Return
-    ------
-    hdu_list: list
-        List with fits image for the given filter.
-        `None` is returned if no image is found.
- """ - check_HST_filters(filt, instrument) - - if isinstance(size, (float, int)): - size_arcsec = (size * u.arcmin).to(u.arcsec) - else: - size_arcsec = size.to(u.arcsec) - size_arcsec = size_arcsec.value - - with warnings.catch_warnings(): - warnings.simplefilter("ignore", AstropyWarning) - coords = SkyCoord( - ra=ra, dec=dec, unit=(u.degree, u.degree), frame="icrs" - ) - - obs_table = Observations.query_region(coords, radius=size) - obs_table = obs_table.filled() # remove masked rows - obs_df = obs_table.to_pandas() - - obs_df = obs_df[ - (obs_df.obs_collection == "HST") | (obs_df.obs_collection == "HLA") - ] - obs_df = obs_df[obs_df.dataproduct_type == "image"] - obs_df = obs_df[obs_df.t_exptime > 15] # remove short exposures - - # filter by instrument+filter - inst = instrument.split("/")[0] - # inst_df = obs_df[obs_df.instrument_name==instrument] - inst_df = obs_df[obs_df.instrument_name.str.contains(inst)] - filt_df = inst_df[inst_df.filters == filt] - # just use one image - filt_df = filt_df[filt_df.t_exptime == filt_df.t_exptime.max()] - - # get data products - data_products = Observations.get_product_list(Table.from_pandas(filt_df)) - dp_df = data_products.to_pandas() - dp_df = dp_df[dp_df.type == "S"] - dp_df = dp_df[dp_df.productSubGroupDescription == "FLT"] # only images - # choose first image - single_dp_df = dp_df[dp_df.obs_id == dp_df.obs_id.values[0]] - - Observations.download_products( - Table.from_pandas(single_dp_df), - download_dir=None, - cache=False, - productType="SCIENCE", - extension=["fits"], - ) - - for path, subdirs, files in os.walk("mastDownload"): - for file in files: - fits_file = os.path.join(path, file) - - hdu = fits.open(fits_file) - hdu[0].data = hdu[1].data - - # add zeropoints - # https://www.stsci.edu/hst/instrumentation/acs/data-analysis/zeropoints - photflam = hdu[0].header["PHOTFLAM"] - photplam = hdu[0].header["PHOTFLAM"] - hdu[0].header["MAGZP"] = ( - -2.5 * np.log10(photflam) - 5 * np.log10(photplam) - 2.408 - ) - hdu_list = [hdu] - - # remove directory created by MAST download - shutil.rmtree("mastDownload", ignore_errors=True) - - # HST images can be large so need to be trimmed - pixel_scale = survey_pixel_scale("HST") - size_pixels = int(size_arcsec / pixel_scale) - pos = SkyCoord(ra=ra * u.degree, dec=dec * u.degree) - - for hdu in hdu_list: - with warnings.catch_warnings(): - warnings.simplefilter("ignore", AstropyWarning) - img_wcs = wcs.WCS(hdu[0].header) - - trimmed_data = Cutout2D(hdu[0].data, pos, size_pixels, img_wcs) - hdu[0].data = trimmed_data.data - hdu[0].header.update(trimmed_data.wcs.to_header()) - - return hdu_list - # SkyMapper def get_SkyMapper_urls(ra, dec, fov, filters="uvgriz"): """Obtains the URLs of the SkyMapper images. 
diff --git a/src/hostphot/utils.py b/src/hostphot/utils.py
index afffeb4..a38db55 100644
--- a/src/hostphot/utils.py
+++ b/src/hostphot/utils.py
@@ -809,14 +809,25 @@ def survey_pixel_scale(survey, filt=None):
     survey_df = config_df[config_df.survey == survey]
     pixel_scale = survey_df.pixel_scale.values[0]
 
+    # some surveys have multiple scales (separated by ',') depending on the instrument
     if len(pixel_scale.split(",")) > 1:
-        filters = get_survey_filters(survey)
+        if survey in ["HST"]:
+            filters = filt  # needs to be explicitly specified
+        else:
+            filters = get_survey_filters(survey)
         pixel_scale_dict = {
             f: float(ps) for f, ps in zip(filters, pixel_scale.split(","))
         }
         if filt in pixel_scale_dict.keys():
             pixel_scale = pixel_scale_dict[filt]
+        elif survey == "HST":
+            if "UVIS" in filt:
+                pixel_scale = list(pixel_scale_dict.values())[0]
+            elif "IR" in filt:
+                pixel_scale = list(pixel_scale_dict.values())[1]
+            else:
+                raise ValueError(f"Not a valid HST filter: {filt}")
         else:
             print(f"No pixel scale found for filter {filt}.")
             filt_used = list(pixel_scale_dict.keys())[0]
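For illustration, a minimal usage sketch of the new HST branch in survey_pixel_scale(). The filter strings below are hypothetical (the branch only requires that the name contain "UVIS" or "IR"), and the assumption that the first comma-separated value in the survey config is the UVIS scale and the second the IR scale simply mirrors the indexing above:

from hostphot.utils import survey_pixel_scale

# hypothetical HST filter names; for HST the filter must be given explicitly
ps_uvis = survey_pixel_scale("HST", filt="WFC3_UVIS_F275W")  # "UVIS" in the name -> first scale
ps_ir = survey_pixel_scale("HST", filt="WFC3_IR_F160W")      # "IR" in the name -> second scale

# a name containing neither "UVIS" nor "IR" now raises a ValueError for HST
try:
    survey_pixel_scale("HST", filt="F275W")
except ValueError as err:
    print(err)  # Not a valid HST filter: F275W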