From 1b55522a82507930bd92b3fc4ce0b0509c9f6b42 Mon Sep 17 00:00:00 2001 From: nzywucka Date: Fri, 13 Sep 2024 11:44:12 +0200 Subject: [PATCH 01/10] Adding a function to create a log file and save cloud correction parameters in the file --- .../lst1_magic/lst_m1_m2_cloud_correction.py | 42 ++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py b/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py index f7655a98..b10053c4 100644 --- a/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py +++ b/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py @@ -12,8 +12,10 @@ """ import argparse import logging +import re import astropy.units as u +import ctapipe import numpy as np import pandas as pd import yaml @@ -28,6 +30,7 @@ from ctapipe.instrument import SubarrayDescription from ctapipe.io import read_table +import magicctapipe from magicctapipe.io import save_pandas_data_in_table logger = logging.getLogger(__name__) @@ -35,9 +38,41 @@ logger.setLevel(logging.INFO) +def save_cloud_correction_params(config, run_number): + """ + Creates a log file and saves the cloud correction parameters from the config file and software information in the log file + + Parameters + ---------- + config : dict + Configuration for the LST-1 + MAGIC analysis + run_number : str + Number of the analysed run + """ + cloud_correction = config.get("cloud_correction", {}) + log_filename = f"correction_log_{run_number}.log" + logging.basicConfig( + filename=log_filename, + level=logging.INFO, + format="%(asctime)s - %(levelname)s - %(message)s", + ) + + logging.info(f"Cloud Correction Parameters for Run {run_number}:") + logging.info(f"{cloud_correction}") + + with open(f"cloud_correction_params_{run_number}.yaml", "w") as f: + yaml.dump(cloud_correction, f, default_flow_style=False) + + ctapipe_version = ctapipe.__version__ + magicctapipe_version = magicctapipe.__version__ + + logging.info(f"ctapipe version: {ctapipe_version}") + logging.info(f"magicctapipe version: {magicctapipe_version}") + + def model0(imp, h, zd): """ - Calculated the geometrical part of the model relating the emission height with the angular distance from the arrival direction + Calculates the geometrical part of the model relating the emission height with the angular distance from the arrival direction Parameters ---------- @@ -340,9 +375,14 @@ def main(): else: raise ValueError(f"No optics data found for telescope: {telescope.name}") + run_number_match = re.search(r"Run\d+\.\d+", args.input_file) + run_number = run_number_match.group() if run_number_match else "unknown_run" + with open(args.config_file, "r") as file: config = yaml.safe_load(file) + save_cloud_correction_params(config, run_number) + tel_ids = config["mc_tel_ids"] dfs = [] # Initialize an empty list to store DataFrames From ba5ed61619793edd93a07dbcf43e94683907910e Mon Sep 17 00:00:00 2001 From: nzywucka Date: Mon, 16 Sep 2024 12:53:29 +0200 Subject: [PATCH 02/10] Saving parameters to the .log file simplified and saving the .h5 files improved. 
--- .../lst1_magic/lst_m1_m2_cloud_correction.py | 72 ++++++++----------- 1 file changed, 29 insertions(+), 43 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py b/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py index b10053c4..9ebaa10a 100644 --- a/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py +++ b/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py @@ -12,7 +12,8 @@ """ import argparse import logging -import re +import os +from pathlib import Path import astropy.units as u import ctapipe @@ -38,38 +39,6 @@ logger.setLevel(logging.INFO) -def save_cloud_correction_params(config, run_number): - """ - Creates a log file and saves the cloud correction parameters from the config file and software information in the log file - - Parameters - ---------- - config : dict - Configuration for the LST-1 + MAGIC analysis - run_number : str - Number of the analysed run - """ - cloud_correction = config.get("cloud_correction", {}) - log_filename = f"correction_log_{run_number}.log" - logging.basicConfig( - filename=log_filename, - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - ) - - logging.info(f"Cloud Correction Parameters for Run {run_number}:") - logging.info(f"{cloud_correction}") - - with open(f"cloud_correction_params_{run_number}.yaml", "w") as f: - yaml.dump(cloud_correction, f, default_flow_style=False) - - ctapipe_version = ctapipe.__version__ - magicctapipe_version = magicctapipe.__version__ - - logging.info(f"ctapipe version: {ctapipe_version}") - logging.info(f"magicctapipe version: {magicctapipe_version}") - - def model0(imp, h, zd): """ Calculates the geometrical part of the model relating the emission height with the angular distance from the arrival direction @@ -283,7 +252,7 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): if assigned_tel_ids["LST-1"] == tel_id: conc_params = concentration_parameters( clean_camgeom, clean_image, hillas_params - ) # "For LST-1 we compute concentration from the cleaned image and for MAGIC from the full image to reproduce the current behaviour in the standard code + ) # "For LST-1 we compute concentration from the cleaned image and for MAGIC from the full image to reproduce the current behaviour in the standard code/home/natti/CTA/lodz_cluster/lstchain_10_11_test/scripts/new/outfiles/dl1_coincidence_stereo_0.75-0.85_03955 else: conc_params = concentration_parameters(camgeom, corr_image, hillas_params) @@ -337,9 +306,9 @@ def main(): ) parser.add_argument( - "--output_file", + "--output_dir", "-o", - dest="output_file", + dest="output_dir", type=str, default="./data", help="Path to a directory where to save an output corrected DL1 file", @@ -375,14 +344,9 @@ def main(): else: raise ValueError(f"No optics data found for telescope: {telescope.name}") - run_number_match = re.search(r"Run\d+\.\d+", args.input_file) - run_number = run_number_match.group() if run_number_match else "unknown_run" - with open(args.config_file, "r") as file: config = yaml.safe_load(file) - save_cloud_correction_params(config, run_number) - tel_ids = config["mc_tel_ids"] dfs = [] # Initialize an empty list to store DataFrames @@ -392,6 +356,7 @@ def main(): df = process_telescope_data( args.input_file, config, tel_id, camgeom[tel_id], focal_eff[tel_id] ) + dfs.append(df) df_all = pd.concat(dfs, ignore_index=True) @@ -419,11 +384,32 @@ def main(): df_all = df_all.drop(columns=["deviation"], errors="ignore") + 
Path(args.output_dir).mkdir(exist_ok=True, parents=True) + input_file_name = Path(args.input_file).name + output_file_name = input_file_name.replace("dl1_stereo", "dl1_corr") + output_file = f"{args.output_dir}/{output_file_name}" + save_pandas_data_in_table( - df_all, args.output_file, group_name="/events", table_name="parameters" + df_all, output_file, group_name="/events", table_name="parameters" + ) + + subarray_info.to_hdf(output_file) + + correction_params = config.get("cloud_correction", {}) + + log_dir = args.output_dir + log_filename = "cloud_correction.log" + log_filepath = os.path.join(log_dir, log_filename) + + logging.basicConfig( + filename=log_filepath, + level=logging.INFO, + format="%(asctime)s - %(levelname)s - %(message)s", ) - subarray_info.to_hdf(args.output_file) + logging.info(f"Correction parameters: {correction_params}") + logging.info(f"ctapipe version: {ctapipe.__version__}") + logging.info(f"magicctapipe version: {magicctapipe.__version__}") logger.info("\nDone.") From e0ce62dcd7ebb71d289ab7f9f46fb5efe0590e17 Mon Sep 17 00:00:00 2001 From: nzywucka Date: Tue, 1 Oct 2024 09:30:00 +0200 Subject: [PATCH 03/10] Provided changes in check of the available images in the .h5 files, logger improvements, and units changes. --- .../lst1_magic/ls1_magic_cloud_correction.py | 425 ++++++++++++++++++ .../lst1_magic/lst_m1_m2_cloud_correction.py | 2 +- 2 files changed, 426 insertions(+), 1 deletion(-) create mode 100644 magicctapipe/scripts/lst1_magic/ls1_magic_cloud_correction.py diff --git a/magicctapipe/scripts/lst1_magic/ls1_magic_cloud_correction.py b/magicctapipe/scripts/lst1_magic/ls1_magic_cloud_correction.py new file mode 100644 index 00000000..ed6ec8dd --- /dev/null +++ b/magicctapipe/scripts/lst1_magic/ls1_magic_cloud_correction.py @@ -0,0 +1,425 @@ +#!/usr/bin/env python +# coding: utf-8 + +""" +This script corrects LST-1 and MAGIC data for the cloud affection. The script works on DL 1 stereo files with saved images. As output, the script creates DL 1 files with corrected parameters. + +Usage: +$ python lst_m1_m2_cloud_correction.py +--input_file dl1_stereo/dl1_LST-1_MAGIC.Run03265.0040.h5 +(--output_dir dl1_corrected) +(--config_file config.yaml) +""" +import argparse +import logging +import time +from pathlib import Path + +import astropy.units as u +import ctapipe +import numpy as np +import pandas as pd +import tables +import yaml +from astropy.coordinates import AltAz, SkyCoord +from ctapipe.coordinates import TelescopeFrame +from ctapipe.image import ( + concentration_parameters, + hillas_parameters, + leakage_parameters, + timing_parameters, +) +from ctapipe.instrument import SubarrayDescription +from ctapipe.io import read_table + +import magicctapipe +from magicctapipe.io import save_pandas_data_in_table + +logger = logging.getLogger(__name__) +logger.addHandler(logging.StreamHandler()) +logger.setLevel(logging.INFO) + + +def model0(imp, h, zd): + """ + Calculates the geometrical part of the model relating the emission height with the angular distance from the arrival direction + + Parameters + ---------- + imp : astropy.units.quantity.Quantity + Impact + h : astropy.units.quantity.Quantity + Array with heights of each cloud layer a.g.l. 
+ zd : numpy.float64 + Zenith distance in deg + + Returns + ------- + numpy ndarray + Angular distance in units of degree + """ + d = h / np.cos(np.deg2rad(zd)) + return np.arctan((imp / d).to("")).to_value("deg") + + +def model2(imp, h, zd): + """ + Calculates the phenomenological correction to the distances obtained with model0 + + Parameters + ---------- + imp : astropy.units.quantity.Quantity + Impact + h : astropy.units.quantity.Quantity + Array with heights of each cloud layer a.g.l. + zd : numpy.float64 + Zenith distance in deg + + Returns + ------- + numpy ndarray + Angular distance corrected for bias in units of degrees + """ + H0 = 2.2e3 * u.m + bias = 0.877 + 0.015 * ((h + H0) / (7.0e3 * u.m)) + return bias * model0(imp, h, zd) + + +def trans_height(x, Hc, dHc, trans): + """ + Calculates transmission from a geometrically broad cloud at a set of heights. + Cloud is assumed to be homegeneous. + + Parameters + ---------- + x : astropy.units.quantity.Quantity + Array with heights of each cloud layer a.g.l. + Hc : astropy.units.quantity.Quantity + Height of the base of the cloud a.g.l. + dHc : astropy.units.quantity.Quantity + Cloud thickness + trans : numpy.float64 + Transmission of the cloud + + Returns + ------- + numpy.ndarray + Cloud transmission + """ + t = pow(trans, ((x - Hc) / dHc).to_value("")) + t = np.where(x < Hc, 1, t) + t = np.where(x > Hc + dHc, trans, t) + return t + + +def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): + """ + Corrects LST-1 and MAGIC data affected by a cloud presence + + Parameters + ---------- + input_file : str + Path to an input .h5 DL1 file + config : dict + Configuration for the LST-1 + MAGIC analysis + tel_id : numpy.int16 + LST-1 and MAGIC telescope ids + camgeom : ctapipe.instrument.camera.geometry.CameraGeometry + An instance of the CameraGeometry class containing information about the + camera's configuration, including pixel type, number of pixels, rotation + angles, and the reference frame. + focal_eff : astropy.units.quantity.Quantity + Effective focal length + + Returns + ------- + pandas.core.frame.DataFrame + Data frame of corrected DL1 parameters + """ + + logger.info(f"\nChecking available image for telescope with ID: {tel_id}") + assigned_tel_ids = config["mc_tel_ids"] + + correction_params = config.get("cloud_correction", {}) + Hc = u.Quantity(correction_params.get("base_height")) + dHc = u.Quantity(correction_params.get("thickness")) + trans0 = correction_params.get("vertical_transmission") + nlayers = correction_params.get("number_of_layers") + + all_params_list = [] + + dl1_params = read_table(input_file, "/events/parameters") + + image_node_path = "/events/dl1/image_" + str(tel_id) + + try: + dl1_images = read_table(input_file, image_node_path) + logger.info(f"Found images for telescope with ID {tel_id}. Processing...") + except tables.NoSuchNodeError: + logger.info(f"No image for telescope with ID {tel_id}. 
Skipping.") + return None + + m2deg = np.rad2deg(1) / focal_eff * u.degree + + inds = np.where( + np.logical_and(dl1_params["intensity"] > 0.0, dl1_params["tel_id"] == tel_id) + )[0] + for index in inds: + event_id_lst = dl1_params["event_id_lst"][index] + obs_id_lst = dl1_params["obs_id_lst"][index] + event_id = dl1_params["event_id"][index] + obs_id = dl1_params["obs_id"][index] + event_id_magic = dl1_params["event_id_magic"][index] + obs_id_magic = dl1_params["obs_id_magic"][index] + timestamp = dl1_params["timestamp"][index] + multiplicity = dl1_params["multiplicity"][index] + combo_type = dl1_params["combo_type"][index] + + if assigned_tel_ids["LST-1"] == tel_id: + event_id_image, obs_id_image = event_id_lst, obs_id_lst + else: + event_id_image, obs_id_image = event_id_magic, obs_id_magic + + pointing_az = dl1_params["pointing_az"][index] + pointing_alt = dl1_params["pointing_alt"][index] + zenith = 90.0 - np.rad2deg(pointing_alt) + time_diff = dl1_params["time_diff"][index] + n_islands = dl1_params["n_islands"][index] + signal_pixels = dl1_params["n_pixels"][index] + + trans = trans0 ** (1 / np.cos(np.deg2rad(zenith))) + Hcl = np.linspace(Hc, Hc + dHc, nlayers) # position of each layer + transl = trans_height(Hcl, Hc, dHc, trans) # transmissions of each layer + transl = np.append(transl, transl[-1]) + + alt_rad = np.deg2rad(dl1_params["alt"][index]) + az_rad = np.deg2rad(dl1_params["az"][index]) + + impact = dl1_params["impact"][index] * u.m + psi = dl1_params["psi"][index] * u.deg + cog_x = (dl1_params["x"][index] * m2deg).value * u.deg + cog_y = (dl1_params["y"][index] * m2deg).value * u.deg + + # Source position + reco_pos = SkyCoord(alt=alt_rad * u.rad, az=az_rad * u.rad, frame=AltAz()) + telescope_pointing = SkyCoord( + alt=pointing_alt * u.rad, + az=pointing_az * u.rad, + frame=AltAz(), + ) + + tel_frame = TelescopeFrame(telescope_pointing=telescope_pointing) + tel = reco_pos.transform_to(tel_frame) + + src_x = tel.fov_lat + src_y = tel.fov_lon + + # Transform to Engineering camera + src_x, src_y = -src_y, -src_x + cog_x, cog_y = -cog_y, -cog_x + + pix_x_tel = (camgeom.pix_x * m2deg).to(u.deg) + pix_y_tel = (camgeom.pix_y * m2deg).to(u.deg) + + distance = np.abs( + (pix_y_tel - src_y) * np.cos(psi) + (pix_x_tel - src_x) * np.sin(psi) + ) + + d2_cog_src = (cog_x - src_x) ** 2 + (cog_y - src_y) ** 2 + d2_cog_pix = (cog_x - pix_x_tel) ** 2 + (cog_y - pix_y_tel) ** 2 + d2_src_pix = (src_x - pix_x_tel) ** 2 + (src_y - pix_y_tel) ** 2 + + distance[d2_cog_pix > d2_cog_src + d2_src_pix] = 0 + dist_corr_layer = model2(impact, Hcl, zenith) * u.deg + + ilayer = np.digitize(distance, dist_corr_layer) + trans_pixels = transl[ilayer] + + inds_img = np.where( + (dl1_images["event_id"] == event_id_image) + & (dl1_images["tel_id"] == tel_id) + & (dl1_images["obs_id"] == obs_id_image) + )[0] + + if len(inds_img) == 0: + raise ValueError("Error: 'inds_img' list is empty!") + index_img = inds_img[0] + image = dl1_images["image"][index_img] + cleanmask = dl1_images["image_mask"][index_img] + peak_time = dl1_images["peak_time"][index_img] + image /= trans_pixels + corr_image = image.copy() + + clean_image = corr_image[cleanmask] + clean_camgeom = camgeom[cleanmask] + + hillas_params = hillas_parameters(clean_camgeom, clean_image) + timing_params = timing_parameters( + clean_camgeom, clean_image, peak_time[cleanmask], hillas_params + ) + leakage_params = leakage_parameters(camgeom, corr_image, cleanmask) + if assigned_tel_ids["LST-1"] == tel_id: + conc_params = concentration_parameters( + 
clean_camgeom, clean_image, hillas_params + ) # For LST-1 we compute concentration from the cleaned image and for MAGIC from the full image to reproduce the current behaviour in the standard code + else: + conc_params = concentration_parameters(camgeom, corr_image, hillas_params) + + event_params = { + **hillas_params, + **timing_params, + **leakage_params, + } + + prefixed_conc_params = { + f"concentration_{key}": value for key, value in conc_params.items() + } + event_params.update(prefixed_conc_params) + + event_info_dict = { + "obs_id": obs_id, + "event_id": event_id, + "tel_id": tel_id, + "pointing_alt": pointing_alt, + "pointing_az": pointing_az, + "time_diff": time_diff, + "n_pixels": signal_pixels, + "n_islands": n_islands, + "event_id_lst": event_id_lst, + "obs_id_lst": obs_id_lst, + "event_id_magic": event_id_magic, + "obs_id_magic": obs_id_magic, + "combo_type": combo_type, + "timestamp": timestamp, + "multiplicity": multiplicity, + } + event_params.update(event_info_dict) + + all_params_list.append(event_params) + + df = pd.DataFrame(all_params_list) + return df + + +def main(): + """Main function.""" + start_time = time.time() + parser = argparse.ArgumentParser() + + parser.add_argument( + "--input_file", + "-i", + dest="input_file", + type=str, + required=True, + help="Path to an input .h5 DL1 data file", + ) + + parser.add_argument( + "--output_dir", + "-o", + dest="output_dir", + type=str, + default="./data", + help="Path to a directory where to save an output corrected DL1 file", + ) + + parser.add_argument( + "--config_file", + "-c", + dest="config_file", + type=str, + default="./resources/config.yaml", + help="Path to a configuration file", + ) + + args = parser.parse_args() + + subarray_info = SubarrayDescription.from_hdf(args.input_file) + + tel_descriptions = subarray_info.tel + camgeom = {} + for telid, telescope in tel_descriptions.items(): + camgeom[telid] = telescope.camera.geometry + + optics_table = read_table( + args.input_file, "/configuration/instrument/telescope/optics" + ) + focal_eff = {} + + for telid, telescope in tel_descriptions.items(): + optics_row = optics_table[optics_table["optics_name"] == telescope.name] + if len(optics_row) > 0: + focal_eff[telid] = optics_row["effective_focal_length"][0] * u.m + else: + raise ValueError(f"No optics data found for telescope: {telescope.name}") + + with open(args.config_file, "r") as file: + config = yaml.safe_load(file) + + tel_ids = config["mc_tel_ids"] + + dfs = [] # Initialize an empty list to store DataFrames + + for tel_name, tel_id in tel_ids.items(): + if tel_id != 0: # Only process telescopes that have a non-zero ID + df = process_telescope_data( + args.input_file, config, tel_id, camgeom[tel_id], focal_eff[tel_id] + ) + + dfs.append(df) + + df_all = pd.concat(dfs, ignore_index=True) + + columns_to_convert = [ + "x", + "y", + "r", + "phi", + "length", + "length_uncertainty", + "width", + "width_uncertainty", + "psi", + "slope", + ] + + for col in columns_to_convert: + df_all[col] = df_all[col].apply( + lambda x: x.value if isinstance(x, u.Quantity) else x + ) + + df_all["psi"] = np.degrees(df_all["psi"]) + df_all["phi"] = np.degrees(df_all["phi"]) + + for col in columns_to_convert: + df_all[col] = pd.to_numeric(df_all[col], errors="coerce") + + df_all = df_all.drop(columns=["deviation"], errors="ignore") + + Path(args.output_dir).mkdir(exist_ok=True, parents=True) + input_file_name = Path(args.input_file).name + output_file_name = input_file_name.replace("dl1_stereo", "dl1_corr") + output_file = 
f"{args.output_dir}/{output_file_name}" + + save_pandas_data_in_table( + df_all, output_file, group_name="/events", table_name="parameters" + ) + + subarray_info.to_hdf(output_file) + + correction_params = config.get("cloud_correction", {}) + + logger.info(f"Correction parameters: {correction_params}") + logger.info(f"ctapipe version: {ctapipe.__version__}") + logger.info(f"magicctapipe version: {magicctapipe.__version__}") + + process_time = time.time() - start_time + logger.info(f"\nProcess time: {process_time:.0f} [sec]\n") + logger.info(f"\nOutput file: {output_file}") + + logger.info("\nDone.") + + +if __name__ == "__main__": + main() diff --git a/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py b/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py index 9ebaa10a..cd81d859 100644 --- a/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py +++ b/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py @@ -252,7 +252,7 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): if assigned_tel_ids["LST-1"] == tel_id: conc_params = concentration_parameters( clean_camgeom, clean_image, hillas_params - ) # "For LST-1 we compute concentration from the cleaned image and for MAGIC from the full image to reproduce the current behaviour in the standard code/home/natti/CTA/lodz_cluster/lstchain_10_11_test/scripts/new/outfiles/dl1_coincidence_stereo_0.75-0.85_03955 + ) # For LST-1 we compute concentration from the cleaned image and for MAGIC from the full image to reproduce the current behaviour in the standard code else: conc_params = concentration_parameters(camgeom, corr_image, hillas_params) From bd16f7302701bffb3928b2d20a086bf84d092c5d Mon Sep 17 00:00:00 2001 From: nzywucka Date: Tue, 29 Oct 2024 21:41:06 +0100 Subject: [PATCH 04/10] config.yaml: the cloud_correction section has been changed extracted_lidar_reports_test.csv: file with the cloud parameters from LIDAR lst1_magic_cloud_correction.py: interpolation function has been added to the code --- magicctapipe/resources/config.yaml | 8 +- .../extracted_lidar_reports_test.csv | 7 + .../lst1_magic/lst1_magic_cloud_correction.py | 543 ++++++++++++++++++ 3 files changed, 554 insertions(+), 4 deletions(-) create mode 100644 magicctapipe/scripts/lst1_magic/extracted_lidar_reports_test.csv create mode 100644 magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py diff --git a/magicctapipe/resources/config.yaml b/magicctapipe/resources/config.yaml index 8b4e14c6..c0c65ff3 100644 --- a/magicctapipe/resources/config.yaml +++ b/magicctapipe/resources/config.yaml @@ -102,7 +102,7 @@ event_coincidence: stereo_reco: - quality_cuts: "(intensity > 50) & (width > 0)" + quality_cuts: "(intensity > 50) & (intensity > 80 or tel_id > 1) & (width > 0)" #"(intensity > 50) & (width > 0)" theta_uplim: "6 arcmin" @@ -280,8 +280,8 @@ dl2_to_dl3: cloud_correction: specify_cloud: yes - base_height: "5900. m" - thickness: "1320. 
m" - vertical_transmission: 1.0 + max_gap_lidar_shots: "900 s" + lidar_report_file: "extracted_lidar_reports_test.csv" number_of_layers: 10 + diff --git a/magicctapipe/scripts/lst1_magic/extracted_lidar_reports_test.csv b/magicctapipe/scripts/lst1_magic/extracted_lidar_reports_test.csv new file mode 100644 index 00000000..75f70bd7 --- /dev/null +++ b/magicctapipe/scripts/lst1_magic/extracted_lidar_reports_test.csv @@ -0,0 +1,7 @@ +timestamp,base_height,top_height,transmission,lidar_zenith +ISO 8601,meters,meters,1,degrees +2021-03-11 21:38:00.006,6751.55,9367.46,0.619646,23.21 +2021-03-11 21:42:05.017,7379.25,10448.1,0.433587,23.25 +2021-03-11 21:46:01.023,6542.9,9804.14,0.383883,23.35 +2021-03-11 21:50:07.009,7116.1,10586.2,0.456444,23.44 +2021-03-11 21:54:03.995,7022.38,9500.14,0.417058,23.50 \ No newline at end of file diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py b/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py new file mode 100644 index 00000000..ea4ba8c9 --- /dev/null +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py @@ -0,0 +1,543 @@ +#!/usr/bin/env python +# coding: utf-8 + +""" +This script corrects LST-1 and MAGIC data for the cloud affection. The script works on DL 1 stereo files with saved images. As output, the script creates DL 1 files with corrected parameters. + +Usage: +$ python lst_m1_m2_cloud_correction.py +--input_file dl1_stereo/dl1_LST-1_MAGIC.Run03265.0040.h5 +(--output_dir dl1_corrected) +(--config_file config.yaml) +""" +import argparse +import logging +import os +import time +from pathlib import Path + +import astropy.units as u +import ctapipe +import numpy as np +import pandas as pd +import tables +import yaml +from astropy.coordinates import AltAz, SkyCoord +from astropy.time import Time +from ctapipe.coordinates import TelescopeFrame +from ctapipe.image import ( + concentration_parameters, + hillas_parameters, + leakage_parameters, + timing_parameters, +) +from ctapipe.instrument import SubarrayDescription +from ctapipe.io import read_table +from scipy.interpolate import interp1d + +import magicctapipe +from magicctapipe.io import save_pandas_data_in_table + +logger = logging.getLogger(__name__) +logger.addHandler(logging.StreamHandler()) +logger.setLevel(logging.INFO) + + +def model0(imp, h, zd): + """ + Calculates the geometrical part of the model relating the emission height with the angular distance from the arrival direction + + Parameters + ---------- + imp : astropy.units.quantity.Quantity + Impact + h : astropy.units.quantity.Quantity + Array with heights of each cloud layer a.g.l. + zd : numpy.float64 + Zenith distance in deg + + Returns + ------- + numpy ndarray + Angular distance in units of degree + """ + d = h / np.cos(np.deg2rad(zd)) + return np.arctan((imp / d).to("")).to_value("deg") + + +def model2(imp, h, zd): + """ + Calculates the phenomenological correction to the distances obtained with model0 + + Parameters + ---------- + imp : astropy.units.quantity.Quantity + Impact + h : astropy.units.quantity.Quantity + Array with heights of each cloud layer a.g.l. + zd : numpy.float64 + Zenith distance in deg + + Returns + ------- + numpy ndarray + Angular distance corrected for bias in units of degrees + """ + H0 = 2.2e3 * u.m + bias = 0.877 + 0.015 * ((h + H0) / (7.0e3 * u.m)) + return bias * model0(imp, h, zd) + + +def trans_height(x, Hc, dHc, trans): + """ + Calculates transmission from a geometrically broad cloud at a set of heights. + Cloud is assumed to be homegeneous. 
+ + Parameters + ---------- + x : astropy.units.quantity.Quantity + Array with heights of each cloud layer a.g.l. + Hc : astropy.units.quantity.Quantity + Height of the base of the cloud a.g.l. + dHc : astropy.units.quantity.Quantity + Cloud thickness + trans : numpy.float64 + Transmission of the cloud + + Returns + ------- + numpy.ndarray + Cloud transmission + """ + t = pow(trans, ((x - Hc) / dHc).to_value("")) + t = np.where(x < Hc, 1, t) + t = np.where(x > Hc + dHc, trans, t) + return t + + +def lidar_cloud_interpolation( + mean_subrun_timestamp, max_gap_lidar_shots, lidar_report_file +): + """ + Retrieves or interpolates LIDAR cloud parameters based on the closest timestamps to an input mean timestamp of the processed subrun. + + Parameters + ----------- + mean_subrun_timestamp : int or float + The target timestamp (format: unix). + + max_gap_lidar_shots : int or float + Maximum allowed time gap for interpolation (in seconds). + + lidar_report_file : str + Path to the CSV file containing LIDAR laser reports with columns: + - "timestamp" (format: ISO 8601) + - "base_height", "top_height", "transmission", "lidar_zenith" + + Returns + -------- + tuple or None A tuple containing interpolated or nearest values for: + - base_height (float): The base height of the cloud layer in meters. + - top_height (float): The top height of the cloud layer in meters. + - vertical_transmission (float): Transmission factor of the cloud layer adjusted for vertical angle. + + If no nodes are found within the maximum allowed time gap, returns None. + """ + + if not os.path.isfile(lidar_report_file): + raise FileNotFoundError(f"LIDAR report file not found: {lidar_report_file}") + + df = pd.read_csv(lidar_report_file, sep=",", skiprows=[1]) + df["timestamp"] = pd.to_datetime(df["timestamp"], format="ISO8601") + df["unix_timestamp"] = df["timestamp"].astype(np.int64) / 10**9 + df["time_diff"] = df["unix_timestamp"] - mean_subrun_timestamp + + df["vertical_transmission"] = df["transmission"] * np.cos( + np.deg2rad(df["lidar_zenith"]) + ) + + closest_node_before = df[ + (df["time_diff"] < 0) & (np.abs(df["time_diff"]) <= max_gap_lidar_shots) + ].nlargest(1, "time_diff") + closest_node_after = df[ + (df["time_diff"] > 0) & (np.abs(df["time_diff"]) <= max_gap_lidar_shots) + ].nsmallest(1, "time_diff") + + # Check whether the conditions for interpolation are met or not + if not closest_node_before.empty and not closest_node_after.empty: + node_before = closest_node_before.iloc[0] + node_after = closest_node_after.iloc[0] + logger.info( + f"\nFound suitable interpolation nodes within the allowed temporal gap for timestamp {Time(mean_subrun_timestamp, format='unix').iso}" + f"\nUsing folowing interpolation nodes:\n" + f"\n******************** Node before ******************* \n{node_before}" + f"\n******************** Node after ******************** \n{node_after}" + f"\n\nInterpolation results:" + ) + + interp_values = {} + for param in ["base_height", "top_height", "vertical_transmission"]: + interp_func = interp1d( + [node_before["unix_timestamp"], node_after["unix_timestamp"]], + [node_before[param], node_after[param]], + kind="linear", + bounds_error=False, + ) + interp_values[param] = interp_func(mean_subrun_timestamp) + logger.info(f"\t {param}: {interp_values[param]:.4f}") + + return ( + interp_values["base_height"], + interp_values["top_height"], + interp_values["vertical_transmission"], + ) + + # Handle cases where only one node is available + closest_node = ( + closest_node_before if not 
closest_node_before.empty else closest_node_after + ) + + if closest_node is not None and not closest_node.empty: + closest = closest_node.iloc[0] + logger.info( + f"\nOnly one suitable LIDAR report found for timestamp {Time(mean_subrun_timestamp, format='unix').iso} " + f"within the maximum allowed temporal gap. \nSkipping interpolation. Using nearest node values instead." + f"\n\n{closest}" + ) + return ( + closest["base_height"], + closest["top_height"], + closest["vertical_transmission"], + ) + + logger.info( + f"\nNo node is within the maximum allowed temporal gap for timestamp {Time(mean_subrun_timestamp, format='unix').iso}. Exiting ..." + ) + return None + + +def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): + """ + Corrects LST-1 and MAGIC data affected by a cloud presence + + Parameters + ---------- + input_file : str + Path to an input .h5 DL1 file + config : dict + Configuration for the LST-1 + MAGIC analysis + tel_id : numpy.int16 + LST-1 and MAGIC telescope ids + camgeom : ctapipe.instrument.camera.geometry.CameraGeometry + An instance of the CameraGeometry class containing information about the + camera's configuration, including pixel type, number of pixels, rotation + angles, and the reference frame. + focal_eff : astropy.units.quantity.Quantity + Effective focal length + + Returns + ------- + pandas.core.frame.DataFrame + Data frame of corrected DL1 parameters + """ + + assigned_tel_ids = config["mc_tel_ids"] + + correction_params = config.get("cloud_correction", {}) + max_gap_lidar_shots = u.Quantity( + correction_params.get("max_gap_lidar_shots") + ) # set it to 900 seconds + lidar_report_file = correction_params.get( + "lidar_report_file" + ) # path to the lidar report + nlayers = correction_params.get("number_of_layers") + + all_params_list = [] + + dl1_params = read_table(input_file, "/events/parameters") + + image_node_path = "/events/dl1/image_" + str(tel_id) + + try: + dl1_images = read_table(input_file, image_node_path) + logger.info(f"Found images for telescope with ID {tel_id}. Processing...") + except tables.NoSuchNodeError: + logger.info(f"No image for telescope with ID {tel_id}. 
Skipping.") + return None + + m2deg = np.rad2deg(1) / focal_eff * u.degree + + mean_subrun_timestamp = np.mean(dl1_params["timestamp"]) + mean_subrun_zenith = np.mean(90.0 - np.rad2deg(dl1_params["pointing_alt"])) + + cloud_params = lidar_cloud_interpolation( + mean_subrun_timestamp, max_gap_lidar_shots, lidar_report_file + ) + + Hc = u.Quantity(cloud_params[0], u.m) + dHc = u.Quantity(cloud_params[1] - cloud_params[0], u.m) + incl_trans = u.Quantity(cloud_params[2]) + trans = incl_trans ** ( + 1 / np.cos(np.deg2rad(mean_subrun_zenith)) + ) # vertical transmission + Hcl = np.linspace(Hc, Hc + dHc, nlayers) # position of each layer + transl = trans_height(Hcl, Hc, dHc, trans) # transmissions of each layer + transl = np.append(transl, transl[-1]) + + inds = np.where( + np.logical_and(dl1_params["intensity"] > 0.0, dl1_params["tel_id"] == tel_id) + )[0] + for index in inds: + event_id_lst = dl1_params["event_id_lst"][index] + obs_id_lst = dl1_params["obs_id_lst"][index] + event_id = dl1_params["event_id"][index] + obs_id = dl1_params["obs_id"][index] + event_id_magic = dl1_params["event_id_magic"][index] + obs_id_magic = dl1_params["obs_id_magic"][index] + timestamp = dl1_params["timestamp"][index] + multiplicity = dl1_params["multiplicity"][index] + combo_type = dl1_params["combo_type"][index] + + if assigned_tel_ids["LST-1"] == tel_id: + event_id_image, obs_id_image = event_id_lst, obs_id_lst + else: + event_id_image, obs_id_image = event_id_magic, obs_id_magic + + pointing_az = dl1_params["pointing_az"][index] + pointing_alt = dl1_params["pointing_alt"][index] + time_diff = dl1_params["time_diff"][index] + n_islands = dl1_params["n_islands"][index] + signal_pixels = dl1_params["n_pixels"][index] + + alt_rad = np.deg2rad(dl1_params["alt"][index]) + az_rad = np.deg2rad(dl1_params["az"][index]) + + impact = dl1_params["impact"][index] * u.m + cog_x = (dl1_params["x"][index] * m2deg).value * u.deg + cog_y = (dl1_params["y"][index] * m2deg).value * u.deg + + # Source position + reco_pos = SkyCoord(alt=alt_rad * u.rad, az=az_rad * u.rad, frame=AltAz()) + telescope_pointing = SkyCoord( + alt=pointing_alt * u.rad, + az=pointing_az * u.rad, + frame=AltAz(), + ) + + tel_frame = TelescopeFrame(telescope_pointing=telescope_pointing) + tel = reco_pos.transform_to(tel_frame) + + src_x = tel.fov_lat + src_y = tel.fov_lon + + # Transform to Engineering camera + src_x, src_y = -src_y, -src_x + cog_x, cog_y = -cog_y, -cog_x + + psi = np.arctan2(src_x - cog_x, src_y - cog_y) + + pix_x_tel = (camgeom.pix_x * m2deg).to(u.deg) + pix_y_tel = (camgeom.pix_y * m2deg).to(u.deg) + + distance = np.abs( + (pix_y_tel - src_y) * np.cos(psi) + (pix_x_tel - src_x) * np.sin(psi) + ) + + d2_cog_src = (cog_x - src_x) ** 2 + (cog_y - src_y) ** 2 + d2_cog_pix = (cog_x - pix_x_tel) ** 2 + (cog_y - pix_y_tel) ** 2 + d2_src_pix = (src_x - pix_x_tel) ** 2 + (src_y - pix_y_tel) ** 2 + + distance[d2_cog_pix > d2_cog_src + d2_src_pix] = 0 + dist_corr_layer = model2(impact, Hcl, mean_subrun_zenith) * u.deg + + ilayer = np.digitize(distance, dist_corr_layer) + trans_pixels = transl[ilayer] + + inds_img = np.where( + (dl1_images["event_id"] == event_id_image) + & (dl1_images["tel_id"] == tel_id) + & (dl1_images["obs_id"] == obs_id_image) + )[0] + + if len(inds_img) == 0: + raise ValueError("Error: 'inds_img' list is empty!") + index_img = inds_img[0] + image = dl1_images["image"][index_img] + cleanmask = dl1_images["image_mask"][index_img] + peak_time = dl1_images["peak_time"][index_img] + image /= trans_pixels + corr_image = 
image.copy() + + clean_image = corr_image[cleanmask] + clean_camgeom = camgeom[cleanmask] + + hillas_params = hillas_parameters(clean_camgeom, clean_image) + timing_params = timing_parameters( + clean_camgeom, clean_image, peak_time[cleanmask], hillas_params + ) + leakage_params = leakage_parameters(camgeom, corr_image, cleanmask) + if assigned_tel_ids["LST-1"] == tel_id: + conc_params = concentration_parameters( + clean_camgeom, clean_image, hillas_params + ) # For LST-1 we compute concentration from the cleaned image and for MAGIC from the full image to reproduce the current behaviour in the standard code + else: + conc_params = concentration_parameters(camgeom, corr_image, hillas_params) + + event_params = { + **hillas_params, + **timing_params, + **leakage_params, + } + + prefixed_conc_params = { + f"concentration_{key}": value for key, value in conc_params.items() + } + event_params.update(prefixed_conc_params) + + event_info_dict = { + "obs_id": obs_id, + "event_id": event_id, + "tel_id": tel_id, + "pointing_alt": pointing_alt, + "pointing_az": pointing_az, + "time_diff": time_diff, + "n_pixels": signal_pixels, + "n_islands": n_islands, + "event_id_lst": event_id_lst, + "obs_id_lst": obs_id_lst, + "event_id_magic": event_id_magic, + "obs_id_magic": obs_id_magic, + "combo_type": combo_type, + "timestamp": timestamp, + "multiplicity": multiplicity, + } + event_params.update(event_info_dict) + + all_params_list.append(event_params) + + df = pd.DataFrame(all_params_list) + return df + + +def main(): + """Main function.""" + start_time = time.time() + parser = argparse.ArgumentParser() + + parser.add_argument( + "--input_file", + "-i", + dest="input_file", + type=str, + required=True, + help="Path to an input .h5 DL1 data file", + ) + + parser.add_argument( + "--output_dir", + "-o", + dest="output_dir", + type=str, + default="./data", + help="Path to a directory where to save an output corrected DL1 file", + ) + + parser.add_argument( + "--config_file", + "-c", + dest="config_file", + type=str, + default="./resources/config.yaml", + help="Path to a configuration file", + ) + + args = parser.parse_args() + + subarray_info = SubarrayDescription.from_hdf(args.input_file) + + tel_descriptions = subarray_info.tel + camgeom = {} + for telid, telescope in tel_descriptions.items(): + camgeom[telid] = telescope.camera.geometry + + optics_table = read_table( + args.input_file, "/configuration/instrument/telescope/optics" + ) + focal_eff = {} + + for telid, telescope in tel_descriptions.items(): + optics_row = optics_table[optics_table["optics_name"] == telescope.name] + if len(optics_row) > 0: + focal_eff[telid] = optics_row["effective_focal_length"][0] * u.m + else: + raise ValueError(f"No optics data found for telescope: {telescope.name}") + + with open(args.config_file, "r") as file: + config = yaml.safe_load(file) + + tel_ids = config["mc_tel_ids"] + + dfs = [] # Initialize an empty list to store DataFrames + + for tel_name, tel_id in tel_ids.items(): + if tel_id != 0: # Only process telescopes that have a non-zero ID + df = process_telescope_data( + args.input_file, config, tel_id, camgeom[tel_id], focal_eff[tel_id] + ) + + dfs.append(df) + + df_all = pd.concat(dfs, ignore_index=True) + + columns_to_convert = [ + "x", + "y", + "r", + "phi", + "length", + "length_uncertainty", + "width", + "width_uncertainty", + "psi", + "slope", + ] + + for col in columns_to_convert: + df_all[col] = df_all[col].apply( + lambda x: x.value if isinstance(x, u.Quantity) else x + ) + + df_all["psi"] = 
np.degrees(df_all["psi"]) + df_all["phi"] = np.degrees(df_all["phi"]) + + for col in columns_to_convert: + df_all[col] = pd.to_numeric(df_all[col], errors="coerce") + + df_all = df_all.drop(columns=["deviation"], errors="ignore") + + Path(args.output_dir).mkdir(exist_ok=True, parents=True) + input_file_name = Path(args.input_file).name + output_file_name = input_file_name.replace("dl1_stereo", "dl1_corr") + output_file = f"{args.output_dir}/{output_file_name}" + + save_pandas_data_in_table( + df_all, output_file, group_name="/events", table_name="parameters" + ) + + subarray_info.to_hdf(output_file) + + correction_params = config.get("cloud_correction", {}) + + logger.info(f"Correction parameters: {correction_params}") + logger.info(f"ctapipe version: {ctapipe.__version__}") + logger.info(f"magicctapipe version: {magicctapipe.__version__}") + + process_time = time.time() - start_time + logger.info(f"\nProcess time: {process_time:.0f} [sec]\n") + logger.info(f"\nOutput file: {output_file}") + + logger.info("\nDone.") + + +if __name__ == "__main__": + main() From edb06c7e5db981dbd3be91a9c78d24bd76b04674 Mon Sep 17 00:00:00 2001 From: nzywucka Date: Mon, 4 Nov 2024 10:41:32 +0100 Subject: [PATCH 05/10] Rename file to correct name --- .../lst1_magic/ls1_magic_cloud_correction.py | 425 ------------------ .../lst1_magic/lst1_magic_cloud_correction.py | 142 +----- 2 files changed, 12 insertions(+), 555 deletions(-) delete mode 100644 magicctapipe/scripts/lst1_magic/ls1_magic_cloud_correction.py diff --git a/magicctapipe/scripts/lst1_magic/ls1_magic_cloud_correction.py b/magicctapipe/scripts/lst1_magic/ls1_magic_cloud_correction.py deleted file mode 100644 index ed6ec8dd..00000000 --- a/magicctapipe/scripts/lst1_magic/ls1_magic_cloud_correction.py +++ /dev/null @@ -1,425 +0,0 @@ -#!/usr/bin/env python -# coding: utf-8 - -""" -This script corrects LST-1 and MAGIC data for the cloud affection. The script works on DL 1 stereo files with saved images. As output, the script creates DL 1 files with corrected parameters. - -Usage: -$ python lst_m1_m2_cloud_correction.py ---input_file dl1_stereo/dl1_LST-1_MAGIC.Run03265.0040.h5 -(--output_dir dl1_corrected) -(--config_file config.yaml) -""" -import argparse -import logging -import time -from pathlib import Path - -import astropy.units as u -import ctapipe -import numpy as np -import pandas as pd -import tables -import yaml -from astropy.coordinates import AltAz, SkyCoord -from ctapipe.coordinates import TelescopeFrame -from ctapipe.image import ( - concentration_parameters, - hillas_parameters, - leakage_parameters, - timing_parameters, -) -from ctapipe.instrument import SubarrayDescription -from ctapipe.io import read_table - -import magicctapipe -from magicctapipe.io import save_pandas_data_in_table - -logger = logging.getLogger(__name__) -logger.addHandler(logging.StreamHandler()) -logger.setLevel(logging.INFO) - - -def model0(imp, h, zd): - """ - Calculates the geometrical part of the model relating the emission height with the angular distance from the arrival direction - - Parameters - ---------- - imp : astropy.units.quantity.Quantity - Impact - h : astropy.units.quantity.Quantity - Array with heights of each cloud layer a.g.l. 
- zd : numpy.float64 - Zenith distance in deg - - Returns - ------- - numpy ndarray - Angular distance in units of degree - """ - d = h / np.cos(np.deg2rad(zd)) - return np.arctan((imp / d).to("")).to_value("deg") - - -def model2(imp, h, zd): - """ - Calculates the phenomenological correction to the distances obtained with model0 - - Parameters - ---------- - imp : astropy.units.quantity.Quantity - Impact - h : astropy.units.quantity.Quantity - Array with heights of each cloud layer a.g.l. - zd : numpy.float64 - Zenith distance in deg - - Returns - ------- - numpy ndarray - Angular distance corrected for bias in units of degrees - """ - H0 = 2.2e3 * u.m - bias = 0.877 + 0.015 * ((h + H0) / (7.0e3 * u.m)) - return bias * model0(imp, h, zd) - - -def trans_height(x, Hc, dHc, trans): - """ - Calculates transmission from a geometrically broad cloud at a set of heights. - Cloud is assumed to be homegeneous. - - Parameters - ---------- - x : astropy.units.quantity.Quantity - Array with heights of each cloud layer a.g.l. - Hc : astropy.units.quantity.Quantity - Height of the base of the cloud a.g.l. - dHc : astropy.units.quantity.Quantity - Cloud thickness - trans : numpy.float64 - Transmission of the cloud - - Returns - ------- - numpy.ndarray - Cloud transmission - """ - t = pow(trans, ((x - Hc) / dHc).to_value("")) - t = np.where(x < Hc, 1, t) - t = np.where(x > Hc + dHc, trans, t) - return t - - -def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): - """ - Corrects LST-1 and MAGIC data affected by a cloud presence - - Parameters - ---------- - input_file : str - Path to an input .h5 DL1 file - config : dict - Configuration for the LST-1 + MAGIC analysis - tel_id : numpy.int16 - LST-1 and MAGIC telescope ids - camgeom : ctapipe.instrument.camera.geometry.CameraGeometry - An instance of the CameraGeometry class containing information about the - camera's configuration, including pixel type, number of pixels, rotation - angles, and the reference frame. - focal_eff : astropy.units.quantity.Quantity - Effective focal length - - Returns - ------- - pandas.core.frame.DataFrame - Data frame of corrected DL1 parameters - """ - - logger.info(f"\nChecking available image for telescope with ID: {tel_id}") - assigned_tel_ids = config["mc_tel_ids"] - - correction_params = config.get("cloud_correction", {}) - Hc = u.Quantity(correction_params.get("base_height")) - dHc = u.Quantity(correction_params.get("thickness")) - trans0 = correction_params.get("vertical_transmission") - nlayers = correction_params.get("number_of_layers") - - all_params_list = [] - - dl1_params = read_table(input_file, "/events/parameters") - - image_node_path = "/events/dl1/image_" + str(tel_id) - - try: - dl1_images = read_table(input_file, image_node_path) - logger.info(f"Found images for telescope with ID {tel_id}. Processing...") - except tables.NoSuchNodeError: - logger.info(f"No image for telescope with ID {tel_id}. 
Skipping.") - return None - - m2deg = np.rad2deg(1) / focal_eff * u.degree - - inds = np.where( - np.logical_and(dl1_params["intensity"] > 0.0, dl1_params["tel_id"] == tel_id) - )[0] - for index in inds: - event_id_lst = dl1_params["event_id_lst"][index] - obs_id_lst = dl1_params["obs_id_lst"][index] - event_id = dl1_params["event_id"][index] - obs_id = dl1_params["obs_id"][index] - event_id_magic = dl1_params["event_id_magic"][index] - obs_id_magic = dl1_params["obs_id_magic"][index] - timestamp = dl1_params["timestamp"][index] - multiplicity = dl1_params["multiplicity"][index] - combo_type = dl1_params["combo_type"][index] - - if assigned_tel_ids["LST-1"] == tel_id: - event_id_image, obs_id_image = event_id_lst, obs_id_lst - else: - event_id_image, obs_id_image = event_id_magic, obs_id_magic - - pointing_az = dl1_params["pointing_az"][index] - pointing_alt = dl1_params["pointing_alt"][index] - zenith = 90.0 - np.rad2deg(pointing_alt) - time_diff = dl1_params["time_diff"][index] - n_islands = dl1_params["n_islands"][index] - signal_pixels = dl1_params["n_pixels"][index] - - trans = trans0 ** (1 / np.cos(np.deg2rad(zenith))) - Hcl = np.linspace(Hc, Hc + dHc, nlayers) # position of each layer - transl = trans_height(Hcl, Hc, dHc, trans) # transmissions of each layer - transl = np.append(transl, transl[-1]) - - alt_rad = np.deg2rad(dl1_params["alt"][index]) - az_rad = np.deg2rad(dl1_params["az"][index]) - - impact = dl1_params["impact"][index] * u.m - psi = dl1_params["psi"][index] * u.deg - cog_x = (dl1_params["x"][index] * m2deg).value * u.deg - cog_y = (dl1_params["y"][index] * m2deg).value * u.deg - - # Source position - reco_pos = SkyCoord(alt=alt_rad * u.rad, az=az_rad * u.rad, frame=AltAz()) - telescope_pointing = SkyCoord( - alt=pointing_alt * u.rad, - az=pointing_az * u.rad, - frame=AltAz(), - ) - - tel_frame = TelescopeFrame(telescope_pointing=telescope_pointing) - tel = reco_pos.transform_to(tel_frame) - - src_x = tel.fov_lat - src_y = tel.fov_lon - - # Transform to Engineering camera - src_x, src_y = -src_y, -src_x - cog_x, cog_y = -cog_y, -cog_x - - pix_x_tel = (camgeom.pix_x * m2deg).to(u.deg) - pix_y_tel = (camgeom.pix_y * m2deg).to(u.deg) - - distance = np.abs( - (pix_y_tel - src_y) * np.cos(psi) + (pix_x_tel - src_x) * np.sin(psi) - ) - - d2_cog_src = (cog_x - src_x) ** 2 + (cog_y - src_y) ** 2 - d2_cog_pix = (cog_x - pix_x_tel) ** 2 + (cog_y - pix_y_tel) ** 2 - d2_src_pix = (src_x - pix_x_tel) ** 2 + (src_y - pix_y_tel) ** 2 - - distance[d2_cog_pix > d2_cog_src + d2_src_pix] = 0 - dist_corr_layer = model2(impact, Hcl, zenith) * u.deg - - ilayer = np.digitize(distance, dist_corr_layer) - trans_pixels = transl[ilayer] - - inds_img = np.where( - (dl1_images["event_id"] == event_id_image) - & (dl1_images["tel_id"] == tel_id) - & (dl1_images["obs_id"] == obs_id_image) - )[0] - - if len(inds_img) == 0: - raise ValueError("Error: 'inds_img' list is empty!") - index_img = inds_img[0] - image = dl1_images["image"][index_img] - cleanmask = dl1_images["image_mask"][index_img] - peak_time = dl1_images["peak_time"][index_img] - image /= trans_pixels - corr_image = image.copy() - - clean_image = corr_image[cleanmask] - clean_camgeom = camgeom[cleanmask] - - hillas_params = hillas_parameters(clean_camgeom, clean_image) - timing_params = timing_parameters( - clean_camgeom, clean_image, peak_time[cleanmask], hillas_params - ) - leakage_params = leakage_parameters(camgeom, corr_image, cleanmask) - if assigned_tel_ids["LST-1"] == tel_id: - conc_params = concentration_parameters( - 
clean_camgeom, clean_image, hillas_params - ) # For LST-1 we compute concentration from the cleaned image and for MAGIC from the full image to reproduce the current behaviour in the standard code - else: - conc_params = concentration_parameters(camgeom, corr_image, hillas_params) - - event_params = { - **hillas_params, - **timing_params, - **leakage_params, - } - - prefixed_conc_params = { - f"concentration_{key}": value for key, value in conc_params.items() - } - event_params.update(prefixed_conc_params) - - event_info_dict = { - "obs_id": obs_id, - "event_id": event_id, - "tel_id": tel_id, - "pointing_alt": pointing_alt, - "pointing_az": pointing_az, - "time_diff": time_diff, - "n_pixels": signal_pixels, - "n_islands": n_islands, - "event_id_lst": event_id_lst, - "obs_id_lst": obs_id_lst, - "event_id_magic": event_id_magic, - "obs_id_magic": obs_id_magic, - "combo_type": combo_type, - "timestamp": timestamp, - "multiplicity": multiplicity, - } - event_params.update(event_info_dict) - - all_params_list.append(event_params) - - df = pd.DataFrame(all_params_list) - return df - - -def main(): - """Main function.""" - start_time = time.time() - parser = argparse.ArgumentParser() - - parser.add_argument( - "--input_file", - "-i", - dest="input_file", - type=str, - required=True, - help="Path to an input .h5 DL1 data file", - ) - - parser.add_argument( - "--output_dir", - "-o", - dest="output_dir", - type=str, - default="./data", - help="Path to a directory where to save an output corrected DL1 file", - ) - - parser.add_argument( - "--config_file", - "-c", - dest="config_file", - type=str, - default="./resources/config.yaml", - help="Path to a configuration file", - ) - - args = parser.parse_args() - - subarray_info = SubarrayDescription.from_hdf(args.input_file) - - tel_descriptions = subarray_info.tel - camgeom = {} - for telid, telescope in tel_descriptions.items(): - camgeom[telid] = telescope.camera.geometry - - optics_table = read_table( - args.input_file, "/configuration/instrument/telescope/optics" - ) - focal_eff = {} - - for telid, telescope in tel_descriptions.items(): - optics_row = optics_table[optics_table["optics_name"] == telescope.name] - if len(optics_row) > 0: - focal_eff[telid] = optics_row["effective_focal_length"][0] * u.m - else: - raise ValueError(f"No optics data found for telescope: {telescope.name}") - - with open(args.config_file, "r") as file: - config = yaml.safe_load(file) - - tel_ids = config["mc_tel_ids"] - - dfs = [] # Initialize an empty list to store DataFrames - - for tel_name, tel_id in tel_ids.items(): - if tel_id != 0: # Only process telescopes that have a non-zero ID - df = process_telescope_data( - args.input_file, config, tel_id, camgeom[tel_id], focal_eff[tel_id] - ) - - dfs.append(df) - - df_all = pd.concat(dfs, ignore_index=True) - - columns_to_convert = [ - "x", - "y", - "r", - "phi", - "length", - "length_uncertainty", - "width", - "width_uncertainty", - "psi", - "slope", - ] - - for col in columns_to_convert: - df_all[col] = df_all[col].apply( - lambda x: x.value if isinstance(x, u.Quantity) else x - ) - - df_all["psi"] = np.degrees(df_all["psi"]) - df_all["phi"] = np.degrees(df_all["phi"]) - - for col in columns_to_convert: - df_all[col] = pd.to_numeric(df_all[col], errors="coerce") - - df_all = df_all.drop(columns=["deviation"], errors="ignore") - - Path(args.output_dir).mkdir(exist_ok=True, parents=True) - input_file_name = Path(args.input_file).name - output_file_name = input_file_name.replace("dl1_stereo", "dl1_corr") - output_file = 
f"{args.output_dir}/{output_file_name}" - - save_pandas_data_in_table( - df_all, output_file, group_name="/events", table_name="parameters" - ) - - subarray_info.to_hdf(output_file) - - correction_params = config.get("cloud_correction", {}) - - logger.info(f"Correction parameters: {correction_params}") - logger.info(f"ctapipe version: {ctapipe.__version__}") - logger.info(f"magicctapipe version: {magicctapipe.__version__}") - - process_time = time.time() - start_time - logger.info(f"\nProcess time: {process_time:.0f} [sec]\n") - logger.info(f"\nOutput file: {output_file}") - - logger.info("\nDone.") - - -if __name__ == "__main__": - main() diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py b/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py index ea4ba8c9..ed6ec8dd 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py @@ -12,7 +12,6 @@ """ import argparse import logging -import os import time from pathlib import Path @@ -23,7 +22,6 @@ import tables import yaml from astropy.coordinates import AltAz, SkyCoord -from astropy.time import Time from ctapipe.coordinates import TelescopeFrame from ctapipe.image import ( concentration_parameters, @@ -33,7 +31,6 @@ ) from ctapipe.instrument import SubarrayDescription from ctapipe.io import read_table -from scipy.interpolate import interp1d import magicctapipe from magicctapipe.io import save_pandas_data_in_table @@ -115,107 +112,6 @@ def trans_height(x, Hc, dHc, trans): return t -def lidar_cloud_interpolation( - mean_subrun_timestamp, max_gap_lidar_shots, lidar_report_file -): - """ - Retrieves or interpolates LIDAR cloud parameters based on the closest timestamps to an input mean timestamp of the processed subrun. - - Parameters - ----------- - mean_subrun_timestamp : int or float - The target timestamp (format: unix). - - max_gap_lidar_shots : int or float - Maximum allowed time gap for interpolation (in seconds). - - lidar_report_file : str - Path to the CSV file containing LIDAR laser reports with columns: - - "timestamp" (format: ISO 8601) - - "base_height", "top_height", "transmission", "lidar_zenith" - - Returns - -------- - tuple or None A tuple containing interpolated or nearest values for: - - base_height (float): The base height of the cloud layer in meters. - - top_height (float): The top height of the cloud layer in meters. - - vertical_transmission (float): Transmission factor of the cloud layer adjusted for vertical angle. - - If no nodes are found within the maximum allowed time gap, returns None. 
- """ - - if not os.path.isfile(lidar_report_file): - raise FileNotFoundError(f"LIDAR report file not found: {lidar_report_file}") - - df = pd.read_csv(lidar_report_file, sep=",", skiprows=[1]) - df["timestamp"] = pd.to_datetime(df["timestamp"], format="ISO8601") - df["unix_timestamp"] = df["timestamp"].astype(np.int64) / 10**9 - df["time_diff"] = df["unix_timestamp"] - mean_subrun_timestamp - - df["vertical_transmission"] = df["transmission"] * np.cos( - np.deg2rad(df["lidar_zenith"]) - ) - - closest_node_before = df[ - (df["time_diff"] < 0) & (np.abs(df["time_diff"]) <= max_gap_lidar_shots) - ].nlargest(1, "time_diff") - closest_node_after = df[ - (df["time_diff"] > 0) & (np.abs(df["time_diff"]) <= max_gap_lidar_shots) - ].nsmallest(1, "time_diff") - - # Check whether the conditions for interpolation are met or not - if not closest_node_before.empty and not closest_node_after.empty: - node_before = closest_node_before.iloc[0] - node_after = closest_node_after.iloc[0] - logger.info( - f"\nFound suitable interpolation nodes within the allowed temporal gap for timestamp {Time(mean_subrun_timestamp, format='unix').iso}" - f"\nUsing folowing interpolation nodes:\n" - f"\n******************** Node before ******************* \n{node_before}" - f"\n******************** Node after ******************** \n{node_after}" - f"\n\nInterpolation results:" - ) - - interp_values = {} - for param in ["base_height", "top_height", "vertical_transmission"]: - interp_func = interp1d( - [node_before["unix_timestamp"], node_after["unix_timestamp"]], - [node_before[param], node_after[param]], - kind="linear", - bounds_error=False, - ) - interp_values[param] = interp_func(mean_subrun_timestamp) - logger.info(f"\t {param}: {interp_values[param]:.4f}") - - return ( - interp_values["base_height"], - interp_values["top_height"], - interp_values["vertical_transmission"], - ) - - # Handle cases where only one node is available - closest_node = ( - closest_node_before if not closest_node_before.empty else closest_node_after - ) - - if closest_node is not None and not closest_node.empty: - closest = closest_node.iloc[0] - logger.info( - f"\nOnly one suitable LIDAR report found for timestamp {Time(mean_subrun_timestamp, format='unix').iso} " - f"within the maximum allowed temporal gap. \nSkipping interpolation. Using nearest node values instead." - f"\n\n{closest}" - ) - return ( - closest["base_height"], - closest["top_height"], - closest["vertical_transmission"], - ) - - logger.info( - f"\nNo node is within the maximum allowed temporal gap for timestamp {Time(mean_subrun_timestamp, format='unix').iso}. Exiting ..." 
- ) - return None - - def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): """ Corrects LST-1 and MAGIC data affected by a cloud presence @@ -241,15 +137,13 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): Data frame of corrected DL1 parameters """ + logger.info(f"\nChecking available image for telescope with ID: {tel_id}") assigned_tel_ids = config["mc_tel_ids"] correction_params = config.get("cloud_correction", {}) - max_gap_lidar_shots = u.Quantity( - correction_params.get("max_gap_lidar_shots") - ) # set it to 900 seconds - lidar_report_file = correction_params.get( - "lidar_report_file" - ) # path to the lidar report + Hc = u.Quantity(correction_params.get("base_height")) + dHc = u.Quantity(correction_params.get("thickness")) + trans0 = correction_params.get("vertical_transmission") nlayers = correction_params.get("number_of_layers") all_params_list = [] @@ -267,23 +161,6 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): m2deg = np.rad2deg(1) / focal_eff * u.degree - mean_subrun_timestamp = np.mean(dl1_params["timestamp"]) - mean_subrun_zenith = np.mean(90.0 - np.rad2deg(dl1_params["pointing_alt"])) - - cloud_params = lidar_cloud_interpolation( - mean_subrun_timestamp, max_gap_lidar_shots, lidar_report_file - ) - - Hc = u.Quantity(cloud_params[0], u.m) - dHc = u.Quantity(cloud_params[1] - cloud_params[0], u.m) - incl_trans = u.Quantity(cloud_params[2]) - trans = incl_trans ** ( - 1 / np.cos(np.deg2rad(mean_subrun_zenith)) - ) # vertical transmission - Hcl = np.linspace(Hc, Hc + dHc, nlayers) # position of each layer - transl = trans_height(Hcl, Hc, dHc, trans) # transmissions of each layer - transl = np.append(transl, transl[-1]) - inds = np.where( np.logical_and(dl1_params["intensity"] > 0.0, dl1_params["tel_id"] == tel_id) )[0] @@ -305,14 +182,21 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): pointing_az = dl1_params["pointing_az"][index] pointing_alt = dl1_params["pointing_alt"][index] + zenith = 90.0 - np.rad2deg(pointing_alt) time_diff = dl1_params["time_diff"][index] n_islands = dl1_params["n_islands"][index] signal_pixels = dl1_params["n_pixels"][index] + trans = trans0 ** (1 / np.cos(np.deg2rad(zenith))) + Hcl = np.linspace(Hc, Hc + dHc, nlayers) # position of each layer + transl = trans_height(Hcl, Hc, dHc, trans) # transmissions of each layer + transl = np.append(transl, transl[-1]) + alt_rad = np.deg2rad(dl1_params["alt"][index]) az_rad = np.deg2rad(dl1_params["az"][index]) impact = dl1_params["impact"][index] * u.m + psi = dl1_params["psi"][index] * u.deg cog_x = (dl1_params["x"][index] * m2deg).value * u.deg cog_y = (dl1_params["y"][index] * m2deg).value * u.deg @@ -334,8 +218,6 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): src_x, src_y = -src_y, -src_x cog_x, cog_y = -cog_y, -cog_x - psi = np.arctan2(src_x - cog_x, src_y - cog_y) - pix_x_tel = (camgeom.pix_x * m2deg).to(u.deg) pix_y_tel = (camgeom.pix_y * m2deg).to(u.deg) @@ -348,7 +230,7 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): d2_src_pix = (src_x - pix_x_tel) ** 2 + (src_y - pix_y_tel) ** 2 distance[d2_cog_pix > d2_cog_src + d2_src_pix] = 0 - dist_corr_layer = model2(impact, Hcl, mean_subrun_zenith) * u.deg + dist_corr_layer = model2(impact, Hcl, zenith) * u.deg ilayer = np.digitize(distance, dist_corr_layer) trans_pixels = transl[ilayer] From 7d24ab279c175aa2cae2e7418eb9eb7bf244c033 Mon Sep 17 00:00:00 2001 From: nzywucka Date: Thu, 5 
Dec 2024 12:33:50 +0100 Subject: [PATCH 06/10] Removed filename from the repository --- .../lst1_magic/lst_m1_m2_cloud_correction.py | 418 ------------------ 1 file changed, 418 deletions(-) delete mode 100644 magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py diff --git a/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py b/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py deleted file mode 100644 index cd81d859..00000000 --- a/magicctapipe/scripts/lst1_magic/lst_m1_m2_cloud_correction.py +++ /dev/null @@ -1,418 +0,0 @@ -#!/usr/bin/env python -# coding: utf-8 - -""" -This script corrects LST-1 and MAGIC data for the cloud affection. The script works on DL 1 stereo files with saved images. As output, the script creates DL 1 files with corrected parameters. - -Usage: -$ python lst_m1_m2_cloud_correction.py ---input_file dl1_stereo/dl1_LST-1_MAGIC.Run03265.0040.h5 -(--output_dir dl1_corrected) -(--config_file config.yaml) -""" -import argparse -import logging -import os -from pathlib import Path - -import astropy.units as u -import ctapipe -import numpy as np -import pandas as pd -import yaml -from astropy.coordinates import AltAz, SkyCoord -from ctapipe.coordinates import TelescopeFrame -from ctapipe.image import ( - concentration_parameters, - hillas_parameters, - leakage_parameters, - timing_parameters, -) -from ctapipe.instrument import SubarrayDescription -from ctapipe.io import read_table - -import magicctapipe -from magicctapipe.io import save_pandas_data_in_table - -logger = logging.getLogger(__name__) -logger.addHandler(logging.StreamHandler()) -logger.setLevel(logging.INFO) - - -def model0(imp, h, zd): - """ - Calculates the geometrical part of the model relating the emission height with the angular distance from the arrival direction - - Parameters - ---------- - imp : astropy.units.quantity.Quantity - Impact - h : astropy.units.quantity.Quantity - Array with heights of each cloud layer a.g.l. - zd : numpy.float64 - Zenith distance in deg - - Returns - ------- - numpy ndarray - Angular distance in units of degree - """ - d = h / np.cos(zd) - return np.arctan((imp / d).to("")).to_value("deg") - - -def model2(imp, h, zd): - """ - Calculates the phenomenological correction to the distances obtained with model0 - - Parameters - ---------- - imp : astropy.units.quantity.Quantity - Impact - h : astropy.units.quantity.Quantity - Array with heights of each cloud layer a.g.l. - zd : numpy.float64 - Zenith distance in deg - - Returns - ------- - numpy ndarray - Angular distance corrected for bias in units of degrees - """ - H0 = 2.2e3 * u.m - bias = 0.877 + 0.015 * ((h + H0) / (7.0e3 * u.m)) - return bias * model0(imp, h, zd) - - -def trans_height(x, Hc, dHc, trans): - """ - Calculates transmission from a geometrically broad cloud at a set of heights. - Cloud is assumed to be homegeneous. - - Parameters - ---------- - x : astropy.units.quantity.Quantity - Array with heights of each cloud layer a.g.l. - Hc : astropy.units.quantity.Quantity - Height of the base of the cloud a.g.l. 
- dHc : astropy.units.quantity.Quantity - Cloud thickness - trans : numpy.float64 - Transmission of the cloud - - Returns - ------- - numpy.ndarray - Cloud transmission - """ - t = pow(trans, ((x - Hc) / dHc).to_value("")) - t = np.where(x < Hc, 1, t) - t = np.where(x > Hc + dHc, trans, t) - return t - - -def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): - """ - Corrects LST-1 and MAGIC data affected by a cloud presence - - Parameters - ---------- - input_file : str - Path to an input .h5 DL1 file - config : dict - Configuration for the LST-1 + MAGIC analysis - tel_id : numpy.int16 - LST-1 and MAGIC telescope ids - camgeom : ctapipe.instrument.camera.geometry.CameraGeometry - An instance of the CameraGeometry class containing information about the - camera's configuration, including pixel type, number of pixels, rotation - angles, and the reference frame. - focal_eff : astropy.units.quantity.Quantity - Effective focal length - - Returns - ------- - pandas.core.frame.DataFrame - Data frame of corrected DL1 parameters - """ - - assigned_tel_ids = config["mc_tel_ids"] - - correction_params = config.get("cloud_correction", {}) - Hc = u.Quantity(correction_params.get("base_height")) - dHc = u.Quantity(correction_params.get("thickness")) - trans0 = correction_params.get("vertical_transmission") - nlayers = correction_params.get("number_of_layers") - - all_params_list = [] - - dl1_params = read_table(input_file, "/events/parameters") - dl1_images = read_table(input_file, "/events/dl1/image_" + str(tel_id)) - - m2deg = np.rad2deg(1) / focal_eff * u.degree - - inds = np.where( - np.logical_and(dl1_params["intensity"] > 0.0, dl1_params["tel_id"] == tel_id) - )[0] - for index in inds: - event_id_lst = dl1_params["event_id_lst"][index] - obs_id_lst = dl1_params["obs_id_lst"][index] - event_id = dl1_params["event_id"][index] - obs_id = dl1_params["obs_id"][index] - event_id_magic = dl1_params["event_id_magic"][index] - obs_id_magic = dl1_params["obs_id_magic"][index] - timestamp = dl1_params["timestamp"][index] - multiplicity = dl1_params["multiplicity"][index] - combo_type = dl1_params["combo_type"][index] - - if assigned_tel_ids["LST-1"] == tel_id: - event_id_image, obs_id_image = event_id_lst, obs_id_lst - else: - event_id_image, obs_id_image = event_id_magic, obs_id_magic - - pointing_az = dl1_params["pointing_az"][index] - pointing_alt = dl1_params["pointing_alt"][index] - zenith = 90.0 - np.rad2deg(pointing_alt) - psi = dl1_params["psi"][index] * u.deg - time_diff = dl1_params["time_diff"][index] - n_islands = dl1_params["n_islands"][index] - signal_pixels = dl1_params["n_pixels"][index] - - trans = trans0 ** (1 / np.cos(zenith)) - Hcl = np.linspace(Hc, Hc + dHc, nlayers) # position of each layer - transl = trans_height(Hcl, Hc, dHc, trans) # transmissions of each layer - transl = np.append(transl, transl[-1]) - - alt_rad = np.deg2rad(dl1_params["alt"][index]) - az_rad = np.deg2rad(dl1_params["az"][index]) - - impact = dl1_params["impact"][index] * u.m - psi = dl1_params["psi"][index] * u.deg - cog_x = (dl1_params["x"][index] * m2deg).value * u.deg - cog_y = (dl1_params["y"][index] * m2deg).value * u.deg - - # Source position - reco_pos = SkyCoord(alt=alt_rad * u.rad, az=az_rad * u.rad, frame=AltAz()) - telescope_pointing = SkyCoord( - alt=pointing_alt * u.rad, - az=pointing_az * u.rad, - frame=AltAz(), - ) - - tel_frame = TelescopeFrame(telescope_pointing=telescope_pointing) - tel = reco_pos.transform_to(tel_frame) - - src_x = tel.fov_lat - src_y = tel.fov_lon - - # 
Transform to Engineering camera - src_x, src_y = -src_y, -src_x - cog_x, cog_y = -cog_y, -cog_x - - pix_x_tel = (camgeom.pix_x * m2deg).to(u.deg) - pix_y_tel = (camgeom.pix_y * m2deg).to(u.deg) - - distance = np.abs( - (pix_y_tel - src_y) * np.cos(psi) + (pix_x_tel - src_x) * np.sin(psi) - ) - - d2_cog_src = (cog_x - src_x) ** 2 + (cog_y - src_y) ** 2 - d2_cog_pix = (cog_x - pix_x_tel) ** 2 + (cog_y - pix_y_tel) ** 2 - d2_src_pix = (src_x - pix_x_tel) ** 2 + (src_y - pix_y_tel) ** 2 - - distance[d2_cog_pix > d2_cog_src + d2_src_pix] = 0 - dist_corr_layer = model2(impact, Hcl, zenith) * u.deg - - ilayer = np.digitize(distance, dist_corr_layer) - trans_pixels = transl[ilayer] - - inds_img = np.where( - (dl1_images["event_id"] == event_id_image) - & (dl1_images["tel_id"] == tel_id) - & (dl1_images["obs_id"] == obs_id_image) - )[0] - - if len(inds_img) == 0: - raise ValueError("Error: 'inds_img' list is empty!") - index_img = inds_img[0] - image = dl1_images["image"][index_img] - cleanmask = dl1_images["image_mask"][index_img] - peak_time = dl1_images["peak_time"][index_img] - image /= trans_pixels - corr_image = image.copy() - - clean_image = corr_image[cleanmask] - clean_camgeom = camgeom[cleanmask] - - hillas_params = hillas_parameters(clean_camgeom, clean_image) - timing_params = timing_parameters( - clean_camgeom, clean_image, peak_time[cleanmask], hillas_params - ) - leakage_params = leakage_parameters(camgeom, corr_image, cleanmask) - if assigned_tel_ids["LST-1"] == tel_id: - conc_params = concentration_parameters( - clean_camgeom, clean_image, hillas_params - ) # For LST-1 we compute concentration from the cleaned image and for MAGIC from the full image to reproduce the current behaviour in the standard code - else: - conc_params = concentration_parameters(camgeom, corr_image, hillas_params) - - event_params = { - **hillas_params, - **timing_params, - **leakage_params, - } - - prefixed_conc_params = { - f"concentration_{key}": value for key, value in conc_params.items() - } - event_params.update(prefixed_conc_params) - - event_info_dict = { - "obs_id": obs_id, - "event_id": event_id, - "tel_id": tel_id, - "pointing_alt": pointing_alt, - "pointing_az": pointing_az, - "time_diff": time_diff, - "n_pixels": signal_pixels, - "n_islands": n_islands, - "event_id_lst": event_id_lst, - "obs_id_lst": obs_id_lst, - "event_id_magic": event_id_magic, - "obs_id_magic": obs_id_magic, - "combo_type": combo_type, - "timestamp": timestamp, - "multiplicity": multiplicity, - } - event_params.update(event_info_dict) - - all_params_list.append(event_params) - - df = pd.DataFrame(all_params_list) - return df - - -def main(): - """Main function.""" - parser = argparse.ArgumentParser() - - parser.add_argument( - "--input_file", - "-i", - dest="input_file", - type=str, - required=True, - help="Path to an input .h5 DL1 data file", - ) - - parser.add_argument( - "--output_dir", - "-o", - dest="output_dir", - type=str, - default="./data", - help="Path to a directory where to save an output corrected DL1 file", - ) - - parser.add_argument( - "--config_file", - "-c", - dest="config_file", - type=str, - default="./resources/config.yaml", - help="Path to a configuration file", - ) - - args = parser.parse_args() - - subarray_info = SubarrayDescription.from_hdf(args.input_file) - - tel_descriptions = subarray_info.tel - camgeom = {} - for telid, telescope in tel_descriptions.items(): - camgeom[telid] = telescope.camera.geometry - - optics_table = read_table( - args.input_file, 
"/configuration/instrument/telescope/optics" - ) - focal_eff = {} - - for telid, telescope in tel_descriptions.items(): - optics_row = optics_table[optics_table["optics_name"] == telescope.name] - if len(optics_row) > 0: - focal_eff[telid] = optics_row["effective_focal_length"][0] * u.m - else: - raise ValueError(f"No optics data found for telescope: {telescope.name}") - - with open(args.config_file, "r") as file: - config = yaml.safe_load(file) - - tel_ids = config["mc_tel_ids"] - - dfs = [] # Initialize an empty list to store DataFrames - - for tel_name, tel_id in tel_ids.items(): - if tel_id != 0: # Only process telescopes that have a non-zero ID - df = process_telescope_data( - args.input_file, config, tel_id, camgeom[tel_id], focal_eff[tel_id] - ) - - dfs.append(df) - - df_all = pd.concat(dfs, ignore_index=True) - - columns_to_convert = [ - "x", - "y", - "r", - "phi", - "length", - "length_uncertainty", - "width", - "width_uncertainty", - "psi", - "slope", - ] - - for col in columns_to_convert: - df_all[col] = df_all[col].apply( - lambda x: x.value if isinstance(x, u.Quantity) else x - ) - - for col in columns_to_convert: - df_all[col] = pd.to_numeric(df_all[col], errors="coerce") - - df_all = df_all.drop(columns=["deviation"], errors="ignore") - - Path(args.output_dir).mkdir(exist_ok=True, parents=True) - input_file_name = Path(args.input_file).name - output_file_name = input_file_name.replace("dl1_stereo", "dl1_corr") - output_file = f"{args.output_dir}/{output_file_name}" - - save_pandas_data_in_table( - df_all, output_file, group_name="/events", table_name="parameters" - ) - - subarray_info.to_hdf(output_file) - - correction_params = config.get("cloud_correction", {}) - - log_dir = args.output_dir - log_filename = "cloud_correction.log" - log_filepath = os.path.join(log_dir, log_filename) - - logging.basicConfig( - filename=log_filepath, - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - ) - - logging.info(f"Correction parameters: {correction_params}") - logging.info(f"ctapipe version: {ctapipe.__version__}") - logging.info(f"magicctapipe version: {magicctapipe.__version__}") - - logger.info("\nDone.") - - -if __name__ == "__main__": - main() From 8047de9026fb893a217346ea512d18849020e1a7 Mon Sep 17 00:00:00 2001 From: nzywucka Date: Thu, 5 Dec 2024 12:39:19 +0100 Subject: [PATCH 07/10] config.yaml: stereo reco quality cut has been changed to the orginal. lst1_magic_cloud_correction.py: interpolation implementation has been added to the code. 
--- magicctapipe/resources/config.yaml | 5 +- .../lst1_magic/lst1_magic_cloud_correction.py | 167 ++++++++++++++++-- 2 files changed, 157 insertions(+), 15 deletions(-) diff --git a/magicctapipe/resources/config.yaml b/magicctapipe/resources/config.yaml index c0c65ff3..d07457ca 100644 --- a/magicctapipe/resources/config.yaml +++ b/magicctapipe/resources/config.yaml @@ -102,7 +102,7 @@ event_coincidence: stereo_reco: - quality_cuts: "(intensity > 50) & (intensity > 80 or tel_id > 1) & (width > 0)" #"(intensity > 50) & (width > 0)" + quality_cuts: "(intensity > 50) & (width > 0)" theta_uplim: "6 arcmin" @@ -279,9 +279,8 @@ dl2_to_dl3: source_dec: null # used when the source name cannot be resolved cloud_correction: - specify_cloud: yes max_gap_lidar_shots: "900 s" - lidar_report_file: "extracted_lidar_reports_test.csv" + lidar_report_file: "/home/zywuckan/magic-cta-pipe/magicctapipe/resources/lidar_reports_clouds_ST.03.16.yaml" number_of_layers: 10 diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py b/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py index ed6ec8dd..88840985 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py @@ -12,6 +12,7 @@ """ import argparse import logging +import os import time from pathlib import Path @@ -22,6 +23,7 @@ import tables import yaml from astropy.coordinates import AltAz, SkyCoord +from astropy.time import Time from ctapipe.coordinates import TelescopeFrame from ctapipe.image import ( concentration_parameters, @@ -31,6 +33,7 @@ ) from ctapipe.instrument import SubarrayDescription from ctapipe.io import read_table +from scipy.interpolate import interp1d import magicctapipe from magicctapipe.io import save_pandas_data_in_table @@ -112,6 +115,132 @@ def trans_height(x, Hc, dHc, trans): return t +def lidar_cloud_interpolation( + mean_subrun_timestamp, max_gap_lidar_shots, lidar_report_file +): + """ + Retrieves or interpolates LIDAR cloud parameters based on the closest timestamps to an input mean timestamp of the processed subrun. + + Parameters + ----------- + mean_subrun_timestamp : int or float + The mean timestamp of the processed subrun (format: unix). + + max_gap_lidar_shots : int or float + Maximum allowed time gap for interpolation (in seconds). + + lidar_report_file : str + Path to the yaml file containing LIDAR laser reports with columns: + - "timestamp" (format: ISO 8601) + - "base_height", "top_height", "transmission", "lidar_zenith" + + Returns + -------- + tuple or None + + A tuple containing interpolated or nearest values for: + - base_height (float): The base height of the cloud layer in meters. + - top_height (float): The top height of the cloud layer in meters. + - vertical_transmission (float): Transmission factor of the cloud layer adjusted for vertical angle. + + If no nodes are found within the maximum allowed time gap, returns None. 
+ """ + + if not os.path.isfile(lidar_report_file): + raise FileNotFoundError(f"LIDAR report file not found: {lidar_report_file}") + + with open(lidar_report_file, "r") as f: + data = yaml.safe_load(f) + + records = [] + for entry in data["data"]: + timestamp = pd.to_datetime(entry["timestamp"], errors="coerce") + lidar_zenith = entry["lidar_zenith"] + + lowest_transmission_layer = min( + entry["layers"], key=lambda layer: layer["transmission"] + ) + + records.append( + { + "timestamp": timestamp, + "lidar_zenith": lidar_zenith, + "base_height": lowest_transmission_layer["base_height"], + "top_height": lowest_transmission_layer["top_height"], + "transmission": lowest_transmission_layer["transmission"], + } + ) + + df = pd.DataFrame(records) + + df["timestamp"] = pd.to_datetime(df["timestamp"], format="ISO8601") + df["unix_timestamp"] = df["timestamp"].astype(np.int64) / 10**9 + df["time_diff"] = df["unix_timestamp"] - mean_subrun_timestamp + + df["lidar_zenith"] = (pd.to_numeric(df["lidar_zenith"], errors="coerce")).to_numpy() + vertical_transmission = df["transmission"] * np.cos(np.deg2rad(df["lidar_zenith"])) + df["vertical_transmission"] = vertical_transmission + + closest_node_before = df[ + (df["time_diff"] < 0) & (np.abs(df["time_diff"]) <= max_gap_lidar_shots) + ].nlargest(1, "time_diff") + closest_node_after = df[ + (df["time_diff"] > 0) & (np.abs(df["time_diff"]) <= max_gap_lidar_shots) + ].nsmallest(1, "time_diff") + + # Check whether the conditions for interpolation are met or not + if not closest_node_before.empty and not closest_node_after.empty: + node_before = closest_node_before.iloc[0] + node_after = closest_node_after.iloc[0] + logger.info( + f"\nFound suitable interpolation nodes within the allowed temporal gap for timestamp {Time(mean_subrun_timestamp, format='unix').iso}" + f"\nUsing following interpolation nodes:\n" + f"\n******************** Node before ******************* \n{node_before}" + f"\n******************** Node after ******************** \n{node_after}" + f"\n\nInterpolation results:" + ) + + interp_values = {} + for param in ["base_height", "top_height", "vertical_transmission"]: + interp_func = interp1d( + [node_before["unix_timestamp"], node_after["unix_timestamp"]], + [node_before[param], node_after[param]], + kind="linear", + bounds_error=False, + ) + interp_values[param] = interp_func(mean_subrun_timestamp) + logger.info(f"\t {param}: {interp_values[param]:.4f}") + + return ( + interp_values["base_height"], + interp_values["top_height"], + interp_values["vertical_transmission"], + ) + + # Handle cases where only one node is available + closest_node = ( + closest_node_before if not closest_node_before.empty else closest_node_after + ) + + if closest_node is not None and not closest_node.empty: + closest = closest_node.iloc[0] + logger.info( + f"\nOnly one suitable LIDAR report found for timestamp {Time(mean_subrun_timestamp, format='unix').iso} " + f"within the maximum allowed temporal gap. \nSkipping interpolation. Using nearest node values instead." + f"\n\n{closest}" + ) + return ( + closest["base_height"], + closest["top_height"], + closest["vertical_transmission"], + ) + + logger.info( + f"\nNo node is within the maximum allowed temporal gap for timestamp {Time(mean_subrun_timestamp, format='unix').iso}. Exiting ..." 
+ ) + return None + + def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): """ Corrects LST-1 and MAGIC data affected by a cloud presence @@ -137,13 +266,15 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): Data frame of corrected DL1 parameters """ - logger.info(f"\nChecking available image for telescope with ID: {tel_id}") assigned_tel_ids = config["mc_tel_ids"] correction_params = config.get("cloud_correction", {}) - Hc = u.Quantity(correction_params.get("base_height")) - dHc = u.Quantity(correction_params.get("thickness")) - trans0 = correction_params.get("vertical_transmission") + max_gap_lidar_shots = u.Quantity( + correction_params.get("max_gap_lidar_shots") + ) # set it to 900 seconds + lidar_report_file = correction_params.get( + "lidar_report_file" + ) # path to the lidar report nlayers = correction_params.get("number_of_layers") all_params_list = [] @@ -161,6 +292,23 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): m2deg = np.rad2deg(1) / focal_eff * u.degree + mean_subrun_timestamp = np.mean(dl1_params["timestamp"]) + mean_subrun_zenith = np.mean(90.0 - np.rad2deg(dl1_params["pointing_alt"])) + + cloud_params = lidar_cloud_interpolation( + mean_subrun_timestamp, max_gap_lidar_shots, lidar_report_file + ) + + Hc = u.Quantity(cloud_params[0], u.m) + dHc = u.Quantity(cloud_params[1] - cloud_params[0], u.m) + incl_trans = u.Quantity(cloud_params[2]) + trans = incl_trans ** ( + 1 / np.cos(np.deg2rad(mean_subrun_zenith)) + ) # vertical transmission + Hcl = np.linspace(Hc, Hc + dHc, nlayers) # position of each layer + transl = trans_height(Hcl, Hc, dHc, trans) # transmissions of each layer + transl = np.append(transl, transl[-1]) + inds = np.where( np.logical_and(dl1_params["intensity"] > 0.0, dl1_params["tel_id"] == tel_id) )[0] @@ -182,21 +330,14 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): pointing_az = dl1_params["pointing_az"][index] pointing_alt = dl1_params["pointing_alt"][index] - zenith = 90.0 - np.rad2deg(pointing_alt) time_diff = dl1_params["time_diff"][index] n_islands = dl1_params["n_islands"][index] signal_pixels = dl1_params["n_pixels"][index] - trans = trans0 ** (1 / np.cos(np.deg2rad(zenith))) - Hcl = np.linspace(Hc, Hc + dHc, nlayers) # position of each layer - transl = trans_height(Hcl, Hc, dHc, trans) # transmissions of each layer - transl = np.append(transl, transl[-1]) - alt_rad = np.deg2rad(dl1_params["alt"][index]) az_rad = np.deg2rad(dl1_params["az"][index]) impact = dl1_params["impact"][index] * u.m - psi = dl1_params["psi"][index] * u.deg cog_x = (dl1_params["x"][index] * m2deg).value * u.deg cog_y = (dl1_params["y"][index] * m2deg).value * u.deg @@ -218,6 +359,8 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): src_x, src_y = -src_y, -src_x cog_x, cog_y = -cog_y, -cog_x + psi = np.arctan2(src_x - cog_x, src_y - cog_y) + pix_x_tel = (camgeom.pix_x * m2deg).to(u.deg) pix_y_tel = (camgeom.pix_y * m2deg).to(u.deg) @@ -230,7 +373,7 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): d2_src_pix = (src_x - pix_x_tel) ** 2 + (src_y - pix_y_tel) ** 2 distance[d2_cog_pix > d2_cog_src + d2_src_pix] = 0 - dist_corr_layer = model2(impact, Hcl, zenith) * u.deg + dist_corr_layer = model2(impact, Hcl, mean_subrun_zenith) * u.deg ilayer = np.digitize(distance, dist_corr_layer) trans_pixels = transl[ilayer] From 11041ed654de5750d1b5ebe07d5cddc3224f27da Mon Sep 17 00:00:00 2001 From: nzywucka Date: Thu, 5 
Dec 2024 15:19:15 +0100 Subject: [PATCH 08/10] config.yaml: additional cleaning LST and MAGIC parameters added lidar_reports_clouds_ST.03.16.yaml: configuration file with LIDAR reports of ST.03.16 period added lst1_magic_cloud_correction.py: additional cleaning procedure and improvements to the interpolation function added --- magicctapipe/resources/config.yaml | 18 +- .../lidar_reports_clouds_ST.03.16.yaml | 533 ++++++++++++++++++ .../lst1_magic/lst1_magic_cloud_correction.py | 210 +++++-- 3 files changed, 711 insertions(+), 50 deletions(-) create mode 100644 magicctapipe/resources/lidar_reports_clouds_ST.03.16.yaml diff --git a/magicctapipe/resources/config.yaml b/magicctapipe/resources/config.yaml index d07457ca..34f9e46e 100644 --- a/magicctapipe/resources/config.yaml +++ b/magicctapipe/resources/config.yaml @@ -280,7 +280,21 @@ dl2_to_dl3: cloud_correction: max_gap_lidar_shots: "900 s" - lidar_report_file: "/home/zywuckan/magic-cta-pipe/magicctapipe/resources/lidar_reports_clouds_ST.03.16.yaml" + lidar_report_file: "../../resources/lidar_reports_clouds_ST.03.16.yaml" number_of_layers: 10 - + additional_lst_cleaning: + picture_thresh: 10 + boundary_thresh: 5 + keep_isolated_pixels: false + min_number_picture_neighbors: 2 + + additional_magic_cleaning: + use_time: true + use_sum: true + picture_thresh: 6 + boundary_thresh: 3.5 + max_time_off: 4.5 + max_time_diff: 1.5 + find_hotpixels: false + pedestal_type: "from_extractor_rndm" diff --git a/magicctapipe/resources/lidar_reports_clouds_ST.03.16.yaml b/magicctapipe/resources/lidar_reports_clouds_ST.03.16.yaml new file mode 100644 index 00000000..4eacc516 --- /dev/null +++ b/magicctapipe/resources/lidar_reports_clouds_ST.03.16.yaml @@ -0,0 +1,533 @@ +units: + timestamp: 'iso' + lidar_zenith: 'deg' + base_height: 'm' + top_height: 'm' + transmission: 'dimensionless' + +data: +- timestamp: '2020-11-18 05:47:19' + lidar_zenith: 41.35 + layers: + - base_height: 8083.39 + top_height: 9504.9 + transmission: 0.990278 +- timestamp: '2020-11-18 05:51:18' + lidar_zenith: 42.24 + layers: + - base_height: 8280.29 + top_height: 8830.14 + transmission: 0.983246 +- timestamp: '2020-11-18 23:39:14' + lidar_zenith: 43.14 + layers: + - base_height: 9081.92 + top_height: 11583.4 + transmission: 0.967012 +- timestamp: '2020-11-18 23:47:22' + lidar_zenith: 41.28 + layers: + - base_height: 8662.92 + top_height: 11455.4 + transmission: 0.983271 +- timestamp: '2020-11-18 23:55:18' + lidar_zenith: 39.5 + layers: + - base_height: 8704.25 + top_height: 10239.4 + transmission: 0.997521 + - base_height: 10702.4 + top_height: 11423.5 + transmission: 0.982105 +- timestamp: '2020-11-19 00:07:19' + lidar_zenith: 36.17 + layers: + - base_height: 9228.45 + top_height: 11376.5 + transmission: 0.97805 +- timestamp: '2020-11-19 00:11:20' + lidar_zenith: 35.23 + layers: + - base_height: 9057.65 + top_height: 12171.3 + transmission: 0.946499 +- timestamp: '2020-11-20 05:28:57' + lidar_zenith: 38.29 + layers: + - base_height: 3097.33 + top_height: 6023.49 + transmission: 0.272683 +- timestamp: '2020-11-21 03:28:16' + lidar_zenith: 11.82 + layers: + - base_height: 6983.88 + top_height: 7769.79 + transmission: 0.96198 +- timestamp: '2020-11-21 03:32:13' + lidar_zenith: 12.73 + layers: + - base_height: 2401.35 + top_height: 8008.39 + transmission: 0.993979 +- timestamp: '2020-11-21 03:36:15' + lidar_zenith: 13.61 + layers: + - base_height: 3284.24 + top_height: 7653.41 + transmission: 0.978311 +- timestamp: '2020-11-21 03:40:12' + lidar_zenith: 14.55 + layers: + - base_height: 
3750.04 + top_height: 7637.11 + transmission: 0.997026 +- timestamp: '2020-11-21 03:44:13' + lidar_zenith: 15.47 + layers: + - base_height: 2794.53 + top_height: 7727.88 + transmission: 0.995458 +- timestamp: '2020-11-21 03:52:14' + lidar_zenith: 17.98 + layers: + - base_height: 6946.36 + top_height: 9337.81 + transmission: 0.996709 +- timestamp: '2020-11-21 04:00:19' + lidar_zenith: 19.84 + layers: + - base_height: 6711.66 + top_height: 7466.94 + transmission: 0.995375 +- timestamp: '2020-11-21 04:04:18' + lidar_zenith: 20.75 + layers: + - base_height: 3802.37 + top_height: 6750.56 + transmission: 0.996894 + - base_height: 7031.1 + top_height: 10039.4 + transmission: 0.993838 +- timestamp: '2020-11-21 04:24:21' + lidar_zenith: 24.6 + layers: + - base_height: 4503.69 + top_height: 6890.61 + transmission: 0.997654 +- timestamp: '2020-11-21 04:28:15' + lidar_zenith: 25.5 + layers: + - base_height: 7197.54 + top_height: 7662.89 + transmission: 0.998542 +- timestamp: '2020-12-07 23:24:05' + lidar_zenith: 29.44 + layers: + - base_height: 5782.58 + top_height: 10741.3 + transmission: 0.983072 +- timestamp: '2020-12-07 23:28:04' + lidar_zenith: 28.52 + layers: + - base_height: 2337.05 + top_height: 3253.23 + transmission: 0.997173 + - base_height: 4064.57 + top_height: 8393.48 + transmission: 0.995944 +- timestamp: '2020-12-08 00:08:02' + lidar_zenith: 19.27 + layers: + - base_height: 3996.74 + top_height: 4120.95 + transmission: 0.994905 + - base_height: 4494.68 + top_height: 8271.34 + transmission: 0.958404 +- timestamp: '2020-12-08 00:12:03' + lidar_zenith: 18.33 + layers: + - base_height: 5703.4 + top_height: 8499.75 + transmission: 0.997426 +- timestamp: '2020-12-08 00:16:04' + lidar_zenith: 17.4 + layers: + - base_height: 5237.92 + top_height: 6690.51 + transmission: 0.998836 + - base_height: 6976.79 + top_height: 8383.41 + transmission: 0.99353 +- timestamp: '2020-12-08 00:20:03' + lidar_zenith: 16.48 + layers: + - base_height: 6183.28 + top_height: 8516.48 + transmission: 0.942522 +- timestamp: '2020-12-08 00:24:04' + lidar_zenith: 14.98 + layers: + - base_height: 4321.69 + top_height: 9205.67 + transmission: 0.998552 +- timestamp: '2020-12-08 00:28:03' + lidar_zenith: 14.06 + layers: + - base_height: 4572.29 + top_height: 10825.6 + transmission: 0.97851 +- timestamp: '2020-12-08 00:32:17' + lidar_zenith: 13.08 + layers: + - base_height: 5206.56 + top_height: 11266.7 + transmission: 0.978387 +- timestamp: '2020-12-08 00:36:07' + lidar_zenith: 12.2 + layers: + - base_height: 6255.68 + top_height: 11024.3 + transmission: 0.983348 +- timestamp: '2020-12-08 00:40:02' + lidar_zenith: 11.29 + layers: + - base_height: 4865.59 + top_height: 8333.48 + transmission: 0.994915 +- timestamp: '2020-12-08 00:44:03' + lidar_zenith: 10.93 + layers: + - base_height: 2346.57 + top_height: 8310.34 + transmission: 0.993694 +- timestamp: '2020-12-08 00:48:03' + lidar_zenith: 10.01 + layers: + - base_height: 4272.17 + top_height: 8321.34 + transmission: 0.979886 +- timestamp: '2020-12-08 00:52:06' + lidar_zenith: 9.11 + layers: + - base_height: 2430.81 + top_height: 3466.43 + transmission: 0.998161 + - base_height: 3762.64 + top_height: 7869.89 + transmission: 0.996369 +- timestamp: '2020-12-08 00:56:06' + lidar_zenith: 8.16 + layers: + - base_height: 3724.66 + top_height: 8364.42 + transmission: 0.994163 +- timestamp: '2020-12-14 04:57:01' + lidar_zenith: 52.09 + layers: + - base_height: 4192.5 + top_height: 4626.91 + transmission: 0.99579 +- timestamp: '2020-12-23 02:58:55' + lidar_zenith: 34.47 + layers: + - 
base_height: 7348.87 + top_height: 9279.59 + transmission: 0.994382 +- timestamp: '2020-12-23 03:03:01' + lidar_zenith: 35.35 + layers: + - base_height: 6943.06 + top_height: 10142.6 + transmission: 0.989766 +- timestamp: '2020-12-23 03:14:59' + lidar_zenith: 38.08 + layers: + - base_height: 8771.97 + top_height: 9054.62 + transmission: 0.993417 +- timestamp: '2020-12-23 03:22:58' + lidar_zenith: 39.14 + layers: + - base_height: 7229.97 + top_height: 7806.03 + transmission: 0.984449 + - base_height: 8643.32 + top_height: 8847.44 + transmission: 0.99359 +- timestamp: '2020-12-23 03:26:57' + lidar_zenith: 40.03 + layers: + - base_height: 8612.31 + top_height: 9034.13 + transmission: 0.984858 +- timestamp: '2020-12-23 03:42:57' + lidar_zenith: 44.31 + layers: + - base_height: 7905.67 + top_height: 8093.98 + transmission: 0.993864 +- timestamp: '2020-12-23 03:46:56' + lidar_zenith: 45.19 + layers: + - base_height: 7808.49 + top_height: 8129.15 + transmission: 0.998521 +- timestamp: '2020-12-23 03:50:55' + lidar_zenith: 46.07 + layers: + - base_height: 7320.88 + top_height: 8435.01 + transmission: 0.954334 +- timestamp: '2020-12-23 04:22:58' + lidar_zenith: 53.1 + layers: + - base_height: 6810.82 + top_height: 8811.56 + transmission: 0.634311 +- timestamp: '2020-12-23 04:26:59' + lidar_zenith: 53.98 + layers: + - base_height: 5505.64 + top_height: 8875.23 + transmission: 0.550786 +- timestamp: '2020-12-23 04:31:05' + lidar_zenith: 54.87 + layers: + - base_height: 5750.35 + top_height: 8440.46 + transmission: 0.694123 +- timestamp: '2021-03-11 20:41:58' + lidar_zenith: 19.43 + layers: + - base_height: 5960.02 + top_height: 8586.78 + transmission: 0.944362 +- timestamp: '2021-03-11 20:45:58' + lidar_zenith: 20.34 + layers: + - base_height: 6510.41 + top_height: 8897.3 + transmission: 0.977718 +- timestamp: '2021-03-11 20:50:01' + lidar_zenith: 21.26 + layers: + - base_height: 3059.78 + top_height: 9707.01 + transmission: 0.960416 +- timestamp: '2021-03-11 20:54:00' + lidar_zenith: 22.17 + layers: + - base_height: 4920.12 + top_height: 6196.61 + transmission: 0.998693 + - base_height: 6474.43 + top_height: 9053.89 + transmission: 0.960237 +- timestamp: '2021-03-11 20:57:59' + lidar_zenith: 22.33 + layers: + - base_height: 8202.81 + top_height: 9693.81 + transmission: 0.946716 +- timestamp: '2021-03-11 21:02:06' + lidar_zenith: 23.3 + layers: + - base_height: 7984.35 + top_height: 9103.63 + transmission: 0.968782 +- timestamp: '2021-03-11 21:06:05' + lidar_zenith: 24.21 + layers: + - base_height: 5785.97 + top_height: 5949.72 + transmission: 0.99856 + - base_height: 6354.54 + top_height: 10003.5 + transmission: 0.847347 +- timestamp: '2021-03-11 21:10:06' + lidar_zenith: 25.1 + layers: + - base_height: 7025.92 + top_height: 10677.4 + transmission: 0.676118 +- timestamp: '2021-03-11 21:14:05' + lidar_zenith: 26.02 + layers: + - base_height: 6476.6 + top_height: 9770.24 + transmission: 0.537755 +- timestamp: '2021-03-11 21:22:05' + lidar_zenith: 28.55 + layers: + - base_height: 7004.69 + top_height: 10139.9 + transmission: 0.544511 +- timestamp: '2021-03-11 21:26:00' + lidar_zenith: 29.44 + layers: + - base_height: 6819.36 + top_height: 10387.1 + transmission: 0.741491 +- timestamp: '2021-03-11 21:30:05' + lidar_zenith: 30.35 + layers: + - base_height: 6902.21 + top_height: 9264.49 + transmission: 0.758622 +- timestamp: '2021-03-11 21:34:00' + lidar_zenith: 31.25 + layers: + - base_height: 6161.44 + top_height: 6437.92 + transmission: 0.995586 + - base_height: 7063.36 + top_height: 10114.7 + 
transmission: 0.728179 +- timestamp: '2021-03-11 21:42:05' + lidar_zenith: 32.34 + layers: + - base_height: 7358.49 + top_height: 10179.9 + transmission: 0.446966 +- timestamp: '2021-03-11 21:46:01' + lidar_zenith: 33.24 + layers: + - base_height: 6542.9 + top_height: 9804.14 + transmission: 0.384501 +- timestamp: '2021-03-11 21:50:07' + lidar_zenith: 34.14 + layers: + - base_height: 7096.26 + top_height: 10552.6 + transmission: 0.456605 +- timestamp: '2021-03-11 21:54:03' + lidar_zenith: 35.01 + layers: + - base_height: 7022.38 + top_height: 9500.14 + transmission: 0.41693 +- timestamp: '2021-03-11 21:58:05' + lidar_zenith: 35.94 + layers: + - base_height: 5660.39 + top_height: 9546.18 + transmission: 0.292292 +- timestamp: '2021-03-13 22:03:28' + lidar_zenith: 38.9 + layers: + - base_height: 6630.79 + top_height: 7377.42 + transmission: 0.980601 +- timestamp: '2021-03-13 22:07:29' + lidar_zenith: 39.76 + layers: + - base_height: 6056.41 + top_height: 7546.81 + transmission: 0.993576 +- timestamp: '2021-03-13 22:11:33' + lidar_zenith: 40.71 + layers: + - base_height: 4209.05 + top_height: 5217.55 + transmission: 0.998153 + - base_height: 5590.36 + top_height: 8065.24 + transmission: 0.65145 +- timestamp: '2021-03-13 22:15:32' + lidar_zenith: 41.59 + layers: + - base_height: 5539.99 + top_height: 8005.37 + transmission: 0.579692 +- timestamp: '2021-03-13 22:19:34' + lidar_zenith: 43.19 + layers: + - base_height: 5604.24 + top_height: 7856.89 + transmission: 0.523405 +- timestamp: '2021-03-13 22:23:34' + lidar_zenith: 44.07 + layers: + - base_height: 5522.78 + top_height: 7604.87 + transmission: 0.552276 +- timestamp: '2021-03-13 22:27:33' + lidar_zenith: 44.98 + layers: + - base_height: 5850.12 + top_height: 8113.95 + transmission: 0.559199 +- timestamp: '2021-03-13 22:31:34' + lidar_zenith: 45.87 + layers: + - base_height: 5168.48 + top_height: 7408.42 + transmission: 0.747925 +- timestamp: '2021-03-13 22:35:35' + lidar_zenith: 46.76 + layers: + - base_height: 5298.63 + top_height: 7448.09 + transmission: 0.608835 +- timestamp: '2021-03-13 22:39:36' + lidar_zenith: 46.92 + layers: + - base_height: 5586.04 + top_height: 7973.29 + transmission: 0.629101 +- timestamp: '2021-03-13 22:43:39' + lidar_zenith: 47.78 + layers: + - base_height: 5584.18 + top_height: 7499.26 + transmission: 0.725406 +- timestamp: '2021-03-13 22:47:40' + lidar_zenith: 48.62 + layers: + - base_height: 5255.6 + top_height: 7413.89 + transmission: 0.740245 +- timestamp: '2021-03-13 22:51:38' + lidar_zenith: 49.56 + layers: + - base_height: 5421.43 + top_height: 7549.98 + transmission: 0.707472 +- timestamp: '2021-03-13 22:55:39' + lidar_zenith: 50.44 + layers: + - base_height: 5491.07 + top_height: 7448.29 + transmission: 0.782424 +- timestamp: '2021-03-13 23:11:41' + lidar_zenith: 53.95 + layers: + - base_height: 5598.62 + top_height: 7164.52 + transmission: 0.942725 +- timestamp: '2021-03-13 23:15:40' + lidar_zenith: 54.8 + layers: + - base_height: 5507.28 + top_height: 7372.8 + transmission: 0.907823 +- timestamp: '2021-03-15 20:50:00' + lidar_zenith: 24.09 + layers: + - base_height: 3690.09 + top_height: 6261.75 + transmission: 0.992119 +- timestamp: '2021-03-15 21:02:08' + lidar_zenith: 27.58 + layers: + - base_height: 2357.57 + top_height: 6299.63 + transmission: 0.994461 +- timestamp: '2021-03-15 21:21:59' + lidar_zenith: 31.35 + layers: + - base_height: 6769.2 + top_height: 7594.22 + transmission: 0.993382 diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py 
b/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py index 88840985..0a694a09 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py @@ -14,6 +14,7 @@ import logging import os import time +import warnings from pathlib import Path import astropy.units as u @@ -29,6 +30,8 @@ concentration_parameters, hillas_parameters, leakage_parameters, + number_of_islands, + tailcuts_clean, timing_parameters, ) from ctapipe.instrument import SubarrayDescription @@ -36,6 +39,7 @@ from scipy.interpolate import interp1d import magicctapipe +from magicctapipe.image import MAGICClean from magicctapipe.io import save_pandas_data_in_table logger = logging.getLogger(__name__) @@ -178,7 +182,7 @@ def lidar_cloud_interpolation( df["time_diff"] = df["unix_timestamp"] - mean_subrun_timestamp df["lidar_zenith"] = (pd.to_numeric(df["lidar_zenith"], errors="coerce")).to_numpy() - vertical_transmission = df["transmission"] * np.cos(np.deg2rad(df["lidar_zenith"])) + vertical_transmission = df["transmission"] ** np.cos(np.deg2rad(df["lidar_zenith"])) df["vertical_transmission"] = vertical_transmission closest_node_before = df[ @@ -241,24 +245,51 @@ def lidar_cloud_interpolation( return None -def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): +def process_telescope_data( + dl1_params, + dl1_images, + config, + tel_id, + tel_ids, + camgeom, + focal_eff, + nlayers, + mean_subrun_zenith, + Hc, + dHc, + trans, +): """ Corrects LST-1 and MAGIC data affected by a cloud presence Parameters ---------- - input_file : str - Path to an input .h5 DL1 file + dl1_params : str + Path to an input .h5 DL1 table with parameters + dl1_images : str + Path to an input .h5 DL1 table with images config : dict Configuration for the LST-1 + MAGIC analysis tel_id : numpy.int16 LST-1 and MAGIC telescope ids + tel_ids : dict + List of LST-1 and MAGIC telescope names and ids from config file camgeom : ctapipe.instrument.camera.geometry.CameraGeometry An instance of the CameraGeometry class containing information about the camera's configuration, including pixel type, number of pixels, rotation angles, and the reference frame. focal_eff : astropy.units.quantity.Quantity Effective focal length + nlayers : astropy.units.quantity.Quantity + Array with heights of each cloud layer a.g.l. + mean_subrun_zenith : numpy.float64 + Mean value of zenith per subran + Hc : astropy.units.quantity.Quantity + Height of the base of the cloud a.g.l. 
+ dHc : astropy.units.quantity.Quantity + Cloud thickness + trans : numpy.float64 + Transmission of the cloud Returns ------- @@ -266,45 +297,25 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): Data frame of corrected DL1 parameters """ - assigned_tel_ids = config["mc_tel_ids"] - - correction_params = config.get("cloud_correction", {}) - max_gap_lidar_shots = u.Quantity( - correction_params.get("max_gap_lidar_shots") - ) # set it to 900 seconds - lidar_report_file = correction_params.get( - "lidar_report_file" - ) # path to the lidar report - nlayers = correction_params.get("number_of_layers") + # AC LST + cleaning_level = {"LSTCam": (5, 10, 2)} - all_params_list = [] + # AC MAGIC + # Ignore runtime warnings appeared during the image cleaning + warnings.simplefilter("ignore", category=RuntimeWarning) + # Configure the MAGIC image cleaning + config_clean_magic = config["cloud_correction"]["additional_magic_cleaning"] + magic_clean = MAGICClean(camgeom, config_clean_magic) - dl1_params = read_table(input_file, "/events/parameters") + unsuitable_mask = None - image_node_path = "/events/dl1/image_" + str(tel_id) + all_params_list = [] - try: - dl1_images = read_table(input_file, image_node_path) - logger.info(f"Found images for telescope with ID {tel_id}. Processing...") - except tables.NoSuchNodeError: - logger.info(f"No image for telescope with ID {tel_id}. Skipping.") + if dl1_images is None: return None m2deg = np.rad2deg(1) / focal_eff * u.degree - mean_subrun_timestamp = np.mean(dl1_params["timestamp"]) - mean_subrun_zenith = np.mean(90.0 - np.rad2deg(dl1_params["pointing_alt"])) - - cloud_params = lidar_cloud_interpolation( - mean_subrun_timestamp, max_gap_lidar_shots, lidar_report_file - ) - - Hc = u.Quantity(cloud_params[0], u.m) - dHc = u.Quantity(cloud_params[1] - cloud_params[0], u.m) - incl_trans = u.Quantity(cloud_params[2]) - trans = incl_trans ** ( - 1 / np.cos(np.deg2rad(mean_subrun_zenith)) - ) # vertical transmission Hcl = np.linspace(Hc, Hc + dHc, nlayers) # position of each layer transl = trans_height(Hcl, Hc, dHc, trans) # transmissions of each layer transl = np.append(transl, transl[-1]) @@ -323,7 +334,7 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): multiplicity = dl1_params["multiplicity"][index] combo_type = dl1_params["combo_type"][index] - if assigned_tel_ids["LST-1"] == tel_id: + if tel_ids["LST-1"] == tel_id: event_id_image, obs_id_image = event_id_lst, obs_id_lst else: event_id_image, obs_id_image = event_id_magic, obs_id_magic @@ -388,25 +399,61 @@ def process_telescope_data(input_file, config, tel_id, camgeom, focal_eff): raise ValueError("Error: 'inds_img' list is empty!") index_img = inds_img[0] image = dl1_images["image"][index_img] - cleanmask = dl1_images["image_mask"][index_img] peak_time = dl1_images["peak_time"][index_img] image /= trans_pixels - corr_image = image.copy() - clean_image = corr_image[cleanmask] - clean_camgeom = camgeom[cleanmask] + # additional cleaning + if tel_ids["LST-1"] == tel_id: + + boundary, picture, min_neighbors = cleaning_level[ + camgeom.name + ] # .camera_name] + + clean = tailcuts_clean( + camgeom, + image, + boundary_thresh=boundary, + picture_thresh=picture, + min_number_picture_neighbors=min_neighbors, + ) + + # Create a mask indicating which pixels survived the cleaning + clean_mask = np.zeros_like(image, dtype=bool) + clean_mask[clean] = True + + if np.sum(clean_mask) == 0: + continue + + # Apply the mask to relevant data arrays + clean_peak_time = 
peak_time[clean_mask] + clean_image = image[clean_mask] + clean_camgeom = camgeom[clean_mask] + + else: + + # Apply the MAGIC image cleaning + clean_mask, image, peak_time = magic_clean.clean_image( + event_image=image, + event_pulse_time=peak_time, + unsuitable_mask=unsuitable_mask, + ) + n_islands, _ = number_of_islands(camgeom, clean_mask) + + clean_camgeom = camgeom[clean_mask] + clean_image = image[clean_mask] + clean_peak_time = peak_time[clean_mask] hillas_params = hillas_parameters(clean_camgeom, clean_image) timing_params = timing_parameters( - clean_camgeom, clean_image, peak_time[cleanmask], hillas_params + clean_camgeom, clean_image, clean_peak_time, hillas_params ) - leakage_params = leakage_parameters(camgeom, corr_image, cleanmask) - if assigned_tel_ids["LST-1"] == tel_id: + leakage_params = leakage_parameters(camgeom, image, clean_mask) + if tel_ids["LST-1"] == tel_id: conc_params = concentration_parameters( clean_camgeom, clean_image, hillas_params ) # For LST-1 we compute concentration from the cleaned image and for MAGIC from the full image to reproduce the current behaviour in the standard code else: - conc_params = concentration_parameters(camgeom, corr_image, hillas_params) + conc_params = concentration_parameters(camgeom, clean_image, hillas_params) event_params = { **hillas_params, @@ -502,15 +549,57 @@ def main(): tel_ids = config["mc_tel_ids"] - dfs = [] # Initialize an empty list to store DataFrames + correction_params = config.get("cloud_correction", {}) + max_gap_lidar_shots = u.Quantity( + correction_params.get("max_gap_lidar_shots") + ) # set it to 900 seconds + lidar_report_file = correction_params.get( + "lidar_report_file" + ) # path to the lidar report + nlayers = correction_params.get("number_of_layers") + + dl1_params = read_table(args.input_file, "/events/parameters") + + mean_subrun_timestamp = np.mean(dl1_params["timestamp"]) + mean_subrun_zenith = np.mean(90.0 - np.rad2deg(dl1_params["pointing_alt"])) + + cloud_params = lidar_cloud_interpolation( + mean_subrun_timestamp, max_gap_lidar_shots, lidar_report_file + ) + + Hc = u.Quantity(cloud_params[0], u.m) + dHc = u.Quantity(cloud_params[1] - cloud_params[0], u.m) + vertical_trans = u.Quantity(cloud_params[2]) + trans = vertical_trans ** (1 / np.cos(np.deg2rad(mean_subrun_zenith))) + + dfs = [] for tel_name, tel_id in tel_ids.items(): if tel_id != 0: # Only process telescopes that have a non-zero ID + # Read images for each telescope + image_node_path = "/events/dl1/image_" + str(tel_id) + try: + dl1_images = read_table(args.input_file, image_node_path) + except tables.NoSuchNodeError: + logger.info(f"\nNo image for telescope with ID {tel_id}. 
Skipping.") + dl1_images = None + df = process_telescope_data( - args.input_file, config, tel_id, camgeom[tel_id], focal_eff[tel_id] + dl1_params, + dl1_images, + config, + tel_id, + tel_ids, + camgeom[tel_id], + focal_eff[tel_id], + nlayers, + mean_subrun_zenith, + Hc, + dHc, + trans, ) - - dfs.append(df) + if df is not None: + dfs.append(df) df_all = pd.concat(dfs, ignore_index=True) @@ -549,6 +638,31 @@ def main(): df_all, output_file, group_name="/events", table_name="parameters" ) + with tables.open_file(output_file, mode="a") as f: + cloud_metadata_group = f.create_group("/", "weather", "Cloud parameters") + + cloud_base_height_data = Hc.to_value(u.m) + cloud_base_height_array = f.create_array( + cloud_metadata_group, + "cloud_base_height", + np.array([cloud_base_height_data]), + ) + cloud_base_height_array.attrs["unit"] = str(u.m) + + cloud_thickness_data = dHc.to_value(u.m) + cloud_thickness_array = f.create_array( + cloud_metadata_group, "cloud_thickness", np.array([cloud_thickness_data]) + ) + cloud_thickness_array.attrs["unit"] = str(u.m) + + cloud_vertical_trans_data = vertical_trans.to_value(u.dimensionless_unscaled) + cloud_vertical_trans_array = f.create_array( + cloud_metadata_group, + "cloud_vertical_transmission", + np.array([cloud_vertical_trans_data]), + ) + cloud_vertical_trans_array.attrs["unit"] = str(u.dimensionless_unscaled) + subarray_info.to_hdf(output_file) correction_params = config.get("cloud_correction", {}) From d213c9b21bca9115ffbac24e95ce474c75fa5efd Mon Sep 17 00:00:00 2001 From: nzywucka Date: Tue, 17 Dec 2024 16:15:00 +0100 Subject: [PATCH 09/10] config.yaml: modification of the cloud_correction section lst1_magic_cloud_correction.py: update on the additional cleaning method --- magicctapipe/resources/config.yaml | 17 +-- .../lst1_magic/lst1_magic_cloud_correction.py | 117 +++++++++++------- 2 files changed, 77 insertions(+), 57 deletions(-) diff --git a/magicctapipe/resources/config.yaml b/magicctapipe/resources/config.yaml index 34f9e46e..864a1307 100644 --- a/magicctapipe/resources/config.yaml +++ b/magicctapipe/resources/config.yaml @@ -282,19 +282,6 @@ cloud_correction: max_gap_lidar_shots: "900 s" lidar_report_file: "../../resources/lidar_reports_clouds_ST.03.16.yaml" number_of_layers: 10 + cleaning_multiplication_factor: 1.25 + use_additional_cleaning: False - additional_lst_cleaning: - picture_thresh: 10 - boundary_thresh: 5 - keep_isolated_pixels: false - min_number_picture_neighbors: 2 - - additional_magic_cleaning: - use_time: true - use_sum: true - picture_thresh: 6 - boundary_thresh: 3.5 - max_time_off: 4.5 - max_time_diff: 1.5 - find_hotpixels: false - pedestal_type: "from_extractor_rndm" diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py b/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py index 0a694a09..bf21c128 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py @@ -30,7 +30,6 @@ concentration_parameters, hillas_parameters, leakage_parameters, - number_of_islands, tailcuts_clean, timing_parameters, ) @@ -258,6 +257,7 @@ def process_telescope_data( Hc, dHc, trans, + cmf, ): """ Corrects LST-1 and MAGIC data affected by a cloud presence @@ -290,6 +290,8 @@ def process_telescope_data( Cloud thickness trans : numpy.float64 Transmission of the cloud + cmf : float + Multiplication factor for additional cleaning Returns ------- @@ -298,13 +300,33 @@ def process_telescope_data( """ # AC LST - cleaning_level = 
{"LSTCam": (5, 10, 2)} + cleaning_level = config["LST"]["tailcuts_clean"] + # Multiply only non-boolean numerical values + config_clean_LST = { + key: ( + value * cmf + if isinstance(value, (int, float)) and not isinstance(value, bool) + else value + ) + for key, value in cleaning_level.items() + } # AC MAGIC # Ignore runtime warnings appeared during the image cleaning warnings.simplefilter("ignore", category=RuntimeWarning) # Configure the MAGIC image cleaning - config_clean_magic = config["cloud_correction"]["additional_magic_cleaning"] + config_clean = config["MAGIC"]["magic_clean"] + # Scale numeric values and set 'find_hotpixels' to False + config_clean_magic = { + key: ( + False + if key == "find_hotpixels" + else value * cmf + if isinstance(value, (int, float)) and not isinstance(value, bool) + else value + ) + for key, value in config_clean.items() + } magic_clean = MAGICClean(camgeom, config_clean_magic) unsuitable_mask = None @@ -399,49 +421,58 @@ def process_telescope_data( raise ValueError("Error: 'inds_img' list is empty!") index_img = inds_img[0] image = dl1_images["image"][index_img] + clean_mask = dl1_images["image_mask"][index_img] peak_time = dl1_images["peak_time"][index_img] image /= trans_pixels - # additional cleaning - if tel_ids["LST-1"] == tel_id: - - boundary, picture, min_neighbors = cleaning_level[ - camgeom.name - ] # .camera_name] - - clean = tailcuts_clean( - camgeom, - image, - boundary_thresh=boundary, - picture_thresh=picture, - min_number_picture_neighbors=min_neighbors, - ) - - # Create a mask indicating which pixels survived the cleaning - clean_mask = np.zeros_like(image, dtype=bool) - clean_mask[clean] = True + clean_image = image[clean_mask] + clean_camgeom = camgeom[clean_mask] + clean_peak_time = peak_time[clean_mask] - if np.sum(clean_mask) == 0: - continue - - # Apply the mask to relevant data arrays - clean_peak_time = peak_time[clean_mask] - clean_image = image[clean_mask] - clean_camgeom = camgeom[clean_mask] - - else: - - # Apply the MAGIC image cleaning - clean_mask, image, peak_time = magic_clean.clean_image( - event_image=image, - event_pulse_time=peak_time, - unsuitable_mask=unsuitable_mask, - ) - n_islands, _ = number_of_islands(camgeom, clean_mask) - - clean_camgeom = camgeom[clean_mask] - clean_image = image[clean_mask] - clean_peak_time = peak_time[clean_mask] + # additional cleaning + if config["cloud_correction"]["use_additional_cleaning"]: + if tel_ids["LST-1"] == tel_id: + + clean = tailcuts_clean( + camgeom, + image, + boundary_thresh=config_clean_LST["boundary_thresh"], + picture_thresh=config_clean_LST["picture_thresh"], + min_number_picture_neighbors=config_clean_LST[ + "min_number_picture_neighbors" + ], + ) + # Create a mask indicating which pixels survived the cleaning + clean_mask = np.zeros_like(image, dtype=bool) + clean_mask[clean] = True + + if np.sum(clean_mask) == 0: + continue + + # Apply the mask to relevant data arrays + clean_peak_time = peak_time[clean_mask] + clean_image = image[clean_mask] + clean_camgeom = camgeom[clean_mask] + + else: + + # Apply the image cleaning + clean_mask, image, peak_time = magic_clean.clean_image( + event_image=image, + event_pulse_time=peak_time, + unsuitable_mask=unsuitable_mask, + ) + + # n_pixels = np.count_nonzero(clean_mask) + # n_islands, _ = number_of_islands(camgeom, clean_mask) + + clean_camgeom = camgeom[clean_mask] # camera_geom_masked + clean_image = image[clean_mask] # image_masked + clean_peak_time = peak_time[clean_mask] # signal_pixels + + # Check if clean_image is 
empty or all zeros + if clean_image.size == 0 or not np.any(clean_image): + continue # Skip this iteration if clean_image is empty or all zeros hillas_params = hillas_parameters(clean_camgeom, clean_image) timing_params = timing_parameters( @@ -557,6 +588,7 @@ def main(): "lidar_report_file" ) # path to the lidar report nlayers = correction_params.get("number_of_layers") + cmf = correction_params.get("cleaning_multiplication_factor") dl1_params = read_table(args.input_file, "/events/parameters") @@ -597,6 +629,7 @@ def main(): Hc, dHc, trans, + cmf, ) if df is not None: dfs.append(df) From c299361c033959c607b8dce3bcab74a8c07c12b6 Mon Sep 17 00:00:00 2001 From: nzywucka Date: Fri, 31 Jan 2025 14:27:52 +0100 Subject: [PATCH 10/10] Adjustment of the additional cleaning procedure for LST and MAGIC teleskopes. --- .../lst1_magic/lst1_magic_cloud_correction.py | 109 ++++++++++++------ 1 file changed, 75 insertions(+), 34 deletions(-) diff --git a/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py b/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py index bf21c128..30ab7636 100644 --- a/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py +++ b/magicctapipe/scripts/lst1_magic/lst1_magic_cloud_correction.py @@ -244,6 +244,65 @@ def lidar_cloud_interpolation( return None +def clean_image_with_modified_thresholds( + event_image, event_pulse_time, unsuitable_mask, magic_clean, config_clean_magic, cmf +): + + """ + Creates a wrapper function to modify the thresholds + + Parameters + ---------- + event_image : numpy.ndarray + Input array with event image + + event_pulse_time : numpy.ndarray + Input array with event times + + unsuitable_mask : numpy.ndarray + Array of unsuitable pixels + + magic_clean : magicctapipe.image.cleaning.MAGICClean + Cleaning implementation used by MAGIC + + config_clean_magic : dict + Cleaning parameters read from the config file + + cmf : float + Multiplication factor for additional cleaning + + Returns + ------- + clean_mask : numpy.ndarray + Mask with pixels surviving the cleaning + + image : numpy.ndarray + Image with surviving pixels + + peak_time : numpy.ndarray + Times with only surviving pixels + """ + + # Multiply the thresholds by cmf + modified_picture_thresh = config_clean_magic["picture_thresh"] * cmf + modified_boundary_thresh = config_clean_magic["boundary_thresh"] * cmf + + # Directly modify magic_clean instance variables with the modified thresholds + magic_clean.picture_thresh = modified_picture_thresh + magic_clean.boundary_thresh = modified_boundary_thresh + + # Now call the clean_image method on the magic_clean instance with the modified thresholds + clean_mask, image, peak_time = magic_clean.clean_image( + event_image=event_image, + event_pulse_time=event_pulse_time, + unsuitable_mask=unsuitable_mask, + # picture_thresh=modified_picture_thresh, + # boundary_thresh=modified_boundary_thresh + ) + + return clean_mask, image, peak_time + + def process_telescope_data( dl1_params, dl1_images, @@ -300,33 +359,14 @@ def process_telescope_data( """ # AC LST - cleaning_level = config["LST"]["tailcuts_clean"] - # Multiply only non-boolean numerical values - config_clean_LST = { - key: ( - value * cmf - if isinstance(value, (int, float)) and not isinstance(value, bool) - else value - ) - for key, value in cleaning_level.items() - } - + cleaning_level_lst = config["LST"]["tailcuts_clean"] + modified_boundary_thresh_lst = cleaning_level_lst["boundary_thresh"] * cmf + modified_picture_thresh_lst = cleaning_level_lst["picture_thresh"] * 
cmf # AC MAGIC # Ignore runtime warnings appeared during the image cleaning warnings.simplefilter("ignore", category=RuntimeWarning) # Configure the MAGIC image cleaning - config_clean = config["MAGIC"]["magic_clean"] - # Scale numeric values and set 'find_hotpixels' to False - config_clean_magic = { - key: ( - False - if key == "find_hotpixels" - else value * cmf - if isinstance(value, (int, float)) and not isinstance(value, bool) - else value - ) - for key, value in config_clean.items() - } + config_clean_magic = config["MAGIC"]["magic_clean"] magic_clean = MAGICClean(camgeom, config_clean_magic) unsuitable_mask = None @@ -436,9 +476,9 @@ def process_telescope_data( clean = tailcuts_clean( camgeom, image, - boundary_thresh=config_clean_LST["boundary_thresh"], - picture_thresh=config_clean_LST["picture_thresh"], - min_number_picture_neighbors=config_clean_LST[ + boundary_thresh=modified_boundary_thresh_lst, + picture_thresh=modified_picture_thresh_lst, + min_number_picture_neighbors=cleaning_level_lst[ "min_number_picture_neighbors" ], ) @@ -456,24 +496,25 @@ def process_telescope_data( else: - # Apply the image cleaning - clean_mask, image, peak_time = magic_clean.clean_image( + # Now, use the wrapper function to clean the image + clean_mask, image, peak_time = clean_image_with_modified_thresholds( event_image=image, event_pulse_time=peak_time, unsuitable_mask=unsuitable_mask, + magic_clean=magic_clean, # Pass the magic_clean instance here + config_clean_magic=config_clean_magic, + cmf=cmf, ) - # n_pixels = np.count_nonzero(clean_mask) - # n_islands, _ = number_of_islands(camgeom, clean_mask) - - clean_camgeom = camgeom[clean_mask] # camera_geom_masked - clean_image = image[clean_mask] # image_masked - clean_peak_time = peak_time[clean_mask] # signal_pixels + clean_camgeom = camgeom[clean_mask] + clean_image = image[clean_mask] + clean_peak_time = peak_time[clean_mask] # Check if clean_image is empty or all zeros if clean_image.size == 0 or not np.any(clean_image): continue # Skip this iteration if clean_image is empty or all zeros + # re-calculation of dl1 parameters hillas_params = hillas_parameters(clean_camgeom, clean_image) timing_params = timing_parameters( clean_camgeom, clean_image, clean_peak_time, hillas_params
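Note on the cleaning_multiplication_factor (cmf) used in the additional cleaning above: the standard LST tailcuts thresholds and the MAGIC picture/boundary thresholds are simply multiplied by cmf before the corrected image is re-cleaned, presumably so that the noise boosted together with the signal when the image is divided by the per-pixel cloud transmission does not survive the cleaning. A rough sketch of the scaling, with illustrative baseline thresholds rather than the values shipped in magicctapipe's config.yaml:

cmf = 1.25  # cleaning_multiplication_factor from the cloud_correction config

# Illustrative baseline thresholds (not the actual config.yaml values)
lst_tailcuts = {"picture_thresh": 8, "boundary_thresh": 4, "min_number_picture_neighbors": 2}
magic_clean_cfg = {"picture_thresh": 6, "boundary_thresh": 3.5}

# Only the picture/boundary thresholds are scaled; other settings stay untouched
lst_scaled = {
    key: value * cmf if key in ("picture_thresh", "boundary_thresh") else value
    for key, value in lst_tailcuts.items()
}
magic_scaled = {key: value * cmf for key, value in magic_clean_cfg.items()}

print(lst_scaled)    # {'picture_thresh': 10.0, 'boundary_thresh': 5.0, 'min_number_picture_neighbors': 2}
print(magic_scaled)  # {'picture_thresh': 7.5, 'boundary_thresh': 4.375}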