From 00dfa9e4f2eb60d1562c10288c75982ce0a6b77c Mon Sep 17 00:00:00 2001
From: Vincent LHEUREUX
Date: Tue, 30 Jul 2024 11:03:10 +0200
Subject: [PATCH 1/3] offboresight angle for all sensors

---
 src/xsar/base_dataset.py      |   1 +
 src/xsar/radarsat2_dataset.py | 141 +++++++++++++++++++++++-----------
 src/xsar/rcm_dataset.py       | 134 ++++++++++++++++++++++----------
 src/xsar/sentinel1_dataset.py |  72 +++++++++--------
 src/xsar/xsar.py              |   5 +-
 5 files changed, 231 insertions(+), 122 deletions(-)

diff --git a/src/xsar/base_dataset.py b/src/xsar/base_dataset.py
index b774aa60..3b822037 100644
--- a/src/xsar/base_dataset.py
+++ b/src/xsar/base_dataset.py
@@ -60,6 +60,7 @@ class BaseDataset(ABC):
         'elevation': 'f4',
         'altitude': 'f4',
         'ground_heading': 'f4',
+        'offboresight': 'f4',
         'nesz': None,
         'negz': None,
         'sigma0_raw': None,
diff --git a/src/xsar/radarsat2_dataset.py b/src/xsar/radarsat2_dataset.py
index a1808a8e..634e7462 100644
--- a/src/xsar/radarsat2_dataset.py
+++ b/src/xsar/radarsat2_dataset.py
@@ -17,7 +17,8 @@
 logger.addHandler(logging.NullHandler())
 
 # we know tiff as no geotransform : ignore warning
-warnings.filterwarnings("ignore", category=rasterio.errors.NotGeoreferencedWarning)
+warnings.filterwarnings(
+    "ignore", category=rasterio.errors.NotGeoreferencedWarning)
 
 # allow nan without warnings
 # some dask warnings are still non filtered: https://github.com/dask/dask/issues/3245
@@ -62,6 +63,7 @@ class RadarSat2Dataset(BaseDataset):
         activate or not the lazy loading of the high resolution fields
     """
+
     def __init__(self, dataset_id, resolution=None,
                  resampling=rasterio.enums.Resampling.rms,
                  chunks={'line': 5000, 'sample': 5000},
@@ -86,7 +88,8 @@ def __init__(self, dataset_id, resolution=None,
             # assert isinstance(sar_meta.coords2ll(100, 100),tuple)
         else:
             # we want self.sar_meta to be a dask actor on a worker
-            self.sar_meta = BlockingActorProxy(RadarSat2Meta.from_dict, dataset_id.dict)
+            self.sar_meta = BlockingActorProxy(
+                RadarSat2Meta.from_dict, dataset_id.dict)
         del dataset_id
         if self.sar_meta.multidataset:
             raise IndexError(
@@ -97,37 +100,37 @@ def __init__(self, dataset_id, resolution=None,
 
         # build datatree
         DN_tmp = load_digital_number(self.sar_meta.dt, resolution=resolution,
                                      resampling=resampling, chunks=chunks)['digital_numbers'].ds
-        ### In order to respect xsar convention, lines and samples have been flipped in the metadata when necessary.
-        ### `load_digital_number` uses these metadata but rio creates new coords without keeping the flipping done.
-        ### So we have to flip again a time digital numbers to respect xsar convention
+        # In order to respect xsar convention, lines and samples have been flipped in the metadata when necessary.
+        # `load_digital_number` uses these metadata but rio creates new coords without keeping the flipping done.
+ # So we have to flip again a time digital numbers to respect xsar convention DN_tmp = self.flip_sample_da(DN_tmp) DN_tmp = self.flip_line_da(DN_tmp) - ### geoloc + # geoloc geoloc = self.sar_meta.geoloc geoloc.attrs['history'] = 'annotations' - ### orbitAndAttitude + # orbitAndAttitude orbit_and_attitude = self.sar_meta.orbit_and_attitude orbit_and_attitude.attrs['history'] = 'annotations' - ### dopplerCentroid + # dopplerCentroid doppler_centroid = self.sar_meta.doppler_centroid doppler_centroid.attrs['history'] = 'annotations' - ### dopplerRateValues + # dopplerRateValues doppler_rate_values = self.sar_meta.doppler_rate_values doppler_rate_values.attrs['history'] = 'annotations' - ### chirp + # chirp chirp = self.sar_meta.chirp chirp.attrs['history'] = 'annotations' - ### radarParameters + # radarParameters radar_parameters = self.sar_meta.radar_parameters radar_parameters.attrs['history'] = 'annotations' - ### lookUpTables + # lookUpTables lut = self.sar_meta.lut lut.attrs['history'] = 'annotations' @@ -149,7 +152,8 @@ def __init__(self, dataset_id, resolution=None, 'gamma0': 'lutGamma', 'beta0': 'lutBeta', } - geoloc_vars = ['latitude', 'longitude', 'altitude', 'incidence', 'elevation'] + geoloc_vars = ['latitude', 'longitude', 'altitude', + 'incidence', 'elevation'] for vv in skip_variables: if vv in geoloc_vars: geoloc_vars.remove(vv) @@ -161,25 +165,28 @@ def __init__(self, dataset_id, resolution=None, self._dataset.attrs[att] = self.sar_meta.__getattr__(att) value_res_line = self.sar_meta.geoloc.line.attrs['rasterAttributes_sampledLineSpacing_value'] - value_res_sample = self.sar_meta.geoloc.pixel.attrs['rasterAttributes_sampledPixelSpacing_value'] + value_res_sample = self.sar_meta.geoloc.pixel.attrs[ + 'rasterAttributes_sampledPixelSpacing_value'] # self._load_incidence_from_lut() refe_spacing = 'slant' if resolution is not None: - refe_spacing = 'ground' # if the data sampling changed it means that the quantities are projected on ground + # if the data sampling changed it means that the quantities are projected on ground + refe_spacing = 'ground' if isinstance(resolution, str): value_res_sample = float(resolution.replace('m', '')) value_res_line = value_res_sample elif isinstance(resolution, dict): value_res_sample = self.sar_meta.geoloc.pixel.attrs['rasterAttributes_sampledPixelSpacing_value'] \ - * resolution['sample'] + * resolution['sample'] value_res_line = self.sar_meta.geoloc.line.attrs['rasterAttributes_sampledLineSpacing_value'] \ - * resolution['line'] + * resolution['line'] else: logger.warning('resolution type not handle (%s) should be str or dict -> sampleSpacing' ' and lineSpacing are not correct', type(resolution)) self._dataset['sampleSpacing'] = xr.DataArray(value_res_sample, attrs={'units': 'm', 'referential': refe_spacing}) - self._dataset['lineSpacing'] = xr.DataArray(value_res_line, attrs={'units': 'm'}) + self._dataset['lineSpacing'] = xr.DataArray( + value_res_line, attrs={'units': 'm'}) # dataset no-pol template for function evaluation on coordinates (*no* values used) # what's matter here is the shape of the image, not the values. 
@@ -197,30 +204,36 @@ def __init__(self, dataset_id, resolution=None, self._dataset = xr.merge([ xr.DataArray(data=self.sar_meta.samples_flipped, attrs={'meaning': - 'xsar convention : increasing incidence values along samples axis'} + 'xsar convention : increasing incidence values along samples axis'} ).to_dataset(name='samples_flipped'), self._dataset ]) self._dataset = xr.merge([ xr.DataArray(data=self.sar_meta.lines_flipped, attrs={'meaning': - 'xsar convention : increasing time along line axis ' - '(whatever ascending or descending pass direction)'} + 'xsar convention : increasing time along line axis ' + '(whatever ascending or descending pass direction)'} ).to_dataset(name='lines_flipped'), self._dataset ]) self._luts = self.lazy_load_luts() self.apply_calibration_and_denoising() - self._dataset = xr.merge([self.load_from_geoloc(geoloc_vars, lazy_loading=lazyloading), self._dataset]) + self._dataset = xr.merge([self.load_from_geoloc( + geoloc_vars, lazy_loading=lazyloading), self._dataset]) + + # compute offboresight in self._dataset + self._get_offboresight_from_elevation() rasters = self._load_rasters_vars() if rasters is not None: self._dataset = xr.merge([self._dataset, rasters]) self._dataset = xr.merge([self.interpolate_times, self._dataset]) if 'ground_heading' not in skip_variables: - self._dataset = xr.merge([self.load_ground_heading(), self._dataset]) + self._dataset = xr.merge( + [self.load_ground_heading(), self._dataset]) if 'velocity' not in skip_variables: - self._dataset = xr.merge([self.get_sensor_velocity(), self._dataset]) + self._dataset = xr.merge( + [self.get_sensor_velocity(), self._dataset]) self._rasterized_masks = self.load_rasterized_masks() self._dataset = xr.merge([self._rasterized_masks, self._dataset]) """a = self._dataset.copy() @@ -228,7 +241,8 @@ def __init__(self, dataset_id, resolution=None, self.datatree['measurement'] = self.datatree['measurement'].assign(self._dataset) a = self._dataset.copy() self._dataset = self.flip_line_da(a)""" - self.datatree['measurement'] = self.datatree['measurement'].assign(self._dataset) + self.datatree['measurement'] = self.datatree['measurement'].assign( + self._dataset) """self.datatree = datatree.DataTree.from_dict( {'measurement': self.datatree['measurement'], 'geolocation_annotation': self.datatree['geolocation_annotation'], @@ -252,13 +266,33 @@ def lazy_load_luts(self): merge_list = [] for lut_name in luts_ds: lut_f_delayed = dask.delayed()(luts_ds[lut_name]) - ar = dask.array.from_delayed(lut_f_delayed.data, (luts_ds[lut_name].data.size,), luts_ds[lut_name].dtype) + ar = dask.array.from_delayed( + lut_f_delayed.data, (luts_ds[lut_name].data.size,), luts_ds[lut_name].dtype) da = xr.DataArray(data=ar, dims=['sample'], coords={'sample': luts_ds[lut_name].sample}, attrs=luts_ds[lut_name].attrs) ds_lut_f_delayed = da.to_dataset(name=lut_name) merge_list.append(ds_lut_f_delayed) return xr.combine_by_coords(merge_list) + @timing + def _get_offboresight_from_elevation(self): + """ + Compute offboresight angle. 
+ + Returns + ------- + + """ + self._dataset["offboresight"] = self._dataset.elevation - \ + (30.1833947 * self._dataset.latitude ** 0 + + 0.0082998714 * self._dataset.latitude ** 1 - + 0.00031181534 * self._dataset.latitude ** 2 - + 0.0943533e-07 * self._dataset.latitude ** 3 + + 3.0191435e-08 * self._dataset.latitude ** 4 + + 4.968415428e-12 * self._dataset.latitude ** 5 - + 9.315371305e-13 * self._dataset.latitude ** 6) + 29.45 + self._dataset["offboresight"].attrs['comment'] = 'built from elevation angle and latitude' + @timing def load_from_geoloc(self, varnames, lazy_loading=True): """ @@ -314,7 +348,8 @@ def load_from_geoloc(self, varnames, lazy_loading=True): interp_func ) else: - da_val = interp_func(self._dataset.digital_number.line, self._dataset.digital_number.sample) + da_val = interp_func( + self._dataset.digital_number.line, self._dataset.digital_number.sample) da_var = xr.DataArray(data=da_val, dims=['line', 'sample'], coords={'line': self._dataset.digital_number.line, 'sample': self._dataset.digital_number.sample}) @@ -362,7 +397,8 @@ def _resample_lut_values(self, lut): lines = self.sar_meta.geoloc.line samples = np.arange(lut.shape[0]) lut_values_2d = dask.delayed(np.tile)(lut, (lines.shape[0], 1)) - interp_func = dask.delayed(RectBivariateSpline)(x=lines, y=samples, z=lut_values_2d, kx=1, ky=1) + interp_func = dask.delayed(RectBivariateSpline)( + x=lines, y=samples, z=lut_values_2d, kx=1, ky=1) """var = inter_func(self._dataset.digital_number.line, self._dataset.digital_number.sample) da_var = xr.DataArray(data=var, dims=['line', 'sample'], coords={'line': self._dataset.digital_number.line, @@ -409,11 +445,13 @@ def _get_lut_noise(self, var_name): try: lut_name = self._map_var_lut_noise[var_name] except KeyError: - raise ValueError("can't find noise lut name for var '%s'" % var_name) + raise ValueError( + "can't find noise lut name for var '%s'" % var_name) try: lut = self.sar_meta.dt['radarParameters'][lut_name] except KeyError: - raise ValueError("can't find noise lut from name '%s' for variable '%s'" % (lut_name, var_name)) + raise ValueError( + "can't find noise lut from name '%s' for variable '%s'" % (lut_name, var_name)) return lut @timing @@ -442,8 +480,10 @@ def _interpolate_for_noise_lut(self, var_name): noise_values = (10 ** (initial_lut / 10)) lines = np.arange(self.sar_meta.geoloc.line[-1] + 1) noise_values_2d = np.tile(noise_values, (lines.shape[0], 1)) - indexes = [first_pix + step * i for i in range(0, noise_values.shape[0])] - interp_func = dask.delayed(RectBivariateSpline)(x=lines, y=indexes, z=noise_values_2d, kx=1, ky=1) + indexes = [first_pix + step * + i for i in range(0, noise_values.shape[0])] + interp_func = dask.delayed(RectBivariateSpline)( + x=lines, y=indexes, z=noise_values_2d, kx=1, ky=1) da_var = map_blocks_coords( self._da_tmpl.astype(self._dtypes['noise_lut']), interp_func @@ -469,7 +509,8 @@ def _get_noise(self, var_name): concat_list = [] # add pol dim for pol in self._dataset.pol: - lut_noise = self._interpolate_for_noise_lut(var_name).assign_coords(pol=pol).expand_dims('pol') + lut_noise = self._interpolate_for_noise_lut( + var_name).assign_coords(pol=pol).expand_dims('pol') concat_list.append(lut_noise) return xr.concat(concat_list, dim='pol').to_dataset(name=name) @@ -484,13 +525,16 @@ def apply_calibration_and_denoising(self): for var_name, lut_name in self._map_var_lut.items(): if lut_name in self._luts: # merge var_name into dataset (not denoised) - self._dataset = self._dataset.merge(self._apply_calibration_lut(var_name)) + 
self._dataset = self._dataset.merge( + self._apply_calibration_lut(var_name)) # merge noise equivalent for var_name (named 'ne%sz' % var_name[0) self._dataset = self._dataset.merge(self._get_noise(var_name)) else: - logger.debug("Skipping variable '%s' ('%s' lut is missing)" % (var_name, lut_name)) + logger.debug("Skipping variable '%s' ('%s' lut is missing)" % ( + var_name, lut_name)) self._dataset = self._add_denoised(self._dataset) - self.datatree['measurement'] = self.datatree['measurement'].assign(self._dataset) + self.datatree['measurement'] = self.datatree['measurement'].assign( + self._dataset) # self._dataset = self.datatree[ # 'measurement'].to_dataset() # test oct 22 to see if then I can modify variables of the dt return @@ -575,7 +619,8 @@ def flip_sample_da(self, ds): pass_direction = self.sar_meta.dt.attrs['passDirection'] flipped_cases = [('Left', 'Ascending'), ('Right', 'Descending')] if (antenna_pointing, pass_direction) in flipped_cases: - new_ds = ds.copy().isel(sample=slice(None, None, -1)).assign_coords(sample=ds.sample) + new_ds = ds.copy().isel(sample=slice(None, None, -1) + ).assign_coords(sample=ds.sample) else: new_ds = ds return new_ds @@ -620,7 +665,8 @@ def interpolate_times(self): lines = self.sar_meta.geoloc.line samples = self.sar_meta.geoloc.pixel time_values_2d = np.tile(times, (samples.shape[0], 1)).transpose() - interp_func = RectBivariateSpline(x=lines, y=samples, z=time_values_2d.astype(float), kx=1, ky=1) + interp_func = RectBivariateSpline( + x=lines, y=samples, z=time_values_2d.astype(float), kx=1, ky=1) da_var = map_blocks_coords( self._da_tmpl.astype('datetime64[ns]'), interp_func @@ -652,7 +698,8 @@ def get_sensor_velocity(self): vels = np.sqrt(np.sum(velos, axis=0)) interp_f = interp1d(azimuth_times.astype(float), vels) _vels = interp_f(interp_times.astype(float)) - res = xr.DataArray(_vels, dims=['line'], coords={'line': self.dataset.line}) + res = xr.DataArray(_vels, dims=['line'], coords={ + 'line': self.dataset.line}) return xr.Dataset({'velocity': res}) def _reconfigure_reader_datatree(self): @@ -667,7 +714,7 @@ def _reconfigure_reader_datatree(self): """ dic = {'measurement': self.datatree['measurement'], - 'geolocation_annotation': self.datatree['geolocation_annotation'], + 'geolocation_annotation': self.datatree['geolocation_annotation'], } def del_items_in_dt(dt, list_items): @@ -706,14 +753,17 @@ def get_list_keys_delete(dt, list_keys, inside=True): new_dt['lut'] = dt['lut'].rename(rename_lut) # extract noise_lut, rename and put these in a dataset - new_dt['noise_lut'] = dt['radarParameters'].rename(rename_radarParameters) + new_dt['noise_lut'] = dt['radarParameters'].rename( + rename_radarParameters) new_dt['noise_lut'].attrs = {} # reset attributes - delete_list = get_list_keys_delete(new_dt['noise_lut'], rename_radarParameters.values(), inside=False) + delete_list = get_list_keys_delete( + new_dt['noise_lut'], rename_radarParameters.values(), inside=False) del_items_in_dt(new_dt['noise_lut'], delete_list) # Create a dataset for radar parameters without the noise luts new_dt['radarParameters'] = dt['radarParameters'] - delete_list = get_list_keys_delete(new_dt['radarParameters'], rename_radarParameters.keys()) + delete_list = get_list_keys_delete( + new_dt['radarParameters'], rename_radarParameters.keys()) del_items_in_dt(new_dt['radarParameters'], delete_list) # extract others dataset @@ -721,7 +771,8 @@ def get_list_keys_delete(dt, list_keys, inside=True): for key in dt.copy(): if key not in exclude: if key == 
'imageGenerationParameters': - new_dt[key] = datatree.DataTree(parent=None, children=copy_dt[key]) + new_dt[key] = datatree.DataTree( + parent=None, children=copy_dt[key]) else: new_dt[key] = copy_dt[key] self.datatree = new_dt @@ -767,5 +818,3 @@ def dataset(self): def rs2meta(self): logger.warning('Please use `sar_meta` to call the sar meta object') return self.sar_meta - - diff --git a/src/xsar/rcm_dataset.py b/src/xsar/rcm_dataset.py index 339c0fab..9aff38da 100644 --- a/src/xsar/rcm_dataset.py +++ b/src/xsar/rcm_dataset.py @@ -22,7 +22,8 @@ logger.addHandler(logging.NullHandler()) # we know tiff as no geotransform : ignore warning -warnings.filterwarnings("ignore", category=rasterio.errors.NotGeoreferencedWarning) +warnings.filterwarnings( + "ignore", category=rasterio.errors.NotGeoreferencedWarning) # allow nan without warnings # some dask warnings are still non filtered: https://github.com/dask/dask/issues/3245 @@ -94,7 +95,8 @@ def __init__(self, dataset_id, resolution=None, # assert isinstance(rs2meta.coords2ll(100, 100),tuple) else: # we want self.rs2meta to be a dask actor on a worker - self.sar_meta = BlockingActorProxy(RcmMeta.from_dict, dataset_id.dict) + self.sar_meta = BlockingActorProxy( + RcmMeta.from_dict, dataset_id.dict) del dataset_id if self.sar_meta.multidataset: @@ -103,15 +105,16 @@ def __init__(self, dataset_id, resolution=None, ) # build datatree - DN_tmp = self.load_digital_number(resolution=resolution, resampling=resampling, chunks=chunks) + DN_tmp = self.load_digital_number( + resolution=resolution, resampling=resampling, chunks=chunks) DN_tmp = self.flip_sample_da(DN_tmp) DN_tmp = self.flip_line_da(DN_tmp) - ### geoloc + # geoloc geoloc = self.sar_meta.geoloc geoloc.attrs['history'] = 'annotations' - ### orbitInformation + # orbitInformation orbit = self.sar_meta.orbit orbit.attrs['history'] = 'annotations' @@ -134,8 +137,7 @@ def __init__(self, dataset_id, resolution=None, } geoloc_vars = ['latitude', 'longitude', 'altitude', - 'incidence', 'elevation' - ] + 'incidence', 'elevation'] for vv in skip_variables: if vv in geoloc_vars: geoloc_vars.remove(vv) @@ -151,26 +153,29 @@ def __init__(self, dataset_id, resolution=None, # self._load_incidence_from_lut() refe_spacing = 'slant' if resolution is not None: - refe_spacing = 'ground' # if the data sampling changed it means that the quantities are projected on ground + # if the data sampling changed it means that the quantities are projected on ground + refe_spacing = 'ground' if isinstance(resolution, str): value_res_sample = float(resolution.replace('m', '')) value_res_line = value_res_sample elif isinstance(resolution, dict): - value_res_sample = self.sar_meta.pixel_sample_m * resolution['sample'] - value_res_line = self.sar_meta.pixel_line_m * resolution['line'] + value_res_sample = self.sar_meta.pixel_sample_m * \ + resolution['sample'] + value_res_line = self.sar_meta.pixel_line_m * \ + resolution['line'] else: logger.warning('resolution type not handle (%s) should be str or dict -> sampleSpacing' ' and lineSpacing are not correct', type(resolution)) self._dataset['sampleSpacing'] = \ xr.DataArray(value_res_sample, attrs={'referential': refe_spacing} | - self.sar_meta - .dt['imageReferenceAttributes/rasterAttributes']['sampledPixelSpacing'].attrs + self.sar_meta + .dt['imageReferenceAttributes/rasterAttributes']['sampledPixelSpacing'].attrs ) self._dataset['lineSpacing'] = \ xr.DataArray(value_res_line, attrs=self.sar_meta - .dt['imageReferenceAttributes/rasterAttributes']['sampledLineSpacing'].attrs + 
.dt['imageReferenceAttributes/rasterAttributes']['sampledLineSpacing'].attrs ) # dataset no-pol template for function evaluation on coordinates (*no* values used) @@ -190,34 +195,42 @@ def __init__(self, dataset_id, resolution=None, self._dataset = xr.merge([ xr.DataArray(data=self.sar_meta.samples_flipped, attrs={'meaning': - 'xsar convention : increasing incidence values along samples axis'} + 'xsar convention : increasing incidence values along samples axis'} ).to_dataset(name='samples_flipped'), self._dataset ]) self._dataset = xr.merge([ xr.DataArray(data=self.sar_meta.lines_flipped, attrs={'meaning': - 'xsar convention : increasing time along line axis ' - '(whatever ascending or descending pass direction)'} + 'xsar convention : increasing time along line axis ' + '(whatever ascending or descending pass direction)'} ).to_dataset(name='lines_flipped'), self._dataset ]) self._luts = self.lazy_load_luts() self._noise_luts = self.lazy_load_noise_luts() - self._noise_luts = self._noise_luts.drop(['pixelFirstNoiseValue', 'stepSize']) + self._noise_luts = self._noise_luts.drop( + ['pixelFirstNoiseValue', 'stepSize']) self.apply_calibration_and_denoising() - self._dataset = xr.merge([self.load_from_geoloc(geoloc_vars, lazy_loading=lazyloading), self._dataset]) + self._dataset = xr.merge([self.load_from_geoloc( + geoloc_vars, lazy_loading=lazyloading), self._dataset]) + # compute offboresight in self._dataset + self._get_offboresight_from_elevation() + rasters = self._load_rasters_vars() if rasters is not None: self._dataset = xr.merge([self._dataset, rasters]) if 'ground_heading' not in skip_variables: - self._dataset = xr.merge([self.load_ground_heading(), self._dataset]) + self._dataset = xr.merge( + [self.load_ground_heading(), self._dataset]) if 'velocity' not in skip_variables: - self._dataset = xr.merge([self.get_sensor_velocity(), self._dataset]) + self._dataset = xr.merge( + [self.get_sensor_velocity(), self._dataset]) self._rasterized_masks = self.load_rasterized_masks() self._dataset = xr.merge([self._rasterized_masks, self._dataset]) - self.datatree['measurement'] = self.datatree['measurement'].assign(self._dataset) + self.datatree['measurement'] = self.datatree['measurement'].assign( + self._dataset) # merge the datatree with the reader self.reconfigure_reader_datatree() self._dataset.attrs.update(self.sar_meta.to_dict("all")) @@ -239,14 +252,16 @@ def lazy_load_luts(self): lut = self.sar_meta.lut.lookup_tables.sel(sarCalibrationType=value, pole=pola)\ .rename({ 'pixel': 'sample', - } - ) + } + ) values_nb = lut.attrs['numberOfValues'] lut_f_delayed = dask.delayed()(lut) - ar = dask.array.from_delayed(lut_f_delayed.data, (values_nb,), lut.dtype) + ar = dask.array.from_delayed( + lut_f_delayed.data, (values_nb,), lut.dtype) da = xr.DataArray(data=ar, dims=['sample'], coords={'sample': lut.sample}, attrs=lut.attrs) - da = self._interpolate_var(da).assign_coords(pole=pola).rename({'pole': 'pol'}) + da = self._interpolate_var(da).assign_coords( + pole=pola).rename({'pole': 'pol'}) list_da.append(da) full_da = xr.concat(list_da, dim='pol') full_da.attrs = lut.attrs @@ -271,10 +286,11 @@ def lazy_load_noise_luts(self): lut = self.sar_meta.noise_lut.noiseLevelValues.sel(sarCalibrationType=value, pole=pola)\ .rename({ 'pixel': 'sample', - } - ) + } + ) lut_f_delayed = dask.delayed()(lut) - ar = dask.array.from_delayed(lut_f_delayed.data, (values_nb,), lut.dtype) + ar = dask.array.from_delayed( + lut_f_delayed.data, (values_nb,), lut.dtype) da = xr.DataArray(data=ar, dims=['sample'], 
coords={'sample': lut.sample}, attrs=lut.attrs) da = self._interpolate_var(da, type='noise').assign_coords(pole=pola)\ @@ -308,7 +324,8 @@ def _interpolate_var(self, var, type="lut"): """ accepted_types = ["lut", "noise", "incidence"] if type not in accepted_types: - raise ValueError("Please enter a type accepted ('lut', 'noise', 'incidence')") + raise ValueError( + "Please enter a type accepted ('lut', 'noise', 'incidence')") lines = self.sar_meta.geoloc.line samples = var.sample var_type = None @@ -321,7 +338,8 @@ def _interpolate_var(self, var, type="lut"): elif type == 'incidence': var_type = self._dtypes[type] var_2d = np.tile(var, (lines.shape[0], 1)) - interp_func = dask.delayed(RectBivariateSpline)(x=lines, y=samples, z=var_2d, kx=1, ky=1) + interp_func = dask.delayed(RectBivariateSpline)( + x=lines, y=samples, z=var_2d, kx=1, ky=1) da_var = map_blocks_coords( self._da_tmpl.astype(var_type), interp_func @@ -373,7 +391,8 @@ def _get_noise(self, var_name): try: lut = self._noise_luts[lut_name] except KeyError: - raise ValueError("can't find noise lut from name '%s' for variable '%s' " % (lut_name, var_name)) + raise ValueError( + "can't find noise lut from name '%s' for variable '%s' " % (lut_name, var_name)) return lut.to_dataset(name=name) def apply_calibration_and_denoising(self): @@ -387,13 +406,16 @@ def apply_calibration_and_denoising(self): for var_name, lut_name in self._map_var_lut.items(): if lut_name in self._luts: # merge var_name into dataset (not denoised) - self._dataset = self._dataset.merge(self._apply_calibration_lut(var_name)) + self._dataset = self._dataset.merge( + self._apply_calibration_lut(var_name)) # merge noise equivalent for var_name (named 'ne%sz' % var_name[0) self._dataset = self._dataset.merge(self._get_noise(var_name)) else: - logger.debug("Skipping variable '%s' ('%s' lut is missing)" % (var_name, lut_name)) + logger.debug("Skipping variable '%s' ('%s' lut is missing)" % ( + var_name, lut_name)) self._dataset = self._add_denoised(self._dataset) - self.datatree['measurement'] = self.datatree['measurement'].assign(self._dataset) + self.datatree['measurement'] = self.datatree['measurement'].assign( + self._dataset) # self._dataset = self.datatree[ # 'measurement'].to_dataset() # test oct 22 to see if then I can modify variables of the dt return @@ -446,7 +468,8 @@ def _load_incidence_from_lut(self): angles = incidence.angles values_nb = incidence.attrs['numberOfValues'] lut_f_delayed = dask.delayed()(angles) - ar = dask.array.from_delayed(lut_f_delayed.data, (values_nb,), self._dtypes['incidence']) + ar = dask.array.from_delayed( + lut_f_delayed.data, (values_nb,), self._dtypes['incidence']) da = xr.DataArray(data=ar, dims=['sample'], coords={'sample': angles.sample}, attrs=angles.attrs) da = self._interpolate_var(da, type='incidence') @@ -473,6 +496,25 @@ def _load_elevation_from_lut(self): inside = angle_rad * earth_radius / (earth_radius + satellite_height) return np.degrees(np.arcsin(inside)) + @timing + def _get_offboresight_from_elevation(self): + """ + Compute offboresight angle. 
+ + Returns + ------- + + """ + self._dataset["offboresight"] = self._dataset.elevation - \ + (30.1833947 * self._dataset.latitude ** 0 + + 0.0082998714 * self._dataset.latitude ** 1 - + 0.00031181534 * self._dataset.latitude ** 2 - + 0.0943533e-07 * self._dataset.latitude ** 3 + + 3.0191435e-08 * self._dataset.latitude ** 4 + + 4.968415428e-12 * self._dataset.latitude ** 5 - + 9.315371305e-13 * self._dataset.latitude ** 6) + 29.45 + self._dataset["offboresight"].attrs['comment'] = 'built from elevation angle and latitude' + @timing def load_from_geoloc(self, varnames, lazy_loading=True): """ @@ -508,6 +550,7 @@ def load_from_geoloc(self, varnames, lazy_loading=True): da = self._load_elevation_from_lut() da.name = varname da_list.append(da) + else: if varname == 'longitude': z_values = self.sar_meta.geoloc[varname] @@ -529,7 +572,8 @@ def load_from_geoloc(self, varnames, lazy_loading=True): interp_func ) else: - da_val = interp_func(self._dataset.digital_number.line, self._dataset.digital_number.sample) + da_val = interp_func( + self._dataset.digital_number.line, self._dataset.digital_number.sample) da_var = xr.DataArray(data=da_val, dims=['line', 'sample'], coords={'line': self._dataset.digital_number.line, 'sample': self._dataset.digital_number.sample}) @@ -563,7 +607,8 @@ def interpolate_times(self): lines = self.sar_meta.geoloc.line samples = self.sar_meta.geoloc.pixel time_values_2d = np.tile(times, (samples.shape[0], 1)).transpose() - interp_func = RectBivariateSpline(x=lines, y=samples, z=time_values_2d.astype(float), kx=1, ky=1) + interp_func = RectBivariateSpline( + x=lines, y=samples, z=time_values_2d.astype(float), kx=1, ky=1) da_var = map_blocks_coords( self._da_tmpl.astype('datetime64[ns]'), interp_func @@ -585,7 +630,8 @@ def get_sensor_velocity(self): vels = np.sqrt(np.sum(velos, axis=0)) interp_f = interp1d(azimuth_times.astype(float), vels) _vels = interp_f(self.interpolate_times['time'].astype(float)) - res = xr.DataArray(_vels, dims=['line'], coords={'line': self.dataset.line}) + res = xr.DataArray(_vels, dims=['line'], coords={ + 'line': self.dataset.line}) return xr.Dataset({'velocity': res}) @timing @@ -662,7 +708,8 @@ def _sort_list_files_and_get_pols(list_tiff): chunks['pol'] = 1 # sort chunks keys like map_dims - chunks = dict(sorted(chunks.items(), key=lambda pair: list(map_dims.keys()).index(pair[0]))) + chunks = dict(sorted(chunks.items(), key=lambda pair: list( + map_dims.keys()).index(pair[0]))) chunks_rio = {map_dims[d]: chunks[d] for d in map_dims.keys()} self.resolution = None if resolution is None: @@ -729,9 +776,11 @@ def _sort_list_files_and_get_pols(list_tiff): ).chunk(chunks) # create coordinates at box center - translate = Affine.translation((resolution['sample'] - 1) / 2, (resolution['line'] - 1) / 2) + translate = Affine.translation( + (resolution['sample'] - 1) / 2, (resolution['line'] - 1) / 2) scale = Affine.scale( - rio.width // resolution['sample'] * resolution['sample'] / out_shape[1], + rio.width // resolution['sample'] * + resolution['sample'] / out_shape[1], rio.height // resolution['line'] * resolution['line'] / out_shape[0]) sample, _ = translate * scale * (dn.sample, 0) _, line = translate * scale * (0, dn.line) @@ -787,7 +836,8 @@ def flip_sample_da(self, ds): pass_direction = self.sar_meta.dt['sourceAttributes/orbitAndAttitude/orbitInformation'].attrs['passDirection'] flipped_cases = [('Left', 'Ascending'), ('Right', 'Descending')] if (antenna_pointing, pass_direction) in flipped_cases: - new_ds = ds.copy().isel(sample=slice(None, 
None, -1)).assign_coords(sample=ds.sample)
+            new_ds = ds.copy().isel(sample=slice(None, None, -1)
+                                    ).assign_coords(sample=ds.sample)
         else:
             new_ds = ds
         return new_ds

diff --git a/src/xsar/sentinel1_dataset.py b/src/xsar/sentinel1_dataset.py
index 84262e26..293d1f1f 100644
--- a/src/xsar/sentinel1_dataset.py
+++ b/src/xsar/sentinel1_dataset.py
@@ -139,16 +139,18 @@ def __init__(self, dataset_id, resolution=None,
         # geoloc
         geoloc = self.sar_meta.geoloc
         geoloc.attrs['history'] = 'annotations'
+
+        # offboresight angle
         geoloc["offboresightAngle"] = geoloc.elevationAngle - \
-            (30.1833947 * geoloc.latitude ** 0 + \
-            0.0082998714 * geoloc.latitude ** 1 - \
-            0.00031181534 * geoloc.latitude ** 2 - \
-            0.0943533e-07 * geoloc.latitude ** 3 + \
-            3.0191435e-08 * geoloc.latitude ** 4 + \
-            4.968415428e-12 *geoloc.latitude ** 5 - \
-            9.315371305e-13 * geoloc.latitude ** 6) + 29.45
-        geoloc["offboresightAngle"].attrs['comment']='built from elevation angle and latitude'
-
+            (30.1833947 * geoloc.latitude ** 0 +
+             0.0082998714 * geoloc.latitude ** 1 -
+             0.00031181534 * geoloc.latitude ** 2 -
+             0.0943533e-07 * geoloc.latitude ** 3 +
+             3.0191435e-08 * geoloc.latitude ** 4 +
+             4.968415428e-12 * geoloc.latitude ** 5 -
+             9.315371305e-13 * geoloc.latitude ** 6) + 29.45
+        geoloc["offboresightAngle"].attrs['comment'] = 'built from elevation angle and latitude'
+
         # bursts
         bu = self.sar_meta._bursts
         bu.attrs['history'] = 'annotations'
@@ -292,10 +294,9 @@ def __init__(self, dataset_id, resolution=None,
 
         # load land_mask by default for GRD products
-
         if 'GRD' in str(self.datatree.attrs['product']):
             self.add_high_resolution_variables(
-                patch_variable=patch_variable, luts=luts, lazy_loading=lazyloading)
+                patch_variable=patch_variable, luts=luts, lazy_loading=lazyloading)
         if self.apply_recalibration:
             self.select_gains()
         self.apply_calibration_and_denoising()
@@ -304,7 +305,8 @@ def __init__(self, dataset_id, resolution=None,
         self.datatree['measurement'].attrs = self.datatree.attrs
         if self.sar_meta.product == 'SLC' and 'WV' not in self.sar_meta.swath:  # TOPS cases
             tmp = self.corrected_range_noise_lut(self.datatree)
-            self.datatree['noise_range'].ds = tmp # the corrcted noise_range dataset shold now contain an attrs 'corrected_range_noise_lut'
+            # the corrected noise_range dataset should now contain an attr 'corrected_range_noise_lut'
+            self.datatree['noise_range'].ds = tmp
 
         self.sliced = False
         """True if dataset is a slice of original L1 dataset"""
@@ -314,7 +316,7 @@ def __init__(self, dataset_id, resolution=None,
         # save original bbox
         self._bbox_coords_ori = self._bbox_coords
 
-    def corrected_range_noise_lut(self,dt):
+    def corrected_range_noise_lut(self, dt):
         """
         Patch F.Nouguier see https://jira-projects.cls.fr/browse/MPCS-3581 and https://github.com/umr-lops/xsar_slc/issues/175
         Return range noise lut with corrected line numbering. This function should be used only on the full SLC dataset dt
         # Detection of azimuthTime jumps (linked to burst changes). Burst sensingTime should NOT be used since they have erroneous value too !
         line_shift = 0
         tt = dt['measurement']['time']
         i_jump = np.ravel(np.argwhere(np.diff(tt)

Date: Tue, 30 Jul 2024 10:34:36 +0000
Subject: [PATCH 2/3] new rule for recalibration

---
 src/xsar/sentinel1_dataset.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/xsar/sentinel1_dataset.py b/src/xsar/sentinel1_dataset.py
index 293d1f1f..ab6bf097 100644
--- a/src/xsar/sentinel1_dataset.py
+++ b/src/xsar/sentinel1_dataset.py
@@ -526,14 +526,16 @@ def add_high_resolution_variables(self, luts=False, patch_variable=True, skip_va
 
         sel_cal = dataframe_aux.loc[(dataframe_aux.sat_name == self.sar_meta.manifest_attrs['satellite']) &
                                     (dataframe_aux.aux_type == "CAL") &
+                                    (dataframe_aux.icid == int(self.sar_meta.manifest_attrs['icid'])) &
                                     (dataframe_aux.validation_date <= self.sar_meta.start_date)]
         sel_cal = sel_cal.sort_values(
             by=["validation_date", "generation_date"], ascending=False)
         path_new_cal = sel_cal.iloc[0].aux_path
 
         sel_pp1 = dataframe_aux.loc[(dataframe_aux.sat_name == self.sar_meta.manifest_attrs['satellite']) &
-                                    (dataframe_aux.aux_type == "PP1")
-                                    & (dataframe_aux.validation_date <= self.sar_meta.start_date)]
+                                    (dataframe_aux.aux_type == "PP1") &
+                                    (dataframe_aux.icid == int(self.sar_meta.manifest_attrs['icid'])) &
+                                    (dataframe_aux.validation_date <= self.sar_meta.start_date)]
         sel_pp1 = sel_pp1.sort_values(
             by=["validation_date", "generation_date"], ascending=False)
         path_new_pp1 = sel_pp1.iloc[0].aux_path

From 9cecb1fa65b2ee3eb5c7d6b35f7f6aaef6af547d Mon Sep 17 00:00:00 2001
From: Vincent LHEUREUX
Date: Tue, 6 Aug 2024 11:05:08 +0200
Subject: [PATCH 3/3] rcm denoising now depends on attr 'noiseSubtractionPerformed'

---
 src/xsar/rcm_dataset.py | 41 ++++++++++++++++++++++++++++++---------
 1 file changed, 32 insertions(+), 9 deletions(-)

diff --git a/src/xsar/rcm_dataset.py b/src/xsar/rcm_dataset.py
index b07d3eb7..fb19a155 100644
--- a/src/xsar/rcm_dataset.py
+++ b/src/xsar/rcm_dataset.py
@@ -123,6 +123,10 @@ def __init__(self, dataset_id, resolution=None,
 
         self._dataset = self.datatree['measurement'].to_dataset()
 
+        # merge the datatree with the reader
+        for group in self.sar_meta.dt:
+            self.datatree[group] = self.sar_meta.dt[group]
+
         # dict mapping for calibration type in the reader
         self._map_calibration_type = {
             'sigma0': 'Sigma Nought',
@@ -232,6 +236,7 @@ def __init__(self, dataset_id, resolution=None,
         self.datatree['measurement'] = self.datatree['measurement'].assign(
             self._dataset)
         # merge the datatree with the reader
+        self.reconfigure_reader_datatree()
         self._dataset.attrs.update(self.sar_meta.to_dict("all"))
         self.datatree.attrs.update(self.sar_meta.to_dict("all"))
 
@@ -437,22 +442,43 @@ def _add_denoised(self, ds, clip=False, vars=None):
         xarray.DataSet
             dataset with denoised vars
         """
+        already_denoised = self.datatree['imageGenerationParameters'][
+            'sarProcessingInformation'].attrs['noiseSubtractionPerformed']
+
         if vars is None:
             vars = ['sigma0', 'beta0', 'gamma0']
         for varname in vars:
+            varname_raw = varname + '_raw'
             noise = 'ne%sz' % varname[0]
             if varname_raw not in ds:
                 continue
             else:
-                denoised = ds[varname_raw] - ds[noise]
-                if clip:
-                    denoised = denoised.clip(min=0)
-                    denoised.attrs['comment'] = 'clipped, no values <0'
+                if not already_denoised:
+                    denoised = ds[varname_raw] - ds[noise]
+                    if clip:
+                        denoised = denoised.clip(min=0)
+                        denoised.attrs['comment'] = 'clipped, no values <0'
+                    else:
+                        denoised.attrs['comment'] = 'not clipped, some values can be <0'
+                    ds[varname] = denoised
+
                 else:
-                    denoised.attrs['comment'] = 'not clipped, some values can be <0'
-                ds[varname] = denoised
+                    logger.debug(
+                        "product was already denoised (noiseSubtractionPerformed = True), noise added back")
+                    denoised = ds[varname_raw]
+                    denoised.attrs['denoising information'] = 'already denoised by Canadian Agency'
+                    if clip:
+                        denoised = denoised.clip(min=0)
+                        denoised.attrs['comment'] = 'clipped, no values <0'
+                    else:
+                        denoised.attrs['comment'] = 'not clipped, some values can be <0'
+                    ds[varname] = denoised
+
+                    ds[varname_raw] = denoised + ds[noise]
+                    ds[varname_raw].attrs['denoising information'] = 'product was already denoised, noise added back'
+
         return ds
 
     def _load_incidence_from_lut(self):
@@ -868,11 +894,8 @@ def flip_line_da(self, ds):
 
     def reconfigure_reader_datatree(self):
         """
-        Merge self.datatree with the reader's one.
         Merge attributes of the reader's datatree in the attributes of self.datatree
         """
-        for group in self.sar_meta.dt:
-            self.datatree[group] = self.sar_meta.dt[group]
 
         self.datatree.attrs |= self.sar_meta.dt.attrs
 
         return
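
Note (review aid, not part of the patch series): patches 1 and 3 derive the
off-boresight angle by subtracting a degree-6 polynomial in latitude, which
models the boresight elevation angle, from the elevation angle, then adding
back a constant 29.45 deg. A standalone sketch of that formula follows; the
helper name is hypothetical, the coefficients are copied verbatim from the
diff:

import numpy as np

def offboresight_from_elevation(elevation, latitude):
    # Degree-6 polynomial in latitude (coefficients from the patch) that
    # models the latitude dependence of the boresight elevation angle.
    boresight = (30.1833947
                 + 0.0082998714 * latitude
                 - 0.00031181534 * latitude ** 2
                 - 0.0943533e-07 * latitude ** 3
                 + 3.0191435e-08 * latitude ** 4
                 + 4.968415428e-12 * latitude ** 5
                 - 9.315371305e-13 * latitude ** 6)
    return elevation - boresight + 29.45

# Example: a 34 deg elevation angle observed at 45 deg latitude
print(offboresight_from_elevation(np.float32(34.0), np.float32(45.0)))

After this series, RadarSat2 and RCM datasets expose the result as the 2D
'offboresight' variable (dtype 'f4', per the base_dataset.py change), while
Sentinel-1 stores it as 'offboresightAngle' in the geolocation annotations.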
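
Note (review aid, not part of the patch series): the new _add_denoised rule in
patch 3 can be condensed as below. The flat helper signature is hypothetical;
in the patch, `already_denoised` comes from the 'noiseSubtractionPerformed'
attribute of the imageGenerationParameters/sarProcessingInformation annotation.

import numpy as np
import xarray as xr

def add_denoised(ds, varname, already_denoised, clip=False):
    raw, noise = varname + '_raw', 'ne%sz' % varname[0]
    if not already_denoised:
        # usual case: subtract the noise equivalent from the raw variable
        denoised = ds[raw] - ds[noise]
    else:
        # product already denoised by the provider: use it directly
        denoised = ds[raw]
    if clip:
        denoised = denoised.clip(min=0)  # no values < 0
    ds[varname] = denoised
    if already_denoised:
        # rebuild the raw variable by adding the noise back
        ds[raw] = denoised + ds[noise]
    return ds

# Tiny demo with synthetic values
demo = xr.Dataset({'sigma0_raw': ('sample', np.array([1.0, 2.0])),
                   'nesz': ('sample', np.array([0.1, 0.1]))})
print(add_denoised(demo, 'sigma0', already_denoised=True)['sigma0_raw'].values)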