diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 8f6816285c..faa0aea2cc 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -90,7 +90,7 @@ jobs:
         # may break the conda-forge libraries trying to use newer glibc versions
         run: |
           python -m pip install \
-            --index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple/ \
+            --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \
             --trusted-host pypi.anaconda.org \
             --no-deps --pre --upgrade \
             matplotlib \
diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml
index a32d6a53dc..68463b2b03 100644
--- a/.github/workflows/deploy-sdist.yaml
+++ b/.github/workflows/deploy-sdist.yaml
@@ -19,7 +19,7 @@ jobs:
 
       - name: Publish package to PyPI
         if: github.event.action == 'published'
-        uses: pypa/gh-action-pypi-publish@v1.8.6
+        uses: pypa/gh-action-pypi-publish@v1.8.8
        with:
          user: __token__
          password: ${{ secrets.pypi_password }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 76867bf2b1..995f3035c4 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -20,7 +20,7 @@ repos:
       - id: bandit
         args: [--ini, .bandit]
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: 'v1.2.0'  # Use the sha / tag you want to point at
+    rev: 'v1.4.1'  # Use the sha / tag you want to point at
     hooks:
       - id: mypy
         additional_dependencies:
diff --git a/AUTHORS.md b/AUTHORS.md
index 202fe687bc..796ee9743b 100644
--- a/AUTHORS.md
+++ b/AUTHORS.md
@@ -27,6 +27,7 @@ The following people have made contributions to this project:
 - [Ulrik Egede (egede)](https://github.com/egede)
 - [Joleen Feltz (joleenf)](https://github.com/joleenf)
 - [Stephan Finkensieper (sfinkens)](https://github.com/sfinkens) - Deutscher Wetterdienst
+- [Gionata Ghiggi (ghiggi)](https://github.com/ghiggi)
 - [Andrea Grillini (AppLEaDaY)](https://github.com/AppLEaDaY)
 - [Blanka Gvozdikova (gvozdikb)](https://github.com/gvozdikb)
 - [Nina Håkansson (ninahakansson)](https://github.com/ninahakansson)
@@ -35,6 +36,7 @@ The following people have made contributions to this project:
 - [Gerrit Holl (gerritholl)](https://github.com/gerritholl) - Deutscher Wetterdienst
 - [David Hoese (djhoese)](https://github.com/djhoese)
 - [Marc Honnorat (honnorat)](https://github.com/honnorat)
+- [Lloyd Hughes (system123)](https://github.com/system123)
 - [Mikhail Itkin (mitkin)](https://github.com/mitkin)
 - [Tommy Jasmin (tommyjasmin)](https://github.com/tommyjasmin)
 - [Jactry Zeng](https://github.com/jactry)
@@ -85,3 +87,4 @@ The following people have made contributions to this project:
 - [praerien (praerien)](https://github.com/praerien)
 - [Xin Zhang (zxdawn)](https://github.com/zxdawn)
 - [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600)
+- [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6b6be918dd..799ae0a867 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,67 @@
+## Version 0.43.0 (2023/07/03)
+
+### Issues Closed
+
+* [Issue 2519](https://github.com/pytroll/satpy/issues/2519) - MSG Dust RGB adding coastlines and grid to the image
+* [Issue 2506](https://github.com/pytroll/satpy/issues/2506) - Add xarray_kwargs capability to the geocat reader ([PR 2507](https://github.com/pytroll/satpy/pull/2507) by [@joleenf](https://github.com/joleenf))
+* [Issue 2502](https://github.com/pytroll/satpy/issues/2502) - Cropping S3 image not working
+* [Issue 2494](https://github.com/pytroll/satpy/issues/2494) - avhrr_l1b_gaclac fails to read most files from
NOAA CLASS ([PR 2501](https://github.com/pytroll/satpy/pull/2501) by [@sfinkens](https://github.com/sfinkens))
+* [Issue 2490](https://github.com/pytroll/satpy/issues/2490) - ninjogeotiff writer adds offset/scale factor when this is not meaningful ([PR 2491](https://github.com/pytroll/satpy/pull/2491) by [@gerritholl](https://github.com/gerritholl))
+* [Issue 2483](https://github.com/pytroll/satpy/issues/2483) - Caching doesn't work with `scn.crop` ([PR 2485](https://github.com/pytroll/satpy/pull/2485) by [@djhoese](https://github.com/djhoese))
+* [Issue 2465](https://github.com/pytroll/satpy/issues/2465) - Possibility of dual licensing: GPL-3.0 & MIT
+* [Issue 2464](https://github.com/pytroll/satpy/issues/2464) - MITIFF writer using pillow: turn off compression due to rowsperstrip issues
+* [Issue 2463](https://github.com/pytroll/satpy/issues/2463) - seviri_l1b_native reader issue with reading remote files (azure)
+* [Issue 2409](https://github.com/pytroll/satpy/issues/2409) - Inconsistent behavior of time attributes in EUM L1 GEO readers ([PR 2420](https://github.com/pytroll/satpy/pull/2420) by [@YouvaEUMex](https://github.com/YouvaEUMex))
+* [Issue 1749](https://github.com/pytroll/satpy/issues/1749) - Load from blended scene
+* [Issue 859](https://github.com/pytroll/satpy/issues/859) - Doesn't recognize MODIS L2 file
+
+In this release 12 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 2522](https://github.com/pytroll/satpy/pull/2522) - Fix CF tests due to new xarray release
+* [PR 2516](https://github.com/pytroll/satpy/pull/2516) - Fix SEVIRI native reader failing when missing main header
+* [PR 2510](https://github.com/pytroll/satpy/pull/2510) - Fix warnings from NWCSAF reader
+* [PR 2507](https://github.com/pytroll/satpy/pull/2507) - Fix HDF4 support in geocat reader with hardcoded engine ([2506](https://github.com/pytroll/satpy/issues/2506))
+* [PR 2492](https://github.com/pytroll/satpy/pull/2492) - Fix xarray version for cf tests
+* [PR 2491](https://github.com/pytroll/satpy/pull/2491) - Change logic for ninjogeotiff gradient/axisintercept tags ([2490](https://github.com/pytroll/satpy/issues/2490))
+* [PR 2485](https://github.com/pytroll/satpy/pull/2485) - Fix angle caching not handling a specific type of irregular chunking ([2483](https://github.com/pytroll/satpy/issues/2483))
+* [PR 2481](https://github.com/pytroll/satpy/pull/2481) - Fix NWCSAF reading for NOAA-21
+
+#### Features added
+
+* [PR 2521](https://github.com/pytroll/satpy/pull/2521) - Add a median filter modifier
+* [PR 2508](https://github.com/pytroll/satpy/pull/2508) - Add support for OLCI L2 files which are missing Frame_IDs
+* [PR 2504](https://github.com/pytroll/satpy/pull/2504) - Improve flexibility of olci level2 reader
+* [PR 2501](https://github.com/pytroll/satpy/pull/2501) - Add Pygac reference to avhrr_l1b_gaclac documentation ([2494](https://github.com/pytroll/satpy/issues/2494))
+* [PR 2499](https://github.com/pytroll/satpy/pull/2499) - Add option to clip negative ABI radiances
+* [PR 2497](https://github.com/pytroll/satpy/pull/2497) - Enable to pass a custom function to Scene.aggregate
+* [PR 2489](https://github.com/pytroll/satpy/pull/2489) - Add "neutral_resolution_band" kwarg to RatioSharpenedRGB/SelfSharpenedRGB
+* [PR 2480](https://github.com/pytroll/satpy/pull/2480) - Add helper-function for reading SEVIRI L1.5 Native header.
+* [PR 2449](https://github.com/pytroll/satpy/pull/2449) - Generalise the `true_color_reproduction` composite and enhancement
+* [PR 2420](https://github.com/pytroll/satpy/pull/2420) - Fix inconsistent behavior of time attributes in EUM L1 GEO readers ([2409](https://github.com/pytroll/satpy/issues/2409))
+* [PR 2259](https://github.com/pytroll/satpy/pull/2259) - Refactor `CFWriter.save_datasets` and enable retrieval of equivalent xr.Dataset with `scn.to_xarray()`
+* [PR 2117](https://github.com/pytroll/satpy/pull/2117) - Add reader for GMS-5 VISSR data
+
+#### Documentation changes
+
+* [PR 2514](https://github.com/pytroll/satpy/pull/2514) - Fix argument name in DayNightComposite example document
+* [PR 2501](https://github.com/pytroll/satpy/pull/2501) - Add Pygac reference to avhrr_l1b_gaclac documentation ([2494](https://github.com/pytroll/satpy/issues/2494))
+* [PR 2478](https://github.com/pytroll/satpy/pull/2478) - Fix eccodes package names in setup.py, update documentation for setting up development environment.
+* [PR 2474](https://github.com/pytroll/satpy/pull/2474) - Reorganize seviri_l2_grib.yaml file and add more documentation to seviri_l1b_native.py
+
+#### Clean ups
+
+* [PR 2523](https://github.com/pytroll/satpy/pull/2523) - Convert CF Writer tests to pytest
+* [PR 2486](https://github.com/pytroll/satpy/pull/2486) - Fix leftover deprecated nosetest teardown methods
+* [PR 2478](https://github.com/pytroll/satpy/pull/2478) - Fix eccodes package names in setup.py, update documentation for setting up development environment.
+* [PR 2474](https://github.com/pytroll/satpy/pull/2474) - Reorganize seviri_l2_grib.yaml file and add more documentation to seviri_l1b_native.py
+
+In this release 28 pull requests were closed.
+
+
 ## Version 0.42.2 (2023/05/10)
 
 ### Issues Closed
diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml
index 6f7e33f11a..1c92222a1f 100644
--- a/continuous_integration/environment.yaml
+++ b/continuous_integration/environment.yaml
@@ -5,10 +5,12 @@ dependencies:
   - xarray!=2022.9.0
   - dask
   - distributed
+  - dask-image
   - donfig
   - appdirs
   - toolz
   - Cython
+  - numba
   - sphinx
   - cartopy
   - panel>=0.12.7
@@ -54,6 +56,7 @@ dependencies:
   - xarray-datatree
   - pint-xarray
   - ephem
+  - bokeh<3
   - pip:
     - trollsift
     - trollimage>=1.20
diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml
index 82168df77d..ce147a1644 100644
--- a/doc/rtd_environment.yml
+++ b/doc/rtd_environment.yml
@@ -6,11 +6,13 @@ dependencies:
   - pip
   - appdirs
   - dask
+  - dask-image
   - defusedxml
   - donfig
   # 2.19.1 seems to cause library linking issues
   - eccodes>=2.20
   - graphviz
+  - numba
   - numpy
   - pillow
   - pooch
diff --git a/doc/source/composites.rst b/doc/source/composites.rst
index 4804aba0df..d0c494e414 100644
--- a/doc/source/composites.rst
+++ b/doc/source/composites.rst
@@ -173,7 +173,7 @@ In the case below, the image shows its day portion and day/night transition
 with night portion blacked-out instead of transparent::
 
     >>> from satpy.composites import DayNightCompositor
-    >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88., day_night="day_only", need_alpha=False)
+    >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88., day_night="day_only", include_alpha=False)
     >>> composite = compositor([local_scene['true_color']])
 
 RealisticColors
diff --git a/doc/source/config.rst b/doc/source/config.rst
index 63da03dac7..b1777c9751 100644
--- a/doc/source/config.rst
+++ b/doc/source/config.rst
@@ -258,6 +258,23 @@ as part of the
:func:`~satpy.modifiers.angles.get_angles` and used by multiple modifiers and composites including the default rayleigh correction. +Clipping Negative Infrared Radiances +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* **Environment variable**: ``SATPY_READERS__CLIP_NEGATIVE_RADIANCES`` +* **YAML/Config Key**: ``readers.clip_negative_radiances`` +* **Default**: False + +Whether to clip negative infrared radiances to the minimum allowable value before +computing the brightness temperature. +If ``clip_negative_radiances=False``, pixels with negative radiances will have +``np.nan`` brightness temperatures. + +Clipping of negative radiances is currently implemented for the following readers: + +* ``abi_l1b`` + + Temporary Directory ^^^^^^^^^^^^^^^^^^^ diff --git a/doc/source/dev_guide/index.rst b/doc/source/dev_guide/index.rst index a1d9a26cb5..e877fd1c63 100644 --- a/doc/source/dev_guide/index.rst +++ b/doc/source/dev_guide/index.rst @@ -49,7 +49,7 @@ can do this using conda_:: .. _conda: https://conda.io/ -This will create a new environment called "satpy-dev" with Python 3.8 +This will create a new environment called "satpy-dev" with Python 3.11 installed. The second command will activate the environment so any future conda, python, or pip commands will use this new environment. @@ -68,6 +68,12 @@ should be run from the root of the cloned Satpy repository (where the You can now edit the python files in your cloned repository and have them immediately reflected in your conda environment. +All the required dependencies for a full development environment, i.e. running the +tests and building the documentation, can be installed with:: + + conda install eccodes + pip install -e ".[all]" + Running tests ============= @@ -80,6 +86,7 @@ libraries. If you want to run all Satpy tests you will need to install additional dependencies that aren't needed for regular Satpy usage. To install them run:: + conda install eccodes pip install -e ".[tests]" Satpy tests can be executed by running:: @@ -115,8 +122,12 @@ Documentation ============= Satpy's documentation is built using Sphinx. All documentation lives in the -``doc/`` directory of the project repository. After editing the source files -there the documentation can be generated locally:: +``doc/`` directory of the project repository. For building the documentation, +additional packages are needed. These can be installed with :: + + pip install -e ".[all]" + +After editing the source files there the documentation can be generated locally:: cd doc make html diff --git a/satpy/_config.py b/satpy/_config.py index 2b583c435c..4abc00aba2 100644 --- a/satpy/_config.py +++ b/satpy/_config.py @@ -62,6 +62,9 @@ def impr_files(module_name: str) -> Path: 'demo_data_dir': '.', 'download_aux': True, 'sensor_angles_position_preference': 'actual', + 'readers': { + 'clip_negative_radiances': False, + }, } # Satpy main configuration object diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py new file mode 100644 index 0000000000..25fe728b9f --- /dev/null +++ b/satpy/_scene_converters.py @@ -0,0 +1,124 @@ +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. 
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Helper functions for converting the Scene object to some other object."""
+
+import xarray as xr
+
+from satpy.dataset import DataID
+
+
+def _get_dataarrays_from_identifiers(scn, identifiers):
+    """Return a list of DataArray based on a single or list of identifiers.
+
+    An identifier can be a DataID or a string with the name of a valid DataID.
+    """
+    if isinstance(identifiers, (str, DataID)):
+        identifiers = [identifiers]
+
+    if identifiers is not None:
+        dataarrays = [scn[ds] for ds in identifiers]
+    else:
+        dataarrays = [scn._datasets.get(ds) for ds in scn._wishlist]
+        dataarrays = [dataarray for dataarray in dataarrays if dataarray is not None]
+    return dataarrays
+
+
+def to_xarray(scn,
+              datasets=None,  # DataID
+              header_attrs=None,
+              exclude_attrs=None,
+              flatten_attrs=False,
+              pretty=True,
+              include_lonlats=True,
+              epoch=None,
+              include_orig_name=True,
+              numeric_name_prefix='CHANNEL_'):
+    """Merge all xr.DataArray(s) of a satpy.Scene into a CF-compliant xarray object.
+
+    If all Scene DataArrays are on the same area, it returns an xr.Dataset.
+    If the Scene DataArrays are on different areas, it currently fails, although
+    in the future we might return a DataTree object grouped by area.
+
+    Parameters
+    ----------
+    scn: satpy.Scene
+        Satpy Scene.
+    datasets (iterable):
+        List of Satpy Scene datasets to include in the output xr.Dataset.
+        Elements can be a string name, a wavelength as a number, a DataID,
+        or a DataQuery object.
+        If None (the default), it includes all loaded Scene datasets.
+    header_attrs:
+        Global attributes of the output xr.Dataset.
+    epoch (str):
+        Reference time for encoding the time coordinates (if available).
+        Example format: "seconds since 1970-01-01 00:00:00".
+        If None, the default reference time is retrieved using
+        "from satpy.writers.cf_writer import EPOCH".
+    flatten_attrs (bool):
+        If True, flatten dict-type attributes.
+    exclude_attrs (list):
+        List of xr.DataArray attribute names to be excluded.
+    include_lonlats (bool):
+        If True, include 'latitude' and 'longitude' coordinates.
+        If the 'area' attribute is a SwathDefinition, latitude and longitude
+        coordinates are always included.
+    pretty (bool):
+        Don't modify coordinate names, if possible. Makes the file prettier,
+        but possibly less consistent.
+    include_orig_name (bool):
+        Include the original dataset name as a variable attribute in the xr.Dataset.
+    numeric_name_prefix (str):
+        Prefix to add to each variable whose name starts with a digit.
+        Use '' or None to leave this out.
+
+    Returns
+    -------
+    ds : xr.Dataset
+        A CF-compliant xr.Dataset
+
+    """
+    from satpy.writers.cf_writer import EPOCH, collect_cf_datasets
+
+    if epoch is None:
+        epoch = EPOCH
+
+    # Get list of DataArrays
+    if datasets is None:
+        datasets = list(scn.keys())  # list all loaded DataIDs
+    list_dataarrays = _get_dataarrays_from_identifiers(scn, datasets)
+
+    # Check that at least one DataArray could be returned
+    if len(list_dataarrays) == 0:
+        return xr.Dataset()
+
+    # Collect xr.Dataset for each group
+    grouped_datasets, header_attrs = collect_cf_datasets(list_dataarrays=list_dataarrays,
+                                                         header_attrs=header_attrs,
+                                                         exclude_attrs=exclude_attrs,
+                                                         flatten_attrs=flatten_attrs,
+                                                         pretty=pretty,
+                                                         include_lonlats=include_lonlats,
+                                                         epoch=epoch,
+                                                         include_orig_name=include_orig_name,
+                                                         numeric_name_prefix=numeric_name_prefix,
+                                                         groups=None)
+    if len(grouped_datasets) == 1:
+        ds = grouped_datasets[None]
+        return ds
+    else:
+        msg = """The Scene object contains datasets with different areas.
+                 Resample the Scene to have matching dimensions using e.g. scn.resample(resampler="native") """
+        raise NotImplementedError(msg)
diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index 0948d543ab..da4d1a9e5c 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1039,15 +1039,36 @@ class RatioSharpenedRGB(GenericCompositor):
         new_G = G * ratio
         new_B = B * ratio
 
+    In some cases, there could be multiple high resolution bands::
+
+        R_lo - 1000m resolution - shape=(2000, 2000)
+        G_hi - 500m resolution - shape=(4000, 4000)
+        B - 1000m resolution - shape=(2000, 2000)
+        R_hi - 500m resolution - shape=(4000, 4000)
+
+    To avoid the green band getting involved in calculating ratio or sharpening,
+    add "neutral_resolution_band: green" in the YAML config file. This way
+    only the blue band will get sharpened::
+
+        ratio = R_hi / R_lo
+        new_R = R_hi
+        new_G = G_hi
+        new_B = B * ratio
+
     """
 
     def __init__(self, *args, **kwargs):
-        """Instanciate the ration sharpener."""
+        """Instantiate the ratio sharpener."""
         self.high_resolution_color = kwargs.pop("high_resolution_band", "red")
+        self.neutral_resolution_color = kwargs.pop("neutral_resolution_band", None)
         if self.high_resolution_color not in ['red', 'green', 'blue', None]:
             raise ValueError("RatioSharpenedRGB.high_resolution_band must "
                              "be one of ['red', 'green', 'blue', None]. Not "
                              "'{}'".format(self.high_resolution_color))
+        if self.neutral_resolution_color not in ['red', 'green', 'blue', None]:
+            raise ValueError("RatioSharpenedRGB.neutral_resolution_band must "
+                             "be one of ['red', 'green', 'blue', None]. 
Not " + "'{}'".format(self.neutral_resolution_color)) super(RatioSharpenedRGB, self).__init__(*args, **kwargs) def __call__(self, datasets, optional_datasets=None, **info): @@ -1082,28 +1103,33 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) if 'rows_per_scan' in high_res.attrs: new_attrs.setdefault('rows_per_scan', high_res.attrs['rows_per_scan']) new_attrs.setdefault('resolution', high_res.attrs['resolution']) - low_res_colors = ['red', 'green', 'blue'] - low_resolution_index = low_res_colors.index(self.high_resolution_color) + else: LOG.debug("No sharpening band specified for ratio sharpening") high_res = None - low_resolution_index = 0 + bands = {'red': low_res_red, 'green': low_res_green, 'blue': low_res_blue} if high_res is not None: - low_res = (low_res_red, low_res_green, low_res_blue)[low_resolution_index] - ratio = da.map_blocks( - _get_sharpening_ratio, - high_res.data, - low_res.data, - meta=np.array((), dtype=high_res.dtype), - dtype=high_res.dtype, - chunks=high_res.chunks, - ) - with xr.set_options(keep_attrs=True): - low_res_red = high_res if low_resolution_index == 0 else low_res_red * ratio - low_res_green = high_res if low_resolution_index == 1 else low_res_green * ratio - low_res_blue = high_res if low_resolution_index == 2 else low_res_blue * ratio - return low_res_red, low_res_green, low_res_blue, new_attrs + self._sharpen_bands_with_high_res(bands, high_res) + + return bands['red'], bands['green'], bands['blue'], new_attrs + + def _sharpen_bands_with_high_res(self, bands, high_res): + ratio = da.map_blocks( + _get_sharpening_ratio, + high_res.data, + bands[self.high_resolution_color].data, + meta=np.array((), dtype=high_res.dtype), + dtype=high_res.dtype, + chunks=high_res.chunks, + ) + + bands[self.high_resolution_color] = high_res + + with xr.set_options(keep_attrs=True): + for color in bands.keys(): + if color != self.neutral_resolution_color and color != self.high_resolution_color: + bands[color] = bands[color] * ratio def _combined_sharpened_info(self, info, new_attrs): combined_info = {} diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index b0fa2b8aa3..6f6a66654d 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -54,6 +54,7 @@ def invert(img, *args): def exclude_alpha(func): """Exclude the alpha channel from the DataArray before further processing.""" + @wraps(func) def wrapper(data, **kwargs): bands = data.coords['bands'].values @@ -70,6 +71,7 @@ def wrapper(data, **kwargs): data.data = new_data.sel(bands=bands).data data.attrs = attrs return data + return wrapper @@ -85,6 +87,7 @@ def my_enhancement_function(data): """ + @wraps(func) def wrapper(data, **kwargs): attrs = data.attrs @@ -103,12 +106,14 @@ def wrapper(data, **kwargs): def on_dask_array(func): """Pass the underlying dask array to *func* instead of the xarray.DataArray.""" + @wraps(func) def wrapper(data, **kwargs): dims = data.dims coords = data.coords d_arr = func(data.data, **kwargs) return xr.DataArray(d_arr, dims=dims, coords=coords) + return wrapper @@ -118,10 +123,12 @@ def using_map_blocks(func): This means dask will call the provided function with a single chunk as a numpy array. 
""" + @wraps(func) def wrapper(data, **kwargs): return da.map_blocks(func, data, meta=np.array((), dtype=data.dtype), dtype=data.dtype, chunks=data.chunks, **kwargs) + return on_dask_array(wrapper) @@ -250,7 +257,7 @@ def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs): rgb = (luma + (rgb - luma) * saturation).clip(0) # reinhard - reinhard_luma = (luma / (1 + luma)) * (1 + luma/(white**2)) + reinhard_luma = (luma / (1 + luma)) * (1 + luma / (white ** 2)) coef = reinhard_luma / luma rgb = rgb * coef @@ -482,11 +489,11 @@ def _create_colormap_from_dataset(img, dataset, color_scale): """Create a colormap from an auxiliary variable in a source file.""" match = find_in_ancillary(img.data, dataset) return Colormap.from_array_with_metadata( - match, img.data.dtype, color_scale, - valid_range=img.data.attrs.get("valid_range"), - scale_factor=img.data.attrs.get("scale_factor", 1), - add_offset=img.data.attrs.get("add_offset", 0), - remove_last=False) + match, img.data.dtype, color_scale, + valid_range=img.data.attrs.get("valid_range"), + scale_factor=img.data.attrs.get("scale_factor", 1), + add_offset=img.data.attrs.get("add_offset", 0), + remove_last=False) def three_d_effect(img, **kwargs): @@ -562,3 +569,70 @@ def _bt_threshold(band_data, threshold, high_coeffs, low_coeffs): return np.where(band_data >= threshold, high_coeffs.offset - high_coeffs.factor * band_data, low_coeffs.offset - low_coeffs.factor * band_data) + + +def jma_true_color_reproduction(img): + """Apply CIE XYZ matrix and return True Color Reproduction data. + + Himawari-8 True Color Reproduction Approach Based on the CIE XYZ Color System + Hidehiko MURATA, Kotaro SAITOH, and Yasuhiko SUMIDA + Meteorological Satellite Center, Japan Meteorological Agency + NOAA National Environmental Satellite, Data, and Information Service + Colorado State University—CIRA + https://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html + """ + _jma_true_color_reproduction(img.data, + platform=img.data.attrs['platform_name']) + + +@exclude_alpha +@on_dask_array +def _jma_true_color_reproduction(img_data, platform=None): + """Convert from AHI RGB space to sRGB space. + + The conversion matrices for this are supplied per-platform. + The matrices are computed using the method described in the paper: + 'True Color Imagery Rendering for Himawari-8 with a Color Reproduction Approach + Based on the CIE XYZ Color System' (:doi:`10.2151/jmsj.2018-049`). + + """ + # Conversion matrix dictionaries specifying sensor and platform. 
+    ccm_dict = {'himawari-8': np.array([[1.1629, 0.1539, -0.2175],
+                                        [-0.0252, 0.8725, 0.1300],
+                                        [-0.0204, -0.1100, 1.0633]]),
+
+                'himawari-9': np.array([[1.1619, 0.1542, -0.2168],
+                                        [-0.0271, 0.8749, 0.1295],
+                                        [-0.0202, -0.1103, 1.0634]]),
+
+                'goes-16': np.array([[1.1425, 0.1819, -0.2250],
+                                     [-0.0951, 0.9363, 0.1360],
+                                     [-0.0113, -0.1179, 1.0621]]),
+                'goes-17': np.array([[1.1437, 0.1818, -0.2262],
+                                     [-0.0952, 0.9354, 0.1371],
+                                     [-0.0113, -0.1178, 1.0620]]),
+                'goes-18': np.array([[1.1629, 0.1539, -0.2175],
+                                     [-0.0252, 0.8725, 0.1300],
+                                     [-0.0204, -0.1100, 1.0633]]),
+
+                'mtg-i1': np.array([[0.9007, 0.2086, -0.0100],
+                                    [-0.0475, 1.0662, -0.0414],
+                                    [-0.0123, -0.1342, 1.0794]]),
+
+                'geo-kompsat-2a': np.array([[1.1661, 0.1489, -0.2157],
+                                            [-0.0255, 0.8745, 0.1282],
+                                            [-0.0205, -0.1103, 1.0637]]),
+                }
+
+    # A conversion matrix and platform name are required
+    if platform is None:
+        raise ValueError("Missing platform name.")
+
+    # Get the satellite-specific conversion matrix
+    try:
+        ccm = ccm_dict[platform.lower()]
+    except KeyError:
+        raise KeyError(f"No conversion matrix found for platform {platform}")
+
+    output = da.dot(img_data.T, ccm.T)
+    return output.T
diff --git a/satpy/enhancements/ahi.py b/satpy/enhancements/ahi.py
deleted file mode 100644
index a0f332cfa2..0000000000
--- a/satpy/enhancements/ahi.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2021 Satpy developers
-#
-# This file is part of satpy.
-#
-# satpy is free software: you can redistribute it and/or modify it under the
-# terms of the GNU General Public License as published by the Free Software
-# Foundation, either version 3 of the License, or (at your option) any later
-# version.
-#
-# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
-# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
-# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""Enhancement functions specific to the AHI sensor."""
-import dask.array as da
-import numpy as np
-
-from satpy.enhancements import exclude_alpha, on_dask_array
-
-
-def jma_true_color_reproduction(img, **kwargs):
-    """Apply CIE XYZ matrix and return True Color Reproduction data.
-
-    Himawari-8 True Color Reproduction Approach Based on the CIE XYZ Color System
-    Hidehiko MURATA, Kotaro SAITOH, and Yasuhiko SUMIDA
-    Meteorological Satellite Center, Japan Meteorological Agency
-    NOAA National Environmental Satellite, Data, and Information Service
-    Colorado State University—CIRA
-    https://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html
-    """
-    _jma_true_color_reproduction(img.data)
-
-
-@exclude_alpha
-@on_dask_array
-def _jma_true_color_reproduction(img_data):
-    ccm = np.array([
-        [1.1759, 0.0561, -0.1322],
-        [-0.0386, 0.9587, 0.0559],
-        [-0.0189, -0.1161, 1.0777]
-    ])
-    output = da.dot(img_data.T, ccm.T)
-    return output.T
diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml
index 489ef1f210..d34e86313b 100644
--- a/satpy/etc/composites/abi.yaml
+++ b/satpy/etc/composites/abi.yaml
@@ -705,3 +705,34 @@ composites:
     prerequisites:
       - name: C14
     standard_name: highlighted_toa_brightness_temperature
+
+  true_color_reproduction:
+    # JMA True Color Reproduction complete composite with corrected and uncorrected blend.
+ # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html + compositor: !!python/name:satpy.composites.DayNightCompositor + standard_name: true_color_reproduction + lim_low: 77. + lim_high: 88. + prerequisites: + - true_color_reproduction_corr + - true_color_reproduction_uncorr + + true_color_reproduction_corr: + # JMA True Color Reproduction corrected composite. + compositor: !!python/name:satpy.composites.SelfSharpenedRGB + prerequisites: + - name: C02 + modifiers: [sunz_corrected, rayleigh_corrected] + - name: green + - name: C01 + modifiers: [sunz_corrected, rayleigh_corrected] + standard_name: true_color_reproduction_color_stretch + + true_color_reproduction_uncorr: + # JMA True Color Reproduction uncorrected composite. + compositor: !!python/name:satpy.composites.SelfSharpenedRGB + prerequisites: + - name: C02 + - name: green_nocorr + - name: C01 + standard_name: true_color_reproduction_color_stretch diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index a2e80a4ac1..9e36bf7d7f 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -300,6 +300,16 @@ composites: high_resolution_band: red standard_name: true_color + true_color_reproduction_night_ir: + # JMA True Color Reproduction complete composite with corrected and uncorrected blend. + # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html + compositor: !!python/name:satpy.composites.DayNightCompositor + lim_low: 83. + lim_high: 88. + prerequisites: + - true_color_reproduction + - ir_cloud_day + true_color_reproduction: # JMA True Color Reproduction complete composite with corrected and uncorrected blend. # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html diff --git a/satpy/etc/composites/ami.yaml b/satpy/etc/composites/ami.yaml index 26eb54ca02..55466ea7e1 100644 --- a/satpy/etc/composites/ami.yaml +++ b/satpy/etc/composites/ami.yaml @@ -259,3 +259,35 @@ composites: prerequisites: - night_ir_alpha - _night_background_hires + + + true_color_reproduction: + # JMA True Color Reproduction complete composite with corrected and uncorrected blend. + # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html + compositor: !!python/name:satpy.composites.DayNightCompositor + standard_name: true_color_reproduction + lim_low: 73. + lim_high: 85. + prerequisites: + - true_color_reproduction_corr + - true_color_reproduction_uncorr + + true_color_reproduction_corr: + # JMA True Color Reproduction corrected composite. + compositor: !!python/name:satpy.composites.SelfSharpenedRGB + prerequisites: + - name: VI006 + modifiers: [sunz_corrected, rayleigh_corrected] + - name: green + - name: VI004 + modifiers: [sunz_corrected, rayleigh_corrected] + standard_name: true_color_reproduction_color_stretch + + true_color_reproduction_uncorr: + # JMA True Color Reproduction uncorrected composite. + compositor: !!python/name:satpy.composites.SelfSharpenedRGB + prerequisites: + - name: VI006 + - name: green_nocorr + - name: VI004 + standard_name: true_color_reproduction_color_stretch diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 4e450a9779..193415656b 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -64,3 +64,34 @@ composites: - name: ndvi_hybrid_green_raw - name: vis_04 standard_name: true_color_raw + + true_color_reproduction: + # JMA True Color Reproduction complete composite with corrected and uncorrected blend. 
+ # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html + compositor: !!python/name:satpy.composites.DayNightCompositor + standard_name: true_color_reproduction + lim_low: 73. + lim_high: 85. + prerequisites: + - true_color_reproduction_corr + - true_color_reproduction_uncorr + + true_color_reproduction_corr: + # JMA True Color Reproduction corrected composite. + compositor: !!python/name:satpy.composites.SelfSharpenedRGB + prerequisites: + - name: vis_06 + modifiers: [sunz_corrected, rayleigh_corrected] + - name: ndvi_hybrid_green + - name: vis_04 + modifiers: [sunz_corrected, rayleigh_corrected] + standard_name: true_color_reproduction_color_stretch + + true_color_reproduction_uncorr: + # JMA True Color Reproduction uncorrected composite. + compositor: !!python/name:satpy.composites.SelfSharpenedRGB + prerequisites: + - name: vis_06 + - name: ndvi_hybrid_green_raw + - name: vis_04 + standard_name: true_color_reproduction_color_stretch diff --git a/satpy/etc/composites/visir.yaml b/satpy/etc/composites/visir.yaml index 8bd61ff7a8..0bb177e9ff 100644 --- a/satpy/etc/composites/visir.yaml +++ b/satpy/etc/composites/visir.yaml @@ -96,6 +96,11 @@ modifiers: - solar_azimuth_angle - solar_zenith_angle + median5x5: + modifier: !!python/name:satpy.modifiers.filters.Median + median_filter_params: + size: 5 + composites: airmass: diff --git a/satpy/etc/enhancements/ahi.yaml b/satpy/etc/enhancements/ahi.yaml index cd6a833512..8951eaf7cd 100644 --- a/satpy/etc/enhancements/ahi.yaml +++ b/satpy/etc/enhancements/ahi.yaml @@ -9,25 +9,3 @@ enhancements: stretch: crude min_stretch: [-26.2, -43.2, 243.9] max_stretch: [0.6, 6.7, 208.5] - - true_color_reproduction_color_stretch: - standard_name: true_color_reproduction_color_stretch - operations: - - name: color - method: !!python/name:satpy.enhancements.ahi.jma_true_color_reproduction - - name: stretch - method: !!python/name:satpy.enhancements.stretch - kwargs: - stretch: log - min_stretch: [3.,3.,3.] # tweak min/max values for desired contrast - max_stretch: [150., 150., 150.] - - true_color_reproduction: - standard_name: true_color_reproduction - operations: - - name: stretch - method: !!python/name:satpy.enhancements.stretch - kwargs: - stretch: crude - min_stretch: [0,0,0] - max_stretch: [1,1,1] diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index d25f98610b..37b375f36c 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1151,3 +1151,25 @@ enhancements: stretch: crude min_stretch: [273, 233, 0.] max_stretch: [338, 253, 80.] + + true_color_reproduction_color_stretch: + standard_name: true_color_reproduction_color_stretch + operations: + - name: color + method: !!python/name:satpy.enhancements.jma_true_color_reproduction + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: log + min_stretch: [3.,3.,3.] # tweak min/max values for desired contrast + max_stretch: [150., 150., 150.] 
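+      # This enhancement is referenced (via its standard_name) by the
+      # true_color_reproduction_corr/_uncorr composites defined in the
+      # abi.yaml, ahi.yaml, ami.yaml and fci.yaml composite files.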
+ + true_color_reproduction: + standard_name: true_color_reproduction + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [0,0,0] + max_stretch: [1,1,1] diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index f558b3dfd0..f89699ae3a 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -16,7 +16,7 @@ reader: file_types: fci_l1c_fdhsi: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler - file_patterns: [ '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-FDHSI-FD-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' ] + file_patterns: [ '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-FDHSI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' ] expected_segments: 40 required_netcdf_variables: - attr/platform @@ -74,7 +74,7 @@ file_types: - ir_133 fci_l1c_hrfi: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler - file_patterns: [ '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-HRFI-FD-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' ] + file_patterns: [ '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-HRFI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' ] expected_segments: 40 required_netcdf_variables: - attr/platform diff --git a/satpy/etc/readers/gms5-vissr_l1b.yaml b/satpy/etc/readers/gms5-vissr_l1b.yaml new file mode 100644 index 0000000000..7bcca57399 --- /dev/null +++ b/satpy/etc/readers/gms5-vissr_l1b.yaml @@ -0,0 +1,99 @@ +reader: + name: gms5-vissr_l1b + short_name: GMS-5 VISSR L1b + long_name: GMS-5 VISSR Level 1b + description: > + Reader for GMS-5 VISSR Level 1b data. 
References: + + - https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf + - https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf + + status: Alpha + supports_fsspec: true + sensors: [gms5-vissr] + default_channels: [] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + gms5_vissr_vis: + file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler + file_patterns: + - 'VISSR_{start_time:%Y%m%d_%H%M}_VIS.{mode}.IMG' + - 'VISSR_{start_time:%Y%m%d_%H%M}_VIS.{mode}.IMG.gz' + + gms5_vissr_ir1: + file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler + file_patterns: + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.{mode}.IMG' + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.{mode}.IMG.gz' + + gms5_vissr_ir2: + file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler + file_patterns: + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.{mode}.IMG' + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.{mode}.IMG.gz' + + + gms5_vissr_ir3: + file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler + file_patterns: + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.{mode}.IMG' + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.{mode}.IMG.gz' + + +datasets: + VIS: + name: VIS + sensor: gms5-vissr + wavelength: [0.55, 0.73, 0.9] + resolution: 1250 + calibration: + counts: + standard_name: counts + units: 1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + file_type: gms5_vissr_vis + + IR1: + name: IR1 + sensor: gms5-vissr + wavelength: [10.5, 11.0, 11.5] + resolution: 5000 + calibration: + counts: + standard_name: counts + units: 1 + brightness_temperature: + standard_name: toa_brightness_temperature + units: K + file_type: gms5_vissr_ir1 + + IR2: + name: IR2 + sensor: gms5-vissr + wavelength: [11.5, 12.0, 12.5] + resolution: 5000 + calibration: + counts: + standard_name: counts + units: 1 + brightness_temperature: + standard_name: toa_brightness_temperature + units: K + file_type: gms5_vissr_ir2 + + IR3: + name: IR3 + sensor: gms5-vissr + wavelength: [6.5, 6.75, 7.0] + resolution: 5000 + calibration: + counts: + standard_name: counts + units: 1 + brightness_temperature: + standard_name: toa_brightness_temperature + units: K + file_type: gms5_vissr_ir3 diff --git a/satpy/etc/readers/nwcsaf-pps_nc.yaml b/satpy/etc/readers/nwcsaf-pps_nc.yaml index d3a61e80ef..29fabf304c 100644 --- a/satpy/etc/readers/nwcsaf-pps_nc.yaml +++ b/satpy/etc/readers/nwcsaf-pps_nc.yaml @@ -85,6 +85,24 @@ datasets: name: cma_extended_pal file_type: nc_nwcsaf_cma + cma_conditions: + name: cma_conditions + file_type: nc_nwcsaf_cma + coordinates: [lon, lat] + standard_name: cma_conditions + + cma_quality: + name: cma_quality + file_type: nc_nwcsaf_cma + coordinates: [lon, lat] + standard_name: cma_quality + + cma_status_flag: + name: cma_status_flag + file_type: nc_nwcsaf_cma + coordinates: [lon, lat] + standard_name: cma_status_flag + cmaprob: name: cmaprob file_type: nc_nwcsaf_cmaprob @@ -315,3 +333,21 @@ datasets: file_key: quality file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] + + cmic_dcwp: + name: cmic_dcwp + file_key: dcwp + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] + coordinates: [lon, lat] + + cmic_dcre: + name: cmic_dcre + file_key: dcre + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] + coordinates: [lon, lat] + + cmic_dcot: + name: cmic_dcot + file_key: dcot + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] + coordinates: [lon, lat] diff --git 
a/satpy/etc/readers/olci_l2.yaml b/satpy/etc/readers/olci_l2.yaml index 5da6e0b1ce..110bb11a2e 100644 --- a/satpy/etc/readers/olci_l2.yaml +++ b/satpy/etc/readers/olci_l2.yaml @@ -13,43 +13,69 @@ file_types: esa_l2_reflectance: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_reflectance.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_reflectance.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_reflectance.nc' esa_l2_chl_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_nn.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_nn.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_nn.nc' esa_l2_chl_oc4me: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_oc4me.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_oc4me.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_oc4me.nc' esa_l2_iop_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iop_nn.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iop_nn.nc' + - 
'{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iop_nn.nc' esa_l2_trsp: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/trsp.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/trsp.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/trsp.nc' esa_l2_tsm_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tsm_nn.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tsm_nn.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tsm_nn.nc' esa_l2_wqsf: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc' esa_l2_gifapar: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/gifapar.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/gifapar.nc' + - 
'{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/gifapar.nc' esa_l2_rc_gifapar: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/rc_gifapar.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/rc_gifapar.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/rc_gifapar.nc' esa_l2_iwv: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iwv.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iwv.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iwv.nc' esa_l2_otci: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/otci.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/otci.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/otci.nc' esa_angles: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIAngles - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc' + - 
'{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc' esa_geo: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIGeo - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc' datasets: @@ -356,10 +382,8 @@ datasets: name: chl_nn sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: algal_pigment_concentration - units: "lg(re mg.m-3)" + standard_name: algal_pigment_concentration + units: "lg(re mg.m-3)" coordinates: [longitude, latitude] file_type: esa_l2_chl_nn nc_key: CHL_NN @@ -368,10 +392,8 @@ datasets: name: iop_nn sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: cdm_absorption_coefficient - units: "lg(re m-l)" + standard_name: cdm_absorption_coefficient + units: "lg(re m-1)" coordinates: [longitude, latitude] file_type: esa_l2_iop_nn nc_key: ADG443_NN @@ -380,10 +402,8 @@ datasets: name: trsp sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: diffuse_attenuation_coefficient - units: "lg(re m-l)" + standard_name: diffuse_attenuation_coefficient + units: "lg(re m-1)" coordinates: [longitude, latitude] file_type: esa_l2_trsp nc_key: KD490_M07 @@ -392,10 +412,8 @@ datasets: name: tsm_nn sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: total_suspended_matter_concentration - units: "lg(re g.m-3)" + standard_name: total_suspended_matter_concentration + units: "lg(re g.m-3)" coordinates: [longitude, latitude] file_type: esa_l2_tsm_nn nc_key: TSM_NN @@ -412,10 +430,8 @@ datasets: name: iwv sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: integrated_water_vapour_column - units: "kg.m-2" + standard_name: integrated_water_vapour_column + units: "kg.m-2" coordinates: [longitude, latitude] file_type: esa_l2_iwv nc_key: IWV @@ -424,10 +440,8 @@ datasets: name: iwv_unc sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: uncertainty_estimate_integrated_water_vapour_column - units: "kg.m-2" + standard_name: uncertainty_estimate_integrated_water_vapour_column + units: "kg.m-2" coordinates: [longitude, latitude] file_type: esa_l2_iwv nc_key: IWV_unc @@ -436,9 +450,7 @@ datasets: name: otci sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: terrestrial_chlorophyll_index + standard_name: terrestrial_chlorophyll_index coordinates: [longitude, latitude] file_type: esa_l2_otci nc_key: OTCI @@ -447,9 +459,7 @@ datasets: name: otci_unc sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: uncertainty_estimate_terrestrial_chlorophyll_index + standard_name: 
uncertainty_estimate_terrestrial_chlorophyll_index
     coordinates: [longitude, latitude]
     file_type: esa_l2_otci
     nc_key: OTCI_unc
@@ -458,9 +468,7 @@ datasets:
     name: otci_quality_flags
     sensor: olci
     resolution: 300
-    calibration:
-      reflectance:
-        standard_name: quality_flags_for_terrestrial_chlorophyll_index
+    standard_name: quality_flags_for_terrestrial_chlorophyll_index
     coordinates: [longitude, latitude]
     file_type: esa_l2_otci
     nc_key: OTCI_quality_flags
@@ -469,9 +477,7 @@ datasets:
     name: gifapar
     sensor: olci
     resolution: 300
-    calibration:
-      reflectance:
-        standard_name: green_instantaneous_fraction_of_absorbed_photosynthetically_available_radiation
+    standard_name: green_instantaneous_fraction_of_absorbed_photosynthetically_available_radiation
     coordinates: [longitude, latitude]
     file_type: esa_l2_gifapar
     nc_key: GIFAPAR
@@ -480,9 +486,7 @@ datasets:
     name: gifapar_unc
    sensor: olci
     resolution: 300
-    calibration:
-      reflectance:
-        standard_name: uncertainty_in_green_instantaneous_fraction_of_absorbed_photosynthetically_available_radiation
+    standard_name: uncertainty_in_green_instantaneous_fraction_of_absorbed_photosynthetically_available_radiation
     coordinates: [longitude, latitude]
     file_type: esa_l2_gifapar
     nc_key: GIFAPAR_unc
@@ -491,10 +495,8 @@ datasets:
     name: rc_gifapar_oa10
     sensor: olci
     resolution: 300
-    calibration:
-      reflectance:
-        standard_name: rectified_reflectance_for_band_oa10
-        units: 'mW.m-2.sr-1.nm-1'
+    standard_name: rectified_reflectance_for_band_oa10
+    units: 'mW.m-2.sr-1.nm-1'
     coordinates: [longitude, latitude]
     file_type: esa_l2_rc_gifapar
     nc_key: RC681
@@ -503,10 +505,8 @@ datasets:
     name: rc_gifapar_oa10_unc
     sensor: olci
     resolution: 300
-    calibration:
-      reflectance:
-        standard_name: ucnertainty_in_rectified_reflectance_for_band_oa10
-        units: 'mW.m-2.sr-1.nm-1'
+    standard_name: uncertainty_in_rectified_reflectance_for_band_oa10
+    units: 'mW.m-2.sr-1.nm-1'
     coordinates: [longitude, latitude]
     file_type: esa_l2_rc_gifapar
     nc_key: RC681_unc
@@ -515,10 +515,8 @@ datasets:
     name: rc_gifapar_oa17
     sensor: olci
     resolution: 300
-    calibration:
-      reflectance:
-        standard_name: rectified_reflectance_for_band_oa17
-        units: 'mW.m-2.sr-1.nm-1'
+    standard_name: rectified_reflectance_for_band_oa17
+    units: 'mW.m-2.sr-1.nm-1'
     coordinates: [longitude, latitude]
     file_type: esa_l2_rc_gifapar
     nc_key: RC865
@@ -527,10 +525,8 @@ datasets:
     name: rc_gifapar_oa17_unc
     sensor: olci
     resolution: 300
-    calibration:
-      reflectance:
-        standard_name: ucnertainty_in_rectified_reflectance_for_band_oa17
-        units: 'mW.m-2.sr-1.nm-1'
+    standard_name: uncertainty_in_rectified_reflectance_for_band_oa17
+    units: 'mW.m-2.sr-1.nm-1'
     coordinates: [longitude, latitude]
     file_type: esa_l2_rc_gifapar
     nc_key: RC865_unc
diff --git a/satpy/etc/readers/seviri_l2_grib.yaml b/satpy/etc/readers/seviri_l2_grib.yaml
index 6df08e596a..cbe6c81f09 100644
--- a/satpy/etc/readers/seviri_l2_grib.yaml
+++ b/satpy/etc/readers/seviri_l2_grib.yaml
@@ -1,7 +1,7 @@
 reader:
   name: seviri_l2_grib
   short_name: SEVIRI L2 GRIB
-  long_name: MSG (Meteosat 8 to 11) Level 2 products in GRIB2 format
+  long_name: MSG (Meteosat 8 to 11) SEVIRI Level 2 products in GRIB2 format
   description: Reader for EUMETSAT MSG SEVIRI L2 files in GRIB format.
status: Nominal supports_fsspec: false @@ -10,33 +10,9 @@ reader: file_types: - # EUMETSAT MSG SEVIRI L2 Cloud Mask files in GRIB format - grib_seviri_clm: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler - file_patterns: - - 'CLMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis files in GRIB format - grib_seviri_oca: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler - file_patterns: - - 'OCAEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring files in GRIB format - grib_seviri_fir: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler - file_patterns: - - 'FIREncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:AES grib_seviri_aes: file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler file_patterns: @@ -45,6 +21,18 @@ file_types: - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + # EUMETSAT MSG SEVIRI L2 Cloud Mask product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CLM + grib_seviri_clm: + file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_patterns: + - 'CLMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + # EUMETSAT MSG SEVIRI L2 Cloud Top Height product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CTH grib_seviri_cth: file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler file_patterns: @@ -53,6 +41,8 @@ 
file_types: - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CRM grib_seviri_crm: file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler file_patterns: @@ -61,6 +51,18 @@ file_types: - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:FIR + grib_seviri_fir: + file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_patterns: + - 'FIREncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:MPE-GRIB grib_seviri_mpe: file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler file_patterns: @@ -69,149 +71,20 @@ file_types: - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' -datasets: - - cloud_mask: - name: cloud_mask - long_name: Cloud Classification - standard_name: cloud_classification - resolution: 3000.403165817 - file_type: grib_seviri_clm - parameter_number: 7 - units: "1" - flag_values: [0, 1, 2, 3] - flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'no data' ] - - pixel_scene_type: - name: pixel_scene_type - long_name: Cloud Type - standard_name: scene_classification - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 8 - units: "1" - flag_values: [24,111,112] - flag_meanings: ['multi-layered cloud','water cloud','ice cloud'] - - measurement_cost: - name: measurement_cost - long_name: OCA Cost Function - Measurement part - standard_name: cost_function - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 30 - units: "1" - - upper_layer_cloud_optical_depth: - name: upper_layer_cloud_optical_depth - long_name: Upper Cloud Layer Optical Depth - standard_name: atmosphere_optical_thickness_due_to_cloud - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 31 - units: "1" - - upper_layer_cloud_top_pressure: - name: upper_layer_cloud_top_pressure - long_name: Upper Cloud Top Pressure - standard_name: air_pressure_at_cloud_top - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 32 - units: Pa - - upper_layer_cloud_effective_radius: - name: upper_layer_cloud_effective_radius - long_name: Upper Cloud Particule Effective Radius - standard_name: 
effective_radius_of_cloud_condensed_water_particles_at_cloud_top - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 33 - units: m - - error_in_upper_layer_cloud_optical_depth: - name: error_in_upper_layer_cloud_optical_depth - long_name: Upper Cloud Optical Depth Error Estimate - standard_name: atmosphere_optical_thickness_due_to_cloud standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 34 - units: "1" - - error_in_upper_layer_cloud_top_pressure: - name: error_in_upper_layer_cloud_top_pressure - long_name: Upper Cloud Top Pressure Error Estimate - standard_name: air_pressure_at_cloud_top standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 35 - units: Pa - - error_in_upper_layer_cloud_effective_radius: - name: error_in_upper_layer_cloud_effective_radius - long_name: Upper Cloud Particule Effective Radius Error Estimate - standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 36 - units: m - - lower_layer_cloud_optical_depth: - name: lower_layer_cloud_optical_depth - long_name: Lower Cloud Optical Depth - standard_name: atmosphere_optical_thickness_due_to_cloud_in_lower_atmosphere_layer - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 37 - units: "1" - - lower_layer_cloud_top_pressure: - name: lower_layer_cloud_top_pressure - long_name: Lower Cloud Top Pressure - standard_name: air_pressure_at_cloud_top_in_lower_atmosphere_layer - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 38 - units: Pa - - error_in_lower_layer_cloud_optical_depth: - name: error_in_lower_layer_cloud_optical_depth - long_name: Lower Cloud Optical Depth Error Estimate - standard_name: atmosphere_optical_thickness_due_to_cloud_in_lower_atmosphere_layer standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 39 - units: "1" - - error_in_lower_layer_cloud_top_pressure: - name: error_in_lower_layer_cloud_top_pressure - long_name: Lower Cloud Top Pressure Error Estimate - standard_name: air_pressure_at_cloud_top_in_lower_atmosphere_layer standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 40 - units: Pa + # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:OCA + grib_seviri_oca: + file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_patterns: + - 'OCAEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - fire_probability: - name: fire_probability - long_name: Fire Probability - standard_name: fire_probability - resolution: 3000.403165817 - file_type: grib_seviri_fir - parameter_number: 192 - units: "%" - active_fires: - name: active_fires - long_name: Active Fire Classification - standard_name: active_fire_classification - resolution: 3000.403165817 - file_type: grib_seviri_fir - parameter_number: 9 - units: "1" - flag_values: [0, 1, 2, 3] - flag_meanings: ['no 
fire','possible fire', 'probable fire', 'missing' ] +datasets: + # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product aerosol_optical_thickness_vis06: name: aerosol_optical_thickness_vis06 long_name: Aerosol optical Thickness at 0.6um @@ -259,6 +132,21 @@ datasets: flag_values: [0, 1, 2, 3] flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'no data' ] + + # EUMETSAT MSG SEVIRI L2 Cloud Mask product + cloud_mask: + name: cloud_mask + long_name: Cloud Classification + standard_name: cloud_classification + resolution: 3000.403165817 + file_type: grib_seviri_clm + parameter_number: 7 + units: "1" + flag_values: [0, 1, 2, 3] + flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'no data' ] + + + # EUMETSAT MSG SEVIRI L2 Cloud Top Height product cloud_top_height: name: cloud_top_height long_name: Cloud Top Height @@ -277,8 +165,10 @@ datasets: parameter_number: 3 units: "1" flag_values: [0, 1] - flag_meanings: ['good quality retreival','poor quality retreival' ] + flag_meanings: ['good quality retrieval','poor quality retrieval' ] + + # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product vis_refl_06: name: vis_refl_06 long_name: TOA Bidirectional Reflectance at 0.6um (7 days average) @@ -346,6 +236,30 @@ datasets: parameter_number: 8 units: degrees + + # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product + active_fires: + name: active_fires + long_name: Active Fire Classification + standard_name: active_fire_classification + resolution: 3000.403165817 + file_type: grib_seviri_fir + parameter_number: 9 + units: "1" + flag_values: [0, 1, 2, 3] + flag_meanings: ['no fire','possible fire', 'probable fire', 'missing' ] + + fire_probability: + name: fire_probability + long_name: Fire Probability + standard_name: fire_probability + resolution: 3000.403165817 + file_type: grib_seviri_fir + parameter_number: 192 + units: "%" + + + # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product instantaneous_rain_rate: name: instantaneous_rain_rate long_name: MPE Product Instantaneous Rain Rate @@ -354,3 +268,115 @@ datasets: file_type: grib_seviri_mpe parameter_number: 1 units: "kg m-2 s-1" + + + # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product + pixel_scene_type: + name: pixel_scene_type + long_name: Cloud Type + standard_name: scene_classification + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 8 + units: "1" + flag_values: [24,111,112] + flag_meanings: ['multi-layered cloud','water cloud','ice cloud'] + + measurement_cost: + name: measurement_cost + long_name: OCA Cost Function - Measurement part + standard_name: cost_function + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 30 + units: "1" + + upper_layer_cloud_optical_depth: + name: upper_layer_cloud_optical_depth + long_name: Upper Cloud Layer Optical Depth + standard_name: atmosphere_optical_thickness_due_to_cloud + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 31 + units: "1" + + upper_layer_cloud_top_pressure: + name: upper_layer_cloud_top_pressure + long_name: Upper Cloud Top Pressure + standard_name: air_pressure_at_cloud_top + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 32 + units: Pa + + upper_layer_cloud_effective_radius: + name: upper_layer_cloud_effective_radius + long_name: Upper Cloud Particle Effective Radius + standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top + resolution: 3000.403165817 + file_type: grib_seviri_oca + 
parameter_number: 33
+    units: m
+
+  error_in_upper_layer_cloud_optical_depth:
+    name: error_in_upper_layer_cloud_optical_depth
+    long_name: Upper Cloud Optical Depth Error Estimate
+    standard_name: atmosphere_optical_thickness_due_to_cloud standard_error
+    resolution: 3000.403165817
+    file_type: grib_seviri_oca
+    parameter_number: 34
+    units: "1"
+
+  error_in_upper_layer_cloud_top_pressure:
+    name: error_in_upper_layer_cloud_top_pressure
+    long_name: Upper Cloud Top Pressure Error Estimate
+    standard_name: air_pressure_at_cloud_top standard_error
+    resolution: 3000.403165817
+    file_type: grib_seviri_oca
+    parameter_number: 35
+    units: Pa
+
+  error_in_upper_layer_cloud_effective_radius:
+    name: error_in_upper_layer_cloud_effective_radius
+    long_name: Upper Cloud Particle Effective Radius Error Estimate
+    standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top standard_error
+    resolution: 3000.403165817
+    file_type: grib_seviri_oca
+    parameter_number: 36
+    units: m
+
+  lower_layer_cloud_optical_depth:
+    name: lower_layer_cloud_optical_depth
+    long_name: Lower Cloud Optical Depth
+    standard_name: atmosphere_optical_thickness_due_to_cloud_in_lower_atmosphere_layer
+    resolution: 3000.403165817
+    file_type: grib_seviri_oca
+    parameter_number: 37
+    units: "1"
+
+  lower_layer_cloud_top_pressure:
+    name: lower_layer_cloud_top_pressure
+    long_name: Lower Cloud Top Pressure
+    standard_name: air_pressure_at_cloud_top_in_lower_atmosphere_layer
+    resolution: 3000.403165817
+    file_type: grib_seviri_oca
+    parameter_number: 38
+    units: Pa
+
+  error_in_lower_layer_cloud_optical_depth:
+    name: error_in_lower_layer_cloud_optical_depth
+    long_name: Lower Cloud Optical Depth Error Estimate
+    standard_name: atmosphere_optical_thickness_due_to_cloud_in_lower_atmosphere_layer standard_error
+    resolution: 3000.403165817
+    file_type: grib_seviri_oca
+    parameter_number: 39
+    units: "1"
+
+  error_in_lower_layer_cloud_top_pressure:
+    name: error_in_lower_layer_cloud_top_pressure
+    long_name: Lower Cloud Top Pressure Error Estimate
+    standard_name: air_pressure_at_cloud_top_in_lower_atmosphere_layer standard_error
+    resolution: 3000.403165817
+    file_type: grib_seviri_oca
+    parameter_number: 40
+    units: Pa
diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py
index 210ab76232..28adb60028 100644
--- a/satpy/modifiers/angles.py
+++ b/satpy/modifiers/angles.py
@@ -314,7 +314,9 @@ def _chunks_are_irregular(chunks_tuple: tuple) -> bool:
     is when all chunks are the same size (except for the last one).
     """
-    return any(len(set(chunks[:-1])) > 1 for chunks in chunks_tuple)
+    if any(len(set(chunks[:-1])) > 1 for chunks in chunks_tuple):
+        return True
+    return any(chunks[-1] > chunks[0] for chunks in chunks_tuple)
 
 
 def _geo_dask_to_data_array(arr: da.Array) -> xr.DataArray:
diff --git a/satpy/modifiers/filters.py b/satpy/modifiers/filters.py
new file mode 100644
index 0000000000..151082e723
--- /dev/null
+++ b/satpy/modifiers/filters.py
@@ -0,0 +1,34 @@
+"""Image filters to be applied as modifiers."""
+import logging
+
+import xarray as xr
+
+from satpy.modifiers import ModifierBase
+
+logger = logging.getLogger(__name__)
+
+
+class Median(ModifierBase):
+    """Apply a median filter to the band."""
+
+    def __init__(self, median_filter_params, **kwargs):
+        """Create the instance.
+
+        Args:
+            median_filter_params: The arguments to pass to dask-image's median_filter function. For example,
+                {size: 3} gives the median filter a kernel of size 3.
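+
+        As a sketch of how this modifier might be wired up (the
+        ``median_filtered`` key and the values below are illustrative,
+        not part of this changeset), a composite recipe could declare::
+
+            modifiers:
+              median_filtered:
+                modifier: !!python/name:satpy.modifiers.filters.Median
+                median_filter_params:
+                  size: 3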
+
+        """
+        self.median_filter_params = median_filter_params
+        super().__init__(**kwargs)
+
+    def __call__(self, arrays, **info):
+        """Get the median filtered band."""
+        from dask_image.ndfilters import median_filter
+
+        data = arrays[0]
+        logger.debug(f"Apply median filtering with parameters {self.median_filter_params}.")
+        res = xr.DataArray(median_filter(data.data, **self.median_filter_params),
+                           dims=data.dims, attrs=data.attrs, coords=data.coords)
+        self.apply_modifier_info(data, res)
+        return res
diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py
index d1ed730792..dafdc8a373 100644
--- a/satpy/readers/abi_l1b.py
+++ b/satpy/readers/abi_l1b.py
@@ -22,11 +22,11 @@
 https://www.goes-r.gov/users/docs/PUG-L1b-vol3.pdf
 
 """
-
 import logging
 
 import numpy as np
 
+import satpy
 from satpy.readers.abi_base import NC_ABI_BASE
 
 logger = logging.getLogger(__name__)
@@ -35,9 +35,17 @@
 class NC_ABI_L1B(NC_ABI_BASE):
     """File reader for individual ABI L1B NetCDF4 files."""
 
+    def __init__(self, filename, filename_info, filetype_info, clip_negative_radiances=None):
+        """Open the NetCDF file with xarray and prepare the Dataset for reading."""
+        super().__init__(filename, filename_info, filetype_info)
+        if clip_negative_radiances is None:
+            clip_negative_radiances = satpy.config.get("readers.clip_negative_radiances")
+        self.clip_negative_radiances = clip_negative_radiances
+
     def get_dataset(self, key, info):
         """Load a dataset."""
         logger.debug('Reading in get_dataset %s.', key['name'])
+        # For raw cal, don't apply scale and offset, return raw file counts
         if key['calibration'] == 'counts':
             radiances = self.nc['Rad'].copy()
@@ -139,6 +147,16 @@ def _vis_calibrate(self, data):
         res.attrs['standard_name'] = 'toa_bidirectional_reflectance'
         return res
 
+    def _get_minimum_radiance(self, data):
+        """Estimate minimum radiance from Rad DataArray."""
+        attrs = data.attrs
+        scale_factor = attrs["scale_factor"]
+        add_offset = attrs["add_offset"]
+        count_zero_rad = - add_offset / scale_factor
+        count_pos = np.ceil(count_zero_rad)
+        min_rad = count_pos * scale_factor + add_offset
+        return min_rad
+
     def _ir_calibrate(self, data):
         """Calibrate IR channels to BT."""
         fk1 = float(self["planck_fk1"])
@@ -146,6 +164,10 @@ def _ir_calibrate(self, data):
         bc1 = float(self["planck_bc1"])
         bc2 = float(self["planck_bc2"])
 
+        if self.clip_negative_radiances:
+            min_rad = self._get_minimum_radiance(data)
+            data = data.clip(min=min_rad)
+
         res = (fk2 / np.log(fk1 / data + 1) - bc1) / bc2
         res.attrs = data.attrs
         res.attrs['units'] = 'K'
diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py
index 6dbcc84895..e520b29b30 100644
--- a/satpy/readers/avhrr_l1b_gaclac.py
+++ b/satpy/readers/avhrr_l1b_gaclac.py
@@ -17,11 +17,16 @@
 # satpy. If not, see <http://www.gnu.org/licenses/>.
 """Reading and calibrating GAC and LAC AVHRR data.
 
+Uses Pygac under the hood. See the `Pygac Documentation`_ for supported data
+formats as well as calibration and navigation methods.
+
 .. todo::
 
     Fine grained calibration
    Radiance output
 
+.. _Pygac Documentation:
+    https://pygac.readthedocs.io/en/stable
 """
 
 import logging
diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py
index 3a72347c3a..8e28219035 100644
--- a/satpy/readers/fci_l1c_nc.py
+++ b/satpy/readers/fci_l1c_nc.py
@@ -112,6 +112,7 @@
 from __future__ import absolute_import, division, print_function, unicode_literals
 
 import logging
+from datetime import timedelta
 from functools import cached_property
 
 import dask.array as da
@@ -211,15 +212,47 @@ def __init__(self, filename, filename_info, filetype_info):
 
         self._cache = {}
 
+    @property
+    def rc_period_min(self):
+        """Get nominal repeat cycle duration.
+
+        As RSS is not yet implemented, an error will be raised if RSS data are to be read.
+        """
+        if not self.filename_info['coverage'] == 'FD':
+            raise NotImplementedError(f"coverage for {self.filename_info['coverage']} not supported by this reader")
+        return 10
+
+    @property
+    def nominal_start_time(self):
+        """Get nominal start time."""
+        rc_date = self.observation_start_time.replace(hour=0, minute=0, second=0, microsecond=0)
+        return rc_date + timedelta(minutes=(self.filename_info['repeat_cycle_in_day']-1)*self.rc_period_min)
+
+    @property
+    def nominal_end_time(self):
+        """Get nominal end time."""
+        return self.nominal_start_time + timedelta(minutes=self.rc_period_min)
+
+    @property
+    def observation_start_time(self):
+        """Get observation start time."""
+        return self.filename_info['start_time']
+
+    @property
+    def observation_end_time(self):
+        """Get observation end time."""
+        return self.filename_info['end_time']
+
     @property
     def start_time(self):
         """Get start time."""
-        return self.filename_info['start_time']
+        return self.nominal_start_time
 
     @property
     def end_time(self):
         """Get end time."""
-        return self.filename_info['end_time']
+        return self.nominal_end_time
 
     def get_channel_measured_group_path(self, channel):
         """Get the channel's measured group path."""
@@ -348,7 +381,13 @@ def _get_dataset_measurand(self, key, info=None):
 
         # remove attributes from original file which don't apply anymore
         res.attrs.pop('long_name')
-
+        # Add time_parameter attributes
+        res.attrs['time_parameters'] = {
+            'nominal_start_time': self.nominal_start_time,
+            'nominal_end_time': self.nominal_end_time,
+            'observation_start_time': self.observation_start_time,
+            'observation_end_time': self.observation_end_time,
+        }
         res.attrs.update(self.orbital_param)
 
         return res
diff --git a/satpy/readers/geocat.py b/satpy/readers/geocat.py
index 3343e25533..5086cd899b 100644
--- a/satpy/readers/geocat.py
+++ b/satpy/readers/geocat.py
@@ -56,7 +56,30 @@
 class GEOCATFileHandler(NetCDF4FileHandler):
-    """GEOCAT netCDF4 file handler."""
+    """GEOCAT netCDF4 file handler.
+
+    **Loading data with decode_times=True**
+
+    By default, this reader will use ``xarray_kwargs={"engine": "netcdf4", "decode_times": False}``
+    to match the behavior of xarray when the geocat reader was first written.
To use different options + use reader_kwargs when loading the Scene:: + + scene = satpy.Scene(filenames, + reader='geocat', + reader_kwargs={'xarray_kwargs': {'engine': 'netcdf4', 'decode_times': True}}) + """ + + def __init__(self, filename, filename_info, filetype_info, + **kwargs): + """Open and perform initial investigation of NetCDF file.""" + kwargs.setdefault('xarray_kwargs', {}).setdefault( + 'engine', "netcdf4") + kwargs.setdefault('xarray_kwargs', {}).setdefault( + 'decode_times', False) + + super(GEOCATFileHandler, self).__init__( + filename, filename_info, filetype_info, + xarray_kwargs=kwargs["xarray_kwargs"]) sensors = { 'goes': 'goes_imager', diff --git a/satpy/readers/gms/__init__.py b/satpy/readers/gms/__init__.py new file mode 100644 index 0000000000..7b1f2041c3 --- /dev/null +++ b/satpy/readers/gms/__init__.py @@ -0,0 +1 @@ +"""GMS reader module.""" diff --git a/satpy/readers/gms/gms5_vissr_format.py b/satpy/readers/gms/gms5_vissr_format.py new file mode 100644 index 0000000000..a5052097eb --- /dev/null +++ b/satpy/readers/gms/gms5_vissr_format.py @@ -0,0 +1,397 @@ +"""GMS-5 VISSR archive data format. + +Reference: `VISSR Format Description`_ + +.. _VISSR Format Description: + https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf +""" + +import numpy as np + +U1 = ">u1" +I2 = ">i2" +I4 = ">i4" +R4 = ">f4" +R8 = ">f8" + +VIS_CHANNEL = "VIS" +IR_CHANNEL = "IR" +CHANNEL_TYPES = { + "VIS": VIS_CHANNEL, + "IR1": IR_CHANNEL, + "IR2": IR_CHANNEL, + "IR3": IR_CHANNEL, + "WV": IR_CHANNEL, +} +ALT_CHANNEL_NAMES = {"VIS": "VIS", "IR1": "IR1", "IR2": "IR2", "IR3": "WV"} +BLOCK_SIZE_VIS = 13504 +BLOCK_SIZE_IR = 3664 + +IMAGE_PARAM_ITEM_SIZE = 2688 +TIME = [("date", I4), ("time", I4)] +CHANNELS = [("VIS", R4), ("IR1", R4), ("IR2", R4), ("WV", R4)] +VISIR_SOLAR = [("VIS", R4), ("IR", R4)] + +CONTROL_BLOCK = np.dtype([('control_block_size', I2), + ('head_block_number_of_parameter_block', I2), + ('parameter_block_size', I2), + ('head_block_number_of_image_data', I2), + ('total_block_size_of_image_data', I2), + ('available_block_size_of_image_data', I2), + ('head_valid_line_number', I2), + ('final_valid_line_number', I2), + ('final_data_block_number', I2)]) + +MODE_BLOCK_FRAME_PARAMETERS = [('bit_length', I4), + ('number_of_lines', I4), + ('number_of_pixels', I4), + ('stepping_angle', R4), + ('sampling_angle', R4), + ('lcw_pixel_size', I4), + ('doc_pixel_size', I4), + ('reserved', I4)] + +MODE_BLOCK = np.dtype([('satellite_number', I4), + ('satellite_name', '|S12'), + ('observation_time_ad', '|S16'), + ('observation_time_mjd', R8), + ('gms_operation_mode', I4), + ('dpc_operation_mode', I4), + ('vissr_observation_mode', I4), + ('scanner_selection', I4), + ('sensor_selection', I4), + ('sensor_mode', I4), + ('scan_frame_mode', I4), + ('scan_mode', I4), + ('upper_limit_of_scan_number', I4), + ('lower_limit_of_scan_number', I4), + ('equatorial_scan_line_number', I4), + ('spin_rate', R4), + ('vis_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS), + ('ir_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS), + ('satellite_height', R4), + ('earth_radius', R4), + ('ssp_longitude', R4), + ('reserved_1', I4, 9), + ('table_of_sensor_trouble', I4, 14), + ('reserved_2', I4, 36), + ('status_tables_of_data_relative_address_segment', I4, 60)]) + +COORDINATE_CONVERSION_PARAMETERS = np.dtype([ + ('data_segment', I4), + ('data_validity', I4), + ('data_generation_time', TIME), + ('scheduled_observation_time', R8), + ('stepping_angle_along_line', CHANNELS), + ('sampling_angle_along_pixel', 
CHANNELS), + ('central_line_number_of_vissr_frame', CHANNELS), + ('central_pixel_number_of_vissr_frame', CHANNELS), + ('pixel_difference_of_vissr_center_from_normal_position', CHANNELS), + ('number_of_sensor_elements', CHANNELS), + ('total_number_of_vissr_frame_lines', CHANNELS), + ('total_number_of_vissr_frame_pixels', CHANNELS), + ('vissr_misalignment', R4, (3,)), + ('matrix_of_misalignment', R4, (3, 3)), + ('parameters', [('judgement_of_observation_convergence_time', R4), + ('judgement_of_line_convergence', R4), + ('east_west_angle_of_sun_light_condense_prism', R4), + ('north_south_angle_of_sun_light_condense_prism', R4), + ('pi', R4), + ('pi_divided_by_180', R4), + ('180_divided_by_pi', R4), + ('equatorial_radius', R4), + ('oblateness_of_earth', R4), + ('eccentricity_of_earth_orbit', R4), + ('first_angle_of_vissr_observation_in_sdb', R4), + ('upper_limited_line_of_2nd_prism_for_vis_solar_observation', R4), + ('lower_limited_line_of_1st_prism_for_vis_solar_observation', R4), + ('upper_limited_line_of_3rd_prism_for_vis_solar_observation', R4), + ('lower_limited_line_of_2nd_prism_for_vis_solar_observation', R4)]), + ('solar_stepping_angle_along_line', VISIR_SOLAR), + ('solar_sampling_angle_along_pixel', VISIR_SOLAR), + ('solar_center_line_of_vissr_frame', VISIR_SOLAR), + ('solar_center_pixel_of_vissr_frame', VISIR_SOLAR), + ('solar_pixel_difference_of_vissr_center_from_normal_position', VISIR_SOLAR), + ('solar_number_of_sensor_elements', VISIR_SOLAR), + ('solar_total_number_of_vissr_frame_lines', VISIR_SOLAR), + ('solar_total_number_of_vissr_frame_pixels', VISIR_SOLAR), + ('reserved_1', I4, 19), + ('orbital_parameters', [('epoch_time', R8), + ('semi_major_axis', R8), + ('eccentricity', R8), + ('orbital_inclination', R8), + ('longitude_of_ascending_node', R8), + ('argument_of_perigee', R8), + ('mean_anomaly', R8), + ('longitude_of_ssp', R8), + ('latitude_of_ssp', R8)]), + ('reserved_2', I4, 2), + ('attitude_parameters', [('epoch_time', R8), + ('angle_between_z_axis_and_satellite_spin_axis_at_epoch_time', R8), + ('angle_change_rate_between_spin_axis_and_z_axis', R8), + ('angle_between_spin_axis_and_zy_axis', R8), + ('angle_change_rate_between_spin_axis_and_zt_axis', R8), + ('daily_mean_of_spin_rate', R8)]), + ('reserved_3', I4, 529), + ('correction_of_image_distortion', [('stepping_angle_along_line_of_ir1', R4), + ('stepping_angle_along_line_of_ir2', R4), + ('stepping_angle_along_line_of_wv', R4), + ('stepping_angle_along_line_of_vis', R4), + ('sampling_angle_along_pixel_of_ir1', R4), + ('sampling_angle_along_pixel_of_ir2', R4), + ('sampling_angle_along_pixel_of_wv', R4), + ('sampling_angle_along_pixel_of_vis', R4), + ('x_component_vissr_misalignment', R4), + ('y_component_vissr_misalignment', R4)]) +]) + +ATTITUDE_PREDICTION_DATA = np.dtype([('prediction_time_mjd', R8), + ('prediction_time_utc', TIME), + ('right_ascension_of_attitude', R8), + ('declination_of_attitude', R8), + ('sun_earth_angle', R8), + ('spin_rate', R8), + ('right_ascension_of_orbital_plane', R8), + ('declination_of_orbital_plane', R8), + ('reserved', R8), + ('eclipse_flag', I4), + ('spin_axis_flag', I4)]) + +ATTITUDE_PREDICTION = np.dtype([('data_segment', I4), + ('data_validity', I4), + ('data_generation_time', TIME), + ('start_time', R8), + ('end_time', R8), + ('prediction_interval_time', R8), + ('number_of_prediction', I4), + ('data_size', I4), + ('data', ATTITUDE_PREDICTION_DATA, (33,))]) + +ORBIT_PREDICTION_DATA = [('prediction_time_mjd', R8), + ('prediction_time_utc', TIME), + ('satellite_position_1950', R8, (3,)), 
+ ('satellite_velocity_1950', R8, (3,)), + ('satellite_position_earth_fixed', R8, (3,)), + ('satellite_velocity_earth_fixed', R8, (3,)), + ('greenwich_sidereal_time', R8), + ('sat_sun_vector_1950', [('azimuth', R8), + ('elevation', R8)]), + ('sat_sun_vector_earth_fixed', [('azimuth', R8), + ('elevation', R8)]), + ('conversion_matrix', R8, (3, 3)), + ('moon_directional_vector', R8, (3,)), + ('satellite_position', [('ssp_longitude', R8), + ('ssp_latitude', R8), + ('satellite_height', R8)]), + ('eclipse_period_flag', I4), + ('reserved', I4)] + +ORBIT_PREDICTION = np.dtype([('data_segment', I4), + ('data_validity', I4), + ('data_generation_time', TIME), + ('start_time', R8), + ('end_time', R8), + ('prediction_interval_time', R8), + ('number_of_prediction', I4), + ('data_size', I4), + ('data', ORBIT_PREDICTION_DATA, (9,))]) + +VIS_CALIBRATION_TABLE = np.dtype([ + ('channel_number', I4), + ('data_validity', I4), + ('updated_time', TIME), + ('table_id', I4), + ('brightness_albedo_conversion_table', R4, (64,)), + ('vis_channel_staircase_brightness_data', R4, (6,)), + ('coefficients_table_of_vis_staircase_regression_curve', R4, (10,)), + ('brightness_table_for_calibration', [('universal_space_brightness', R4), + ('solar_brightness', R4)]), + ('calibration_uses_brightness_correspondence_voltage_chart', [('universal_space_voltage', R4), + ('solar_voltage', R4)]), + ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]), + ('reserved', I4, (9,)) + ]) + +VIS_CALIBRATION = np.dtype([('data_segment', I4), + ('data_validity', I4), + ('data_generation_time', TIME), + ('sensor_group', I4), + ('vis1_calibration_table', VIS_CALIBRATION_TABLE), + ('vis2_calibration_table', VIS_CALIBRATION_TABLE), + ('vis3_calibration_table', VIS_CALIBRATION_TABLE), + ('reserved', I4, (267,))]) + +TELEMETRY_DATA = np.dtype([ + ('shutter_temp', R4), + ('redundant_mirror_temp', R4), + ('primary_mirror_temp', R4), + ('baffle_fw_temp', R4), + ('baffle_af_temp', R4), + ('15_volt_auxiliary_power_supply', R4), + ('radiative_cooler_temp_1', R4), + ('radiative_cooler_temp_2', R4), + ('electronics_module_temp', R4), + ('scan_mirror_temp', R4), + ('shutter_cavity_temp', R4), + ('primary_mirror_sealed_temp', R4), + ('redundant_mirror_sealed_temp', R4), + ('shutter_temp_2', R4), + ('reserved', R4, (2,)) +]) + +IR_CALIBRATION = np.dtype([ + ('data_segment', I4), + ('data_validity', I4), + ('updated_time', TIME), + ('sensor_group', I4), + ('table_id', I4), + ('reserved_1', I4, (2,)), + ('conversion_table_of_equivalent_black_body_radiation', R4, (256,)), + ('conversion_table_of_equivalent_black_body_temperature', R4, (256,)), + ('staircase_brightness_data', R4, (6,)), + ('coefficients_table_of_staircase_regression_curve', R4, (10,)), + ('brightness_data_for_calibration', [('brightness_of_space', R4), + ('brightness_of_black_body_shutter', R4), + ('reserved', R4)]), + ('voltage_table_for_brightness_of_calibration', [('voltage_of_space', R4), + ('voltage_of_black_body_shutter', R4), + ('reserved', R4)]), + ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]), + ('valid_shutter_temperature', R4), + ('valid_shutter_radiation', R4), + ('telemetry_data_table', TELEMETRY_DATA), + ('flag_of_calid_shutter_temperature_calculation', I4), + ('reserved_2', I4, (109,)) +]) + +SIMPLE_COORDINATE_CONVERSION_TABLE = np.dtype([ + ('coordinate_conversion_table', I2, (1250,)), + ('earth_equator_radius', R4), + ('satellite_height', R4), + ('stepping_angle', R4), + ('sampling_angle', R4), + ('ssp_latitude', R4), + 
('ssp_longitude', R4), + ('ssp_line_number', R4), + ('ssp_pixel_number', R4), + ('pi', R4), + ('line_correction_ir1_vis', R4), + ('pixel_correction_ir1_vis', R4), + ('line_correction_ir1_ir2', R4), + ('pixel_correction_ir1_ir2', R4), + ('line_correction_ir1_wv', R4), + ('pixel_correction_ir1_wv', R4), + ('reserved', R4, (32,)), +]) + +IMAGE_PARAMS = { + 'mode': { + 'dtype': MODE_BLOCK, + 'offset': { + VIS_CHANNEL: 2 * BLOCK_SIZE_VIS, + IR_CHANNEL: 2 * BLOCK_SIZE_IR + } + }, + 'coordinate_conversion': { + 'dtype': COORDINATE_CONVERSION_PARAMETERS, + 'offset': { + VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 4 * BLOCK_SIZE_IR + } + }, + 'attitude_prediction': { + 'dtype': ATTITUDE_PREDICTION, + 'offset': { + VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 5 * BLOCK_SIZE_IR + }, + 'preserve': 'data' + }, + 'orbit_prediction_1': { + 'dtype': ORBIT_PREDICTION, + 'offset': { + VIS_CHANNEL: 3 * BLOCK_SIZE_VIS, + IR_CHANNEL: 6 * BLOCK_SIZE_IR + }, + 'preserve': 'data' + }, + 'orbit_prediction_2': { + 'dtype': ORBIT_PREDICTION, + 'offset': { + VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 1 * IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 7 * BLOCK_SIZE_IR + }, + 'preserve': 'data' + }, + 'vis_calibration': { + 'dtype': VIS_CALIBRATION, + 'offset': { + VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 9 * BLOCK_SIZE_IR + }, + 'preserve': 'data' + }, + 'ir1_calibration': { + 'dtype': IR_CALIBRATION, + 'offset': { + VIS_CHANNEL: 4 * BLOCK_SIZE_VIS, + IR_CHANNEL: 10 * BLOCK_SIZE_IR + }, + }, + 'ir2_calibration': { + 'dtype': IR_CALIBRATION, + 'offset': { + VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 11 * BLOCK_SIZE_IR + }, + }, + 'wv_calibration': { + 'dtype': IR_CALIBRATION, + 'offset': { + VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 12 * BLOCK_SIZE_IR + }, + }, + 'simple_coordinate_conversion_table': { + 'dtype': SIMPLE_COORDINATE_CONVERSION_TABLE, + 'offset': { + VIS_CHANNEL: 5 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 16 * BLOCK_SIZE_IR + }, + } +} + +LINE_CONTROL_WORD = np.dtype([ + ('data_id', U1, (4, )), + ('line_number', I4), + ('line_name', I4), + ('error_line_flag', I4), + ('error_message', I4), + ('mode_error_flag', I4), + ('scan_time', R8), + ('beta_angle', R4), + ('west_side_earth_edge', I4), + ('east_side_earth_edge', I4), + ('received_time_1', R8), # Typo in format description (I*4) + ('received_time_2', I4), + ('reserved', U1, (8, )) +]) + +IMAGE_DATA_BLOCK_IR = np.dtype([('LCW', LINE_CONTROL_WORD), + ('DOC', U1, (256,)), # Omitted + ('image_data', U1, 3344)]) + +IMAGE_DATA_BLOCK_VIS = np.dtype([('LCW', LINE_CONTROL_WORD), + ('DOC', U1, (64,)), # Omitted + ('image_data', U1, (13376,))]) + +IMAGE_DATA = { + VIS_CHANNEL: { + 'offset': 6 * BLOCK_SIZE_VIS, + 'dtype': IMAGE_DATA_BLOCK_VIS, + }, + IR_CHANNEL: { + 'offset': 18 * BLOCK_SIZE_IR, + 'dtype': IMAGE_DATA_BLOCK_IR + } +} diff --git a/satpy/readers/gms/gms5_vissr_l1b.py b/satpy/readers/gms/gms5_vissr_l1b.py new file mode 100644 index 0000000000..f3c6898f65 --- /dev/null +++ b/satpy/readers/gms/gms5_vissr_l1b.py @@ -0,0 +1,803 @@ +"""Reader for GMS-5 VISSR Level 1B data. + +Introduction +------------ +The ``gms5_vissr_l1b`` reader can decode, navigate and calibrate Level 1B data +from the Visible and Infrared Spin Scan Radiometer (VISSR) in `VISSR +archive format`. 
Corresponding platforms are GMS-5 (Japanese Geostationary
+Meteorological Satellite) and GOES-09 (2003-2006 backup after MTSAT-1 launch
+failure).
+
+VISSR has four channels, each stored in a separate file:
+
+.. code-block:: none
+
+    VISSR_20020101_0031_IR1.A.IMG
+    VISSR_20020101_0031_IR2.A.IMG
+    VISSR_20020101_0031_IR3.A.IMG
+    VISSR_20020101_0031_VIS.A.IMG
+
+This is how to read them with Satpy:
+
+.. code-block:: python
+
+    from satpy import Scene
+    import glob
+
+    filenames = glob.glob("/data/VISSR*")
+    scene = Scene(filenames, reader="gms5-vissr_l1b")
+    scene.load(["VIS", "IR1"])
+
+
+References
+~~~~~~~~~~
+
+Details about platform, instrument and data format can be found in the
+following references:
+
+    - `VISSR Format Description`_
+    - `GMS User Guide`_
+
+.. _VISSR Format Description:
+    https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf
+.. _GMS User Guide:
+    https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf
+
+
+Compression
+-----------
+
+Gzip-compressed VISSR files can be decompressed on the fly using
+:class:`~satpy.readers.FSFile`:
+
+.. code-block:: python
+
+    import fsspec
+    from satpy import Scene
+    from satpy.readers import FSFile
+
+    filename = "VISSR_19960217_2331_IR1.A.IMG.gz"
+    open_file = fsspec.open(filename, compression="gzip")
+    fs_file = FSFile(open_file)
+    scene = Scene([fs_file], reader="gms5-vissr_l1b")
+    scene.load(["IR1"])
+
+
+Calibration
+-----------
+
+Sensor counts are calibrated by looking up reflectance/temperature values in the
+calibration tables included in each file. See section 2.2 in the VISSR user
+guide.
+
+
+Navigation
+----------
+
+VISSR images are oversampled and not rectified.
+
+
+Oversampling
+~~~~~~~~~~~~
+VISSR oversamples the viewed scene in E-W direction by a factor of ~1.46:
+IR/VIS pixels are 14/3.5 urad on a side, but the instrument samples every
+9.57/2.39 urad in E-W direction. That means pixels are actually overlapping on
+the ground.
+
+This cannot be represented by a pyresample area definition, so each dataset
+is accompanied by 2-dimensional longitude and latitude coordinates. For
+resampling purposes a full disc area definition with uniform sampling is
+provided via
+
+.. code-block:: python
+
+    scene[dataset].attrs["area_def_uniform_sampling"]
+
+
+Rectification
+~~~~~~~~~~~~~
+
+VISSR images are not rectified. That means lon/lat coordinates are different
+
+1) for all channels of the same repeat cycle, even if their spatial resolution
+   is identical (IR channels)
+2) for different repeat cycles, even if the channel is identical
+
+However, the above area definition uses the nominal subsatellite point as
+projection center. As this rarely changes, the area definition is pretty
+constant.
+
+
+Performance
+~~~~~~~~~~~
+
+Navigation of VISSR images is computationally expensive, because for each pixel
+the view vector of the (rotating) instrument needs to be intersected with the
+earth, including interpolation of attitude and orbit prediction. For IR channels
+this takes about 10 seconds, for VIS channels about 160 seconds.
+
+
+Space Pixels
+------------
+
+VISSR produces data for pixels outside the Earth disk (i.e. atmospheric limb or
+deep space pixels). By default, these pixels are masked out as they contain
+data of limited or no value, but some applications do require these pixels.
+To turn off masking, set ``mask_space=False`` upon scene creation:
+
+.. code-block:: python
+
+    import satpy
+    import glob
+
+    filenames = glob.glob("VISSR*.IMG")
+    scene = satpy.Scene(filenames,
+                        reader="gms5-vissr_l1b",
+                        reader_kwargs={"mask_space": False})
+    scene.load(["VIS", "IR1"])
+
+
+Metadata
+--------
+
+Dataset attributes include metadata such as time and orbital parameters,
+see :ref:`dataset_metadata`.
+
+Partial Scans
+-------------
+
+Between 2001 and 2003 VISSR also recorded partial scans of the northern
+hemisphere. On demand, a special Typhoon schedule would be activated between
+03:00 and 05:00 UTC.
+"""
+
+import datetime as dt
+
+import dask.array as da
+import numba
+import numpy as np
+import xarray as xr
+
+import satpy.readers._geos_area as geos_area
+import satpy.readers.gms.gms5_vissr_format as fmt
+import satpy.readers.gms.gms5_vissr_navigation as nav
+from satpy.readers.file_handlers import BaseFileHandler
+from satpy.readers.hrit_jma import mjd2datetime64
+from satpy.readers.utils import generic_open
+from satpy.utils import get_legacy_chunk_size
+
+CHUNK_SIZE = get_legacy_chunk_size()
+
+
+def _recarr2dict(arr, preserve=None):
+    if not preserve:
+        preserve = []
+    res = {}
+    for key, value in zip(arr.dtype.names, arr):
+        if key.startswith("reserved"):
+            continue
+        if value.dtype.names and key not in preserve:
+            # Nested record array
+            res[key] = _recarr2dict(value)
+        else:
+            # Scalar or record array that shall be preserved
+            res[key] = value
+    return res
+
+
+class GMS5VISSRFileHandler(BaseFileHandler):
+    """File handler for GMS-5 VISSR data in VISSR archive format."""
+
+    def __init__(self, filename, filename_info, filetype_info, mask_space=True):
+        """Initialize the file handler.
+
+        Args:
+            filename: Name of file to be read
+            filename_info: Information obtained from filename
+            filetype_info: Information about file type
+            mask_space: Mask space pixels.
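+                Defaults to True. Masking can be disabled by passing
+                ``reader_kwargs={"mask_space": False}`` to the Scene, as shown
+                in the module docstring.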
+ """ + super(GMS5VISSRFileHandler, self).__init__( + filename, filename_info, filetype_info + ) + self._filename = filename + self._filename_info = filename_info + self._header, self._channel_type = self._read_header(filename) + self._mda = self._get_mda() + self._mask_space = mask_space + + def _read_header(self, filename): + header = {} + with generic_open(filename, mode="rb") as file_obj: + header["control_block"] = self._read_control_block(file_obj) + channel_type = self._get_channel_type( + header["control_block"]["parameter_block_size"] + ) + header["image_parameters"] = self._read_image_params(file_obj, channel_type) + return header, channel_type + + @staticmethod + def _get_channel_type(parameter_block_size): + if parameter_block_size == 4: + return fmt.VIS_CHANNEL + elif parameter_block_size == 16: + return fmt.IR_CHANNEL + raise ValueError( + f"Cannot determine channel type, possibly corrupt file " + f"(unknown parameter block size: {parameter_block_size})" + ) + + def _read_control_block(self, file_obj): + ctrl_block = read_from_file_obj(file_obj, dtype=fmt.CONTROL_BLOCK, count=1) + return _recarr2dict(ctrl_block[0]) + + def _read_image_params(self, file_obj, channel_type): + """Read image parameters from the header.""" + image_params = {} + for name, param in fmt.IMAGE_PARAMS.items(): + image_params[name] = self._read_image_param(file_obj, param, channel_type) + + image_params["orbit_prediction"] = self._concat_orbit_prediction( + image_params.pop("orbit_prediction_1"), + image_params.pop("orbit_prediction_2"), + ) + return image_params + + @staticmethod + def _read_image_param(file_obj, param, channel_type): + """Read a single image parameter block from the header.""" + image_params = read_from_file_obj( + file_obj, + dtype=param["dtype"], + count=1, + offset=param["offset"][channel_type], + ) + return _recarr2dict(image_params[0], preserve=param.get("preserve")) + + @staticmethod + def _concat_orbit_prediction(orb_pred_1, orb_pred_2): + """Concatenate orbit prediction data. + + It is split over two image parameter blocks in the header. + """ + orb_pred = orb_pred_1 + orb_pred["data"] = np.concatenate([orb_pred_1["data"], orb_pred_2["data"]]) + return orb_pred + + def _get_frame_parameters_key(self): + if self._channel_type == fmt.VIS_CHANNEL: + return "vis_frame_parameters" + return "ir_frame_parameters" + + def _get_actual_shape(self): + actual_num_lines = self._header["control_block"][ + "available_block_size_of_image_data" + ] + _, nominal_num_pixels = self._get_nominal_shape() + return actual_num_lines, nominal_num_pixels + + def _get_nominal_shape(self): + frame_params = self._header["image_parameters"]["mode"][ + self._get_frame_parameters_key() + ] + return frame_params["number_of_lines"], frame_params["number_of_pixels"] + + def _get_mda(self): + return { + "platform": self._mode_block["satellite_name"].decode().strip().upper(), + "sensor": "VISSR", + "time_parameters": self._get_time_parameters(), + "orbital_parameters": self._get_orbital_parameters(), + } + + def _get_orbital_parameters(self): + # Note: SSP longitude in simple coordinate conversion table seems to be + # incorrect (80 deg instead of 140 deg). Use orbital parameters instead. 
+ im_params = self._header["image_parameters"] + mode = im_params["mode"] + simple_coord = im_params["simple_coordinate_conversion_table"] + orb_params = im_params["coordinate_conversion"]["orbital_parameters"] + return { + "satellite_nominal_longitude": mode["ssp_longitude"], + "satellite_nominal_latitude": 0.0, + "satellite_nominal_altitude": mode["satellite_height"], + "satellite_actual_longitude": orb_params["longitude_of_ssp"], + "satellite_actual_latitude": orb_params["latitude_of_ssp"], + "satellite_actual_altitude": simple_coord["satellite_height"], + } + + def _get_time_parameters(self): + start_time = mjd2datetime64(self._mode_block["observation_time_mjd"]) + start_time = start_time.astype(dt.datetime).replace(second=0, microsecond=0) + end_time = start_time + dt.timedelta( + minutes=25 + ) # Source: GMS User Guide, section 3.3.1 + return { + "nominal_start_time": start_time, + "nominal_end_time": end_time, + } + + def get_dataset(self, dataset_id, ds_info): + """Get dataset from file.""" + image_data = self._get_image_data() + counts = self._get_counts(image_data) + dataset = self._calibrate(counts, dataset_id) + space_masker = SpaceMasker(image_data, dataset_id["name"]) + dataset = self._mask_space_pixels(dataset, space_masker) + self._attach_lons_lats(dataset, dataset_id) + self._update_attrs(dataset, dataset_id, ds_info) + return dataset + + def _get_image_data(self): + image_data = self._read_image_data() + return da.from_array(image_data, chunks=(CHUNK_SIZE,)) + + def _read_image_data(self): + num_lines, _ = self._get_actual_shape() + specs = self._get_image_data_type_specs() + with generic_open(self._filename, "rb") as file_obj: + return read_from_file_obj( + file_obj, dtype=specs["dtype"], count=num_lines, offset=specs["offset"] + ) + + def _get_image_data_type_specs(self): + return fmt.IMAGE_DATA[self._channel_type] + + def _get_counts(self, image_data): + return self._make_counts_data_array(image_data) + + def _make_counts_data_array(self, image_data): + return xr.DataArray( + image_data["image_data"], + dims=("y", "x"), + coords={ + "acq_time": ("y", self._get_acq_time(image_data)), + "line_number": ("y", self._get_line_number(image_data)), + }, + ) + + def _get_acq_time(self, dask_array): + acq_time = dask_array["LCW"]["scan_time"].compute() + return mjd2datetime64(acq_time) + + def _get_line_number(self, dask_array): + return dask_array["LCW"]["line_number"].compute() + + def _calibrate(self, counts, dataset_id): + table = self._get_calibration_table(dataset_id) + cal = Calibrator(table) + return cal.calibrate(counts, dataset_id["calibration"]) + + def _get_calibration_table(self, dataset_id): + tables = { + "VIS": self._header["image_parameters"]["vis_calibration"][ + "vis1_calibration_table" + ]["brightness_albedo_conversion_table"], + "IR1": self._header["image_parameters"]["ir1_calibration"][ + "conversion_table_of_equivalent_black_body_temperature" + ], + "IR2": self._header["image_parameters"]["ir2_calibration"][ + "conversion_table_of_equivalent_black_body_temperature" + ], + "IR3": self._header["image_parameters"]["wv_calibration"][ + "conversion_table_of_equivalent_black_body_temperature" + ], + } + return tables[dataset_id["name"]] + + def _get_area_def_uniform_sampling(self, dataset_id): + a = AreaDefEstimator( + coord_conv_params=self._header["image_parameters"]["coordinate_conversion"], + metadata=self._mda, + ) + return a.get_area_def_uniform_sampling(dataset_id) + + def _mask_space_pixels(self, dataset, space_masker): + if self._mask_space: + return 
space_masker.mask_space(dataset) + return dataset + + def _attach_lons_lats(self, dataset, dataset_id): + lons, lats = self._get_lons_lats(dataset, dataset_id) + dataset.coords["lon"] = lons + dataset.coords["lat"] = lats + + def _get_lons_lats(self, dataset, dataset_id): + lines, pixels = self._get_image_coords(dataset) + nav_params = self._get_navigation_parameters(dataset_id) + lons, lats = nav.get_lons_lats(lines, pixels, nav_params) + return self._make_lons_lats_data_array(lons, lats) + + def _get_image_coords(self, data): + lines = data.coords["line_number"].values + pixels = np.arange(data.shape[1]) + return lines.astype(np.float64), pixels.astype(np.float64) + + def _get_navigation_parameters(self, dataset_id): + return nav.ImageNavigationParameters( + static=self._get_static_navigation_params(dataset_id), + predicted=self._get_predicted_navigation_params() + ) + + def _get_static_navigation_params(self, dataset_id): + """Get static navigation parameters. + + Note that, "central_line_number_of_vissr_frame" is different for each + channel, even if their spatial resolution is identical. For example: + + VIS: 5513.0 + IR1: 1378.5 + IR2: 1378.7 + IR3: 1379.1001 + """ + alt_ch_name = _get_alternative_channel_name(dataset_id) + scan_params = nav.ScanningParameters( + start_time_of_scan=self._coord_conv["scheduled_observation_time"], + spinning_rate=self._mode_block["spin_rate"], + num_sensors=self._coord_conv["number_of_sensor_elements"][alt_ch_name], + sampling_angle=self._coord_conv["sampling_angle_along_pixel"][alt_ch_name], + ) + proj_params = self._get_proj_params(dataset_id) + return nav.StaticNavigationParameters( + proj_params=proj_params, + scan_params=scan_params + ) + + def _get_proj_params(self, dataset_id): + proj_params = nav.ProjectionParameters( + image_offset=self._get_image_offset(dataset_id), + scanning_angles=self._get_scanning_angles(dataset_id), + earth_ellipsoid=self._get_earth_ellipsoid() + ) + return proj_params + + def _get_earth_ellipsoid(self): + # Use earth radius and flattening from JMA's Msial library, because + # the values in the data seem to be pretty old. For example the + # equatorial radius is from the Bessel Ellipsoid (1841). 
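+        # (The Msial values used here are nav.EARTH_EQUATORIAL_RADIUS = 6378136.0 m
+        # and nav.EARTH_FLATTENING = 1/298.257, defined in gms5_vissr_navigation.)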
+ return nav.EarthEllipsoid( + flattening=nav.EARTH_FLATTENING, + equatorial_radius=nav.EARTH_EQUATORIAL_RADIUS, + ) + + def _get_scanning_angles(self, dataset_id): + alt_ch_name = _get_alternative_channel_name(dataset_id) + misalignment = np.ascontiguousarray( + self._coord_conv["matrix_of_misalignment"].transpose().astype(np.float64) + ) + return nav.ScanningAngles( + stepping_angle=self._coord_conv["stepping_angle_along_line"][alt_ch_name], + sampling_angle=self._coord_conv["sampling_angle_along_pixel"][ + alt_ch_name], + misalignment=misalignment + ) + + def _get_image_offset(self, dataset_id): + alt_ch_name = _get_alternative_channel_name(dataset_id) + center_line_vissr_frame = self._coord_conv["central_line_number_of_vissr_frame"][ + alt_ch_name + ] + center_pixel_vissr_frame = self._coord_conv["central_pixel_number_of_vissr_frame"][ + alt_ch_name + ] + pixel_offset = self._coord_conv[ + "pixel_difference_of_vissr_center_from_normal_position" + ][alt_ch_name] + return nav.ImageOffset( + line_offset=center_line_vissr_frame, + pixel_offset=center_pixel_vissr_frame + pixel_offset, + ) + + def _get_predicted_navigation_params(self): + """Get predictions of time-dependent navigation parameters.""" + attitude_prediction = self._get_attitude_prediction() + orbit_prediction = self._get_orbit_prediction() + return nav.PredictedNavigationParameters( + attitude=attitude_prediction, + orbit=orbit_prediction + ) + + def _get_attitude_prediction(self): + att_pred = self._header["image_parameters"]["attitude_prediction"]["data"] + attitudes = nav.Attitude( + angle_between_earth_and_sun=att_pred["sun_earth_angle"].astype( + np.float64), + angle_between_sat_spin_and_z_axis=att_pred[ + "right_ascension_of_attitude" + ].astype(np.float64), + angle_between_sat_spin_and_yz_plane=att_pred[ + "declination_of_attitude" + ].astype(np.float64), + ) + attitude_prediction = nav.AttitudePrediction( + prediction_times=att_pred["prediction_time_mjd"].astype(np.float64), + attitude=attitudes + ) + return attitude_prediction + + def _get_orbit_prediction(self): + orb_pred = self._header["image_parameters"]["orbit_prediction"]["data"] + orbit_angles = nav.OrbitAngles( + greenwich_sidereal_time=np.deg2rad( + orb_pred["greenwich_sidereal_time"].astype(np.float64) + ), + declination_from_sat_to_sun=np.deg2rad( + orb_pred["sat_sun_vector_earth_fixed"]["elevation"].astype(np.float64) + ), + right_ascension_from_sat_to_sun=np.deg2rad( + orb_pred["sat_sun_vector_earth_fixed"]["azimuth"].astype(np.float64) + ), + ) + sat_position = nav.Satpos( + x=orb_pred["satellite_position_earth_fixed"][:, 0].astype(np.float64), + y=orb_pred["satellite_position_earth_fixed"][:, 1].astype(np.float64), + z=orb_pred["satellite_position_earth_fixed"][:, 2].astype(np.float64), + ) + orbit_prediction = nav.OrbitPrediction( + prediction_times=orb_pred["prediction_time_mjd"].astype(np.float64), + angles=orbit_angles, + sat_position=sat_position, + nutation_precession=np.ascontiguousarray( + orb_pred["conversion_matrix"].transpose(0, 2, 1).astype(np.float64) + ), + ) + return orbit_prediction + + def _make_lons_lats_data_array(self, lons, lats): + lons = xr.DataArray( + lons, + dims=("y", "x"), + attrs={"standard_name": "longitude", "units": "degrees_east"}, + ) + lats = xr.DataArray( + lats, + dims=("y", "x"), + attrs={"standard_name": "latitude", "units": "degrees_north"}, + ) + return lons, lats + + def _update_attrs(self, dataset, dataset_id, ds_info): + dataset.attrs.update(ds_info) + dataset.attrs.update(self._mda) + dataset.attrs[ + 
"area_def_uniform_sampling" + ] = self._get_area_def_uniform_sampling(dataset_id) + + @property + def start_time(self): + """Nominal start time of the dataset.""" + return self._mda["time_parameters"]["nominal_start_time"] + + @property + def end_time(self): + """Nominal end time of the dataset.""" + return self._mda["time_parameters"]["nominal_end_time"] + + @property + def _coord_conv(self): + return self._header["image_parameters"]["coordinate_conversion"] + + @property + def _mode_block(self): + return self._header["image_parameters"]["mode"] + + +def _get_alternative_channel_name(dataset_id): + return fmt.ALT_CHANNEL_NAMES[dataset_id["name"]] + + +def read_from_file_obj(file_obj, dtype, count, offset=0): + """Read data from file object. + + Args: + file_obj: An open file object. + dtype: Data type to be read. + count: Number of elements to be read. + offset: Byte offset where to start reading. + """ + file_obj.seek(offset) + data = file_obj.read(dtype.itemsize * count) + return np.frombuffer(data, dtype=dtype, count=count) + + +class Calibrator: + """Calibrate VISSR data to reflectance or brightness temperature. + + Reference: Section 2.2 in the VISSR User Guide. + """ + + def __init__(self, calib_table): + """Initialize the calibrator. + + Args: + calib_table: Calibration table + """ + self._calib_table = calib_table + + def calibrate(self, counts, calibration): + """Transform counts to given calibration level.""" + if calibration == "counts": + return counts + res = self._calibrate(counts) + res = self._postproc(res, calibration) + return self._make_data_array(res, counts) + + def _calibrate(self, counts): + return da.map_blocks( + self._lookup_calib_table, + counts.data, + calib_table=self._calib_table, + dtype=np.float32, + ) + + def _postproc(self, res, calibration): + if calibration == "reflectance": + res = self._convert_to_percent(res) + return res + + def _convert_to_percent(self, res): + return res * 100 + + def _make_data_array(self, interp, counts): + return xr.DataArray( + interp, + dims=counts.dims, + coords=counts.coords, + ) + + def _lookup_calib_table(self, counts, calib_table): + return calib_table[counts] + + +class SpaceMasker: + """Mask pixels outside the earth disk.""" + + _fill_value = -1 # scanline not intersecting the earth + + def __init__(self, image_data, channel): + """Initialize the space masker. + + Args: + image_data: Image data + channel: Channel name + """ + self._image_data = image_data + self._channel = channel + self._shape = image_data["image_data"].shape + self._earth_mask = self._get_earth_mask() + + def mask_space(self, dataset): + """Mask space pixels in the given dataset.""" + return dataset.where(self._earth_mask).astype(np.float32) + + def _get_earth_mask(self): + earth_edges = self._get_earth_edges() + return get_earth_mask(self._shape, earth_edges, self._fill_value) + + def _get_earth_edges(self): + west_edges = self._get_earth_edges_per_scan_line("west_side_earth_edge") + east_edges = self._get_earth_edges_per_scan_line("east_side_earth_edge") + return west_edges, east_edges + + def _get_earth_edges_per_scan_line(self, cardinal): + edges = self._image_data["LCW"][cardinal].compute().astype(np.int32) + if is_vis_channel(self._channel): + edges = self._correct_vis_edges(edges) + return edges + + def _correct_vis_edges(self, edges): + """Correct VIS edges. + + VIS data contains earth edges of IR channel. Compensate for that + by scaling with a factor of 4 (1 IR pixel ~ 4 VIS pixels). 
+ """ + return np.where(edges != self._fill_value, edges * 4, edges) + + +@numba.njit +def get_earth_mask(shape, earth_edges, fill_value=-1): + """Get binary mask where 1/0 indicates earth/space. + + Args: + shape: Image shape + earth_edges: First and last earth pixel in each scanline + fill_value: Fill value for scanlines not intersecting the earth. + """ + first_earth_pixels, last_earth_pixels = earth_edges + mask = np.zeros(shape, dtype=np.int8) + for line in range(shape[0]): + first = first_earth_pixels[line] + last = last_earth_pixels[line] + if first == fill_value or last == fill_value: + continue + mask[line, first:last+1] = 1 + return mask + + +def is_vis_channel(channel_name): + """Check if it's the visible channel.""" + return channel_name == "VIS" + + +class AreaDefEstimator: + """Estimate area definition for VISSR images.""" + + full_disk_size = { + "IR": 2366, + "VIS": 9464, + } + + def __init__(self, coord_conv_params, metadata): + """Initialize the area definition estimator. + + Args: + coord_conv_params: Coordinate conversion parameters + metadata: VISSR file metadata + """ + self.coord_conv = coord_conv_params + self.metadata = metadata + + def get_area_def_uniform_sampling(self, dataset_id): + """Get full disk area definition with uniform sampling. + + Args: + dataset_id: ID of the corresponding dataset. + """ + proj_dict = self._get_proj_dict(dataset_id) + extent = geos_area.get_area_extent(proj_dict) + return geos_area.get_area_definition(proj_dict, extent) + + def _get_proj_dict(self, dataset_id): + proj_dict = {} + proj_dict.update(self._get_name_dict(dataset_id)) + proj_dict.update(self._get_proj4_dict()) + proj_dict.update(self._get_shape_dict(dataset_id)) + return proj_dict + + def _get_name_dict(self, dataset_id): + name_dict = geos_area.get_geos_area_naming( + { + "platform_name": self.metadata["platform"], + "instrument_name": self.metadata["sensor"], + "service_name": "western-pacific", + "service_desc": "Western Pacific", + "resolution": dataset_id["resolution"], + } + ) + return { + "a_name": name_dict["area_id"], + "p_id": name_dict["area_id"], + "a_desc": name_dict["description"], + } + + def _get_proj4_dict( + self, + ): + # Use nominal parameters to make the area def as constant as possible + return { + "ssp_lon": self.metadata["orbital_parameters"][ + "satellite_nominal_longitude" + ], + "a": nav.EARTH_EQUATORIAL_RADIUS, + "b": nav.EARTH_POLAR_RADIUS, + "h": self.metadata["orbital_parameters"]["satellite_nominal_altitude"], + } + + def _get_shape_dict(self, dataset_id): + # Apply sampling from the vertical dimension to the horizontal + # dimension to obtain a square area definition with uniform sampling. + ch_type = fmt.CHANNEL_TYPES[dataset_id["name"]] + alt_ch_name = _get_alternative_channel_name(dataset_id) + stepping_angle = self.coord_conv["stepping_angle_along_line"][alt_ch_name] + size = self.full_disk_size[ch_type] + line_pixel_offset = 0.5 * size + lfac_cfac = geos_area.sampling_to_lfac_cfac(stepping_angle) + return { + "nlines": size, + "ncols": size, + "lfac": lfac_cfac, + "cfac": lfac_cfac, + "coff": line_pixel_offset, + "loff": line_pixel_offset, + "scandir": "N2S", + } diff --git a/satpy/readers/gms/gms5_vissr_navigation.py b/satpy/readers/gms/gms5_vissr_navigation.py new file mode 100644 index 0000000000..8a811b2210 --- /dev/null +++ b/satpy/readers/gms/gms5_vissr_navigation.py @@ -0,0 +1,932 @@ +"""GMS-5 VISSR Navigation. + +Reference: `GMS User Guide`_, Appendix E, S-VISSR Mapping. + +.. 
_GMS User Guide: + https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf +""" + +from collections import namedtuple + +import dask.array as da +import numba +import numpy as np + +from satpy.utils import get_legacy_chunk_size + +CHUNK_SIZE = get_legacy_chunk_size() + +EARTH_FLATTENING = 1 / 298.257 +EARTH_EQUATORIAL_RADIUS = 6378136.0 +EARTH_POLAR_RADIUS = EARTH_EQUATORIAL_RADIUS * (1 - EARTH_FLATTENING) +"""Constants taken from JMA's Msial library.""" + + +Pixel = namedtuple( + "Pixel", + ["line", "pixel"] +) +"""A VISSR pixel.""" + +Vector2D = namedtuple( + "Vector2D", + ["x", "y"] +) +"""A 2D vector.""" + + +Vector3D = namedtuple( + "Vector3D", + ["x", "y", "z"] +) +"""A 3D vector.""" + + +Satpos = namedtuple( + "Satpos", + ["x", "y", "z"] +) +"""A 3D vector.""" + + +Attitude = namedtuple( + "Attitude", + [ + "angle_between_earth_and_sun", + "angle_between_sat_spin_and_z_axis", + "angle_between_sat_spin_and_yz_plane", + ], +) +"""Attitude parameters. + +Units: radians +""" + + +Orbit = namedtuple( + "Orbit", + [ + "angles", + "sat_position", + "nutation_precession", + ], +) +"""Orbital Parameters + +Args: + angles (OrbitAngles): Orbit angles + sat_position (Vector3D): Satellite position + nutation_precession: Nutation and precession matrix (3x3) +""" + + +OrbitAngles = namedtuple( + "OrbitAngles", + [ + "greenwich_sidereal_time", + "declination_from_sat_to_sun", + "right_ascension_from_sat_to_sun", + ], +) +"""Orbit angles. + +Units: radians +""" + + +ImageNavigationParameters = namedtuple( + "ImageNavigationParameters", + ["static", "predicted"] +) +"""Navigation parameters for the entire image. + +Args: + static (StaticNavigationParameters): Static parameters. + predicted (PredictedNavigationParameters): Predicted time-dependent parameters. +""" + + +PixelNavigationParameters = namedtuple( + "PixelNavigationParameters", + ["attitude", "orbit", "proj_params"] +) +"""Navigation parameters for a single pixel. + +Args: + attitude (Attitude): Attitude parameters + orbit (Orbit): Orbit parameters + proj_params (ProjectionParameters): Projection parameters +""" + + +StaticNavigationParameters = namedtuple( + "StaticNavigationParameters", + [ + "proj_params", + "scan_params" + ] +) +"""Navigation parameters which are constant for the entire scan. + +Args: + proj_params (ProjectionParameters): Projection parameters + scan_params (ScanningParameters): Scanning parameters +""" + + +PredictedNavigationParameters = namedtuple( + "PredictedNavigationParameters", + [ + "attitude", + "orbit" + ] +) +"""Predictions of time-dependent navigation parameters. + +They need to be evaluated for each pixel. + +Args: + attitude (AttitudePrediction): Attitude prediction + orbit (OrbitPrediction): Orbit prediction +""" + + +ScanningParameters = namedtuple( + "ScanningParameters", + [ + "start_time_of_scan", + "spinning_rate", + "num_sensors", + "sampling_angle" + ], +) + + +ProjectionParameters = namedtuple( + "ProjectionParameters", + [ + "image_offset", + "scanning_angles", + "earth_ellipsoid", + ], +) +"""Projection parameters. 
+ +Args: + image_offset (ImageOffset): Image offset + scanning_angles (ScanningAngles): Scanning angles + earth_ellipsoid (EarthEllipsoid): Earth ellipsoid +""" + + +ImageOffset = namedtuple( + "ImageOffset", + [ + "line_offset", + "pixel_offset", + ] +) +"""Image offset + +Args: + line_offset: Line offset from image center + pixel_offset: Pixel offset from image center +""" + + +ScanningAngles = namedtuple( + "ScanningAngles", + [ + "stepping_angle", + "sampling_angle", + "misalignment" + ] +) +"""Scanning angles + +Args: + stepping_angle: Scanning angle along line (rad) + sampling_angle: Scanning angle along pixel (rad) + misalignment: Misalignment matrix (3x3) +""" + + +EarthEllipsoid = namedtuple( + "EarthEllipsoid", + [ + "flattening", + "equatorial_radius" + ] +) +"""Earth ellipsoid. + +Args: + flattening: Ellipsoid flattening + equatorial_radius: Equatorial radius (meters) +""" + + +_AttitudePrediction = namedtuple( + "_AttitudePrediction", + [ + "prediction_times", + "attitude" + ], +) + + +_OrbitPrediction = namedtuple( + "_OrbitPrediction", + [ + "prediction_times", + "angles", + "sat_position", + "nutation_precession", + ], +) + + +class AttitudePrediction: + """Attitude prediction. + + Use .to_numba() to pass this object to jitted methods. This extra + layer avoids usage of jitclasses and having to re-implement np.unwrap in + numba. + """ + + def __init__( + self, + prediction_times, + attitude + ): + """Initialize attitude prediction. + + In order to accelerate interpolation, the 2-pi periodicity of angles + is unwrapped here already (that means phase jumps greater than pi + are wrapped to their 2*pi complement). + + Args: + prediction_times: Timestamps of predicted attitudes + attitude (Attitude): Attitudes at prediction times + """ + self.prediction_times = prediction_times + self.attitude = self._unwrap_angles(attitude) + + def _unwrap_angles(self, attitude): + return Attitude( + np.unwrap(attitude.angle_between_earth_and_sun), + np.unwrap(attitude.angle_between_sat_spin_and_z_axis), + np.unwrap(attitude.angle_between_sat_spin_and_yz_plane), + ) + + def to_numba(self): + """Convert to numba-compatible type.""" + return _AttitudePrediction( + prediction_times=self.prediction_times, + attitude=self.attitude + ) + + +class OrbitPrediction: + """Orbit prediction. + + Use .to_numba() to pass this object to jitted methods. This extra + layer avoids usage of jitclasses and having to re-implement np.unwrap in + numba. + """ + + def __init__( + self, + prediction_times, + angles, + sat_position, + nutation_precession, + ): + """Initialize orbit prediction. + + In order to accelerate interpolation, the 2-pi periodicity of angles + is unwrapped here already (that means phase jumps greater than pi + are wrapped to their 2*pi complement). + + Args: + prediction_times: Timestamps of orbit prediction. + angles (OrbitAngles): Orbit angles + sat_position (Vector3D): Satellite position + nutation_precession: Nutation and precession matrix. 
+ """ + self.prediction_times = prediction_times + self.angles = self._unwrap_angles(angles) + self.sat_position = sat_position + self.nutation_precession = nutation_precession + + def _unwrap_angles(self, angles): + return OrbitAngles( + greenwich_sidereal_time=np.unwrap(angles.greenwich_sidereal_time), + declination_from_sat_to_sun=np.unwrap(angles.declination_from_sat_to_sun), + right_ascension_from_sat_to_sun=np.unwrap( + angles.right_ascension_from_sat_to_sun + ), + ) + + def to_numba(self): + """Convert to numba-compatible type.""" + return _OrbitPrediction( + prediction_times=self.prediction_times, + angles=self.angles, + sat_position=self.sat_position, + nutation_precession=self.nutation_precession, + ) + + +def get_lons_lats(lines, pixels, nav_params): + """Compute lon/lat coordinates given VISSR image coordinates. + + Args: + lines: VISSR image lines + pixels: VISSR image pixels + nav_params: Image navigation parameters + """ + pixels_2d, lines_2d = da.meshgrid(pixels, lines) + lons, lats = da.map_blocks( + _get_lons_lats_numba, + lines_2d, + pixels_2d, + nav_params=_make_nav_params_numba_compatible(nav_params), + **_get_map_blocks_kwargs(pixels_2d.chunks) + ) + return lons, lats + + +def _make_nav_params_numba_compatible(nav_params): + predicted = PredictedNavigationParameters( + attitude=nav_params.predicted.attitude.to_numba(), + orbit=nav_params.predicted.orbit.to_numba() + ) + return ImageNavigationParameters(nav_params.static, predicted) + + +def _get_map_blocks_kwargs(chunks): + # Get keyword arguments for da.map_blocks, so that it can be used + # with a function that returns two arguments. + return { + "new_axis": 0, + "chunks": (2,) + chunks, + "dtype": np.float32, + } + + +@numba.njit +def _get_lons_lats_numba(lines_2d, pixels_2d, nav_params): + shape = lines_2d.shape + lons = np.zeros(shape, dtype=np.float32) + lats = np.zeros(shape, dtype=np.float32) + for i in range(shape[0]): + for j in range(shape[1]): + pixel = Pixel(lines_2d[i, j], pixels_2d[i, j]) + nav_params_pix = _get_pixel_navigation_parameters( + pixel, nav_params + ) + lon, lat = get_lon_lat(pixel, nav_params_pix) + lons[i, j] = lon + lats[i, j] = lat + # Stack lons and lats because da.map_blocks doesn't support multiple + # return values. 
+ return np.stack((lons, lats)) + + +@numba.njit +def _get_pixel_navigation_parameters(point, im_nav_params): + obs_time = get_observation_time(point, im_nav_params.static.scan_params) + attitude, orbit = interpolate_navigation_prediction( + attitude_prediction=im_nav_params.predicted.attitude, + orbit_prediction=im_nav_params.predicted.orbit, + observation_time=obs_time + ) + return PixelNavigationParameters( + attitude=attitude, + orbit=orbit, + proj_params=im_nav_params.static.proj_params + ) + + +@numba.njit +def get_observation_time(point, scan_params): + """Calculate observation time of a VISSR pixel.""" + relative_time = _get_relative_observation_time(point, scan_params) + return scan_params.start_time_of_scan + relative_time + + +@numba.njit +def _get_relative_observation_time(point, scan_params): + line, pixel = point + pixel = pixel + 1 + line = line + 1 + spinning_freq = 1440 * scan_params.spinning_rate + line_step = np.floor((line - 1) / scan_params.num_sensors) + pixel_step = (scan_params.sampling_angle * pixel) / (2 * np.pi) + return (line_step + pixel_step) / spinning_freq + + +@numba.njit +def interpolate_navigation_prediction( + attitude_prediction, orbit_prediction, observation_time +): + """Interpolate predicted navigation parameters.""" + attitude = interpolate_attitude_prediction(attitude_prediction, observation_time) + orbit = interpolate_orbit_prediction(orbit_prediction, observation_time) + return attitude, orbit + + +@numba.njit +def get_lon_lat(pixel, nav_params): + """Get longitude and latitude coordinates for a given image pixel. + + Args: + pixel (Pixel): Point in image coordinates. + nav_params (PixelNavigationParameters): Navigation parameters for a + single pixel. + Returns: + Longitude and latitude in degrees. + """ + scan_angles = transform_image_coords_to_scanning_angles( + pixel, + nav_params.proj_params.image_offset, + nav_params.proj_params.scanning_angles + ) + view_vector_sat = transform_scanning_angles_to_satellite_coords( + scan_angles, + nav_params.proj_params.scanning_angles.misalignment + ) + view_vector_earth_fixed = transform_satellite_to_earth_fixed_coords( + view_vector_sat, + nav_params.orbit, + nav_params.attitude + ) + point_on_earth = intersect_with_earth( + view_vector_earth_fixed, + nav_params.orbit.sat_position, + nav_params.proj_params.earth_ellipsoid + ) + lon, lat = transform_earth_fixed_to_geodetic_coords( + point_on_earth, nav_params.proj_params.earth_ellipsoid.flattening + ) + return lon, lat + + +@numba.njit +def transform_image_coords_to_scanning_angles(point, image_offset, scanning_angles): + """Transform image coordinates to scanning angles. + + Args: + point (Pixel): Point in image coordinates. + image_offset (ImageOffset): Image offset. + scanning_angles (ScanningAngles): Scanning angles. + Returns: + Scanning angles (x, y) at the pixel center (rad). + """ + line_offset = image_offset.line_offset + pixel_offset = image_offset.pixel_offset + stepping_angle = scanning_angles.stepping_angle + sampling_angle = scanning_angles.sampling_angle + x = sampling_angle * (point.pixel + 1 - pixel_offset) + y = stepping_angle * (point.line + 1 - line_offset) + return Vector2D(x, y) + + +@numba.njit +def transform_scanning_angles_to_satellite_coords(angles, misalignment): + """Transform scanning angles to satellite angular momentum coordinates. + + Args: + angles (Vector2D): Scanning angles in radians. + misalignment: Misalignment matrix (3x3) + + Returns: + View vector (Vector3D) in satellite angular momentum coordinates. 
+ """ + x, y = angles.x, angles.y + sin_x = np.sin(x) + cos_x = np.cos(x) + view = Vector3D(np.cos(y), 0.0, np.sin(y)) + + # Correct for misalignment + view = matrix_vector(misalignment, view) + + # Rotate around z-axis + return Vector3D( + cos_x * view.x - sin_x * view.y, + sin_x * view.x + cos_x * view.y, + view.z + ) + + +@numba.njit +def transform_satellite_to_earth_fixed_coords( + point, + orbit, + attitude +): + """Transform from earth-fixed to satellite angular momentum coordinates. + + Args: + point (Vector3D): Point in satellite angular momentum coordinates. + orbit (Orbit): Orbital parameters + attitude (Attitude): Attitude parameters + Returns: + Point (Vector3D) in earth-fixed coordinates. + """ + unit_vector_z = _get_satellite_unit_vector_z(attitude, orbit) + unit_vector_x = _get_satellite_unit_vector_x(unit_vector_z, attitude, orbit) + unit_vector_y = _get_satellite_unit_vector_y(unit_vector_x, unit_vector_z) + return _get_earth_fixed_coords( + point, + unit_vector_x, + unit_vector_y, + unit_vector_z + ) + + +@numba.njit +def _get_satellite_unit_vector_z(attitude, orbit): + v1950 = _get_satellite_z_axis_1950( + attitude.angle_between_sat_spin_and_z_axis, + attitude.angle_between_sat_spin_and_yz_plane + ) + vcorr = _correct_nutation_precession( + v1950, + orbit.nutation_precession + ) + return _rotate_to_greenwich( + vcorr, + orbit.angles.greenwich_sidereal_time + ) + + +@numba.njit +def _get_satellite_z_axis_1950( + angle_between_sat_spin_and_z_axis, + angle_between_sat_spin_and_yz_plane +): + """Get satellite z-axis (spin) in mean of 1950 coordinates.""" + alpha = angle_between_sat_spin_and_z_axis + delta = angle_between_sat_spin_and_yz_plane + cos_delta = np.cos(delta) + return Vector3D( + x=np.sin(delta), + y=-cos_delta * np.sin(alpha), + z=cos_delta * np.cos(alpha) + ) + + +@numba.njit +def _correct_nutation_precession(vector, nutation_precession): + return matrix_vector(nutation_precession, vector) + + +@numba.njit +def _rotate_to_greenwich(vector, greenwich_sidereal_time): + cos_sid = np.cos(greenwich_sidereal_time) + sin_sid = np.sin(greenwich_sidereal_time) + rotated = Vector3D( + x=cos_sid * vector.x + sin_sid * vector.y, + y=-sin_sid * vector.x + cos_sid * vector.y, + z=vector.z + ) + return normalize_vector(rotated) + + +@numba.njit +def _get_satellite_unit_vector_x(unit_vector_z, attitude, orbit): + sat_sun_vec = _get_vector_from_satellite_to_sun( + orbit.angles.declination_from_sat_to_sun, + orbit.angles.right_ascension_from_sat_to_sun + ) + return _get_unit_vector_x( + sat_sun_vec, + unit_vector_z, + attitude.angle_between_earth_and_sun + ) + + +@numba.njit +def _get_vector_from_satellite_to_sun( + declination_from_sat_to_sun, + right_ascension_from_sat_to_sun +): + declination = declination_from_sat_to_sun + right_ascension = right_ascension_from_sat_to_sun + cos_declination = np.cos(declination) + return Vector3D( + x=cos_declination * np.cos(right_ascension), + y=cos_declination * np.sin(right_ascension), + z=np.sin(declination) + ) + + +@numba.njit +def _get_unit_vector_x( + sat_sun_vec, + unit_vector_z, + angle_between_earth_and_sun + +): + beta = angle_between_earth_and_sun + sin_beta = np.sin(beta) + cos_beta = np.cos(beta) + cross1 = _get_uz_cross_satsun(unit_vector_z, sat_sun_vec) + cross2 = cross_product(cross1, unit_vector_z) + unit_vector_x = Vector3D( + x=sin_beta * cross1.x + cos_beta * cross2.x, + y=sin_beta * cross1.y + cos_beta * cross2.y, + z=sin_beta * cross1.z + cos_beta * cross2.z + ) + return normalize_vector(unit_vector_x) + + +@numba.njit 
+def _get_uz_cross_satsun(unit_vector_z, sat_sun_vec): + res = cross_product(unit_vector_z, sat_sun_vec) + return normalize_vector(res) + + +@numba.njit +def _get_satellite_unit_vector_y(unit_vector_x, unit_vector_z): + res = cross_product(unit_vector_z, unit_vector_x) + return normalize_vector(res) + + +@numba.njit +def _get_earth_fixed_coords(point, unit_vector_x, unit_vector_y, unit_vector_z): + ux, uy, uz = unit_vector_x, unit_vector_y, unit_vector_z + # Multiply with matrix of satellite unit vectors [ux, uy, uz] + return Vector3D( + x=ux.x * point.x + uy.x * point.y + uz.x * point.z, + y=ux.y * point.x + uy.y * point.y + uz.y * point.z, + z=ux.z * point.x + uy.z * point.y + uz.z * point.z + ) + + +@numba.njit +def intersect_with_earth(view_vector, sat_pos, ellipsoid): + """Intersect instrument viewing vector with the earth's surface. + + Reference: Appendix E, section 2.11 in the GMS user guide. + + Args: + view_vector (Vector3D): Instrument viewing vector in earth-fixed + coordinates. + sat_pos (Vector3D): Satellite position in earth-fixed coordinates. + ellipsoid (EarthEllipsoid): Earth ellipsoid. + Returns: + Intersection (Vector3D) with the earth's surface. + """ + distance = _get_distance_to_intersection(view_vector, sat_pos, ellipsoid) + return Vector3D( + sat_pos.x + distance * view_vector.x, + sat_pos.y + distance * view_vector.y, + sat_pos.z + distance * view_vector.z + ) + + +@numba.njit +def _get_distance_to_intersection(view_vector, sat_pos, ellipsoid): + """Get distance to intersection with the earth. + + If the instrument is pointing towards the earth, there will be two + intersections with the surface. Choose the one on the instrument-facing + side of the earth. + """ + d1, d2 = _get_distances_to_intersections(view_vector, sat_pos, ellipsoid) + return min(d1, d2) + + +@numba.njit +def _get_distances_to_intersections(view_vector, sat_pos, ellipsoid): + """Get distances to intersections with the earth's surface. + + Returns: + Distances to two intersections with the surface. + """ + a, b, c = _get_abc_helper(view_vector, sat_pos, ellipsoid) + tmp = np.sqrt((b**2 - a * c)) + dist_1 = (-b + tmp) / a + dist_2 = (-b - tmp) / a + return dist_1, dist_2 + + +@numba.njit +def _get_abc_helper(view_vector, sat_pos, ellipsoid): + """Get a,b,c helper variables. + + Reference: Appendix E, Equation (26) in the GMS user guide. + """ + flat2 = (1 - ellipsoid.flattening) ** 2 + ux, uy, uz = view_vector.x, view_vector.y, view_vector.z + x, y, z = sat_pos.x, sat_pos.y, sat_pos.z + a = flat2 * (ux ** 2 + uy ** 2) + uz ** 2 + b = flat2 * (x * ux + y * uy) + z * uz + c = flat2 * (x ** 2 + y ** 2 - ellipsoid.equatorial_radius ** 2) + z ** 2 + return a, b, c + + +@numba.njit +def transform_earth_fixed_to_geodetic_coords(point, earth_flattening): + """Transform from earth-fixed to geodetic coordinates. + + Args: + point (Vector3D): Point in earth-fixed coordinates. + earth_flattening: Flattening of the earth. + + Returns: + Geodetic longitude and latitude (degrees). 
+ """ + x, y, z = point.x, point.y, point.z + f = earth_flattening + lon = np.arctan2(y, x) + lat = np.arctan2(z, ((1 - f) ** 2 * np.sqrt(x**2 + y**2))) + return np.rad2deg(lon), np.rad2deg(lat) + + +@numba.njit +def interpolate_orbit_prediction(orbit_prediction, observation_time): + """Interpolate orbit prediction at the given observation time.""" + angles = _interpolate_orbit_angles(observation_time, orbit_prediction) + sat_position = _interpolate_sat_position(observation_time, orbit_prediction) + nutation_precession = interpolate_nearest( + observation_time, + orbit_prediction.prediction_times, + orbit_prediction.nutation_precession, + ) + return Orbit( + angles=angles, + sat_position=sat_position, + nutation_precession=nutation_precession, + ) + + +@numba.njit +def _interpolate_orbit_angles(observation_time, orbit_prediction): + sidereal_time = interpolate_angles( + observation_time, + orbit_prediction.prediction_times, + orbit_prediction.angles.greenwich_sidereal_time, + ) + declination = interpolate_angles( + observation_time, + orbit_prediction.prediction_times, + orbit_prediction.angles.declination_from_sat_to_sun, + ) + right_ascension = interpolate_angles( + observation_time, + orbit_prediction.prediction_times, + orbit_prediction.angles.right_ascension_from_sat_to_sun, + ) + return OrbitAngles( + greenwich_sidereal_time=sidereal_time, + declination_from_sat_to_sun=declination, + right_ascension_from_sat_to_sun=right_ascension, + ) + + +@numba.njit +def _interpolate_sat_position(observation_time, orbit_prediction): + x = interpolate_continuous( + observation_time, + orbit_prediction.prediction_times, + orbit_prediction.sat_position.x, + ) + y = interpolate_continuous( + observation_time, + orbit_prediction.prediction_times, + orbit_prediction.sat_position.y, + ) + z = interpolate_continuous( + observation_time, + orbit_prediction.prediction_times, + orbit_prediction.sat_position.z, + ) + return Vector3D(x, y, z) + + +@numba.njit +def interpolate_attitude_prediction(attitude_prediction, observation_time): + """Interpolate attitude prediction at given observation time.""" + angle_between_earth_and_sun = interpolate_angles( + observation_time, + attitude_prediction.prediction_times, + attitude_prediction.attitude.angle_between_earth_and_sun, + ) + angle_between_sat_spin_and_z_axis = interpolate_angles( + observation_time, + attitude_prediction.prediction_times, + attitude_prediction.attitude.angle_between_sat_spin_and_z_axis, + ) + angle_between_sat_spin_and_yz_plane = interpolate_angles( + observation_time, + attitude_prediction.prediction_times, + attitude_prediction.attitude.angle_between_sat_spin_and_yz_plane, + ) + return Attitude( + angle_between_earth_and_sun, + angle_between_sat_spin_and_z_axis, + angle_between_sat_spin_and_yz_plane, + ) + + +@numba.njit +def interpolate_continuous(x, x_sample, y_sample): + """Linear interpolation of continuous quantities. + + Numpy equivalent would be np.interp(..., left=np.nan, right=np.nan), but + numba currently doesn't support those keyword arguments. 
+ """ + try: + return _interpolate(x, x_sample, y_sample) + except Exception: + # Numba cannot distinguish exception types + return np.nan + + +@numba.njit +def _interpolate(x, x_sample, y_sample): + i = _find_enclosing_index(x, x_sample) + offset = y_sample[i] + x_diff = x_sample[i + 1] - x_sample[i] + y_diff = y_sample[i + 1] - y_sample[i] + slope = y_diff / x_diff + dist = x - x_sample[i] + return offset + slope * dist + + +@numba.njit +def _find_enclosing_index(x, x_sample): + """Find where x_sample encloses x.""" + for i in range(len(x_sample) - 1): + if x_sample[i] <= x < x_sample[i + 1]: + return i + raise Exception("x not enclosed by x_sample") + + +@numba.njit +def interpolate_angles(x, x_sample, y_sample): + """Linear interpolation of angles. + + Requires 2-pi periodicity to be unwrapped before (for + performance reasons). Interpolated angles are wrapped + back to [-pi, pi] to restore periodicity. + """ + return _wrap_2pi(interpolate_continuous(x, x_sample, y_sample)) + + +@numba.njit +def _wrap_2pi(values): + """Wrap values to interval [-pi, pi]. + + Source: https://stackoverflow.com/a/15927914/5703449 + """ + return (values + np.pi) % (2 * np.pi) - np.pi + + +@numba.njit +def interpolate_nearest(x, x_sample, y_sample): + """Nearest neighbour interpolation.""" + try: + return _interpolate_nearest(x, x_sample, y_sample) + except Exception: + return np.nan * np.ones_like(y_sample[0]) + + +@numba.njit +def _interpolate_nearest(x, x_sample, y_sample): + i = _find_enclosing_index(x, x_sample) + return y_sample[i] + + +@numba.njit +def matrix_vector(m, v): + """Multiply (3,3)-matrix and Vector3D.""" + x = m[0, 0] * v.x + m[0, 1] * v.y + m[0, 2] * v.z + y = m[1, 0] * v.x + m[1, 1] * v.y + m[1, 2] * v.z + z = m[2, 0] * v.x + m[2, 1] * v.y + m[2, 2] * v.z + return Vector3D(x, y, z) + + +@numba.njit +def cross_product(a, b): + """Compute vector product a x b.""" + return Vector3D( + x=a.y * b.z - a.z * b.y, + y=a.z * b.x - a.x * b.z, + z=a.x * b.y - a.y * b.x + ) + + +@numba.njit +def normalize_vector(v): + """Normalize a Vector3D.""" + norm = np.sqrt(v.x**2 + v.y**2 + v.z**2) + return Vector3D( + v.x / norm, + v.y / norm, + v.z / norm + ) diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index a092288575..c8b2287653 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -175,7 +175,6 @@ def __init__(self, filename, filename_info, filetype_info, hdr_info): self.mda = {} self.hdr_info = hdr_info self._get_hd(self.hdr_info) - self._start_time = filename_info['start_time'] self._end_time = self._start_time + timedelta(minutes=15) @@ -222,6 +221,16 @@ def _get_hd(self, hdr_info): 'SSP_longitude': 0.0} self.mda['orbital_parameters'] = {} + @property + def observation_start_time(self): + """Get observation start time.""" + return self._start_time + + @property + def observation_end_time(self): + """Get observation end time.""" + return self._end_time + @property def start_time(self): """Get start time.""" diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py index 3c004b3a27..7ecc5f43f4 100644 --- a/satpy/readers/nwcsaf_nc.py +++ b/satpy/readers/nwcsaf_nc.py @@ -52,7 +52,13 @@ 'EOS-Terra': 'modis', 'Suomi-NPP': 'viirs', 'NOAA-20': 'viirs', + 'NOAA-21': 'viirs', + 'NOAA-22': 'viirs', + 'NOAA-23': 'viirs', 'JPSS-1': 'viirs', + 'Metop-SG-A1': 'metimage', + 'Metop-SG-A2': 'metimage', + 'Metop-SG-A3': 'metimage', 'GOES-16': 'abi', 'GOES-17': 'abi', 'Himawari-8': 'ahi', diff --git a/satpy/readers/olci_nc.py b/satpy/readers/olci_nc.py index 
9aafb981fb..112f5455ac 100644 --- a/satpy/readers/olci_nc.py +++ b/satpy/readers/olci_nc.py @@ -51,6 +51,10 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import angle2xyz, get_legacy_chunk_size, xyz2angle +DEFAULT_MASK_ITEMS = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", + "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL", + "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] + logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() @@ -100,7 +104,7 @@ class NCOLCIBase(BaseFileHandler): cols_name = "columns" def __init__(self, filename, filename_info, filetype_info, - engine=None): + engine=None, **kwargs): """Init the olci reader base.""" super().__init__(filename, filename_info, filetype_info) self._engine = engine @@ -203,6 +207,12 @@ def get_dataset(self, key, info): class NCOLCI2(NCOLCIChannelBase): """File handler for OLCI l2.""" + def __init__(self, filename, filename_info, filetype_info, engine=None, unlog=False, mask_items=None): + """Init the file handler.""" + super().__init__(filename, filename_info, filetype_info, engine) + self.unlog = unlog + self.mask_items = mask_items + def get_dataset(self, key, info): """Load a dataset.""" if self.channel is not None and self.channel != key['name']: @@ -216,19 +226,28 @@ def get_dataset(self, key, info): if key['name'] == 'wqsf': dataset.attrs['_FillValue'] = 1 elif key['name'] == 'mask': - dataset = self.getbitmask(dataset) - + dataset = self.getbitmask(dataset, self.mask_items) dataset.attrs['platform_name'] = self.platform_name dataset.attrs['sensor'] = self.sensor dataset.attrs.update(key.to_dict()) + if self.unlog: + dataset = self.delog(dataset) + return dataset + def delog(self, data_array): + """Remove log10 from the units and values.""" + units = data_array.attrs["units"] + + if units.startswith("lg("): + data_array = 10 ** data_array + data_array.attrs["units"] = units.split("lg(re ")[1].strip(")") + return data_array + def getbitmask(self, wqsf, items=None): """Get the bitmask.""" if items is None: - items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", - "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL", - "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] + items = DEFAULT_MASK_ITEMS bflags = BitFlags(wqsf) return reduce(np.logical_or, [bflags[item] for item in items]) @@ -240,7 +259,7 @@ class NCOLCILowResData(NCOLCIBase): cols_name = "tie_columns" def __init__(self, filename, filename_info, filetype_info, - engine=None): + engine=None, **kwargs): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, engine) self.l_step = self.nc.attrs['al_subsampling_factor'] diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 7708af7b36..131fe39ad4 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -56,7 +56,7 @@ argument upon Scene creation:: import satpy - scene = satpy.Scene(filenames, + scene = satpy.Scene(filenames=filenames, reader='seviri_l1b_...', reader_kwargs={'calib_mode': 'GSICS'}) scene.load(['VIS006', 'IR_108']) @@ -138,7 +138,7 @@ * Raw metadata from the file header can be included by setting the reader argument ``include_raw_metadata=True`` (HRIT and Native format only). Note that this comes with a performance penalty of up to 10% if raw metadata from - multiple segments or scans need to be combined. By default arrays with more + multiple segments or scans need to be combined. By default, arrays with more than 100 elements are excluded to limit the performance penalty. 
This threshold can be adjusted using the ``mda_max_array_size`` reader keyword argument: @@ -158,7 +158,7 @@ https://www-cdn.eumetsat.int/files/2020-04/pdf_msg_seviri_rad2refl.pdf .. _MSG Level 1.5 Image Data Format Description: - https://www-cdn.eumetsat.int/files/2020-05/pdf_ten_05105_msg_img_data.pdf + https://www.eumetsat.int/media/45126 .. _Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent Spectral Blackbody Radiance: https://www-cdn.eumetsat.int/files/2020-04/pdf_ten_msg_seviri_rad_calib.pdf @@ -166,6 +166,7 @@ """ import warnings +from datetime import timedelta import dask.array as da import numpy as np @@ -214,7 +215,7 @@ VIS_CHANNELS = ['HRV', 'VIS006', 'VIS008', 'IR_016'] # Polynomial coefficients for spectral-effective BT fits -BTFIT = {} +BTFIT = dict() # [A, B, C] BTFIT['IR_039'] = [0.0, 1.011751900, -3.550400] BTFIT['WV_062'] = [0.00001805700, 1.000255533, -1.790930] @@ -230,7 +231,7 @@ 323: "10", 324: "11"} -CALIB = {} +CALIB = dict() # Meteosat 8 CALIB[321] = {'HRV': {'F': 78.7599}, @@ -680,13 +681,13 @@ def __init__(self, coefs, start_time, end_time): self.end_time = end_time def evaluate(self, time): - """Get satellite position in earth-centered cartesion coordinates. + """Get satellite position in earth-centered cartesian coordinates. Args: time: Timestamp where to evaluate the polynomial Returns: - Earth-centered cartesion coordinates (x, y, z) in meters + Earth-centered cartesian coordinates (x, y, z) in meters """ domain = [np.datetime64(self.start_time).astype('int64'), np.datetime64(self.end_time).astype('int64')] @@ -852,8 +853,8 @@ def calculate_area_extent(area_dict): east: Eastmost column number west: Westmost column number south: Southmost row number - column_step: Pixel resulution in meters in east-west direction - line_step: Pixel resulution in meters in soutth-north direction + column_step: Pixel resolution in meters in east-west direction + line_step: Pixel resolution in meters in south-north direction [column_offset: Column offset, defaults to 0 if not given] [line_offset: Line offset, defaults to 0 if not given] Returns: @@ -981,3 +982,19 @@ def mask_bad_quality(data, line_validity, line_geometric_quality, line_radiometr line_mask = line_mask[:, np.newaxis] data = data.where(~line_mask, np.nan).astype(np.float32) return data + + +def round_nom_time(dt, time_delta): + """Round a datetime object to a multiple of a timedelta. + + dt : datetime.datetime object to round. + time_delta : timedelta object; we round to a multiple of this. + + Adapted for SEVIRI from: + https://stackoverflow.com/questions/3463930/how-to-round-the-minute-of-a-datetime-object-python + """ + seconds = (dt - dt.min).seconds + round_to = time_delta.total_seconds() + + rounding = (seconds + round_to / 2) // round_to * round_to + + return dt + timedelta(0, rounding - seconds, -dt.microsecond) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 481a885518..1fedadb0e2 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -15,7 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. -r"""SEVIRI HRIT format reader. +r"""SEVIRI Level 1.5 HRIT format reader. Introduction ------------ @@ -46,7 +46,7 @@ Some arguments can be provided to the reader to change its behaviour.
These are provided through the `Scene` instantiation, eg:: - Scene(reader="seviri_l1b_hrit", filenames=fnames, reader_kwargs={'fill_hrv': False}) + scn = Scene(filenames=filenames, reader="seviri_l1b_hrit", reader_kwargs={'fill_hrv': False}) To see the full list of arguments that can be provided, look into the documentation of :class:`HRITMSGFileHandler`. @@ -60,6 +60,29 @@ This reader also accepts bzipped file with the extension ``.bz2`` for the prologue, epilogue, and segment files. +Nominal start/end time +---------------------- + +.. warning:: Attribute access has changed. + +``nominal_start_time`` and ``nominal_end_time`` should be accessed using the ``time_parameters`` attribute. + +``nominal_start_time`` and ``nominal_end_time`` are also available directly +via ``start_time`` and ``end_time`` respectively. + +Here is an example of the content of the start/end time and ``time_parameters`` attributes: + +.. code-block:: none + + Start time: 2019-08-29 12:00:00 + End time: 2019-08-29 12:15:00 + time_parameters: + {'nominal_start_time': datetime.datetime(2019, 8, 29, 12, 0), + 'nominal_end_time': datetime.datetime(2019, 8, 29, 12, 15), + 'observation_start_time': datetime.datetime(2019, 8, 29, 12, 0, 9, 338000), + 'observation_end_time': datetime.datetime(2019, 8, 29, 12, 15, 9, 203000) + } + Example ------- @@ -175,10 +198,14 @@ References: + - `EUMETSAT Product Navigator`_ - `MSG Level 1.5 Image Data Format Description`_ + - `fsspec`_ +.. _EUMETSAT Product Navigator: + https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:HRSEVIRI .. _MSG Level 1.5 Image Data Format Description: - https://www-cdn.eumetsat.int/files/2020-05/pdf_ten_05105_msg_img_data.pdf + https://www.eumetsat.int/media/45126 .. _fsspec: https://filesystem-spec.readthedocs.io """ @@ -187,7 +214,7 @@ import copy import logging -from datetime import datetime +from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -208,6 +235,7 @@ from satpy.readers.seviri_base import ( CHANNEL_NAMES, HRV_NUM_COLUMNS, + REPEAT_CYCLE_DURATION, SATNUM, NoValidOrbitParams, OrbitPolynomialFinder, @@ -218,6 +246,7 @@ get_satpos, mask_bad_quality, pad_data_horizontally, + round_nom_time, ) from satpy.readers.seviri_l1b_native_hdr import hrit_epilogue, hrit_prologue, impf_configuration from satpy.utils import get_legacy_chunk_size @@ -339,14 +368,12 @@ def satpos(self): Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ a, b = self.get_earth_radii() - start_time = self.prologue['ImageAcquisition'][ - 'PlannedAcquisitionTime']['TrueRepeatCycleStart'] poly_finder = OrbitPolynomialFinder(self.prologue['SatelliteStatus'][ 'Orbit']['OrbitPolynomial']) - orbit_polynomial = poly_finder.get_orbit_polynomial(start_time) + orbit_polynomial = poly_finder.get_orbit_polynomial(self.observation_start_time) return get_satpos( orbit_polynomial=orbit_polynomial, - time=start_time, + time=self.observation_start_time, semi_major_axis=a, semi_minor_axis=b, ) @@ -412,8 +439,8 @@ class HRITMSGFileHandler(HRITFileHandler): **Padding of the HRV channel** - By default, the HRV channel is loaded padded with no-data, that is it is - returned as a full-disk dataset. If you want the original, unpadded, data, + By default, the HRV channel is loaded padded with no-data, returning + a full-disk dataset.
If you want the original, unpadded data, just provide the `fill_hrv` as False in the `reader_kwargs`:: scene = satpy.Scene(filenames, @@ -449,7 +476,6 @@ def __init__(self, filename, filename_info, filetype_info, self.calib_mode = calib_mode self.ext_calib_coefs = ext_calib_coefs or {} self.mask_bad_quality_scan_lines = mask_bad_quality_scan_lines - self._get_header() def _get_header(self): @@ -490,30 +516,49 @@ def _get_header(self): self.mda['service'] = service self.channel_name = CHANNEL_NAMES[self.mda['spectral_channel_id']] + @property + def _repeat_cycle_duration(self): + """Get repeat cycle duration from epilogue.""" + if self.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + return 5 + return REPEAT_CYCLE_DURATION + @property def nominal_start_time(self): - """Get the start time.""" - return self.prologue['ImageAcquisition'][ + """Get the start time and round it according to scan law.""" + tm = self.prologue['ImageAcquisition'][ 'PlannedAcquisitionTime']['TrueRepeatCycleStart'] + return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): - """Get the end time.""" - return self.prologue['ImageAcquisition'][ + """Get the end time and round it according to scan law.""" + tm = self.prologue['ImageAcquisition'][ 'PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] + return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property - def start_time(self): - """Get the start time.""" + def observation_start_time(self): + """Get the observation start time.""" return self.epilogue['ImageProductionStats'][ 'ActualScanningSummary']['ForwardScanStart'] @property - def end_time(self): - """Get the end time.""" + def observation_end_time(self): + """Get the observation end time.""" return self.epilogue['ImageProductionStats'][ 'ActualScanningSummary']['ForwardScanEnd'] + @property + def start_time(self): + """Get general start time for this file.""" + return self.nominal_start_time + + @property + def end_time(self): + """Get the general end time for this file.""" + return self.nominal_end_time + def _get_area_extent(self, pdict): """Get the area extent of the file. @@ -529,7 +574,7 @@ def _get_area_extent(self, pdict): if not self.mda['offset_corrected']: # Geo-referencing offset present. Adjust area extent to match the shifted data. Note that we have to adjust - # the corners in the *opposite* direction, i.e. S-E. Think of it as if the coastlines were fixed and you + # the corners in the *opposite* direction, i.e. S-E. Think of it as if the coastlines were fixed, and you # dragged the image to S-E until coastlines and data area aligned correctly. 
# # Although the image is flipped upside-down and left-right, the projection coordinates retain their @@ -546,7 +591,7 @@ def get_area_def(self, dsid): # Common parameters for both HRV and other channels nlines = int(self.mda['number_of_lines']) loff = np.float32(self.mda['loff']) - pdict = {} + pdict = dict() pdict['cfac'] = np.int32(self.mda['cfac']) pdict['lfac'] = np.int32(self.mda['lfac']) pdict['coff'] = np.float32(self.mda['coff']) @@ -632,7 +677,7 @@ def get_dataset(self, key, info): res = self.calibrate(res, key['calibration']) is_calibration = key['calibration'] in ['radiance', 'reflectance', 'brightness_temperature'] - if (is_calibration and self.mask_bad_quality_scan_lines): # noqa: E129 + if is_calibration and self.mask_bad_quality_scan_lines: # noqa: E129 res = self._mask_bad_quality(res) if key['name'] == 'HRV' and self.fill_hrv: @@ -682,7 +727,7 @@ def calibrate(self, data, calibration): channel_name=self.channel_name, coefs=self._get_calib_coefs(self.channel_name), calib_mode=self.calib_mode, - scan_time=self.start_time + scan_time=self.observation_start_time ) res = calib.calibrate(data, calibration) logger.debug("Calibration time " + str(datetime.now() - tic)) @@ -722,8 +767,14 @@ def _update_attrs(self, res, info): res.attrs['standard_name'] = info['standard_name'] res.attrs['platform_name'] = self.platform_name res.attrs['sensor'] = 'seviri' - res.attrs['nominal_start_time'] = self.nominal_start_time - res.attrs['nominal_end_time'] = self.nominal_end_time + res.attrs['nominal_start_time'] = self.nominal_start_time + res.attrs['nominal_end_time'] = self.nominal_end_time + res.attrs['time_parameters'] = { + 'nominal_start_time': self.nominal_start_time, + 'nominal_end_time': self.nominal_end_time, + 'observation_start_time': self.observation_start_time, + 'observation_end_time': self.observation_end_time, + } res.attrs['orbital_parameters'] = { 'projection_longitude': self.mda['projection_parameters']['SSP_longitude'], 'projection_latitude': self.mda['projection_parameters']['SSP_latitude'], diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 33af8e4402..cdad865f0c 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -15,19 +15,91 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. -"""SEVIRI native format reader. +r"""SEVIRI Level 1.5 native format reader. + +Introduction +------------ +The ``seviri_l1b_native`` reader reads and calibrates MSG-SEVIRI L1.5 image data in binary format. The format is +explained in the `MSG Level 1.5 Native Format File Definition`_. The files are usually named as +follows: + +.. code-block:: none + + MSG4-SEVI-MSG15-0100-NA-20210302124244.185000000Z-NA.nat + +Reader Arguments +---------------- +Some arguments can be provided to the reader to change its behaviour. These are +provided through the `Scene` instantiation, eg:: + + scn = Scene(filenames=filenames, reader="seviri_l1b_native", reader_kwargs={'fill_disk': True}) + +To see the full list of arguments that can be provided, look into the documentation +of :class:`NativeMSGFileHandler`. + +Example +------- +Here is an example of how to read the data in satpy. + +NOTE: When loading the data, the orientation +of the image can be set with the ``upper_right_corner`` keyword. +Possible options are ``NW``, ``NE``, ``SW``, ``SE``, or ``native``. + +..
code-block:: python + + from satpy import Scene + + filenames = ['MSG4-SEVI-MSG15-0100-NA-20210302124244.185000000Z-NA.nat'] + scn = Scene(filenames=filenames, reader='seviri_l1b_native') + scn.load(['VIS006', 'IR_108'], upper_right_corner='NE') + print(scn['IR_108']) + + +Output: + +.. code-block:: none + + <xarray.DataArray 'IR_108' (y: 3712, x: 3712)> + dask.array<...> + Coordinates: + acq_time (y) datetime64[ns] NaT NaT NaT NaT NaT NaT ... NaT NaT NaT NaT NaT + crs object PROJCRS["unknown",BASEGEOGCRS["unknown",DATUM["unknown",... + * y (y) float64 -5.566e+06 -5.563e+06 ... 5.566e+06 5.569e+06 + * x (x) float64 5.566e+06 5.563e+06 5.56e+06 ... -5.566e+06 -5.569e+06 + Attributes: + orbital_parameters: {'projection_longitude': 0.0, 'projection_latit... + time_parameters: {'nominal_start_time': datetime.datetime(2021, ... + units: K + wavelength: 10.8 µm (9.8-11.8 µm) + standard_name: toa_brightness_temperature + platform_name: Meteosat-11 + sensor: seviri + georef_offset_corrected: True + start_time: 2021-03-02 12:30:11.584603 + end_time: 2021-03-02 12:45:09.949762 + reader: seviri_l1b_native + area: Area ID: msg_seviri_fes_3km\\nDescription: MSG S... + name: IR_108 + resolution: 3000.403165817 + calibration: brightness_temperature + modifiers: () + _satpy_id: DataID(name='IR_108', wavelength=WavelengthRang... + ancillary_variables: [] + References: + - `EUMETSAT Product Navigator`_ - `MSG Level 1.5 Native Format File Definition`_ +.. _EUMETSAT Product Navigator: + https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:HRSEVIRI .. _MSG Level 1.5 Native Format File Definition: https://www-cdn.eumetsat.int/files/2020-04/pdf_fg15_msg-native-format-15.pdf - """ import logging import warnings -from datetime import datetime +from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -42,6 +114,7 @@ CHANNEL_NAMES, HRV_NUM_COLUMNS, HRV_NUM_LINES, + REPEAT_CYCLE_DURATION, SATNUM, VISIR_NUM_COLUMNS, VISIR_NUM_LINES, @@ -56,6 +129,7 @@ get_satpos, pad_data_horizontally, pad_data_vertically, + round_nom_time, ) from satpy.readers.seviri_l1b_native_hdr import ( DEFAULT_15_SECONDARY_PRODUCT_HEADER, @@ -68,6 +142,7 @@ logger = logging.getLogger('native_msg') CHUNK_SIZE = get_legacy_chunk_size() +ASCII_STARTSWITH = b'FormatName : NATIVE' class NativeMSGFileHandler(BaseFileHandler): @@ -81,7 +156,7 @@ class NativeMSGFileHandler(BaseFileHandler): By providing the `fill_disk` as True in the `reader_kwargs`, the channel is loaded as full disk, padded with no-data where necessary. This is especially useful for the - HRV channel, but can also be used for RSS and ROI data. By default the original, + HRV channel, but can also be used for RSS and ROI data.
By default, the original, unpadded, data are loaded:: scene = satpy.Scene(filenames, @@ -115,39 +190,40 @@ def __init__(self, filename, filename_info, filetype_info, # Read header, prepare dask-array, read trailer and initialize image boundaries # Available channels are known only after the header has been read - self.header_type = get_native_header(self._has_archive_header()) + self.header_type = get_native_header(has_archive_header(self.filename)) self._read_header() self.dask_array = da.from_array(self._get_memmap(), chunks=(CHUNK_SIZE,)) self._read_trailer() self.image_boundaries = ImageBoundaries(self.header, self.trailer, self.mda) - def _has_archive_header(self): - """Check whether the file includes an ASCII archive header.""" - ascii_startswith = b'FormatName : NATIVE' - with open(self.filename, mode='rb') as istream: - return istream.read(36) == ascii_startswith + @property + def _repeat_cycle_duration(self): + """Get repeat cycle duration from the trailer.""" + if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + return 5 + return REPEAT_CYCLE_DURATION @property def nominal_start_time(self): - """Read the repeat cycle nominal start time from metadata.""" - return self.header['15_DATA_HEADER']['ImageAcquisition'][ - 'PlannedAcquisitionTime']['TrueRepeatCycleStart'] + """Get the repeat cycle nominal start time from file header and round it to expected nominal time slot.""" + tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['TrueRepeatCycleStart'] + return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): - """Read the repeat cycle nominal end time from metadata.""" - return self.header['15_DATA_HEADER']['ImageAcquisition'][ - 'PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] + """Get the repeat cycle nominal end time from file header and round it to expected nominal time slot.""" + tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] + return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): - """Read the repeat cycle sensing start time from metadata.""" + """Get observation start time from trailer.""" return self.trailer['15TRAILER']['ImageProductionStats'][ 'ActualScanningSummary']['ForwardScanStart'] @property def observation_end_time(self): - """Read the repeat cycle sensing end time from metadata.""" + """Get observation end time from trailer.""" return self.trailer['15TRAILER']['ImageProductionStats'][ 'ActualScanningSummary']['ForwardScanEnd'] @@ -211,10 +287,7 @@ def _get_memmap(self): def _read_header(self): """Read the header info.""" - data = np.fromfile(self.filename, - dtype=self.header_type, count=1) - - self.header.update(recarray2dict(data)) + self.header.update(read_header(self.filename)) if '15_SECONDARY_PRODUCT_HEADER' not in self.header: # No archive header, that means we have a complete file @@ -233,7 +306,7 @@ def _read_header(self): 'SatelliteStatus']['SatelliteDefinition']['SatelliteId'] self.mda['platform_name'] = "Meteosat-" + SATNUM[self.platform_id] self.mda['offset_corrected'] = data15hd['GeometricProcessing'][ - 'EarthModel']['TypeOfEarthModel'] == 2 + 'EarthModel']['TypeOfEarthModel'] == 2 equator_radius = data15hd['GeometricProcessing'][ 'EarthModel']['EquatorialRadius'] * 1000. 
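The ``nominal_start_time``/``nominal_end_time`` properties above delegate the rounding to the ``round_nom_time`` helper added to ``satpy/readers/seviri_base.py`` earlier in this diff. As a minimal standalone sketch of that rounding (the function body is copied from the diff; the sample timestamp is taken from the example filename above, and the printed result follows from the arithmetic):

.. code-block:: python

    from datetime import datetime, timedelta

    def round_nom_time(dt, time_delta):
        # Same logic as the helper added to satpy.readers.seviri_base:
        # round dt to the nearest multiple of time_delta.
        seconds = (dt - dt.min).seconds
        round_to = time_delta.total_seconds()
        rounding = (seconds + round_to / 2) // round_to * round_to
        return dt + timedelta(0, rounding - seconds, -dt.microsecond)

    # Scan start encoded in MSG4-SEVI-MSG15-0100-NA-20210302124244.185000000Z-NA.nat
    start = datetime(2021, 3, 2, 12, 42, 44, 185000)
    print(round_nom_time(start, timedelta(minutes=15)))
    # -> 2021-03-02 12:45:00, the nominal 15-minute repeat-cycle slot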
@@ -297,7 +370,9 @@ def _read_header(self): self.mda['hrv_number_of_lines'] = int(sec15hd["NumberLinesHRV"]['Value']) self.mda['hrv_number_of_columns'] = cols_hrv - if self.header['15_MAIN_PRODUCT_HEADER']['QQOV']['Value'] == 'NOK': + if '15_MAIN_PRODUCT_HEADER' not in self.header: + logger.info("Quality flag check was not possible due to missing 15_MAIN_PRODUCT_HEADER.") + elif self.header['15_MAIN_PRODUCT_HEADER']['QQOV']['Value'] == 'NOK': warnings.warn( "The quality flag for this file indicates not OK. " "Use this data with caution!", @@ -341,7 +416,7 @@ def get_area_def(self, dataset_id): definitions defined in the `areas.yaml` file correspond to the HRIT ones. """ - pdict = {} + pdict = dict() pdict['a'] = self.mda['projection_parameters']['a'] pdict['b'] = self.mda['projection_parameters']['b'] pdict['h'] = self.mda['projection_parameters']['h'] @@ -464,13 +539,13 @@ def get_area_extent(self, dataset_id): def is_roi(self): """Check if data covers a selected region of interest (ROI). - Standard RSS data consists of 3712 columns and 1392 lines, covering the three northmost segements + Standard RSS data consists of 3712 columns and 1392 lines, covering the three northmost segments of the SEVIRI disk. Hence, if the data does not cover the full disk, nor the standard RSS region in RSS mode, it's assumed to be ROI data. """ is_rapid_scan = self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] - # Standard RSS data is assumed to cover the three northmost segements, thus consisting of all 3712 columns and + # Standard RSS data is assumed to cover the three northmost segments, thus consisting of all 3712 columns and # the 1392 northmost lines nlines = int(self.mda['number_of_lines']) ncolumns = int(self.mda['number_of_columns']) @@ -509,7 +584,7 @@ def get_dataset(self, dataset_id, dataset_info): def _get_visir_channel(self, dataset_id): shape = (self.mda['number_of_lines'], self.mda['number_of_columns']) # Check if there is only 1 channel in the list as a change - # is needed in the arrray assignment ie channl id is not present + # is needed in the array assignment ie channel id is not present if len(self.mda['channel_list']) == 1: raw = self.dask_array['visir']['line_data'] else: @@ -541,7 +616,7 @@ def calibrate(self, data, dataset_id): channel_name=channel_name, coefs=self._get_calib_coefs(channel_name), calib_mode=self.calib_mode, - scan_time=self.start_time + scan_time=self.observation_start_time ) res = calib.calibrate(data, dataset_id['calibration']) logger.debug("Calibration time " + str(datetime.now() - tic)) @@ -559,7 +634,7 @@ def _get_calib_coefs(self, channel_name): coefs_gsics = self.header['15_DATA_HEADER'][ 'RadiometricProcessing']['MPEFCalFeedback'] radiance_types = self.header['15_DATA_HEADER']['ImageDescription'][ - 'Level15ImageProduction']['PlannedChanProcessing'] + 'Level15ImageProduction']['PlannedChanProcessing'] return create_coef_dict( coefs_nominal=( coefs_nominal['CalSlope'][band_idx], @@ -594,7 +669,7 @@ def _get_acq_time_hrv(self): def _get_acq_time_visir(self, dataset_id): """Get raw acquisition time for VIS/IR channels.""" # Check if there is only 1 channel in the list as a change - # is needed in the arrray assignment ie channl id is not present + # is needed in the array assignment, i.e. 
channel id is not present if len(self.mda['channel_list']) == 1: return self.dask_array['visir']['acq_time'].compute() i = self.mda['channel_list'].index(dataset_id['name']) @@ -652,7 +727,7 @@ def satpos(self): Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ poly_finder = OrbitPolynomialFinder(self.header['15_DATA_HEADER'][ - 'SatelliteStatus']['Orbit']['OrbitPolynomial']) + 'SatelliteStatus']['Orbit']['OrbitPolynomial']) orbit_polynomial = poly_finder.get_orbit_polynomial(self.start_time) return get_satpos( orbit_polynomial=orbit_polynomial, @@ -787,7 +862,7 @@ def pad_data(self, dataset): def _extract_data_to_pad(self, dataset, south_bound, north_bound): """Extract the data that shall be padded. - In case of FES (HRV) data, 'dataset' contains data from twoseparate windows that + In case of FES (HRV) data, 'dataset' contains data from two separate windows that are padded separately. Hence, we extract a subset of data. """ if self._is_full_disk: @@ -800,11 +875,24 @@ def _extract_data_to_pad(self, dataset, south_bound, north_bound): def get_available_channels(header): """Get the available channels from the header information.""" - chlist_str = header['15_SECONDARY_PRODUCT_HEADER'][ + channels_str = header['15_SECONDARY_PRODUCT_HEADER'][ 'SelectedBandIDs']['Value'] - retv = {} + available_channels = {} + + for idx, char in zip(range(12), channels_str): + available_channels[CHANNEL_NAMES[idx + 1]] = (char == 'X') + + return available_channels + + +def has_archive_header(filename): + """Check whether the file includes an ASCII archive header.""" + with open(filename, mode='rb') as istream: + return istream.read(36) == ASCII_STARTSWITH - for idx, char in zip(range(12), chlist_str): - retv[CHANNEL_NAMES[idx + 1]] = (char == 'X') - return retv +def read_header(filename): + """Read SEVIRI L1.5 native header.""" + dtype = get_native_header(has_archive_header(filename)) + hdr = np.fromfile(filename, dtype=dtype, count=1) + return recarray2dict(hdr) diff --git a/satpy/readers/seviri_l1b_nc.py b/satpy/readers/seviri_l1b_nc.py index ca91ceb761..ae56053114 100644 --- a/satpy/readers/seviri_l1b_nc.py +++ b/satpy/readers/seviri_l1b_nc.py @@ -19,6 +19,7 @@ import datetime import logging +from datetime import timedelta import numpy as np @@ -36,6 +37,7 @@ get_cds_time, get_satpos, mask_bad_quality, + round_nom_time, ) from satpy.utils import get_legacy_chunk_size @@ -69,15 +71,45 @@ def __init__(self, filename, filename_info, filetype_info, self.get_metadata() @property - def start_time(self): - """Get the start time.""" + def _repeat_cycle_duration(self): + """Get repeat cycle duration from the metadata.""" + if self.nc.attrs['nominal_image_scanning'] == 'T': + return 15 + elif self.nc.attrs['reduced_scanning'] == 'T': + return 5 + + @property + def nominal_start_time(self): + """Read the repeat cycle nominal start time from metadata and round it to expected nominal time slot.""" + tm = self.deltaSt + return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) + + @property + def nominal_end_time(self): + """Read the repeat cycle nominal end time from metadata and round it to expected nominal time slot.""" + tm = self.deltaEnd + return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) + + @property + def observation_start_time(self): + """Get the repeat cycle observation start time from metadata.""" return self.deltaSt @property - def end_time(self): - """Get the end time.""" + def observation_end_time(self): + """Get the 
repeat cycle observation end time from metadata.""" return self.deltaEnd + @property + def start_time(self): + """Get the general start time for this file.""" + return self.nominal_start_time + + @property + def end_time(self): + """Get the general end time for this file.""" + return self.nominal_end_time + @cached_property def nc(self): """Read the file.""" @@ -159,7 +191,7 @@ def calibrate(self, dataset, dataset_id): channel_name=channel, coefs=self._get_calib_coefs(dataset, channel), calib_mode='NOMINAL', - scan_time=self.start_time + scan_time=self.observation_start_time ) return calib.calibrate(dataset, calibration) @@ -204,6 +236,12 @@ def _update_attrs(self, dataset, dataset_info): ), 'satellite_nominal_latitude': 0.0, } + dataset.attrs['time_parameters'] = { + 'nominal_start_time': self.nominal_start_time, + 'nominal_end_time': self.nominal_end_time, + 'observation_start_time': self.observation_start_time, + 'observation_end_time': self.observation_end_time, + } try: actual_lon, actual_lat, actual_alt = self.satpos dataset.attrs['orbital_parameters'].update({ diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py index 7e5da3be51..31f6dea6d9 100644 --- a/satpy/readers/utils.py +++ b/satpy/readers/utils.py @@ -49,7 +49,7 @@ def np2str(value): value (ndarray): scalar or 1-element numpy array to convert Raises: - ValueError: if value is array larger than 1-element or it is not of + ValueError: if value is an array larger than 1-element, or it is not of type `numpy.string_` or it is not a numpy array """ diff --git a/satpy/scene.py b/satpy/scene.py index 4dbeed1fcb..ca42567590 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -48,6 +48,25 @@ LOG = logging.getLogger(__name__) +def _get_area_resolution(area): + """Attempt to retrieve resolution from AreaDefinition.""" + try: + resolution = max(area.pixel_size_x, area.pixel_size_y) + except AttributeError: + resolution = max(area.lats.attrs["resolution"], area.lons.attrs["resolution"]) + return resolution + + +def _aggregate_data_array(data_array, func, **coarsen_kwargs): + """Aggregate xr.DataArray.""" + res = data_array.coarsen(**coarsen_kwargs) + if callable(func): + out = res.reduce(func) + else: + out = getattr(res, func)() + return out + + class DelayedGeneration(KeyError): """Mark that a dataset can't be generated without further modification.""" @@ -762,10 +781,10 @@ def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean', Args: dataset_ids (iterable): DataIDs to include in the returned `Scene`. Defaults to all datasets. - func (string): Function to apply on each aggregation window. One of + func (string, callable): Function to apply on each aggregation window. One of 'mean', 'sum', 'min', 'max', 'median', 'argmin', - 'argmax', 'prod', 'std', 'var'. - 'mean' is the default. + 'argmax', 'prod', 'std', 'var' strings or a custom + function. 'mean' is the default. boundary: See :meth:`xarray.DataArray.coarsen`, 'trim' by default. side: See :meth:`xarray.DataArray.coarsen`, 'left' by default. dim_kwargs: the size of the windows to aggregate.
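Note: the docstring above now allows `func` to be either a reduction-method name or a callable, matching the `_aggregate_data_array` helper introduced in this hunk. A minimal, self-contained sketch of that contract, assuming nothing beyond numpy and xarray (the toy array, window sizes, and the use of `np.ptp` are illustrative only, not part of the patch):

import numpy as np
import xarray as xr


def _aggregate_data_array(data_array, func, **coarsen_kwargs):
    """Aggregate an xr.DataArray, mirroring the helper added above."""
    res = data_array.coarsen(**coarsen_kwargs)
    if callable(func):
        # Callables are applied per window via .reduce() and must accept
        # a numpy-style ``axis`` argument covering the window dimensions.
        return res.reduce(func)
    # Strings dispatch to the coarsen method of the same name.
    return getattr(res, func)()


arr = xr.DataArray(np.arange(16.0).reshape(4, 4), dims=("y", "x"))
mean_agg = _aggregate_data_array(arr, "mean", boundary="trim", x=2, y=2)  # built-in reduction
ptp_agg = _aggregate_data_array(arr, np.ptp, boundary="trim", x=2, y=2)   # custom callable

Routing both cases through one helper keeps `Scene.aggregate` itself free of the string-vs-callable branching, which is what the hunk below does.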
@@ -790,18 +809,16 @@ def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean', continue target_area = src_area.aggregate(boundary=boundary, **dim_kwargs) - try: - resolution = max(target_area.pixel_size_x, target_area.pixel_size_y) - except AttributeError: - resolution = max(target_area.lats.resolution, target_area.lons.resolution) + resolution = _get_area_resolution(target_area) for ds_id in ds_ids: - res = self[ds_id].coarsen(boundary=boundary, side=side, **dim_kwargs) - - new_scn._datasets[ds_id] = getattr(res, func)() + new_scn._datasets[ds_id] = _aggregate_data_array(self[ds_id], + func=func, + boundary=boundary, + side=side, + **dim_kwargs) new_scn._datasets[ds_id].attrs = self[ds_id].attrs.copy() new_scn._datasets[ds_id].attrs['area'] = target_area new_scn._datasets[ds_id].attrs['resolution'] = resolution - return new_scn def get(self, key, default=None): @@ -1131,7 +1148,9 @@ def to_xarray_dataset(self, datasets=None): Returns: :class:`xarray.Dataset` """ - dataarrays = self._get_dataarrays_from_identifiers(datasets) + from satpy._scene_converters import _get_dataarrays_from_identifiers + + dataarrays = _get_dataarrays_from_identifiers(self, datasets) if len(dataarrays) == 0: return xr.Dataset() @@ -1153,13 +1172,70 @@ def to_xarray_dataset(self, datasets=None): ds.attrs = mdata return ds - def _get_dataarrays_from_identifiers(self, identifiers): - if identifiers is not None: - dataarrays = [self[ds] for ds in identifiers] - else: - dataarrays = [self._datasets.get(ds) for ds in self._wishlist] - dataarrays = [ds for ds in dataarrays if ds is not None] - return dataarrays + def to_xarray(self, + datasets=None, # DataID + header_attrs=None, + exclude_attrs=None, + flatten_attrs=False, + pretty=True, + include_lonlats=True, + epoch=None, + include_orig_name=True, + numeric_name_prefix='CHANNEL_'): + """Merge all xr.DataArray(s) of a satpy.Scene to a CF-compliant xarray object. + + If all Scene DataArrays are on the same area, it returns an xr.Dataset. + If Scene DataArrays are on different areas, currently it fails, although + in the future we might return a DataTree object, grouped by area. + + Parameters + ---------- + datasets (iterable): + List of Satpy Scene datasets to include in the output xr.Dataset. + Elements can be a string name, a wavelength as a number, a DataID, + or a DataQuery object. + If None (the default), it includes all loaded Scene datasets. + header_attrs: + Global attributes of the output xr.Dataset. + epoch (str): + Reference time for encoding the time coordinates (if available). + Example format: "seconds since 1970-01-01 00:00:00". + If None, the default reference time is retrieved using "from satpy.cf_writer import EPOCH" + flatten_attrs (bool): + If True, flatten dict-type attributes. + exclude_attrs (list): + List of xr.DataArray attribute names to be excluded. + include_lonlats (bool): + If True, it includes 'latitude' and 'longitude' coordinates. + If the 'area' attribute is a SwathDefinition, it always includes + latitude and longitude coordinates. + pretty (bool): + Don't modify coordinate names, if possible. Makes the file prettier, + but possibly less consistent. + include_orig_name (bool): + Include the original dataset name as a variable attribute in the xr.Dataset. + numeric_name_prefix (str): + Prefix to add to each variable with a name starting with a digit. + Use '' or None to leave this out.
+ + Returns + ------- + ds : xr.Dataset + A CF-compliant xr.Dataset + + """ + from satpy._scene_converters import to_xarray + + return to_xarray(scn=self, + datasets=datasets, # DataID + header_attrs=header_attrs, + exclude_attrs=exclude_attrs, + flatten_attrs=flatten_attrs, + pretty=pretty, + include_lonlats=include_lonlats, + epoch=epoch, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) def images(self): """Generate images for all the datasets from the scene.""" @@ -1260,7 +1336,9 @@ def save_datasets(self, writer=None, filename=None, datasets=None, compute=True, close any objects that have a "close" method. """ - dataarrays = self._get_dataarrays_from_identifiers(datasets) + from satpy._scene_converters import _get_dataarrays_from_identifiers + + dataarrays = _get_dataarrays_from_identifiers(self, datasets) if not dataarrays: raise RuntimeError("None of the requested datasets have been " "generated or could not be loaded. Requested " diff --git a/satpy/tests/enhancement_tests/test_ahi.py b/satpy/tests/enhancement_tests/test_ahi.py deleted file mode 100644 index a1be64f52b..0000000000 --- a/satpy/tests/enhancement_tests/test_ahi.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2021 Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""Unit testing for the AHI enhancement function.""" - -import dask.array as da -import numpy as np -import xarray as xr - - -class TestAHIEnhancement: - """Test the AHI enhancement functions.""" - - def setup_method(self): - """Create test data.""" - data = da.arange(-100, 1000, 110).reshape(2, 5) - rgb_data = np.stack([data, data, data]) - self.rgb = xr.DataArray(rgb_data, dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}) - - def test_jma_true_color_reproduction(self): - """Test the jma_true_color_reproduction enhancement.""" - from trollimage.xrimage import XRImage - - from satpy.enhancements.ahi import jma_true_color_reproduction - - expected = [[[-109.98, 10.998, 131.976, 252.954, 373.932], - [494.91, 615.888, 736.866, 857.844, 978.822]], - - [[-97.6, 9.76, 117.12, 224.48, 331.84], - [439.2, 546.56, 653.92, 761.28, 868.64]], - - [[-94.27, 9.427, 113.124, 216.821, 320.518], - [424.215, 527.912, 631.609, 735.306, 839.003]]] - - img = XRImage(self.rgb) - jma_true_color_reproduction(img) - np.testing.assert_almost_equal(img.data.compute(), expected) diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index 8cef879352..4420f4ea9b 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -88,10 +88,12 @@ def setup_method(self): @pytest.mark.parametrize("input_data_name", ["ch1", "ch2", "rgb"]) def test_apply_enhancement(self, input_data_name, decorator, exp_call_cls): """Test the 'apply_enhancement' utility function.""" + def _enh_func(img): def _calc_func(data): assert isinstance(data, exp_call_cls) return data + decorated_func = decorator(_calc_func) return decorated_func(img.data) @@ -224,7 +226,7 @@ def test_merge_colormaps(self): cmap1 = {'colors': 'blues', 'min_value': 0, 'max_value': 1} kwargs = {'palettes': [cmap1]} - with mock.patch('satpy.enhancements.create_colormap', create_colormap_mock),\ + with mock.patch('satpy.enhancements.create_colormap', create_colormap_mock), \ mock.patch('trollimage.colormap.blues', ret_map): _ = mcp(kwargs) create_colormap_mock.assert_called_once() @@ -449,6 +451,7 @@ def test_cmap_list(self): def test_on_separate_bands(): """Test the `on_separate_bands` decorator.""" + def func(array, index, gain=2): return xr.DataArray(np.ones(array.shape, dtype=array.dtype) * index * gain, coords=array.coords, dims=array.dims, attrs=array.attrs) @@ -461,6 +464,7 @@ def func(array, index, gain=2): def test_using_map_blocks(): """Test the `using_map_blocks` decorator.""" + def func(np_array, block_info=None): value = block_info[0]['chunk-location'][-1] return np.ones(np_array.shape) * value @@ -474,6 +478,7 @@ def func(np_array, block_info=None): def test_on_dask_array(): """Test the `on_dask_array` decorator.""" + def func(dask_array): if not isinstance(dask_array, da.core.Array): pytest.fail("Array is not a dask array") @@ -493,83 +498,83 @@ def fake_area(): _nwcsaf_geo_props = { - 'cma_geo': ("geo", "cma", None, 'cma_pal', None, 'cloudmask', 'CMA', "uint8"), - 'cma_pps': ("pps", "cma", None, 'cma_pal', None, 'cloudmask', 'CMA', "uint8"), - 'cma_extended_pps': ("pps", "cma_extended", None, 'cma_extended_pal', None, - 'cloudmask_extended', 'CMA', "uint8"), - 'cmaprob_pps': ("pps", "cmaprob", None, 'cmaprob_pal', None, 'cloudmask_probability', - 'CMAPROB', "uint8"), - 'ct_geo': ("geo", "ct", None, 'ct_pal', None, 'cloudtype', 'CT', "uint8"), - 'ct_pps': ("pps", "ct", None, 'ct_pal', None, 'cloudtype', 'CT', "uint8"), - 
'ctth_alti_geo': ("geo", "ctth_alti", None, 'ctth_alti_pal', None, 'cloud_top_height', + 'cma_geo': ("geo", "cma", None, 'cma_pal', None, 'cloudmask', 'CMA', "uint8"), + 'cma_pps': ("pps", "cma", None, 'cma_pal', None, 'cloudmask', 'CMA', "uint8"), + 'cma_extended_pps': ("pps", "cma_extended", None, 'cma_extended_pal', None, + 'cloudmask_extended', 'CMA', "uint8"), + 'cmaprob_pps': ("pps", "cmaprob", None, 'cmaprob_pal', None, 'cloudmask_probability', + 'CMAPROB', "uint8"), + 'ct_geo': ("geo", "ct", None, 'ct_pal', None, 'cloudtype', 'CT', "uint8"), + 'ct_pps': ("pps", "ct", None, 'ct_pal', None, 'cloudtype', 'CT', "uint8"), + 'ctth_alti_geo': ("geo", "ctth_alti", None, 'ctth_alti_pal', None, 'cloud_top_height', + 'CTTH', "float64"), + 'ctth_alti_pps': ("pps", "ctth_alti", None, 'ctth_alti_pal', "ctth_status_flag", + 'cloud_top_height', 'CTTH', "float64"), + 'ctth_pres_geo': ("geo", "ctth_pres", None, 'ctth_pres_pal', None, 'cloud_top_pressure', + 'CTTH', "float64"), + 'ctth_pres_pps': ("pps", "ctth_pres", None, 'ctth_pres_pal', None, 'cloud_top_pressure', + 'CTTH', "float64"), + 'ctth_tempe_geo': ("geo", "ctth_tempe", None, 'ctth_tempe_pal', None, 'cloud_top_temperature', 'CTTH', "float64"), - 'ctth_alti_pps': ("pps", "ctth_alti", None, 'ctth_alti_pal', "ctth_status_flag", - 'cloud_top_height', 'CTTH', "float64"), - 'ctth_pres_geo': ("geo", "ctth_pres", None, 'ctth_pres_pal', None, 'cloud_top_pressure', + 'ctth_tempe_pps': ("pps", "ctth_tempe", None, 'ctth_tempe_pal', None, 'cloud_top_temperature', 'CTTH', "float64"), - 'ctth_pres_pps': ("pps", "ctth_pres", None, 'ctth_pres_pal', None, 'cloud_top_pressure', - 'CTTH', "float64"), - 'ctth_tempe_geo': ("geo", "ctth_tempe", None, 'ctth_tempe_pal', None, 'cloud_top_temperature', - 'CTTH', "float64"), - 'ctth_tempe_pps': ("pps", "ctth_tempe", None, 'ctth_tempe_pal', None, 'cloud_top_temperature', - 'CTTH', "float64"), - 'cmic_phase_geo': ("geo", "cmic_phase", None, 'cmic_phase_pal', None, 'cloud_top_phase', - 'CMIC', "uint8"), - 'cmic_phase_pps': ("pps", "cmic_phase", None, 'cmic_phase_pal', "cmic_status_flag", 'cloud_top_phase', - 'CMIC', "uint8"), - 'cmic_reff_geo': ("geo", "cmic_reff", None, 'cmic_reff_pal', None, 'cloud_drop_effective_radius', - 'CMIC', "float64"), - 'cmic_reff_pps': ("pps", "cmic_reff", "cmic_cre", 'cmic_cre_pal', "cmic_status_flag", - 'cloud_drop_effective_radius', 'CMIC', "float64"), - 'cmic_cot_geo': ("geo", "cmic_cot", None, 'cmic_cot_pal', None, 'cloud_optical_thickness', - 'CMIC', "float64"), - 'cmic_cot_pps': ("pps", "cmic_cot", None, 'cmic_cot_pal', None, 'cloud_optical_thickness', - 'CMIC', "float64"), - 'cmic_cwp_pps': ("pps", "cmic_cwp", None, 'cmic_cwp_pal', None, 'cloud_water_path', - 'CMIC', "float64"), - 'cmic_lwp_geo': ("geo", "cmic_lwp", None, 'cmic_lwp_pal', None, 'cloud_liquid_water_path', + 'cmic_phase_geo': ("geo", "cmic_phase", None, 'cmic_phase_pal', None, 'cloud_top_phase', + 'CMIC', "uint8"), + 'cmic_phase_pps': ("pps", "cmic_phase", None, 'cmic_phase_pal', "cmic_status_flag", 'cloud_top_phase', + 'CMIC', "uint8"), + 'cmic_reff_geo': ("geo", "cmic_reff", None, 'cmic_reff_pal', None, 'cloud_drop_effective_radius', 'CMIC', "float64"), - 'cmic_lwp_pps': ("pps", "cmic_lwp", None, 'cmic_lwp_pal', None, 'liquid_water_path', - 'CMIC', "float64"), - 'cmic_iwp_geo': ("geo", "cmic_iwp", None, 'cmic_iwp_pal', None, 'cloud_ice_water_path', - 'CMIC', "float64"), - 'cmic_iwp_pps': ("pps", "cmic_iwp", None, 'cmic_iwp_pal', None, 'ice_water_path', - 'CMIC', "float64"), - 'pc': ("geo", "pc", None, 'pc_pal', None, 
'precipitation_probability', 'PC', "uint8"), - 'crr': ("geo", "crr", None, 'crr_pal', None, 'convective_rain_rate', 'CRR', "uint8"), - 'crr_accum': ("geo", "crr_accum", None, 'crr_pal', None, - 'convective_precipitation_hourly_accumulation', 'CRR', "uint8"), - 'ishai_tpw': ("geo", "ishai_tpw", None, 'ishai_tpw_pal', None, 'total_precipitable_water', - 'iSHAI', "float64"), - 'ishai_shw': ("geo", "ishai_shw", None, 'ishai_shw_pal', None, 'showalter_index', - 'iSHAI', "float64"), - 'ishai_li': ("geo", "ishai_li", None, 'ishai_li_pal', None, 'lifted_index', + 'cmic_reff_pps': ("pps", "cmic_reff", "cmic_cre", 'cmic_cre_pal', "cmic_status_flag", + 'cloud_drop_effective_radius', 'CMIC', "float64"), + 'cmic_cot_geo': ("geo", "cmic_cot", None, 'cmic_cot_pal', None, 'cloud_optical_thickness', + 'CMIC', "float64"), + 'cmic_cot_pps': ("pps", "cmic_cot", None, 'cmic_cot_pal', None, 'cloud_optical_thickness', + 'CMIC', "float64"), + 'cmic_cwp_pps': ("pps", "cmic_cwp", None, 'cmic_cwp_pal', None, 'cloud_water_path', + 'CMIC', "float64"), + 'cmic_lwp_geo': ("geo", "cmic_lwp", None, 'cmic_lwp_pal', None, 'cloud_liquid_water_path', + 'CMIC', "float64"), + 'cmic_lwp_pps': ("pps", "cmic_lwp", None, 'cmic_lwp_pal', None, 'liquid_water_path', + 'CMIC', "float64"), + 'cmic_iwp_geo': ("geo", "cmic_iwp", None, 'cmic_iwp_pal', None, 'cloud_ice_water_path', + 'CMIC', "float64"), + 'cmic_iwp_pps': ("pps", "cmic_iwp", None, 'cmic_iwp_pal', None, 'ice_water_path', + 'CMIC', "float64"), + 'pc': ("geo", "pc", None, 'pc_pal', None, 'precipitation_probability', 'PC', "uint8"), + 'crr': ("geo", "crr", None, 'crr_pal', None, 'convective_rain_rate', 'CRR', "uint8"), + 'crr_accum': ("geo", "crr_accum", None, 'crr_pal', None, + 'convective_precipitation_hourly_accumulation', 'CRR', "uint8"), + 'ishai_tpw': ("geo", "ishai_tpw", None, 'ishai_tpw_pal', None, 'total_precipitable_water', + 'iSHAI', "float64"), + 'ishai_shw': ("geo", "ishai_shw", None, 'ishai_shw_pal', None, 'showalter_index', 'iSHAI', "float64"), - 'ci_prob30': ("geo", "ci_prob30", None, 'ci_pal', None, 'convection_initiation_prob30', - 'CI', "float64"), - 'ci_prob60': ("geo", "ci_prob60", None, 'ci_pal', None, 'convection_initiation_prob60', - 'CI', "float64"), - 'ci_prob90': ("geo", "ci_prob90", None, 'ci_pal', None, 'convection_initiation_prob90', - 'CI', "float64"), - 'asii_turb_trop_prob': ("geo", "asii_turb_trop_prob", None, 'asii_turb_prob_pal', None, - 'asii_prob', 'ASII-NG', "float64"), - 'MapCellCatType': ("geo", "MapCellCatType", None, 'MapCellCatType_pal', None, - 'rdt_cell_type', 'RDT-CW', "uint8"), - } + 'ishai_li': ("geo", "ishai_li", None, 'ishai_li_pal', None, 'lifted_index', + 'iSHAI', "float64"), + 'ci_prob30': ("geo", "ci_prob30", None, 'ci_pal', None, 'convection_initiation_prob30', + 'CI', "float64"), + 'ci_prob60': ("geo", "ci_prob60", None, 'ci_pal', None, 'convection_initiation_prob60', + 'CI', "float64"), + 'ci_prob90': ("geo", "ci_prob90", None, 'ci_pal', None, 'convection_initiation_prob90', + 'CI', "float64"), + 'asii_turb_trop_prob': ("geo", "asii_turb_trop_prob", None, 'asii_turb_prob_pal', None, + 'asii_prob', 'ASII-NG', "float64"), + 'MapCellCatType': ("geo", "MapCellCatType", None, 'MapCellCatType_pal', None, + 'rdt_cell_type', 'RDT-CW', "uint8"), +} @pytest.mark.parametrize( - "data", - ['cma_geo', 'cma_pps', 'cma_extended_pps', 'cmaprob_pps', 'ct_geo', - 'ct_pps', 'ctth_alti_geo', 'ctth_alti_pps', 'ctth_pres_geo', - 'ctth_pres_pps', 'ctth_tempe_geo', 'ctth_tempe_pps', - 'cmic_phase_geo', 'cmic_phase_pps', 'cmic_reff_geo', - 
'cmic_reff_pps', 'cmic_cot_geo', 'cmic_cot_pps', 'cmic_cwp_pps', - 'cmic_lwp_geo', 'cmic_lwp_pps', 'cmic_iwp_geo', 'cmic_iwp_pps', - 'pc', 'crr', 'crr_accum', 'ishai_tpw', 'ishai_shw', 'ishai_li', - 'ci_prob30', 'ci_prob60', 'ci_prob90', 'asii_turb_trop_prob', - 'MapCellCatType'] - ) + "data", + ['cma_geo', 'cma_pps', 'cma_extended_pps', 'cmaprob_pps', 'ct_geo', + 'ct_pps', 'ctth_alti_geo', 'ctth_alti_pps', 'ctth_pres_geo', + 'ctth_pres_pps', 'ctth_tempe_geo', 'ctth_tempe_pps', + 'cmic_phase_geo', 'cmic_phase_pps', 'cmic_reff_geo', + 'cmic_reff_pps', 'cmic_cot_geo', 'cmic_cot_pps', 'cmic_cwp_pps', + 'cmic_lwp_geo', 'cmic_lwp_pps', 'cmic_iwp_geo', 'cmic_iwp_pps', + 'pc', 'crr', 'crr_accum', 'ishai_tpw', 'ishai_shw', 'ishai_li', + 'ci_prob30', 'ci_prob60', 'ci_prob90', 'asii_turb_trop_prob', + 'MapCellCatType'] +) def test_nwcsaf_comps(fake_area, tmp_path, data): """Test loading NWCSAF composites.""" from satpy.writers import get_enhanced_image @@ -589,40 +594,41 @@ def test_nwcsaf_comps(fake_area, tmp_path, data): # create a minimally fake netCDF file, otherwise satpy won't load the # composite ds = xr.Dataset( - coords={"nx": [0], "ny": [0]}, - attrs={ - "source": "satpy unit test", - "time_coverage_start": "0001-01-01T00:00:00Z", - "time_coverage_end": "0001-01-01T01:00:00Z", - } - ) + coords={"nx": [0], "ny": [0]}, + attrs={ + "source": "satpy unit test", + "time_coverage_start": "0001-01-01T00:00:00Z", + "time_coverage_end": "0001-01-01T01:00:00Z", + } + ) ds.attrs.update(id_) ds.to_netcdf(fk) sc = Scene(filenames=[os.fspath(fk)], reader=[reader]) sc[palettename] = xr.DataArray( - da.tile(da.arange(256), [3, 1]).T, - dims=("pal02_colors", "pal_RGB")) + da.tile(da.arange(256), [3, 1]).T, + dims=("pal02_colors", "pal_RGB")) fake_alti = da.linspace(rng[0], rng[1], 4, chunks=2, dtype=dtp).reshape(2, 2) ancvars = [sc[palettename]] if statusname is not None: sc[statusname] = xr.DataArray( - da.zeros(shape=(2, 2), dtype="uint8"), - attrs={ - "area": fake_area, - "_FillValue": 123}, - dims=("y", "x")) - ancvars.append(sc[statusname]) - sc[dvname] = xr.DataArray( - fake_alti, - dims=("y", "x"), + da.zeros(shape=(2, 2), dtype="uint8"), attrs={ "area": fake_area, - "scaled_FillValue": 123, - "ancillary_variables": ancvars, - "valid_range": rng}) + "_FillValue": 123}, + dims=("y", "x")) + ancvars.append(sc[statusname]) + sc[dvname] = xr.DataArray( + fake_alti, + dims=("y", "x"), + attrs={ + "area": fake_area, + "scaled_FillValue": 123, + "ancillary_variables": ancvars, + "valid_range": rng}) def _fake_get_varname(info, info_type="file_key"): return altname or dvname + with mock.patch("satpy.readers.nwcsaf_nc.NcNWCSAF._get_varname_in_file") as srnN_: srnN_.side_effect = _fake_get_varname sc.load([comp]) @@ -632,7 +638,49 @@ def _fake_get_varname(info, info_type="file_key"): np.testing.assert_array_equal(im.data.coords["bands"], ["P"]) if dtp == "float64": np.testing.assert_allclose( - im.data.sel(bands="P"), - ((fake_alti - rng[0]) * (255/np.ptp(rng))).round()) + im.data.sel(bands="P"), + ((fake_alti - rng[0]) * (255 / np.ptp(rng))).round()) else: np.testing.assert_allclose(im.data.sel(bands="P"), fake_alti) + + +class TestTCREnhancement: + """Test the AHI enhancement functions.""" + + def setup_method(self): + """Create test data.""" + data = da.arange(-100, 1000, 110).reshape(2, 5) + rgb_data = np.stack([data, data, data]) + self.rgb = xr.DataArray(rgb_data, dims=('bands', 'y', 'x'), + coords={'bands': ['R', 'G', 'B']}, + attrs={'platform_name': 'Himawari-8'}) + + def 
test_jma_true_color_reproduction(self): + """Test the jma_true_color_reproduction enhancement.""" + from trollimage.xrimage import XRImage + + from satpy.enhancements import jma_true_color_reproduction + + expected = [[[-109.93, 10.993, 131.916, 252.839, 373.762], + [494.685, 615.608, 736.531, 857.454, 978.377]], + + [[-97.73, 9.773, 117.276, 224.779, 332.282], + [439.785, 547.288, 654.791, 762.294, 869.797]], + + [[-93.29, 9.329, 111.948, 214.567, 317.186], + [419.805, 522.424, 625.043, 727.662, 830.281]]] + + img = XRImage(self.rgb) + jma_true_color_reproduction(img) + + np.testing.assert_almost_equal(img.data.compute(), expected) + + self.rgb.attrs['platform_name'] = None + img = XRImage(self.rgb) + with pytest.raises(ValueError, match="Missing platform name."): + jma_true_color_reproduction(img) + + self.rgb.attrs['platform_name'] = 'Fakesat' + img = XRImage(self.rgb) + with pytest.raises(KeyError, match="No conversion matrix found for platform Fakesat"): + jma_true_color_reproduction(img) diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py index 4990edbe5d..cd5082a5b7 100644 --- a/satpy/tests/modifier_tests/test_angles.py +++ b/satpy/tests/modifier_tests/test_angles.py @@ -104,6 +104,10 @@ def _get_angle_test_data_odd_chunks(): return _get_angle_test_data(chunks=((2, 1, 2), (1, 1, 2, 1))) +def _get_angle_test_data_odd_chunks2(): + return _get_angle_test_data(chunks=((1, 4), (2, 3))) + + def _similar_sat_pos_datetime(orig_data, lon_offset=0.04): # change data slightly new_data = orig_data.copy() @@ -226,6 +230,7 @@ def test_get_angles_satpos_preference(self, forced_preference): (_get_angle_test_data, 9, ((2, 2, 1), (2, 2, 1))), (_get_stacked_angle_test_data, 3, ((5,), (2, 2, 1))), (_get_angle_test_data_odd_chunks, 9, ((2, 1, 2), (1, 1, 2, 1))), + (_get_angle_test_data_odd_chunks2, 4, ((1, 4), (2, 3))), (_get_angle_test_data_rgb, 9, ((2, 2, 1), (2, 2, 1))), (_get_angle_test_data_rgb_nodims, 9, ((2, 2, 1), (2, 2, 1))), ]) diff --git a/satpy/tests/modifier_tests/test_filters.py b/satpy/tests/modifier_tests/test_filters.py new file mode 100644 index 0000000000..62e732d300 --- /dev/null +++ b/satpy/tests/modifier_tests/test_filters.py @@ -0,0 +1,35 @@ +"""Tests for image filters.""" + +import logging + +import dask.array as da +import numpy as np +import xarray as xr + +from satpy.modifiers.filters import Median + + +def test_median(caplog): + """Test the median filter modifier.""" + caplog.set_level(logging.DEBUG) + dims = "y", "x" + coordinates = dict(x=np.arange(6), y=np.arange(6)) + attrs = dict(units="K") + median_filter_params = dict(size=3) + name = "median_filter" + median_filter = Median(median_filter_params, name=name) + array = xr.DataArray(da.arange(36).reshape((6, 6)), coords=coordinates, dims=dims, attrs=attrs) + res = median_filter([array]) + filtered_array = np.array([[1, 2, 3, 4, 5, 5], + [6, 7, 8, 9, 10, 11], + [12, 13, 14, 15, 16, 17], + [18, 19, 20, 21, 22, 23], + [24, 25, 26, 27, 28, 29], + [30, 30, 31, 32, 33, 34]]) + np.testing.assert_allclose(res, filtered_array) + assert res.dims == dims + assert attrs.items() <= res.attrs.items() + assert res.attrs["name"] == name + np.testing.assert_equal(res.coords["x"], coordinates["x"]) + np.testing.assert_equal(res.coords["y"], coordinates["y"]) + assert "Apply median filtering with parameters {'size': 3}" in caplog.text diff --git a/satpy/tests/reader_tests/gms/__init__.py b/satpy/tests/reader_tests/gms/__init__.py new file mode 100644 index 0000000000..d37bb755ca
--- /dev/null +++ b/satpy/tests/reader_tests/gms/__init__.py @@ -0,0 +1 @@ +"""Unit tests for GMS reader.""" diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_data.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_data.py new file mode 100644 index 0000000000..5ddf13438e --- /dev/null +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_data.py @@ -0,0 +1,812 @@ +"""Real world test data for GMS-5 VISSR unit tests.""" + +import numpy as np + +import satpy.readers.gms.gms5_vissr_format as fmt + +ATTITUDE_PREDICTION = np.array( + [ + ( + 50130.93055556, + (19960217, 222000), + 3.14911863, + 0.00054604, + 4.3324597, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.93402778, + (19960217, 222500), + 3.14911863, + 0.00054604, + 4.31064812, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.9375, + (19960217, 223000), + 3.14911863, + 0.00054604, + 4.28883633, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.94097222, + (19960217, 223500), + 3.14911863, + 0.00054604, + 4.26702432, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.94444444, + (19960217, 224000), + 3.14911863, + 0.00054604, + 4.2452121, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.94791667, + (19960217, 224500), + 3.14911863, + 0.00054604, + 4.22339966, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.95138889, + (19960217, 225000), + 3.14911863, + 0.00054604, + 4.201587, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.95486111, + (19960217, 225500), + 3.14911863, + 0.00054604, + 4.17977411, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.95833333, + (19960217, 230000), + 3.14911863, + 0.00054604, + 4.157961, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.96180556, + (19960217, 230500), + 3.14911863, + 0.00054604, + 4.13614765, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.96527778, + (19960217, 231000), + 3.14911863, + 0.00054604, + 4.11433408, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.96875, + (19960217, 231500), + 3.14911863, + 0.00054604, + 4.09252027, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.97222222, + (19960217, 232000), + 3.14911863, + 0.00054604, + 4.07070622, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.97569444, + (19960217, 232500), + 3.14911863, + 0.00054604, + 4.04889193, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.97916667, + (19960217, 233000), + 3.14911863, + 0.00054604, + 4.02707741, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.98263889, + (19960217, 233500), + 3.14911863, + 0.00054604, + 4.00526265, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.98611111, + (19960217, 234000), + 3.14911863, + 0.00054604, + 3.98344765, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.98958333, + (19960217, 234500), + 3.14911863, + 0.00054604, + 3.96163241, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.99305556, + (19960217, 235000), + 3.14911863, + 0.00054604, + 3.93981692, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.99652778, + (19960217, 235500), + 3.14911863, + 0.00054604, + 3.9180012, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 
0, + 0, + ), + ( + 50131.0, + (19960218, 0), + 3.14911863, + 0.00054604, + 3.89618523, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.00347222, + (19960218, 500), + 3.14911863, + 0.00054604, + 3.87436903, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.00694444, + (19960218, 1000), + 3.14911863, + 0.00054604, + 3.85255258, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.01041667, + (19960218, 1500), + 3.14911863, + 0.00054604, + 3.8307359, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.01388889, + (19960218, 2000), + 3.14911863, + 0.00054604, + 3.80891898, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.01736111, + (19960218, 2500), + 3.14911863, + 0.00054604, + 3.78710182, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.02083333, + (19960218, 3000), + 3.14911863, + 0.00054604, + 3.76528442, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.02430556, + (19960218, 3500), + 3.14911863, + 0.00054604, + 3.74346679, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.02777778, + (19960218, 4000), + 3.14911863, + 0.00054604, + 3.72164893, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.03125, + (19960218, 4500), + 3.14911863, + 0.00054604, + 3.69983084, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.03472222, + (19960218, 5000), + 3.14911863, + 0.00054604, + 3.67801252, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.03819444, + (19960218, 5500), + 3.14911863, + 0.00054604, + 3.65619398, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.04166667, + (19960218, 10000), + 3.14911863, + 0.00054604, + 3.63437521, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ], + dtype=fmt.ATTITUDE_PREDICTION_DATA, +) + +ORBIT_PREDICTION_1 = np.array( + [ + ( + 50130.96180556, + (960217, 230500), + [2247604.14185506, -42110997.39399951, -276688.79765022], + [3069.77904265, 164.12584895, 3.65437628], + [-32392525.09983424, 27002204.93121811, -263873.25702763], + [0.81859376, 0.6760037, 17.44588753], + 133.46391815, + (330.12326803, -12.19424863), + (197.27884747, -11.96904141), + [ + [9.99936382e-01, 1.03449318e-02, 4.49611916e-03], + [-1.03447475e-02, 9.99946490e-01, -6.42483646e-05], + [-4.49654321e-03, 1.77330598e-05, 9.99989890e-01], + ], + [2.46885475e08, -2.07840219e08, -7.66028692e07], + (-0.35887085, 140.18562594, 35793706.31768975), + 0, + 0, + ), + ( + 50130.96527778, + (960217, 231000), + [3167927.33749398, -42051692.51095297, -275526.52514815], + [3065.46435995, 231.22434208, 4.09379482], + [-32392279.4626506, 27002405.27592725, -258576.96255205], + [0.81939962, 0.66017389, 17.86159393], + 134.71734048, + (330.12643276, -12.19310271), + (196.02858456, -11.9678881), + [ + [9.99936382e-01, 1.03449336e-02, 4.49611993e-03], + [-1.03447493e-02, 9.99946490e-01, -6.42473793e-05], + [-4.49654398e-03, 1.77320586e-05, 9.99989890e-01], + ], + [2.46204142e08, -2.07689897e08, -7.65268207e07], + (-0.35166851, 140.18520316, 35793613.0815237), + 0, + 0, + ), + ( + 50130.96875, + (960217, 231500), + [4086736.12968183, -41972273.80964861, -274232.7185828], + [3059.68341675, 298.21262775, 4.53123515], + [-32392033.65156128, 27002600.83510851, -253157.23498394], + [0.81975174, 0.6441, 18.26873686], + 135.97076281, + (330.12959087, -12.19195587), + 
(194.77831505, -11.96673388), + [ + [9.99936382e-01, 1.03449353e-02, 4.49612071e-03], + [-1.03447510e-02, 9.99946490e-01, -6.42463940e-05], + [-4.49654474e-03, 1.77310575e-05, 9.99989890e-01], + ], + [2.45524133e08, -2.07559497e08, -7.64508451e07], + (-0.3442983, 140.18478523, 35793516.57370046), + 0, + 0, + ), + ( + 50130.97222222, + (960217, 232000), + [5003591.03339227, -41872779.15809826, -272808.0027587], + [3052.43895532, 365.05867777, 4.9664885], + [-32391787.80234722, 27002791.53735474, -247616.67261456], + [0.81965461, 0.62779672, 18.66712192], + 137.22418515, + (330.13274246, -12.19080808), + (193.52803902, -11.9655787), + [ + [9.99936382e-01, 1.03449371e-02, 4.49612148e-03], + [-1.03447528e-02, 9.99946490e-01, -6.42454089e-05], + [-4.49654551e-03, 1.77300565e-05, 9.99989890e-01], + ], + [2.44845888e08, -2.07448982e08, -7.63749418e07], + (-0.33676374, 140.18437233, 35793416.91561355), + 0, + 0, + ), + ( + 50130.97569444, + (960217, 232500), + [5918053.49286455, -41753256.02295399, -271253.06495935], + [3043.73441705, 431.73053079, 5.39934712], + [-32391542.0492856, 27002977.3157848, -241957.93142027], + [0.81911313, 0.61127876, 19.05655891], + 138.47760748, + (330.13588763, -12.1896593), + (192.27775657, -11.96442254), + [ + [9.99936382e-01, 1.03449388e-02, 4.49612225e-03], + [-1.03447545e-02, 9.99946490e-01, -6.42444238e-05], + [-4.49654627e-03, 1.77290557e-05, 9.99989890e-01], + ], + [2.44169846e08, -2.07358303e08, -7.62991102e07], + (-0.32906846, 140.18396465, 35793314.23041636), + 0, + 0, + ), + ( + 50130.97916667, + (960217, 233000), + [6829686.08751574, -41613761.44760592, -269568.65462124], + [3033.5739409, 498.19630731, 5.82960444], + [-32391296.52466749, 27003158.10847847, -236183.72381214], + [0.81813262, 0.59456087, 19.43686189], + 139.73102981, + (330.1390265, -12.18850951), + (191.02746783, -11.96326537), + [ + [9.99936382e-01, 1.03449406e-02, 4.49612302e-03], + [-1.03447563e-02, 9.99946490e-01, -6.42434389e-05], + [-4.49654703e-03, 1.77280550e-05, 9.99989890e-01], + ], + [2.43496443e08, -2.07287406e08, -7.62233495e07], + (-0.32121612, 140.18356238, 35793208.6428103), + 0, + 0, + ), + ( + 50130.98263889, + (960217, 233500), + [7738052.74476409, -41454362.02480648, -267755.58296603], + [3021.96236148, 564.42422513, 6.25705512], + [-32391051.35918404, 27003333.85786499, -230296.81731314], + [0.81671881, 0.57765777, 19.80784932], + 140.98445214, + (330.14215916, -12.18735869), + (189.77717289, -11.96210717), + [ + [9.99936381e-01, 1.03449423e-02, 4.49612379e-03], + [-1.03447580e-02, 9.99946489e-01, -6.42424541e-05], + [-4.49654778e-03, 1.77270545e-05, 9.99989890e-01], + ], + [2.42826115e08, -2.07236222e08, -7.61476592e07], + (-0.3132105, 140.18316567, 35793100.27882991), + 0, + 0, + ), + ( + 50130.98611111, + (960217, 234000), + [8642718.9445816, -41275133.86582235, -265814.72261683], + [3008.90520686, 630.38261431, 6.68149519], + [-32390806.68247503, 27003504.50991426, -224300.03325666], + [0.81487783, 0.56058415, 20.16934411], + 142.23787447, + (330.14528573, -12.18620679), + (188.52687186, -11.9609479), + [ + [9.99936381e-01, 1.03449440e-02, 4.49612456e-03], + [-1.03447598e-02, 9.99946489e-01, -6.42414694e-05], + [-4.49654854e-03, 1.77260540e-05, 9.99989890e-01], + ], + [2.42159297e08, -2.07204676e08, -7.60720382e07], + (-0.30505542, 140.18277471, 35792989.2656269), + 0, + 0, + ), + ( + 50130.98958333, + (960217, 234500), + [9543251.93095296, -41076162.56379041, -263747.00717057], + [2994.40869593, 696.03993248, 7.10272213], + [-32390562.62077149, 27003670.01680953, 
-218196.24541058], + [0.81261619, 0.54335463, 20.52117372], + 143.4912968, + (330.14840632, -12.18505381), + (187.27656486, -11.95978754), + [ + [9.99936381e-01, 1.03449458e-02, 4.49612532e-03], + [-1.03447615e-02, 9.99946489e-01, -6.42404848e-05], + [-4.49654930e-03, 1.77250538e-05, 9.99989890e-01], + ], + [2.41496422e08, -2.07192684e08, -7.59964859e07], + (-0.29675479, 140.18238966, 35792875.73125207), + 0, + 0, + ), + ], + dtype=fmt.ORBIT_PREDICTION_DATA, +) + +ORBIT_PREDICTION_2 = np.array( + [ + ( + 50130.99305556, + (960217, 235000), + [10439220.91492008, -40857543.15396438, -261553.43075696], + [2978.47973561, 761.36477969, 7.52053495], + [-32390319.30020279, 27003830.33282405, -211988.37862591], + [0.80994076, 0.52598377, 20.86317023], + 144.74471913, + (330.15152105, -12.1838997), + (186.026252, -11.95862606), + [ + [9.99936381e-01, 1.03449475e-02, 4.49612609e-03], + [-1.03447632e-02, 9.99946489e-01, -6.42395003e-05], + [-4.49655005e-03, 1.77240537e-05, 9.99989890e-01], + ], + [2.40837919e08, -2.07200148e08, -7.59210011e07], + (-0.28831259, 140.18201066, 35792759.80443729), + 0, + 0, + ), + ( + 50130.99652778, + (960217, 235500), + [11330197.2840407, -40619380.06793167, -259235.04755252], + [2961.12591755, 826.32591367, 7.93473432], + [-32390076.84311398, 27003985.41857829, -205679.40741202], + [0.80685878, 0.50848599, 21.19517045], + 145.99814147, + (330.15463004, -12.18274445), + (184.77593341, -11.95746344), + [ + [9.99936381e-01, 1.03449492e-02, 4.49612685e-03], + [-1.03447650e-02, 9.99946489e-01, -6.42385159e-05], + [-4.49655080e-03, 1.77230537e-05, 9.99989890e-01], + ], + [2.40184218e08, -2.07226967e08, -7.58455830e07], + (-0.27973286, 140.18163787, 35792641.6143761), + 0, + 0, + ), + ( + 50131.0, + (960218, 0), + [12215754.80493221, -40361787.08463053, -256792.97127933], + [2942.35551459, 890.89226454, 8.34512262], + [-32389835.37113104, 27004135.23720251, -199272.35452792], + [0.8033778, 0.49087558, 21.51701595], + 147.2515638, + (330.15773341, -12.18158803), + (183.5256092, -11.95629965), + [ + [9.99936381e-01, 1.03449510e-02, 4.49612761e-03], + [-1.03447667e-02, 9.99946489e-01, -6.42375317e-05], + [-4.49655155e-03, 1.77220539e-05, 9.99989890e-01], + ], + [2.39535744e08, -2.07273025e08, -7.57702305e07], + (-0.2710197, 140.18127143, 35792521.29050537), + 0, + 0, + ), + ( + 50131.00347222, + (960218, 500), + [13095469.82708225, -40084887.27645436, -254228.37467049], + [2922.17747695, 955.03294974, 8.75150409], + [-32389595.00191828, 27004279.7580633, -192770.28953487], + [0.79950572, 0.47316669, 21.82855319], + 148.50498613, + (330.16083128, -12.18043041), + (182.27527951, -11.95513466), + [ + [9.99936381e-01, 1.03449527e-02, 4.49612837e-03], + [-1.03447684e-02, 9.99946489e-01, -6.42365476e-05], + [-4.49655230e-03, 1.77210542e-05, 9.99989890e-01], + ], + [2.38892921e08, -2.07338200e08, -7.56949425e07], + (-0.26217728, 140.18091148, 35792398.96228714), + 0, + 0, + ), + ( + 50131.00694444, + (960218, 1000), + [13968921.48773305, -39788812.95011112, -251542.48890031], + [2900.60142795, 1018.71728887, 9.15368488], + [-32389355.85220329, 27004418.95297137, -186176.32730922], + [0.79525074, 0.45537327, 22.12963356], + 149.75840846, + (330.16392379, -12.17927157), + (181.02494445, -11.95396845), + [ + [9.99936381e-01, 1.03449544e-02, 4.49612913e-03], + [-1.03447701e-02, 9.99946489e-01, -6.42355636e-05], + [-4.49655305e-03, 1.77200547e-05, 9.99989890e-01], + ], + [2.38256170e08, -2.07422360e08, -7.56197178e07], + (-0.25320985, 140.18055815, 35792274.75899146), + 0, + 0, + ), + ( + 
50131.01041667, + (960218, 1500), + [14835691.90970188, -39473705.58489136, -248736.60300345], + [2877.63765957, 1081.9148182, 9.55147314], + [-32389118.03536845, 27004552.79890675, -179493.62657611], + [0.79062131, 0.43750908, 22.42011344], + 151.01183079, + (330.16701107, -12.17811148), + (179.77462147, -11.952801), + [ + [9.99936381e-01, 1.03449561e-02, 4.49612989e-03], + [-1.03447719e-02, 9.99946489e-01, -6.42345798e-05], + [-4.49655380e-03, 1.77190553e-05, 9.99989890e-01], + ], + [2.37625908e08, -2.07525364e08, -7.55445552e07], + (-0.24412169, 140.18021156, 35792148.80948149), + 0, + 0, + ), + ( + 50131.01388889, + (960218, 2000), + [15695366.40490882, -39139715.76420763, -245812.06324505], + [2853.29712752, 1144.59530548, 9.94467917], + [-32388881.66227116, 27004681.27687033, -172725.38836895], + [0.7856262, 0.41958762, 22.69985431], + 152.26525312, + (330.17009324, -12.17695013), + (178.52427609, -11.95163228), + [ + [9.99936381e-01, 1.03449578e-02, 4.49613064e-03], + [-1.03447736e-02, 9.99946489e-01, -6.42335961e-05], + [-4.49655455e-03, 1.77180562e-05, 9.99989890e-01], + ], + [2.37002549e08, -2.07647061e08, -7.54694534e07], + (-0.23491716, 140.17987182, 35792021.2420001), + 0, + 0, + ), + ( + 50131.01736111, + (960218, 2500), + [16547533.6691137, -38787003.10533711, -242770.27248672], + [2827.5914462, 1206.72876414, 10.33311542], + [-32388646.84104986, 27004804.37195345, -165874.85452439], + [0.78027439, 0.40162218, 22.96872279], + 153.51867545, + (330.17317044, -12.17578748), + (177.27392574, -11.95046228), + [ + [9.99936381e-01, 1.03449595e-02, 4.49613140e-03], + [-1.03447753e-02, 9.99946489e-01, -6.42326125e-05], + [-4.49655529e-03, 1.77170571e-05, 9.99989890e-01], + ], + [2.36386506e08, -2.07787291e08, -7.53944111e07], + (-0.22560065, 140.17953905, 35791892.18395986), + 0, + 0, + ), + ( + 50131.02083333, + (960218, 3000), + [17391785.98229151, -38415736.18212036, -239612.68950141], + [2800.53288309, 1268.28546791, 10.71659666], + [-32388413.67874206, 27004922.07123395, -158945.30610131], + [0.77457509, 0.38362576, 23.2265907], + 154.77209777, + (330.17624281, -12.17462353), + (176.02357057, -11.94929096), + [ + [9.99936381e-01, 1.03449612e-02, 4.49613215e-03], + [-1.03447770e-02, 9.99946489e-01, -6.42316291e-05], + [-4.49655603e-03, 1.77160583e-05, 9.99989890e-01], + ], + [2.35778185e08, -2.07945887e08, -7.53194268e07], + (-0.21617663, 140.17921335, 35791761.76173551), + 0, + 0, + ), + ], + dtype=fmt.ORBIT_PREDICTION_DATA, +) diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py new file mode 100644 index 0000000000..f4908c0a2b --- /dev/null +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py @@ -0,0 +1,636 @@ +"""Unit tests for GMS-5 VISSR reader.""" + +import datetime as dt +import gzip + +import fsspec +import numpy as np +import pytest +import xarray as xr +from pyresample.geometry import AreaDefinition + +import satpy.tests.reader_tests.gms.test_gms5_vissr_data as real_world +from satpy.readers import FSFile +from satpy.tests.reader_tests.utils import get_jit_methods, skip_numba_unstable_if_missing +from satpy.tests.utils import make_dataid + +try: + import satpy.readers.gms.gms5_vissr_format as fmt + import satpy.readers.gms.gms5_vissr_l1b as vissr + import satpy.readers.gms.gms5_vissr_navigation as nav +except ImportError as err: + if skip_numba_unstable_if_missing(): + pytest.skip(f"Numba is not compatible with unstable NumPy: {err!s}", allow_module_level=True) + raise + + 
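Note: the `disable_jit` fixture below swaps every jitted function for its pure-Python original so that coverage can trace the source. It relies on numba dispatchers exposing the undecorated function as `.py_func`; a tiny illustration of that mechanism (`_double` is a hypothetical example, not part of the test suite):

import numba


@numba.njit
def _double(x):
    return 2 * x


# A numba dispatcher keeps the original Python function on ``.py_func``;
# calling it bypasses compilation, which is exactly what monkeypatching
# each jitted method to ``method.py_func`` achieves in the fixture.
assert _double(21) == _double.py_func(21) == 42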
+@pytest.fixture(params=[False, True], autouse=True) +def disable_jit(request, monkeypatch): + """Run tests with jit enabled and disabled. + + Reason: Coverage report is only accurate with jit disabled. + """ + if request.param: + jit_methods = get_jit_methods(vissr) + for name, method in jit_methods.items(): + monkeypatch.setattr(name, method.py_func) + + +class TestEarthMask: + """Test getting the earth mask.""" + + def test_get_earth_mask(self): + """Test getting the earth mask.""" + first_earth_pixels = np.array([-1, 1, 0, -1]) + last_earth_pixels = np.array([-1, 3, 2, -1]) + edges = first_earth_pixels, last_earth_pixels + mask_exp = np.array( + [[0, 0, 0, 0], + [0, 1, 1, 1], + [1, 1, 1, 0], + [0, 0, 0, 0]] + ) + mask = vissr.get_earth_mask(mask_exp.shape, edges) + np.testing.assert_equal(mask, mask_exp) + + +class TestFileHandler: + """Test VISSR file handler.""" + + @pytest.fixture(autouse=True) + def patch_number_of_pixels_per_scanline(self, monkeypatch): + """Patch data types so that each scanline has two pixels.""" + num_pixels = 2 + IMAGE_DATA_BLOCK_IR = np.dtype( + [ + ("LCW", fmt.LINE_CONTROL_WORD), + ("DOC", fmt.U1, (256,)), + ("image_data", fmt.U1, num_pixels), + ] + ) + IMAGE_DATA_BLOCK_VIS = np.dtype( + [ + ("LCW", fmt.LINE_CONTROL_WORD), + ("DOC", fmt.U1, (64,)), + ("image_data", fmt.U1, (num_pixels,)), + ] + ) + IMAGE_DATA = { + fmt.VIS_CHANNEL: { + "offset": 6 * fmt.BLOCK_SIZE_VIS, + "dtype": IMAGE_DATA_BLOCK_VIS, + }, + fmt.IR_CHANNEL: { + "offset": 18 * fmt.BLOCK_SIZE_IR, + "dtype": IMAGE_DATA_BLOCK_IR, + }, + } + monkeypatch.setattr( + "satpy.readers.gms.gms5_vissr_format.IMAGE_DATA_BLOCK_IR", IMAGE_DATA_BLOCK_IR + ) + monkeypatch.setattr( + "satpy.readers.gms.gms5_vissr_format.IMAGE_DATA_BLOCK_VIS", IMAGE_DATA_BLOCK_VIS + ) + monkeypatch.setattr("satpy.readers.gms.gms5_vissr_format.IMAGE_DATA", IMAGE_DATA) + + @pytest.fixture( + params=[ + make_dataid(name="VIS", calibration="reflectance", resolution=1250), + make_dataid( + name="IR1", calibration="brightness_temperature", resolution=5000 + ), + make_dataid(name="IR1", calibration="counts", resolution=5000), + ] + ) + def dataset_id(self, request): + """Get dataset ID.""" + return request.param + + @pytest.fixture(params=[True, False]) + def mask_space(self, request): + """Mask space pixels.""" + return request.param + + @pytest.fixture(params=[True, False]) + def with_compression(self, request): + """Enable compression.""" + return request.param + + @pytest.fixture + def open_function(self, with_compression): + """Get open function for writing test files.""" + return gzip.open if with_compression else open + + @pytest.fixture + def vissr_file(self, dataset_id, file_contents, open_function, tmp_path): + """Get test VISSR file.""" + filename = tmp_path / "vissr_file" + ch_type = fmt.CHANNEL_TYPES[dataset_id["name"]] + writer = VissrFileWriter(ch_type, open_function) + writer.write(filename, file_contents) + return filename + + @pytest.fixture + def file_contents(self, control_block, image_parameters, image_data): + """Get VISSR file contents.""" + return { + "control_block": control_block, + "image_parameters": image_parameters, + "image_data": image_data, + } + + @pytest.fixture + def control_block(self, dataset_id): + """Get VISSR control block.""" + block_size = {"IR1": 16, "VIS": 4} + ctrl_block = np.zeros(1, dtype=fmt.CONTROL_BLOCK) + ctrl_block["parameter_block_size"] = block_size[dataset_id["name"]] + ctrl_block["available_block_size_of_image_data"] = 2 + return ctrl_block + + @pytest.fixture + def 
image_parameters(self, mode_block, cal_params, nav_params): + """Get VISSR image parameters.""" + image_params = {"mode": mode_block} + image_params.update(cal_params) + image_params.update(nav_params) + return image_params + + @pytest.fixture + def nav_params( + self, + coordinate_conversion, + attitude_prediction, + orbit_prediction, + ): + """Get navigation parameters.""" + nav_params = {} + nav_params.update(attitude_prediction) + nav_params.update(orbit_prediction) + nav_params.update(coordinate_conversion) + return nav_params + + @pytest.fixture + def cal_params( + self, + vis_calibration, + ir1_calibration, + ir2_calibration, + wv_calibration, + ): + """Get calibration parameters.""" + return { + "vis_calibration": vis_calibration, + "ir1_calibration": ir1_calibration, + "ir2_calibration": ir2_calibration, + "wv_calibration": wv_calibration, + } + + @pytest.fixture + def mode_block(self): + """Get VISSR mode block.""" + mode = np.zeros(1, dtype=fmt.MODE_BLOCK) + mode["satellite_name"] = b"GMS-5 " + mode["spin_rate"] = 99.21774 + mode["observation_time_mjd"] = 50000.0 + mode["ssp_longitude"] = 140.0 + mode["satellite_height"] = 123456.0 + mode["ir_frame_parameters"]["number_of_lines"] = 2 + mode["ir_frame_parameters"]["number_of_pixels"] = 2 + mode["vis_frame_parameters"]["number_of_lines"] = 2 + mode["vis_frame_parameters"]["number_of_pixels"] = 2 + return mode + + @pytest.fixture + def coordinate_conversion(self, coord_conv, simple_coord_conv_table): + """Get all coordinate conversion parameters.""" + return { + "coordinate_conversion": coord_conv, + "simple_coordinate_conversion_table": simple_coord_conv_table + } + + @pytest.fixture + def coord_conv(self): + """Get parameters for coordinate conversions. + + Adjust pixel offset so that the first column is at the image center. + This has the advantage that we can test with very small 2x2 images. + Otherwise, all pixels would be in space. 
+ """ + conv = np.zeros(1, dtype=fmt.COORDINATE_CONVERSION_PARAMETERS) + + cline = conv["central_line_number_of_vissr_frame"] + cline["IR1"] = 1378.5 + cline["VIS"] = 5513.0 + + cpix = conv["central_pixel_number_of_vissr_frame"] + cpix["IR1"] = 0.5 # instead of 1672.5 + cpix["VIS"] = 0.5 # instead of 6688.5 + + conv['scheduled_observation_time'] = 50130.979089568464 + + nsensors = conv["number_of_sensor_elements"] + nsensors["IR1"] = 1 + nsensors["VIS"] = 4 + + sampling_angle = conv["sampling_angle_along_pixel"] + sampling_angle["IR1"] = 9.5719995e-05 + sampling_angle["VIS"] = 2.3929999e-05 + + stepping_angle = conv["stepping_angle_along_line"] + stepping_angle["IR1"] = 0.00014000005 + stepping_angle["VIS"] = 3.5000005e-05 + + conv["matrix_of_misalignment"] = np.array( + [[9.9999917e-01, -5.1195198e-04, -1.2135329e-03], + [5.1036407e-04, 9.9999905e-01, -1.3083406e-03], + [1.2142011e-03, 1.3077201e-03, 9.9999845e-01]], + dtype=np.float32 + ) + + conv["parameters"]["equatorial_radius"] = 6377397.0 + conv["parameters"]["oblateness_of_earth"] = 0.003342773 + + conv["orbital_parameters"]["longitude_of_ssp"] = 141.0 + conv["orbital_parameters"]["latitude_of_ssp"] = 1.0 + return conv + + @pytest.fixture + def attitude_prediction(self): + """Get attitude prediction.""" + att_pred = np.zeros(1, dtype=fmt.ATTITUDE_PREDICTION) + att_pred["data"] = real_world.ATTITUDE_PREDICTION + return {"attitude_prediction": att_pred} + + @pytest.fixture + def orbit_prediction(self, orbit_prediction_1, orbit_prediction_2): + """Get predictions of orbital parameters.""" + return { + "orbit_prediction_1": orbit_prediction_1, + "orbit_prediction_2": orbit_prediction_2 + } + + @pytest.fixture + def orbit_prediction_1(self): + """Get first block of orbit prediction data.""" + orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION) + orb_pred["data"] = real_world.ORBIT_PREDICTION_1 + return orb_pred + + @pytest.fixture + def orbit_prediction_2(self): + """Get second block of orbit prediction data.""" + orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION) + orb_pred["data"] = real_world.ORBIT_PREDICTION_2 + return orb_pred + + @pytest.fixture + def vis_calibration(self): + """Get VIS calibration block.""" + vis_cal = np.zeros(1, dtype=fmt.VIS_CALIBRATION) + table = vis_cal["vis1_calibration_table"]["brightness_albedo_conversion_table"] + table[0, 0:4] = np.array([0, 0.25, 0.5, 1]) + return vis_cal + + @pytest.fixture + def ir1_calibration(self): + """Get IR1 calibration block.""" + cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) + table = cal["conversion_table_of_equivalent_black_body_temperature"] + table[0, 0:4] = np.array([0, 100, 200, 300]) + return cal + + @pytest.fixture + def ir2_calibration(self): + """Get IR2 calibration block.""" + cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) + return cal + + @pytest.fixture + def wv_calibration(self): + """Get WV calibration block.""" + cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) + return cal + + @pytest.fixture + def simple_coord_conv_table(self): + """Get simple coordinate conversion table.""" + table = np.zeros(1, dtype=fmt.SIMPLE_COORDINATE_CONVERSION_TABLE) + table["satellite_height"] = 123457.0 + return table + + @pytest.fixture + def image_data(self, dataset_id, image_data_ir1, image_data_vis): + """Get VISSR image data.""" + data = {"IR1": image_data_ir1, "VIS": image_data_vis} + return data[dataset_id["name"]] + + @pytest.fixture + def image_data_ir1(self): + """Get IR1 image data.""" + image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_IR) + image_data["LCW"]["line_number"] = [686, 
2089] + image_data["LCW"]["scan_time"] = [50000, 50000] + image_data["LCW"]["west_side_earth_edge"] = [0, 0] + image_data["LCW"]["east_side_earth_edge"] = [1, 1] + image_data["image_data"] = [[0, 1], [2, 3]] + return image_data + + @pytest.fixture + def image_data_vis(self): + """Get VIS image data.""" + image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_VIS) + image_data["LCW"]["line_number"] = [2744, 8356] + image_data["LCW"]["scan_time"] = [50000, 50000] + image_data["LCW"]["west_side_earth_edge"] = [-1, 0] + image_data["LCW"]["east_side_earth_edge"] = [-1, 1] + image_data["image_data"] = [[0, 1], [2, 3]] + return image_data + + @pytest.fixture + def vissr_file_like(self, vissr_file, with_compression): + """Get file-like object for VISSR test file.""" + if with_compression: + open_file = fsspec.open(vissr_file, compression="gzip") + return FSFile(open_file) + return vissr_file + + @pytest.fixture + def file_handler(self, vissr_file_like, mask_space): + """Get file handler to be tested.""" + return vissr.GMS5VISSRFileHandler( + vissr_file_like, {}, {}, mask_space=mask_space + ) + + @pytest.fixture + def vis_refl_exp(self, mask_space, lons_lats_exp): + """Get expected VIS reflectance.""" + lons, lats = lons_lats_exp + if mask_space: + data = [[np.nan, np.nan], [50, 100]] + else: + data = [[0, 25], [50, 100]] + return xr.DataArray( + data, + dims=("y", "x"), + coords={ + "lon": lons, + "lat": lats, + "acq_time": ( + "y", + [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)], + ), + "line_number": ("y", [2744, 8356]), + }, + ) + + @pytest.fixture + def ir1_counts_exp(self, lons_lats_exp): + """Get expected IR1 counts.""" + lons, lats = lons_lats_exp + return xr.DataArray( + [[0, 1], [2, 3]], + dims=("y", "x"), + coords={ + "lon": lons, + "lat": lats, + "acq_time": ( + "y", + [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)], + ), + "line_number": ("y", [686, 2089]), + }, + ) + + @pytest.fixture + def ir1_bt_exp(self, lons_lats_exp): + """Get expected IR1 brightness temperature.""" + lons, lats = lons_lats_exp + return xr.DataArray( + [[0, 100], [200, 300]], + dims=("y", "x"), + coords={ + "lon": lons, + "lat": lats, + "acq_time": ( + "y", + [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)], + ), + "line_number": ("y", [686, 2089]), + }, + ) + + @pytest.fixture + def lons_lats_exp(self, dataset_id): + """Get expected lon/lat coordinates. + + Computed with JMA's Msial library for 2 pixels near the central column + (6688.5/1672.5 for VIS/IR). 
+ + VIS: + + pix = [6688, 6688, 6689, 6689] + lin = [2744, 8356, 2744, 8356] + + IR1: + + pix = [1672, 1672, 1673, 1673] + lin = [686, 2089, 686, 2089] + """ + expectations = { + "IR1": { + "lons": [[139.680120, 139.718902], + [140.307367, 140.346062]], + "lats": [[35.045132, 35.045361], + [-34.971012, -34.970738]] + }, + "VIS": { + "lons": [[139.665133, 139.674833], + [140.292579, 140.302249]], + "lats": [[35.076113, 35.076170], + [-34.940439, -34.940370]] + } + } + exp = expectations[dataset_id["name"]] + lons = xr.DataArray(exp["lons"], dims=("y", "x")) + lats = xr.DataArray(exp["lats"], dims=("y", "x")) + return lons, lats + + @pytest.fixture + def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): + """Get expected dataset.""" + ir1_counts_id = make_dataid(name="IR1", calibration="counts", resolution=5000) + ir1_bt_id = make_dataid( + name="IR1", calibration="brightness_temperature", resolution=5000 + ) + vis_refl_id = make_dataid( + name="VIS", calibration="reflectance", resolution=1250 + ) + expectations = { + ir1_counts_id: ir1_counts_exp, + ir1_bt_id: ir1_bt_exp, + vis_refl_id: vis_refl_exp, + } + return expectations[dataset_id] + + @pytest.fixture + def area_def_exp(self, dataset_id): + """Get expected area definition.""" + if dataset_id["name"] == "IR1": + resol = 5 + size = 2366 + extent = (-20438.1468, -20438.1468, 20455.4306, 20455.4306) + else: + resol = 1 + size = 9464 + extent = (-20444.6235, -20444.6235, 20448.9445, 20448.9445) + area_id = f"gms-5_vissr_western-pacific_{resol}km" + desc = f"GMS-5 VISSR Western Pacific area definition with {resol} km resolution" + return AreaDefinition( + area_id=area_id, + description=desc, + proj_id=area_id, + projection={ + "a": nav.EARTH_EQUATORIAL_RADIUS, + "b": nav.EARTH_POLAR_RADIUS, + "h": "123456", + "lon_0": "140", + "no_defs": "None", + "proj": "geos", + "type": "crs", + "units": "m", + "x_0": "0", + "y_0": "0", + }, + area_extent=extent, + width=size, + height=size, + ) + + @pytest.fixture + def attrs_exp(self, area_def_exp): + """Get expected dataset attributes.""" + return { + "yaml": "info", + "platform": "GMS-5", + "sensor": "VISSR", + "time_parameters": { + "nominal_start_time": dt.datetime(1995, 10, 10), + "nominal_end_time": dt.datetime(1995, 10, 10, 0, 25), + }, + "orbital_parameters": { + "satellite_nominal_longitude": 140.0, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_altitude": 123456.0, + "satellite_actual_longitude": 141.0, + "satellite_actual_latitude": 1.0, + "satellite_actual_altitude": 123457.0, + }, + "area_def_uniform_sampling": area_def_exp, + } + + def test_get_dataset(self, file_handler, dataset_id, dataset_exp, attrs_exp): + """Test getting the dataset.""" + dataset = file_handler.get_dataset(dataset_id, {"yaml": "info"}) + xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1e-6) + assert dataset.attrs == attrs_exp + + def test_time_attributes(self, file_handler, attrs_exp): + """Test the file handler's time attributes.""" + start_time_exp = attrs_exp["time_parameters"]["nominal_start_time"] + end_time_exp = attrs_exp["time_parameters"]["nominal_end_time"] + assert file_handler.start_time == start_time_exp + assert file_handler.end_time == end_time_exp + + +class TestCorruptFile: + """Test reading corrupt files.""" + + @pytest.fixture + def file_contents(self): + """Get corrupt file contents (all zero).""" + control_block = np.zeros(1, dtype=fmt.CONTROL_BLOCK) + image_data = np.zeros(1, dtype=fmt.IMAGE_DATA_BLOCK_IR) + return { + "control_block": 
control_block,
+ "image_parameters": {},
+ "image_data": image_data,
+ }
+
+ @pytest.fixture
+ def corrupt_file(self, file_contents, tmp_path):
+ """Write corrupt VISSR file to disk."""
+ filename = tmp_path / "my_vissr_file"
+ writer = VissrFileWriter(ch_type="VIS", open_function=open)
+ writer.write(filename, file_contents)
+ return filename
+
+ def test_corrupt_file(self, corrupt_file):
+ """Test reading a corrupt file."""
+ with pytest.raises(ValueError, match=r".* corrupt .*"):
+ vissr.GMS5VISSRFileHandler(corrupt_file, {}, {})
+
+
+class VissrFileWriter:
+ """Write data in VISSR archive format."""
+
+ image_params_order = [
+ "mode",
+ "coordinate_conversion",
+ "attitude_prediction",
+ "orbit_prediction_1",
+ "orbit_prediction_2",
+ "vis_calibration",
+ "ir1_calibration",
+ "ir2_calibration",
+ "wv_calibration",
+ "simple_coordinate_conversion_table",
+ ]
+
+ def __init__(self, ch_type, open_function):
+ """Initialize the writer.
+
+ Args:
+ ch_type: Channel type (VIS or IR)
+ open_function: Open function to be used (e.g. open or gzip.open)
+ """
+ self.ch_type = ch_type
+ self.open_function = open_function
+
+ def write(self, filename, contents):
+ """Write file contents to disk."""
+ with self.open_function(filename, mode="wb") as fd:
+ self._write_control_block(fd, contents)
+ self._write_image_parameters(fd, contents)
+ self._write_image_data(fd, contents)
+
+ def _write_control_block(self, fd, contents):
+ self._write(fd, contents["control_block"])
+
+ def _write_image_parameters(self, fd, contents):
+ for name in self.image_params_order:
+ im_param = contents["image_parameters"].get(name)
+ if im_param:
+ self._write_image_parameter(fd, im_param, name)
+
+ def _write_image_parameter(self, fd, im_param, name):
+ offset = fmt.IMAGE_PARAMS[name]["offset"][self.ch_type]
+ self._write(fd, im_param, offset)
+
+ def _write_image_data(self, fd, contents):
+ offset = fmt.IMAGE_DATA[self.ch_type]["offset"]
+ self._write(fd, contents["image_data"], offset)
+
+ def _write(self, fd, data, offset=None):
+ """Write data to file.
+
+ If 'offset' is given, pad the file with placeholder bytes up to that
+ byte position before writing the data.
+ """
+ if offset:
+ self._fill(fd, offset)
+ fd.write(data.tobytes())
+
+ def _fill(self, fd, target_byte):
+ """Write placeholders from current position to target byte."""
+ nbytes = target_byte - fd.tell()
+ fd.write(b" " * nbytes)
diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py
new file mode 100644
index 0000000000..144139a07a
--- /dev/null
+++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py
@@ -0,0 +1,573 @@
+"""Unit tests for GMS-5 VISSR navigation."""
+
+import numpy as np
+import pytest
+
+from satpy.tests.reader_tests.utils import get_jit_methods, skip_numba_unstable_if_missing
+
+try:
+ import satpy.readers.gms.gms5_vissr_navigation as nav
+except ImportError as err:
+ if skip_numba_unstable_if_missing():
+ pytest.skip(f"Numba is not compatible with unstable NumPy: {err!s}", allow_module_level=True)
+ raise
+
+# Navigation references computed with JMA's Msial library (files
+# VISSR_19960217_2331_IR1.A.IMG and VISSR_19960217_2331_VIS.A.IMG). The VIS
+# navigation is slightly off (< 0.01 deg) compared to JMA's reference.
+# This is probably due to precision problems with the copied numbers.
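+#
+# Illustrative usage of an entry below (hypothetical snippet, mirroring
+# TestSinglePixelNavigation further down): each reference pairs a Pixel with
+# the Msial lon/lat and the navigation parameters needed to reproduce it, so
+# a single check reads roughly
+#
+#     ref = IR_NAVIGATION_REFERENCE[0]
+#     lon, lat = nav.get_lon_lat(ref["pixel"], ref["nav_params"])
+#     np.testing.assert_allclose((lon, lat), (ref["lon"], ref["lat"]))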
+IR_NAVIGATION_REFERENCE = [ + { + "pixel": nav.Pixel(line=686, pixel=1680), + 'lon': 139.990380, + 'lat': 35.047056, + 'nav_params': nav.PixelNavigationParameters( + attitude=nav.Attitude( + angle_between_earth_and_sun=3.997397917902958, + angle_between_sat_spin_and_z_axis=3.149118633034304, + angle_between_sat_spin_and_yz_plane=0.000546042025980, + ), + orbit=nav.Orbit( + angles=nav.OrbitAngles( + greenwich_sidereal_time=2.468529732418296, + declination_from_sat_to_sun=-0.208770861178982, + right_ascension_from_sat_to_sun=3.304369303579407, + ), + sat_position=nav.Vector3D( + x=-32390963.148471601307392, + y=27003395.381247851997614, + z=-228134.860026293463307, + ), + nutation_precession=np.array( + [[0.999936381496146, -0.010344758016410, -0.004496547784299], + [0.010344942303489, 0.999946489495557, 0.000017727054455], + [0.004496123789670, -0.000064242454080, 0.999989890320785]] + ), + ), + proj_params=nav.ProjectionParameters( + image_offset=nav.ImageOffset( + line_offset=1378.5, + pixel_offset=1672.5, + ), + scanning_angles=nav.ScanningAngles( + stepping_angle=0.000140000047395, + sampling_angle=0.000095719995443, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ) + ), + earth_ellipsoid=nav.EarthEllipsoid( + flattening=0.003352813177897, + equatorial_radius=6378136.0 + ) + ), + ) + }, + { + "pixel": nav.Pixel(line=2089, pixel=1793), + 'lon': 144.996967, + 'lat': -34.959853, + 'nav_params': nav.PixelNavigationParameters( + attitude=nav.Attitude( + angle_between_earth_and_sun=3.935707944355762, + angle_between_sat_spin_and_z_axis=3.149118633034304, + angle_between_sat_spin_and_yz_plane=0.000546042025980, + ), + orbit=nav.Orbit( + angles=nav.OrbitAngles( + greenwich_sidereal_time=2.530392320846865, + declination_from_sat_to_sun=-0.208713576872247, + right_ascension_from_sat_to_sun=3.242660398458377, + ), + sat_position=nav.Vector3D( + x=-32390273.633551981300116, + y=27003859.543135114014149, + z=-210800.087589388160268, + ), + nutation_precession=np.array( + [[0.999936381432029, -0.010344763228876, -0.004496550050695], + [0.010344947502662, 0.999946489441823, 0.000017724053657], + [0.004496126086653, -0.000064239500295, 0.999989890310647]] + ), + ), + proj_params=nav.ProjectionParameters( + image_offset=nav.ImageOffset( + line_offset=1378.5, + pixel_offset=1672.5, + ), + scanning_angles=nav.ScanningAngles( + stepping_angle=0.000140000047395, + sampling_angle=0.000095719995443, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + ), + earth_ellipsoid=nav.EarthEllipsoid( + flattening=0.003352813177897, + equatorial_radius=6378136 + ) + ), + ) + } +] + + +VIS_NAVIGATION_REFERENCE = [ + { + "pixel": nav.Pixel(line=2744, pixel=6720), + 'lon': 139.975527, + 'lat': 35.078028, + 'nav_params': nav.PixelNavigationParameters( + attitude=nav.Attitude( + angle_between_earth_and_sun=3.997397918405798, + angle_between_sat_spin_and_z_axis=3.149118633034304, + angle_between_sat_spin_and_yz_plane=0.000546042025980, + ), + orbit=nav.Orbit( + angles=nav.OrbitAngles( + greenwich_sidereal_time=2.468529731914041, + declination_from_sat_to_sun=-0.208770861179448, + right_ascension_from_sat_to_sun=3.304369304082406, + ), + sat_position=nav.Vector3D( + x=-32390963.148477241396904, + 
y=27003395.381243918091059, + z=-228134.860164520738181, + ), + nutation_precession=np.array( + [[0.999936381496146, -0.010344758016410, -0.004496547784299], + [0.010344942303489, 0.999946489495557, 0.000017727054455], + [0.004496123789670, -0.000064242454080, 0.999989890320785]] + ), + ), + proj_params=nav.ProjectionParameters( + image_offset=nav.ImageOffset( + line_offset=5513.0, + pixel_offset=6688.5, + ), + scanning_angles=nav.ScanningAngles( + stepping_angle=0.000035000004573, + sampling_angle=0.000023929998861, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + ), + earth_ellipsoid=nav.EarthEllipsoid( + flattening=0.003352813177897, + equatorial_radius=6378136 + ) + ), + ) + }, + { + "pixel": nav.Pixel(line=8356, pixel=7172), + 'lon': 144.980104, + 'lat': -34.929123, + 'nav_params': nav.PixelNavigationParameters( + attitude=nav.Attitude( + angle_between_earth_and_sun=3.935707944858620, + angle_between_sat_spin_and_z_axis=3.149118633034304, + angle_between_sat_spin_and_yz_plane=0.000546042025980, + ), + orbit=nav.Orbit( + angles=nav.OrbitAngles( + greenwich_sidereal_time=2.530392320342610, + declination_from_sat_to_sun=-0.208713576872715, + right_ascension_from_sat_to_sun=3.242660398961383, + ), + sat_position=nav.Vector3D( + x=-32390273.633557569235563, + y=27003859.543131537735462, + z=-210800.087734811415430, + ), + nutation_precession=np.array( + [[0.999936381432029, -0.010344763228876, -0.004496550050695], + [0.010344947502662, 0.999946489441823, 0.000017724053657], + [0.004496126086653, -0.000064239500295, 0.999989890310647]] + ), + ), + proj_params=nav.ProjectionParameters( + image_offset=nav.ImageOffset( + line_offset=5513.0, + pixel_offset=6688.5, + ), + scanning_angles=nav.ScanningAngles( + stepping_angle=0.000035000004573, + sampling_angle=0.000023929998861, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + ), + earth_ellipsoid=nav.EarthEllipsoid( + flattening=0.003352813177897, + equatorial_radius=6378136 + ) + ), + ) + }, +] + +NAVIGATION_REFERENCE = VIS_NAVIGATION_REFERENCE + IR_NAVIGATION_REFERENCE + + +@pytest.fixture(params=[False, True], autouse=True) +def disable_jit(request, monkeypatch): + """Run tests with jit enabled and disabled. + + Reason: Coverage report is only accurate with jit disabled. 
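+
+ Note: numba keeps the original Python implementation of a jitted function
+ in its ``py_func`` attribute, so patching that attribute in exposes the
+ pure-Python code paths to the coverage tool.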
+ """ + if request.param: + jit_methods = get_jit_methods(nav) + for name, method in jit_methods.items(): + monkeypatch.setattr(name, method.py_func) + + +class TestSinglePixelNavigation: + """Test navigation of a single pixel.""" + + @pytest.mark.parametrize( + "point,nav_params,expected", + [ + (ref["pixel"], ref["nav_params"], (ref["lon"], ref["lat"])) + for ref in NAVIGATION_REFERENCE + ], + ) + def test_get_lon_lat(self, point, nav_params, expected): + """Test getting lon/lat coordinates for a given pixel.""" + lon, lat = nav.get_lon_lat(point, nav_params) + np.testing.assert_allclose((lon, lat), expected) + + def test_transform_image_coords_to_scanning_angles(self): + """Test transformation from image coordinates to scanning angles.""" + offset = nav.ImageOffset(line_offset=100, pixel_offset=200) + scanning_angles = nav.ScanningAngles( + stepping_angle=0.01, sampling_angle=0.02, misalignment=-999 + ) + angles = nav.transform_image_coords_to_scanning_angles( + point=nav.Pixel(199, 99), + image_offset=offset, + scanning_angles=scanning_angles, + ) + np.testing.assert_allclose(angles, [-2, 1]) + + def test_transform_scanning_angles_to_satellite_coords(self): + """Test transformation from scanning angles to satellite coordinates.""" + scanning_angles = nav.Vector2D(np.pi, np.pi / 2) + misalignment = np.diag([1, 2, 3]).astype(float) + point_sat = nav.transform_scanning_angles_to_satellite_coords( + scanning_angles, misalignment + ) + np.testing.assert_allclose(point_sat, [0, 0, 3], atol=1e-12) + + def test_transform_satellite_to_earth_fixed_coords(self): + """Test transformation from satellite to earth-fixed coordinates.""" + point_sat = nav.Vector3D(1, 2, 3) + attitude = nav.Attitude( + angle_between_earth_and_sun=np.pi, + angle_between_sat_spin_and_z_axis=np.pi, + angle_between_sat_spin_and_yz_plane=np.pi / 2, + ) + orbit = nav.Orbit( + angles=nav.OrbitAngles( + greenwich_sidereal_time=np.pi, + declination_from_sat_to_sun=np.pi, + right_ascension_from_sat_to_sun=np.pi / 2, + ), + sat_position=nav.Vector3D(-999, -999, -999), + nutation_precession=np.diag([1, 2, 3]).astype(float), + ) + res = nav.transform_satellite_to_earth_fixed_coords(point_sat, orbit, attitude) + np.testing.assert_allclose(res, [-3, 1, -2]) + + def test_intersect_view_vector_with_earth(self): + """Test intersection of a view vector with the earth's surface.""" + view_vector = nav.Vector3D(-1, 0, 0) + ellipsoid = nav.EarthEllipsoid(equatorial_radius=6371 * 1000, flattening=0.003) + sat_pos = nav.Vector3D(x=36000 * 1000.0, y=0.0, z=0.0) + point = nav.intersect_with_earth(view_vector, sat_pos, ellipsoid) + exp = [ellipsoid.equatorial_radius, 0, 0] + np.testing.assert_allclose(point, exp) + + @pytest.mark.parametrize( + "point_earth_fixed,point_geodetic_exp", + [ + ([0, 0, 1], [0, 90]), + ([0, 0, -1], [0, -90]), + ([1, 0, 0], [0, 0]), + ([-1, 0, 0], [180, 0]), + ([1, 1, 1], [45, 35.426852]), + ], + ) + def test_transform_earth_fixed_to_geodetic_coords( + self, point_earth_fixed, point_geodetic_exp + ): + """Test transformation from earth-fixed to geodetic coordinates.""" + point_geodetic = nav.transform_earth_fixed_to_geodetic_coords( + nav.Vector3D(*point_earth_fixed), + 0.003 + ) + np.testing.assert_allclose(point_geodetic, point_geodetic_exp) + + def test_normalize_vector(self): + """Test vector normalization.""" + v = nav.Vector3D(1, 2, 3) + norm = np.sqrt(14) + exp = nav.Vector3D(1 / norm, 2 / norm, 3 / norm) + normed = nav.normalize_vector(v) + np.testing.assert_allclose(normed, exp) + + +class TestImageNavigation: 
+ """Test navigation of an entire image.""" + + @pytest.fixture + def expected(self): + """Get expected coordinates.""" + exp = { + "lon": [[-114.56923, -112.096837, -109.559702], + [8.33221, 8.793893, 9.22339], + [15.918476, 16.268354, 16.6332]], + "lat": [[-23.078721, -24.629845, -26.133314], + [-42.513409, -39.790231, -37.06392], + [3.342834, 6.07043, 8.795932]] + } + return exp + + def test_get_lons_lats(self, navigation_params, expected): + """Test getting lon/lat coordinates.""" + lons, lats = nav.get_lons_lats( + lines=np.array([1000, 1500, 2000]), + pixels=np.array([1000, 1500, 2000]), + nav_params=navigation_params, + ) + np.testing.assert_allclose(lons, expected["lon"]) + np.testing.assert_allclose(lats, expected["lat"]) + + +class TestPredictionInterpolation: + """Test interpolation of orbit and attitude predictions.""" + + @pytest.mark.parametrize( + "obs_time,expected", [(-1, np.nan), (1.5, 2.5), (5, np.nan)] + ) + def test_interpolate_continuous(self, obs_time, expected): + """Test interpolation of continuous variables.""" + prediction_times = np.array([0, 1, 2, 3]) + predicted_values = np.array([1, 2, 3, 4]) + res = nav.interpolate_continuous(obs_time, prediction_times, predicted_values) + np.testing.assert_allclose(res, expected) + + @pytest.mark.parametrize( + "obs_time,expected", + [ + (-1, np.nan), + (1.5, 0.75 * np.pi), + (2.5, -0.75 * np.pi), + (3.5, -0.25 * np.pi), + (5, np.nan), + ], + ) + def test_interpolate_angles(self, obs_time, expected): + """Test interpolation of periodic angles.""" + prediction_times = np.array([0, 1, 2, 3, 4]) + predicted_angles = np.array( + [0, 0.5 * np.pi, np.pi, 1.5 * np.pi, 2 * np.pi] + ) # already unwrapped + res = nav.interpolate_angles(obs_time, prediction_times, predicted_angles) + np.testing.assert_allclose(res, expected) + + @pytest.mark.parametrize( + "obs_time,expected", + [ + (-1, np.nan * np.ones((2, 2))), + (1.5, [[1, 0], [0, 2]]), + (3, np.nan * np.ones((2, 2))), + ], + ) + def test_interpolate_nearest(self, obs_time, expected): + """Test nearest neighbour interpolation.""" + prediction_times = np.array([0, 1, 2]) + predicted_angles = np.array( + [np.zeros((2, 2)), np.diag((1, 2)), np.zeros((2, 2))] + ) + res = nav.interpolate_nearest(obs_time, prediction_times, predicted_angles) + np.testing.assert_allclose(res, expected) + + def test_interpolate_orbit_prediction( + self, obs_time, orbit_prediction, orbit_expected + ): + """Test interpolating orbit prediction.""" + orbit_prediction = orbit_prediction.to_numba() + orbit = nav.interpolate_orbit_prediction(orbit_prediction, obs_time) + _assert_namedtuple_close(orbit, orbit_expected) + + def test_interpolate_attitude_prediction( + self, obs_time, attitude_prediction, attitude_expected + ): + """Test interpolating attitude prediction.""" + attitude_prediction = attitude_prediction.to_numba() + attitude = nav.interpolate_attitude_prediction(attitude_prediction, obs_time) + _assert_namedtuple_close(attitude, attitude_expected) + + @pytest.fixture + def obs_time(self): + """Get observation time.""" + return 2.5 + + @pytest.fixture + def orbit_expected(self): + """Get expected orbit.""" + return nav.Orbit( + angles=nav.OrbitAngles( + greenwich_sidereal_time=1.5, + declination_from_sat_to_sun=1.6, + right_ascension_from_sat_to_sun=1.7, + ), + sat_position=nav.Vector3D( + x=1.8, + y=1.9, + z=2.0, + ), + nutation_precession=1.6 * np.identity(3), + ) + + @pytest.fixture + def attitude_expected(self): + """Get expected attitude.""" + return nav.Attitude( + 
angle_between_earth_and_sun=1.5, + angle_between_sat_spin_and_z_axis=1.6, + angle_between_sat_spin_and_yz_plane=1.7, + ) + + +@pytest.fixture +def sampling_angle(): + """Get sampling angle.""" + return 0.000095719995443 + + +@pytest.fixture +def scan_params(sampling_angle): + """Get scanning parameters.""" + return nav.ScanningParameters( + start_time_of_scan=0, + spinning_rate=0.5, + num_sensors=1, + sampling_angle=sampling_angle, + ) + + +@pytest.fixture +def attitude_prediction(): + """Get attitude prediction.""" + return nav.AttitudePrediction( + prediction_times=np.array([1.0, 2.0, 3.0]), + attitude=nav.Attitude( + angle_between_earth_and_sun=np.array([0.0, 1.0, 2.0]), + angle_between_sat_spin_and_z_axis=np.array([0.1, 1.1, 2.1]), + angle_between_sat_spin_and_yz_plane=np.array([0.2, 1.2, 2.2]), + ), + ) + + +@pytest.fixture +def orbit_prediction(): + """Get orbit prediction.""" + return nav.OrbitPrediction( + prediction_times=np.array([1.0, 2.0, 3.0, 4.0]), + angles=nav.OrbitAngles( + greenwich_sidereal_time=np.array([0.0, 1.0, 2.0, 3.0]), + declination_from_sat_to_sun=np.array([0.1, 1.1, 2.1, 3.1]), + right_ascension_from_sat_to_sun=np.array([0.2, 1.2, 2.2, 3.2]), + ), + sat_position=nav.Vector3D( + x=np.array([0.3, 1.3, 2.3, 3.3]), + y=np.array([0.4, 1.4, 2.4, 3.4]), + z=np.array([0.5, 1.5, 2.5, 3.5]), + ), + nutation_precession=np.array( + [ + 0.6 * np.identity(3), + 1.6 * np.identity(3), + 2.6 * np.identity(3), + 3.6 * np.identity(3), + ] + ), + ) + + +@pytest.fixture +def proj_params(sampling_angle): + """Get projection parameters.""" + return nav.ProjectionParameters( + image_offset=nav.ImageOffset( + line_offset=1378.5, + pixel_offset=1672.5, + ), + scanning_angles=nav.ScanningAngles( + stepping_angle=0.000140000047395, + sampling_angle=sampling_angle, + misalignment=np.identity(3).astype(np.float64), + ), + earth_ellipsoid=nav.EarthEllipsoid( + flattening=0.003352813177897, + equatorial_radius=6378136, + ), + ) + + +@pytest.fixture +def static_nav_params(proj_params, scan_params): + """Get static navigation parameters.""" + return nav.StaticNavigationParameters(proj_params, scan_params) + + +@pytest.fixture +def predicted_nav_params(attitude_prediction, orbit_prediction): + """Get predicted navigation parameters.""" + return nav.PredictedNavigationParameters(attitude_prediction, orbit_prediction) + + +@pytest.fixture +def navigation_params(static_nav_params, predicted_nav_params): + """Get image navigation parameters.""" + return nav.ImageNavigationParameters(static_nav_params, predicted_nav_params) + + +def test_get_observation_time(): + """Test getting a pixel's observation time.""" + scan_params = nav.ScanningParameters( + start_time_of_scan=50000.0, + spinning_rate=100, + num_sensors=1, + sampling_angle=0.01, + ) + pixel = nav.Pixel(11, 100) + obs_time = nav.get_observation_time(pixel, scan_params) + np.testing.assert_allclose(obs_time, 50000.0000705496871047) + + +def _assert_namedtuple_close(a, b): + cls_name = b.__class__.__name__ + assert a.__class__ == b.__class__ + for attr in b._fields: + a_attr = getattr(a, attr) + b_attr = getattr(b, attr) + if _is_namedtuple(b_attr): + _assert_namedtuple_close(a_attr, b_attr) + np.testing.assert_allclose( + a_attr, b_attr, err_msg=f"{cls_name} attribute {attr} differs" + ) + + +def _is_namedtuple(obj): + return hasattr(obj, "_fields") diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index b8ad4400cb..344f918f8f 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ 
b/satpy/tests/reader_tests/test_abi_l1b.py @@ -27,93 +27,105 @@ from satpy.tests.utils import make_dataid +def _create_fake_rad_dataarray(rad=None): + x_image = xr.DataArray(0.) + y_image = xr.DataArray(0.) + time = xr.DataArray(0.) + if rad is None: + rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. + rad_data = (rad_data + 1.) / 0.5 + rad_data = rad_data.astype(np.int16) + rad = xr.DataArray( + rad_data, + dims=('y', 'x'), + attrs={ + 'scale_factor': 0.5, + 'add_offset': -1., + '_FillValue': 1002, + 'units': 'W m-2 um-1 sr-1', + 'valid_range': (0, 4095), + } + ) + rad.coords['t'] = time + rad.coords['x_image'] = x_image + rad.coords['y_image'] = y_image + return rad + + +def _create_fake_rad_dataset(rad=None): + rad = _create_fake_rad_dataarray(rad=rad) + + x__ = xr.DataArray( + range(5), + attrs={'scale_factor': 2., 'add_offset': -1.}, + dims=('x',) + ) + y__ = xr.DataArray( + range(2), + attrs={'scale_factor': -2., 'add_offset': 1.}, + dims=('y',) + ) + proj = xr.DataArray( + [], + attrs={ + 'semi_major_axis': 1., + 'semi_minor_axis': 1., + 'perspective_point_height': 1., + 'longitude_of_projection_origin': -90., + 'latitude_of_projection_origin': 0., + 'sweep_angle_axis': u'x' + } + ) + + fake_dataset = xr.Dataset( + data_vars={ + 'Rad': rad, + 'band_id': np.array(8), + # 'x': x__, + # 'y': y__, + 'x_image': xr.DataArray(0.), + 'y_image': xr.DataArray(0.), + 'goes_imager_projection': proj, + 'yaw_flip_flag': np.array([1]), + "planck_fk1": np.array(13432.1), + "planck_fk2": np.array(1497.61), + "planck_bc1": np.array(0.09102), + "planck_bc2": np.array(0.99971), + "esun": np.array(2017), + "nominal_satellite_subpoint_lat": np.array(0.0), + "nominal_satellite_subpoint_lon": np.array(-89.5), + "nominal_satellite_height": np.array(35786.02), + "earth_sun_distance_anomaly_in_AU": np.array(0.99) + }, + coords={ + 't': rad.coords['t'], + 'x': x__, + 'y': y__, + + }, + attrs={ + "time_coverage_start": "2017-09-20T17:30:40.8Z", + "time_coverage_end": "2017-09-20T17:41:17.5Z", + }, + ) + return fake_dataset + + class Test_NC_ABI_L1B_Base(unittest.TestCase): """Common setup for NC_ABI_L1B tests.""" @mock.patch('satpy.readers.abi_base.xr') - def setUp(self, xr_, rad=None): + def setUp(self, xr_, rad=None, clip_negative_radiances=False): """Create a fake dataset using the given radiance data.""" from satpy.readers.abi_l1b import NC_ABI_L1B - x_image = xr.DataArray(0.) - y_image = xr.DataArray(0.) - time = xr.DataArray(0.) - if rad is None: - rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. - rad_data = (rad_data + 1.) 
/ 0.5 - rad_data = rad_data.astype(np.int16) - rad = xr.DataArray( - rad_data, - dims=('y', 'x'), - attrs={ - 'scale_factor': 0.5, - 'add_offset': -1., - '_FillValue': 1002, - 'units': 'W m-2 um-1 sr-1', - 'valid_range': (0, 4095), - } - ) - rad.coords['t'] = time - rad.coords['x_image'] = x_image - rad.coords['y_image'] = y_image - x__ = xr.DataArray( - range(5), - attrs={'scale_factor': 2., 'add_offset': -1.}, - dims=('x',) - ) - y__ = xr.DataArray( - range(2), - attrs={'scale_factor': -2., 'add_offset': 1.}, - dims=('y',) - ) - proj = xr.DataArray( - [], - attrs={ - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'perspective_point_height': 1., - 'longitude_of_projection_origin': -90., - 'latitude_of_projection_origin': 0., - 'sweep_angle_axis': u'x' - } - ) - fake_dataset = xr.Dataset( - data_vars={ - 'Rad': rad, - 'band_id': np.array(8), - # 'x': x__, - # 'y': y__, - 'x_image': x_image, - 'y_image': y_image, - 'goes_imager_projection': proj, - 'yaw_flip_flag': np.array([1]), - "planck_fk1": np.array(13432.1), - "planck_fk2": np.array(1497.61), - "planck_bc1": np.array(0.09102), - "planck_bc2": np.array(0.99971), - "esun": np.array(2017), - "nominal_satellite_subpoint_lat": np.array(0.0), - "nominal_satellite_subpoint_lon": np.array(-89.5), - "nominal_satellite_height": np.array(35786.02), - "earth_sun_distance_anomaly_in_AU": np.array(0.99) - }, - coords={ - 't': rad.coords['t'], - 'x': x__, - 'y': y__, - - }, - attrs={ - "time_coverage_start": "2017-09-20T17:30:40.8Z", - "time_coverage_end": "2017-09-20T17:41:17.5Z", - }, - ) - xr_.open_dataset.return_value = fake_dataset + xr_.open_dataset.return_value = _create_fake_rad_dataset(rad=rad) self.reader = NC_ABI_L1B('filename', {'platform_shortname': 'G16', 'observation_type': 'Rad', 'suffix': 'custom', 'scene_abbr': 'C', 'scan_mode': 'M3'}, - {'filetype': 'info'}) + {'filetype': 'info'}, + clip_negative_radiances=clip_negative_radiances) class TestABIYAML: @@ -200,7 +212,7 @@ def test_get_area_def(self, adef): class Test_NC_ABI_L1B_ir_cal(Test_NC_ABI_L1B_Base): - """Test the NC_ABI_L1B reader's IR calibration.""" + """Test the NC_ABI_L1B reader's default IR calibration.""" def setUp(self): """Create fake data for the tests.""" @@ -213,19 +225,16 @@ def setUp(self): attrs={ 'scale_factor': 0.5, 'add_offset': -1., - '_FillValue': 1002, + '_FillValue': 1002, # last rad_data value } ) super(Test_NC_ABI_L1B_ir_cal, self).setUp(rad=rad) - def test_ir_calibrate(self): - """Test IR calibration.""" + def test_ir_calibration_attrs(self): + """Test IR calibrated DataArray attributes.""" res = self.reader.get_dataset( make_dataid(name='C05', calibration='brightness_temperature'), {}) - expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115], - [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) - self.assertTrue(np.allclose(res.data, expected, equal_nan=True)) # make sure the attributes from the file are in the data array self.assertNotIn('scale_factor', res.attrs) self.assertNotIn('_FillValue', res.attrs) @@ -233,6 +242,68 @@ def test_ir_calibrate(self): 'toa_brightness_temperature') self.assertEqual(res.attrs['long_name'], 'Brightness Temperature') + def test_clip_negative_radiances_attribute(self): + """Assert that clip_negative_radiances is set to False.""" + assert not self.reader.clip_negative_radiances + + def test_ir_calibrate(self): + """Test IR calibration.""" + res = self.reader.get_dataset( + make_dataid(name='C05', calibration='brightness_temperature'), {}) + + expected = 
np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115], + [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) + assert np.allclose(res.data, expected, equal_nan=True) + + +class Test_NC_ABI_L1B_clipped_ir_cal(Test_NC_ABI_L1B_Base): + """Test the NC_ABI_L1B reader's IR calibration (clipping negative radiance).""" + + def setUp(self): + """Create fake data for the tests.""" + values = np.arange(10.) + values[0] = -0.0001 # introduce below minimum expected radiance + rad_data = (values.reshape((2, 5)) + 1.) * 50. + rad_data = (rad_data + 1.) / 0.5 + rad_data = rad_data.astype(np.int16) + rad = xr.DataArray( + rad_data, + dims=('y', 'x'), + attrs={ + 'scale_factor': 0.5, + 'add_offset': -1., + '_FillValue': 1002, + } + ) + + super().setUp(rad=rad, clip_negative_radiances=True) + + def test_clip_negative_radiances_attribute(self): + """Assert that clip_negative_radiances has been set to True.""" + assert self.reader.clip_negative_radiances + + def test_ir_calibrate(self): + """Test IR calibration.""" + res = self.reader.get_dataset( + make_dataid(name='C07', calibration='brightness_temperature'), {}) + + clipped_ir = 267.07775531 + expected = np.array([[clipped_ir, 305.15576503, 332.37383249, 354.73895301, 374.19710115], + [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) + assert np.allclose(res.data, expected, equal_nan=True) + + def test_get_minimum_radiance(self): + """Test get_minimum_radiance from Rad DataArray.""" + from satpy.readers.abi_l1b import NC_ABI_L1B + data = xr.DataArray( + attrs={ + 'scale_factor': 0.5, + 'add_offset': -1., + '_FillValue': 1002, + } + ) + np.testing.assert_allclose(NC_ABI_L1B._get_minimum_radiance(NC_ABI_L1B, data), 0.0) + class Test_NC_ABI_L1B_vis_cal(Test_NC_ABI_L1B_Base): """Test the NC_ABI_L1B reader.""" diff --git a/satpy/tests/reader_tests/test_fy4_base.py b/satpy/tests/reader_tests/test_fy4_base.py index e6ba3c6e18..432117e1ad 100644 --- a/satpy/tests/reader_tests/test_fy4_base.py +++ b/satpy/tests/reader_tests/test_fy4_base.py @@ -36,7 +36,7 @@ def setup_method(self): self.file_type = {'file_type': 'agri_l1_0500m'} - def teardown(self): + def teardown_method(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() diff --git a/satpy/tests/reader_tests/test_geocat.py b/satpy/tests/reader_tests/test_geocat.py index b9323a39e2..91de6a4265 100644 --- a/satpy/tests/reader_tests/test_geocat.py +++ b/satpy/tests/reader_tests/test_geocat.py @@ -130,10 +130,22 @@ def test_init(self): loadables = r.select_files_from_pathnames([ 'geocatL2.GOES-13.2015143.234500.nc', ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers + + def test_init_with_kwargs(self): + """Test basic init with extra parameters.""" + from satpy.readers import load_reader + r = load_reader(self.reader_configs, xarray_kwargs={"decode_times": True}) + loadables = r.select_files_from_pathnames([ + 'geocatL2.GOES-13.2015143.234500.nc', + ]) + assert len(loadables) == 1 + r.create_filehandlers(loadables, fh_kwargs={"xarray_kwargs": {'decode_times': True}}) + # make sure we have some files + assert r.file_handlers def test_load_all_old_goes(self): """Test loading all test datasets from old GOES files.""" diff --git a/satpy/tests/reader_tests/test_ghi_l1.py b/satpy/tests/reader_tests/test_ghi_l1.py index 10f74f31ef..79667ef37d 100644 --- a/satpy/tests/reader_tests/test_ghi_l1.py +++ 
b/satpy/tests/reader_tests/test_ghi_l1.py
@@ -233,7 +233,7 @@ def setup_method(self):
 'C07': np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]),
 }
- def teardown(self):
+ def teardown_method(self):
 """Stop wrapping the HDF5 file handler."""
 self.p.stop()
diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py
index 0f46d94882..f8ad241532 100644
--- a/satpy/tests/reader_tests/test_hrit_base.py
+++ b/satpy/tests/reader_tests/test_hrit_base.py
@@ -21,7 +21,7 @@
 import gzip
 import os
 import unittest
-from datetime import datetime
+from datetime import datetime, timedelta
 from tempfile import NamedTemporaryFile, gettempdir
 from unittest import mock
@@ -268,6 +268,13 @@ def test_read_band_gzip_stream(self, stub_gzipped_hrit_file):
 res = self.reader.read_band('VIS006', None)
 assert res.compute().shape == (464, 3712)
+ def test_start_end_time(self):
+ """Test reading and converting start/end time."""
+ assert self.reader.start_time == datetime(2016, 3, 3, 0, 0)
+ assert self.reader.start_time == self.reader.observation_start_time
+ assert self.reader.end_time == datetime(2016, 3, 3, 0, 0) + timedelta(minutes=15)
+ assert self.reader.end_time == self.reader.observation_end_time
+
 def fake_decompress(infile, outdir='.'):
 """Fake decompression."""
diff --git a/satpy/tests/reader_tests/test_msu_gsa_l1b.py b/satpy/tests/reader_tests/test_msu_gsa_l1b.py
index bb17d5fa6b..a5efc52be6 100644
--- a/satpy/tests/reader_tests/test_msu_gsa_l1b.py
+++ b/satpy/tests/reader_tests/test_msu_gsa_l1b.py
@@ -142,7 +142,7 @@ def setup_method(self):
 files = self.reader.select_files_from_pathnames(filenames)
 self.reader.create_filehandlers(files)
- def teardown(self):
+ def teardown_method(self):
 """Stop wrapping the HDF5 file handler."""
 self.p.stop()
diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py
index b0196eb3b8..6761511cf5 100644
--- a/satpy/tests/reader_tests/test_olci_nc.py
+++ b/satpy/tests/reader_tests/test_olci_nc.py
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU General Public License along with
 # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Module for testing the satpy.readers.olci_nc module.""" +import datetime import unittest import unittest.mock as mock @@ -93,7 +94,7 @@ def test_open_file_objects(self, mocked_open_dataset): open_file.open.return_value == mocked_open_dataset.call_args[1].get('filename_or_obj')) @mock.patch('xarray.open_dataset') - def test_get_dataset(self, mocked_dataset): + def test_get_mask(self, mocked_dataset): """Test reading datasets.""" import numpy as np import xarray as xr @@ -109,6 +110,32 @@ def test_get_dataset(self, mocked_dataset): test = NCOLCI2('somedir/somefile.nc', filename_info, 'c') res = test.get_dataset(ds_id, {'nc_key': 'mask'}) self.assertEqual(res.dtype, np.dtype('bool')) + expected = np.array([[True, False, True, True, True, True], + [False, False, True, True, False, False], + [False, False, False, False, False, True], + [False, True, False, False, False, True], + [True, False, False, True, False, False]]) + np.testing.assert_array_equal(res.values, expected) + + @mock.patch('xarray.open_dataset') + def test_get_mask_with_alternative_items(self, mocked_dataset): + """Test reading datasets.""" + import numpy as np + import xarray as xr + + from satpy.readers.olci_nc import NCOLCI2 + from satpy.tests.utils import make_dataid + mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 'columns'], + np.array([1 << x for x in range(30)]).reshape(5, 6))}, + coords={'rows': np.arange(5), + 'columns': np.arange(6)}) + ds_id = make_dataid(name='mask') + filename_info = {'mission_id': 'S3A', 'dataset_name': 'mask', 'start_time': 0, 'end_time': 0} + test = NCOLCI2('somedir/somefile.nc', filename_info, 'c', mask_items=["INVALID"]) + res = test.get_dataset(ds_id, {'nc_key': 'mask'}) + self.assertEqual(res.dtype, np.dtype('bool')) + expected = np.array([True] + [False] * 29).reshape(5, 6) + np.testing.assert_array_equal(res.values, expected) @mock.patch('xarray.open_dataset') def test_olci_angles(self, mocked_dataset): @@ -177,6 +204,42 @@ def test_olci_meteo(self, mocked_dataset): mocked_dataset.assert_called() mocked_dataset.reset_mock() + @mock.patch("xarray.open_dataset") + def test_chl_nn(self, mocked_dataset): + """Test unlogging the chl_nn product.""" + import numpy as np + import xarray as xr + + from satpy.readers.olci_nc import NCOLCI2 + from satpy.tests.utils import make_dataid + attr_dict = { + 'ac_subsampling_factor': 64, + 'al_subsampling_factor': 1, + } + data = {'CHL_NN': (['rows', 'columns'], + np.arange(30).reshape(5, 6).astype(float), + {"units": "lg(re mg.m-3)"})} + mocked_dataset.return_value = xr.Dataset(data, + coords={'rows': np.arange(5), + 'columns': np.arange(6)}, + attrs=attr_dict) + ds_info = {'name': 'chl_nn', 'sensor': 'olci', 'resolution': 300, + 'standard_name': 'algal_pigment_concentration', 'units': 'lg(re mg.m-3)', + 'coordinates': ('longitude', 'latitude'), 'file_type': 'esa_l2_chl_nn', 'nc_key': 'CHL_NN', + 'modifiers': ()} + filename_info = {'mission_id': 'S3A', 'datatype_id': 'WFR', + 'start_time': datetime.datetime(2019, 9, 24, 9, 29, 39), + 'end_time': datetime.datetime(2019, 9, 24, 9, 32, 39), + 'creation_time': datetime.datetime(2019, 9, 24, 11, 40, 26), 'duration': 179, 'cycle': 49, + 'relative_orbit': 307, 'frame': 1800, 'centre': 'MAR', 'mode': 'O', 'timeliness': 'NR', + 'collection': '002'} + ds_id = make_dataid(name='chl_nn') + file_handler = NCOLCI2('somedir/somefile.nc', filename_info, None, unlog=True) + res = file_handler.get_dataset(ds_id, ds_info) + + assert res.attrs["units"] == "mg.m-3" + assert res.values[-1, -1] == 1e29 + class 
 """Test the bitflag reading."""
diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py
index 2f02c46bb9..157ed88bbf 100644
--- a/satpy/tests/reader_tests/test_seviri_base.py
+++ b/satpy/tests/reader_tests/test_seviri_base.py
@@ -18,7 +18,7 @@
 """Test the MSG common (native and hrit format) functionionalities."""
 import unittest
-from datetime import datetime
+from datetime import datetime, timedelta
 import dask.array as da
 import numpy as np
@@ -36,6 +36,7 @@
 get_satpos,
 pad_data_horizontally,
 pad_data_vertically,
+ round_nom_time,
 )
 from satpy.utils import get_legacy_chunk_size
@@ -107,6 +108,29 @@ def test_pad_data_vertically_bad_shape(self):
 with self.assertRaises(IndexError):
 pad_data_vertically(data, final_size, south_bound, north_bound)
+ def observation_start_time(self):
+ """Get scan start timestamp for testing."""
+ return datetime(2023, 3, 20, 15, 0, 10, 691000)
+
+ def observation_end_time(self):
+ """Get scan end timestamp for testing."""
+ return datetime(2023, 3, 20, 15, 12, 43, 843000)
+
+ def test_round_nom_time(self):
+ """Test the rounding of start/end_time."""
+ self.assertEqual(round_nom_time(
+ dt=self.observation_start_time(),
+ time_delta=timedelta(minutes=15)
+ ),
+ datetime(2023, 3, 20, 15, 0)
+ )
+ self.assertEqual(round_nom_time(
+ dt=self.observation_end_time(),
+ time_delta=timedelta(minutes=15)
+ ),
+ datetime(2023, 3, 20, 15, 15)
+ )
+
 @staticmethod
 def test_pad_data_horizontally():
 """Test the horizontal hrv padding."""
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py
index f105870f12..90785ffdbf 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py
@@ -46,10 +46,10 @@ class TestHRITMSGFileHandlerHRV(TestHRITMSGBase):
 def setUp(self):
 """Set up the hrit file handler for testing HRV."""
- self.start_time = datetime(2016, 3, 3, 0, 0)
+ self.observation_start_time = datetime(2006, 1, 1, 12, 15, 9, 304888)
 self.nlines = 464
 self.reader = setup.get_fake_file_handler(
- start_time=self.start_time,
+ observation_start_time=self.observation_start_time,
 nlines=self.nlines,
 ncols=5568,
 )
@@ -88,7 +88,7 @@ def test_get_dataset(self, calibrate, parent_get_dataset):
 self.assert_attrs_equal(res.attrs, setup.get_attrs_exp())
 np.testing.assert_equal(
 res['acq_time'],
- setup.get_acq_time_exp(self.start_time, self.nlines)
+ setup.get_acq_time_exp(self.observation_start_time, self.nlines)
 )
 @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset')
@@ -111,7 +111,7 @@ def test_get_dataset_non_fill(self, calibrate, parent_get_dataset):
 self.assert_attrs_equal(res.attrs, setup.get_attrs_exp())
 np.testing.assert_equal(
 res['acq_time'],
- setup.get_acq_time_exp(self.start_time, self.nlines)
+ setup.get_acq_time_exp(self.observation_start_time, self.nlines)
 )
 def test_get_area_def(self):
@@ -144,12 +144,12 @@ class TestHRITMSGFileHandler(TestHRITMSGBase):
 def setUp(self):
 """Set up the hrit file handler for testing."""
- self.start_time = datetime(2016, 3, 3, 0, 0)
+ self.observation_start_time = datetime(2006, 1, 1, 12, 15, 9, 304888)
 self.nlines = 464
 self.ncols = 3712
 self.projection_longitude = 9.5
 self.reader = setup.get_fake_file_handler(
- start_time=self.start_time,
+ observation_start_time=self.observation_start_time,
 nlines=self.nlines,
 ncols=self.ncols,
 projection_longitude=self.projection_longitude
 )
@@ -219,13 +219,26 @@ def test_get_dataset(self, 
calibrate, parent_get_dataset): expected['acq_time'] = ( 'y', - setup.get_acq_time_exp(self.start_time, self.nlines) + setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) xr.testing.assert_equal(res, expected) self.assert_attrs_equal( res.attrs, setup.get_attrs_exp(self.projection_longitude) ) + # testing start/end time + self.assertEqual(datetime(2006, 1, 1, 12, 15, 9, 304888), self.reader.observation_start_time) + self.assertEqual(datetime(2006, 1, 1, 12, 15,), self.reader.start_time) + self.assertEqual(self.reader.start_time, self.reader.nominal_start_time) + + self.assertEqual(datetime(2006, 1, 1, 12, 27, 39), self.reader.observation_end_time) + self.assertEqual(self.reader.end_time, self.reader.nominal_end_time) + self.assertEqual(datetime(2006, 1, 1, 12, 30,), self.reader.end_time) + # test repeat cycle duration + self.assertEqual(15, self.reader._repeat_cycle_duration) + # Change the reducescan scenario to test the repeat cycle duration handling + self.reader.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1 + self.assertEqual(5, self.reader._repeat_cycle_duration) @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') @@ -244,7 +257,7 @@ def test_get_dataset_without_masking_bad_scan_lines(self, calibrate, parent_get_ expected = data.copy() expected['acq_time'] = ( 'y', - setup.get_acq_time_exp(self.start_time, self.nlines) + setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) xr.testing.assert_equal(res, expected) self.assert_attrs_equal( @@ -277,7 +290,7 @@ def test_get_raw_mda(self): def test_satpos_no_valid_orbit_polynomial(self): """Test satellite position if there is no valid orbit polynomial.""" reader = setup.get_fake_file_handler( - start_time=self.start_time, + observation_start_time=self.observation_start_time, nlines=self.nlines, ncols=self.ncols, projection_longitude=self.projection_longitude, @@ -295,7 +308,7 @@ class TestHRITMSGPrologueFileHandler(unittest.TestCase): def setUp(self, *mocks): """Set up the test case.""" fh = setup.get_fake_file_handler( - start_time=datetime(2016, 3, 3, 0, 0), + observation_start_time=datetime(2016, 3, 3, 0, 0), nlines=464, ncols=3712, ) @@ -403,7 +416,12 @@ def file_handler(self): 'Level15ImageProduction': { 'PlannedChanProcessing': self.radiance_types } - } + }, + 'ImageAcquisition': { + 'PlannedAcquisitionTime': { + 'TrueRepeatCycleStart': self.scan_time, + } + } } epilog = { 'ImageProductionStats': { @@ -479,7 +497,6 @@ def test_calibrate( fh.channel_name = channel fh.calib_mode = calib_mode fh.ext_calib_coefs = external_coefs - res = fh.calibrate(counts, calibration) xr.testing.assert_allclose(res, expected) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py index a9ca6377c4..841d45b943 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py @@ -46,12 +46,12 @@ def new_read_prologue(self): return new_read_prologue -def get_fake_file_handler(start_time, nlines, ncols, projection_longitude=0, +def get_fake_file_handler(observation_start_time, nlines, ncols, projection_longitude=0, orbit_polynomials=ORBIT_POLYNOMIALS): """Create a mocked SEVIRI HRIT file handler.""" prologue = get_fake_prologue(projection_longitude, orbit_polynomials) - mda = get_fake_mda(nlines=nlines, ncols=ncols, start_time=start_time) - filename_info = 
get_fake_filename_info(start_time)
+ mda = get_fake_mda(nlines=nlines, ncols=ncols, start_time=observation_start_time)
+ filename_info = get_fake_filename_info(observation_start_time)
 epilogue = get_fake_epilogue()
 m = mock.mock_open()
@@ -141,6 +141,11 @@ def get_fake_epilogue():
 'UpperNorthLineActual': 11136,
 'UpperEastColumnActual': 1805,
 'UpperWestColumnActual': 7372
+ },
+ 'ActualScanningSummary': {
+ 'ReducedScan': 0,
+ 'ForwardScanStart': datetime(2006, 1, 1, 12, 15, 9, 304888),
+ 'ForwardScanEnd': datetime(2006, 1, 1, 12, 27, 39, 0)
 }
 }
 }
@@ -194,7 +199,8 @@ def get_acq_time_cds(start_time, nlines):
 dtype=[('days', '>u2'), ('milliseconds', '>u4')]
 )
 tline['days'][1:-1] = days_since_1958 * np.ones(nlines - 2)
- tline['milliseconds'][1:-1] = np.arange(nlines - 2)
+ offset_milliseconds = (start_time - start_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds() * 1000
+ tline['milliseconds'][1:-1] = np.arange(nlines - 2) + offset_milliseconds
 return tline
@@ -225,6 +231,12 @@ def get_attrs_exp(projection_longitude=0.0):
 'satellite_actual_latitude': -0.5711243456528018,
 'satellite_actual_altitude': 35783296.150123544},
 'georef_offset_corrected': True,
- 'nominal_start_time': datetime(2006, 1, 1, 12, 15, 9, 304888),
- 'nominal_end_time': datetime(2006, 1, 1, 12, 30, 0, 0)
+ 'nominal_start_time': (datetime(2006, 1, 1, 12, 15),),
+ 'nominal_end_time': (datetime(2006, 1, 1, 12, 30),),
+ 'time_parameters': {
+ 'nominal_start_time': datetime(2006, 1, 1, 12, 15),
+ 'nominal_end_time': datetime(2006, 1, 1, 12, 30),
+ 'observation_start_time': datetime(2006, 1, 1, 12, 15, 9, 304888),
+ 'observation_end_time': datetime(2006, 1, 1, 12, 27, 39, 0)
+ }
 }
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index a1a73179e2..e344d09ff9 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -30,8 +30,15 @@
 import pytest
 import xarray as xr
-from satpy.readers.eum_base import time_cds_short
-from satpy.readers.seviri_l1b_native import ImageBoundaries, NativeMSGFileHandler, Padder, get_available_channels
+from satpy.readers.eum_base import recarray2dict, time_cds_short
+from satpy.readers.seviri_l1b_native import (
+ ASCII_STARTSWITH,
+ ImageBoundaries,
+ NativeMSGFileHandler,
+ Padder,
+ get_available_channels,
+ has_archive_header,
+)
 from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS, ORBIT_POLYNOMIALS_INVALID
 from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase
 from satpy.tests.utils import assert_attrs_equal, make_dataid
@@ -495,497 +502,276 @@
 }
-class TestNativeMSGFileHandler(unittest.TestCase):
- """Test the NativeMSGFileHandler."""
-
- def test_get_available_channels(self):
- """Test the derivation of the available channel list."""
- available_chs = get_available_channels(TEST1_HEADER_CHNLIST)
- trues = ['WV_062', 'WV_073', 'IR_108', 'VIS006', 'VIS008', 'IR_120']
- for bandname in AVAILABLE_CHANNELS:
- if bandname in trues:
- self.assertTrue(available_chs[bandname])
- else:
- self.assertFalse(available_chs[bandname])
+def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, 
good_qual='OK'): + """Create test header for SEVIRI L1.5 product. - available_chs = get_available_channels(TEST3_HEADER_CHNLIST) - for bandname in AVAILABLE_CHANNELS: - self.assertTrue(available_chs[bandname]) - - -class TestNativeMSGArea(unittest.TestCase): - """Test NativeMSGFileHandler.get_area_extent. - - The expected results have been verified by manually - inspecting the output of geoferenced imagery. + Header includes mandatory attributes for NativeMSGFileHandler.get_area_extent """ - - @staticmethod - def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, good_qual='OK'): - """Create mocked NativeMSGFileHandler. - - Contains sufficient attributes for NativeMSGFileHandler.get_area_extent to be able to execute. - """ - if dataset_id['name'] == 'HRV': - reference_grid = 'ReferenceGridHRV' - column_dir_grid_step = 1.0001343488693237 - line_dir_grid_step = 1.0001343488693237 - else: - reference_grid = 'ReferenceGridVIS_IR' - column_dir_grid_step = 3.0004031658172607 - line_dir_grid_step = 3.0004031658172607 - - if is_full_disk: - north = 3712 - east = 1 - west = 3712 - south = 1 - n_visir_cols = 3712 - n_visir_lines = 3712 - n_hrv_cols = 11136 - n_hrv_lines = 11136 - ssp_lon = 0 - elif is_rapid_scan: - north = 3712 - east = 1 - west = 3712 - south = 2321 - n_visir_cols = 3712 - n_visir_lines = 1392 - n_hrv_cols = 11136 - n_hrv_lines = 4176 - ssp_lon = 9.5 - else: - north = 3574 - east = 78 - west = 2591 - south = 1746 - n_visir_cols = 2516 - n_visir_lines = north - south + 1 - n_hrv_cols = n_visir_cols * 3 - n_hrv_lines = n_visir_lines * 3 - ssp_lon = 0 - header = { - '15_MAIN_PRODUCT_HEADER': { - 'QQOV': {'Name': 'QQOV', - 'Value': good_qual} - }, - '15_DATA_HEADER': { - 'ImageDescription': { - reference_grid: { - 'ColumnDirGridStep': column_dir_grid_step, - 'LineDirGridStep': line_dir_grid_step, - 'GridOrigin': 2, # south-east corner - }, - 'ProjectionDescription': { - 'LongitudeOfSSP': ssp_lon - } - }, - 'GeometricProcessing': { - 'EarthModel': { - 'TypeOfEarthModel': earth_model, - 'EquatorialRadius': 6378169.0, - 'NorthPolarRadius': 6356583.800000001, - 'SouthPolarRadius': 6356583.800000001, - } + if dataset_id['name'] == 'HRV': + reference_grid = 'ReferenceGridHRV' + column_dir_grid_step = 1.0001343488693237 + line_dir_grid_step = 1.0001343488693237 + else: + reference_grid = 'ReferenceGridVIS_IR' + column_dir_grid_step = 3.0004031658172607 + line_dir_grid_step = 3.0004031658172607 + + if is_full_disk: + north = 3712 + east = 1 + west = 3712 + south = 1 + n_visir_cols = 3712 + n_visir_lines = 3712 + n_hrv_cols = 11136 + n_hrv_lines = 11136 + ssp_lon = 0 + elif is_rapid_scan: + north = 3712 + east = 1 + west = 3712 + south = 2321 + n_visir_cols = 3712 + n_visir_lines = 1392 + n_hrv_cols = 11136 + n_hrv_lines = 4176 + ssp_lon = 9.5 + else: + north = 3574 + east = 78 + west = 2591 + south = 1746 + n_visir_cols = 2516 + n_visir_lines = north - south + 1 + n_hrv_cols = n_visir_cols * 3 + n_hrv_lines = n_visir_lines * 3 + ssp_lon = 0 + header = { + '15_MAIN_PRODUCT_HEADER': { + 'QQOV': {'Name': 'QQOV', + 'Value': good_qual} + }, + '15_DATA_HEADER': { + 'ImageDescription': { + reference_grid: { + 'ColumnDirGridStep': column_dir_grid_step, + 'LineDirGridStep': line_dir_grid_step, + 'GridOrigin': 2, # south-east corner }, - 'SatelliteStatus': { - 'SatelliteDefinition': { - 'SatelliteId': 324 - } + 'ProjectionDescription': { + 'LongitudeOfSSP': ssp_lon } }, - '15_SECONDARY_PRODUCT_HEADER': { - 'NorthLineSelectedRectangle': {'Value': north}, - 
'EastColumnSelectedRectangle': {'Value': east}, - 'WestColumnSelectedRectangle': {'Value': west}, - 'SouthLineSelectedRectangle': {'Value': south}, - 'SelectedBandIDs': {'Value': 'xxxxxxxxxxxx'}, - 'NumberColumnsVISIR': {'Value': n_visir_cols}, - 'NumberLinesVISIR': {'Value': n_visir_lines}, - 'NumberColumnsHRV': {'Value': n_hrv_cols}, - 'NumberLinesHRV': {'Value': n_hrv_lines}, - } - - } - - return header - - @staticmethod - def create_test_trailer(is_rapid_scan): - """Create Test Trailer. - - Mocked Trailer with sufficient attributes for - NativeMSGFileHandler.get_area_extent to be able to execute. - """ - trailer = { - '15TRAILER': { - 'ImageProductionStats': { - 'ActualL15CoverageHRV': { - 'UpperNorthLineActual': 11136, - 'UpperWestColumnActual': 7533, - 'UpperSouthLineActual': 8193, - 'UpperEastColumnActual': 1966, - 'LowerNorthLineActual': 8192, - 'LowerWestColumnActual': 5568, - 'LowerSouthLineActual': 1, - 'LowerEastColumnActual': 1 - }, - 'ActualScanningSummary': { - 'ReducedScan': is_rapid_scan - } + 'GeometricProcessing': { + 'EarthModel': { + 'TypeOfEarthModel': earth_model, + 'EquatorialRadius': 6378169.0, + 'NorthPolarRadius': 6356583.800000001, + 'SouthPolarRadius': 6356583.800000001, + } + }, + 'SatelliteStatus': { + 'SatelliteDefinition': { + 'SatelliteId': 324 } } + }, + '15_SECONDARY_PRODUCT_HEADER': { + 'NorthLineSelectedRectangle': {'Value': north}, + 'EastColumnSelectedRectangle': {'Value': east}, + 'WestColumnSelectedRectangle': {'Value': west}, + 'SouthLineSelectedRectangle': {'Value': south}, + 'SelectedBandIDs': {'Value': 'xxxxxxxxxxxx'}, + 'NumberColumnsVISIR': {'Value': n_visir_cols}, + 'NumberLinesVISIR': {'Value': n_visir_lines}, + 'NumberColumnsHRV': {'Value': n_hrv_cols}, + 'NumberLinesHRV': {'Value': n_hrv_lines}, } - return trailer - - def prepare_area_defs(self, test_dict): - """Prepare calculated and expected area definitions for equal checking.""" - earth_model = test_dict['earth_model'] - dataset_id = test_dict['dataset_id'] - is_full_disk = test_dict['is_full_disk'] - is_rapid_scan = test_dict['is_rapid_scan'] - fill_disk = test_dict['fill_disk'] - header = self.create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) - trailer = self.create_test_trailer(is_rapid_scan) - expected_area_def = test_dict['expected_area_def'] - - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ - mock.patch( - 'satpy.readers.seviri_l1b_native.NativeMSGFileHandler._has_archive_header' - ) as _has_archive_header: - _has_archive_header.return_value = True - fromfile.return_value = header - recarray2dict.side_effect = (lambda x: x) - _get_memmap.return_value = np.arange(3) - fh = NativeMSGFileHandler(None, {}, None) - fh.fill_disk = fill_disk - fh.header = header - fh.trailer = trailer - fh.image_boundaries = ImageBoundaries(header, trailer, fh.mda) - calc_area_def = fh.get_area_def(dataset_id) - - return (calc_area_def, expected_area_def) - - # Earth model 1 tests - def test_earthmodel1_visir_fulldisk(self): - """Test the VISIR FES with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - 
self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel1_hrv_fulldisk(self): - """Test the HRV FES with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK - ) - np.testing.assert_allclose(np.array(calculated.defs[0].area_extent), - np.array(expected['Area extent 0'])) - np.testing.assert_allclose(np.array(calculated.defs[1].area_extent), - np.array(expected['Area extent 1'])) - - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.defs[0].area_id, expected['Area ID']) - self.assertEqual(calculated.defs[1].area_id, expected['Area ID']) - - def test_earthmodel1_hrv_fulldisk_fill(self): - """Test the HRV FES padded to fulldisk with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel1_visir_rapidscan(self): - """Test the VISIR RSS with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN - ) - - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) + } - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) + return header - def test_earthmodel1_visir_rapidscan_fill(self): - """Test the VISIR RSS padded to fulldisk with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) +def create_test_trailer(is_rapid_scan): + """Create test trailer for SEVIRI L1.5 product. 
- self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel1_hrv_rapidscan(self): - """Test the HRV RSS with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN - ) + Trailer includes mandatory attributes for NativeMSGFileHandler.get_area_extent + """ + trailer = { + '15TRAILER': { + 'ImageProductionStats': { + 'ActualL15CoverageHRV': { + 'UpperNorthLineActual': 11136, + 'UpperWestColumnActual': 7533, + 'UpperSouthLineActual': 8193, + 'UpperEastColumnActual': 1966, + 'LowerNorthLineActual': 8192, + 'LowerWestColumnActual': 5568, + 'LowerSouthLineActual': 1, + 'LowerEastColumnActual': 1 + }, + 'ActualScanningSummary': { + 'ReducedScan': is_rapid_scan + } + } + } + } - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) + return trailer - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - def test_earthmodel1_hrv_rapidscan_fill(self): - """Test the HRV RSS padded to fulldisk with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN_FILL - ) +def prepare_area_definitions(test_dict): + """Prepare calculated and expected area definitions for equal checking.""" + earth_model = test_dict['earth_model'] + dataset_id = test_dict['dataset_id'] + is_full_disk = test_dict['is_full_disk'] + is_rapid_scan = test_dict['is_rapid_scan'] + fill_disk = test_dict['fill_disk'] + header = create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) + trailer = create_test_trailer(is_rapid_scan) + expected_area_def = test_dict['expected_area_def'] - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) + with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ + mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ + mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ + mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + mock.patch( + 'satpy.readers.seviri_l1b_native.has_archive_header' + ) as has_archive_header: + has_archive_header.return_value = True + fromfile.return_value = header + recarray2dict.side_effect = (lambda x: x) + _get_memmap.return_value = np.arange(3) + fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) + fh.fill_disk = fill_disk + fh.header = header + fh.trailer = trailer + fh.image_boundaries = ImageBoundaries(header, trailer, fh.mda) + actual_area_def = fh.get_area_def(dataset_id) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) + return actual_area_def, expected_area_def - def test_earthmodel1_visir_roi(self): - """Test the VISIR ROI with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of 
rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel1_visir_roi_fill(self): - """Test the VISIR ROI padded to fulldisk with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel1_hrv_roi(self): - """Test the HRV ROI with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel1_hrv_roi_fill(self): - """Test the HRV ROI padded to fulldisk with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - # Earth model 2 tests - def test_earthmodel2_visir_fulldisk(self): - """Test the VISIR FES with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel2_hrv_fulldisk(self): - """Test the HRV FES with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK - ) - np.testing.assert_allclose(np.array(calculated.defs[0].area_extent), - np.array(expected['Area extent 0'])) - np.testing.assert_allclose(np.array(calculated.defs[1].area_extent), - np.array(expected['Area extent 1'])) - - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.defs[0].area_id, expected['Area ID']) - self.assertEqual(calculated.defs[1].area_id, expected['Area ID']) - - def test_earthmodel2_hrv_fulldisk_fill(self): - """Test the HRV FES padded to fulldisk with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel2_visir_rapidscan(self): - """Test the VISIR RSS with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area 
extent']))
-        self.assertEqual(calculated.width, expected['Number of columns'])
-        self.assertEqual(calculated.height, expected['Number of rows'])
-        self.assertEqual(calculated.area_id, expected['Area ID'])

+@pytest.mark.parametrize(
+    "actual, expected",
+    (
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN_FILL)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN_FILL)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI_FILL)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI_FILL)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK_FILL)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN_FILL)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN_FILL)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL)),
+    )
+)
+def test_area_definitions(actual, expected):
+    """Test area definitions with only one area."""
+    np.testing.assert_allclose(np.array(actual.area_extent),
+                               np.array(expected['Area extent']))
+    assert actual.width == expected['Number of columns']
+    assert actual.height == expected['Number of rows']
+    assert actual.area_id == expected['Area ID']

-    def test_earthmodel2_visir_rapidscan_fill(self):
-        """Test the VISIR RSS padded to fulldisk with the EarthModel 2."""
-        calculated, expected = self.prepare_area_defs(
-            TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN_FILL
-        )
-        np.testing.assert_allclose(np.array(calculated.area_extent),
-                                   np.array(expected['Area extent']))
-        self.assertEqual(calculated.width, expected['Number of columns'])
-        self.assertEqual(calculated.height, expected['Number of rows'])
-        self.assertEqual(calculated.area_id, expected['Area ID'])

+@pytest.mark.parametrize(
+    "actual, expected",
+    (
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK)),
+        (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK)),
+    )
+)
+def test_stacked_area_definitions(actual, expected):
+    """Test area definitions with stacked areas."""
+    np.testing.assert_allclose(np.array(actual.defs[0].area_extent),
+                               np.array(expected['Area extent 0']))
+    np.testing.assert_allclose(np.array(actual.defs[1].area_extent),
+                               np.array(expected['Area extent 1']))
+    assert actual.width == expected['Number of columns']
+    assert actual.height == expected['Number of rows']
+    assert actual.defs[0].area_id == expected['Area ID']
+    assert actual.defs[1].area_id == expected['Area ID']
+
+
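The last two checks in test_stacked_area_definitions use explicit `==` comparisons. This matters: a bare `assert first, second` statement treats the second expression as the failure message, so it passes for any truthy first expression and would never actually compare the area IDs. A minimal illustration of the pitfall (the ID value here is just an example):

    area_id = 'msg_seviri_fes_3km'
    # Always passes: the second expression is only the assertion message.
    assert area_id, 'msg_seviri_rss_3km'
    # An explicit comparison is required for the check to compare anything.
    assert area_id == 'msg_seviri_fes_3km'
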
+def prepare_is_roi(test_dict):
+    """Prepare calculated and expected check for region of interest data for equal checking."""
+    earth_model = 2
+    dataset_id = make_dataid(name='VIS006')
+    is_full_disk = test_dict['is_full_disk']
+    is_rapid_scan = test_dict['is_rapid_scan']
+    header = create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan)
+    trailer = create_test_trailer(is_rapid_scan)
+    expected = test_dict['is_roi']

-    def test_earthmodel2_hrv_rapidscan(self):
-        """Test the HRV RSS with the EarthModel 2."""
-        calculated, expected = self.prepare_area_defs(
-            TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN
-        )
-        np.testing.assert_allclose(np.array(calculated.area_extent),
-                                   np.array(expected['Area extent']))

+    with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \
+            mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \
+            mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \
+            mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \
+            mock.patch(
+                'satpy.readers.seviri_l1b_native.has_archive_header'
+            ) as has_archive_header:
+        has_archive_header.return_value = True
+        fromfile.return_value = header
+        recarray2dict.side_effect = (lambda x: x)
+        _get_memmap.return_value = np.arange(3)
+        fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None)
+        fh.header = header
+        fh.trailer = trailer
+        actual = fh.is_roi()

-        self.assertEqual(calculated.width, expected['Number of columns'])
-        self.assertEqual(calculated.height, expected['Number of rows'])
-        self.assertEqual(calculated.area_id, expected['Area ID'])

+    return actual, expected

-    def test_earthmodel2_hrv_rapidscan_fill(self):
-        """Test the HRV RSS padded to fulldisk with the EarthModel 2."""
-        calculated, expected = self.prepare_area_defs(
-            TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN_FILL
-        )
-        np.testing.assert_allclose(np.array(calculated.area_extent),
-                                   np.array(expected['Area extent']))
-        self.assertEqual(calculated.width, expected['Number of columns'])
-        self.assertEqual(calculated.height, expected['Number of rows'])
-        self.assertEqual(calculated.area_id, expected['Area ID'])

+@pytest.mark.parametrize(
+    "actual, expected",
+    (
+        (prepare_is_roi(TEST_IS_ROI_FULLDISK)),
+        (prepare_is_roi(TEST_IS_ROI_RAPIDSCAN)),
+        (prepare_is_roi(TEST_IS_ROI_ROI)),
+    )
+)
+def test_is_roi(actual, expected):
+    """Test whether the given data cover a region of interest."""
+    assert actual == expected

-    def test_earthmodel2_visir_roi(self):
-        """Test the VISIR ROI with the EarthModel 2."""
-        calculated, expected = self.prepare_area_defs(
-            TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI
-        )
-        np.testing.assert_allclose(np.array(calculated.area_extent),
-                                   np.array(expected['Area extent']))
-        self.assertEqual(calculated.width, expected['Number of columns'])
-        self.assertEqual(calculated.height, expected['Number of rows'])
-        self.assertEqual(calculated.area_id, expected['Area ID'])
-
-    def test_earthmodel2_visir_roi_fill(self):
-        """Test the VISIR ROI padded to fulldisk with the EarthModel 2."""
-        calculated, expected = self.prepare_area_defs(
-            TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL
-        )
-        np.testing.assert_allclose(np.array(calculated.area_extent),
-                                   np.array(expected['Area extent']))
-        self.assertEqual(calculated.width, expected['Number of columns'])
-        self.assertEqual(calculated.height, expected['Number of rows'])
-        self.assertEqual(calculated.area_id, expected['Area ID'])
-
-    def test_earthmodel2_hrv_roi(self):
-        """Test the HRV ROI with the EarthModel 2."""
-        calculated, expected = self.prepare_area_defs(
-            TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI
-        )
-        np.testing.assert_allclose(np.array(calculated.area_extent),
-
np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel2_hrv_roi_fill(self): - """Test the HRV ROI padded to fulldisk with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - # Test check for Region Of Interest (ROI) data - def prepare_is_roi(self, test_dict): - """Prepare calculated and expected check for region of interest data for equal checking.""" - earth_model = 2 - dataset_id = make_dataid(name='VIS006') - is_full_disk = test_dict['is_full_disk'] - is_rapid_scan = test_dict['is_rapid_scan'] - header = self.create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) - trailer = self.create_test_trailer(is_rapid_scan) - expected_is_roi = test_dict['is_roi'] - - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ - mock.patch( - 'satpy.readers.seviri_l1b_native.NativeMSGFileHandler._has_archive_header' - ) as _has_archive_header: - _has_archive_header.return_value = True - fromfile.return_value = header - recarray2dict.side_effect = (lambda x: x) - _get_memmap.return_value = np.arange(3) - fh = NativeMSGFileHandler(None, {}, None) - fh.header = header - fh.trailer = trailer - calc_is_roi = fh.is_roi() - return (calc_is_roi, expected_is_roi) +class TestNativeMSGFileHandler(unittest.TestCase): + """Test the NativeMSGFileHandler.""" - def test_is_roi_fulldisk(self): - """Test check for region of interest with FES data.""" - calculated, expected = self.prepare_is_roi(TEST_IS_ROI_FULLDISK) - self.assertEqual(calculated, expected) + def test_get_available_channels(self): + """Test the derivation of the available channel list.""" + available_chs = get_available_channels(TEST1_HEADER_CHNLIST) + trues = ('WV_062', 'WV_073', 'IR_108', 'VIS006', 'VIS008', 'IR_120') + for bandname in AVAILABLE_CHANNELS: + if bandname in trues: + self.assertTrue(available_chs[bandname]) + else: + self.assertFalse(available_chs[bandname]) - def test_is_roi_rapidscan(self): - """Test check for region of interest with RSS data.""" - calculated, expected = self.prepare_is_roi(TEST_IS_ROI_RAPIDSCAN) - self.assertEqual(calculated, expected) + available_chs = get_available_channels(TEST2_HEADER_CHNLIST) + trues = ('VIS006', 'VIS008', 'IR_039', 'WV_062', 'WV_073', 'IR_087', 'HRV') + for bandname in AVAILABLE_CHANNELS: + if bandname in trues: + self.assertTrue(available_chs[bandname]) + else: + self.assertFalse(available_chs[bandname]) - def test_is_roi_roi(self): - """Test check for region of interest with ROI data.""" - calculated, expected = self.prepare_is_roi(TEST_IS_ROI_ROI) - self.assertEqual(calculated, expected) + available_chs = get_available_channels(TEST3_HEADER_CHNLIST) + for bandname in AVAILABLE_CHANNELS: + self.assertTrue(available_chs[bandname]) 
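One caveat with the refactored parametrizations above: prepare_area_definitions() and prepare_is_roi() are invoked inside the @pytest.mark.parametrize decorators, so every mocked file handler is constructed once when pytest collects the module, not inside the individual tests. A hedged sketch of an alternative that defers that work to the test body (same helpers and test dictionaries as above; the full case list is elided):

    @pytest.mark.parametrize("test_dict", [
        TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK,
        TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL,
        # ... remaining TEST_AREA_EXTENT_* cases ...
    ])
    def test_area_definitions(test_dict):
        actual, expected = prepare_area_definitions(test_dict)
        np.testing.assert_allclose(np.array(actual.area_extent),
                                   np.array(expected['Area extent']))
        assert actual.area_id == expected['Area ID']

With this variant, a failure inside the helper is reported against the single test case instead of aborting collection of the whole module.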
TEST_HEADER_CALIB = {
@@ -1035,7 +821,7 @@ def file_handler(self):
         header['15_DATA_HEADER'].update(TEST_HEADER_CALIB)
         with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__',
                         return_value=None):
-            fh = NativeMSGFileHandler()
+            fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None)
             fh.header = header
             fh.trailer = trailer
             fh.platform_id = self.platform_id
@@ -1043,7 +829,7 @@ def file_handler(self):

     @pytest.mark.parametrize(
         ('channel', 'calibration', 'calib_mode', 'use_ext_coefs'),
-        [
+        (
             # VIS channel, internal coefficients
             ('VIS006', 'counts', 'NOMINAL', False),
             ('VIS006', 'radiance', 'NOMINAL', False),
@@ -1069,7 +855,7 @@ def file_handler(self):
             # HRV channel, external coefficients (mode should have no effect)
             ('HRV', 'radiance', 'GSICS', True),
             ('HRV', 'reflectance', 'NOMINAL', True),
-        ]
+        )
     )
     def test_calibrate(
             self, file_handler, counts, channel, calibration, calib_mode,
@@ -1104,7 +890,8 @@ def file_handler(self):
                 'ImageProductionStats': {
                     'ActualScanningSummary': {
                         'ForwardScanStart': datetime(2006, 1, 1, 12, 15, 9, 304888),
-                        'ForwardScanEnd': datetime(2006, 1, 1, 12, 27, 9, 304888)
+                        'ForwardScanEnd': datetime(2006, 1, 1, 12, 27, 9, 304888),
+                        'ReducedScan': 0
                     }
                 }
             }
@@ -1127,7 +914,7 @@ def file_handler(self):
         data = self._fake_data()
         with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__',
                         return_value=None):
-            fh = NativeMSGFileHandler()
+            fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None)
             fh.header = header
             fh.trailer = trailer
             fh.mda = mda
@@ -1202,13 +989,30 @@ def test_get_dataset(self, file_handler):
             'wavelength': (1, 2, 3),
             'standard_name': 'counts'
         }
-        dataset = file_handler.get_dataset(dataset_id, dataset_info)
+        xarr = file_handler.get_dataset(dataset_id, dataset_info)
         expected = self._exp_data_array()
-        xr.testing.assert_equal(dataset, expected)
-        assert 'raw_metadata' not in dataset.attrs
+        xr.testing.assert_equal(xarr, expected)
+        assert 'raw_metadata' not in xarr.attrs
         assert file_handler.start_time == datetime(2006, 1, 1, 12, 15, 0)
         assert file_handler.end_time == datetime(2006, 1, 1, 12, 30, 0)
-        assert_attrs_equal(dataset.attrs, expected.attrs, tolerance=1e-4)
+        assert_attrs_equal(xarr.attrs, expected.attrs, tolerance=1e-4)
+
+    def test_time(self, file_handler):
+        """Test start/end nominal/observation time handling."""
+        assert datetime(2006, 1, 1, 12, 15, 9, 304888) == file_handler.observation_start_time
+        assert datetime(2006, 1, 1, 12, 15) == file_handler.start_time
+        assert file_handler.start_time == file_handler.nominal_start_time
+
+        assert datetime(2006, 1, 1, 12, 27, 9, 304888) == file_handler.observation_end_time
+        assert file_handler.end_time == file_handler.nominal_end_time
+        assert datetime(2006, 1, 1, 12, 30) == file_handler.end_time
+
+    def test_repeat_cycle_duration(self, file_handler):
+        """Test repeat cycle handling for FD or ReducedScan."""
+        assert 15 == file_handler._repeat_cycle_duration
+        # Switch to the ReducedScan scenario to test the repeat cycle duration handling
+        file_handler.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1
+        assert 5 == file_handler._repeat_cycle_duration

     @staticmethod
     def _exp_data_array():
@@ -1218,7 +1022,7 @@ def _exp_data_array():
                              [44., 192., 835., 527.],
                              [64., 273., 132., 788.]], dtype=np.float32),
-            dims=('y', 'x'),
+            dims=['y', 'x'],
             attrs={
                 'orbital_parameters': {
                     'satellite_actual_longitude': -3.55117540817073,
@@ -1263,8 +1067,8 @@ def
test_get_dataset_with_raw_metadata(self, file_handler): 'wavelength': (1, 2, 3), 'standard_name': 'counts' } - res = file_handler.get_dataset(dataset_id, dataset_info) - assert 'raw_metadata' in res.attrs + xarr = file_handler.get_dataset(dataset_id, dataset_info) + assert 'raw_metadata' in xarr.attrs def test_satpos_no_valid_orbit_polynomial(self, file_handler): """Test satellite position if there is no valid orbit polynomial.""" @@ -1281,8 +1085,8 @@ def test_satpos_no_valid_orbit_polynomial(self, file_handler): 'standard_name': 'counts' } with pytest.warns(UserWarning, match="No orbit polynomial"): - res = file_handler.get_dataset(dataset_id, dataset_info) - assert 'satellite_actual_longitude' not in res.attrs[ + xarr = file_handler.get_dataset(dataset_id, dataset_info) + assert 'satellite_actual_longitude' not in xarr.attrs[ 'orbital_parameters'] @@ -1303,7 +1107,7 @@ def prepare_padder(test_dict): padder._final_shape = final_shape calc_padded_data = padder.pad_data(dataset) - return (calc_padded_data, expected_padded_data) + return calc_padded_data, expected_padded_data def test_padder_rss_roi(self): """Test padder for RSS and ROI data (applies to both VISIR and HRV).""" @@ -1347,14 +1151,14 @@ def test_file_pattern(self, reader): @pytest.mark.parametrize( 'file_content,exp_header_size', - [ - (b'FormatName : NATIVE', 450400), # with ascii header + ( + (ASCII_STARTSWITH, 450400), # with ascii header (b'foobar', 445286), # without ascii header - ] + ) ) def test_header_type(file_content, exp_header_size): """Test identification of the file header type.""" - header = TestNativeMSGArea.create_test_header( + header = create_test_header( dataset_id=make_dataid(name='VIS006', resolution=3000), earth_model=1, is_full_disk=True, @@ -1370,21 +1174,21 @@ def test_header_type(file_content, exp_header_size): fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) _get_memmap.return_value = np.arange(3) - fh = NativeMSGFileHandler('myfile', {}, None) + fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) assert fh.header_type.itemsize == exp_header_size assert '15_SECONDARY_PRODUCT_HEADER' in fh.header def test_header_warning(): """Test warning is raised for NOK quality flag.""" - header_good = TestNativeMSGArea.create_test_header( + header_good = create_test_header( dataset_id=make_dataid(name='VIS006', resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0, good_qual='OK' ) - header_bad = TestNativeMSGArea.create_test_header( + header_bad = create_test_header( dataset_id=make_dataid(name='VIS006', resolution=3000), earth_model=1, is_full_disk=True, @@ -1396,7 +1200,7 @@ def test_header_warning(): mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ - mock.patch("builtins.open", mock.mock_open(read_data=b'FormatName : NATIVE')): + mock.patch("builtins.open", mock.mock_open(read_data=ASCII_STARTSWITH)): recarray2dict.side_effect = (lambda x: x) _get_memmap.return_value = np.arange(3) @@ -1405,8 +1209,46 @@ def test_header_warning(): fromfile.return_value = header_good with warnings.catch_warnings(): warnings.simplefilter("error") - NativeMSGFileHandler('myfile', {}, None) + NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fromfile.return_value = header_bad with pytest.warns(UserWarning, match=exp_warning): + 
NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None)
+
+    # check that without Main Header the code doesn't crash
+    header_missing = header_good.copy()
+    header_missing.pop('15_MAIN_PRODUCT_HEADER')
+    fromfile.return_value = header_missing
+    with warnings.catch_warnings():
+        warnings.simplefilter("error")
+        NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None)
+
+
+@pytest.mark.parametrize(
+    "starts_with, expected",
+    [
+        (ASCII_STARTSWITH, True),
+        (b'this_shall_fail', False)
+    ]
+)
+def test_has_archive_header(starts_with, expected):
+    """Test if the file includes an ASCII archive header."""
+    with mock.patch("builtins.open", mock.mock_open(read_data=starts_with)):
+        actual = has_archive_header('filename')
+        assert actual == expected
+
+
+def test_read_header():
+    """Test that reading the header returns it correctly converted to a dictionary."""
+    keys = ('SatelliteId', 'NominalLongitude', 'SatelliteStatus')
+    values = (324, 0.0, 1)
+    expected = dict(zip(keys, values))
+
+    types = (np.uint16, np.float32, np.uint8)
+    dtypes = np.dtype([(k, t) for k, t in zip(keys, types)])
+    hdr_data = np.array([values], dtype=dtypes)
+
+    with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile:
+        fromfile.return_value = hdr_data
+        actual = recarray2dict(hdr_data)
+        assert actual == expected
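test_read_header pins down what recarray2dict is expected to do with the flat, single-record structured array built here. A rough sketch of that flat case (simplified and hypothetical; the real helper imported by the native reader must also cope with the nested record types of the full SEVIRI header):

    import numpy as np

    def recarray2dict_flat(arr):
        # One dtype field -> one key, taking the single record's scalar value.
        return {name: arr[name][0].item() for name in arr.dtype.names}

    dtypes = np.dtype([('SatelliteId', np.uint16),
                       ('NominalLongitude', np.float32),
                       ('SatelliteStatus', np.uint8)])
    hdr_data = np.array([(324, 0.0, 1)], dtype=dtypes)
    assert recarray2dict_flat(hdr_data) == {'SatelliteId': 324,
                                            'NominalLongitude': 0.0,
                                            'SatelliteStatus': 1}
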
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py
index 39e4ad570e..f85e9f5aae 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py
@@ -155,6 +155,7 @@ def _get_fake_dataset(self, counts, h5netcdf):
                 'vis_ir_column_dir_grid_step': 3.0004032,
                 'vis_ir_line_dir_grid_step': 3.0004032,
                 'type_of_earth_model': '0x02',
+                'nominal_image_scanning': 'T',
             }
         )
@@ -323,6 +324,12 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_
                 'projection_latitude': 0.0,
                 'projection_altitude': 35785831.0
             },
+            'time_parameters': {
+                'nominal_start_time': datetime(2020, 1, 1, 0, 0),
+                'nominal_end_time': datetime(2020, 1, 1, 0, 0),
+                'observation_start_time': datetime(2020, 1, 1, 0, 0),
+                'observation_end_time': datetime(2020, 1, 1, 0, 0),
+            },
             'georef_offset_corrected': True,
             'platform_name': 'Meteosat-11',
             'sensor': 'seviri',
@@ -343,6 +350,24 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_
                 res.attrs.pop(key, None)
         assert_attrs_equal(res.attrs, expected.attrs, tolerance=1e-4)

+    def test_time(self, file_handler):
+        """Test start/end nominal/observation time handling."""
+        assert datetime(2020, 1, 1, 0, 0) == file_handler.observation_start_time
+        assert datetime(2020, 1, 1, 0, 0) == file_handler.start_time
+        assert file_handler.start_time == file_handler.nominal_start_time
+
+        assert datetime(2020, 1, 1, 0, 0) == file_handler.observation_end_time
+        assert file_handler.end_time == file_handler.nominal_end_time
+        assert datetime(2020, 1, 1, 0, 0) == file_handler.end_time
+
+    def test_repeat_cycle_duration(self, file_handler):
+        """Test repeat cycle handling for FD or ReducedScan."""
+        assert 15 == file_handler._repeat_cycle_duration
+        # Switch to the ReducedScan scenario to test the repeat cycle duration handling
+        file_handler.nc.attrs['nominal_image_scanning'] = ''
+        file_handler.nc.attrs['reduced_scanning'] = 'T'
+        assert 5 == file_handler._repeat_cycle_duration
+
     def test_satpos_no_valid_orbit_polynomial(self, file_handler):
         """Test satellite position if there is no valid orbit polynomial."""
         dataset_id = make_dataid(name='VIS006', calibration='counts')
diff --git a/satpy/tests/reader_tests/utils.py b/satpy/tests/reader_tests/utils.py
index dd5b09c86a..05e6d9cb18 100644
--- a/satpy/tests/reader_tests/utils.py
+++ b/satpy/tests/reader_tests/utils.py
@@ -17,6 +17,9 @@
 # satpy. If not, see <http://www.gnu.org/licenses/>.
 """Utilities for reader tests."""

+import inspect
+import os
+

 def default_attr_processor(root, attr):
     """Do not change the attribute."""
@@ -43,3 +46,39 @@ def fill_h5(root, contents, attr_processor=default_attr_processor):
         if "attrs" in val:
             for attr_name, attr_val in val["attrs"].items():
                 root[key].attrs[attr_name] = attr_processor(root, attr_val)
+
+
+def get_jit_methods(module):
+    """Get all jit-compiled methods in a module."""
+    res = {}
+    module_name = module.__name__
+    members = inspect.getmembers(module)
+    for member_name, obj in members:
+        if _is_jit_method(obj):
+            full_name = f"{module_name}.{member_name}"
+            res[full_name] = obj
+    return res
+
+
+def _is_jit_method(obj):
+    return hasattr(obj, "py_func")
+
+
+def skip_numba_unstable_if_missing():
+    """Determine if numba-based tests should be skipped during unstable CI tests.
+
+    If numba fails to import it could be because numba is not compatible with
+    a newer version of numpy. This is very likely to happen in the
+    unstable/experimental CI environment. This function returns ``True`` if
+    numba-based tests should be skipped, i.e. if ``numba`` could not
+    be imported *and* we're in the unstable environment. We determine if we're
+    in this CI environment by looking for the ``UNSTABLE="1"``
+    environment variable.
+
+    """
+    try:
+        import numba
+    except ImportError:
+        numba = None
+
+    return numba is None and os.environ.get("UNSTABLE", "0") in ("1", "true")
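skip_numba_unstable_if_missing() only computes the skip condition; wiring it into a test module is left to callers and is not shown in this diff. A plausible usage sketch (an assumption, not taken from satpy), skipping a whole module of jit-dependent tests:

    import pytest
    from satpy.tests.reader_tests.utils import skip_numba_unstable_if_missing

    # Skip everything in this module if numba failed to import while
    # running in the unstable CI environment (UNSTABLE=1).
    pytestmark = pytest.mark.skipif(
        skip_numba_unstable_if_missing(),
        reason="numba could not be imported in the unstable environment",
    )
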
diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py
index 0a15fd941f..c62ffcea1d 100644
--- a/satpy/tests/scene_tests/test_conversions.py
+++ b/satpy/tests/scene_tests/test_conversions.py
@@ -80,3 +80,87 @@ def test_geoviews_basic_with_swath(self):
         gv_obj = scn.to_geoviews()
         # we assume that if we got something back, geoviews can use it
         assert gv_obj is not None
+
+
+class TestToXarrayConversion:
+    """Test Scene.to_xarray() conversion."""
+
+    def test_with_empty_scene(self):
+        """Test converting empty Scene to xarray."""
+        scn = Scene()
+        ds = scn.to_xarray()
+        assert isinstance(ds, xr.Dataset)
+        assert len(ds.variables) == 0
+        assert len(ds.coords) == 0
+
+    @pytest.fixture
+    def single_area_scn(self):
+        """Define Scene with a single area."""
+        from pyresample.geometry import AreaDefinition
+
+        area = AreaDefinition('test', 'test', 'test',
+                              {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0},
+                              2, 2, [-200, -200, 200, 200])
+        data_array = xr.DataArray(da.zeros((2, 2), chunks=-1),
+                                  dims=('y', 'x'),
+                                  attrs={'start_time': datetime(2018, 1, 1), 'area': area})
+        scn = Scene()
+        scn['var1'] = data_array
+        return scn
+
+    @pytest.fixture
+    def multi_area_scn(self):
+        """Define Scene with multiple areas."""
+        from pyresample.geometry import AreaDefinition
+
+        area1 = AreaDefinition('test', 'test', 'test',
+                               {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0},
+                               2, 2, [-200, -200, 200, 200])
+        area2 = AreaDefinition('test', 'test', 'test',
+                               {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0},
+                               4, 4, [-200, -200, 200, 200])
+
+        data_array1 = xr.DataArray(da.zeros((2, 2), chunks=-1),
+                                   dims=('y', 'x'),
+                                   attrs={'start_time': datetime(2018, 1, 1), 'area': area1})
+        data_array2 = xr.DataArray(da.zeros((4, 4), chunks=-1),
+                                   dims=('y', 'x'),
+                                   attrs={'start_time': datetime(2018, 1, 1), 'area': area2})
+        scn = Scene()
+        scn['var1'] = data_array1
+        scn['var2'] = data_array2
+        return scn
+
+    def test_with_single_area_scene_type(self, single_area_scn):
+        """Test converting a single-area Scene to an xarray dataset."""
+        ds = single_area_scn.to_xarray()
+        assert isinstance(ds, xr.Dataset)
+        assert "var1" in ds.data_vars
+
+    def test_include_lonlats_true(self, single_area_scn):
+        """Test include lonlats."""
+        ds = single_area_scn.to_xarray(include_lonlats=True)
+        assert "latitude" in ds.coords
+        assert "longitude" in ds.coords
+
+    def test_include_lonlats_false(self, single_area_scn):
+        """Test exclude lonlats."""
+        ds = single_area_scn.to_xarray(include_lonlats=False)
+        assert "latitude" not in ds.coords
+        assert "longitude" not in ds.coords
+
+    def test_dataset_string_accepted(self, single_area_scn):
+        """Test that a dataset string is accepted."""
+        ds = single_area_scn.to_xarray(datasets="var1")
+        assert isinstance(ds, xr.Dataset)
+
+    def test_wrong_dataset_key(self, single_area_scn):
+        """Test that a KeyError is raised for a non-existent dataset."""
+        with pytest.raises(KeyError):
+            _ = single_area_scn.to_xarray(datasets="var2")
+
+    def test_to_xarray_with_multiple_area_scene(self, multi_area_scn):
+        """Test converting a multiple-area Scene to xarray."""
+        # TODO: in future adapt for DataTree implementation
+        with pytest.raises(ValueError):
+            _ = multi_area_scn.to_xarray()
diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py
index 19268f9e19..39f9a50092 100644
--- a/satpy/tests/scene_tests/test_resampling.py
+++ b/satpy/tests/scene_tests/test_resampling.py
@@ -560,6 +560,17 @@ def test_aggregate(self):
         expected_aggregated_shape = (y_size / 2, x_size / 2)
         self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size)

+    def test_custom_aggregate(self):
+        """Test the aggregate method with a custom function."""
+        x_size = 3712
+        y_size = 3712
+
+        scene1 = self._create_test_data(x_size, y_size)
+
+        scene2 = scene1.aggregate(func=np.sum, x=2, y=2)
+        expected_aggregated_shape = (y_size / 2, x_size / 2)
+        self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size)
+
     @staticmethod
     def _create_test_data(x_size, y_size):
         from pyresample.geometry import AreaDefinition
diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py
index 1749013837..f27c73d849 100644
--- a/satpy/tests/test_composites.py
+++ b/satpy/tests/test_composites.py
@@ -129,10 +129,10 @@ def test_nondimensional_coords(self):
         self.assertNotIn('acq_time', ret_datasets[0].coords)

-class TestRatioSharpenedCompositors(unittest.TestCase):
+class TestRatioSharpenedCompositors:
     """Test RatioSharpenedRGB and SelfSharpenedRGB compositors."""

-    def setUp(self):
+    def setup_method(self):
         """Create test data."""
         from pyresample.geometry import AreaDefinition
         area = AreaDefinition('test', 'test', 'test',
@@ -151,11 +151,13 @@ def setUp(self):
                            attrs=attrs, dims=('y', 'x'),
                            coords={'y': [0, 1], 'x': [0, 1]})
         self.ds1 = ds1
+
         ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2,
                            attrs=attrs, dims=('y', 'x'),
                            coords={'y': [0, 1], 'x': [0, 1]})
         ds2.attrs['name'] += '2'
         self.ds2 = ds2
+
         ds3 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 3,
                            attrs=attrs, dims=('y', 'x'),
                            coords={'y': [0, 1], 'x': [0, 1]})
@@ -173,83 +175,130 @@ def setUp(self):
         self.ds4 = ds4

         # high resolution version - but too big
-        ds4 =
xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64), - attrs=attrs.copy(), dims=('y', 'x'), - coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]}) - ds4.attrs['name'] += '4' - ds4.attrs['resolution'] = 500 - ds4.attrs['rows_per_scan'] = 1 - ds4.attrs['area'] = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 4, 4, - (-2000, -2000, 2000, 2000)) - self.ds4_big = ds4 + ds4_big = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64), + attrs=attrs.copy(), dims=('y', 'x'), + coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]}) + ds4_big.attrs['name'] += '4' + ds4_big.attrs['resolution'] = 500 + ds4_big.attrs['rows_per_scan'] = 1 + ds4_big.attrs['area'] = AreaDefinition('test', 'test', 'test', + {'proj': 'merc'}, 4, 4, + (-2000, -2000, 2000, 2000)) + self.ds4_big = ds4_big - def test_bad_color(self): + @pytest.mark.parametrize( + "init_kwargs", + [ + {'high_resolution_band': "bad", 'neutral_resolution_band': "red"}, + {'high_resolution_band': "red", 'neutral_resolution_band': "bad"} + ] + ) + def test_bad_colors(self, init_kwargs): """Test that only valid band colors can be provided.""" from satpy.composites import RatioSharpenedRGB - self.assertRaises(ValueError, RatioSharpenedRGB, name='true_color', high_resolution_band='bad') + with pytest.raises(ValueError): + RatioSharpenedRGB(name='true_color', **init_kwargs) def test_match_data_arrays(self): """Test that all areas have to be the same resolution.""" from satpy.composites import IncompatibleAreas, RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color') - self.assertRaises(IncompatibleAreas, comp, (self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4_big,)) + with pytest.raises(IncompatibleAreas): + comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4_big,)) def test_more_than_three_datasets(self): """Test that only 3 datasets can be passed.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color') - self.assertRaises(ValueError, comp, (self.ds1, self.ds2, self.ds3, self.ds1), - optional_datasets=(self.ds4_big,)) + with pytest.raises(ValueError): + comp((self.ds1, self.ds2, self.ds3, self.ds1), optional_datasets=(self.ds4_big,)) + + def test_self_sharpened_no_high_res(self): + """Test for exception when no high_res band is specified.""" + from satpy.composites import SelfSharpenedRGB + comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) + with pytest.raises(ValueError): + comp((self.ds1, self.ds2, self.ds3)) def test_basic_no_high_res(self): """Test that three datasets can be passed without optional high res.""" from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color') + comp = RatioSharpenedRGB(name="true_color") res = comp((self.ds1, self.ds2, self.ds3)) - self.assertEqual(res.shape, (3, 2, 2)) + assert res.shape == (3, 2, 2) def test_basic_no_sharpen(self): """Test that color None does no sharpening.""" from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color', high_resolution_band=None) + comp = RatioSharpenedRGB(name="true_color", high_resolution_band=None) res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) - self.assertEqual(res.shape, (3, 2, 2)) + assert res.shape == (3, 2, 2) - def test_basic_red(self): - """Test that basic high resolution red can be passed.""" + @pytest.mark.parametrize( + ("high_resolution_band", "neutral_resolution_band", "exp_r", "exp_g", "exp_b"), + [ + ("red", None, + np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64), + 
np.array([[0.6, 0.6], [np.nan, 3.0]], dtype=np.float64), + np.array([[0.8, 0.8], [np.nan, 4.0]], dtype=np.float64)), + ("red", "green", + np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64), + np.array([[3.0, 3.0], [np.nan, 3.0]], dtype=np.float64), + np.array([[0.8, 0.8], [np.nan, 4.0]], dtype=np.float64)), + ("green", None, + np.array([[5 / 3, 5 / 3], [np.nan, 0.0]], dtype=np.float64), + np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64), + np.array([[4 / 3, 4 / 3], [np.nan, 4 / 3]], dtype=np.float64)), + ("green", "blue", + np.array([[5 / 3, 5 / 3], [np.nan, 0.0]], dtype=np.float64), + np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64), + np.array([[4.0, 4.0], [np.nan, 4.0]], dtype=np.float64)), + ("blue", None, + np.array([[1.25, 1.25], [np.nan, 0.0]], dtype=np.float64), + np.array([[0.75, 0.75], [np.nan, 0.75]], dtype=np.float64), + np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)), + ("blue", "red", + np.array([[5.0, 5.0], [np.nan, 0.0]], dtype=np.float64), + np.array([[0.75, 0.75], [np.nan, 0.75]], dtype=np.float64), + np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)) + ] + ) + def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, exp_r, exp_g, exp_b): + """Test RatioSharpenedRGB by different groups of high_resolution_band and neutral_resolution_band.""" from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color') + comp = RatioSharpenedRGB(name='true_color', high_resolution_band=high_resolution_band, + neutral_resolution_band=neutral_resolution_band) res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) - res = res.values - self.assertEqual(res.shape, (3, 2, 2)) - np.testing.assert_allclose(res[0], self.ds4.values) - np.testing.assert_allclose(res[1], np.array([[0.6, 0.6], [np.nan, 3.0]], dtype=np.float64)) - np.testing.assert_allclose(res[2], np.array([[0.8, 0.8], [np.nan, 4.0]], dtype=np.float64)) - def test_self_sharpened_no_high_res(self): - """Test for exception when no high res band is specified.""" - from satpy.composites import SelfSharpenedRGB - comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) - self.assertRaises(ValueError, comp, (self.ds1, self.ds2, self.ds3)) + assert "units" not in res.attrs + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + + data = res.values + np.testing.assert_allclose(data[0], exp_r, rtol=1e-5) + np.testing.assert_allclose(data[1], exp_g, rtol=1e-5) + np.testing.assert_allclose(data[2], exp_b, rtol=1e-5) - def test_self_sharpened_basic(self): + @pytest.mark.parametrize( + ("exp_shape", "exp_r", "exp_g", "exp_b"), + [ + ((3, 2, 2), + np.array([[5.0, 5.0], [5.0, 0]], dtype=np.float64), + np.array([[4.0, 4.0], [4.0, 0]], dtype=np.float64), + np.array([[16 / 3, 16 / 3], [16 / 3, 0]], dtype=np.float64)) + ] + ) + def test_self_sharpened_basic(self, exp_shape, exp_r, exp_g, exp_b): """Test that three datasets can be passed without optional high res.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name='true_color') res = comp((self.ds1, self.ds2, self.ds3)) - res = res.values - self.assertEqual(res.shape, (3, 2, 2)) - np.testing.assert_allclose(res[0], self.ds1.values) - np.testing.assert_allclose(res[1], np.array([[4, 4], [4, 0]], dtype=np.float64)) - np.testing.assert_allclose(res[2], np.array([[5.333333, 5.333333], [5.333333, 0]], dtype=np.float64)) - - def test_no_units(self): - """Test that the computed RGB has no units attribute.""" - from satpy.composites import 
RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color') - res = comp((self.ds1, self.ds2, self.ds3)) - assert "units" not in res.attrs + data = res.values + + assert data.shape == exp_shape + np.testing.assert_allclose(data[0], exp_r, rtol=1e-5) + np.testing.assert_allclose(data[1], exp_g, rtol=1e-5) + np.testing.assert_allclose(data[2], exp_b, rtol=1e-5) class TestDifferenceCompositor(unittest.TestCase): @@ -466,7 +515,7 @@ def test_day_only_area_with_alpha(self): """Test compositor with day portion with alpha_band when SZA data is not provided.""" from satpy.composites import DayNightCompositor comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) - res = comp((self.data_a, )) + res = comp((self.data_a,)) res = res.compute() expected_l_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) expected_alpha = np.array([[1., 1.], [1., 1.]]) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index f3efcabd00..db50900cad 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -21,6 +21,7 @@ import os import sys import unittest +import warnings from contextlib import suppress from unittest import mock @@ -372,8 +373,10 @@ def test_missing_requirements(self, *mocks): epi_pro_miss = ['H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__'] epi_miss = epi_pro_miss + ['H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__'] pro_miss = epi_pro_miss + ['H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__'] - for filenames in [epi_miss, pro_miss, epi_pro_miss]: - self.assertRaises(ValueError, load_readers, reader='seviri_l1b_hrit', filenames=filenames) + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", message=r"No handler for reading requirement.*", category=UserWarning) + for filenames in [epi_miss, pro_miss, epi_pro_miss]: + self.assertRaises(ValueError, load_readers, reader='seviri_l1b_hrit', filenames=filenames) # Filenames from multiple scans at_least_one_complete = [ @@ -384,11 +387,13 @@ def test_missing_requirements(self, *mocks): # 10:00 scan is incomplete 'H-000-MSG4__-MSG4________-IR_108___-000006___-201809051000-__', ] - try: - load_readers(filenames=at_least_one_complete, reader='seviri_l1b_hrit') - except ValueError: - self.fail('If at least one set of filenames is complete, no ' - 'exception should be raised') + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", message=r"No matching requirement file.*", category=UserWarning) + try: + load_readers(filenames=at_least_one_complete, reader='seviri_l1b_hrit') + except ValueError: + self.fail('If at least one set of filenames is complete, no ' + 'exception should be raised') def test_all_filtered(self): """Test behaviour if no file matches the filter parameters.""" @@ -581,8 +586,8 @@ def test_old_reader_name_mapping(self): """Test that requesting old reader names raises a warning.""" from satpy.readers import OLD_READER_NAMES, get_valid_reader_names if not OLD_READER_NAMES: - return unittest.skip("Skipping deprecated reader tests because " - "no deprecated readers.") + return pytest.skip("Skipping deprecated reader tests because " + "no deprecated readers.") test_reader = sorted(OLD_READER_NAMES.keys())[0] with pytest.raises(ValueError): get_valid_reader_names([test_reader]) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 100d6b75f2..baeb45a4e4 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -21,11 +21,9 @@ 
import logging import os import tempfile -import unittest import warnings from collections import OrderedDict from datetime import datetime -from unittest import mock import dask.array as da import numpy as np @@ -51,7 +49,7 @@ # - request -class TempFile(object): +class TempFile: """A temporary filename class.""" def __init__(self, suffix=".nc"): @@ -70,7 +68,125 @@ def __exit__(self, *args): os.remove(self.filename) -class TestCFWriter(unittest.TestCase): +def test_lonlat_storage(tmp_path): + """Test correct storage for area with lon/lat units.""" + from ..utils import make_fake_scene + scn = make_fake_scene( + {"ketolysis": np.arange(25).reshape(5, 5)}, + daskify=True, + area=create_area_def("mavas", 4326, shape=(5, 5), + center=(0, 0), resolution=(1, 1))) + + filename = os.fspath(tmp_path / "test.nc") + scn.save_datasets(filename=filename, writer="cf", include_lonlats=False) + with xr.open_dataset(filename) as ds: + assert ds["ketolysis"].attrs["grid_mapping"] == "mavas" + assert ds["mavas"].attrs["grid_mapping_name"] == "latitude_longitude" + assert ds["x"].attrs["units"] == "degrees_east" + assert ds["y"].attrs["units"] == "degrees_north" + assert ds["mavas"].attrs["longitude_of_prime_meridian"] == 0.0 + np.testing.assert_allclose(ds["mavas"].attrs["semi_major_axis"], 6378137.0) + np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563) + + +def test_da2cf_lonlat(): + """Test correct da2cf encoding for area with lon/lat units.""" + from satpy.resample import add_crs_xy_coords + from satpy.writers.cf_writer import CFWriter + + area = create_area_def("mavas", 4326, shape=(5, 5), + center=(0, 0), resolution=(1, 1)) + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x"), + attrs={"area": area}) + da = add_crs_xy_coords(da, area) + new_da = CFWriter.da2cf(da) + assert new_da["x"].attrs["units"] == "degrees_east" + assert new_da["y"].attrs["units"] == "degrees_north" + + +def test_is_projected(caplog): + """Tests for private _is_projected function.""" + from satpy.writers.cf.crs import _is_projected + + # test case with units but no area + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x"), + coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}), + "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})}) + assert _is_projected(da) + + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x"), + coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}), + "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})}) + assert not _is_projected(da) + + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x")) + with caplog.at_level(logging.WARNING): + assert _is_projected(da) + assert "Failed to tell if data are projected." 
in caplog.text
+
+
+def test_preprocess_dataarray_name():
+    """Test handling of dataset names that start with a digit, with numeric_name_prefix and include_orig_name."""
+    from satpy import Scene
+    from satpy.writers.cf_writer import _preprocess_dataarray_name
+
+    scn = Scene()
+    scn['1'] = xr.DataArray([1, 2, 3])
+    dataarray = scn['1']
+    # If numeric_name_prefix is a string, test that the original_name attribute is added
+    out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True)
+    assert out_da.attrs['original_name'] == '1'
+
+    # If numeric_name_prefix is an empty string, False or None, test that no original_name attribute is added
+    out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="", include_orig_name=True)
+    assert "original_name" not in out_da.attrs
+
+    out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=False, include_orig_name=True)
+    assert "original_name" not in out_da.attrs
+
+    out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=None, include_orig_name=True)
+    assert "original_name" not in out_da.attrs
+
+
+def test_add_time_cf_attrs():
+    """Test addition of CF-compliant time attributes."""
+    from satpy import Scene
+    from satpy.writers.cf_writer import add_time_bounds_dimension
+
+    scn = Scene()
+    test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
+    times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01',
+                      '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64)
+    scn['test-array'] = xr.DataArray(test_array,
+                                     dims=['y', 'x'],
+                                     coords={'time': ('y', times)},
+                                     attrs=dict(start_time=times[0], end_time=times[-1]))
+    ds = scn['test-array'].to_dataset(name='test-array')
+    ds = add_time_bounds_dimension(ds)
+    assert "bnds_1d" in ds.dims
+    assert ds.dims['bnds_1d'] == 2
+    assert "time_bnds" in list(ds.data_vars)
+    assert "bounds" in ds["time"].attrs
+    assert "standard_name" in ds["time"].attrs
+
+
+def test_empty_collect_cf_datasets():
+    """Test that collect_cf_datasets raises an error when no DataArrays are given."""
+    from satpy.writers.cf_writer import collect_cf_datasets
+
+    with pytest.raises(RuntimeError):
+        collect_cf_datasets(list_dataarrays=[])
+
+
+class TestCFWriter:
     """Test case for CF writer."""

     def test_init(self):
@@ -94,8 +210,7 @@ def test_save_array(self):
             with xr.open_dataset(filename) as f:
                 np.testing.assert_array_equal(f['test-array'][:], [1, 2, 3])
                 expected_prereq = ("DataQuery(name='hej')")
-                self.assertEqual(f['test-array'].attrs['prerequisites'],
-                                 expected_prereq)
+                assert f['test-array'].attrs['prerequisites'] == expected_prereq

     def test_save_array_coords(self):
         """Test saving array with coordinates."""
@@ -123,12 +238,11 @@ def test_save_array_coords(self):
                 np.testing.assert_array_equal(f['test-array'][:], [[1, 2, 3]])
                 np.testing.assert_array_equal(f['x'][:], [0, 1, 2])
                 np.testing.assert_array_equal(f['y'][:], [0])
-                self.assertNotIn('crs', f)
-                self.assertNotIn('_FillValue', f['x'].attrs)
-                self.assertNotIn('_FillValue', f['y'].attrs)
+                assert 'crs' not in f
+                assert '_FillValue' not in f['x'].attrs
+                assert '_FillValue' not in f['y'].attrs
                 expected_prereq = ("DataQuery(name='hej')")
-                self.assertEqual(f['test-array'].attrs['prerequisites'],
-                                 expected_prereq)
+                assert f['test-array'].attrs['prerequisites'] == expected_prereq

     def test_save_dataset_a_digit(self):
         """Test saving an array to netcdf/cf where dataset name starting with a digit."""
@@ -156,7 +270,7 @@ def test_save_dataset_a_digit_prefix_include_attr(self):
             scn.save_datasets(filename=filename, writer='cf',
include_orig_name=True, numeric_name_prefix='TEST') with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f['TEST1'][:], [1, 2, 3]) - self.assertEqual(f['TEST1'].attrs['original_name'], '1') + assert f['TEST1'].attrs['original_name'] == '1' def test_save_dataset_a_digit_no_prefix_include_attr(self): """Test saving an array to netcdf/cf dataset name starting with a digit with no prefix include orig name.""" @@ -166,7 +280,7 @@ def test_save_dataset_a_digit_no_prefix_include_attr(self): scn.save_datasets(filename=filename, writer='cf', include_orig_name=True, numeric_name_prefix='') with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f['1'][:], [1, 2, 3]) - self.assertNotIn('original_name', f['1'].attrs) + assert 'original_name' not in f['1'].attrs def test_ancillary_variables(self): """Test ancillary_variables cited each other.""" @@ -185,10 +299,8 @@ def test_ancillary_variables(self): with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename) as f: - self.assertEqual(f['test-array-1'].attrs['ancillary_variables'], - 'test-array-2') - self.assertEqual(f['test-array-2'].attrs['ancillary_variables'], - 'test-array-1') + assert f['test-array-1'].attrs['ancillary_variables'] == 'test-array-2' + assert f['test-array-2'].attrs['ancillary_variables'] == 'test-array-1' def test_groups(self): """Test creating a file with groups.""" @@ -225,14 +337,14 @@ def test_groups(self): pretty=True) nc_root = xr.open_dataset(filename) - self.assertIn('history', nc_root.attrs) - self.assertSetEqual(set(nc_root.variables.keys()), set()) + assert 'history' in nc_root.attrs + assert set(nc_root.variables.keys()) == set() nc_visir = xr.open_dataset(filename, group='visir') nc_hrv = xr.open_dataset(filename, group='hrv') - self.assertSetEqual(set(nc_visir.variables.keys()), {'VIS006', 'IR_108', 'y', 'x', 'VIS006_acq_time', - 'IR_108_acq_time'}) - self.assertSetEqual(set(nc_hrv.variables.keys()), {'HRV', 'y', 'x', 'acq_time'}) + assert set(nc_visir.variables.keys()) == {'VIS006', 'IR_108', + 'y', 'x', 'VIS006_acq_time', 'IR_108_acq_time'} + assert set(nc_hrv.variables.keys()) == {'HRV', 'y', 'x', 'acq_time'} for tst, ref in zip([nc_visir['VIS006'], nc_visir['IR_108'], nc_hrv['HRV']], [scn['VIS006'], scn['IR_108'], scn['HRV']]): np.testing.assert_array_equal(tst.data, ref.data) @@ -242,7 +354,8 @@ def test_groups(self): # Different projection coordinates in one group are not supported with TempFile() as filename: - self.assertRaises(ValueError, scn.save_datasets, datasets=['VIS006', 'HRV'], filename=filename, writer='cf') + with pytest.raises(ValueError): + scn.save_datasets(datasets=['VIS006', 'HRV'], filename=filename, writer='cf') def test_single_time_value(self): """Test setting a single time value.""" @@ -294,7 +407,7 @@ def test_bounds(self): with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]') np.testing.assert_array_equal(f['time_bnds'], bounds_exp) - self.assertEqual(f['time'].attrs['bounds'], 'time_bnds') + assert f['time'].attrs['bounds'] == 'time_bnds' # Check raw time coordinates & bounds with xr.open_dataset(filename, decode_cf=False) as f: @@ -368,7 +481,7 @@ def test_unlimited_dims_kwarg(self): with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf', unlimited_dims=['time']) with xr.open_dataset(filename) as f: - self.assertSetEqual(f.encoding['unlimited_dims'], {'time'}) + assert set(f.encoding['unlimited_dims']) == {'time'} def 
test_header_attrs(self): """Check global attributes are set.""" @@ -393,18 +506,18 @@ def test_header_attrs(self): flatten_attrs=True, writer='cf') with xr.open_dataset(filename) as f: - self.assertIn('history', f.attrs) - self.assertEqual(f.attrs['sensor'], 'SEVIRI') - self.assertEqual(f.attrs['orbit'], 99999) + assert 'history' in f.attrs + assert f.attrs['sensor'] == 'SEVIRI' + assert f.attrs['orbit'] == 99999 np.testing.assert_array_equal(f.attrs['list'], [1, 2, 3]) - self.assertEqual(f.attrs['set'], '{1, 2, 3}') - self.assertEqual(f.attrs['dict_a'], 1) - self.assertEqual(f.attrs['dict_b'], 2) - self.assertEqual(f.attrs['nested_outer_inner1'], 1) - self.assertEqual(f.attrs['nested_outer_inner2'], 2) - self.assertEqual(f.attrs['bool'], 'true') - self.assertEqual(f.attrs['bool_'], 'true') - self.assertTrue('none' not in f.attrs.keys()) + assert f.attrs['set'] == '{1, 2, 3}' + assert f.attrs['dict_a'] == 1 + assert f.attrs['dict_b'] == 2 + assert f.attrs['nested_outer_inner1'] == 1 + assert f.attrs['nested_outer_inner2'] == 2 + assert f.attrs['bool'] == 'true' + assert f.attrs['bool_'] == 'true' + assert 'none' not in f.attrs.keys() def get_test_attrs(self): """Create some dataset attributes for testing purpose. @@ -435,9 +548,9 @@ def get_test_attrs(self): 'dict': {'a': 1, 'b': 2}, 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, 'raw_metadata': OrderedDict([ - ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), - ('flag', np.bool_(True)), - ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) + ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), + ('flag', np.bool_(True)), + ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) ])} encoded = {'name': 'IR_108', 'start_time': '2018-01-01 00:00:00', @@ -490,17 +603,17 @@ def get_test_attrs(self): def assertDictWithArraysEqual(self, d1, d2): """Check that dicts containing arrays are equal.""" - self.assertSetEqual(set(d1.keys()), set(d2.keys())) + assert set(d1.keys()) == set(d2.keys()) for key, val1 in d1.items(): val2 = d2[key] if isinstance(val1, np.ndarray): np.testing.assert_array_equal(val1, val2) - self.assertEqual(val1.dtype, val2.dtype) + assert val1.dtype == val2.dtype else: - self.assertEqual(val1, val2) + assert val1 == val2 if isinstance(val1, (np.floating, np.integer, np.bool_)): - self.assertTrue(isinstance(val2, np.generic)) - self.assertEqual(val1.dtype, val2.dtype) + assert isinstance(val2, np.generic) + assert val1.dtype == val2.dtype def test_encode_attrs_nc(self): """Test attributes encoding.""" @@ -516,10 +629,10 @@ def test_encode_attrs_nc(self): raw_md_roundtrip = {'recarray': [[0, 0], [0, 0], [0, 0]], 'flag': 'true', 'dict': {'a': 1, 'b': [1, 2, 3]}} - self.assertDictEqual(json.loads(encoded['raw_metadata']), raw_md_roundtrip) - self.assertListEqual(json.loads(encoded['array_3d']), [[[1, 2], [3, 4]], [[1, 2], [3, 4]]]) - self.assertDictEqual(json.loads(encoded['nested_dict']), {"l1": {"l2": {"l3": [1, 2, 3]}}}) - self.assertListEqual(json.loads(encoded['nested_list']), ["1", ["2", [3]]]) + assert json.loads(encoded['raw_metadata']) == raw_md_roundtrip + assert json.loads(encoded['array_3d']) == [[[1, 2], [3, 4]], [[1, 2], [3, 4]]] + assert json.loads(encoded['nested_dict']) == {"l1": {"l2": {"l3": [1, 2, 3]}}} + assert json.loads(encoded['nested_list']) == ["1", ["2", [3]]] def test_da2cf(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" @@ -550,8 +663,8 @@ def test_da2cf(self): 
np.testing.assert_array_equal(res['x'], arr['x']) np.testing.assert_array_equal(res['y'], arr['y']) np.testing.assert_array_equal(res['acq_time'], arr['acq_time']) - self.assertDictEqual(res['x'].attrs, {'units': 'm', 'standard_name': 'projection_x_coordinate'}) - self.assertDictEqual(res['y'].attrs, {'units': 'm', 'standard_name': 'projection_y_coordinate'}) + assert res['x'].attrs == {'units': 'm', 'standard_name': 'projection_x_coordinate'} + assert res['y'].attrs == {'units': 'm', 'standard_name': 'projection_y_coordinate'} self.assertDictWithArraysEqual(res.attrs, attrs_expected) # Test attribute kwargs @@ -567,10 +680,9 @@ def test_da2cf_one_dimensional_array(self): coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) _ = CFWriter.da2cf(arr) - @mock.patch('satpy.writers.cf_writer.CFWriter.__init__', return_value=None) - def test_collect_datasets(self, *mocks): + def test_collect_cf_dataarrays(self): """Test collecting CF datasets from a DataArray objects.""" - from satpy.writers.cf_writer import CFWriter + from satpy.writers.cf_writer import _collect_cf_dataset geos = pyresample.geometry.AreaDefinition( area_id='geos', @@ -587,30 +699,26 @@ def test_collect_datasets(self, *mocks): time = [1, 2] tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) - datasets = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, - attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend, 'area': geos}), - xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, - attrs={'name': 'var2', 'long_name': 'variable 2'})] + list_dataarrays = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, + attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend, 'area': geos}), + xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, + attrs={'name': 'var2', 'long_name': 'variable 2'})] # Collect datasets - writer = CFWriter() - datas, start_times, end_times = writer._collect_datasets(datasets, include_lonlats=True) + ds = _collect_cf_dataset(list_dataarrays, include_lonlats=True) # Test results - self.assertEqual(len(datas), 3) - self.assertEqual(set(datas.keys()), {'var1', 'var2', 'geos'}) - self.assertListEqual(start_times, [None, tstart, None]) - self.assertListEqual(end_times, [None, tend, None]) - var1 = datas['var1'] - var2 = datas['var2'] - self.assertEqual(var1.name, 'var1') - self.assertEqual(var1.attrs['grid_mapping'], 'geos') - self.assertEqual(var1.attrs['start_time'], '2019-04-01 12:00:00') - self.assertEqual(var1.attrs['end_time'], '2019-04-01 12:15:00') - self.assertEqual(var1.attrs['long_name'], 'var1') + assert len(ds.keys()) == 3 + assert set(ds.keys()) == {'var1', 'var2', 'geos'} + + da_var1 = ds['var1'] + da_var2 = ds['var2'] + assert da_var1.name == 'var1' + assert da_var1.attrs['grid_mapping'] == 'geos' + assert da_var1.attrs['long_name'] == 'var1' # variable 2 - self.assertNotIn('grid_mapping', var2.attrs) - self.assertEqual(var2.attrs['long_name'], 'variable 2') + assert 'grid_mapping' not in da_var2.attrs + assert da_var2.attrs['long_name'] == 'variable 2' def test_assert_xy_unique(self): """Test that the x and y coordinates are unique.""" @@ -623,7 +731,8 @@ def test_assert_xy_unique(self): assert_xy_unique(datas) datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]}) - self.assertRaises(ValueError, assert_xy_unique, datas) + with pytest.raises(ValueError): + 

     def test_assert_xy_unique(self):
         """Test that the x and y coordinates are unique."""
@@ -623,7 +731,8 @@
             assert_xy_unique(datas)

         datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]})
-        self.assertRaises(ValueError, assert_xy_unique, datas)
+        with pytest.raises(ValueError):
+            assert_xy_unique(datas)

     def test_link_coords(self):
         """Check that coordinates link has been established correctly."""
@@ -646,19 +755,19 @@
         link_coords(datasets)

         # Check that link has been established correctly and 'coordinate' atrribute has been dropped
-        self.assertIn('lon', datasets['var1'].coords)
-        self.assertIn('lat', datasets['var1'].coords)
+        assert 'lon' in datasets['var1'].coords
+        assert 'lat' in datasets['var1'].coords
         np.testing.assert_array_equal(datasets['var1']['lon'].data, lon)
         np.testing.assert_array_equal(datasets['var1']['lat'].data, lat)
-        self.assertNotIn('coordinates', datasets['var1'].attrs)
+        assert 'coordinates' not in datasets['var1'].attrs

         # There should be no link if there was no 'coordinate' attribute
-        self.assertNotIn('lon', datasets['var2'].coords)
-        self.assertNotIn('lat', datasets['var2'].coords)
+        assert 'lon' not in datasets['var2'].coords
+        assert 'lat' not in datasets['var2'].coords

-        # The non-existant dimension or coordinate should be dropped
-        self.assertNotIn('time', datasets['var3'].coords)
-        self.assertNotIn('not_exist', datasets['var4'].coords)
+        # The non-existent dimension or coordinate should be dropped
+        assert 'time' not in datasets['var3'].coords
+        assert 'not_exist' not in datasets['var4'].coords

     def test_make_alt_coords_unique(self):
         """Test that created coordinate variables are unique."""
@@ -680,8 +789,8 @@
         res = make_alt_coords_unique(datasets)
         np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1)
         np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2)
-        self.assertNotIn('acq_time', res['var1'].coords)
-        self.assertNotIn('acq_time', res['var2'].coords)
+        assert 'acq_time' not in res['var1'].coords
+        assert 'acq_time' not in res['var2'].coords

         # Make sure nothing else is modified
         np.testing.assert_array_equal(res['var1']['x'], x)
@@ -690,21 +799,20 @@
         np.testing.assert_array_equal(res['var2']['y'], y)

         # Coords not unique -> Dataset names must be prepended, even if pretty=True
-        with mock.patch('satpy.writers.cf_writer.warnings.warn') as warn:
+        with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'):
             res = make_alt_coords_unique(datasets, pretty=True)
-            warn.assert_called()
-            np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1)
-            np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2)
-            self.assertNotIn('acq_time', res['var1'].coords)
-            self.assertNotIn('acq_time', res['var2'].coords)
+        np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1)
+        np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2)
+        assert 'acq_time' not in res['var1'].coords
+        assert 'acq_time' not in res['var2'].coords

         # Coords unique and pretty=True -> Don't modify coordinate names
         datasets['var2']['acq_time'] = ('y', time1)
         res = make_alt_coords_unique(datasets, pretty=True)
         np.testing.assert_array_equal(res['var1']['acq_time'], time1)
         np.testing.assert_array_equal(res['var2']['acq_time'], time1)
-        self.assertNotIn('var1_acq_time', res['var1'].coords)
-        self.assertNotIn('var2_acq_time', res['var2'].coords)
+        assert 'var1_acq_time' not in res['var1'].coords
+        assert 'var2_acq_time' not in res['var2'].coords
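A compact sketch of the behaviour exercised above, with locally defined toy arrays (illustrative, not part of the changeset): conflicting non-dimensional coordinates get the dataset name prepended.

    import xarray as xr

    from satpy.writers.cf_writer import make_alt_coords_unique

    var1 = xr.DataArray([1, 2], dims='y', coords={'y': [0, 1], 'acq_time': ('y', [10, 11])})
    var2 = xr.DataArray([3, 4], dims='y', coords={'y': [0, 1], 'acq_time': ('y', [20, 21])})
    res = make_alt_coords_unique({'var1': var1, 'var2': var2})
    assert 'var1_acq_time' in res['var1'].coords  # conflicting 'acq_time' was renamed per dataset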

     def test_area2cf(self):
         """Test the conversion of an area to CF standards."""
@@ -724,44 +832,44 @@
         ds = ds_base.copy(deep=True)
         ds.attrs['area'] = geos
-        res = area2cf(ds)
-        self.assertEqual(len(res), 2)
-        self.assertEqual(res[0].size, 1)  # grid mapping variable
-        self.assertEqual(res[0].name, res[1].attrs['grid_mapping'])
+        res = area2cf(ds, include_lonlats=False)
+        assert len(res) == 2
+        assert res[0].size == 1  # grid mapping variable
+        assert res[0].name == res[1].attrs['grid_mapping']

-        # b) Area Definition and strict=False
+
         ds = ds_base.copy(deep=True)
         ds.attrs['area'] = geos
-        res = area2cf(ds, strict=True)
+        res = area2cf(ds, include_lonlats=True)
         # same as above
-        self.assertEqual(len(res), 2)
-        self.assertEqual(res[0].size, 1)  # grid mapping variable
-        self.assertEqual(res[0].name, res[1].attrs['grid_mapping'])
+        assert len(res) == 2
+        assert res[0].size == 1  # grid mapping variable
+        assert res[0].name == res[1].attrs['grid_mapping']
         # but now also have the lon/lats
-        self.assertIn('longitude', res[1].coords)
-        self.assertIn('latitude', res[1].coords)
+        assert 'longitude' in res[1].coords
+        assert 'latitude' in res[1].coords

         # c) Swath Definition
         swath = pyresample.geometry.SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]])
         ds = ds_base.copy(deep=True)
         ds.attrs['area'] = swath

-        res = area2cf(ds)
-        self.assertEqual(len(res), 1)
-        self.assertIn('longitude', res[0].coords)
-        self.assertIn('latitude', res[0].coords)
-        self.assertNotIn('grid_mapping', res[0].attrs)
+        res = area2cf(ds, include_lonlats=False)
+        assert len(res) == 1
+        assert 'longitude' in res[0].coords
+        assert 'latitude' in res[0].coords
+        assert 'grid_mapping' not in res[0].attrs

-    def test_area2gridmapping(self):
+    def test__add_grid_mapping(self):
         """Test the conversion from pyresample area object to CF grid mapping."""
-        from satpy.writers.cf_writer import area2gridmapping
+        from satpy.writers.cf_writer import _add_grid_mapping

         def _gm_matches(gmapping, expected):
             """Assert that all keys in ``expected`` match the values in ``gmapping``."""
             for attr_key, attr_val in expected.attrs.items():
                 test_val = gmapping.attrs[attr_key]
                 if attr_val is None or isinstance(attr_val, str):
-                    self.assertEqual(test_val, attr_val)
+                    assert test_val == attr_val
                 else:
                     np.testing.assert_almost_equal(test_val, attr_val, decimal=3)
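The `strict` keyword is gone; `include_lonlats` now controls whether lon/lat coordinates are added for an `AreaDefinition`. A sketch of the call pattern the test relies on, reusing the `ds` object defined above (illustrative only; `ds` is any DataArray with an `area` attribute):

    from satpy.writers.cf_writer import area2cf

    grid_mapping_var, da_cf = area2cf(ds, include_lonlats=False)
    assert da_cf.attrs['grid_mapping'] == grid_mapping_var.name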
@@ -792,15 +900,15 @@ def _gm_matches(gmapping, expected):
         ds = ds_base.copy()
         ds.attrs['area'] = geos

-        new_ds, grid_mapping = area2gridmapping(ds)
+        new_ds, grid_mapping = _add_grid_mapping(ds)
         if 'sweep_angle_axis' in grid_mapping.attrs:
             # older versions of pyproj might not include this
-            self.assertEqual(grid_mapping.attrs['sweep_angle_axis'], 'y')
+            assert grid_mapping.attrs['sweep_angle_axis'] == 'y'

-        self.assertEqual(new_ds.attrs['grid_mapping'], 'geos')
+        assert new_ds.attrs['grid_mapping'] == 'geos'
         _gm_matches(grid_mapping, geos_expected)
         # should not have been modified
-        self.assertNotIn('grid_mapping', ds.attrs)
+        assert 'grid_mapping' not in ds.attrs

         # b) Projection does not have a corresponding CF representation (COSMO)
         cosmo7 = pyresample.geometry.AreaDefinition(
@@ -816,15 +924,15 @@ def _gm_matches(gmapping, expected):
         ds = ds_base.copy()
         ds.attrs['area'] = cosmo7

-        new_ds, grid_mapping = area2gridmapping(ds)
-        self.assertIn('crs_wkt', grid_mapping.attrs)
+        new_ds, grid_mapping = _add_grid_mapping(ds)
+        assert 'crs_wkt' in grid_mapping.attrs
         wkt = grid_mapping.attrs['crs_wkt']
-        self.assertIn('ELLIPSOID["WGS 84"', wkt)
-        self.assertIn('PARAMETER["lat_0",46', wkt)
-        self.assertIn('PARAMETER["lon_0",4.535', wkt)
-        self.assertIn('PARAMETER["o_lat_p",90', wkt)
-        self.assertIn('PARAMETER["o_lon_p",-5.465', wkt)
-        self.assertEqual(new_ds.attrs['grid_mapping'], 'cosmo7')
+        assert 'ELLIPSOID["WGS 84"' in wkt
+        assert 'PARAMETER["lat_0",46' in wkt
+        assert 'PARAMETER["lon_0",4.535' in wkt
+        assert 'PARAMETER["o_lat_p",90' in wkt
+        assert 'PARAMETER["o_lon_p",-5.465' in wkt
+        assert new_ds.attrs['grid_mapping'] == 'cosmo7'

         # c) Projection Transverse Mercator
         lat_0 = 36.5
@@ -849,8 +957,8 @@ def _gm_matches(gmapping, expected):
         ds = ds_base.copy()
         ds.attrs['area'] = tmerc

-        new_ds, grid_mapping = area2gridmapping(ds)
-        self.assertEqual(new_ds.attrs['grid_mapping'], 'tmerc')
+        new_ds, grid_mapping = _add_grid_mapping(ds)
+        assert new_ds.attrs['grid_mapping'] == 'tmerc'
         _gm_matches(grid_mapping, tmerc_expected)

         # d) Projection that has a representation but no explicit a/b
@@ -875,9 +983,9 @@ def _gm_matches(gmapping, expected):
         ds = ds_base.copy()
         ds.attrs['area'] = geos

-        new_ds, grid_mapping = area2gridmapping(ds)
+        new_ds, grid_mapping = _add_grid_mapping(ds)

-        self.assertEqual(new_ds.attrs['grid_mapping'], 'geos')
+        assert new_ds.attrs['grid_mapping'] == 'geos'
         _gm_matches(grid_mapping, geos_expected)

         # e) oblique Mercator
@@ -906,9 +1014,9 @@ def _gm_matches(gmapping, expected):
         ds = ds_base.copy()
         ds.attrs['area'] = area

-        new_ds, grid_mapping = area2gridmapping(ds)
+        new_ds, grid_mapping = _add_grid_mapping(ds)

-        self.assertEqual(new_ds.attrs['grid_mapping'], 'omerc_otf')
+        assert new_ds.attrs['grid_mapping'] == 'omerc_otf'
         _gm_matches(grid_mapping, omerc_expected)

         # f) Projection that has a representation but no explicit a/b
@@ -931,14 +1039,14 @@ def _gm_matches(gmapping, expected):
         ds = ds_base.copy()
         ds.attrs['area'] = geos

-        new_ds, grid_mapping = area2gridmapping(ds)
+        new_ds, grid_mapping = _add_grid_mapping(ds)

-        self.assertEqual(new_ds.attrs['grid_mapping'], 'geos')
+        assert new_ds.attrs['grid_mapping'] == 'geos'
         _gm_matches(grid_mapping, geos_expected)

-    def test_area2lonlat(self):
+    def test_add_lonlat_coords(self):
         """Test the conversion from areas to lon/lat."""
-        from satpy.writers.cf_writer import area2lonlat
+        from satpy.writers.cf_writer import add_lonlat_coords

         area = pyresample.geometry.AreaDefinition(
             'seviri',
@@ -951,11 +1059,11 @@
         lons_ref, lats_ref = area.get_lonlats()
         dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), attrs={'area': area})

-        res = area2lonlat(dataarray)
+        res = add_lonlat_coords(dataarray)

         # original should be unmodified
-        self.assertNotIn('longitude', dataarray.coords)
-        self.assertEqual(set(res.coords), {'longitude', 'latitude'})
+        assert 'longitude' not in dataarray.coords
+        assert set(res.coords) == {'longitude', 'latitude'}
         lat = res['latitude']
         lon = res['longitude']
         np.testing.assert_array_equal(lat.data, lats_ref)
@@ -972,13 +1080,13 @@
             [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662]
         )
         lons_ref, lats_ref = area.get_lonlats()
-        dataarray = xr.DataArray(data=da.from_array(np.arange(3*10*10).reshape(3, 10, 10), chunks=(1, 5, 5)),
+        dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)),
                                  dims=('bands', 'y', 'x'), attrs={'area': area})

-        res = area2lonlat(dataarray)
+        res = add_lonlat_coords(dataarray)

         # original should be unmodified
-        self.assertNotIn('longitude', dataarray.coords)
-        self.assertEqual(set(res.coords), {'longitude', 'latitude'})
+        assert 'longitude' not in dataarray.coords
+        assert set(res.coords) == {'longitude', 'latitude'}
         lat = res['latitude']
         lon = res['longitude']
         np.testing.assert_array_equal(lat.data, lats_ref)
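The renamed helper is side-effect free, as the assertions above stress. A minimal sketch, reusing the `dataarray` from the test (illustrative only; any DataArray carrying an `AreaDefinition` in its `area` attribute works the same way):

    from satpy.writers.cf_writer import add_lonlat_coords

    res = add_lonlat_coords(dataarray)
    # the input is left untouched; the copy gains 2-d 'longitude'/'latitude' coordinates
    assert 'longitude' not in dataarray.coords and 'longitude' in res.coords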
@@ -1014,8 +1122,8 @@ def test_global_attr_default_history_and_Conventions(self):
         with TempFile() as filename:
             scn.save_datasets(filename=filename, writer='cf')
             with xr.open_dataset(filename) as f:
-                self.assertEqual(f.attrs['Conventions'], 'CF-1.7')
-                self.assertIn('Created by pytroll/satpy on', f.attrs['history'])
+                assert f.attrs['Conventions'] == 'CF-1.7'
+                assert 'Created by pytroll/satpy on' in f.attrs['history']

     def test_global_attr_history_and_Conventions(self):
         """Test saving global attributes history and Conventions."""
@@ -1033,81 +1141,17 @@ def test_global_attr_history_and_Conventions(self):
         with TempFile() as filename:
             scn.save_datasets(filename=filename, writer='cf', header_attrs=header_attrs)
             with xr.open_dataset(filename) as f:
-                self.assertEqual(f.attrs['Conventions'], 'CF-1.7, ACDD-1.3')
-                self.assertIn('TEST add history\n', f.attrs['history'])
-                self.assertIn('Created by pytroll/satpy on', f.attrs['history'])
+                assert f.attrs['Conventions'] == 'CF-1.7, ACDD-1.3'
+                assert 'TEST add history\n' in f.attrs['history']
+                assert 'Created by pytroll/satpy on' in f.attrs['history']

-def test_lonlat_storage(tmp_path):
-    """Test correct storage for area with lon/lat units."""
-    from ..utils import make_fake_scene
-    scn = make_fake_scene(
-        {"ketolysis": np.arange(25).reshape(5, 5)},
-        daskify=True,
-        area=create_area_def("mavas", 4326, shape=(5, 5),
-                             center=(0, 0), resolution=(1, 1)))
-
-    filename = os.fspath(tmp_path / "test.nc")
-    scn.save_datasets(filename=filename, writer="cf", include_lonlats=False)
-    with xr.open_dataset(filename) as ds:
-        assert ds["ketolysis"].attrs["grid_mapping"] == "mavas"
-        assert ds["mavas"].attrs["grid_mapping_name"] == "latitude_longitude"
-        assert ds["x"].attrs["units"] == "degrees_east"
-        assert ds["y"].attrs["units"] == "degrees_north"
-        assert ds["mavas"].attrs["longitude_of_prime_meridian"] == 0.0
-        np.testing.assert_allclose(ds["mavas"].attrs["semi_major_axis"], 6378137.0)
-        np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563)
-
-
-def test_da2cf_lonlat():
-    """Test correct da2cf encoding for area with lon/lat units."""
-    from satpy.resample import add_crs_xy_coords
-    from satpy.writers.cf_writer import CFWriter
-
-    area = create_area_def("mavas", 4326, shape=(5, 5),
-                           center=(0, 0), resolution=(1, 1))
-    da = xr.DataArray(
-        np.arange(25).reshape(5, 5),
-        dims=("y", "x"),
-        attrs={"area": area})
-    da = add_crs_xy_coords(da, area)
-    new_da = CFWriter.da2cf(da)
-    assert new_da["x"].attrs["units"] == "degrees_east"
-    assert new_da["y"].attrs["units"] == "degrees_north"
-
-
-def test_is_projected(caplog):
-    """Tests for private _is_projected function."""
-    from satpy.writers.cf_writer import CFWriter
-
-    # test case with units but no area
-    da = xr.DataArray(
-        np.arange(25).reshape(5, 5),
-        dims=("y", "x"),
-        coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}),
-                "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})})
-    assert CFWriter._is_projected(da)
-
-    da = xr.DataArray(
-        np.arange(25).reshape(5, 5),
-        dims=("y", "x"),
-        coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}),
-                "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})})
-    assert not CFWriter._is_projected(da)
-
-    da = xr.DataArray(
-        np.arange(25).reshape(5, 5),
-        dims=("y", "x"))
-    with caplog.at_level(logging.WARNING):
-        assert CFWriter._is_projected(da)
-    assert "Failed to tell if data are projected." in caplog.text
-
-
-class TestCFWriterData(unittest.TestCase):
+class TestCFWriterData:
     """Test case for CF writer where data arrays are needed."""

-    def setUp(self):
-        """Create some test data."""
+    @pytest.fixture
+    def datasets(self):
+        """Create test dataset."""
         data = [[75, 2], [3, 4]]
         y = [1, 2]
         x = [1, 2]
@@ -1118,146 +1162,166 @@ def setUp(self):
             projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8},
             width=2, height=2,
             area_extent=[-1, -1, 1, 1])
-        self.datasets = {'var1': xr.DataArray(data=data,
-                                              dims=('y', 'x'),
-                                              coords={'y': y, 'x': x}),
-                         'var2': xr.DataArray(data=data,
-                                              dims=('y', 'x'),
-                                              coords={'y': y, 'x': x}),
-                         'lat': xr.DataArray(data=data,
-                                             dims=('y', 'x'),
-                                             coords={'y': y, 'x': x}),
-                         'lon': xr.DataArray(data=data,
-                                             dims=('y', 'x'),
-                                             coords={'y': y, 'x': x})}
-        self.datasets['lat'].attrs['standard_name'] = 'latitude'
-        self.datasets['var1'].attrs['standard_name'] = 'dummy'
-        self.datasets['var2'].attrs['standard_name'] = 'dummy'
-        self.datasets['var2'].attrs['area'] = geos
-        self.datasets['var1'].attrs['area'] = geos
-        self.datasets['lat'].attrs['name'] = 'lat'
-        self.datasets['var1'].attrs['name'] = 'var1'
-        self.datasets['var2'].attrs['name'] = 'var2'
-        self.datasets['lon'].attrs['name'] = 'lon'
-
-    def test_dataset_is_projection_coords(self):
-        """Test the dataset_is_projection_coords function."""
-        from satpy.writers.cf_writer import dataset_is_projection_coords
-        self.assertTrue(dataset_is_projection_coords(self.datasets['lat']))
-        self.assertFalse(dataset_is_projection_coords(self.datasets['var1']))
-
-    def test_has_projection_coords(self):
+        datasets = {
+            'var1': xr.DataArray(data=data,
                                  dims=('y', 'x'),
                                  coords={'y': y, 'x': x}),
+            'var2': xr.DataArray(data=data,
                                  dims=('y', 'x'),
                                  coords={'y': y, 'x': x}),
+            'lat': xr.DataArray(data=data,
                                 dims=('y', 'x'),
                                 coords={'y': y, 'x': x}),
+            'lon': xr.DataArray(data=data,
                                 dims=('y', 'x'),
                                 coords={'y': y, 'x': x})}
+        datasets['lat'].attrs['standard_name'] = 'latitude'
+        datasets['var1'].attrs['standard_name'] = 'dummy'
+        datasets['var2'].attrs['standard_name'] = 'dummy'
+        datasets['var2'].attrs['area'] = geos
+        datasets['var1'].attrs['area'] = geos
+        datasets['lat'].attrs['name'] = 'lat'
+        datasets['var1'].attrs['name'] = 'var1'
+        datasets['var2'].attrs['name'] = 'var2'
+        datasets['lon'].attrs['name'] = 'lon'
+        return datasets
+
+    def test_is_lon_or_lat_dataarray(self, datasets):
+        """Test the is_lon_or_lat_dataarray function."""
+        from satpy.writers.cf_writer import is_lon_or_lat_dataarray
+
+        assert is_lon_or_lat_dataarray(datasets['lat'])
+        assert not is_lon_or_lat_dataarray(datasets['var1'])
+
+    def test_has_projection_coords(self, datasets):
         """Test the has_projection_coords function."""
         from satpy.writers.cf_writer import has_projection_coords
-        self.assertTrue(has_projection_coords(self.datasets))
-        self.datasets['lat'].attrs['standard_name'] = 'dummy'
-        self.assertFalse(has_projection_coords(self.datasets))

-    @mock.patch('satpy.writers.cf_writer.CFWriter.__init__', return_value=None)
-    def test_collect_datasets_with_latitude_named_lat(self, *mocks):
-        """Test collecting CF datasets with latitude named lat."""
-        from operator import getitem
+        assert has_projection_coords(datasets)
+        datasets['lat'].attrs['standard_name'] = 'dummy'
+        assert not has_projection_coords(datasets)

-        from satpy.writers.cf_writer import CFWriter
+    def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets):
+        """Test collecting CF datasets with latitude named lat."""
+        from satpy.writers.cf_writer import _collect_cf_dataset

-        self.datasets_list = [self.datasets[key] for key in self.datasets]
-        self.datasets_list_no_latlon = [self.datasets[key] for key in ['var1', 'var2']]
+        datasets_list = [datasets[key] for key in datasets.keys()]
+        datasets_list_no_latlon = [datasets[key] for key in ['var1', 'var2']]

         # Collect datasets
-        writer = CFWriter()
-        datas, start_times, end_times = writer._collect_datasets(self.datasets_list, include_lonlats=True)
-        datas2, start_times, end_times = writer._collect_datasets(self.datasets_list_no_latlon, include_lonlats=True)
-        # Test results
+        ds = _collect_cf_dataset(datasets_list, include_lonlats=True)
+        ds2 = _collect_cf_dataset(datasets_list_no_latlon, include_lonlats=True)

-        self.assertEqual(len(datas), 5)
-        self.assertEqual(set(datas.keys()), {'var1', 'var2', 'lon', 'lat', 'geos'})
-        self.assertRaises(KeyError, getitem, datas['var1'], 'latitude')
-        self.assertRaises(KeyError, getitem, datas['var1'], 'longitude')
-        self.assertEqual(datas2['var1']['latitude'].attrs['name'], 'latitude')
-        self.assertEqual(datas2['var1']['longitude'].attrs['name'], 'longitude')
+        # Test results
+        assert len(ds.keys()) == 5
+        assert set(ds.keys()) == {'var1', 'var2', 'lon', 'lat', 'geos'}
+        with pytest.raises(KeyError):
+            ds['var1'].attrs["latitude"]
+        with pytest.raises(KeyError):
+            ds['var1'].attrs["longitude"]
+        assert ds2['var1']['latitude'].attrs['name'] == 'latitude'
+        assert ds2['var1']['longitude'].attrs['name'] == 'longitude'


-class EncodingUpdateTest(unittest.TestCase):
+class EncodingUpdateTest:
     """Test update of netCDF encoding."""

-    def setUp(self):
+    @pytest.fixture
+    def fake_ds(self):
+        """Create fake data for testing."""
+        ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]),
+                         'bar': (('y', 'x'), [[3, 4], [5, 6]])},
+                        coords={'y': [1, 2],
+                                'x': [3, 4],
+                                'lon': (('y', 'x'), [[7, 8], [9, 10]])})
+        return ds
+
+    @pytest.fixture
+    def fake_ds_digit(self):
         """Create fake data for testing."""
-        self.ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]),
-                              'bar': (('y', 'x'), [[3, 4], [5, 6]])},
-                             coords={'y': [1, 2],
-                                     'x': [3, 4],
-                                     'lon': (('y', 'x'), [[7, 8], [9, 10]])})
-        self.ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]),
-                                    'CHANNEL_2': (('y', 'x'), [[3, 4], [5, 6]])},
-                                   coords={'y': [1, 2],
-                                           'x': [3, 4],
-                                           'lon': (('y', 'x'), [[7, 8], [9, 10]])})
-
-    def test_dataset_name_digit(self):
+        ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]),
+                               'CHANNEL_2': (('y', 'x'), [[3, 4], [5, 6]])},
+                              coords={'y': [1, 2],
+                                      'x': [3, 4],
+                                      'lon': (('y', 'x'), [[7, 8], [9, 10]])})
+        return ds_digit
+
+    def test_dataset_name_digit(self, fake_ds_digit):
         """Test data with dataset name staring with a digit."""
         from satpy.writers.cf_writer import update_encoding

         # Dataset with name staring with digit
-        ds = self.ds_digit
+        ds_digit = fake_ds_digit
         kwargs = {'encoding': {'1': {'dtype': 'float32'},
                                '2': {'dtype': 'float32'}},
                   'other': 'kwargs'}
-        enc, other_kwargs = update_encoding(ds, kwargs, numeric_name_prefix='CHANNEL_')
-        self.assertDictEqual(enc, {'y': {'_FillValue': None},
-                                   'x': {'_FillValue': None},
-                                   'CHANNEL_1': {'dtype': 'float32'},
-                                   'CHANNEL_2': {'dtype': 'float32'}})
-        self.assertDictEqual(other_kwargs, {'other': 'kwargs'})
-
-    def test_without_time(self):
+        enc, other_kwargs = update_encoding(ds_digit, kwargs, numeric_name_prefix='CHANNEL_')
+        expected_dict = {
+            'y': {'_FillValue': None},
+            'x': {'_FillValue': None},
+            'CHANNEL_1': {'dtype': 'float32'},
+            'CHANNEL_2': {'dtype': 'float32'}
+        }
+        assert enc == expected_dict
+        assert other_kwargs == {'other': 'kwargs'}
+
+    def test_without_time(self, fake_ds):
         """Test data with no time dimension."""
         from satpy.writers.cf_writer import update_encoding

         # Without time dimension
-        ds = self.ds.chunk(2)
+        ds = fake_ds.chunk(2)
         kwargs = {'encoding': {'bar': {'chunksizes': (1, 1)}},
                   'other': 'kwargs'}
         enc, other_kwargs = update_encoding(ds, kwargs)
-        self.assertDictEqual(enc, {'y': {'_FillValue': None},
-                                   'x': {'_FillValue': None},
-                                   'lon': {'chunksizes': (2, 2)},
-                                   'foo': {'chunksizes': (2, 2)},
-                                   'bar': {'chunksizes': (1, 1)}})
-        self.assertDictEqual(other_kwargs, {'other': 'kwargs'})
+        expected_dict = {
+            'y': {'_FillValue': None},
+            'x': {'_FillValue': None},
+            'lon': {'chunksizes': (2, 2)},
+            'foo': {'chunksizes': (2, 2)},
+            'bar': {'chunksizes': (1, 1)}
+        }
+        assert enc == expected_dict
+        assert other_kwargs == {'other': 'kwargs'}

         # Chunksize may not exceed shape
-        ds = self.ds.chunk(8)
+        ds = fake_ds.chunk(8)
         kwargs = {'encoding': {}, 'other': 'kwargs'}
         enc, other_kwargs = update_encoding(ds, kwargs)
-        self.assertDictEqual(enc, {'y': {'_FillValue': None},
-                                   'x': {'_FillValue': None},
-                                   'lon': {'chunksizes': (2, 2)},
-                                   'foo': {'chunksizes': (2, 2)},
-                                   'bar': {'chunksizes': (2, 2)}})
+        expected_dict = {
+            'y': {'_FillValue': None},
+            'x': {'_FillValue': None},
+            'lon': {'chunksizes': (2, 2)},
+            'foo': {'chunksizes': (2, 2)},
+            'bar': {'chunksizes': (2, 2)}
+        }
+        assert enc == expected_dict

-    def test_with_time(self):
+    def test_with_time(self, fake_ds):
         """Test data with a time dimension."""
         from satpy.writers.cf_writer import update_encoding

         # With time dimension
-        ds = self.ds.chunk(8).expand_dims({'time': [datetime(2009, 7, 1, 12, 15)]})
+        ds = fake_ds.chunk(8).expand_dims({'time': [datetime(2009, 7, 1, 12, 15)]})
         kwargs = {'encoding': {'bar': {'chunksizes': (1, 1, 1)}},
                   'other': 'kwargs'}
         enc, other_kwargs = update_encoding(ds, kwargs)
-        self.assertDictEqual(enc, {'y': {'_FillValue': None},
-                                   'x': {'_FillValue': None},
-                                   'lon': {'chunksizes': (2, 2)},
-                                   'foo': {'chunksizes': (1, 2, 2)},
-                                   'bar': {'chunksizes': (1, 1, 1)},
-                                   'time': {'_FillValue': None,
-                                            'calendar': 'proleptic_gregorian',
-                                            'units': 'days since 2009-07-01 12:15:00'},
-                                   'time_bnds': {'_FillValue': None,
-                                                 'calendar': 'proleptic_gregorian',
-                                                 'units': 'days since 2009-07-01 12:15:00'}})
-
+        expected_dict = {
+            'y': {'_FillValue': None},
+            'x': {'_FillValue': None},
+            'lon': {'chunksizes': (2, 2)},
+            'foo': {'chunksizes': (1, 2, 2)},
+            'bar': {'chunksizes': (1, 1, 1)},
+            'time': {'_FillValue': None,
+                     'calendar': 'proleptic_gregorian',
+                     'units': 'days since 2009-07-01 12:15:00'},
+            'time_bnds': {'_FillValue': None,
+                          'calendar': 'proleptic_gregorian',
+                          'units': 'days since 2009-07-01 12:15:00'}
+        }
+        assert enc == expected_dict
         # User-defined encoding may not be altered
-        self.assertDictEqual(kwargs['encoding'], {'bar': {'chunksizes': (1, 1, 1)}})
+        assert kwargs['encoding'] == {'bar': {'chunksizes': (1, 1, 1)}}


 class TestEncodingKwarg:
@@ -1384,5 +1448,5 @@ def _should_use_compression_keyword():
     versions = _get_backend_versions()
     return (
         versions["libnetcdf"] >= Version("4.9.0") and
-        versions["xarray"] >= Version("2023.05")
+        versions["xarray"] >= Version("2023.8")
     )
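Before moving on to the ninjogeotiff changes: the behaviour covered by `EncodingUpdateTest` can be summarized with a small self-contained sketch (illustrative only, toy data defined locally):

    import xarray as xr

    from satpy.writers.cf_writer import update_encoding

    ds = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1.0, 2.0], [3.0, 4.0]])})
    kwargs = {'encoding': {'1': {'dtype': 'float32'}}, 'other': 'kwargs'}
    encoding, other = update_encoding(ds, kwargs, numeric_name_prefix='CHANNEL_')
    assert 'CHANNEL_1' in encoding  # the user-supplied key '1' is remapped to the prefixed name
    assert other == {'other': 'kwargs'}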
diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py
index 11a843c8ee..ac75b68cbf 100644
--- a/satpy/tests/writer_tests/test_ninjogeotiff.py
+++ b/satpy/tests/writer_tests/test_ninjogeotiff.py
@@ -28,7 +28,7 @@
 from pyresample import create_area_def
 from satpy import Scene
-from satpy.writers import get_enhanced_image
+from satpy.writers import get_enhanced_image, to_image

 try:
     from math import prod
@@ -53,7 +53,7 @@ def _get_fake_da(lo, hi, shp, dtype="f4"):
     This is more or less a 2d linspace: it'll return a 2-d dask array
     of shape ``shp``, lowest value is ``lo``, highest value is ``hi``.
     """
-    return da.arange(lo, hi, (hi-lo)/prod(shp), chunks=50, dtype=dtype).reshape(shp)
+    return da.linspace(lo, hi, prod(shp), dtype=dtype).reshape(shp)
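The `_get_fake_da` change above is what shifts the expected `ninjo_Gradient`/`ninjo_AxisIntercept` tag values further down in this file: `arange` excludes the stop value while `linspace` includes it, so the fake data now actually reaches `hi`. Plain numpy shows the difference (dask mirrors this behaviour):

    import numpy as np

    lo, hi, n = 0.0, 273.0, 4
    np.arange(lo, hi, (hi - lo) / n)  # [0.0, 68.25, 136.5, 204.75] -> never reaches 273
    np.linspace(lo, hi, n)            # [0.0, 91.0, 182.0, 273.0]  -> endpoint included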
@@ -203,6 +203,23 @@ def test_image_small_mid_atlantic_K_L(test_area_tiny_eqc_sphere):
     return get_enhanced_image(arr)


+@pytest.fixture(scope="module")
+def test_image_small_mid_atlantic_L_no_quantity(test_area_tiny_eqc_sphere):
+    """Get a small test image, mode L, over Atlantic, with non-quantity values.
+
+    This could be the case, for example, for vis_with_night_ir.
+    """
+    arr = xr.DataArray(
+        _get_fake_da(0, 273, test_area_tiny_eqc_sphere.shape + (1,)),
+        dims=("y", "x", "bands"),
+        attrs={
+            "name": "test-small-mid-atlantic",
+            "start_time": datetime.datetime(1985, 8, 13, 13, 0),
+            "area": test_area_tiny_eqc_sphere,
+            "units": "N/A"})
+    return get_enhanced_image(arr)
+
+
 @pytest.fixture(scope="module")
 def test_image_large_asia_RGB(test_area_small_eqc_wgs84):
     """Get a large-ish test image in mode RGB, over Asia."""
@@ -230,7 +247,7 @@ def test_image_small_arctic_P(test_area_tiny_stereographic_wgs84):
             "start_time": datetime.datetime(2027, 8, 2, 8, 20),
             "area": test_area_tiny_stereographic_wgs84,
             "mode": "P"})
-    return get_enhanced_image(arr)
+    return to_image(arr)


 @pytest.fixture(scope="module")
@@ -489,9 +506,9 @@ def test_write_and_read_file(test_image_small_mid_atlantic_L, tmp_path):
     assert tgs["ninjo_FileName"] == fn
     assert tgs["ninjo_DataSource"] == "dowsing rod"
     np.testing.assert_allclose(float(tgs["ninjo_Gradient"]),
-                               0.4653780307919959)
+                               0.46771654391851947)
     np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]),
-                               -79.86837954904149)
+                               -79.86771951938239)


 def test_write_and_read_file_RGB(test_image_large_asia_RGB, tmp_path):
@@ -542,8 +559,8 @@ def test_write_and_read_file_LA(test_image_latlon, tmp_path):
     tgs = src.tags()
     assert tgs["ninjo_FileName"] == fn
     assert tgs["ninjo_DataSource"] == "dowsing rod"
-    np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), 0.30816176470588236)
-    np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), -49.603125)
+    np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), 0.31058823679007746)
+    np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), -49.6)
     assert tgs["ninjo_PhysicValue"] == "Reflectance"
     assert tgs["ninjo_TransparentPixel"] == "-1"  # meaning not set
@@ -574,6 +591,8 @@ def test_write_and_read_file_P(test_image_small_arctic_P, tmp_path):
     tgs = src.tags()
     assert tgs["ninjo_FileName"] == fn
     assert tgs["ninjo_DataSource"] == "dowsing rod"
+    assert "ninjo_Gradient" not in tgs
+    assert "ninjo_AxisIntercept" not in tgs


 def test_write_and_read_file_units(
@@ -609,9 +628,9 @@ def test_write_and_read_file_units(
     assert tgs["ninjo_FileName"] == fn
     assert tgs["ninjo_DataSource"] == "dowsing rod"
     np.testing.assert_allclose(float(tgs["ninjo_Gradient"]),
-                               0.465379, rtol=1e-5)
+                               0.467717, rtol=1e-5)
     np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]),
-                               -79.86838)
+                               -79.86771)
     fn2 = os.fspath(tmp_path / "test2.tif")
     with caplog.at_level(logging.WARNING):
         ngtw.save_dataset(
@@ -633,6 +652,34 @@ def test_write_and_read_file_units(
             "No conversion applied.") in caplog.text

+@pytest.mark.parametrize("unit", ["N/A", "1", ""])
+def test_write_and_read_no_quantity(
+        test_image_small_mid_atlantic_L_no_quantity, tmp_path, unit):
+    """Test that no scale/offset is written if no valid units are present."""
+    import rasterio
+
+    from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter
+    fn = os.fspath(tmp_path / "test.tif")
+    ngtw = NinJoGeoTIFFWriter()
+    ngtw.save_dataset(
+        test_image_small_mid_atlantic_L_no_quantity.data,
+        filename=fn,
+        blockxsize=128,
+        blockysize=128,
+        compress="lzw",
+        predictor=2,
+        PhysicUnit=unit,
+        PhysicValue="N/A",
+        SatelliteNameID=6400014,
+        ChannelID=900015,
+        DataType="GORN",
+        DataSource="dowsing rod")
+    src = rasterio.open(fn)
+    tgs = src.tags()
+    assert "ninjo_Gradient" not in tgs.keys()
+    assert "ninjo_AxisIntercept" not in tgs.keys()
+
+
 def test_write_and_read_via_scene(test_image_small_mid_atlantic_L, tmp_path):
     """Test that all attributes are written also when writing from scene.
@@ -810,7 +857,6 @@ def test_get_max_gray_value_RGB(ntg2):
     assert ntg2.get_max_gray_value() == 255


-@pytest.mark.xfail(reason="Needs GeoTIFF P fixes, see GH#1844")
 def test_get_max_gray_value_P(ntg3):
     """Test getting max gray value for mode P."""
     assert ntg3.get_max_gray_value().compute().item() == 10
diff --git a/satpy/writers/cf/__init__.py b/satpy/writers/cf/__init__.py
new file mode 100644
index 0000000000..f597a9264c
--- /dev/null
+++ b/satpy/writers/cf/__init__.py
@@ -0,0 +1,3 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""Code for generation of CF-compliant datasets."""
diff --git a/satpy/writers/cf/coords_attrs.py b/satpy/writers/cf/coords_attrs.py
new file mode 100644
index 0000000000..c7e559adc2
--- /dev/null
+++ b/satpy/writers/cf/coords_attrs.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""Set CF-compliant attributes to x and y spatial dimensions."""
+
+import logging
+
+from satpy.writers.cf.crs import _is_projected
+
+logger = logging.getLogger(__name__)
+
+
+def add_xy_coords_attrs(dataarray):
+    """Add relevant attributes to x, y coordinates."""
+    # If there are no coords, return dataarray
+    if not dataarray.coords.keys() & {"x", "y", "crs"}:
+        return dataarray
+    # If projected area
+    if _is_projected(dataarray):
+        dataarray = _add_xy_projected_coords_attrs(dataarray)
+    else:
+        dataarray = _add_xy_geographic_coords_attrs(dataarray)
+    if 'crs' in dataarray.coords:
+        dataarray = dataarray.drop_vars('crs')
+    return dataarray
+
+
+def _add_xy_projected_coords_attrs(dataarray, x='x', y='y'):
+    """Add relevant attributes to x, y coordinates of a projected CRS."""
+    if x in dataarray.coords:
+        dataarray[x].attrs['standard_name'] = 'projection_x_coordinate'
+        dataarray[x].attrs['units'] = 'm'
+    if y in dataarray.coords:
+        dataarray[y].attrs['standard_name'] = 'projection_y_coordinate'
+        dataarray[y].attrs['units'] = 'm'
+    return dataarray
+
+
+def _add_xy_geographic_coords_attrs(dataarray, x='x', y='y'):
+    """Add relevant attributes to x, y coordinates of a geographic CRS."""
+    if x in dataarray.coords:
+        dataarray[x].attrs['standard_name'] = 'longitude'
+        dataarray[x].attrs['units'] = 'degrees_east'
+    if y in dataarray.coords:
+        dataarray[y].attrs['standard_name'] = 'latitude'
+        dataarray[y].attrs['units'] = 'degrees_north'
+    return dataarray
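A usage sketch for the new module (illustrative only; the toy DataArray is defined locally). With no `area` attribute and no `crs` coordinate, the projected/geographic decision falls back to the coordinate units:

    import xarray as xr

    from satpy.writers.cf.coords_attrs import add_xy_coords_attrs

    da = xr.DataArray([[1, 2], [3, 4]], dims=('y', 'x'),
                      coords={'x': ('x', [0, 1], {'units': 'm'}),
                              'y': ('y', [0, 1], {'units': 'm'})})
    da = add_xy_coords_attrs(da)
    assert da['x'].attrs['standard_name'] == 'projection_x_coordinate'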
diff --git a/satpy/writers/cf/crs.py b/satpy/writers/cf/crs.py
new file mode 100644
index 0000000000..e6952a484f
--- /dev/null
+++ b/satpy/writers/cf/crs.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""CRS utility."""
+
+import logging
+from contextlib import suppress
+
+from pyresample.geometry import AreaDefinition, SwathDefinition
+
+logger = logging.getLogger(__name__)
+
+
+def _is_projected(dataarray):
+    """Guess whether data are projected or not."""
+    crs = _try_to_get_crs(dataarray)
+    if crs:
+        return crs.is_projected
+    units = _try_get_units_from_coords(dataarray)
+    if units:
+        if units.endswith("m"):
+            return True
+        if units.startswith("degrees"):
+            return False
+    logger.warning("Failed to tell if data are projected. Assuming yes.")
+    return True
+
+
+def _try_to_get_crs(dataarray):
+    """Try to get a CRS from attributes."""
+    if "area" in dataarray.attrs:
+        if isinstance(dataarray.attrs["area"], AreaDefinition):
+            return dataarray.attrs["area"].crs
+        if not isinstance(dataarray.attrs["area"], SwathDefinition):
+            logger.warning(
+                f"Could not tell CRS from area of type {type(dataarray.attrs['area']).__name__:s}. "
+                "Assuming projected CRS.")
+    if "crs" in dataarray.coords:
+        return dataarray.coords["crs"].item()
+
+
+def _try_get_units_from_coords(dataarray):
+    """Try to retrieve coordinate x/y units."""
+    for c in ["x", "y"]:
+        with suppress(KeyError):
+            # If the data has only 1 dimension, it has only one of x or y coords
+            if "units" in dataarray.coords[c].attrs:
+                return dataarray.coords[c].attrs["units"]
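The heuristic tries, in order: the CRS of an `area` attribute, a `crs` coordinate, and finally the x/y coordinate units, defaulting to projected with a warning. A sketch of the units fallback (illustrative only):

    import xarray as xr

    from satpy.writers.cf.crs import _is_projected

    da = xr.DataArray([[1, 2], [3, 4]], dims=('y', 'x'),
                      coords={'x': ('x', [0, 1], {'units': 'degrees_east'}),
                              'y': ('y', [0, 1], {'units': 'degrees_north'})})
    assert not _is_projected(da)  # degree units -> geographic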
diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py
index 0cfb808425..b9a24b9292 100644
--- a/satpy/writers/cf_writer.py
+++ b/satpy/writers/cf_writer.py
@@ -44,7 +44,7 @@
   coordinate is identical for all datasets, the prefix can be removed by setting ``pretty=True``.
 * Some dataset names start with a digit, like AVHRR channels 1, 2, 3a, 3b, 4 and 5. This doesn't comply with CF
   https://cfconventions.org/Data/cf-conventions/cf-conventions-1.7/build/ch02s03.html. These channels are prefixed
-  with `CHANNEL_` by default. This can be controlled with the variable `numeric_name_prefix` to `save_datasets`.
+  with ``"CHANNEL_"`` by default. This can be controlled with the variable `numeric_name_prefix` to `save_datasets`.
   Setting it to `None` or `''` will skip the prefixing.

 Grouping
@@ -160,7 +160,6 @@
 import logging
 import warnings
 from collections import OrderedDict, defaultdict
-from contextlib import suppress
 from datetime import datetime

 import numpy as np
@@ -171,6 +170,7 @@
 from xarray.coding.times import CFDatetimeCoder

 from satpy.writers import Writer
+from satpy.writers.cf.coords_attrs import add_xy_coords_attrs
 from satpy.writers.utils import flatten_dict

 logger = logging.getLogger(__name__)
@@ -203,6 +203,7 @@
               np.string_]

 # Unsigned and int64 isn't CF 1.7 compatible
+# Note: Unsigned and int64 are CF 1.9 compatible
 CF_DTYPES = [np.dtype('int8'),
              np.dtype('int16'),
              np.dtype('int32'),
@@ -213,32 +214,26 @@
 CF_VERSION = 'CF-1.7'


-def create_grid_mapping(area):
-    """Create the grid mapping instance for `area`."""
-    import pyproj
-    if Version(pyproj.__version__) < Version('2.4.1'):
-        # technically 2.2, but important bug fixes in 2.4.1
-        raise ImportError("'cf' writer requires pyproj 2.4.1 or greater")
-    # let pyproj do the heavily lifting
-    # pyproj 2.0+ required
-    grid_mapping = area.crs.to_cf()
-    return area.area_id, grid_mapping
-
-
-def get_extra_ds(dataset, keys=None):
-    """Get the extra datasets associated to *dataset*."""
+def get_extra_ds(dataarray, keys=None):
+    """Get the ancillary_variables DataArrays associated to a dataset."""
     ds_collection = {}
-    for ds in dataset.attrs.get('ancillary_variables', []):
-        if keys and ds.name not in keys:
-            keys.append(ds.name)
-            ds_collection.update(get_extra_ds(ds, keys))
-    ds_collection[dataset.attrs['name']] = dataset
-
+    # Retrieve ancillary variable dataarrays
+    for ancillary_dataarray in dataarray.attrs.get('ancillary_variables', []):
+        ancillary_variable = ancillary_dataarray.name
+        if keys and ancillary_variable not in keys:
+            keys.append(ancillary_variable)
+            ds_collection.update(get_extra_ds(ancillary_dataarray, keys=keys))
+    # Add input dataarray
+    ds_collection[dataarray.attrs['name']] = dataarray
     return ds_collection
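Note the `keys` guard: with the default `keys=None` the `if keys and ...` branch is falsy, so ancillary variables are not recursed into; this is what the comment in `_collect_cf_dataset` further below alludes to. A minimal sketch (illustrative only, toy data):

    import xarray as xr

    from satpy.writers.cf_writer import get_extra_ds

    main = xr.DataArray([5, 6], dims='y', attrs={'name': 'var1'})
    collection = get_extra_ds(main)
    assert set(collection) == {'var1'}  # only the input DataArray, keyed by its 'name' attribute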


-def area2lonlat(dataarray):
-    """Convert an area to longitudes and latitudes."""
+# ###--------------------------------------------------------------------------.
+# ### CF-Area
+
+
+def add_lonlat_coords(dataarray):
+    """Add 'longitude' and 'latitude' coordinates to DataArray."""
     dataarray = dataarray.copy()
     area = dataarray.attrs['area']
     ignore_dims = {dim: 0 for dim in dataarray.dims if dim not in ['x', 'y']}
@@ -257,36 +252,102 @@
     return dataarray


-def area2gridmapping(dataarray):
+def _create_grid_mapping(area):
+    """Create the grid mapping instance for `area`."""
+    import pyproj
+
+    if Version(pyproj.__version__) < Version('2.4.1'):
+        # technically 2.2, but important bug fixes in 2.4.1
+        raise ImportError("'cf' writer requires pyproj 2.4.1 or greater")
+    # let pyproj do the heavy lifting (pyproj 2.0+ required)
+    grid_mapping = area.crs.to_cf()
+    return area.area_id, grid_mapping
+
+
+def _add_grid_mapping(dataarray):
     """Convert an area to at CF grid mapping."""
     dataarray = dataarray.copy()
     area = dataarray.attrs['area']
-    gmapping_var_name, attrs = create_grid_mapping(area)
+    gmapping_var_name, attrs = _create_grid_mapping(area)
     dataarray.attrs['grid_mapping'] = gmapping_var_name
     return dataarray, xr.DataArray(0, attrs=attrs, name=gmapping_var_name)


-def area2cf(dataarray, strict=False, got_lonlats=False):
+def area2cf(dataarray, include_lonlats=False, got_lonlats=False):
     """Convert an area to at CF grid mapping or lon and lats."""
     res = []
-    if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or strict):
-        dataarray = area2lonlat(dataarray)
+    if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or include_lonlats):
+        dataarray = add_lonlat_coords(dataarray)
     if isinstance(dataarray.attrs['area'], AreaDefinition):
-        dataarray, gmapping = area2gridmapping(dataarray)
+        dataarray, gmapping = _add_grid_mapping(dataarray)
         res.append(gmapping)
     res.append(dataarray)
     return res


-def make_time_bounds(start_times, end_times):
-    """Create time bounds for the current *dataarray*."""
-    start_time = min(start_time for start_time in start_times
-                     if start_time is not None)
-    end_time = min(end_time for end_time in end_times
-                   if end_time is not None)
-    data = xr.DataArray([[np.datetime64(start_time), np.datetime64(end_time)]],
-                        dims=['time', 'bnds_1d'])
-    return data
+def is_lon_or_lat_dataarray(dataarray):
+    """Check if the DataArray represents the latitude or longitude coordinate."""
+    if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']:
+        return True
+    return False
+
+
+def has_projection_coords(ds_collection):
+    """Check if DataArray collection has a "longitude" or "latitude" DataArray."""
+    for dataarray in ds_collection.values():
+        if is_lon_or_lat_dataarray(dataarray):
+            return True
+    return False
+
+
+def make_alt_coords_unique(datas, pretty=False):
+    """Make non-dimensional coordinates unique among all datasets.
+
+    Non-dimensional (or alternative) coordinates, such as scanline timestamps,
+    may occur in multiple datasets with the same name and dimension
+    but different values.
+
+    In order to avoid conflicts, prepend the dataset name to the coordinate name.
+    If a non-dimensional coordinate is unique among all datasets and ``pretty=True``,
+    its name will not be modified.
+
+    Since all datasets must have the same projection coordinates,
+    this is not applied to latitude and longitude.
+
+    Args:
+        datas (dict):
+            Dictionary of (dataset name, dataset)
+        pretty (bool):
+            Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent.
+
+    Returns:
+        Dictionary holding the updated datasets
+
+    """
+    # Determine which non-dimensional coordinates are unique
+    tokens = defaultdict(set)
+    for dataset in datas.values():
+        for coord_name in dataset.coords:
+            if not is_lon_or_lat_dataarray(dataset[coord_name]) and coord_name not in dataset.dims:
+                tokens[coord_name].add(tokenize(dataset[coord_name].data))
+    coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()])
+
+    # Prepend dataset name, if not unique or no pretty-format desired
+    new_datas = datas.copy()
+    for coord_name, unique in coords_unique.items():
+        if not pretty or not unique:
+            if pretty:
+                warnings.warn(
+                    'Cannot pretty-format "{}" coordinates because they are '
+                    'not identical among the given datasets'.format(coord_name),
+                    stacklevel=2
+                )
+            for ds_name, dataset in datas.items():
+                if coord_name in dataset.coords:
+                    rename = {coord_name: '{}_{}'.format(ds_name, coord_name)}
+                    new_datas[ds_name] = new_datas[ds_name].rename(rename)
+
+    return new_datas


 def assert_xy_unique(datas):
@@ -335,65 +396,52 @@
     data.attrs.pop('coordinates', None)


-def dataset_is_projection_coords(dataset):
-    """Check if dataset is a projection coords."""
-    if 'standard_name' in dataset.attrs and dataset.attrs['standard_name'] in ['longitude', 'latitude']:
-        return True
-    return False
+# ###--------------------------------------------------------------------------.
+# ### CF-Time
+def add_time_bounds_dimension(ds, time="time"):
+    """Add time bounds dimension to xr.Dataset."""
+    start_times = []
+    end_times = []
+    for _var_name, data_array in ds.items():
+        start_times.append(data_array.attrs.get("start_time", None))
+        end_times.append(data_array.attrs.get("end_time", None))
+    start_time = min(start_time for start_time in start_times
+                     if start_time is not None)
+    end_time = min(end_time for end_time in end_times
+                   if end_time is not None)
+    ds['time_bnds'] = xr.DataArray([[np.datetime64(start_time),
+                                     np.datetime64(end_time)]],
+                                   dims=['time', 'bnds_1d'])
+    ds[time].attrs['bounds'] = "time_bnds"
+    ds[time].attrs['standard_name'] = "time"
+    return ds
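A self-contained sketch of the new CF-Time helper (illustrative only; toy values):

    from datetime import datetime

    import xarray as xr

    from satpy.writers.cf_writer import add_time_bounds_dimension

    da = xr.DataArray([[1.0]], dims=('time', 'y'),
                      coords={'time': [datetime(2018, 1, 1, 0, 0)]},
                      attrs={'start_time': datetime(2018, 1, 1, 0, 0),
                             'end_time': datetime(2018, 1, 1, 0, 15)})
    ds = add_time_bounds_dimension(xr.Dataset({'var1': da}), time='time')
    # 'time_bnds' is derived from the variables' start_time/end_time attributes
    assert ds['time'].attrs['bounds'] == 'time_bnds'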


-def has_projection_coords(ds_collection):
-    """Check if collection has a projection coords among data arrays."""
-    for dataset in ds_collection.values():
-        if dataset_is_projection_coords(dataset):
-            return True
-    return False
+def _process_time_coord(dataarray, epoch):
+    """Process the 'time' coordinate, if existing.

-def make_alt_coords_unique(datas, pretty=False):
-    """Make non-dimensional coordinates unique among all datasets.
+    It expands the DataArray with a time dimension if it does not yet exist.

-    Non-dimensional (or alternative) coordinates, such as scanline timestamps, may occur in multiple datasets with
-    the same name and dimension but different values. In order to avoid conflicts, prepend the dataset name to the
-    coordinate name. If a non-dimensional coordinate is unique among all datasets and ``pretty=True``, its name will not
-    be modified.
+    The function assumes

-    Since all datasets must have the same projection coordinates, this is not applied to latitude and longitude.
+    - that x and y dimensions have at least shape > 1
+    - the time coordinate has size 1

-    Args:
-        datas (dict):
-            Dictionary of (dataset name, dataset)
-        pretty (bool):
-            Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent.
+    """
+    if 'time' in dataarray.coords:
+        dataarray['time'].encoding['units'] = epoch
+        dataarray['time'].attrs['standard_name'] = 'time'
+        dataarray['time'].attrs.pop('bounds', None)

-    Returns:
-        Dictionary holding the updated datasets
+        if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape:
+            dataarray = dataarray.expand_dims('time')

-    """
-    # Determine which non-dimensional coordinates are unique
-    tokens = defaultdict(set)
-    for dataset in datas.values():
-        for coord_name in dataset.coords:
-            if not dataset_is_projection_coords(dataset[coord_name]) and coord_name not in dataset.dims:
-                tokens[coord_name].add(tokenize(dataset[coord_name].data))
-    coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()])
+    return dataarray

-    # Prepend dataset name, if not unique or no pretty-format desired
-    new_datas = datas.copy()
-    for coord_name, unique in coords_unique.items():
-        if not pretty or not unique:
-            if pretty:
-                warnings.warn(
-                    'Cannot pretty-format "{}" coordinates because they are '
-                    'not identical among the given datasets'.format(coord_name),
-                    stacklevel=2
-                )
-            for ds_name, dataset in datas.items():
-                if coord_name in dataset.coords:
-                    rename = {coord_name: '{}_{}'.format(ds_name, coord_name)}
-                    new_datas[ds_name] = new_datas[ds_name].rename(rename)

-    return new_datas
+# --------------------------------------------------------------------------.
+# ### Attributes


 class AttributeEncoder(json.JSONEncoder):
@@ -502,6 +550,86 @@ def encode_attrs_nc(attrs):
     return OrderedDict(encoded_attrs)


+def _add_ancillary_variables_attrs(dataarray):
+    """Replace ancillary_variables DataArray with a list of their name."""
+    list_ancillary_variable_names = [da_ancillary.attrs['name']
+                                     for da_ancillary in dataarray.attrs.get('ancillary_variables', [])]
+    if list_ancillary_variable_names:
+        dataarray.attrs['ancillary_variables'] = ' '.join(list_ancillary_variable_names)
+    else:
+        dataarray.attrs.pop("ancillary_variables", None)
+    return dataarray
+
+
+def _drop_exclude_attrs(dataarray, exclude_attrs):
+    """Remove user-specified list of attributes."""
+    if exclude_attrs is None:
+        exclude_attrs = []
+    for key in exclude_attrs:
+        dataarray.attrs.pop(key, None)
+    return dataarray
+
+
+def _remove_satpy_attrs(new_data):
+    """Remove _satpy attribute."""
+    satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')]
+    for satpy_attr in satpy_attrs:
+        new_data.attrs.pop(satpy_attr)
+    new_data.attrs.pop('_last_resampler', None)
+    return new_data
+
+
+def _format_prerequisites_attrs(dataarray):
+    """Reformat prerequisites attribute value to string."""
+    if 'prerequisites' in dataarray.attrs:
+        dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']]
+    return dataarray
+
+
+def _remove_none_attrs(dataarray):
+    """Remove attribute keys with None value."""
+    for key, val in dataarray.attrs.copy().items():
+        if val is None:
+            dataarray.attrs.pop(key)
+    return dataarray
+
+
+def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs):
+    """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr."""
+    dataarray = _remove_satpy_attrs(dataarray)
+    dataarray = _add_ancillary_variables_attrs(dataarray)
+    dataarray = _drop_exclude_attrs(dataarray, exclude_attrs)
+    dataarray = _format_prerequisites_attrs(dataarray)
+    dataarray = _remove_none_attrs(dataarray)
+    _ = dataarray.attrs.pop("area", None)
+
+    if 'long_name' not in dataarray.attrs and 'standard_name' not in dataarray.attrs:
+        dataarray.attrs['long_name'] = dataarray.name
+
+    if flatten_attrs:
+        dataarray.attrs = flatten_dict(dataarray.attrs)
+
+    dataarray.attrs = encode_attrs_nc(dataarray.attrs)
+
+    return dataarray
+
+
+def preprocess_header_attrs(header_attrs, flatten_attrs=False):
+    """Prepare file header attributes."""
+    if header_attrs is not None:
+        if flatten_attrs:
+            header_attrs = flatten_dict(header_attrs)
+        header_attrs = encode_attrs_nc(header_attrs)  # OrderedDict
+    else:
+        header_attrs = {}
+    header_attrs = _add_history(header_attrs)
+    return header_attrs
+
+
+# ###--------------------------------------------------------------------------.
+# ### netCDF encodings
+
+
 def _set_default_chunks(encoding, dataset):
     """Update encoding to preserve current dask chunks.

@@ -515,6 +643,7 @@
         )  # Chunksize may not exceed shape
         encoding.setdefault(var_name, {})
         encoding[var_name].setdefault('chunksizes', chunks)
+    return encoding


 def _set_default_fill_value(encoding, dataset):
@@ -529,13 +658,15 @@
     for coord_var in coord_vars:
         encoding.setdefault(coord_var, {})
         encoding[coord_var].update({'_FillValue': None})
+    return encoding


 def _set_default_time_encoding(encoding, dataset):
     """Set default time encoding.

-    Make sure time coordinates and bounds have the same units. Default is xarray's CF datetime
-    encoding, which can be overridden by user-defined encoding.
+    Make sure time coordinates and bounds have the same units.
+    Default is xarray's CF datetime encoding, which can be overridden
+    by user-defined encoding.
     """
     if 'time' in dataset:
         try:
@@ -551,21 +682,25 @@
             '_FillValue': None}
         encoding['time'] = time_enc
         encoding['time_bnds'] = bounds_enc  # FUTURE: Not required anymore with xarray-0.14+
+    return encoding


-def _set_encoding_dataset_names(encoding, dataset, numeric_name_prefix):
-    """Set Netcdf variable names encoding according to numeric_name_prefix.
+def _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix):
+    """Ensure variable names of the encoding dictionary account for numeric_name_prefix.

-    A lot of channel names in satpy starts with a digit. When writing data with the satpy_cf_nc
-    these channels are prepended with numeric_name_prefix.
-    This ensures this is also done with any matching variables in encoding.
+    A lot of channel names in satpy start with a digit.
+    When preparing CF-compliant datasets, these channels are prefixed with numeric_name_prefix.
+
+    If variable names in the encoding dictionary are numeric digits, their names are prefixed
+    with numeric_name_prefix.
     """
-    for _var_name, _variable in dataset.variables.items():
-        if not numeric_name_prefix or not _var_name.startswith(numeric_name_prefix):
+    for var_name in list(dataset.variables):
+        if not numeric_name_prefix or not var_name.startswith(numeric_name_prefix):
             continue
-        _orig_var_name = _var_name.replace(numeric_name_prefix, '')
-        if _orig_var_name in encoding:
-            encoding[_var_name] = encoding.pop(_orig_var_name)
+        orig_var_name = var_name.replace(numeric_name_prefix, '')
+        if orig_var_name in encoding:
+            encoding[var_name] = encoding.pop(orig_var_name)
+    return encoding


 def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'):
@@ -576,54 +711,341 @@
     """
     other_to_netcdf_kwargs = to_netcdf_kwargs.copy()
     encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy()
+    encoding = _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix)
+    encoding = _set_default_chunks(encoding, dataset)
+    encoding = _set_default_fill_value(encoding, dataset)
+    encoding = _set_default_time_encoding(encoding, dataset)
+    return encoding, other_to_netcdf_kwargs

-    _set_encoding_dataset_names(encoding, dataset, numeric_name_prefix)
-    _set_default_chunks(encoding, dataset)
-    _set_default_fill_value(encoding, dataset)
-    _set_default_time_encoding(encoding, dataset)

-    return encoding, other_to_netcdf_kwargs
+# ###--------------------------------------------------------------------------.
+# ### CF-conversion


 def _handle_dataarray_name(original_name, numeric_name_prefix):
-    name = original_name
-    if name[0].isdigit():
+    if original_name[0].isdigit():
         if numeric_name_prefix:
-            name = numeric_name_prefix + original_name
+            new_name = numeric_name_prefix + original_name
         else:
             warnings.warn(
-                'Invalid NetCDF dataset name: {} starts with a digit.'.format(name),
+                f'Invalid NetCDF dataset name: {original_name} starts with a digit.',
                 stacklevel=5
             )
-    return original_name, name
+            new_name = original_name  # occurs when numeric_name_prefix = '', None or False
+    else:
+        new_name = original_name
+    return original_name, new_name
+
+
+def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name):
+    """Change the DataArray name by prepending numeric_name_prefix if the name is a digit."""
+    original_name = None
+    dataarray = dataarray.copy()
+    if 'name' in dataarray.attrs:
+        original_name = dataarray.attrs.pop('name')
+        original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix)
+        dataarray = dataarray.rename(new_name)
+
+    if include_orig_name and numeric_name_prefix and original_name and original_name != new_name:
+        dataarray.attrs['original_name'] = original_name
+
+    return dataarray


-def _set_history(root):
+def _add_history(attrs):
+    """Add 'history' attribute to dictionary."""
     _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow())
-    if 'history' in root.attrs:
-        if isinstance(root.attrs['history'], list):
-            root.attrs['history'] = ''.join(root.attrs['history'])
-        root.attrs['history'] += '\n' + _history_create
+    if 'history' in attrs:
+        if isinstance(attrs['history'], list):
+            attrs['history'] = ''.join(attrs['history'])
+        attrs['history'] += '\n' + _history_create
     else:
-        root.attrs['history'] = _history_create
+        attrs['history'] = _history_create
+    return attrs
+
+
+def _get_groups(groups, list_datarrays):
+    """Return a dictionary with the list of xr.DataArray associated to each group.

-def _get_groups(groups, datasets, root):
+    If no groups (groups=None), return all DataArrays attached to a single None key.
+    Else, collect the DataArrays associated to each group.
+    """
     if groups is None:
-        # Groups are not CF-1.7 compliant
-        if 'Conventions' not in root.attrs:
-            root.attrs['Conventions'] = CF_VERSION
-        # Write all datasets to the file root without creating a group
-        groups_ = {None: datasets}
+        grouped_dataarrays = {None: list_datarrays}
     else:
-        # User specified a group assignment using dataset names. Collect the corresponding datasets.
-        groups_ = defaultdict(list)
-        for dataset in datasets:
+        grouped_dataarrays = defaultdict(list)
+        for datarray in list_datarrays:
             for group_name, group_members in groups.items():
-                if dataset.attrs['name'] in group_members:
-                    groups_[group_name].append(dataset)
+                if datarray.attrs['name'] in group_members:
+                    grouped_dataarrays[group_name].append(datarray)
                     break
-    return groups_
+    return grouped_dataarrays
+
+
+def make_cf_dataarray(dataarray,
+                      epoch=EPOCH,
+                      flatten_attrs=False,
+                      exclude_attrs=None,
+                      include_orig_name=True,
+                      numeric_name_prefix='CHANNEL_'):
+    """Make the xr.DataArray CF-compliant.
+
+    Parameters
+    ----------
+    dataarray : xr.DataArray
+        The data array to be made CF-compliant.
+    epoch : str, optional
+        Reference time for encoding of time coordinates.
+    flatten_attrs : bool, optional
+        If True, flatten dict-type attributes.
+        The default is False.
+    exclude_attrs : list, optional
+        List of dataset attributes to be excluded.
+        The default is None.
+    include_orig_name : bool, optional
+        Include the original dataset name in the netcdf variable attributes.
+        The default is True.
+    numeric_name_prefix : str, optional
+        Prepend dataset name with this if starting with a digit.
+        The default is ``"CHANNEL_"``.
+
+    Returns
+    -------
+    new_data : xr.DataArray
+        CF-compliant xr.DataArray.
+
+    """
+    dataarray = _preprocess_dataarray_name(dataarray=dataarray,
+                                           numeric_name_prefix=numeric_name_prefix,
+                                           include_orig_name=include_orig_name)
+    dataarray = preprocess_datarray_attrs(dataarray=dataarray,
+                                          flatten_attrs=flatten_attrs,
+                                          exclude_attrs=exclude_attrs)
+    dataarray = add_xy_coords_attrs(dataarray)
+    dataarray = _process_time_coord(dataarray, epoch=epoch)
+    return dataarray
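Pulling the pieces together, a minimal sketch of this entry point for a single DataArray (illustrative only; toy data):

    import xarray as xr

    from satpy.writers.cf_writer import make_cf_dataarray

    da = xr.DataArray([[1.0, 2.0], [3.0, 4.0]], dims=('y', 'x'), attrs={'name': '1'})
    da_cf = make_cf_dataarray(da, numeric_name_prefix='CHANNEL_')
    assert da_cf.name == 'CHANNEL_1'                 # digit-named channel got prefixed
    assert da_cf.attrs['original_name'] == '1'       # original name kept as an attribute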
+
+
+def _collect_cf_dataset(list_dataarrays,
+                        epoch=EPOCH,
+                        flatten_attrs=False,
+                        exclude_attrs=None,
+                        include_lonlats=True,
+                        pretty=False,
+                        include_orig_name=True,
+                        numeric_name_prefix='CHANNEL_'):
+    """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset.
+
+    Parameters
+    ----------
+    list_dataarrays : list
+        List of DataArrays to make CF compliant and merge into a xr.Dataset.
+    epoch : str
+        Reference time for encoding the time coordinates (if available).
+        Example format: "seconds since 1970-01-01 00:00:00".
+        If None, the default reference time is retrieved using `from satpy.writers.cf_writer import EPOCH`.
+    flatten_attrs : bool, optional
+        If True, flatten dict-type attributes.
+    exclude_attrs : list, optional
+        List of xr.DataArray attribute names to be excluded.
+    include_lonlats : bool, optional
+        If True, include 'latitude' and 'longitude' coordinates also for a satpy Scene defined on an AreaDefinition.
+        If the 'area' attribute is a SwathDefinition, latitude and longitude coordinates are always included.
+    pretty : bool, optional
+        Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent.
+    include_orig_name : bool, optional
+        Include the original dataset name as a variable attribute in the xr.Dataset.
+    numeric_name_prefix : str, optional
+        Prefix to add to each variable whose name starts with a digit.
+        Use '' or None to leave this out.
+
+    Returns
+    -------
+    ds : xr.Dataset
+        A partially CF-compliant xr.Dataset
+    """
+    # Create dictionary of input dataarrays
+    # --> Since keys=None, it never retrieves ancillary variables !!!
+    ds_collection = {}
+    for dataarray in list_dataarrays:
+        ds_collection.update(get_extra_ds(dataarray))
+
+    # Check if one DataArray in the collection has 'longitude' or 'latitude'
+    got_lonlats = has_projection_coords(ds_collection)
+
+    # Sort dictionary by keys name
+    ds_collection = dict(sorted(ds_collection.items()))
+
+    dict_dataarrays = {}
+    for dataarray in ds_collection.values():
+        dataarray_type = dataarray.dtype
+        if dataarray_type not in CF_DTYPES:
+            warnings.warn(
+                f'dtype {dataarray_type} not compatible with {CF_VERSION}.',
+                stacklevel=3
+            )
+        # Deep copy the datarray since adding/modifying attributes and coordinates
+        dataarray = dataarray.copy(deep=True)
+
+        # Add CF-compliant area information from the pyresample area
+        # - If include_lonlats=True, add latitude and longitude coordinates
+        # - Add grid_mapping attribute to the DataArray
+        # - Return the CRS DataArray as first list element
+        # - Return the CF-compliant input DataArray as second list element
+        try:
+            list_new_dataarrays = area2cf(dataarray,
+                                          include_lonlats=include_lonlats,
+                                          got_lonlats=got_lonlats)
+        except KeyError:
+            list_new_dataarrays = [dataarray]
+
+        # Ensure each DataArray is CF-compliant
+        # --> NOTE: Here the CRS DataArray is repeatedly overwritten
+        # --> NOTE: If the input list_dataarrays contains different pyresample areas with the same name,
+        #           area information can be lost here !!!
+        for new_dataarray in list_new_dataarrays:
+            new_dataarray = make_cf_dataarray(new_dataarray,
+                                              epoch=epoch,
+                                              flatten_attrs=flatten_attrs,
+                                              exclude_attrs=exclude_attrs,
+                                              include_orig_name=include_orig_name,
+                                              numeric_name_prefix=numeric_name_prefix)
+            dict_dataarrays[new_dataarray.name] = new_dataarray
+
+    # Check that all DataArrays have the same size
+    assert_xy_unique(dict_dataarrays)
+
+    # Deal with the 'coordinates' attributes indicating lat/lon coords
+    # NOTE: this currently is dropped by default !!!
+    link_coords(dict_dataarrays)
+
+    # Ensure non-dimensional coordinates are unique across DataArrays
+    # --> If not unique, prepend the DataArray name to the coordinate
+    # --> If unique, the DataArray name is only prepended when pretty=False
+    # --> 'longitude' and 'latitude' coordinates are not prepended
+    dict_dataarrays = make_alt_coords_unique(dict_dataarrays, pretty=pretty)
+
+    # Create a xr.Dataset
+    ds = xr.Dataset(dict_dataarrays)
+    return ds
+
+
+def collect_cf_datasets(list_dataarrays,
+                        header_attrs=None,
+                        exclude_attrs=None,
+                        flatten_attrs=False,
+                        pretty=True,
+                        include_lonlats=True,
+                        epoch=EPOCH,
+                        include_orig_name=True,
+                        numeric_name_prefix='CHANNEL_',
+                        groups=None):
+    """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Datasets.
+
+    If the xr.DataArrays do not share the same dimensions, it creates a collection
+    of xr.Datasets sharing the same dimensions.
+
+    Parameters
+    ----------
+    list_dataarrays (list):
+        List of DataArrays to make CF compliant and merge into groups of xr.Datasets.
+    header_attrs (dict):
+        Global attributes of the output xr.Dataset.
+    epoch (str):
+        Reference time for encoding the time coordinates (if available).
+        Example format: "seconds since 1970-01-01 00:00:00".
+        If None, the default reference time is retrieved using `from satpy.writers.cf_writer import EPOCH`.
+    flatten_attrs (bool):
+        If True, flatten dict-type attributes.
+    exclude_attrs (list):
+        List of xr.DataArray attribute names to be excluded.
+    include_lonlats (bool):
+        If True, include 'latitude' and 'longitude' coordinates also for a satpy Scene defined on an AreaDefinition.
+        If the 'area' attribute is a SwathDefinition, latitude and longitude coordinates are always included.
+    pretty (bool):
+        Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent.
+    include_orig_name (bool):
+        Include the original dataset name as a variable attribute in the xr.Dataset.
+    numeric_name_prefix (str):
+        Prefix to add to each variable whose name starts with a digit.
+        Use '' or None to leave this out.
+    groups (dict):
+        Group datasets according to the given assignment:
+
+            `{'group_name': ['dataset_name1', 'dataset_name2', ...]}`
+
+        It is used to create grouped netCDFs using the CFWriter.
+        If None (the default), no groups will be created.
+
+    Returns
+    -------
+    grouped_datasets : dict
+        A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset}
+    header_attrs : dict
+        Global attributes to be attached to the xr.Dataset / netCDF file.
+    """
+    if not list_dataarrays:
+        raise RuntimeError("None of the requested datasets have been "
+                           "generated or could not be loaded. Requested "
+                           "composite inputs may need to have matching "
+                           "dimensions (eg. through resampling).")
+
+    header_attrs = preprocess_header_attrs(header_attrs=header_attrs,
+                                           flatten_attrs=flatten_attrs)
+
+    # Retrieve groups
+    # - If groups is None: {None: list_dataarrays}
+    # - If groups is not None: {group_name: [xr.DataArray, xr.DataArray, ...], ...}
+    # Note: if all dataset names are wrong, it behaves like groups=None !
+    grouped_dataarrays = _get_groups(groups, list_dataarrays)
+    is_grouped = len(grouped_dataarrays) >= 2
+
+    # If not grouped, add CF conventions.
+    # - If the 'Conventions' key is already present, do not overwrite !
+ if "Conventions" not in header_attrs and not is_grouped: + header_attrs['Conventions'] = CF_VERSION + + # Create dictionary of group xr.Datasets + # --> If no groups (groups=None) --> group_name=None + grouped_datasets = {} + for group_name, group_dataarrays in grouped_dataarrays.items(): + ds = _collect_cf_dataset( + list_dataarrays=group_dataarrays, + epoch=epoch, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, + include_lonlats=include_lonlats, + pretty=pretty, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) + + if not is_grouped: + ds.attrs = header_attrs + + if 'time' in ds: + ds = add_time_bounds_dimension(ds, time="time") + + grouped_datasets[group_name] = ds + return grouped_datasets, header_attrs + + +def _sanitize_writer_kwargs(writer_kwargs): + """Remove satpy-specific kwargs.""" + writer_kwargs = copy.deepcopy(writer_kwargs) + satpy_kwargs = ['overlay', 'decorate', 'config_files'] + for kwarg in satpy_kwargs: + writer_kwargs.pop(kwarg, None) + return writer_kwargs + + +def _initialize_root_netcdf(filename, engine, header_attrs, to_netcdf_kwargs): + """Initialize root empty netCDF.""" + root = xr.Dataset({}, attrs=header_attrs) + init_nc_kwargs = to_netcdf_kwargs.copy() + init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point + init_nc_kwargs.pop('unlimited_dims', None) + written = [root.to_netcdf(filename, engine=engine, mode='w', **init_nc_kwargs)] + return written class CFWriter(Writer): @@ -648,198 +1070,28 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, numeric_name_prefix (str): Prepend dataset name with this if starting with a digit """ - if exclude_attrs is None: - exclude_attrs = [] - - original_name = None - new_data = dataarray.copy() - if 'name' in new_data.attrs: - name = new_data.attrs.pop('name') - original_name, name = _handle_dataarray_name(name, numeric_name_prefix) - new_data = new_data.rename(name) - - CFWriter._remove_satpy_attributes(new_data) - - new_data = CFWriter._encode_time(new_data, epoch) - new_data = CFWriter._encode_coords(new_data) - - # Remove area as well as user-defined attributes - for key in ['area'] + exclude_attrs: - new_data.attrs.pop(key, None) - - anc = [ds.attrs['name'] - for ds in new_data.attrs.get('ancillary_variables', [])] - if anc: - new_data.attrs['ancillary_variables'] = ' '.join(anc) - # TODO: make this a grid mapping or lon/lats - # new_data.attrs['area'] = str(new_data.attrs.get('area')) - CFWriter._cleanup_attrs(new_data) - - if 'long_name' not in new_data.attrs and 'standard_name' not in new_data.attrs: - new_data.attrs['long_name'] = new_data.name - if 'prerequisites' in new_data.attrs: - new_data.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in new_data.attrs['prerequisites']] - - if include_orig_name and numeric_name_prefix and original_name and original_name != name: - new_data.attrs['original_name'] = original_name - - # Flatten dict-type attributes, if desired - if flatten_attrs: - new_data.attrs = flatten_dict(new_data.attrs) - - # Encode attributes to netcdf-compatible datatype - new_data.attrs = encode_attrs_nc(new_data.attrs) - - return new_data + warnings.warn('CFWriter.da2cf is deprecated.' 
+ 'Use satpy.writers.cf_writer.make_cf_dataarray instead.', + DeprecationWarning, stacklevel=3) + return make_cf_dataarray(dataarray=dataarray, + epoch=epoch, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) @staticmethod - def _cleanup_attrs(new_data): - for key, val in new_data.attrs.copy().items(): - if val is None: - new_data.attrs.pop(key) - if key == 'ancillary_variables' and val == []: - new_data.attrs.pop(key) - - @staticmethod - def _encode_coords(new_data): - """Encode coordinates.""" - if not new_data.coords.keys() & {"x", "y", "crs"}: - # there are no coordinates - return new_data - is_projected = CFWriter._is_projected(new_data) - if is_projected: - new_data = CFWriter._encode_xy_coords_projected(new_data) - else: - new_data = CFWriter._encode_xy_coords_geographic(new_data) - if 'crs' in new_data.coords: - new_data = new_data.drop_vars('crs') - return new_data - - @staticmethod - def _is_projected(new_data): - """Guess whether data are projected or not.""" - crs = CFWriter._try_to_get_crs(new_data) - if crs: - return crs.is_projected - units = CFWriter._try_get_units_from_coords(new_data) - if units: - if units.endswith("m"): - return True - if units.startswith("degrees"): - return False - logger.warning("Failed to tell if data are projected. Assuming yes.") - return True - - @staticmethod - def _try_to_get_crs(new_data): - """Try to get a CRS from attributes.""" - if "area" in new_data.attrs: - if isinstance(new_data.attrs["area"], AreaDefinition): - return new_data.attrs["area"].crs - if not isinstance(new_data.attrs["area"], SwathDefinition): - logger.warning( - f"Could not tell CRS from area of type {type(new_data.attrs['area']).__name__:s}. 
" - "Assuming projected CRS.") - if "crs" in new_data.coords: - return new_data.coords["crs"].item() - - @staticmethod - def _try_get_units_from_coords(new_data): - for c in "xy": - with suppress(KeyError): - # If the data has only 1 dimension, it has only one of x or y coords - if "units" in new_data.coords[c].attrs: - return new_data.coords[c].attrs["units"] - - @staticmethod - def _encode_xy_coords_projected(new_data): - """Encode coordinates, assuming projected CRS.""" - if 'x' in new_data.coords: - new_data['x'].attrs['standard_name'] = 'projection_x_coordinate' - new_data['x'].attrs['units'] = 'm' - if 'y' in new_data.coords: - new_data['y'].attrs['standard_name'] = 'projection_y_coordinate' - new_data['y'].attrs['units'] = 'm' - return new_data - - @staticmethod - def _encode_xy_coords_geographic(new_data): - """Encode coordinates, assuming geographic CRS.""" - if 'x' in new_data.coords: - new_data['x'].attrs['standard_name'] = 'longitude' - new_data['x'].attrs['units'] = 'degrees_east' - if 'y' in new_data.coords: - new_data['y'].attrs['standard_name'] = 'latitude' - new_data['y'].attrs['units'] = 'degrees_north' - return new_data - - @staticmethod - def _encode_time(new_data, epoch): - if 'time' in new_data.coords: - new_data['time'].encoding['units'] = epoch - new_data['time'].attrs['standard_name'] = 'time' - new_data['time'].attrs.pop('bounds', None) - new_data = CFWriter._add_time_dimension(new_data) - return new_data - - @staticmethod - def _add_time_dimension(new_data): - if 'time' not in new_data.dims and new_data["time"].size not in new_data.shape: - new_data = new_data.expand_dims('time') - return new_data - - @staticmethod - def _remove_satpy_attributes(new_data): - # Remove _satpy* attributes - satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')] - for satpy_attr in satpy_attrs: - new_data.attrs.pop(satpy_attr) - new_data.attrs.pop('_last_resampler', None) + def update_encoding(dataset, to_netcdf_kwargs): + """Update encoding info (deprecated).""" + warnings.warn('CFWriter.update_encoding is deprecated. 
' + 'Use satpy.writers.cf_writer.update_encoding instead.', + DeprecationWarning, stacklevel=3) + return update_encoding(dataset, to_netcdf_kwargs) def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): """Save the *dataset* to a given *filename*.""" return self.save_datasets([dataset], filename, **kwargs) - def _collect_datasets(self, datasets, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, include_lonlats=True, - pretty=False, include_orig_name=True, numeric_name_prefix='CHANNEL_'): - """Collect and prepare datasets to be written.""" - ds_collection = {} - for ds in datasets: - ds_collection.update(get_extra_ds(ds)) - got_lonlats = has_projection_coords(ds_collection) - datas = {} - start_times = [] - end_times = [] - # sort by name, but don't use the name - for _, ds in sorted(ds_collection.items()): - if ds.dtype not in CF_DTYPES: - warnings.warn( - 'Dtype {} not compatible with {}.'.format(str(ds.dtype), CF_VERSION), - stacklevel=3 - ) - # we may be adding attributes, coordinates, or modifying the - # structure of attributes - ds = ds.copy(deep=True) - try: - new_datasets = area2cf(ds, strict=include_lonlats, got_lonlats=got_lonlats) - except KeyError: - new_datasets = [ds] - for new_ds in new_datasets: - start_times.append(new_ds.attrs.get("start_time", None)) - end_times.append(new_ds.attrs.get("end_time", None)) - new_var = self.da2cf(new_ds, epoch=epoch, flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs, - include_orig_name=include_orig_name, - numeric_name_prefix=numeric_name_prefix) - datas[new_var.name] = new_var - - # Check and prepare coordinates - assert_xy_unique(datas) - link_coords(datas) - datas = make_alt_coords_unique(datas, pretty=pretty) - - return datas, start_times, end_times - def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, include_orig_name=True, numeric_name_prefix='CHANNEL_', **to_netcdf_kwargs): @@ -849,31 +1101,31 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, Args: datasets (list): - Datasets to be saved + List of xr.DataArray to be saved. filename (str): Output file groups (dict): Group datasets according to the given assignment: `{'group_name': ['dataset1', 'dataset2', ...]}`. - Group name `None` corresponds to the root of the file, i.e. no group will be created. Warning: The - results will not be fully CF compliant! + Group name `None` corresponds to the root of the file, i.e. no group will be created. + Warning: The results will not be fully CF compliant! header_attrs: - Global attributes to be included + Global attributes to be included. engine (str): Module to be used for writing netCDF files. Follows xarray's :meth:`~xarray.Dataset.to_netcdf` engine choices with a preference for 'netcdf4'. epoch (str): - Reference time for encoding of time coordinates + Reference time for encoding of time coordinates. flatten_attrs (bool): - If True, flatten dict-type attributes + If True, flatten dict-type attributes. exclude_attrs (list): - List of dataset attributes to be excluded + List of dataset attributes to be excluded. include_lonlats (bool): - Always include latitude and longitude coordinates, even for datasets with area definition + Always include latitude and longitude coordinates, even for datasets with area definition. pretty (bool): Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. 
include_orig_name (bool). - Include the original dataset name as an varaibel attribute in the final netcdf + Include the original dataset name as a variable attribute in the final netCDF. numeric_name_prefix (str): Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. @@ -881,55 +1133,58 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, logger.info('Saving datasets to NetCDF4/CF.') _check_backend_versions() - # Write global attributes to file root (creates the file) + # Define netCDF filename if not provided + # - It infers the name from the first DataArray filename = filename or self.get_filename(**datasets[0].attrs) - root = xr.Dataset({}, attrs={}) - if header_attrs is not None: - if flatten_attrs: - header_attrs = flatten_dict(header_attrs) - root.attrs = encode_attrs_nc(header_attrs) - - _set_history(root) - + # Collect xr.Dataset for each group + grouped_datasets, header_attrs = collect_cf_datasets(list_dataarrays=datasets, # list of xr.DataArray + header_attrs=header_attrs, + exclude_attrs=exclude_attrs, + flatten_attrs=flatten_attrs, + pretty=pretty, + include_lonlats=include_lonlats, + epoch=epoch, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix, + groups=groups, + ) # Remove satpy-specific kwargs - to_netcdf_kwargs = copy.deepcopy(to_netcdf_kwargs) # may contain dictionaries (encoding) - satpy_kwargs = ['overlay', 'decorate', 'config_files'] - for kwarg in satpy_kwargs: - to_netcdf_kwargs.pop(kwarg, None) - - init_nc_kwargs = to_netcdf_kwargs.copy() - init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point - init_nc_kwargs.pop('unlimited_dims', None) - - groups_ = _get_groups(groups, datasets, root) - - written = [root.to_netcdf(filename, engine=engine, mode='w', **init_nc_kwargs)] - - # Write datasets to groups (appending to the file; group=None means no group) - for group_name, group_datasets in groups_.items(): - # XXX: Should we combine the info of all datasets? 
-            datas, start_times, end_times = self._collect_datasets(
-                group_datasets, epoch=epoch, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs,
-                include_lonlats=include_lonlats, pretty=pretty,
-                include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix)
-            dataset = xr.Dataset(datas)
-            if 'time' in dataset:
-                dataset['time_bnds'] = make_time_bounds(start_times,
-                                                        end_times)
-                dataset['time'].attrs['bounds'] = "time_bnds"
-                dataset['time'].attrs['standard_name'] = "time"
-            else:
-                grp_str = ' of group {}'.format(group_name) if group_name is not None else ''
-                logger.warning('No time dimension in datasets{}, skipping time bounds creation.'.format(grp_str))
-
-            encoding, other_to_netcdf_kwargs = update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix)
-            res = dataset.to_netcdf(filename, engine=engine, group=group_name, mode='a', encoding=encoding,
-                                    **other_to_netcdf_kwargs)
+        # - These kwargs can contain an encoding dictionary
+        to_netcdf_kwargs = _sanitize_writer_kwargs(to_netcdf_kwargs)
+
+        # If writing a grouped netCDF, create an empty "root" netCDF file
+        # - Add the global attributes
+        # - All groups will be appended in the for loop below
+        if groups is not None:
+            written = _initialize_root_netcdf(filename=filename,
+                                              engine=engine,
+                                              header_attrs=header_attrs,
+                                              to_netcdf_kwargs=to_netcdf_kwargs)
+            mode = "a"
+        else:
+            mode = "w"
+            written = []
+
+        # Write the netCDF
+        # - If writing a grouped netCDF, each group is appended to the root file
+        # - If writing a single netCDF, it is written directly
+        for group_name, ds in grouped_datasets.items():
+            encoding, other_to_netcdf_kwargs = update_encoding(ds,
+                                                               to_netcdf_kwargs=to_netcdf_kwargs,
+                                                               numeric_name_prefix=numeric_name_prefix)
+            res = ds.to_netcdf(filename,
+                               engine=engine,
+                               group=group_name,
+                               mode=mode,
+                               encoding=encoding,
+                               **other_to_netcdf_kwargs)
             written.append(res)
-
         return written
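
As a usage sketch of the refactored grouping path above (the DataArrays, names and
attributes below are synthetic; only `collect_cf_datasets` and its signature come
from this diff):

    import numpy as np
    import xarray as xr

    from satpy.writers.cf_writer import collect_cf_datasets

    # Two synthetic DataArrays standing in for loaded Scene datasets
    ir = xr.DataArray(np.zeros((2, 2)), dims=("y", "x"), attrs={"name": "IR_108"})
    vis = xr.DataArray(np.zeros((2, 2)), dims=("y", "x"), attrs={"name": "VIS006"})

    # groups=None would put everything under a single None key (the file root);
    # a group assignment yields one xr.Dataset per group name instead.
    grouped_datasets, header_attrs = collect_cf_datasets(
        list_dataarrays=[ir, vis],
        header_attrs={"title": "CF writer demo"},
        groups={"visir": ["IR_108", "VIS006"]},
    )
    assert set(grouped_datasets) == {"visir"}

+# ---------------------------------------------------------------------------.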
+# NetCDF version + def _check_backend_versions(): """Issue warning if backend versions do not match.""" diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py index c5a144ad38..11f847c114 100644 --- a/satpy/writers/mitiff.py +++ b/satpy/writers/mitiff.py @@ -640,7 +640,7 @@ def _save_as_palette(self, datasets, tmp_gen_filename, tiffinfo, **kwargs): LOG.error("In a mitiff palette image a color map must be provided: palette_color_map is missing.") return - img.save(tmp_gen_filename, compression='tiff_deflate', compress_level=9, tiffinfo=tiffinfo) + img.save(tmp_gen_filename, compression='raw', compress_level=9, tiffinfo=tiffinfo) def _save_as_enhanced(self, datasets, tmp_gen_filename, **kwargs): """Save datasets as an enhanced RGB image.""" @@ -659,7 +659,7 @@ def _save_as_enhanced(self, datasets, tmp_gen_filename, **kwargs): data = data.clip(0, 255) mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode='L')) mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:], - compression='tiff_deflate', compress_level=9, tiffinfo=tiffinfo) + compression='raw', compress_level=9, tiffinfo=tiffinfo) def _generate_intermediate_filename(self, gen_filename): """Replace mitiff ext because pillow doesn't recognise the file type.""" @@ -697,7 +697,7 @@ def _save_datasets_as_mitiff(self, datasets, image_description, mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode='L')) break mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:], - compression='tiff_deflate', compress_level=9, tiffinfo=tiffinfo) + compression='raw', compress_level=9, tiffinfo=tiffinfo) elif 'dataset' in datasets.attrs['name']: LOG.debug("Saving dataset as single dataset.") self._save_single_dataset(datasets, cns, tmp_gen_filename, tiffinfo, kwargs) @@ -721,7 +721,7 @@ def _save_single_dataset(self, datasets, cns, tmp_gen_filename, tiffinfo, kwargs data = self._calibrate_data(datasets, datasets.attrs['prerequisites'][0].get('calibration'), self.mitiff_config[kwargs['sensor']][cn]['min-val'], self.mitiff_config[kwargs['sensor']][cn]['max-val']) - Image.fromarray(data.astype(np.uint8)).save(tmp_gen_filename, compression='tiff_deflate', + Image.fromarray(data.astype(np.uint8)).save(tmp_gen_filename, compression="raw", compress_level=9, tiffinfo=tiffinfo) else: mitiff_frames = [] @@ -741,4 +741,4 @@ def _save_single_dataset(self, datasets, cns, tmp_gen_filename, tiffinfo, kwargs break mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:], - compression='tiff_deflate', compress_level=9, tiffinfo=tiffinfo) + compression='raw', compress_level=9, tiffinfo=tiffinfo) diff --git a/satpy/writers/ninjogeotiff.py b/satpy/writers/ninjogeotiff.py index 2492630a5a..a8f603861e 100644 --- a/satpy/writers/ninjogeotiff.py +++ b/satpy/writers/ninjogeotiff.py @@ -17,13 +17,12 @@ # satpy. If not, see . """Writer for GeoTIFF images with tags for the NinJo visualization tool. -The next version of NinJo (release expected spring 2022) will be able -to read standard GeoTIFF images, with required metadata encoded as a set -of XML tags in the GDALMetadata TIFF tag. Each of the XML tags must be -prepended with ``'NINJO_'``. For NinJo delivery, these GeoTIFF files -supersede the old NinJoTIFF format. The :class:`NinJoGeoTIFFWriter` -therefore supersedes the old Satpy NinJoTIFF writer and the pyninjotiff -package. 
+Starting with NinJo 7, NinJo is able to read standard GeoTIFF images,
+with required metadata encoded as a set of XML tags in the GDALMetadata
+TIFF tag. Each of the XML tags must be prepended with ``'NINJO_'``.
+For NinJo delivery, these GeoTIFF files supersede the old NinJoTIFF
+format. The :class:`NinJoGeoTIFFWriter` therefore supersedes the old
+Satpy NinJoTIFF writer and the pyninjotiff package.
 
 The reference documentation for valid NinJo tags and their meaning is
 contained in `NinJoPedia`_. Since this page is not in the public web,
@@ -70,6 +69,16 @@
 Instead, pass those values in source units to the
 :func:`~satpy.enhancements.stretch` enhancement with the ``min_stretch``
 and ``max_stretch`` arguments.
+
+For images where the pixel value corresponds directly to a physical value,
+NinJo has functionality to read the corresponding quantity (for example,
+brightness temperature or reflectance). To make this possible, the writer
+adds the tags ``Gradient`` and ``AxisIntercept``. Those tags are added if
+and only if the image has mode ``L`` or ``LA`` and ``PhysicUnit`` is not set
+to ``"N/A"``. In other words, to suppress those tags for images with mode
+``L`` or ``LA`` (for example, for the composite ``vis_with_ir``, where the
+physical interpretation of individual pixels is lost), one should set
+``PhysicUnit`` to ``"N/A"``, ``"n/a"``, ``"1"``, or ``""`` (empty string).
 """
 
 import copy
@@ -92,6 +101,8 @@ class NinJoGeoTIFFWriter(GeoTIFFWriter):
     :meth:`~NinJoGeoTIFFWriter.save_image`.
     """
 
+    scale_offset_tag_names = ("ninjo_Gradient", "ninjo_AxisIntercept")
+
     def save_image(
             self, image, filename=None, fill_value=None,
             compute=True, keep_palette=False, cmap=None, overviews=None,
@@ -149,7 +160,8 @@ def save_image(
             "Temperature", PhysicUnit is set to "C", but data attributes
             incidate the data have unit "K", then the writer will adapt
             the header ``ninjo_AxisIntercept`` such that data are interpreted
-            in units of "C".
+            in units of "C". If PhysicUnit is set to "N/A", no
+            AxisIntercept and Gradient tags will be written.
         PhysicValue (str)
             NinJo label for quantity (example: "temperature")
@@ -192,7 +204,9 @@ def save_image(
             overviews_minsize=overviews_minsize,
             overviews_resampling=overviews_resampling,
             tags={**(tags or {}), **ninjo_tags},
-            scale_offset_tags=None if image.mode.startswith("RGB") else ("ninjo_Gradient", "ninjo_AxisIntercept"),
+            scale_offset_tags=(self.scale_offset_tag_names
+                               if self._check_include_scale_offset(image, PhysicUnit)
+                               else None),
             **gdal_opts)
 
     def _fix_units(self, image, quantity, unit):
@@ -220,6 +234,12 @@ def _fix_units(self, image, quantity, unit):
 
         return image
 
+    def _check_include_scale_offset(self, image, unit):
+        """Check if scale-offset tags should be included."""
+        if image.mode.startswith("L") and unit.lower() not in ("n/a", "1", ""):
+            return True
+        return False
+
 
 class NinJoTagGenerator:
     """Class to collect NinJo tags.
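
As a user-side sketch of the behaviour described above (`scn` is assumed to be a
Scene with the composite already loaded and resampled; the NinJo ID values are
illustrative placeholders, not real channel or satellite IDs):

    # scn: a satpy Scene with "vis_with_ir" loaded and resampled (not shown)
    scn.save_dataset(
        "vis_with_ir",
        writer="ninjogeotiff",
        filename="vis_with_ir.tif",
        fill_value=0,
        ChannelID=900015,
        DataType="GORN",
        PhysicUnit="N/A",   # "N/A" suppresses ninjo_Gradient / ninjo_AxisIntercept
        PhysicValue="N/A",
        SatelliteNameID=6400014,
    )
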
diff --git a/setup.py b/setup.py index 3e74245b4a..e56377cfc1 100644 --- a/setup.py +++ b/setup.py @@ -38,8 +38,8 @@ test_requires = ['behave', 'h5py', 'netCDF4', 'pyhdf', 'imageio', 'rasterio', 'geoviews', 'trollimage', 'fsspec', 'bottleneck', 'rioxarray', 'pytest', 'pytest-lazy-fixture', 'defusedxml', - 's3fs', 'python-eccodes', 'h5netcdf', 'xarray-datatree', - 'skyfield', 'ephem', 'pint-xarray', 'astropy'] + 's3fs', 'eccodes', 'h5netcdf', 'xarray-datatree', + 'skyfield', 'ephem', 'pint-xarray', 'astropy', 'dask-image'] extras_require = { @@ -63,11 +63,12 @@ 'seviri_l1b_hrit': ['pyorbital >= 1.3.1'], 'seviri_l1b_native': ['pyorbital >= 1.3.1'], 'seviri_l1b_nc': ['pyorbital >= 1.3.1', 'netCDF4 >= 1.1.8'], - 'seviri_l2_bufr': ['eccodes-python'], - 'seviri_l2_grib': ['eccodes-python'], + 'seviri_l2_bufr': ['eccodes'], + 'seviri_l2_grib': ['eccodes'], 'hsaf_grib': ['pygrib'], 'remote_reading': ['fsspec'], 'insat_3d': ['xarray-datatree'], + 'gms5-vissr_l1b': ["numba"], # Writers: 'cf': ['h5netcdf >= 0.7.3'], 'awips_tiled': ['netCDF4 >= 1.1.8'], @@ -77,6 +78,7 @@ # Composites/Modifiers: 'rayleigh': ['pyspectral >= 0.10.1'], 'angles': ['pyorbital >= 1.3.1'], + 'filters': ['dask-image'], # MultiScene: 'animations': ['imageio'], # Documentation: