From bccd5f2d17ec3865eee2ce9dc3b165ca9fefdfc9 Mon Sep 17 00:00:00 2001
From: Norman Fomferra
Date: Tue, 9 Mar 2021 18:05:55 +0100
Subject: [PATCH 1/2] workaround for some cases that end up in #347

---
 CHANGES.md                  |  3 +++
 test/core/test_normalize.py | 20 +++++++++-----------
 xcube/core/normalize.py     |  7 ++++++-
 3 files changed, 18 insertions(+), 12 deletions(-)

diff --git a/CHANGES.md b/CHANGES.md
index b64ae0b5a..dc69d3bea 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,5 +1,8 @@
 ## Changes in 0.7.1.dev1 (in development)
 
+* Dataset normalisation no longer includes reordering increasing
+  latitude coordinates, as this creates datasets that are no longer writable
+  to Zarr. (#347)
 * Added missing requirements `requests` and `urllib3`.
 
 ## Changes in 0.7.0
diff --git a/test/core/test_normalize.py b/test/core/test_normalize.py
index 3f0b10997..fd9499b68 100644
--- a/test/core/test_normalize.py
+++ b/test/core/test_normalize.py
@@ -12,11 +12,11 @@
 from jdcal import gcal2jd
 from numpy.testing import assert_array_almost_equal
 
+from xcube.core.normalize import adjust_spatial_attrs
+from xcube.core.normalize import adjust_temporal_attrs
 from xcube.core.normalize import normalize_coord_vars
 from xcube.core.normalize import normalize_dataset
 from xcube.core.normalize import normalize_missing_time
-from xcube.core.normalize import adjust_spatial_attrs
-from xcube.core.normalize import adjust_temporal_attrs
 
 
 # noinspection PyPep8Naming
@@ -85,25 +85,23 @@ def test_normalize_lon_lat(self):
 
         dataset = xr.Dataset({'first': (['latitude',
                                          'longitude'], [[1, 2, 3],
                                                         [2, 3, 4]])})
-        # Since normalization puts latitudes into descending order, we
-        # expect the rows to be swapped.
-        expected = xr.Dataset({'first': (['lat', 'lon'], [[2, 3, 4],
-                                                          [1, 2, 3]])})
+        expected = xr.Dataset({'first': (['lat', 'lon'], [[1, 2, 3],
+                                                          [2, 3, 4]])})
         actual = normalize_dataset(dataset)
         assertDatasetEqual(actual, expected)
 
         dataset = xr.Dataset({'first': (['lat', 'long'], [[1, 2, 3],
                                                           [2, 3, 4]])})
-        expected = xr.Dataset({'first': (['lat', 'lon'], [[2, 3, 4],
-                                                          [1, 2, 3]])})
+        expected = xr.Dataset({'first': (['lat', 'lon'], [[1, 2, 3],
+                                                          [2, 3, 4]])})
         actual = normalize_dataset(dataset)
         assertDatasetEqual(actual, expected)
 
         dataset = xr.Dataset({'first': (['latitude',
                                          'spacetime'], [[1, 2, 3],
                                                         [2, 3, 4]])})
-        expected = xr.Dataset({'first': (['lat', 'spacetime'], [[2, 3, 4],
-                                                                [1, 2, 3]])})
+        expected = xr.Dataset({'first': (['lat', 'spacetime'], [[1, 2, 3],
+                                                                [2, 3, 4]])})
         actual = normalize_dataset(dataset)
         assertDatasetEqual(actual, expected)
@@ -136,7 +134,7 @@ def test_normalize_inverted_lat(self):
             chunks={'time': 1})
 
         actual = normalize_dataset(ds)
-        xr.testing.assert_equal(actual, expected)
+        xr.testing.assert_equal(actual, ds)
 
     def test_normalize_with_missing_time_dim(self):
         ds = xr.Dataset({'first': (['lat', 'lon'], np.zeros([90, 180])),
diff --git a/xcube/core/normalize.py b/xcube/core/normalize.py
index 9ac3e5030..090cdf40b 100644
--- a/xcube/core/normalize.py
+++ b/xcube/core/normalize.py
@@ -59,7 +59,12 @@ def normalize_dataset(ds: xr.Dataset) -> xr.Dataset:
 
     # xcube viewer currently requires decreasing latitude co-ordinates, so
     # we invert them here if necessary.
-    ds = _ensure_lat_decreasing(ds)
+
+    # TODO: commented out as _ensure_lat_decreasing() produces
+    # chunks in "lat" that have the smallest chunk first. This will
+    # fail when writing to Zarr. (Hack for #347)
+    # See also https://github.com/pydata/xarray/issues/2300
+    # ds = _ensure_lat_decreasing(ds)
 
     ds = normalize_missing_time(ds)
     ds = _normalize_jd2datetime(ds)

From df83144f03801b1c3c8c244215133c483103775d Mon Sep 17 00:00:00 2001
From: Norman Fomferra
Date: Tue, 16 Mar 2021 14:52:31 +0100
Subject: [PATCH 2/2] Preparing 0.7.1 release

---
 CHANGES.md                  |  7 ++++---
 test/core/test_normalize.py | 12 +-----------
 xcube/core/normalize.py     | 31 +------------------------------
 xcube/version.py            |  2 +-
 4 files changed, 7 insertions(+), 45 deletions(-)

diff --git a/CHANGES.md b/CHANGES.md
index 75d6a50af..df4ef8348 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,10 +1,11 @@
-## Changes in 0.7.1.dev1 (in development)
+## Changes in 0.7.1
 
-* Added `s3fs` requirement that has been removed by accident.
 * Dataset normalisation no longer includes reordering increasing
   latitude coordinates, as this creates datasets that are no longer writable
   to Zarr. (#347)
-* Added missing requirements `requests` and `urllib3`.
+* Updated package requirements
+  - Added `s3fs` requirement that has been removed by accident.
+  - Added missing requirements `requests` and `urllib3`.
 
 ## Changes in 0.7.0
 
diff --git a/test/core/test_normalize.py b/test/core/test_normalize.py
index fd9499b68..37441a6c9 100644
--- a/test/core/test_normalize.py
+++ b/test/core/test_normalize.py
@@ -112,7 +112,7 @@ def test_normalize_lon_lat(self):
         actual = normalize_dataset(dataset)
         assertDatasetEqual(actual, expected)
 
-    def test_normalize_inverted_lat(self):
+    def test_normalize_does_not_reorder_increasing_lat(self):
         first = np.zeros([3, 45, 90])
         first[0, :, :] = np.eye(45, 90)
         ds = xr.Dataset({
@@ -123,16 +123,6 @@ def test_normalize_lon_lat(self):
             'time': [datetime(2000, x, 1) for x in range(1, 4)]}).chunk(
             chunks={'time': 1})
 
-        first = np.zeros([3, 45, 90])
-        first[0, :, :] = np.flip(np.eye(45, 90), axis=0)
-        expected = xr.Dataset({
-            'first': (['time', 'lat', 'lon'], first),
-            'second': (['time', 'lat', 'lon'], np.zeros([3, 45, 90])),
-            'lat': np.linspace(88, -88, 45),
-            'lon': np.linspace(-178, 178, 90),
-            'time': [datetime(2000, x, 1) for x in range(1, 4)]}).chunk(
-            chunks={'time': 1})
-
         actual = normalize_dataset(ds)
         xr.testing.assert_equal(actual, ds)
 
diff --git a/xcube/core/normalize.py b/xcube/core/normalize.py
index 090cdf40b..d0a395703 100644
--- a/xcube/core/normalize.py
+++ b/xcube/core/normalize.py
@@ -56,39 +56,11 @@ def normalize_dataset(ds: xr.Dataset) -> xr.Dataset:
     ds = _normalize_lat_lon_2d(ds)
     ds = _normalize_dim_order(ds)
     ds = _normalize_lon_360(ds)
-
-    # xcube viewer currently requires decreasing latitude co-ordinates, so
-    # we invert them here if necessary.
-
-    # TODO: commented out as _ensure_lat_decreasing() produces
-    # chunks in "lat" that have the smallest chunk first. This will
-    # fail when writing to Zarr. (Hack for #347)
-    # See also https://github.com/pydata/xarray/issues/2300
-    # ds = _ensure_lat_decreasing(ds)
-
     ds = normalize_missing_time(ds)
     ds = _normalize_jd2datetime(ds)
     return ds
 
 
-def _ensure_lat_decreasing(ds: xr.Dataset) -> xr.Dataset:
-    """
-    If the latitude is increasing, invert it to make it decreasing.
-    :param ds: some xarray dataset
-    :return: a normalized xarray dataset
-    """
-    try:
-        if not _is_lat_decreasing(ds.lat):
-            ds = ds.sel(lat=slice(None, None, -1))
-    except AttributeError:
-        # The dataset doesn't have 'lat', probably not geospatial
-        pass
-    except ValueError:
-        # The dataset still has an ND 'lat' array
-        pass
-    return ds
-
-
 def _normalize_lat_lon(ds: xr.Dataset) -> xr.Dataset:
     """
     Rename variables named 'longitude' or 'long' to 'lon', and 'latitude' to 'lon'.
@@ -371,7 +343,7 @@ def normalize_missing_time(ds: xr.Dataset) -> xr.Dataset:
     return ds
 
 
-def adjust_spatial_attrs(ds: xr.Dataset, allow_point: bool=False) -> xr.Dataset:
+def adjust_spatial_attrs(ds: xr.Dataset, allow_point: bool = False) -> xr.Dataset:
     """
     Adjust the global spatial attributes of the dataset by doing some
     introspection of the dataset and adjusting the appropriate attributes
@@ -697,7 +669,6 @@ def _is_lat_decreasing(lat: xr.DataArray) -> bool:
 
 
 def _normalize_dim_order(ds: xr.Dataset) -> xr.Dataset:
-
     copy_created = False
 
     for var_name in ds.data_vars:
diff --git a/xcube/version.py b/xcube/version.py
index 2a007fc77..b30ba38b6 100644
--- a/xcube/version.py
+++ b/xcube/version.py
@@ -19,4 +19,4 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
 
-version = '0.7.1.dev1'
+version = '0.7.1'
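
Note: the sketch below is not part of either patch; it only illustrates the chunk-ordering
problem that the TODO comment, #347 and pydata/xarray#2300 describe. It assumes xarray with
dask and zarr installed; the 45 x 90 grid, the chunk size of 10 along "lat", and the output
path 'flipped.zarr' are made-up example values, not taken from the patches.

import numpy as np
import xarray as xr

# 45 increasing latitudes, chunked along "lat" in blocks of 10:
# dask chunk sizes are (10, 10, 10, 10, 5), i.e. only the last chunk is smaller.
ds = xr.Dataset(
    {'first': (('lat', 'lon'), np.zeros((45, 90)))},
    coords={'lat': np.linspace(-88, 88, 45),
            'lon': np.linspace(-178, 178, 90)},
).chunk({'lat': 10})
print(ds['first'].chunks)       # ((10, 10, 10, 10, 5), (90,))

# Reversing the latitude axis, as the removed _ensure_lat_decreasing() did,
# also reverses the chunk order, so the odd-sized chunk comes first.
flipped = ds.sel(lat=slice(None, None, -1))
print(flipped['first'].chunks)  # ((5, 10, 10, 10, 10), (90,))

# Zarr only allows the *last* chunk along a dimension to be smaller, so the
# flipped dataset cannot be written as-is.
try:
    flipped.to_zarr('flipped.zarr', mode='w')
except ValueError as error:
    print('to_zarr() failed:', error)

# Rechunking to a uniform size would make the flipped dataset writable again;
# the patches above avoid the problem by not reordering latitudes at all.
flipped.chunk({'lat': 10}).to_zarr('flipped.zarr', mode='w')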