Skip to content

Commit

Permalink
Merge pull request #419 from dcs4cop/forman-347-dont_reorder_lat
Browse files Browse the repository at this point in the history
Workaround for some cases that end up in #347
  • Loading branch information
forman authored Mar 17, 2021
2 parents e224598 + df83144 commit 2280c12
Show file tree
Hide file tree
Showing 4 changed files with 20 additions and 52 deletions.
12 changes: 8 additions & 4 deletions CHANGES.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,11 @@
## Changes in 0.7.1.dev1 (in development)

* Added `s3fs` requirement that has been removed by accident.
* Added missing requirements `requests` and `urllib3`.
## Changes in 0.7.1

* Dataset normalisation no longer reorders increasing latitude
coordinates into descending order, as doing so created datasets that
could no longer be written to Zarr. (#347)
* Updated package requirements
- Added `s3fs` requirement that has been removed by accident.
- Added missing requirements `requests` and `urllib3`.

## Changes in 0.7.0

Expand Down
32 changes: 10 additions & 22 deletions test/core/test_normalize.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,11 @@
from jdcal import gcal2jd
from numpy.testing import assert_array_almost_equal

from xcube.core.normalize import adjust_spatial_attrs
from xcube.core.normalize import adjust_temporal_attrs
from xcube.core.normalize import normalize_coord_vars
from xcube.core.normalize import normalize_dataset
from xcube.core.normalize import normalize_missing_time
from xcube.core.normalize import adjust_spatial_attrs
from xcube.core.normalize import adjust_temporal_attrs


# noinspection PyPep8Naming
Expand Down Expand Up @@ -85,25 +85,23 @@ def test_normalize_lon_lat(self):
dataset = xr.Dataset({'first': (['latitude',
'longitude'], [[1, 2, 3],
[2, 3, 4]])})
# Since normalization puts latitudes into descending order, we
# expect the rows to be swapped.
expected = xr.Dataset({'first': (['lat', 'lon'], [[2, 3, 4],
[1, 2, 3]])})
expected = xr.Dataset({'first': (['lat', 'lon'], [[1, 2, 3],
[2, 3, 4]])})
actual = normalize_dataset(dataset)
assertDatasetEqual(actual, expected)

dataset = xr.Dataset({'first': (['lat', 'long'], [[1, 2, 3],
[2, 3, 4]])})
expected = xr.Dataset({'first': (['lat', 'lon'], [[2, 3, 4],
[1, 2, 3]])})
expected = xr.Dataset({'first': (['lat', 'lon'], [[1, 2, 3],
[2, 3, 4]])})
actual = normalize_dataset(dataset)
assertDatasetEqual(actual, expected)

dataset = xr.Dataset({'first': (['latitude',
'spacetime'], [[1, 2, 3],
[2, 3, 4]])})
expected = xr.Dataset({'first': (['lat', 'spacetime'], [[2, 3, 4],
[1, 2, 3]])})
expected = xr.Dataset({'first': (['lat', 'spacetime'], [[1, 2, 3],
[2, 3, 4]])})
actual = normalize_dataset(dataset)
assertDatasetEqual(actual, expected)

Expand All @@ -114,7 +112,7 @@ def test_normalize_lon_lat(self):
actual = normalize_dataset(dataset)
assertDatasetEqual(actual, expected)

def test_normalize_inverted_lat(self):
def test_normalize_does_not_reorder_increasing_lat(self):
first = np.zeros([3, 45, 90])
first[0, :, :] = np.eye(45, 90)
ds = xr.Dataset({
Expand All @@ -125,18 +123,8 @@ def test_normalize_inverted_lat(self):
'time': [datetime(2000, x, 1) for x in range(1, 4)]}).chunk(
chunks={'time': 1})

first = np.zeros([3, 45, 90])
first[0, :, :] = np.flip(np.eye(45, 90), axis=0)
expected = xr.Dataset({
'first': (['time', 'lat', 'lon'], first),
'second': (['time', 'lat', 'lon'], np.zeros([3, 45, 90])),
'lat': np.linspace(88, -88, 45),
'lon': np.linspace(-178, 178, 90),
'time': [datetime(2000, x, 1) for x in range(1, 4)]}).chunk(
chunks={'time': 1})

actual = normalize_dataset(ds)
xr.testing.assert_equal(actual, expected)
xr.testing.assert_equal(actual, ds)

def test_normalize_with_missing_time_dim(self):
ds = xr.Dataset({'first': (['lat', 'lon'], np.zeros([90, 180])),
Expand Down
26 changes: 1 addition & 25 deletions xcube/core/normalize.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,34 +56,11 @@ def normalize_dataset(ds: xr.Dataset) -> xr.Dataset:
ds = _normalize_lat_lon_2d(ds)
ds = _normalize_dim_order(ds)
ds = _normalize_lon_360(ds)

# xcube viewer currently requires decreasing latitude co-ordinates, so
# we invert them here if necessary.
ds = _ensure_lat_decreasing(ds)

ds = normalize_missing_time(ds)
ds = _normalize_jd2datetime(ds)
return ds


def _ensure_lat_decreasing(ds: xr.Dataset) -> xr.Dataset:
    """
    Invert the latitude coordinate if it is increasing.

    Datasets without a usable 1-D 'lat' coordinate are returned unchanged.

    :param ds: some xarray dataset
    :return: a normalized xarray dataset
    """
    try:
        if _is_lat_decreasing(ds.lat):
            return ds
        # Reverse the latitude axis via label-based slicing.
        return ds.sel(lat=slice(None, None, -1))
    except (AttributeError, ValueError):
        # AttributeError: the dataset has no 'lat', probably not geospatial.
        # ValueError: the dataset still has an ND 'lat' array.
        return ds


def _normalize_lat_lon(ds: xr.Dataset) -> xr.Dataset:
"""
Rename variables named 'longitude' or 'long' to 'lon', and 'latitude' to 'lat'.
Expand Down Expand Up @@ -366,7 +343,7 @@ def normalize_missing_time(ds: xr.Dataset) -> xr.Dataset:
return ds


def adjust_spatial_attrs(ds: xr.Dataset, allow_point: bool=False) -> xr.Dataset:
def adjust_spatial_attrs(ds: xr.Dataset, allow_point: bool = False) -> xr.Dataset:
"""
Adjust the global spatial attributes of the dataset by doing some
introspection of the dataset and adjusting the appropriate attributes
Expand Down Expand Up @@ -692,7 +669,6 @@ def _is_lat_decreasing(lat: xr.DataArray) -> bool:


def _normalize_dim_order(ds: xr.Dataset) -> xr.Dataset:

copy_created = False

for var_name in ds.data_vars:
Expand Down
2 changes: 1 addition & 1 deletion xcube/version.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,4 +19,4 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

version = '0.7.1.dev1'
version = '0.7.1'

0 comments on commit 2280c12

Please sign in to comment.