diff --git a/CHANGES.md b/CHANGES.md
index 823bc15c4..fff762f90 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -5,6 +5,11 @@
 
 * Bundled [xcube-viewer 1.1.0-dev.1](https://github.com/dcs4cop/xcube-viewer/releases/tag/v1.1.0-dev.1).
 * Fixed various issues with the auto-generated Python API documentation.
+* Fixed a timestamp rounding issue observed in xcube viewer, see https://github.com/dcs4cop/xcube-viewer/issues/289.
+  xcube server now rounds the time dimension labels of a dataset as follows (the rounding frequency is always 1 second):
+  - First timestamp: floor(time[0])
+  - Last timestamp: ceil(time[-1])
+  - In-between timestamps: round(time[1:-1])
 * Fixed a problem where time series requests may have missed
   outer values of a requested time range.
   Introduced query parameter `tolerance` for
diff --git a/docs/source/overview.rst b/docs/source/overview.rst
index 4b03c0070..15b05042d 100644
--- a/docs/source/overview.rst
+++ b/docs/source/overview.rst
@@ -17,7 +17,8 @@ Overview
 *xcube* is an open-source Python package and toolkit that has been developed
 to provide Earth observation (EO) data in an analysis-ready form to users.
 xcube achieves this by carefully converting EO data sources into self-contained *data cubes*
-that can be published in the cloud.
+that can be published in the cloud. The Python package is developed and maintained by
+`Brockmann Consult GmbH <https://www.brockmann-consult.de>`_.
 
 Data Cube
 =========
diff --git a/test/core/test_timecoord.py b/test/core/test_timecoord.py
index 39f633133..1d99c6726 100644
--- a/test/core/test_timecoord.py
+++ b/test/core/test_timecoord.py
@@ -2,6 +2,7 @@
 
 import numpy as np
 import pandas as pd
+import pytest
 
 from test.sampledata import create_highroc_dataset
 from xcube.core.new import new_cube
@@ -13,6 +14,8 @@ from xcube.core.timecoord import get_time_range_from_data
 from xcube.core.timecoord import timestamp_to_iso_string
 from xcube.core.timecoord import to_time_in_days_since_1970
+
+
 # from xcube.core.timecoord import find_datetime_format
 # from xcube.core.timecoord import get_timestamp_from_string
 # from xcube.core.timecoord import get_timestamps_from_string
 
@@ -22,7 +25,8 @@ class AddTimeCoordsTest(unittest.TestCase):
 
     def test_add_time_coords_point(self):
         dataset = create_highroc_dataset()
-        dataset_with_time = add_time_coords(dataset, (365 * 47 + 20, 365 * 47 + 20))
+        dataset_with_time = add_time_coords(dataset,
+                                            (365 * 47 + 20, 365 * 47 + 20))
         self.assertIsNot(dataset_with_time, dataset)
         self.assertIn('time', dataset_with_time)
         self.assertEqual(dataset_with_time.time.shape, (1,))
@@ -30,7 +34,8 @@ def test_add_time_coords_point(self):
 
     def test_add_time_coords_range(self):
         dataset = create_highroc_dataset()
-        dataset_with_time = add_time_coords(dataset, (365 * 47 + 20, 365 * 47 + 21))
+        dataset_with_time = add_time_coords(dataset,
+                                            (365 * 47 + 20, 365 * 47 + 21))
         self.assertIsNot(dataset_with_time, dataset)
         self.assertIn('time', dataset_with_time)
         self.assertEqual(dataset_with_time.time.shape, (1,))
@@ -47,7 +52,8 @@ def test_to_time_in_days_since_1970(self):
         self.assertEqual(17690.5,
                          to_time_in_days_since_1970('2018-06-08T12:00'))
         self.assertEqual(18173.42625622898,
-                         to_time_in_days_since_1970('04-OCT-2019 10:13:48.538184'))
+                         to_time_in_days_since_1970(
+                             '04-OCT-2019 10:13:48.538184'))
 
     def test_from_time_in_days_since_1970(self):
         self.assertEqual('2017-06-07T12:00:00.000000000',
@@ -64,7 +70,8 @@
                              to_time_in_days_since_1970('2018-06-08T12:00'))))
         self.assertEqual('2019-10-04T10:13:48.538000000',
str(from_time_in_days_since_1970( - to_time_in_days_since_1970('04-OCT-2019 10:13:48.538184')))) + to_time_in_days_since_1970( + '04-OCT-2019 10:13:48.538184')))) class GetTimeRangeTest(unittest.TestCase): @@ -73,16 +80,20 @@ def test_get_time_range_from_data(self): cube = new_cube(drop_bounds=True) time_range = get_time_range_from_data(cube) self.assertIsNotNone(time_range) - self.assertEqual('2010-01-01T00:00:00', pd.Timestamp(time_range[0]).isoformat()) - self.assertEqual('2010-01-06T00:00:00', pd.Timestamp(time_range[1]).isoformat()) + self.assertEqual('2010-01-01T00:00:00', + pd.Timestamp(time_range[0]).isoformat()) + self.assertEqual('2010-01-06T00:00:00', + pd.Timestamp(time_range[1]).isoformat()) def test_get_time_range_from_data_with_irregular_data(self): cube = new_cube(drop_bounds=True, time_freq='M') time_range = get_time_range_from_data(cube) self.assertIsNotNone(time_range) - self.assertEqual('2010-01-31T00:00:00', pd.Timestamp(time_range[0]).isoformat()) - self.assertEqual('2010-06-30T00:00:00', pd.Timestamp(time_range[1]).isoformat()) + self.assertEqual('2010-01-31T00:00:00', + pd.Timestamp(time_range[0]).isoformat()) + self.assertEqual('2010-06-30T00:00:00', + pd.Timestamp(time_range[1]).isoformat()) def test_get_time_range_from_data_with_irregular_data_and_no_metadata(self): cube = new_cube(drop_bounds=True, @@ -91,8 +102,10 @@ def test_get_time_range_from_data_with_irregular_data_and_no_metadata(self): cube.attrs.pop('time_coverage_end') time_range = get_time_range_from_data(cube) self.assertIsNotNone(time_range) - self.assertEqual('2010-02-14T00:00:00', pd.Timestamp(time_range[0]).isoformat()) - self.assertEqual('2010-06-14T00:00:00', pd.Timestamp(time_range[1]).isoformat()) + self.assertEqual('2010-02-14T00:00:00', + pd.Timestamp(time_range[0]).isoformat()) + self.assertEqual('2010-06-14T00:00:00', + pd.Timestamp(time_range[1]).isoformat()) def test_get_time_range_from_data_cftime(self): cube = new_cube(drop_bounds=True, @@ -100,8 +113,10 @@ def test_get_time_range_from_data_cftime(self): time_dtype=None) time_range = get_time_range_from_data(cube) self.assertIsNotNone(time_range) - self.assertEqual('2010-01-01T00:00:00', pd.Timestamp(time_range[0]).isoformat()) - self.assertEqual('2010-01-06T00:00:00', pd.Timestamp(time_range[1]).isoformat()) + self.assertEqual('2010-01-01T00:00:00', + pd.Timestamp(time_range[0]).isoformat()) + self.assertEqual('2010-01-06T00:00:00', + pd.Timestamp(time_range[1]).isoformat()) def test_get_time_range_from_data_with_irregular_cftime_data(self): cube = new_cube(drop_bounds=True, @@ -110,10 +125,13 @@ def test_get_time_range_from_data_with_irregular_cftime_data(self): time_dtype=None) time_range = get_time_range_from_data(cube) self.assertIsNotNone(time_range) - self.assertEqual('2010-01-31T00:00:00', pd.Timestamp(time_range[0]).isoformat()) - self.assertEqual('2010-06-30T00:00:00', pd.Timestamp(time_range[1]).isoformat()) + self.assertEqual('2010-01-31T00:00:00', + pd.Timestamp(time_range[0]).isoformat()) + self.assertEqual('2010-06-30T00:00:00', + pd.Timestamp(time_range[1]).isoformat()) - def test_get_time_range_from_data_with_irregular_cftime_data_and_no_metadata(self): + def test_get_time_range_from_data_with_irregular_cftime_data_and_no_metadata( + self): cube = new_cube(drop_bounds=True, time_freq='M', use_cftime=True, @@ -122,34 +140,43 @@ def test_get_time_range_from_data_with_irregular_cftime_data_and_no_metadata(sel cube.attrs.pop('time_coverage_end') time_range = get_time_range_from_data(cube) self.assertIsNotNone(time_range) 
- self.assertEqual('2010-02-14T00:00:00', pd.Timestamp(time_range[0]).isoformat()) - self.assertEqual('2010-06-14T00:00:00', pd.Timestamp(time_range[1]).isoformat()) + self.assertEqual('2010-02-14T00:00:00', + pd.Timestamp(time_range[0]).isoformat()) + self.assertEqual('2010-06-14T00:00:00', + pd.Timestamp(time_range[1]).isoformat()) def test_get_time_range_from_data_time_named_t(self): cube = new_cube(drop_bounds=True, time_name='t') time_range = get_time_range_from_data(cube) self.assertIsNotNone(time_range) - self.assertEqual('2010-01-01T00:00:00', pd.Timestamp(time_range[0]).isoformat()) - self.assertEqual('2010-01-06T00:00:00', pd.Timestamp(time_range[1]).isoformat()) + self.assertEqual('2010-01-01T00:00:00', + pd.Timestamp(time_range[0]).isoformat()) + self.assertEqual('2010-01-06T00:00:00', + pd.Timestamp(time_range[1]).isoformat()) def test_get_time_range_from_data_additional_t_variable(self): import xarray as xr start_time_data = pd.date_range(start='2010-01-03T12:00:00', periods=5, - freq='5D').values.astype(dtype='datetime64[s]') + freq='5D').values.astype( + dtype='datetime64[s]') start_time = xr.DataArray(start_time_data, dims='time') end_time_data = pd.date_range(start='2010-01-07T12:00:00', periods=5, - freq='5D').values.astype(dtype='datetime64[s]') + freq='5D').values.astype( + dtype='datetime64[s]') end_time = xr.DataArray(end_time_data, dims='time') cube = new_cube(drop_bounds=True, time_start='2010-01-05T12:00:00', time_freq='5D', - variables=dict(start_time=start_time, end_time=end_time)) + variables=dict(start_time=start_time, + end_time=end_time)) time_range = get_time_range_from_data(cube) self.assertIsNotNone(time_range) - self.assertEqual('2010-01-03T12:00:00', pd.Timestamp(time_range[0]).isoformat()) - self.assertEqual('2010-01-27T12:00:00', pd.Timestamp(time_range[1]).isoformat()) + self.assertEqual('2010-01-03T12:00:00', + pd.Timestamp(time_range[0]).isoformat()) + self.assertEqual('2010-01-27T12:00:00', + pd.Timestamp(time_range[1]).isoformat()) def test_get_time_range_from_data_start_and_end_time_arrays(self): cube = new_cube(drop_bounds=True, @@ -157,32 +184,40 @@ def test_get_time_range_from_data_start_and_end_time_arrays(self): time_dtype=None) time_range = get_time_range_from_data(cube) self.assertIsNotNone(time_range) - self.assertEqual('2010-01-01T00:00:00', pd.Timestamp(time_range[0]).isoformat()) - self.assertEqual('2010-01-06T00:00:00', pd.Timestamp(time_range[1]).isoformat()) + self.assertEqual('2010-01-01T00:00:00', + pd.Timestamp(time_range[0]).isoformat()) + self.assertEqual('2010-01-06T00:00:00', + pd.Timestamp(time_range[1]).isoformat()) def test_get_time_range_from_data_bounds(self): cube = new_cube() time_range = get_time_range_from_data(cube) self.assertIsNotNone(time_range) - self.assertEqual('2010-01-01T00:00:00', pd.Timestamp(time_range[0]).isoformat()) - self.assertEqual('2010-01-06T00:00:00', pd.Timestamp(time_range[1]).isoformat()) + self.assertEqual('2010-01-01T00:00:00', + pd.Timestamp(time_range[0]).isoformat()) + self.assertEqual('2010-01-06T00:00:00', + pd.Timestamp(time_range[1]).isoformat()) def test_get_time_range_from_attrs(self): cube = new_cube() time_range = get_time_range_from_attrs(cube) self.assertIsNotNone(time_range) - self.assertEqual('2010-01-01T00:00:00', pd.Timestamp(time_range[0]).isoformat()) - self.assertEqual('2010-01-06T00:00:00', pd.Timestamp(time_range[1]).isoformat()) + self.assertEqual('2010-01-01T00:00:00', + pd.Timestamp(time_range[0]).isoformat()) + self.assertEqual('2010-01-06T00:00:00', + 
pd.Timestamp(time_range[1]).isoformat()) def test_get_start_time_from_attrs(self): cube = new_cube() start_time = get_start_time_from_attrs(cube) - self.assertEqual('2010-01-01T00:00:00', pd.Timestamp(start_time).isoformat()) + self.assertEqual('2010-01-01T00:00:00', + pd.Timestamp(start_time).isoformat()) def test_get_end_time_from_attrs(self): cube = new_cube() end_time = get_end_time_from_attrs(cube) - self.assertEqual('2010-01-06T00:00:00', pd.Timestamp(end_time).isoformat()) + self.assertEqual('2010-01-06T00:00:00', + pd.Timestamp(end_time).isoformat()) class TimestampToIsoStringTest(unittest.TestCase): @@ -190,26 +225,89 @@ def test_it_with_default_res(self): self.assertEqual("2018-09-05T00:00:00Z", timestamp_to_iso_string(np.datetime64("2018-09-05"))) self.assertEqual("2018-09-05T10:35:42Z", - timestamp_to_iso_string(np.datetime64("2018-09-05 10:35:42"))) + timestamp_to_iso_string( + np.datetime64("2018-09-05 10:35:42"))) + self.assertEqual("2018-09-05T10:35:42Z", + timestamp_to_iso_string( + np.datetime64("2018-09-05 10:35:42.164"))) + self.assertEqual("2019-10-04T10:13:49Z", + timestamp_to_iso_string( + pd.to_datetime("04-OCT-2019 10:13:48.538184"))) + + def test_it_with_ceil_round_fn(self): + self.assertEqual("2018-09-05T00:00:00Z", + timestamp_to_iso_string(np.datetime64("2018-09-05"), + round_fn="ceil")) self.assertEqual("2018-09-05T10:35:42Z", - timestamp_to_iso_string(np.datetime64("2018-09-05 10:35:42.164"))) + timestamp_to_iso_string( + np.datetime64("2018-09-05 10:35:42"), + round_fn="ceil")) + self.assertEqual("2018-09-05T10:35:43Z", + timestamp_to_iso_string( + np.datetime64("2018-09-05 10:35:42.164"), + round_fn="ceil")) self.assertEqual("2019-10-04T10:13:49Z", - timestamp_to_iso_string(pd.to_datetime("04-OCT-2019 10:13:48.538184"))) + timestamp_to_iso_string( + pd.to_datetime("04-OCT-2019 10:13:48.538184"), + round_fn="ceil")) + + def test_it_with_floor_round_fn(self): + self.assertEqual("2018-09-05T00:00:00Z", + timestamp_to_iso_string(np.datetime64("2018-09-05"), + round_fn="floor")) + self.assertEqual("2018-09-05T10:35:42Z", + timestamp_to_iso_string( + np.datetime64("2018-09-05 10:35:42"), + round_fn="floor")) + self.assertEqual("2018-09-05T10:35:42Z", + timestamp_to_iso_string( + np.datetime64("2018-09-05 10:35:42.164"), + round_fn="floor")) + self.assertEqual("2019-10-04T10:13:48Z", + timestamp_to_iso_string( + pd.to_datetime("04-OCT-2019 10:13:48.538184"), + round_fn="floor")) + + def test_it_with_array_round_fn(self): + var = [np.datetime64("2018-09-05 10:35:42.564"), + np.datetime64("2018-09-06 10:35:42.564"), + np.datetime64("2018-09-07 10:35:42.564"), + pd.to_datetime("04-OCT-2019 10:13:48.038184") + ] + expected_values = ["2018-09-05T10:35:42Z", + "2018-09-06T10:35:43Z", + "2018-09-07T10:35:43Z", + "2019-10-04T10:13:49Z"] + values = [timestamp_to_iso_string(var[0], round_fn="floor")] +\ + list(map(timestamp_to_iso_string, var[1:-1])) +\ + [timestamp_to_iso_string(var[-1], round_fn="ceil")] + self.assertEqual(expected_values, values) + + + # noinspection PyMethodMayBeStatic + def test_it_with_invalid_round_fn(self): + with pytest.raises(ValueError, + match=r"round_fn must be one of" + r" \('ceil', 'floor', 'round'\)"): + timestamp_to_iso_string(np.datetime64("2018-09-05 10:35:42.164"), + round_fn="foo") def test_it_with_h_res(self): self.assertEqual("2018-09-05T00:00:00Z", timestamp_to_iso_string(np.datetime64("2018-09-05"), freq="H")) self.assertEqual("2018-09-05T11:00:00Z", - timestamp_to_iso_string(np.datetime64("2018-09-05 10:35:42"), - freq="H")) + 
timestamp_to_iso_string( + np.datetime64("2018-09-05 10:35:42"), + freq="H")) self.assertEqual("2018-09-05T11:00:00Z", - timestamp_to_iso_string(np.datetime64("2018-09-05 10:35:42.164"), - freq="H")) + timestamp_to_iso_string( + np.datetime64("2018-09-05 10:35:42.164"), + freq="H")) self.assertEqual("2019-10-04T10:00:00Z", - timestamp_to_iso_string(pd.to_datetime("04-OCT-2019 10:13:48.538184"), - freq="H")) - + timestamp_to_iso_string( + pd.to_datetime("04-OCT-2019 10:13:48.538184"), + freq="H")) # class TimeStampsTest(unittest.TestCase): # diff --git a/xcube/core/timecoord.py b/xcube/core/timecoord.py index 8a5085406..1546e37be 100644 --- a/xcube/core/timecoord.py +++ b/xcube/core/timecoord.py @@ -19,15 +19,20 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -import cftime import datetime -from typing import Optional, Sequence, Tuple, Union +import re +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import Union +import cftime import numpy as np import pandas as pd -import re import xarray as xr +from xcube.util.assertions import assert_in + REF_DATETIME_STR = '1970-01-01 00:00:00' REF_DATETIME = pd.to_datetime(REF_DATETIME_STR, utc=True) DATETIME_UNITS = f'days since {REF_DATETIME_STR}' @@ -42,7 +47,8 @@ (re.compile(4 * '\\d'), '%Y')] -def add_time_coords(dataset: xr.Dataset, time_range: Tuple[float, float]) -> xr.Dataset: +def add_time_coords(dataset: xr.Dataset, + time_range: Tuple[float, float]) -> xr.Dataset: t1, t2 = time_range if t1 != t2: t_center = (t1 + t2) / 2 @@ -50,7 +56,8 @@ def add_time_coords(dataset: xr.Dataset, time_range: Tuple[float, float]) -> xr. t_center = t1 dataset = dataset.expand_dims('time') dataset = dataset.assign_coords(time=(['time'], - from_time_in_days_since_1970([t_center]))) + from_time_in_days_since_1970( + [t_center]))) time_var = dataset.coords['time'] time_var.attrs['long_name'] = 'time' time_var.attrs['standard_name'] = 'time' @@ -65,7 +72,8 @@ def add_time_coords(dataset: xr.Dataset, time_range: Tuple[float, float]) -> xr. if t1 != t2: time_var.attrs['bounds'] = 'time_bnds' dataset = dataset.assign_coords( - time_bnds=(['time', 'bnds'], from_time_in_days_since_1970([t1, t2]).reshape(1, 2)) + time_bnds=(['time', 'bnds'], + from_time_in_days_since_1970([t1, t2]).reshape(1, 2)) ) time_bnds_var = dataset.coords['time_bnds'] time_bnds_var.attrs['long_name'] = 'time' @@ -82,7 +90,8 @@ def add_time_coords(dataset: xr.Dataset, time_range: Tuple[float, float]) -> xr. return dataset -def get_time_range_from_data(dataset: xr.Dataset, maybe_consider_metadata: bool = True) \ +def get_time_range_from_data(dataset: xr.Dataset, + maybe_consider_metadata: bool = True) \ -> Tuple[Optional[float], Optional[float]]: """ Determines a time range from a dataset by inspecting its time_bounds or time data arrays. 
@@ -127,7 +136,8 @@ def get_time_range_from_data(dataset: xr.Dataset, maybe_consider_metadata: bool maybe_consider_metadata) time_diff = time.diff(dim=time.dims[0]).values time_res = time_diff[0] - time_regular = all([time_res - diff == np.timedelta64(0) for diff in time_diff[1:]]) + time_regular = all( + [time_res - diff == np.timedelta64(0) for diff in time_diff[1:]]) if time_regular: return data_start - time_res / 2, data_end + time_res / 2 return _maybe_return_time_range_from_metadata(dataset, @@ -139,11 +149,14 @@ def get_time_range_from_data(dataset: xr.Dataset, maybe_consider_metadata: bool def _maybe_return_time_range_from_metadata(dataset: xr.Dataset, data_start_time: float, data_end_time: float, - maybe_consider_metadata: bool) -> Tuple[float, float]: + maybe_consider_metadata: bool) -> \ +Tuple[float, float]: if maybe_consider_metadata: attr_start_time, attr_end_time = get_time_range_from_attrs(dataset) - attr_start_time = pd.to_datetime(attr_start_time, infer_datetime_format=False, utc=True) - attr_end_time = pd.to_datetime(attr_end_time, infer_datetime_format=False, utc=True) + attr_start_time = pd.to_datetime(attr_start_time, + infer_datetime_format=False, utc=True) + attr_end_time = pd.to_datetime(attr_end_time, + infer_datetime_format=False, utc=True) if attr_start_time is not None and attr_end_time is not None: try: if attr_start_time < data_start_time and attr_end_time > data_end_time: @@ -166,17 +179,21 @@ def _get_time_range_from_time_bounds(dataset: xr.Dataset, time_bounds_name: str) return time_bnds[0, 0].values, time_bnds[-1, 1].values -def get_time_range_from_attrs(dataset: xr.Dataset) -> Tuple[Optional[str], Optional[str]]: +def get_time_range_from_attrs(dataset: xr.Dataset) -> Tuple[ + Optional[str], Optional[str]]: return get_start_time_from_attrs(dataset), get_end_time_from_attrs(dataset) def get_start_time_from_attrs(dataset: xr.Dataset) -> Optional[str]: - return _get_attr(dataset, ['time_coverage_start', 'time_start', 'start_time', 'start_date']) + return _get_attr(dataset, + ['time_coverage_start', 'time_start', 'start_time', + 'start_date']) def get_end_time_from_attrs(dataset: xr.Dataset) -> Optional[str]: - return _get_attr(dataset, ['time_coverage_end', 'time_stop', 'time_end', 'stop_time', - 'end_time', 'stop_date', 'end_date']) + return _get_attr(dataset, + ['time_coverage_end', 'time_stop', 'time_end', 'stop_time', + 'end_time', 'stop_date', 'end_date']) def _get_attr(dataset: xr.Dataset, names: Sequence[str]) -> Optional[str]: @@ -187,39 +204,48 @@ def _get_attr(dataset: xr.Dataset, names: Sequence[str]) -> Optional[str]: def remove_time_part_from_isoformat(datetime_str: str) -> str: date_length = 10 # for example len("2010-02-04") == 10 - if len(datetime_str) > date_length and datetime_str[date_length] in ('T', ' '): + if len(datetime_str) > date_length and datetime_str[date_length] in ( + 'T', ' '): return datetime_str[0: date_length] return datetime_str def to_time_in_days_since_1970(time_str: str, pattern=None) -> float: - date_time = pd.to_datetime(time_str, format=pattern, infer_datetime_format=False, utc=True) + date_time = pd.to_datetime(time_str, format=pattern, + infer_datetime_format=False, utc=True) timedelta = date_time - REF_DATETIME return timedelta.days + timedelta.seconds / SECONDS_PER_DAY + \ - timedelta.microseconds / MICROSECONDS_PER_DAY + timedelta.microseconds / MICROSECONDS_PER_DAY -def from_time_in_days_since_1970(time_value: Union[float, Sequence[float]]) -> np.ndarray: +def from_time_in_days_since_1970( + time_value: 
Union[float, Sequence[float]]) -> np.ndarray:
     if isinstance(time_value, int) or isinstance(time_value, float):
-        return pd.to_datetime(time_value, utc=True, unit='d', origin='unix').round(freq='ms') \
+        return pd.to_datetime(time_value, utc=True, unit='d',
+                              origin='unix').round(freq='ms') \
             .to_datetime64()
     else:
         return np.array(list(map(from_time_in_days_since_1970, time_value)))
 
 
-def timestamp_to_iso_string(time: Union[np.datetime64, datetime.datetime], freq='S'):
+def timestamp_to_iso_string(time: Union[np.datetime64, datetime.datetime],
+                            freq: str = 'S',
+                            round_fn: str = 'round'):
     """
     Convert a UTC timestamp given as nanos, millis, seconds, etc. since 1970-01-01 00:00:00
     to an ISO-format string.
 
-    :param time: UTC timestamp given as time delta since since 1970-01-01 00:00:00 in the units
+    :param time: UTC timestamp given as time delta since 1970-01-01 00:00:00 in the units
         given by the numpy datetime64 type, so it can be as nanos, millis, seconds
         since 1970-01-01 00:00:00.
     :param freq: time rounding resolution. See pandas.Timestamp.round().
+    :param round_fn: time rounding function, one of 'ceil', 'floor', 'round'. Defaults to 'round'.
     :return: ISO-format string.
     """
     # All times are UTC (Z = Zulu Time Zone = UTC)
-    return pd.Timestamp(time).round(freq).isoformat() + 'Z'
+    assert_in(round_fn, ("ceil", "floor", "round"), name="round_fn")
+    timestamp = pd.Timestamp(time)
+    return getattr(timestamp, round_fn)(freq).isoformat() + 'Z'
 
 
 def find_datetime_format(line: str) -> Tuple[Optional[str], int, int]:
diff --git a/xcube/webapi/datasets/controllers.py b/xcube/webapi/datasets/controllers.py
index 22e33bc4a..50fdd7103 100644
--- a/xcube/webapi/datasets/controllers.py
+++ b/xcube/webapi/datasets/controllers.py
@@ -479,15 +479,18 @@ def get_dataset_coordinates(ctx: DatasetsContext,
                             ds_id: str,
                             dim_name: str) -> Dict:
     ds, var = ctx.get_dataset_and_coord_variable(ds_id, dim_name)
-    values = list()
     if np.issubdtype(var.dtype, np.floating):
-        converter = float
+        values = list(map(float, var.values))
     elif np.issubdtype(var.dtype, np.integer):
-        converter = int
+        values = list(map(int, var.values))
+    elif len(var) == 1:
+        values = [timestamp_to_iso_string(var.values[0], round_fn="floor")]
     else:
-        converter = timestamp_to_iso_string
-    for value in var.values:
-        values.append(converter(value))
+        # see https://github.com/dcs4cop/xcube-viewer/issues/289
+        assert len(var) > 1, "Dimension length must be greater than 1."
+        values = [timestamp_to_iso_string(var.values[0], round_fn="floor")] +\
+            list(map(timestamp_to_iso_string, var.values[1:-1])) +\
+            [timestamp_to_iso_string(var.values[-1], round_fn="ceil")]
     return dict(name=dim_name,
                 size=len(values),
                 dtype=str(var.dtype),
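
Illustrative only: a minimal sketch of the time-label rounding described in the CHANGES.md entry and applied in get_dataset_coordinates above, assuming the patched timestamp_to_iso_string from xcube.core.timecoord (defaults freq='S', round_fn='round'). The expected strings mirror the values exercised in test_timecoord.py.

import numpy as np

from xcube.core.timecoord import timestamp_to_iso_string

# Label a time dimension the way the patched controller does:
# floor the first label, ceil the last, round the ones in between
# (1-second resolution by default).
times = [np.datetime64('2018-09-05T10:35:42.564'),
         np.datetime64('2018-09-06T10:35:42.564'),
         np.datetime64('2018-09-07T10:35:42.164')]

labels = ([timestamp_to_iso_string(times[0], round_fn='floor')]
          + [timestamp_to_iso_string(t) for t in times[1:-1]]
          + [timestamp_to_iso_string(times[-1], round_fn='ceil')])

# labels == ['2018-09-05T10:35:42Z',
#            '2018-09-06T10:35:43Z',
#            '2018-09-07T10:35:43Z']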