When trying to save an S1GRD cube with the variable names 'HH' or 'HV' for the area [5, 35, 6, 36], writing the cube fails with HTTPError('500 Server Error: Internal Server Error for url: https://services.sentinel-hub.com/api/v1/process').
This might be because these polarisations are not acquired over that region, if I understand the observation-scenario image provided by Sentinel Hub correctly:
https://docs.sentinel-hub.com/api/latest/static/00900923cd6d57f05706c8f218481ddc/b4294/Sentinel-1-Mode-Polarisation-Observation-Geometry-2018.jpg
If that is the case, it would be great to get a more meaningful error message informing the user that certain variables are not available for the requested area (the same could happen with VV if someone requested it e.g. for Greenland).
But maybe this is not an issue to fix on the xcube-sh side and should rather be reported to Sinergise?
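Until a better message exists, a user-side workaround is to catch the HTTPError and attach a hint. This is only a minimal sketch under the assumption that the 500 response indeed means the requested polarisation is not acquired over the bbox; the wrapper name write_cube_with_hint is made up for illustration:

import requests
from xcube.core.dsio import write_cube

def write_cube_with_hint(cube, output_path, variable_names, bbox):
    # Delegate to xcube's write_cube, but translate an opaque 500 from the
    # Sentinel Hub Process API into a message that mentions the likely cause.
    try:
        return write_cube(cube, output_path)
    except requests.HTTPError as e:
        if e.response is not None and e.response.status_code == 500:
            raise RuntimeError(
                f"Sentinel Hub returned 500 while fetching {variable_names} "
                f"for bbox {bbox}. These polarisations may not be acquired "
                f"over the requested area (see the Sentinel-1 observation "
                f"scenario)."
            ) from e
        raise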
To reproduce:
from xcube.core.store import find_data_store_extensions
from xcube.core.store import get_data_store_params_schema
from xcube.core.store import new_data_store
from xcube.core.dsio import write_cube

store = new_data_store('sentinelhub')
store

cube = store.open_data(data_id='S1GRD',
                       variable_names=['HV'],
                       spatial_res=0.001,
                       bbox=[5, 35, 6, 36],
                       time_range=['2017-01-01', '2017-01-06'])

write_cube(cube, 'test_S1GRD_v7.zarr')
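For comparison (not part of the failing run whose stack trace follows), a sanity check under my assumption that this bbox is normally covered by IW mode with VV/VH polarisation, so the same request with 'VV' would be expected to succeed; the cube_vv name and output path are just illustrative:

cube_vv = store.open_data(data_id='S1GRD',
                          variable_names=['VV'],
                          spatial_res=0.001,
                          bbox=[5, 35, 6, 36],
                          time_range=['2017-01-01', '2017-01-06'])
write_cube(cube_vv, 'test_S1GRD_VV.zarr')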
Stacktrace:
---------------------------------------------------------------------------
HTTPError Traceback (most recent call last)
<ipython-input-53-3338fe969f3a> in <module>
----> 1 write_cube(cube, 'test_S1GRD_v7.zarr')
~/Desktop/projects/xcube/xcube/core/dsio.py in write_cube(cube, output_path, format_name, cube_asserted, **kwargs)
70 if not cube_asserted:
71 assert_cube(cube)
---> 72 return write_dataset(cube, output_path, format_name=format_name, **kwargs)
73
74
~/Desktop/projects/xcube/xcube/core/dsio.py in write_dataset(dataset, output_path, format_name, **kwargs)
117 if dataset_io is None:
118 raise ValueError(f"Unknown output format {format_name!r} for {output_path}")
--> 119 dataset_io.write(dataset, output_path, **kwargs)
120 return dataset
121
~/Desktop/projects/xcube/xcube/core/dsio.py in write(self, dataset, output_path, compressor, chunksizes, packing, s3_kwargs, s3_client_kwargs, **kwargs)
448 mode='w')
449 encoding = self._get_write_encodings(dataset, compressor, chunksizes, packing)
--> 450 dataset.to_zarr(path_or_store, mode='w', encoding=encoding, **kwargs)
451
452 @classmethod
~/miniconda3/envs/xcube/lib/python3.7/site-packages/xarray/core/dataset.py in to_zarr(self, store, chunk_store, mode, synchronizer, group, encoding, compute, consolidated, append_dim, region)
1754 consolidated=consolidated,
1755 append_dim=append_dim,
-> 1756 region=region,
1757 )
1758
~/miniconda3/envs/xcube/lib/python3.7/site-packages/xarray/backends/api.py in to_zarr(dataset, store, chunk_store, mode, synchronizer, group, encoding, compute, consolidated, append_dim, region)
1480 # TODO: figure out how to properly handle unlimited_dims
1481 dump_to_store(dataset, zstore, writer, encoding=encoding)
-> 1482 writes = writer.sync(compute=compute)
1483
1484 if compute:
~/miniconda3/envs/xcube/lib/python3.7/site-packages/xarray/backends/common.py in sync(self, compute)
159 compute=compute,
160 flush=True,
--> 161 regions=self.regions,
162 )
163 self.sources = []
~/miniconda3/envs/xcube/lib/python3.7/site-packages/dask/array/core.py in store(sources, targets, lock, regions, compute, return_stored, **kwargs)
979
980 if compute:
--> 981 result.compute(**kwargs)
982 return None
983 else:
~/miniconda3/envs/xcube/lib/python3.7/site-packages/dask/base.py in compute(self, **kwargs)
165 dask.base.compute
166 """
--> 167 (result,) = compute(self, traverse=False, **kwargs)
168 return result
169
~/miniconda3/envs/xcube/lib/python3.7/site-packages/dask/base.py in compute(*args, **kwargs)
450 postcomputes.append(x.__dask_postcompute__())
451
--> 452 results = schedule(dsk, keys, **kwargs)
453 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
454
~/miniconda3/envs/xcube/lib/python3.7/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, pool, **kwargs)
82 get_id=_thread_get_id,
83 pack_exception=pack_exception,
---> 84 **kwargs
85 )
86
~/miniconda3/envs/xcube/lib/python3.7/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs)
484 _execute_task(task, data) # Re-execute locally
485 else:
--> 486 raise_exception(exc, tb)
487 res, worker_id = loads(res_info)
488 state["cache"][key] = res
~/miniconda3/envs/xcube/lib/python3.7/site-packages/dask/local.py in reraise(exc, tb)
314 if exc.__traceback__ is not tb:
315 raise exc.with_traceback(tb)
--> 316 raise exc
317
318
~/miniconda3/envs/xcube/lib/python3.7/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
220 try:
221 task, data = loads(task_info)
--> 222 result = _execute_task(task, data)
223 id = get_id()
224 result = dumps((result, id))
~/miniconda3/envs/xcube/lib/python3.7/site-packages/dask/core.py in _execute_task(arg, cache, dsk)
119 # temporaries by their reference count and can execute certain
120 # operations in-place.
--> 121 return func(*(_execute_task(a, cache) for a in args))
122 elif not ishashable(arg):
123 return arg
~/miniconda3/envs/xcube/lib/python3.7/site-packages/dask/array/core.py in getter(a, b, asarray, lock)
100 c = a[b]
101 if asarray:
--> 102 c = np.asarray(c)
103 finally:
104 if lock:
~/miniconda3/envs/xcube/lib/python3.7/site-packages/numpy/core/_asarray.py in asarray(a, dtype, order)
81
82 """
---> 83 return array(a, dtype, copy=False, order=order)
84
85
~/miniconda3/envs/xcube/lib/python3.7/site-packages/xarray/core/indexing.py in __array__(self, dtype)
501
502 def __array__(self, dtype=None):
--> 503 return np.asarray(self.array, dtype=dtype)
504
505 def __getitem__(self, key):
~/miniconda3/envs/xcube/lib/python3.7/site-packages/numpy/core/_asarray.py in asarray(a, dtype, order)
81
82 """
---> 83 return array(a, dtype, copy=False, order=order)
84
85
~/miniconda3/envs/xcube/lib/python3.7/site-packages/xarray/core/indexing.py in __array__(self, dtype)
661
662 def __array__(self, dtype=None):
--> 663 return np.asarray(self.array, dtype=dtype)
664
665 def __getitem__(self, key):
~/miniconda3/envs/xcube/lib/python3.7/site-packages/numpy/core/_asarray.py in asarray(a, dtype, order)
81
82 """
---> 83 return array(a, dtype, copy=False, order=order)
84
85
~/miniconda3/envs/xcube/lib/python3.7/site-packages/xarray/core/indexing.py in __array__(self, dtype)
566 def __array__(self, dtype=None):
567 array = as_indexable(self.array)
--> 568 return np.asarray(array[self.key], dtype=None)
569
570 def transpose(self, order):
~/miniconda3/envs/xcube/lib/python3.7/site-packages/xarray/backends/zarr.py in __getitem__(self, key)
55 array = self.get_array()
56 if isinstance(key, indexing.BasicIndexer):
---> 57 return array[key.tuple]
58 elif isinstance(key, indexing.VectorizedIndexer):
59 return array.vindex[
~/miniconda3/envs/xcube/lib/python3.7/site-packages/zarr/core.py in __getitem__(self, selection)
569
570 fields, selection = pop_fields(selection)
--> 571 return self.get_basic_selection(selection, fields=fields)
572
573 def get_basic_selection(self, selection=Ellipsis, out=None, fields=None):
~/miniconda3/envs/xcube/lib/python3.7/site-packages/zarr/core.py in get_basic_selection(self, selection, out, fields)
695 else:
696 return self._get_basic_selection_nd(selection=selection, out=out,
--> 697 fields=fields)
698
699 def _get_basic_selection_zd(self, selection, out=None, fields=None):
~/miniconda3/envs/xcube/lib/python3.7/site-packages/zarr/core.py in _get_basic_selection_nd(self, selection, out, fields)
737 indexer = BasicIndexer(selection, self)
738
--> 739 return self._get_selection(indexer=indexer, out=out, fields=fields)
740
741 def get_orthogonal_selection(self, selection, out=None, fields=None):
~/miniconda3/envs/xcube/lib/python3.7/site-packages/zarr/core.py in _get_selection(self, indexer, out, fields)
1028 # load chunk selection into output array
1029 self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
-> 1030 drop_axes=indexer.drop_axes, fields=fields)
1031 else:
1032 # allow storage to get multiple items at once
~/miniconda3/envs/xcube/lib/python3.7/site-packages/zarr/core.py in _chunk_getitem(self, chunk_coords, chunk_selection, out, out_selection, drop_axes, fields)
1660 try:
1661 # obtain compressed data for chunk
-> 1662 cdata = self.chunk_store[ckey]
1663
1664 except KeyError:
~/Desktop/projects/xcube-sh/xcube_sh/chunkstore.py in __getitem__(self, key)
407 value = self._vfs[key]
408 if isinstance(value, tuple):
--> 409 return self._fetch_chunk(*value)
410 return value
411
~/Desktop/projects/xcube-sh/xcube_sh/chunkstore.py in _fetch_chunk(self, band_name, chunk_index)
338
339 if exception:
--> 340 raise exception
341
342 return chunk_data
~/Desktop/projects/xcube-sh/xcube_sh/chunkstore.py in _fetch_chunk(self, band_name, chunk_index)
323 chunk_index,
324 bbox=request_bbox,
--> 325 time_range=request_time_range)
326 except Exception as e:
327 exception = e
~/Desktop/projects/xcube-sh/xcube_sh/chunkstore.py in fetch_chunk(self, band_name, chunk_index, bbox, time_range)
543 )
544
--> 545 response = self._sentinel_hub.get_data(request, mime_type='application/octet-stream')
546
547 return response.content
~/Desktop/projects/xcube-sh/xcube_sh/sentinelhub.py in get_data(self, request, mime_type)
295 self.error_handler(response)
296 if self.error_policy == 'fail':
--> 297 response.raise_for_status()
298 raise SentinelHubError(response)
299 else:
~/miniconda3/envs/xcube/lib/python3.7/site-packages/requests/models.py in raise_for_status(self)
939
940 if http_error_msg:
--> 941 raise HTTPError(http_error_msg, response=self)
942
943 def close(self):
HTTPError: 500 Server Error: Internal Server Error for url: https://services.sentinel-hub.com/api/v1/process