From bf1ecbad723714376b633067fa308b833809e03c Mon Sep 17 00:00:00 2001 From: Stefaan Lippens Date: Tue, 17 Dec 2024 18:48:53 +0100 Subject: [PATCH] Issue #346 Some more ProcessArgs porting for less boilerplate code and better/earlier error messages --- openeo_driver/ProcessGraphDeserializer.py | 316 ++++++++---------- openeo_driver/datacube.py | 6 +- openeo_driver/processes.py | 13 +- .../data/pg/1.0/resample_and_merge_cubes.json | 4 +- tests/test_processes.py | 13 +- tests/test_views_execute.py | 3 +- 6 files changed, 168 insertions(+), 187 deletions(-) diff --git a/openeo_driver/ProcessGraphDeserializer.py b/openeo_driver/ProcessGraphDeserializer.py index ecd9b329..271e5164 100644 --- a/openeo_driver/ProcessGraphDeserializer.py +++ b/openeo_driver/ProcessGraphDeserializer.py @@ -13,7 +13,7 @@ import time import warnings from pathlib import Path -from typing import Any, Callable, Dict, Iterable, List, Tuple, Union, Sequence +from typing import Any, Callable, Dict, Iterable, List, Tuple, Union, Sequence, Optional import geopandas as gpd import numpy as np @@ -140,7 +140,7 @@ def wrapped(args: dict, env: EvalEnv): # Type hint alias for a "process function": # a Python function that implements some openEO process (as used in `apply_process`) -ProcessFunction = Callable[[dict, EvalEnv], Any] +ProcessFunction = Callable[[Union[dict, ProcessArgs], EvalEnv], Any] def process(f: ProcessFunction) -> ProcessFunction: @@ -764,14 +764,15 @@ def load_collection(args: dict, env: EvalEnv) -> DriverDataCube: .param(name='options', description="options specific to the file format", schema={"type": "object"}) .returns(description="the data as a data cube", schema={}) ) -def load_disk_data(args: Dict, env: EvalEnv) -> DriverDataCube: +def load_disk_data(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: """ Deprecated, use load_uploaded_files or load_stac """ + _log.warning("DEPRECATED: load_disk_data usage") kwargs = dict( - glob_pattern=extract_arg(args, 'glob_pattern'), - format=extract_arg(args, 'format'), - options=args.get('options', {}), + glob_pattern=args.get_required("glob_pattern", expected_type=str), + format=args.get_required("format", expected_type=str), + options=args.get_optional("options", default={}, expected_type=dict), ) dry_run_tracer: DryRunDataTracer = env.get(ENV_DRY_RUN_TRACER) if dry_run_tracer: @@ -930,22 +931,18 @@ def save_result(args: Dict, env: EvalEnv) -> SaveResult: # TODO: return type no @process_registry_100.add_function(spec=read_spec("openeo-processes/experimental/save_ml_model.json")) @process_registry_2xx.add_function(spec=read_spec("openeo-processes/experimental/save_ml_model.json")) -def save_ml_model(args: dict, env: EvalEnv) -> MlModelResult: - data: DriverMlModel = extract_arg(args, "data", process_id="save_ml_model") - if not isinstance(data, DriverMlModel): - raise ProcessParameterInvalidException( - parameter="data", process="save_ml_model", reason=f"Invalid data type {type(data)!r} expected raster-cube." 
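# --- Illustration only (not part of the patch): the ProcessArgs pattern this
# commit rolls out in place of the extract_arg + isinstance boilerplate.
# The process id and argument values below are made up; the calls mirror the
# API used in the ported handlers such as load_disk_data and load_ml_model.
from openeo_driver.processes import ProcessArgs

args = ProcessArgs({"id": "job-123", "options": {"format": "GTiff"}}, process_id="load_ml_model")
job_id = args.get_required("id", expected_type=str)                      # -> "job-123"
options = args.get_optional("options", default={}, expected_type=dict)   # -> {"format": "GTiff"}
level = args.get_optional("level", default="info")                       # absent -> "info"
# A value of the wrong type raises ProcessParameterInvalidException right here,
# with a consistent message, instead of failing later inside the implementation.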
- ) - options = args.get("options", {}) +def save_ml_model(args: ProcessArgs, env: EvalEnv) -> MlModelResult: + data = args.get_required("data", expected_type=DriverMlModel) + options = args.get_optional("options", default={}, expected_type=dict) return MlModelResult(ml_model=data, options=options) @process_registry_100.add_function(spec=read_spec("openeo-processes/experimental/load_ml_model.json")) @process_registry_2xx.add_function(spec=read_spec("openeo-processes/experimental/load_ml_model.json")) -def load_ml_model(args: dict, env: EvalEnv) -> DriverMlModel: +def load_ml_model(args: ProcessArgs, env: EvalEnv) -> DriverMlModel: if env.get(ENV_DRY_RUN_TRACER): return DriverMlModel() - job_id = extract_arg(args, "id") + job_id = args.get_required("id", expected_type=str) return env.backend_implementation.load_ml_model(job_id) @@ -1019,7 +1016,9 @@ def apply_polygon(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: process = args.get_deep("process", "process_graph", expected_type=dict) if "polygons" in args and "geometries" not in args: # TODO remove this deprecated "polygons" parameter handling when not used anymore - _log.warning("In process 'apply_polygon': parameter 'polygons' is deprecated, use 'geometries' instead.") + _log.warning( + "DEPRECATED: In process 'apply_polygon': parameter 'polygons' is deprecated, use 'geometries' instead." + ) geometries = args.get_required("polygons") else: geometries = args.get_required("geometries") @@ -1152,19 +1151,19 @@ def get_validated_parameter(args, param_name, default_value, expected_type, min_ @process_registry_100.add_function(spec=read_spec("openeo-processes/experimental/predict_random_forest.json")) @process_registry_2xx.add_function(spec=read_spec("openeo-processes/experimental/predict_random_forest.json")) -def predict_random_forest(args: dict, env: EvalEnv): +def predict_random_forest(args: ProcessArgs, env: EvalEnv): raise NotImplementedError @process_registry_100.add_function(spec=read_spec("openeo-processes/experimental/predict_catboost.json")) @process_registry_2xx.add_function(spec=read_spec("openeo-processes/experimental/predict_catboost.json")) -def predict_catboost(args: dict, env: EvalEnv): +def predict_catboost(args: ProcessArgs, env: EvalEnv): raise NotImplementedError @process_registry_100.add_function(spec=read_spec("openeo-processes/experimental/predict_probabilities.json")) @process_registry_2xx.add_function(spec=read_spec("openeo-processes/experimental/predict_probabilities.json")) -def predict_probabilities(args: dict, env: EvalEnv): +def predict_probabilities(args: ProcessArgs, env: EvalEnv): raise NotImplementedError @@ -1179,51 +1178,34 @@ def add_dimension(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: @process -def drop_dimension(args: dict, env: EvalEnv) -> DriverDataCube: - data_cube = extract_arg(args, 'data') - if not isinstance(data_cube, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="drop_dimension", - reason=f"Invalid data type {type(data_cube)!r} expected raster-cube." 
- ) - return data_cube.drop_dimension(name=extract_arg(args, 'name')) +def drop_dimension(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: + cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) + name: str = args.get_required("name", expected_type=str) + return cube.drop_dimension(name=name) @process -def dimension_labels(args: dict, env: EvalEnv) -> DriverDataCube: - data_cube = extract_arg(args, 'data') - if not isinstance(data_cube, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="dimension_labels", - reason=f"Invalid data type {type(data_cube)!r} expected raster-cube." - ) - return data_cube.dimension_labels(dimension=extract_arg(args, 'dimension')) +def dimension_labels(args: ProcessArgs, env: EvalEnv) -> List[str]: + cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) + dimension: str = args.get_required("dimension", expected_type=str) + return cube.dimension_labels(dimension=dimension) @process -def rename_dimension(args: dict, env: EvalEnv) -> DriverDataCube: - data_cube = extract_arg(args, 'data') - if not isinstance(data_cube, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="rename_dimension", - reason=f"Invalid data type {type(data_cube)!r} expected raster-cube." - ) - return data_cube.rename_dimension(source=extract_arg(args, 'source'),target=extract_arg(args, 'target')) +def rename_dimension(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: + cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) + source: str = args.get_required("source", expected_type=str) + target: str = args.get_required("target", expected_type=str) + return cube.rename_dimension(source=source, target=target) @process -def rename_labels(args: dict, env: EvalEnv) -> DriverDataCube: - data_cube = extract_arg(args, 'data') - if not isinstance(data_cube, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="rename_labels", - reason=f"Invalid data type {type(data_cube)!r} expected raster-cube." - ) - return data_cube.rename_labels( - dimension=extract_arg(args, 'dimension'), - target=extract_arg(args, 'target'), - source=args.get('source',[]) - ) +def rename_labels(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: + cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) + dimension: str = args.get_required("dimension", expected_type=str) + target: List = args.get_required("target", expected_type=list) + source: Optional[list] = args.get_optional("source", default=None, expected_type=list) + return cube.rename_labels(dimension=dimension, target=target, source=source) @process @@ -1369,14 +1351,10 @@ def aggregate_spatial(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: @process -def mask(args: dict, env: EvalEnv) -> DriverDataCube: - cube = extract_arg(args, 'data') - if not isinstance(cube, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="mask", reason=f"Invalid data type {type(cube)!r} expected raster-cube." 
- ) - mask = extract_arg(args, 'mask') - replacement = args.get('replacement', None) +def mask(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: + cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) + mask: DriverDataCube = args.get_required("mask", expected_type=DriverDataCube) + replacement = args.get_optional("replacement", default=None) return cube.mask(mask=mask, replacement=replacement) @@ -1408,7 +1386,10 @@ def mask_polygon(args: dict, env: EvalEnv) -> DriverDataCube: return image_collection -def _extract_temporal_extent(args: dict, field="extent", process_id="filter_temporal") -> Tuple[str, str]: +def _extract_temporal_extent( + args: Union[dict, ProcessArgs], field="extent", process_id="filter_temporal" +) -> Tuple[str, str]: + # TODO #346: make this a ProcessArgs method? extent = extract_arg(args, name=field, process_id=process_id) if len(extent) != 2: raise ProcessParameterInvalidException( @@ -1433,29 +1414,27 @@ def _extract_temporal_extent(args: dict, field="extent", process_id="filter_temp @process -def filter_temporal(args: dict, env: EvalEnv) -> DriverDataCube: - cube = extract_arg(args, 'data') - if not isinstance(cube, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="filter_temporal", - reason=f"Invalid data type {type(cube)!r} expected raster-cube." - ) +def filter_temporal(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: + cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) extent = _extract_temporal_extent(args, field="extent", process_id="filter_temporal") return cube.filter_temporal(start=extent[0], end=extent[1]) + @process_registry_100.add_function(spec=read_spec("openeo-processes/1.x/proposals/filter_labels.json")) @process_registry_2xx.add_function(spec=read_spec("openeo-processes/2.x/proposals/filter_labels.json")) -def filter_labels(args: dict, env: EvalEnv) -> DriverDataCube: - cube = extract_arg(args, 'data') - if not isinstance(cube, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="filter_labels", - reason=f"Invalid data type {type(cube)!r} expected cube." - ) +def filter_labels(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: + cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) + # TODO: validation that condition is a process graph construct + condition = args.get_required("condition", expected_type=dict) + dimension = args.get_required("dimension", expected_type=str) + context = args.get_optional("context", default=None) + return cube.filter_labels(condition=condition, dimension=dimension, context=context, env=env) - return cube.filter_labels(condition=extract_arg(args,"condition"),dimension=extract_arg(args,"dimension"),context=args.get("context",None),env=env) -def _extract_bbox_extent(args: dict, field="extent", process_id="filter_bbox", handle_geojson=False) -> dict: +def _extract_bbox_extent( + args: Union[dict, ProcessArgs], field="extent", process_id="filter_bbox", handle_geojson=False +) -> dict: + # TODO #346: make this a ProcessArgs method? 
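# --- Illustration only (hypothetical sketch, not part of the patch): one way the
# "TODO #346: make this a ProcessArgs method?" comments above could be resolved
# for the temporal case. The helper name get_temporal_extent is invented here.
from typing import Tuple

from openeo_driver.errors import ProcessParameterInvalidException
from openeo_driver.processes import ProcessArgs


def get_temporal_extent(args: ProcessArgs, name: str = "extent") -> Tuple[str, str]:
    # Same validation as _extract_temporal_extent, but the argument lookup and
    # the process id in the error message come from ProcessArgs itself.
    extent = args.get_required(name, expected_type=(list, tuple))
    if len(extent) != 2:
        raise ProcessParameterInvalidException(
            parameter=name,
            process=args.process_id,
            reason=f"Invalid temporal extent {extent!r}: expected two items [start, end].",
        )
    return extent[0], extent[1]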
extent = extract_arg(args, name=field, process_id=process_id) if handle_geojson and extent.get("type") in [ "Polygon", @@ -1480,24 +1459,16 @@ def _extract_bbox_extent(args: dict, field="extent", process_id="filter_bbox", h @process -def filter_bbox(args: Dict, env: EvalEnv) -> DriverDataCube: - cube = extract_arg(args, 'data') - if not isinstance(cube, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="filter_bbox", reason=f"Invalid data type {type(cube)!r} expected raster-cube." - ) +def filter_bbox(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: + cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) spatial_extent = _extract_bbox_extent(args, "extent", process_id="filter_bbox") return cube.filter_bbox(**spatial_extent) @process -def filter_spatial(args: Dict, env: EvalEnv) -> DriverDataCube: - cube = extract_arg(args, 'data') - geometries = extract_arg(args, 'geometries') - if not isinstance(cube, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="filter_spatial", reason=f"Invalid data type {type(cube)!r} expected raster-cube." - ) +def filter_spatial(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: + cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) + geometries = args.get_required("geometries") if isinstance(geometries, dict): if "type" in geometries and geometries["type"] != "GeometryCollection": @@ -1526,32 +1497,22 @@ def filter_spatial(args: Dict, env: EvalEnv) -> DriverDataCube: @process -def filter_bands(args: Dict, env: EvalEnv) -> Union[DriverDataCube, DriverVectorCube]: - cube: Union[DriverDataCube, DriverVectorCube] = extract_arg(args, "data") - if not isinstance(cube, DriverDataCube) and not isinstance(cube, DriverVectorCube): - raise ProcessParameterInvalidException( - parameter="data", process="filter_bands", reason=f"Invalid data type {type(cube)!r} expected raster-cube." - ) - bands = extract_arg(args, "bands", process_id="filter_bands") +def filter_bands(args: ProcessArgs, env: EvalEnv) -> Union[DriverDataCube, DriverVectorCube]: + cube: Union[DriverDataCube, DriverVectorCube] = args.get_required( + "data", expected_type=(DriverDataCube, DriverVectorCube) + ) + bands = args.get_required("bands", expected_type=list) return cube.filter_bands(bands=bands) @process -def apply_kernel(args: Dict, env: EvalEnv) -> DriverDataCube: - image_collection = extract_arg(args, 'data') - kernel = np.asarray(extract_arg(args, 'kernel')) - factor = args.get('factor', 1.0) - border = args.get('border', 0) - if not isinstance(image_collection, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="apply_kernel", - reason=f"Invalid data type {type(image_collection)!r} expected raster-cube." 
- ) - if border == "0": - # R-client sends `0` border as a string - border = 0 - replace_invalid = args.get('replace_invalid', 0) - return image_collection.apply_kernel(kernel=kernel, factor=factor, border=border, replace_invalid=replace_invalid) +def apply_kernel(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: + cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) + kernel = np.asarray(args.get_required("kernel", expected_type=list)) + factor = args.get_optional("factor", default=1.0, expected_type=(int, float)) + border = args.get_optional("border", default=0, expected_type=int) + replace_invalid = args.get_optional("replace_invalid", default=0, expected_type=(int, float)) + return cube.apply_kernel(kernel=kernel, factor=factor, border=border, replace_invalid=replace_invalid) @process @@ -1583,16 +1544,30 @@ def resample_spatial(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: @process -def resample_cube_spatial(args: dict, env: EvalEnv) -> DriverDataCube: - image_collection = extract_arg(args, 'data') - target_image_collection = extract_arg(args, 'target') - method = args.get('method', 'near') - if not isinstance(image_collection, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="resample_cube_spatial", - reason=f"Invalid data type {type(image_collection)!r} expected raster-cube." - ) - return image_collection.resample_cube_spatial(target=target_image_collection, method=method) +def resample_cube_spatial(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: + cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) + target: DriverDataCube = args.get_required("target", expected_type=DriverDataCube) + method = args.get_enum( + "method", + options=[ + "average", + "bilinear", + "cubic", + "cubicspline", + "lanczos", + "max", + "med", + "min", + "mode", + "near", + "q1", + "q3", + "rms", + "sum", + ], + default="near", + ) + return cube.resample_cube_spatial(target=target, method=method) @process @@ -1691,25 +1666,22 @@ def run_udf(args: dict, env: EvalEnv): @process -def linear_scale_range(args: dict, env: EvalEnv) -> DriverDataCube: - image_collection = extract_arg(args, 'x') - - inputMin = extract_arg(args, "inputMin") - inputMax = extract_arg(args, "inputMax") - outputMax = args.get("outputMax", 1.0) - outputMin = args.get("outputMin", 0.0) - if not isinstance(image_collection, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="linear_scale_range", - reason=f"Invalid data type {type(image_collection)!r} expected raster-cube." 
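# --- Illustration only (not part of the patch): the optional-enum behaviour that
# resample_cube_spatial above relies on. Process id and values are made up and
# the options list is shortened for brevity.
from openeo_driver.processes import ProcessArgs

args = ProcessArgs({"method": "cubic"}, process_id="resample_cube_spatial")
# Present value: it just has to be one of the allowed options.
method = args.get_enum("method", options=["near", "bilinear", "cubic"], default="near")  # -> "cubic"
# Absent value with a default given: the default is returned instead of
# treating the argument as required.
method = ProcessArgs({}, process_id="resample_cube_spatial").get_enum(
    "method", options=["near", "bilinear", "cubic"], default="near"
)  # -> "near"
# A value outside the options (e.g. the old "cube" typo fixed in
# resample_and_merge_cubes.json below) raises ProcessParameterInvalidException.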
- ) - - return image_collection.linear_scale_range(inputMin, inputMax, outputMin, outputMax) +def linear_scale_range(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: + # TODO: eliminate this top-level linear_scale_range process implementation (should be used as `apply` callback) + _log.warning("DEPRECATED: linear_scale_range usage directly on cube is deprecated/non-standard.") + cube: DriverDataCube = args.get_required("x", expected_type=DriverDataCube) + # Note: non-standard camelCase parameter names (https://github.com/Open-EO/openeo-processes/issues/302) + input_min = args.get_required("inputMin") + input_max = args.get_required("inputMax") + output_min = args.get_optional("outputMin", default=0.0) + output_max = args.get_optional("outputMax", default=1.0) + # TODO linear_scale_range is defined on GeopysparkDataCube, but not on DriverDataCube + return cube.linear_scale_range(input_min, input_max, output_min, output_max) @process -def constant(args: dict, env: EvalEnv): - return args["x"] +def constant(args: ProcessArgs, env: EvalEnv): + return args.get_required("x") def flatten_children_node_types(process_graph: Union[dict, list]): @@ -1861,10 +1833,11 @@ def apply_process(process_id: str, args: dict, namespace: Union[str, None], env: ]) .returns("GeoJSON-style feature collection", schema={"type": "object", "subtype": "geojson"}) ) -def read_vector(args: Dict, env: EvalEnv) -> DelayedVector: +def read_vector(args: ProcessArgs, env: EvalEnv) -> DelayedVector: # TODO #114 EP-3981: deprecated in favor of load_uploaded_files/load_external? https://github.com/Open-EO/openeo-processes/issues/322 # TODO: better argument name than `filename`? - path = extract_arg(args, "filename") + _log.warning("DEPRECATED: read_vector usage") + path = args.get_required("filename") _check_geometry_path_assumption( path=path, process="read_vector", parameter="filename" ) @@ -1911,10 +1884,10 @@ def load_uploaded_files(args: ProcessArgs, env: EvalEnv) -> Union[DriverVectorCu .param('data', description="GeoJson object.", schema={"type": "object", "subtype": "geojson"}) .returns("vector-cube", schema={"type": "object", "subtype": "vector-cube"}) ) -def to_vector_cube(args: Dict, env: EvalEnv): - _log.warning("Experimental process `to_vector_cube` is deprecated, use `load_geojson` instead") +def to_vector_cube(args: ProcessArgs, env: EvalEnv): + _log.warning("DEPRECATED: process to_vector_cube is deprecated, use load_geojson instead") # TODO: remove this experimental/deprecated process - data = extract_arg(args, "data", process_id="to_vector_cube") + data = args.get_required("data") if isinstance(data, dict) and data.get("type") in {"Polygon", "MultiPolygon", "Feature", "FeatureCollection"}: return env.backend_implementation.vector_cube_cls.from_geojson(data) raise FeatureUnsupportedException(f"Converting {type(data)} to vector cube is not supported") @@ -1980,14 +1953,10 @@ def get_geometries(args: Dict, env: EvalEnv) -> Union[DelayedVector, dict]: .param('data', description="A raster data cube.", schema={"type": "object", "subtype": "raster-cube"}) .returns("vector-cube", schema={"type": "object", "subtype": "vector-cube"}) ) -def raster_to_vector(args: Dict, env: EvalEnv): - image_collection = extract_arg(args, 'data') - if not isinstance(image_collection, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="raster_to_vector", - reason=f"Invalid data type {type(image_collection)!r} expected raster-cube." 
- ) - return image_collection.raster_to_vector() +def raster_to_vector(args: ProcessArgs, env: EvalEnv): + cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) + # TODO: raster_to_vector is only defined on GeopysparkDataCube, not DriverDataCube + return cube.raster_to_vector() @non_standard_process( @@ -2111,9 +2080,9 @@ def evaluate_process_from_url(process_id: str, namespace: str, args: dict, env: .param('seconds', description="Number of seconds to sleep.", schema={"type": "number"}, required=True) .returns("Original data", schema={}) ) -def sleep(args: Dict, env: EvalEnv): - data = extract_arg(args, "data") - seconds = extract_arg(args, "seconds") +def sleep(args: ProcessArgs, env: EvalEnv): + data = args.get_required("data") + seconds = args.get_required("seconds", expected_type=(int, float)) dry_run_tracer: DryRunDataTracer = env.get(ENV_DRY_RUN_TRACER) if not dry_run_tracer: _log.info("Sleeping {s} seconds".format(s=seconds)) @@ -2220,20 +2189,15 @@ def resolution_merge(args: ProcessArgs, env: EvalEnv): .param('data', description="Data to discard.", schema={}, required=False) .returns("Nothing", schema={}) ) -def discard_result(args: Dict, env: EvalEnv): +def discard_result(args: ProcessArgs, env: EvalEnv): # TODO: keep a reference to the discarded result? return NullResult() @process_registry_100.add_function(spec=read_spec("openeo-processes/experimental/mask_scl_dilation.json")) @process_registry_2xx.add_function(spec=read_spec("openeo-processes/experimental/mask_scl_dilation.json")) -def mask_scl_dilation(args: Dict, env: EvalEnv): - cube: DriverDataCube = extract_arg(args, 'data') - if not isinstance(cube, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="mask_scl_dilation", - reason=f"Invalid data type {type(cube)!r} expected raster-cube." - ) +def mask_scl_dilation(args: ProcessArgs, env: EvalEnv): + cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) if hasattr(cube, "mask_scl_dilation"): the_args = args.copy() del the_args["data"] @@ -2264,13 +2228,8 @@ def to_scl_dilation_mask(args: ProcessArgs, env: EvalEnv): @process_registry_100.add_function(spec=read_spec("openeo-processes/experimental/mask_l1c.json")) @process_registry_2xx.add_function(spec=read_spec("openeo-processes/experimental/mask_l1c.json")) -def mask_l1c(args: Dict, env: EvalEnv): - cube: DriverDataCube = extract_arg(args, 'data') - if not isinstance(cube, DriverDataCube): - raise ProcessParameterInvalidException( - parameter="data", process="mask_l1c", - reason=f"Invalid data type {type(cube)!r} expected raster-cube." 
- ) +def mask_l1c(args: ProcessArgs, env: EvalEnv): + cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube) if hasattr(cube, "mask_l1c"): return cube.mask_l1c() else: @@ -2335,8 +2294,8 @@ def array_create(args: ProcessArgs, env: EvalEnv) -> list: @process_registry_100.add_function(spec=read_spec("openeo-processes/1.x/proposals/load_result.json")) -def load_result(args: dict, env: EvalEnv) -> DriverDataCube: - job_id = extract_arg(args, "id") +def load_result(args: ProcessArgs, env: EvalEnv) -> DriverDataCube: + job_id = args.get_required("id", expected_type=str) user = env.get("user") arguments = {} @@ -2364,10 +2323,11 @@ def load_result(args: dict, env: EvalEnv) -> DriverDataCube: @process_registry_100.add_function(spec=read_spec("openeo-processes/1.x/proposals/inspect.json")) @process_registry_2xx.add_function(spec=read_spec("openeo-processes/2.x/proposals/inspect.json")) -def inspect(args: dict, env: EvalEnv): - data = extract_arg(args, "data") - message = args.get("message", "") - level = args.get("level", "info") +def inspect(args: ProcessArgs, env: EvalEnv): + data = args.get_required("data") + message = args.get_optional("message", default="") + code = args.get_optional("code", default="User") + level = args.get_optional("level", default="info") if message: _log.log(level=logging.getLevelName(level.upper()), msg=message) data_message = str(data) diff --git a/openeo_driver/datacube.py b/openeo_driver/datacube.py index 1b5b082f..061f5ba4 100644 --- a/openeo_driver/datacube.py +++ b/openeo_driver/datacube.py @@ -85,13 +85,15 @@ def filter_spatial(self, geometries) -> 'DriverDataCube': def filter_bands(self, bands) -> 'DriverDataCube': self._not_implemented() - def filter_labels(self, condition: dict,dimensin: str, context: Optional[dict] = None, env: EvalEnv = None ) -> 'DriverDataCube': + def filter_labels( + self, condition: dict, dimension: str, context: Optional[dict] = None, env: EvalEnv = None + ) -> "DriverDataCube": self._not_implemented() def apply(self, process: dict, *, context: Optional[dict] = None, env: EvalEnv) -> "DriverDataCube": self._not_implemented() - def apply_kernel(self, kernel: list, factor=1, border=0, replace_invalid=0) -> 'DriverDataCube': + def apply_kernel(self, kernel: numpy.ndarray, factor=1, border=0, replace_invalid=0) -> "DriverDataCube": self._not_implemented() def apply_neighborhood( diff --git a/openeo_driver/processes.py b/openeo_driver/processes.py index 3d2b4a9b..d4a250ea 100644 --- a/openeo_driver/processes.py +++ b/openeo_driver/processes.py @@ -5,6 +5,7 @@ from pathlib import Path from typing import Any, Callable, Collection, Dict, List, Optional, Tuple, Union +from openeo_driver.datacube import DriverDataCube from openeo_driver.errors import ( OpenEOApiException, ProcessParameterInvalidException, @@ -325,6 +326,8 @@ def _check_value( ): if expected_type: if not isinstance(value, expected_type): + if expected_type is DriverDataCube: + expected_type = "raster cube" raise ProcessParameterInvalidException( parameter=name, process=self.process_id, reason=f"Expected {expected_type} but got {type(value)}." 
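# --- Illustration only (not part of the patch): effect of the DriverDataCube
# special-casing above on the reported error. Values are made up.
from openeo_driver.datacube import DriverDataCube
from openeo_driver.errors import ProcessParameterInvalidException
from openeo_driver.processes import ProcessArgs

try:
    ProcessArgs({"data": 123}, process_id="mask").get_required("data", expected_type=DriverDataCube)
except ProcessParameterInvalidException:
    # The reason now reads "Expected raster cube but got <class 'int'>."
    # instead of exposing the full DriverDataCube class representation.
    pass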
) @@ -423,13 +426,19 @@ def get_subset(self, names: List[str], aliases: Optional[Dict[str, str]] = None) kwargs[key] = self[alias] return kwargs - def get_enum(self, name: str, options: Collection[ArgumentValue]) -> ArgumentValue: + def get_enum( + self, name: str, options: Collection[ArgumentValue], default: Optional[ArgumentValue] = None + ) -> ArgumentValue: """ Get argument by name and check if it belongs to given set of (enum) values. Originally: `extract_arg_enum` """ - value = self.get_required(name=name) + # TODO: use an "unset" sentinel value instead of None for default? + if default is None: + value = self.get_required(name=name) + else: + value = self.get_optional(name=name, default=default) if value not in options: raise ProcessParameterInvalidException( parameter=name, diff --git a/tests/data/pg/1.0/resample_and_merge_cubes.json b/tests/data/pg/1.0/resample_and_merge_cubes.json index ce0a059f..17df6859 100644 --- a/tests/data/pg/1.0/resample_and_merge_cubes.json +++ b/tests/data/pg/1.0/resample_and_merge_cubes.json @@ -20,7 +20,7 @@ "target": { "from_node": "collection2" }, - "method": "cube" + "method": "cubic" }, "result": false }, @@ -52,4 +52,4 @@ }, "result": true } -} \ No newline at end of file +} diff --git a/tests/test_processes.py b/tests/test_processes.py index 663954fd..bfe76f78 100644 --- a/tests/test_processes.py +++ b/tests/test_processes.py @@ -458,7 +458,7 @@ def test_get_required_with_type(self): with pytest.raises( ProcessParameterInvalidException, match=re.escape( - "The value passed for parameter 'color' in process 'wibble' is invalid: Expected but got ." + "The value passed for parameter 'color' in process 'wibble' is invalid: Expected raster cube but got ." ), ): _ = args.get_required("color", expected_type=DriverDataCube) @@ -521,7 +521,7 @@ def test_get_optional_with_type(self): with pytest.raises( ProcessParameterInvalidException, match=re.escape( - "The value passed for parameter 'foo' in process 'wibble' is invalid: Expected but got ." + "The value passed for parameter 'foo' in process 'wibble' is invalid: Expected raster cube but got ." 
), ): _ = args.get_optional("foo", expected_type=DriverDataCube) @@ -615,6 +615,15 @@ def test_get_enum(self): ): _ = args.get_enum("color", options=["R", "G", "B"]) + def test_get_enum_optional(self): + args = ProcessArgs({"size": 3, "color": "red"}, process_id="wibble") + assert args.get_enum("color", options=["red", "green", "blue"], default="green") == "red" + assert args.get_enum("colour", options=["red", "green", "blue"], default="green") == "green" + + assert args.get_enum("size", options=[0, 1, 2, 3], default=0) == 3 + assert args.get_enum("dim", options=[0, 1, 2, 3], default=0) == 0 + assert args.get_enum("dim", options=[0, 1, 2, 3], default=2) == 2 + def test_validator_generic(self): args = ProcessArgs({"size": 11}, process_id="wibble") diff --git a/tests/test_views_execute.py b/tests/test_views_execute.py index 9bcebd36..109f2127 100644 --- a/tests/test_views_execute.py +++ b/tests/test_views_execute.py @@ -521,6 +521,7 @@ def test_execute_resample_and_merge_cubes(api): assert last_load_collection_call.target_resolution == [10, 10] assert dummy.merge_cubes.call_count == 1 assert dummy.resample_cube_spatial.call_count == 1 + assert dummy.resample_cube_spatial.call_args.kwargs["method"] == "cubic" args, kwargs = dummy.merge_cubes.call_args assert args[1:] == ('or',) @@ -3021,7 +3022,7 @@ def test_execute_no_cube_dynamic_args(api): assert kwargs["factor"] == 7.75 -@pytest.mark.parametrize(["border", "expected"], [(0, 0), ("0", 0), ]) +@pytest.mark.parametrize(["border", "expected"], [(0, 0)]) def test_execute_apply_kernel_border(api, border, expected): pg = { "lc1": {'process_id': 'load_collection', 'arguments': {'id': 'S2_FOOBAR'}},