Fix #652 preserve job title in execute_batch
soxofaan committed Oct 31, 2024
1 parent: 7c321be · commit: 4ce7d78
Showing 8 changed files with 131 additions and 7 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -15,6 +15,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
 ### Fixed
 
+- Fixed support for `title` and `description` in `execute_batch()` ([#652](https://github.com/Open-EO/openeo-python-client/issues/652))
+
 
 ## [0.33.0] - 2024-10-18

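For context on what this entry fixes: `title` and `description` passed to `execute_batch()` were previously not forwarded to the job creation request, so batch jobs ended up without a name in backend job listings. A minimal usage sketch of the fixed behavior; the backend URL and collection id are placeholders:

import openeo

# Placeholder backend and collection, for illustration only.
connection = openeo.connect("https://openeo.example.test")
cube = connection.load_collection("S2")

# With this fix, title/description are forwarded to the POST /jobs
# request and stored as metadata of the created batch job.
cube.execute_batch(
    outputfile="result.tiff",
    title="My S2 job",
    description="Batch job created via execute_batch()",
)
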
3 changes: 3 additions & 0 deletions openeo/rest/_testing.py
@@ -87,6 +87,9 @@ def _handle_post_jobs(self, request, context):
         pg = post_data["process"]["process_graph"]
         job_id = f"job-{len(self.batch_jobs):03d}"
         job_data = {"job_id": job_id, "pg": pg, "status": "created"}
+        for field in ["title", "description"]:
+            if field in post_data:
+                job_data[field] = post_data[field]
         for field in self.extra_job_metadata_fields:
             job_data[field] = post_data.get(field)
         self.batch_jobs[job_id] = job_data
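
The dummy backend used throughout the test suite now records `title` and `description` from the `POST /jobs` payload. Note the asymmetry with the pre-existing loop below it: the new fields are copied only when present, while `extra_job_metadata_fields` are always recorded (possibly as `None`), so assertions on jobs without a title keep working. A standalone sketch of that bookkeeping logic, not the actual `DummyBackend` class:

def record_job(post_data: dict, batch_jobs: dict, extra_fields: tuple = ()) -> dict:
    # Assign sequential ids like "job-000", as the dummy backend does.
    job_id = f"job-{len(batch_jobs):03d}"
    job_data = {"job_id": job_id, "pg": post_data["process"]["process_graph"], "status": "created"}
    # New in this commit: record title/description only when given.
    for field in ["title", "description"]:
        if field in post_data:
            job_data[field] = post_data[field]
    # Pre-existing behavior: always record explicitly requested fields.
    for field in extra_fields:
        job_data[field] = post_data.get(field)
    batch_jobs[job_id] = job_data
    return job_data
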
14 changes: 13 additions & 1 deletion openeo/rest/datacube.py
@@ -2361,6 +2361,10 @@ def execute_batch(
         outputfile: Optional[Union[str, pathlib.Path]] = None,
         out_format: Optional[str] = None,
         *,
+        title: Optional[str] = None,
+        description: Optional[str] = None,
+        plan: Optional[str] = None,
+        budget: Optional[float] = None,
         print: typing.Callable[[str], None] = print,
         max_poll_interval: float = 60,
         connection_retry_interval: float = 30,
@@ -2402,7 +2406,15 @@ def execute_batch(
             method="DataCube.execute_batch()",
         )
 
-        job = cube.create_job(job_options=job_options, validate=validate, auto_add_save_result=False)
+        job = cube.create_job(
+            title=title,
+            description=description,
+            plan=plan,
+            budget=budget,
+            job_options=job_options,
+            validate=validate,
+            auto_add_save_result=False,
+        )
         return job.run_synchronous(
             outputfile=outputfile,
             print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval
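
The fix itself is plain parameter forwarding: `DataCube.execute_batch()` now hands the new keyword arguments to `create_job()`, making the convenience method consistent with the explicit two-step flow. A sketch of that equivalence, assuming `cube` is a `DataCube` whose graph ends in a save_result node, and "free" is a hypothetical billing plan name:

from openeo.rest.datacube import DataCube

def run_named_job(cube: DataCube, path: str) -> None:
    # Convenience call; after this fix, title/plan/budget reach the backend.
    cube.execute_batch(outputfile=path, title="My job", plan="free", budget=10.0)

def run_named_job_explicit(cube: DataCube, path: str) -> None:
    # Equivalent explicit flow, which already preserved the metadata.
    job = cube.create_job(title="My job", plan="free", budget=10.0)
    job.run_synchronous(outputfile=path)
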
17 changes: 12 additions & 5 deletions openeo/rest/mlmodel.py
@@ -59,10 +59,17 @@ def load_ml_model(connection: Connection, id: Union[str, BatchJob]) -> MlModel:
         return MlModel(graph=PGNode(process_id="load_ml_model", id=id), connection=connection)
 
     def execute_batch(
-            self,
-            outputfile: Union[str, pathlib.Path],
-            print=print, max_poll_interval=60, connection_retry_interval=30,
-            job_options=None,
+        self,
+        outputfile: Union[str, pathlib.Path],
+        *,
+        title: Optional[str] = None,
+        description: Optional[str] = None,
+        plan: Optional[str] = None,
+        budget: Optional[float] = None,
+        print=print,
+        max_poll_interval=60,
+        connection_retry_interval=30,
+        job_options=None,
     ) -> BatchJob:
         """
         Evaluate the process graph by creating a batch job, and retrieving the results when it is finished.
@@ -75,7 +82,7 @@ def execute_batch(
         :param out_format: (optional) Format of the job result.
         :param format_options: String Parameters for the job result format
         """
-        job = self.create_job(job_options=job_options)
+        job = self.create_job(title=title, description=description, plan=plan, budget=budget, job_options=job_options)
         return job.run_synchronous(
             # TODO #135 support multi file result sets too
             outputfile=outputfile,
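
Besides gaining the new parameters, the `MlModel.execute_batch()` signature is rewrapped with a bare `*`, so everything after `outputfile` is keyword-only and cannot be swallowed positionally. A small sketch of the calling convention, with hypothetical file and job names:

from openeo.rest.mlmodel import MlModel

def train_with_title(ml_model: MlModel) -> None:
    # Positional use would raise TypeError with the keyword-only signature:
    #   ml_model.execute_batch("model.out", "My training job")
    ml_model.execute_batch("model.out", title="My training job", description="Random forest training")
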
14 changes: 13 additions & 1 deletion openeo/rest/vectorcube.py
@@ -248,6 +248,10 @@ def execute_batch(
         outputfile: Optional[Union[str, pathlib.Path]] = None,
         out_format: Optional[str] = None,
         *,
+        title: Optional[str] = None,
+        description: Optional[str] = None,
+        plan: Optional[str] = None,
+        budget: Optional[float] = None,
         print=print,
         max_poll_interval: float = 60,
         connection_retry_interval: float = 30,
@@ -287,7 +291,15 @@ def execute_batch(
             default_format=self._DEFAULT_VECTOR_FORMAT,
             method="VectorCube.execute_batch()",
         )
-        job = cube.create_job(job_options=job_options, validate=validate, auto_add_save_result=False)
+        job = cube.create_job(
+            title=title,
+            description=description,
+            plan=plan,
+            budget=budget,
+            job_options=job_options,
+            validate=validate,
+            auto_add_save_result=False,
+        )
         return job.run_synchronous(
             # TODO #135 support multi file result sets too
             outputfile=outputfile,
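
`plan` and `budget` ride along with the same fix in `VectorCube.execute_batch()`; in the openEO API they correspond to the billing plan and maximum cost of a batch job. A sketch, assuming `vector_cube` is a `VectorCube` and the backend advertises a billing plan named "free":

from openeo.rest.vectorcube import VectorCube

def run_with_billing(vector_cube: VectorCube) -> None:
    vector_cube.execute_batch(
        outputfile="result.geojson",  # GeoJSON is the default vector output format
        title="Parcel geometries",
        plan="free",  # hypothetical billing plan name
        budget=1.5,  # maximum cost, in the backend's own currency units
    )
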
34 changes: 34 additions & 0 deletions tests/extra/test_job_management.py
@@ -1089,6 +1089,8 @@ def test_minimal(self, con, dummy_backend, remote_process_definitions):
}
},
"status": "created",
"title": "Process '3plus5' with {}",
"description": "Process '3plus5' (namespace https://remote.test/3plus5.json) with {}",
}
}

@@ -1150,6 +1152,8 @@ def test_basic_parameterization(self, con, dummy_backend, parameter_defaults, ro
}
},
"status": "created",
"title": dirty_equals.IsStr(regex="Process 'increment' with .*"),
"description": dirty_equals.IsStr(regex="Process 'increment' .*"),
}
}

@@ -1199,6 +1203,8 @@ def test_process_references_in_constructor(
"job_id": "job-000",
"pg": {"3plus51": {**expected, "arguments": {}, "result": True}},
"status": "created",
"title": "Process '3plus5' with {}",
"description": f"Process '3plus5' (namespace {namespace}) with {{}}",
}
}

@@ -1251,6 +1257,8 @@ def test_with_job_manager_remote_basic(
}
},
"status": "finished",
"title": "Process 'increment' with {'data': 1, 'increment': 5}",
"description": "Process 'increment' (namespace https://remote.test/increment.json) with {'data': 1, 'increment': 5}",
},
"job-001": {
"job_id": "job-001",
@@ -1263,6 +1271,8 @@
}
},
"status": "finished",
"title": "Process 'increment' with {'data': 2, 'increment': 5}",
"description": "Process 'increment' (namespace https://remote.test/increment.json) with {'data': 2, 'increment': 5}",
},
"job-002": {
"job_id": "job-002",
@@ -1275,6 +1285,8 @@
}
},
"status": "finished",
"title": "Process 'increment' with {'data': 3, 'increment': 5}",
"description": "Process 'increment' (namespace https://remote.test/increment.json) with {'data': 3, 'increment': 5}",
},
}

@@ -1353,6 +1365,8 @@ def test_with_job_manager_remote_parameter_handling(
}
},
"status": "finished",
"title": dirty_equals.IsStr(regex="Process 'increment' with .*"),
"description": dirty_equals.IsStr(regex="Process 'increment'.*"),
},
"job-001": {
"job_id": "job-001",
@@ -1365,6 +1379,8 @@
}
},
"status": "finished",
"title": dirty_equals.IsStr(regex="Process 'increment' with .*"),
"description": dirty_equals.IsStr(regex="Process 'increment'.*"),
},
"job-002": {
"job_id": "job-002",
@@ -1377,6 +1393,8 @@
}
},
"status": "finished",
"title": dirty_equals.IsStr(regex="Process 'increment' with .*"),
"description": dirty_equals.IsStr(regex="Process 'increment'.*"),
},
}

@@ -1436,6 +1454,8 @@ def test_with_job_manager_remote_geometry(self, tmp_path, requests_mock, dummy_b
}
},
"status": "finished",
"title": "Process 'offset_polygon' with {'data': 123, 'polygons': {'type': 'Point', 'coordinates': (1...",
"description": "Process 'offset_polygon' (namespace https://remote.test/offset_polygon.json) with {'data': 123, 'polygons': {'type': 'Point', 'coordinates': (1.0, 2.0)}, 'offset': 11}",
},
"job-001": {
"job_id": "job-001",
@@ -1452,6 +1472,8 @@
}
},
"status": "finished",
"title": "Process 'offset_polygon' with {'data': 123, 'polygons': {'type': 'Point', 'coordinates': (3...",
"description": "Process 'offset_polygon' (namespace https://remote.test/offset_polygon.json) with {'data': 123, 'polygons': {'type': 'Point', 'coordinates': (3.0, 4.0)}, 'offset': 22}",
},
}

@@ -1527,6 +1549,8 @@ def test_with_job_manager_remote_geometry_after_resume(
}
},
"status": "finished",
"title": dirty_equals.IsStr(regex="Process 'offset_polygon' with.*"),
"description": dirty_equals.IsStr(regex="Process 'offset_polygon' .*"),
},
"job-001": {
"job_id": "job-001",
@@ -1543,6 +1567,8 @@
}
},
"status": "finished",
"title": dirty_equals.IsStr(regex="Process 'offset_polygon' with.*"),
"description": dirty_equals.IsStr(regex="Process 'offset_polygon' .*"),
},
}

@@ -1586,6 +1612,8 @@ def test_with_job_manager_udp_basic(
}
},
"status": "finished",
"title": "Process 'increment' with {'data': 3, 'increment': 5}",
"description": "Process 'increment' (namespace None) with {'data': 3, 'increment': 5}",
},
"job-001": {
"job_id": "job-001",
@@ -1597,6 +1625,8 @@
}
},
"status": "finished",
"title": "Process 'increment' with {'data': 5, 'increment': 5}",
"description": "Process 'increment' (namespace None) with {'data': 5, 'increment': 5}",
},
}

@@ -1638,6 +1668,8 @@ def test_with_job_manager_parameter_column_map(
}
},
"status": "finished",
"title": "Process 'increment' with {'data': 3, 'increment': 100}",
"description": "Process 'increment' (namespace https://remote.test/increment.json) with {'data': 3, 'increment': 100}",
},
"job-001": {
"job_id": "job-001",
@@ -1650,5 +1682,7 @@
}
},
"status": "finished",
"title": "Process 'increment' with {'data': 5, 'increment': 200}",
"description": "Process 'increment' (namespace https://remote.test/increment.json) with {'data': 5, 'increment': 200}",
},
}
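
Where expected job titles embed stringified parameter dicts whose exact rendering is not worth pinning down, the tests above compare against `dirty_equals` placeholders, whose `==` performs pattern matching instead of exact equality. A self-contained sketch of the idiom:

import dirty_equals

recorded = {
    "status": "finished",
    "title": "Process 'increment' with {'data': 1, 'increment': 5}",
}
assert recorded == {
    "status": "finished",
    # Matches any string satisfying the regex:
    "title": dirty_equals.IsStr(regex="Process 'increment' with .*"),
}
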
27 changes: 27 additions & 0 deletions tests/rest/datacube/test_datacube.py
@@ -1236,3 +1236,30 @@ def test_cube_execute_batch_validation(self, dummy_backend, connection, validate
else:
assert dummy_backend.validation_requests == []
assert caplog.messages == []


def test_execute_batch_with_title(s2cube, dummy_backend):
"""
Support title/description in execute_batch
https://github.com/Open-EO/openeo-python-client/issues/652
"""
s2cube.execute_batch(title="S2 job", description="Lorem ipsum dolor S2 amet")
assert dummy_backend.batch_jobs == {
"job-000": {
"job_id": "job-000",
"pg": {
"loadcollection1": {
"process_id": "load_collection",
"arguments": {"id": "S2", "spatial_extent": None, "temporal_extent": None},
},
"saveresult1": {
"process_id": "save_result",
"arguments": {"data": {"from_node": "loadcollection1"}, "format": "GTiff", "options": {}},
"result": True,
},
},
"status": "finished",
"title": "S2 job",
"description": "Lorem ipsum dolor S2 amet",
}
}
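
The expected graph contains a `saveresult1` node although the test never calls `save_result()`: `execute_batch()` appends one automatically (with the raster default "GTiff" here) so the batch job has a result to download. The explicit equivalent would be roughly this, using the same `s2cube` fixture:

# Sketch: building the save step by hand instead of relying on auto-add.
cube = s2cube.save_result(format="GTiff")
cube.execute_batch(title="S2 job", description="Lorem ipsum dolor S2 amet")
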
27 changes: 27 additions & 0 deletions tests/rest/datacube/test_vectorcube.py
@@ -829,3 +829,30 @@ def test_vector_to_raster(s2cube, vector_cube, requests_mock, target_parameter,
"result": True,
},
}


def test_execute_batch_with_title(vector_cube, dummy_backend):
"""
Support title/description in execute_batch
https://github.com/Open-EO/openeo-python-client/issues/652
"""
vector_cube.execute_batch(title="S2 job", description="Lorem ipsum dolor S2 amet")
assert dummy_backend.batch_jobs == {
"job-000": {
"job_id": "job-000",
"pg": {
"loadgeojson1": {
"process_id": "load_geojson",
"arguments": {"data": {"coordinates": [1, 2], "type": "Point"}, "properties": []},
},
"saveresult1": {
"process_id": "save_result",
"arguments": {"data": {"from_node": "loadgeojson1"}, "format": "GeoJSON", "options": {}},
"result": True,
},
},
"status": "finished",
"title": "S2 job",
"description": "Lorem ipsum dolor S2 amet",
}
}
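
Same mechanism for vector cubes, where the auto-added save step falls back to the GeoJSON default, matching the `"format": "GeoJSON"` pinned in the expected graph. An explicit format would change that node; a sketch using the same `vector_cube` fixture (format name availability depends on the backend):

vector_cube.execute_batch(out_format="GPKG", title="S2 job")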
