diff --git a/tests/endpoints/test_voyages_search_enriched.py b/tests/endpoints/test_voyages_search_enriched.py
index 23e56065..cfd5cbb4 100644
--- a/tests/endpoints/test_voyages_search_enriched.py
+++ b/tests/endpoints/test_voyages_search_enriched.py
@@ -22,6 +22,32 @@ def test_search_returns_dataframe(self):
 
         assert len(df) == 2
 
+    def test_search_pagination_on_df(self):
+        start = datetime(2021, 1, 1)
+        end = datetime(2021, 2, 28)
+
+        res = (
+            VoyagesSearchEnriched()
+            .search(
+                time_min=start, time_max=end, origins=rotterdam, columns="all"
+            )
+            .to_df()
+        )
+
+        assert len(res) > 1000
+
+    def test_search_pagination_on_lists(self):
+        start = datetime(2021, 1, 1)
+        end = datetime(2021, 2, 28)
+
+        res = (
+            VoyagesSearchEnriched()
+            .search(time_min=start, time_max=end, origins=rotterdam)
+            .to_list()
+        )
+
+        assert len(res) > 1000
+
     def test_search_returns_some_cols(self):
         start = datetime(2021, 6, 17)
         end = datetime(2021, 6, 21)
diff --git a/vortexasdk/client.py b/vortexasdk/client.py
index 7f3ebce0..9f082bcc 100644
--- a/vortexasdk/client.py
+++ b/vortexasdk/client.py
@@ -10,6 +10,8 @@ from urllib.parse import urlencode
 import uuid
 
+import json
+
 from requests import Response
 from tqdm import tqdm
 from warnings import warn
 
@@ -195,8 +197,14 @@ def _process_multiple_pages_with_search_after(
     size = data.get("size", 500)
 
     first_response = _send_post_request(url, payload, size, 0, headers)
+
     responses.append(first_response.get("data", []))
     next_request = first_response.get("next_request")
+    search_after = first_response.get("search_after")
+
+    if not next_request and search_after:
+        next_request = dict(payload)
+        next_request["search_after"] = search_after
 
     while next_request:
         logger.warn(f"Sending post request with search_after")
@@ -205,6 +213,10 @@
         )
         responses.append(dict_response.get("data", []))
         next_request = dict_response.get("next_request")
+        search_after = dict_response.get("search_after")
+        if not next_request and search_after:
+            next_request = dict(payload)
+            next_request["search_after"] = search_after
 
     return responses
 
@@ -295,7 +307,13 @@
                 "data": data,
                 "total": int(response.headers["x-total"]),
             }
-
+            if response.headers["x-next-request"] != "undefined":
+                try:
+                    decoded["search_after"] = json.loads(
+                        response.headers["x-next-request"]
+                    )
+                except Exception as e:
+                    logger.error(f"error parsing search_after: {e}")
         else:
             decoded = response.json()
     except JSONDecodeError: