Setting HTTP timeout to just 5 seconds, so we can fail fast if there's a networking error.
robotrapta committed Feb 14, 2024
1 parent 9eee49d commit a0586f8
Showing 2 changed files with 23 additions and 8 deletions.
21 changes: 16 additions & 5 deletions src/groundlight/client.py
@@ -28,6 +28,11 @@
 
 logger = logging.getLogger("groundlight.sdk")
 
+# Set the urllib3 request timeout to something modern and fast.
+# The system defaults can be stupidly long:
+# it used to take >8 min to time out against a bad IP address.
+DEFAULT_REQUEST_TIMEOUT = 5
+
 
 class ApiTokenError(Exception):
     pass
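
As background for the constant above: OpenAPI-generated Python clients like this SDK's generally accept _request_timeout as either a single number (the total request timeout in seconds) or a (connect, read) tuple, and hand it down to urllib3. A minimal sketch of the two forms, assuming that convention holds; the URL is illustrative:

    import urllib3

    DEFAULT_REQUEST_TIMEOUT = 5  # seconds, matching the constant added above

    # One number bounds the whole request. This is roughly what
    # _request_timeout=DEFAULT_REQUEST_TIMEOUT means for a generated API method.
    total = urllib3.Timeout(total=DEFAULT_REQUEST_TIMEOUT)

    # A (connect, read) pair bounds each phase separately; generated clients
    # typically accept a tuple for _request_timeout as well.
    phased = urllib3.Timeout(connect=2, read=5)

    http = urllib3.PoolManager()
    # response = http.request("GET", "https://example.com/", timeout=total)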
@@ -154,7 +159,7 @@ def get_detector(self, id: Union[str, Detector]) -> Detector:  # pylint: disable=redefined-builtin
         if isinstance(id, Detector):
             # Short-circuit
             return id
-        obj = self.detectors_api.get_detector(id=id)
+        obj = self.detectors_api.get_detector(id=id, _request_timeout=DEFAULT_REQUEST_TIMEOUT)
         return Detector.parse_obj(obj.to_dict())
 
     def get_detector_by_name(self, name: str) -> Detector:
@@ -177,7 +182,9 @@ def list_detectors(self, page: int = 1, page_size: int = 10) -> PaginatedDetectorList:
         :return: PaginatedDetectorList
         """
-        obj = self.detectors_api.list_detectors(page=page, page_size=page_size)
+        obj = self.detectors_api.list_detectors(
+            page=page, page_size=page_size, _request_timeout=DEFAULT_REQUEST_TIMEOUT
+        )
         return PaginatedDetectorList.parse_obj(obj.to_dict())
 
     def create_detector(
@@ -213,7 +220,7 @@ def create_detector(
             detector_creation_input.pipeline_config = pipeline_config
         if metadata is not None:
             detector_creation_input.metadata = str(url_encode_dict(metadata, name="metadata", size_limit_bytes=1024))
-        obj = self.detectors_api.create_detector(detector_creation_input)
+        obj = self.detectors_api.create_detector(detector_creation_input, _request_timeout=DEFAULT_REQUEST_TIMEOUT)
         return Detector.parse_obj(obj.to_dict())
 
     def get_or_create_detector(
@@ -282,7 +289,7 @@ def get_image_query(self, id: str) -> ImageQuery:  # pylint: disable=redefined-builtin
         :return: ImageQuery
         """
-        obj = self.image_queries_api.get_image_query(id=id)
+        obj = self.image_queries_api.get_image_query(id=id, _request_timeout=DEFAULT_REQUEST_TIMEOUT)
         iq = ImageQuery.parse_obj(obj.to_dict())
         return self._fixup_image_query(iq)

@@ -296,7 +303,9 @@ def list_image_queries(self, page: int = 1, page_size: int = 10) -> PaginatedImageQueryList:
         :return: PaginatedImageQueryList
         """
-        obj = self.image_queries_api.list_image_queries(page=page, page_size=page_size)
+        obj = self.image_queries_api.list_image_queries(
+            page=page, page_size=page_size, _request_timeout=DEFAULT_REQUEST_TIMEOUT
+        )
         image_queries = PaginatedImageQueryList.parse_obj(obj.to_dict())
         if image_queries.results is not None:
             image_queries.results = [self._fixup_image_query(iq) for iq in image_queries.results]
@@ -385,6 +394,8 @@ def submit_image_query(  # noqa: PLR0913  # pylint: disable=too-many-arguments, t
             # url- and base64-encode the metadata.
             params["metadata"] = url_encode_dict(metadata, name="metadata", size_limit_bytes=1024)
 
+        params["_request_timeout"] = DEFAULT_REQUEST_TIMEOUT
+
         # If no inspection_id is provided, we submit the image query using image_queries_api (autogenerated via OpenAPI)
         # However, our autogenerated code does not currently support inspection_id, so if an inspection_id was
         # provided, we use the private API client instead.
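Taken together, the client.py changes put a roughly 5 second ceiling on every API call the SDK makes. A hedged usage sketch of the effect from a caller's point of view (detector name, query, and image path are illustrative; the exact exception raised depends on the urllib3/OpenAPI stack):

    from groundlight import Groundlight

    gl = Groundlight()  # assumes GROUNDLIGHT_API_TOKEN is set in the environment
    detector = gl.get_or_create_detector(name="doorway", query="Is the door open?")

    try:
        # With DEFAULT_REQUEST_TIMEOUT in effect, a black-holed IP or hung
        # server should surface as an error in ~5s instead of >8 minutes.
        iq = gl.submit_image_query(detector=detector, image="door.jpeg")
        print(iq.result)
    except Exception as err:
        print(f"Failed fast: {err}")
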
10 changes: 7 additions & 3 deletions src/groundlight/internalapi.py
@@ -1,3 +1,4 @@
+from datetime import datetime
 import json
 import logging
 import os
@@ -128,6 +129,8 @@ def decorated(*args, **kwargs):  # pylint: disable=inconsistent-return-statements
 
             while retry_count <= self.max_retries:
                 try:
+                    friendly_datestamp = datetime.now().isoformat()
+                    print(f"{friendly_datestamp} Trying {function.__name__} with args={args} kwargs={kwargs}")
                     return function(*args, **kwargs)
                 except ApiException as e:
                     is_retryable = (e.status is not None) and (e.status in self.status_code_range)
@@ -139,15 +142,16 @@ def decorated(*args, **kwargs):  # pylint: disable=inconsistent-return-statements
                     if is_retryable:
                         status_code = e.status
                         if status_code in self.status_code_range:
+                            # This is implementing a full jitter strategy
+                            random_delay = random.uniform(0, delay)
                             logger.warning(
                                 (
                                     f"Current HTTP response status: {status_code}. "
-                                    f"Remaining retries: {self.max_retries - retry_count}"
+                                    f"Remaining retries: {self.max_retries - retry_count}. "
+                                    f"Delaying {random_delay:.1f}s before retrying."
                                 ),
                                 exc_info=True,
                             )
-                            # This is implementing a full jitter strategy
-                            random_delay = random.uniform(0, delay)
                             time.sleep(random_delay)
 
                             retry_count += 1
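
"Full jitter" above refers to the backoff scheme where each sleep is drawn uniformly from [0, delay] while the delay ceiling grows between attempts, which avoids synchronized retry stampedes. A self-contained sketch of the idea; the constants are illustrative, not the SDK's actual settings:

    import random
    import time

    def call_with_full_jitter(fn, max_retries=3, initial_delay=0.2, backoff=3.0):
        """Call fn(), retrying failures with full-jitter exponential backoff."""
        delay = initial_delay
        for attempt in range(max_retries + 1):
            try:
                return fn()
            except Exception:
                if attempt == max_retries:
                    raise  # out of retries; propagate the last error
                # Full jitter: sleep anywhere in [0, delay], then raise the ceiling.
                time.sleep(random.uniform(0, delay))
                delay *= backoff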
