GBFSMetadata: add HTTP cache (#4322)
AntoineAugusti authored Nov 19, 2024
1 parent b6a936b commit 72c1c0b
Showing 2 changed files with 31 additions and 11 deletions.
24 changes: 16 additions & 8 deletions apps/transport/lib/transport/gbfs_metadata.ex
@@ -41,7 +41,7 @@ defmodule Transport.GBFSMetadata do
"""
@impl Transport.GBFSMetadata.Wrapper
def compute_feed_metadata(url) do
- {:ok, %HTTPoison.Response{status_code: 200, body: body}} = http_client().get(url)
+ {:ok, %HTTPoison.Response{status_code: 200, body: body}} = cached_http_get(url)
{:ok, json} = Jason.decode(body)

# we compute the feed delay before the rest for accuracy
@@ -122,7 +122,7 @@ defmodule Transport.GBFSMetadata do
defp feed_ttl(value) when is_integer(value) and value >= 0, do: value

defp feed_ttl(feed_url) when is_binary(feed_url) do
- with {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- http_client().get(feed_url),
+ with {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- cached_http_get(feed_url),
{:ok, json} <- Jason.decode(body) do
json["ttl"]
else
@@ -196,7 +196,7 @@ defmodule Transport.GBFSMetadata do
feed_url = feed_url_by_name(payload, :station_status)

with {:feed_exists, true} <- {:feed_exists, not is_nil(feed_url)},
- {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- http_client().get(feed_url),
+ {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- cached_http_get(feed_url),
{:ok, json} <- Jason.decode(body) do
stations = Enum.reject(json["data"]["stations"], &unrealistic_station_data?/1)

@@ -224,7 +224,7 @@ defmodule Transport.GBFSMetadata do
feed_url = feed_url_by_name(payload, :vehicle_status)

with {:feed_exists, true} <- {:feed_exists, not is_nil(feed_url)},
- {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- http_client().get(feed_url),
+ {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- cached_http_get(feed_url),
{:ok, json} <- Jason.decode(body) do
vehicles = json["data"]["vehicles"] || json["data"]["bikes"]
nb_vehicles = Enum.count(vehicles)
@@ -274,7 +274,7 @@ defmodule Transport.GBFSMetadata do
feed_url = feed_url_by_name(payload, :system_information)

with {:feed_exists, true} <- {:feed_exists, not is_nil(feed_url)},
- {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- http_client().get(feed_url),
+ {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- cached_http_get(feed_url),
{:ok, json} <- Jason.decode(body) do
transform_localized_strings(json["data"])
else
@@ -320,7 +320,7 @@ defmodule Transport.GBFSMetadata do
# > If this file is not included, then all vehicles in the feed are assumed to be non-motorized bicycles.
["bicycle"]
else
- with {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- http_client().get(feed_url),
+ with {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- cached_http_get(feed_url),
{:ok, json} <- Jason.decode(body) do
json["data"]["vehicle_types"] |> Enum.map(& &1["form_factor"]) |> Enum.uniq()
else
@@ … @@
else
feed_url = feed_url_by_name(payload, :system_information)

- with {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- http_client().get(feed_url),
+ with {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- cached_http_get(feed_url),
{:ok, json} <- Jason.decode(body) do
get_in(json, ["data", "languages"])
else
@@ … @@
if is_nil(versions_url) do
[Map.get(payload, "version", "1.0")]
else
- with {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- http_client().get(versions_url),
+ with {:ok, %HTTPoison.Response{status_code: 200, body: body}} <- cached_http_get(versions_url),
{:ok, json} <- Jason.decode(body) do
json["data"]["versions"] |> Enum.map(& &1["version"]) |> Enum.sort(:desc)
else
@@ -427,5 +427,13 @@ defmodule Transport.GBFSMetadata do
# https://github.com/MobilityData/gbfs/blob/v1.1/gbfs.md#output-format
defp before_v3?(%{}), do: true

+ defp cached_http_get(url) do
+   Transport.Cache.fetch(
+     "#{__MODULE__}::http_get::#{url}",
+     fn -> http_client().get(url) end,
+     :timer.seconds(30)
+   )
+ end

defp http_client, do: Transport.Shared.Wrapper.HTTPoison.impl()
end
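
For context on the change above: several of these helpers fetch the same sub-feed within one `compute_feed_metadata/1` run (for example `system_information` is read both for the system details and for the languages), so routing every request through `cached_http_get/1` deduplicates those HTTP calls for 30 seconds. Below is a minimal, self-contained sketch of the cache-aside behaviour that helper relies on; the `SimpleTTLCache` module is hypothetical and only stands in for the project's `Transport.Cache.fetch/3`, which is backed by Cachex in production.

```elixir
# Hypothetical illustration only: the commit itself delegates to Transport.Cache.fetch/3.
defmodule SimpleTTLCache do
  use Agent

  def start_link(_opts \\ []) do
    Agent.start_link(fn -> %{} end, name: __MODULE__)
  end

  # Return the cached value for `key` while it is fresh; otherwise run `fun`,
  # store the result with a TTL (in milliseconds), and return it.
  # Note: this read-then-write is not atomic; the real Cachex-backed cache is.
  def fetch(key, fun, ttl_ms) do
    now = System.monotonic_time(:millisecond)

    case Agent.get(__MODULE__, &Map.get(&1, key)) do
      {value, expires_at} when expires_at > now ->
        value

      _stale_or_missing ->
        value = fun.()
        Agent.update(__MODULE__, &Map.put(&1, key, {value, now + ttl_ms}))
        value
    end
  end
end

# Usage mirroring cached_http_get/1: two calls with the same URL within 30 seconds
# result in a single underlying HTTP request.
# SimpleTTLCache.start_link()
# SimpleTTLCache.fetch("http_get::" <> url, fn -> HTTPoison.get(url) end, :timer.seconds(30))
```

Reusing the existing `Transport.Cache` wrapper rather than a hand-rolled map cache also means concurrent callers and expiry are handled by Cachex rather than by the sketch's naive check-then-store.
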
18 changes: 15 additions & 3 deletions apps/transport/test/transport/gbfs_metadata_test.exs
@@ -1,5 +1,6 @@
defmodule Transport.GBFSMetadataTest do
- use ExUnit.Case, async: true
+ # async: false is required because we use real in-memory caching in these tests
+ use ExUnit.Case, async: false
import Mox
import Transport.GBFSMetadata
import ExUnit.CaptureLog
@@ -9,8 +10,20 @@ defmodule Transport.GBFSMetadataTest do

@gbfs_url "https://example.com/gbfs.json"

+ setup :set_mox_global
setup :verify_on_exit!

+ setup do
+   # Use a real in-memory cache in these tests to exercise the caching mechanism
+   old_value = Application.fetch_env!(:transport, :cache_impl)
+   Application.put_env(:transport, :cache_impl, Transport.Cache.Cachex)
+
+   on_exit(fn ->
+     Application.put_env(:transport, :cache_impl, old_value)
+     Cachex.reset(Transport.Cache.Cachex.cache_name())
+   end)
+ end

describe "Compute GBFS metadata for a feed" do
test "for a stations feed with a single version" do
setup_feeds([:gbfs, :system_information, :station_information, :station_status])
@@ -60,8 +73,7 @@ defmodule Transport.GBFSMetadataTest do
:system_information,
:vehicle_types,
:free_bike_status,
- :station_status,
- :free_bike_status
+ :station_status
])

setup_validation_result(

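The test setup above works because the cache implementation is resolved through the application environment at call time, so a test can temporarily swap in the real Cachex-backed cache and restore the previous implementation in `on_exit/1`. A rough sketch of that dispatch pattern follows; the module and app names (`MyApp.Cache`, `:my_app`) are hypothetical and only illustrate the mechanism assumed by the `:cache_impl` config key.

```elixir
# Hypothetical sketch of resolving a cache implementation via the application
# environment, the mechanism the test setup above swaps at runtime.
defmodule MyApp.Cache do
  @callback fetch(key :: binary(), (-> term()), ttl_ms :: non_neg_integer()) :: term()

  # Looked up at call time (not compile time) so tests can change it per run.
  def impl, do: Application.fetch_env!(:my_app, :cache_impl)

  def fetch(key, fun, ttl_ms), do: impl().fetch(key, fun, ttl_ms)
end

defmodule MyApp.Cache.Null do
  @moduledoc "No-op implementation: never caches, always recomputes."
  @behaviour MyApp.Cache

  @impl true
  def fetch(_key, fun, _ttl_ms), do: fun.()
end
```

With this shape, `Application.put_env(:my_app, :cache_impl, MyApp.Cache.Cachex)` in `setup` plus the `on_exit/1` restore gives each test a real cache without leaking into other suites. Because the environment value and the cache table are shared, mutable state, the suite switches to `async: false`, and `set_mox_global` keeps the Mox stubs visible if the cache executes the HTTP fallback outside the test process.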