Merge pull request #386 from kingosticks/look_many

Batched method performance improvements
kingosticks authored Apr 16, 2024
2 parents aad7ece + 4dee212 commit d432ba4
Showing 16 changed files with 802 additions and 429 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -23,7 +23,7 @@ classifiers = [
]
dynamic = ["version"]
dependencies = [
"mopidy >= 3.4.0",
"mopidy >= 4.0.0a1",
"pykka >= 4.0",
"requests >= 2.20.0",
"setuptools >= 66",
7 changes: 4 additions & 3 deletions src/mopidy_spotify/browse.py
@@ -108,9 +108,10 @@ def _browse_album(web_client, uri):
logger.info(f"Failed to browse {uri!r}: {exc}")
return []

web_album = web_client.get_album(link)
web_tracks = web_album.get("tracks", {}).get("items", [])
return list(translator.web_to_track_refs(web_tracks))
for web_album in web_client.get_albums([link]):
web_tracks = web_album.get("tracks", {}).get("items", [])
return list(translator.web_to_track_refs(web_tracks))
return []


def _browse_artist(web_client, uri):
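Browsing an album now goes through a batched albums lookup instead of a per-album request. Below is a minimal sketch of what such a batched fetch could look like, assuming get_albums (provided by the web client, not shown in this excerpt) accepts WebLink objects and yields the album dicts that resolved; the chunk size and error handling here are assumptions, not the actual implementation.

def get_albums_sketch(web_client, links, chunk_size=20):
    # Hypothetical helper, not the actual mopidy_spotify web client code.
    # Spotify's /albums endpoint accepts several IDs per request, so one
    # round trip can cover a whole batch of album links.
    ids = [link.id for link in links]
    for i in range(0, len(ids), chunk_size):
        chunk = ids[i : i + chunk_size]
        data = web_client.get("albums", params={"ids": ",".join(chunk)})
        for album in data.get("albums") or []:
            if album:  # the API returns null for IDs it could not resolve
                yield album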
145 changes: 54 additions & 91 deletions src/mopidy_spotify/images.py
@@ -1,12 +1,16 @@
import itertools
import logging
import operator
import urllib.parse

from mopidy_spotify.browse import BROWSE_DIR_URIS
from mopidy_spotify.translator import web_to_image
from mopidy_spotify.utils import group_by_type
from mopidy_spotify.web import LinkType, WebLink

_API_MAX_IDS_PER_REQUEST = 50
SUPPORTED_TYPES = (
LinkType.TRACK,
LinkType.ALBUM,
LinkType.ARTIST,
LinkType.PLAYLIST,
)

_cache = {} # (type, id) -> [Image(), ...]

@@ -15,113 +19,72 @@

def get_images(web_client, uris):
result = {}
uri_type_getter = operator.itemgetter("type")
uris = (_parse_uri(u) for u in uris)
uris = sorted((u for u in uris if u), key=uri_type_getter)
for uri_type, group in itertools.groupby(uris, uri_type_getter):
links = (_parse_uri(u) for u in uris)
for link_type, link_group in group_by_type(links):
batch = []
for uri in group:
if uri["key"] in _cache:
result[uri["uri"]] = _cache[uri["key"]]
elif uri_type == "playlist":
result.update(_process_uri(web_client, uri))
for link in link_group:
key = _make_cache_key(link)
if key in _cache:
result[link.uri] = _cache[key]
elif link_type == LinkType.PLAYLIST:
result.update(_process_one(web_client, link))
else:
batch.append(uri)
if len(batch) >= _API_MAX_IDS_PER_REQUEST:
result.update(_process_uris(web_client, uri_type, batch))
batch = []
result.update(_process_uris(web_client, uri_type, batch))
batch.append(link)
result.update(_process_many(web_client, link_type, batch))
return result
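The sorting and itertools.groupby grouping the old code did inline now lives in a shared group_by_type helper in utils.py (not shown in this excerpt). A rough sketch of the behaviour the loop above appears to rely on, assuming the helper drops links that failed to parse and yields (LinkType, links) pairs:

from collections import defaultdict

def group_by_type_sketch(links):
    # Hypothetical stand-in for mopidy_spotify.utils.group_by_type.
    # Collect parsable links per LinkType so each Web API batch stays
    # homogeneous (all tracks, all albums, and so on).
    grouped = defaultdict(list)
    for link in links:
        if link is not None:  # _parse_uri returns None for unusable URIs
            grouped[link.type].append(link)
    return grouped.items()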


def _make_cache_key(link):
return (link.type, link.id)


def _parse_uri(uri):
if uri in BROWSE_DIR_URIS:
return None # These are internal to the extension.
try:
parsed_uri = urllib.parse.urlparse(uri)
uri_type, uri_id = None, None

match parsed_uri.scheme:
case "spotify":
match parsed_uri.path.split(":"):
case uri_type, uri_id, *_:
pass
case _:
raise ValueError("Too few arguments") # noqa: TRY301
case "http" | "https":
if parsed_uri.netloc in ("open.spotify.com", "play.spotify.com"):
uri_type, uri_id = parsed_uri.path.split("/")[1:3]

supported_types = ("track", "album", "artist", "playlist")
if uri_type:
if uri_type not in supported_types:
logger.warning(f"Unsupported image type '{uri_type}' in {uri!r}")
return None
if uri_id:
return {
"uri": uri,
"type": uri_type,
"id": uri_id,
"key": (uri_type, uri_id),
}
raise ValueError("Unknown error") # noqa: TRY301
link = WebLink.from_uri(uri)
if link.type not in SUPPORTED_TYPES:
raise ValueError(f"Unsupported image type '{link.type}' in {uri!r}") # noqa: TRY301
if not link.id:
raise ValueError("ID missing") # noqa: TRY301
except Exception as e:
logger.exception(f"Could not parse {uri!r} as a Spotify URI ({e!s})") # noqa: TRY401
return None

return link

def _process_uri(web_client, uri):
data = web_client.get(f"{uri['type']}s/{uri['id']}")
_cache[uri["key"]] = tuple(web_to_image(i) for i in data.get("images") or [])
return {uri["uri"]: _cache[uri["key"]]}

def _process_one(web_client, link):
data = web_client.get(f"{link.type}s/{link.id}")
key = _make_cache_key(link)
_cache[key] = tuple(web_to_image(i) for i in data.get("images") or [])
return {link.uri: _cache[key]}

def _process_uris( # noqa: C901

def _process_many(
web_client,
uri_type,
uris,
link_type,
links,
):
result = {}
ids = [u["id"] for u in uris]
ids_to_uris = {u["id"]: u for u in uris}

if not uris:
if not links:
return result

data = web_client.get(uri_type + "s", params={"ids": ",".join(ids)})
for item in (
data.get(
uri_type + "s",
)
or []
):
if not item:
continue

if "linked_from" in item:
item_id = item["linked_from"].get("id")
else:
item_id = item.get("id")
uri = ids_to_uris.get(item_id)
if not uri:
continue

if uri["key"] not in _cache:
if uri_type == "track":
if "album" not in item:
continue
album = _parse_uri(item["album"].get("uri"))
if not album:
continue
album_key = album["key"]
if album_key not in _cache:
_cache[album_key] = tuple(
web_to_image(i) for i in item["album"].get("images") or []
)
_cache[uri["key"]] = _cache[album_key]
else:
_cache[uri["key"]] = tuple(
web_to_image(i) for i in item.get("images") or []
for link, item in web_client.get_batch(link_type, links):
key = _make_cache_key(link)
if link_type == LinkType.TRACK:
if not (album_item := item.get("album")):
continue
if not (album_link := _parse_uri(album_item.get("uri"))):
continue
album_key = _make_cache_key(album_link)
if album_key not in _cache:
_cache[album_key] = tuple(
web_to_image(i) for i in album_item.get("images") or []
)
result[uri["uri"]] = _cache[uri["key"]]
_cache[key] = _cache[album_key]
else:
_cache[key] = tuple(web_to_image(i) for i in item.get("images") or [])
result[link.uri] = _cache[key]

return result
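The chunking into requests of at most 50 IDs, and the matching of returned items back to the requested links (including relinked tracks reported under "linked_from"), move out of this module and into a get_batch method on the web client. A hedged sketch of what such a method might do, reconstructed from the code removed above; the actual signature and per-endpoint limits live in the web client code, which is not shown in this excerpt.

_MAX_IDS_PER_REQUEST = 50  # the cap the removed code in this module used

def get_batch_sketch(web_client, link_type, links):
    # Hypothetical stand-in for web_client.get_batch(link_type, links).
    # Assumes link_type formats as the endpoint name ("track", "album", ...),
    # as the f-string in _process_one above already relies on.
    by_id = {link.id: link for link in links}
    ids = list(by_id)
    for i in range(0, len(ids), _MAX_IDS_PER_REQUEST):
        chunk = ids[i : i + _MAX_IDS_PER_REQUEST]
        data = web_client.get(f"{link_type}s", params={"ids": ",".join(chunk)})
        for item in data.get(f"{link_type}s") or []:
            if not item:
                continue
            # Relinked tracks carry the originally requested ID in "linked_from".
            item_id = item.get("linked_from", {}).get("id") or item.get("id")
            link = by_id.get(item_id)
            if link is not None:
                yield link, item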
4 changes: 2 additions & 2 deletions src/mopidy_spotify/library.py
@@ -34,8 +34,8 @@ def get_distinct(self, field, query=None):
def get_images(self, uris):
return images.get_images(self._backend._web_client, uris)

def lookup(self, uri):
return lookup.lookup(self._config, self._backend._web_client, uri)
def lookup_many(self, uris):
return lookup.lookup(self._config, self._backend._web_client, uris)

def search(
self,
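Renaming lookup to lookup_many follows the batched library lookup API in Mopidy 4, which is presumably why the dependency above moved to mopidy >= 4.0.0a1. A hedged usage sketch with a hypothetical provider instance and placeholder URIs, assuming the method returns a mapping from each requested URI to its list of tracks:

# library_provider stands in for a SpotifyLibraryProvider instance; the URIs
# below are placeholders. The exact return shape is defined by Mopidy 4's
# backends.LibraryProvider contract, not by this diff.
uris = [
    "spotify:track:4uLU6hMCjMI75M1A2tKUQC",
    "spotify:album:6QPkyl04rXwTGlGlcYaRoW",
]
result = library_provider.lookup_many(uris)
for uri, tracks in result.items():
    print(uri, [track.name for track in tracks])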