Skip to content

Commit

Permalink
Sliding Sync: Retrieve fewer events from DB in sync (#17688)
Browse files Browse the repository at this point in the history
When using a timeline limit of 1 we end up fetching 2 events from the DB
purely to tell if the response was "limited" or not. Let's not do that.
  • Loading branch information
erikjohnston authored Sep 10, 2024
1 parent 515c1cc commit 588e5b5
Show file tree
Hide file tree
Showing 9 changed files with 90 additions and 92 deletions.
1 change: 1 addition & 0 deletions changelog.d/17688.misc
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Speed up sync by pulling out fewer events from the database.
1 change: 1 addition & 0 deletions synapse/handlers/admin.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,7 @@ async def export_user_data(self, user_id: str, writer: "ExfiltrationWriter") ->
(
events,
_,
_,
) = await self._store.paginate_room_events_by_topological_ordering(
room_id=room_id,
from_key=from_key,
Expand Down
2 changes: 2 additions & 0 deletions synapse/handlers/pagination.py
Original file line number Diff line number Diff line change
Expand Up @@ -510,6 +510,7 @@ async def get_messages(
(
events,
next_key,
_,
) = await self.store.paginate_room_events_by_topological_ordering(
room_id=room_id,
from_key=from_token.room_key,
Expand Down Expand Up @@ -588,6 +589,7 @@ async def get_messages(
(
events,
next_key,
_,
) = await self.store.paginate_room_events_by_topological_ordering(
room_id=room_id,
from_key=from_token.room_key,
Expand Down
2 changes: 1 addition & 1 deletion synapse/handlers/room.py
Original file line number Diff line number Diff line change
Expand Up @@ -1753,7 +1753,7 @@ async def get_new_events(
)

events = list(room_events)
events.extend(e for evs, _ in room_to_events.values() for e in evs)
events.extend(e for evs, _, _ in room_to_events.values() for e in evs)

# We know stream_ordering must be not None here, as its been
# persisted, but mypy doesn't know that
Expand Down
20 changes: 2 additions & 18 deletions synapse/handlers/sliding_sync/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,6 @@
MutableStateMap,
PersistedEventPosition,
Requester,
RoomStreamToken,
SlidingSyncStreamToken,
StateMap,
StreamKeyType,
Expand Down Expand Up @@ -632,36 +631,21 @@ async def get_room_sync_data(
# Use `stream_ordering` for updates
else paginate_room_events_by_stream_ordering
)
timeline_events, new_room_key = await pagination_method(
timeline_events, new_room_key, limited = await pagination_method(
room_id=room_id,
# The bounds are reversed so we can paginate backwards
# (from newer to older events) starting at to_bound.
# This ensures we fill the `limit` with the newest events first,
from_key=to_bound,
to_key=timeline_from_bound,
direction=Direction.BACKWARDS,
# We add one so we can determine if there are enough events to saturate
# the limit or not (see `limited`)
limit=room_sync_config.timeline_limit + 1,
limit=room_sync_config.timeline_limit,
)

# We want to return the events in ascending order (the last event is the
# most recent).
timeline_events.reverse()

# Determine our `limited` status based on the timeline. We do this before
# filtering the events so we can accurately determine if there is more to
# paginate even if we filter out some/all events.
if len(timeline_events) > room_sync_config.timeline_limit:
limited = True
# Get rid of that extra "+ 1" event because we only used it to determine
# if we hit the limit or not
timeline_events = timeline_events[-room_sync_config.timeline_limit :]
assert timeline_events[0].internal_metadata.stream_ordering
new_room_key = RoomStreamToken(
stream=timeline_events[0].internal_metadata.stream_ordering - 1
)

# Make sure we don't expose any events that the client shouldn't see
timeline_events = await filter_events_for_client(
self.storage_controllers,
Expand Down
11 changes: 3 additions & 8 deletions synapse/handlers/sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -906,17 +906,15 @@ async def _load_filtered_recents(
# Use `stream_ordering` for updates
else paginate_room_events_by_stream_ordering
)
events, end_key = await pagination_method(
events, end_key, limited = await pagination_method(
room_id=room_id,
# The bounds are reversed so we can paginate backwards
# (from newer to older events) starting at to_bound.
# This ensures we fill the `limit` with the newest events first,
from_key=end_key,
to_key=since_key,
direction=Direction.BACKWARDS,
# We add one so we can determine if there are enough events to saturate
# the limit or not (see `limited`)
limit=load_limit + 1,
limit=load_limit,
)
# We want to return the events in ascending order (the last event is the
# most recent).
Expand Down Expand Up @@ -971,9 +969,6 @@ async def _load_filtered_recents(
loaded_recents.extend(recents)
recents = loaded_recents

if len(events) <= load_limit:
limited = False
break
max_repeat -= 1

if len(recents) > timeline_limit:
Expand Down Expand Up @@ -2608,7 +2603,7 @@ async def _get_room_changes_for_incremental_sync(

newly_joined = room_id in newly_joined_rooms
if room_entry:
events, start_key = room_entry
events, start_key, _ = room_entry
# We want to return the events in ascending order (the last event is the
# most recent).
events.reverse()
Expand Down
Loading

0 comments on commit 588e5b5

Please sign in to comment.