Revert "Add cache to all-transactions endpoint (#2190)"
This reverts commit fb94983.
falvaradorodriguez committed Aug 22, 2024
1 parent 4380a07 commit 22ba79d
Showing 4 changed files with 2 additions and 195 deletions.
19 changes: 1 addition & 18 deletions safe_transaction_service/history/services/transaction_service.py
@@ -7,7 +7,7 @@
from django.db.models import QuerySet
from django.utils import timezone

from eth_typing import ChecksumAddress, HexStr
from eth_typing import HexStr
from redis import Redis

from gnosis.eth import EthereumClient, get_auto_ethereum_client
@@ -103,23 +103,6 @@ def store_txs_in_cache(
pipe.expire(key, 60 * 60) # Expire in one hour
pipe.execute()

def get_all_txs_cache_hash_key(self, safe_address: ChecksumAddress) -> str:
"""
Retrieves a Redis hash key for the provided Safe address that groups several fields together, so when something changes for that address everything in the cache gets invalidated at once.
https://redis.io/docs/latest/develop/data-types/hashes/
:param safe_address:
:return: cache hash key
"""
return f"all-txs:{safe_address}"

def del_all_txs_cache_hash_key(self, safe_address: ChecksumAddress) -> None:
"""
Deletes the hash for a specific Safe address, invalidating the all-transactions cache related to that Safe at once.
:param safe_address:
:return:
"""
self.redis.unlink(self.get_all_txs_cache_hash_key(safe_address))

# End of cache methods ----------------------------

def get_all_tx_identifiers(
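For context, the reverted methods implement a cache-grouping pattern: every cached page for a Safe is stored as a field of a single Redis hash, so deleting that one hash invalidates all cached pages for that address at once. A minimal standalone sketch of the pattern (assuming a local Redis instance; the function names here are illustrative, not the service's API):

```python
import pickle

from redis import Redis

redis = Redis()  # assumes a local Redis instance


def all_txs_cache_hash_key(safe_address: str) -> str:
    # One hash per Safe groups every cached page under a single key
    return f"all-txs:{safe_address}"


def cache_page(safe_address: str, limit: int, offset: int, ordering: str,
               page: list, count: int) -> None:
    field = f"{limit}:{offset}:{ordering}"  # one field per pagination variant
    key = all_txs_cache_hash_key(safe_address)
    redis.hset(key, field, pickle.dumps((page, count)))
    redis.expire(key, 60 * 60)  # the whole hash expires together, in one hour


def invalidate_all_pages(safe_address: str) -> None:
    # UNLINK drops the hash asynchronously, wiping every cached page at once
    redis.unlink(all_txs_cache_hash_key(safe_address))
```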
22 changes: 0 additions & 22 deletions safe_transaction_service/history/signals.py
@@ -24,7 +24,6 @@
SafeStatus,
TokenTransfer,
)
from .services import TransactionServiceProvider
from .services.notification_service import build_event_payload, is_relevant_notification

logger = getLogger(__name__)
@@ -146,23 +145,6 @@ def get_safe_addresses_involved_from_db_instance(
return addresses


def _clean_all_txs_cache(
instance: Union[
TokenTransfer,
InternalTx,
MultisigConfirmation,
MultisigTransaction,
]
) -> None:
"""
Remove the all-transactions cache entries related to the modified instance
:param instance:
"""
transaction_service = TransactionServiceProvider()
for address in get_safe_addresses_involved_from_db_instance(instance):
transaction_service.del_all_txs_cache_hash_key(address)


def _process_notification_event(
sender: Type[Model],
instance: Union[
@@ -179,10 +161,6 @@ def _process_notification_event(
created and deleted
), "An instance cannot be created and deleted at the same time"

# Ignore SafeContract because it does not affect the all-transactions cache.
if sender != SafeContract:
_clean_all_txs_cache(instance)

logger.debug("Start building payloads for created=%s object=%s", created, instance)
payloads = build_event_payload(sender, instance, deleted=deleted)
logger.debug(
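The removed `_clean_all_txs_cache` call ran inside the generic `_process_notification_event` receiver, so every relevant model change invalidated the per-Safe hash. A hedged sketch of the same idea as a standalone Django receiver (the receiver name and single-model wiring are illustrative; the real hook covered several models):

```python
from django.db.models.signals import post_save
from django.dispatch import receiver

from .models import MultisigTransaction
from .services import TransactionServiceProvider


@receiver(post_save, sender=MultisigTransaction)
def invalidate_all_txs_cache(sender, instance: MultisigTransaction, **kwargs) -> None:
    # Any created or updated transaction wipes every cached page for its Safe
    TransactionServiceProvider().del_all_txs_cache_hash_key(instance.safe)
```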
94 changes: 0 additions & 94 deletions safe_transaction_service/history/tests/test_views.py
@@ -1,6 +1,5 @@
import json
import logging
import pickle
from unittest import mock
from unittest.mock import MagicMock, PropertyMock

@@ -404,99 +403,6 @@ def test_all_transactions_ordering(self):
multisig_transaction.ethereum_tx_id,
)

def test_all_transactions_cache_view(self):
safe_address = "0x54f3c8e4Bf7bFDFF39B36d1FAE4e5ceBdD93C6A9"
# Older transactions
factory_transactions = [
MultisigTransactionFactory(safe=safe_address),
MultisigTransactionFactory(safe=safe_address),
]
# all-txs:{safe_address}
cache_hash_key = f"all-txs:{safe_address}"
# {limit}:{offset}:{ordering}
cache_query_field = "10:0:timestamp"
redis = get_redis()
redis.unlink(cache_hash_key)
cache_result = redis.hget(cache_hash_key, cache_query_field)
# Should be empty at the beginning
self.assertIsNone(cache_result)

response = self.client.get(
reverse("v1:history:all-transactions", args=(safe_address,))
+ "?ordering=timestamp"
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["count"], 2)

cache_result = redis.hget(cache_hash_key, cache_query_field)
# Should be stored in the Redis cache
self.assertIsNotNone(cache_result)
# Cache should contain the expected values
cache_values, cache_count = pickle.loads(cache_result)
self.assertEqual(cache_count, 2)
for cache_value, factory_transaction in zip(cache_values, factory_transactions):
self.assertEqual(
cache_value.ethereum_tx.tx_hash, factory_transaction.ethereum_tx.tx_hash
)
self.assertEqual(
cache_value.ethereum_tx.created, factory_transaction.ethereum_tx.created
)
self.assertEqual(
cache_value.ethereum_tx.execution_date,
factory_transaction.ethereum_tx.execution_date,
)
self.assertEqual(
cache_value.ethereum_tx.block.number,
factory_transaction.ethereum_tx.block_id,
)
self.assertEqual(
cache_value.ethereum_tx.nonce, factory_transaction.ethereum_tx.nonce
)
# Overwrite the cached entry with an empty list
redis.hset(cache_hash_key, cache_query_field, pickle.dumps(([], 0)))
redis.expire(cache_hash_key, 60 * 10)
response = self.client.get(
reverse("v1:history:all-transactions", args=(safe_address,))
+ "?ordering=timestamp"
)
# Response should be returned from cache
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["count"], 0)

# Cache should be invalidated because there is a new transaction
MultisigTransactionFactory(safe=safe_address)
response = self.client.get(
reverse("v1:history:all-transactions", args=(safe_address,))
+ "?ordering=timestamp"
)
self.assertEqual(response.data["count"], 3)

def test_all_transactions_cache_limit_offset_view(self):
"""
Test limit and offset
"""
safe_address = "0x54f3c8e4Bf7bFDFF39B36d1FAE4e5ceBdD93C6A9"
number_transactions = 100
for _ in range(number_transactions):
MultisigTransactionFactory(safe=safe_address)

for limit, offset in ((57, 12), (13, 24)):
with self.subTest(limit=limit, offset=offset):
# all-txs:{safe_address}
cache_hash_key = f"all-txs:{safe_address}"
# {limit}:{offset}:{ordering}
cache_query_field = f"{limit}:{offset}:timestamp"
redis = get_redis()
self.assertFalse(redis.hexists(cache_hash_key, cache_query_field))

response = self.client.get(
reverse("v1:history:all-transactions", args=(safe_address,))
+ f"?ordering=timestamp&limit={limit}&offset={offset}"
)
self.assertEqual(response.data["count"], number_transactions)
self.assertEqual(len(response.data["results"]), limit)
self.assertTrue(redis.hexists(cache_hash_key, cache_query_field))

def test_all_transactions_wrong_transfer_type_view(self):
# No token in database, so we must trust the event
safe_address = Account.create().address
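The removed tests pin down the cache layout: hash key ``all-txs:{safe_address}``, field ``{limit}:{offset}:{ordering}``, and a pickled ``(results, count)`` tuple as the value. A quick illustrative round-trip of that layout (assuming a local Redis; the address is the one used in the tests):

```python
import pickle

from redis import Redis

redis = Redis()
cache_hash_key = "all-txs:0x54f3c8e4Bf7bFDFF39B36d1FAE4e5ceBdD93C6A9"
cache_query_field = "10:0:timestamp"  # limit=10, offset=0, ordering=timestamp

# Store an empty page with count 0, as the test does to prove cache hits
redis.hset(cache_hash_key, cache_query_field, pickle.dumps(([], 0)))
results, count = pickle.loads(redis.hget(cache_hash_key, cache_query_field))
assert results == [] and count == 0
```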
62 changes: 1 addition & 61 deletions safe_transaction_service/history/views.py
@@ -1,6 +1,5 @@
import hashlib
import logging
import pickle
from typing import Any, Dict, Optional, Tuple

from django.conf import settings
@@ -35,7 +34,6 @@
from safe_transaction_service.utils.ethereum import get_chain_id
from safe_transaction_service.utils.utils import parse_boolean_query_param

from ..utils.redis import get_redis
from . import filters, pagination, serializers
from .helpers import add_tokens_to_transfers, is_valid_unique_transfer_id
from .models import (
@@ -313,64 +311,6 @@ def get_page_tx_identifiers(

return page

def get_cached_page_tx_identifiers(
self,
safe: ChecksumAddress,
ordering: Optional[str],
limit: int,
offset: int,
) -> Optional[Response]:
"""
Cache for tx identifiers. A quick ``SQL COUNT`` on all the transactions/events
tables will determine whether the cache for the provided values is still valid
:param safe:
:param ordering:
:param limit:
:param offset:
:return:
"""
transaction_service = TransactionServiceProvider()
cache_timeout = settings.CACHE_ALL_TXS_VIEW
redis = get_redis()

# Get all relevant elements for a Safe to be cached
cache_hash_key = transaction_service.get_all_txs_cache_hash_key(safe)
cache_query_field = f"{limit}:{offset}:{ordering}"
lock_key = f"locks:{cache_hash_key}:{cache_query_field}"

logger.debug(
"%s: All txs from identifiers for Safe=%s lock-key=%s",
self.__class__.__name__,
safe,
lock_key,
)
if not cache_timeout:
# Cache disabled
return self.get_page_tx_identifiers(safe, ordering, limit, offset)

with redis.lock(
lock_key,
timeout=settings.GUNICORN_REQUEST_TIMEOUT,  # This prevents a service restart from leaving a lock forever
):
if result := redis.hget(cache_hash_key, cache_query_field):
# Count needs to be retrieved to set up the paginator
page, count = pickle.loads(result)
# Setting the paginator like this is not very elegant and needs to be tested thoroughly
self.paginator.count = count
self.paginator.limit = limit
self.paginator.offset = offset
self.paginator.request = self.request
return page

page = self.get_page_tx_identifiers(safe, ordering, limit, offset)
redis.hset(
cache_hash_key,
cache_query_field,
pickle.dumps((page, self.paginator.count)),
)
redis.expire(cache_hash_key, cache_timeout)
return page

def list(self, request, *args, **kwargs):
transaction_service = TransactionServiceProvider()
safe = self.kwargs["address"]
@@ -379,7 +319,7 @@ def list(self, request, *args, **kwargs):
list_pagination = DummyPagination(self.request)
limit, offset = list_pagination.limit, list_pagination.offset

tx_identifiers_page = self.get_cached_page_tx_identifiers(
tx_identifiers_page = self.get_page_tx_identifiers(
safe, ordering, limit, offset
)
if not tx_identifiers_page:
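The removed `get_cached_page_tx_identifiers` implemented a lock-protected read-through cache: acquire a per-query Redis lock, serve the pickled `(page, count)` if present, otherwise compute the page, store it in the per-Safe hash, and refresh the hash TTL. A condensed sketch of that flow (assuming a `compute_page` callable and a local Redis; simplified from the reverted code):

```python
import pickle
from typing import Any, Callable, Tuple

from redis import Redis

redis = Redis()


def get_cached_page(
    hash_key: str,
    field: str,
    compute_page: Callable[[], Tuple[Any, int]],
    cache_timeout: int,
    lock_timeout: int = 60,
) -> Tuple[Any, int]:
    # Per-query lock so concurrent identical requests compute the page only once
    with redis.lock(f"locks:{hash_key}:{field}", timeout=lock_timeout):
        if result := redis.hget(hash_key, field):
            return pickle.loads(result)  # cache hit: (page, count)
        page, count = compute_page()  # cache miss: hit the database
        redis.hset(hash_key, field, pickle.dumps((page, count)))
        redis.expire(hash_key, cache_timeout)  # TTL covers the whole hash
        return page, count
```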
