ref(deletes): search-issues -> generic-events (#6503)
Updating for consistency with naming conventions. getsentry/sentry-kafka-schemas#347
will need to land before we can merge this PR (and this PR will then need to be
updated with the released schema version).
MeredithAnya authored Nov 4, 2024
1 parent b8a94db commit cf25e38
Showing 4 changed files with 5 additions and 5 deletions.
2 changes: 1 addition & 1 deletion requirements.txt
@@ -28,7 +28,7 @@ python-dateutil==2.8.2
 python-rapidjson==1.8
 redis==4.5.4
 sentry-arroyo==2.17.6
-sentry-kafka-schemas==0.1.115
+sentry-kafka-schemas==0.1.117
 sentry-redis-tools==0.3.0
 sentry-relay==0.9.2
 sentry-sdk==2.8.0
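This version bump is what the dependency on getsentry/sentry-kafka-schemas#347 refers to. As a quick sanity check, a codec for the renamed topic should now resolve from the schema package; a minimal sketch, assuming the 0.1.117 release actually registers the new topic name (if it does not, get_codec raises the package's schema-not-found error):

from sentry_kafka_schemas import get_codec

# Resolve the codec registered for the renamed deletion topic; the topic
# string comes from the topics.py diff below.
codec = get_codec("snuba-lw-deletions-generic-events")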
2 changes: 1 addition & 1 deletion snuba/utils/streams/topics.py
@@ -70,7 +70,7 @@ class Topic(Enum):
     METRICS_SUMMARIES = "snuba-metrics-summaries"
     EAP_MUTATIONS = "snuba-eap-mutations"

-    LW_DELETIONS_SEARCH_ISSUES = "snuba-lw-deletions-search-issues"
+    LW_DELETIONS_GENERIC_EVENTS = "snuba-lw-deletions-generic-events"

     COGS_SHARED_RESOURCES_USAGE = "shared-resources-usage"
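Both the enum member and its physical topic name change here, so every call site that references the member picks up the rename automatically. A standalone sketch of that relationship, with the Enum reduced to the one member relevant to this commit:

from enum import Enum

class Topic(Enum):
    # Renamed member; the value is the physical Kafka topic name.
    LW_DELETIONS_GENERIC_EVENTS = "snuba-lw-deletions-generic-events"

assert Topic.LW_DELETIONS_GENERIC_EVENTS.value == "snuba-lw-deletions-generic-events"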
2 changes: 1 addition & 1 deletion snuba/web/bulk_delete_query.py
@@ -48,7 +48,7 @@ class DeleteQueryMessage(TypedDict):

 PRODUCER_MAP: MutableMapping[str, Producer] = {}
 STORAGE_TOPIC: Mapping[str, Topic] = {
-    StorageKey.SEARCH_ISSUES.value: Topic.LW_DELETIONS_SEARCH_ISSUES
+    StorageKey.SEARCH_ISSUES.value: Topic.LW_DELETIONS_GENERIC_EVENTS
}
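STORAGE_TOPIC routes each storage's lightweight deletes to a Kafka topic; search_issues is currently the only entry, and it now points at the generic-events topic. A standalone sketch of the lookup (the plain-string key and helper function are illustrative, not Snuba's API; StorageKey.SEARCH_ISSUES.value is the string "search_issues"):

from enum import Enum

class Topic(Enum):
    LW_DELETIONS_GENERIC_EVENTS = "snuba-lw-deletions-generic-events"

# Mirrors the mapping in bulk_delete_query.py above.
STORAGE_TOPIC = {"search_issues": Topic.LW_DELETIONS_GENERIC_EVENTS}

def topic_for_storage(storage_key: str) -> Topic:
    # Raises KeyError for storages without lightweight-delete support.
    return STORAGE_TOPIC[storage_key]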
4 changes: 2 additions & 2 deletions tests/web/test_bulk_delete_query.py
@@ -47,7 +47,7 @@ def get_attribution_info(
 @patch("snuba.web.bulk_delete_query._enforce_max_rows", return_value=10)
 def test_delete_success(mock_enforce_max_row: Mock) -> None:
     admin_client = AdminClient(get_default_kafka_configuration())
-    create_topics(admin_client, [Topic.LW_DELETIONS_SEARCH_ISSUES])
+    create_topics(admin_client, [Topic.LW_DELETIONS_GENERIC_EVENTS])

     consumer = Consumer(CONSUMER_CONFIG)
     storage = get_writable_storage(StorageKey("search_issues"))
@@ -56,7 +56,7 @@ def test_delete_success(mock_enforce_max_row: Mock) -> None:

     # just give it a second before subscribing
     time.sleep(2.0)
-    consumer.subscribe([Topic.LW_DELETIONS_SEARCH_ISSUES.value])
+    consumer.subscribe([Topic.LW_DELETIONS_GENERIC_EVENTS.value])

     result = delete_from_storage(storage, conditions, attr_info)
     assert result["search_issues_local_v2"]["data"] == [{"rows_to_delete": 10}]
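Outside the test harness, subscribing to the renamed topic looks the same from any confluent-kafka consumer. A minimal sketch, assuming a local broker (the bootstrap address and group id are placeholders):

from confluent_kafka import Consumer

consumer = Consumer(
    {
        "bootstrap.servers": "127.0.0.1:9092",
        "group.id": "lw-deletions-example",
        "auto.offset.reset": "earliest",
    }
)
# Subscribe using the renamed topic's physical name.
consumer.subscribe(["snuba-lw-deletions-generic-events"])
msg = consumer.poll(timeout=5.0)
if msg is not None and msg.error() is None:
    print(msg.value())
consumer.close()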
