Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ACI Gardening #79258

Merged
merged 3 commits into from
Oct 17, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion src/sentry/workflow_engine/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
"DataSourceDetector",
"Detector",
"DetectorState",
"DetectorEvaluationResult",
"DetectorWorkflow",
"Workflow",
"WorkflowDataConditionGroup",
Expand All @@ -19,7 +20,7 @@
from .data_condition_group_action import DataConditionGroupAction
from .data_source import DataPacket, DataSource
from .data_source_detector import DataSourceDetector
from .detector import Detector
from .detector import Detector, DetectorEvaluationResult
from .detector_state import DetectorState
from .detector_workflow import DetectorWorkflow
from .workflow import Workflow
Expand Down
1 change: 1 addition & 0 deletions src/sentry/workflow_engine/models/data_condition_group.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ class DataConditionGroup(DefaultFieldsModel):
class Type(models.TextChoices):
ANY = "any"
ALL = "all"
NONE = "none"

logic_type = models.CharField(max_length=200, choices=Type.choices, default=Type.ANY)
organization = models.ForeignKey("sentry.Organization", on_delete=models.CASCADE)
2 changes: 2 additions & 0 deletions src/sentry/workflow_engine/processors/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
# Public API of the workflow_engine.processors package: the two entry-point
# processors are re-exported here so callers can import them from the package root.
__all__ = [
    "process_data_sources",
    "process_detectors",
]

from .data_source import process_data_sources
from .detector import process_detectors
8 changes: 5 additions & 3 deletions src/sentry/workflow_engine/processors/detector.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
import logging

from sentry.workflow_engine.models import Detector
from sentry.workflow_engine.models.data_source import DataPacket
from sentry.workflow_engine.models.detector import DetectorEvaluationResult
from sentry.workflow_engine.models import DataPacket, Detector, DetectorEvaluationResult

logger = logging.getLogger(__name__)

Expand All @@ -11,12 +9,16 @@ def process_detectors(
data_packet: DataPacket, detectors: list[Detector]
) -> list[tuple[Detector, list[DetectorEvaluationResult]]]:
results = []

for detector in detectors:
handler = detector.detector_handler

if not handler:
continue

detector_results = handler.evaluate(data_packet)
detector_group_keys = set()

for result in detector_results:
if result.state_update_data:
if result.state_update_data.group_key in detector_group_keys:
Expand Down
31 changes: 14 additions & 17 deletions tests/sentry/workflow_engine/processors/test_data_sources.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from sentry.snuba.models import SnubaQuery
from sentry.testutils.cases import TestCase
from sentry.workflow_engine.models import DataSource
from sentry.workflow_engine.models import DataPacket, DataSource
from sentry.workflow_engine.processors import process_data_sources


Expand Down Expand Up @@ -30,25 +30,22 @@ def setUp(self):
)
self.ds2.detectors.set([self.detector_two])

self.packet = self.query
self.packet_two = self.query_two

# turn a query into a data packet, "simulating" the result from the snuba query
self.packet.query_id = self.query.id
self.packet_two.query_id = self.query_two.id
self.packet = DataPacket[dict](self.query.id, {"query_id": self.query.id, "foo": "bar"})
self.packet_two = DataPacket[dict](
self.query_two.id, {"query_id": self.query_two.id, "foo": "baz"}
)

self.data_packets = [self.query, self.query_two]
self.data_packets = [self.packet, self.packet_two]

def test_single_data_packet(self):
self.data_packets = [self.query]
assert process_data_sources(self.data_packets, DataSource.Type.SNUBA_QUERY) == [
(self.query, [self.detector_one])
assert process_data_sources([self.packet], DataSource.Type.SNUBA_QUERY) == [
(self.packet, [self.detector_one])
]

def test_multiple_data_packets(self):
assert process_data_sources(self.data_packets, DataSource.Type.SNUBA_QUERY) == [
(self.query, [self.detector_one]),
(self.query_two, [self.detector_two]),
(self.packet, [self.detector_one]),
(self.packet_two, [self.detector_two]),
]

def test_multiple_detectors(self):
Expand All @@ -62,9 +59,9 @@ def test_multiple_detectors(self):
self.ds2.detectors.add(self.detector_five)

assert process_data_sources(self.data_packets, DataSource.Type.SNUBA_QUERY) == [
(self.query, [self.detector_one]),
(self.packet, [self.detector_one]),
(
self.query_two,
self.packet_two,
[self.detector_two, self.detector_three, self.detector_four, self.detector_five],
),
]
Expand All @@ -90,6 +87,6 @@ def test_different_data_packet_type__with_results(self):
assert process_data_sources(
self.data_packets, DataSource.Type.SNUBA_QUERY_SUBSCRIPTION
) == [
(self.query, [self.detector_one]),
(self.query_two, [self.detector_two]),
(self.packet, [self.detector_one]),
(self.packet_two, [self.detector_two]),
]
Loading