Skip to content

Commit 1817fb7

Browse files
authored
chore(ACI): Add back subscription processor test coverage (#105113)
Add missing test coverage for the subscription processor. Many tests that we still need were deleted; they must be recovered and updated for ACI. * Add missing tests for early returns from [here](https://github.com/getsentry/sentry/blob/e3f3190fcfcc4ff63e3562516ab92f641d4d1680/tests/sentry/incidents/subscription_processor/test_subscription_processor.py#L287-L388) * Add missing anomaly detection test coverage from [here](https://github.com/getsentry/sentry/blob/e3f3190fcfcc4ff63e3562516ab92f641d4d1680/tests/sentry/incidents/subscription_processor/test_subscription_processor.py#L3138) TODO in future PRs: - [x] Add missing crash rate alert tests from [here](https://github.com/getsentry/sentry/blob/e3f3190fcfcc4ff63e3562516ab92f641d4d1680/tests/sentry/incidents/subscription_processor/test_subscription_processor.py#L3741) #105205 - [x] Add missing upsample tests from [here](https://github.com/getsentry/sentry/blob/e3f3190fcfcc4ff63e3562516ab92f641d4d1680/tests/sentry/incidents/subscription_processor/test_subscription_processor.py#L4626) #105272 * Add missing EAP test (I can't get this one to pass — it always receives `None` from `get_eap_aggregation_value` — so I will ask Shruthi after the holidays) from [here](https://github.com/getsentry/sentry/blob/e3f3190fcfcc4ff63e3562516ab92f641d4d1680/tests/sentry/incidents/subscription_processor/test_subscription_processor.py#L1869)
1 parent 64acec7 commit 1817fb7

File tree

3 files changed

+214
-68
lines changed

3 files changed

+214
-68
lines changed

tests/sentry/incidents/handlers/condition/test_anomaly_detection_handler.py

Lines changed: 85 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,11 @@
11
from datetime import UTC, datetime
2+
from typing import Any
23
from unittest import mock
34

45
import orjson
56
from urllib3.response import HTTPResponse
67

8+
from sentry.conf.server import SEER_ANOMALY_DETECTION_ENDPOINT_URL
79
from sentry.incidents.utils.types import AnomalyDetectionUpdate
810
from sentry.seer.anomaly_detection.types import (
911
AnomalyDetectionSeasonality,
@@ -13,6 +15,7 @@
1315
DataSourceType,
1416
DetectAnomaliesResponse,
1517
)
18+
from sentry.snuba.dataset import Dataset
1619
from sentry.snuba.subscriptions import create_snuba_subscription
1720
from sentry.workflow_engine.models import Condition, DataPacket
1821
from sentry.workflow_engine.types import ConditionError, DetectorPriorityLevel
@@ -44,7 +47,6 @@ def setUp(self) -> None:
4447
timestamp=datetime.now(UTC),
4548
entity="test-entity",
4649
)
47-
4850
self.data_source = self.create_data_source(
4951
source_id=str(packet.subscription_id),
5052
organization=self.organization,
@@ -75,43 +77,106 @@ def setUp(self) -> None:
7577
"anomaly_type": AnomalyType.HIGH_CONFIDENCE,
7678
},
7779
"timestamp": 1,
78-
"value": 10,
80+
"value": self.data_packet.packet.values["value"],
7981
}
8082
],
8183
}
84+
self.low_confidence_seer_response: DetectAnomaliesResponse = {
85+
"success": True,
86+
"timeseries": [
87+
{
88+
"anomaly": {
89+
"anomaly_score": 0.2,
90+
"anomaly_type": AnomalyType.LOW_CONFIDENCE,
91+
},
92+
"timestamp": 1,
93+
"value": self.data_packet.packet.values["value"],
94+
}
95+
],
96+
}
97+
98+
def assert_seer_call(self, deserialized_body: dict[str, Any]) -> None:
99+
assert deserialized_body["organization_id"] == self.detector.project.organization.id
100+
assert deserialized_body["project_id"] == self.detector.project_id
101+
assert deserialized_body["config"]["time_period"] == self.snuba_query.time_window / 60
102+
assert (
103+
deserialized_body["config"]["sensitivity"]
104+
== self.dc.comparison.get("sensitivity").value
105+
)
106+
assert (
107+
deserialized_body["config"]["expected_seasonality"]
108+
== self.dc.comparison.get("seasonality").value
109+
)
110+
assert deserialized_body["context"]["source_id"] == self.subscription.id
111+
assert (
112+
deserialized_body["context"]["source_type"] == DataSourceType.SNUBA_QUERY_SUBSCRIPTION
113+
)
114+
assert (
115+
deserialized_body["context"]["cur_window"]["value"]
116+
== self.data_packet.packet.values["value"]
117+
)
82118

83119
@mock.patch(
84120
"sentry.seer.anomaly_detection.get_anomaly_data.SEER_ANOMALY_DETECTION_CONNECTION_POOL.urlopen"
85121
)
86-
def test_passes(self, mock_seer_request: mock.MagicMock) -> None:
87-
seer_return_value = self.high_confidence_seer_response
88-
mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
122+
def test_triggers(self, mock_seer_request: mock.MagicMock) -> None:
123+
mock_seer_request.return_value = HTTPResponse(
124+
orjson.dumps(self.high_confidence_seer_response), status=200
125+
)
89126
assert (
90127
self.dc.evaluate_value(self.data_packet.packet.values)
91128
== DetectorPriorityLevel.HIGH.value
92129
)
130+
assert mock_seer_request.call_args.args[0] == "POST"
131+
assert mock_seer_request.call_args.args[1] == SEER_ANOMALY_DETECTION_ENDPOINT_URL
132+
deserialized_body = orjson.loads(mock_seer_request.call_args.kwargs["body"])
133+
self.assert_seer_call(deserialized_body)
93134

94135
@mock.patch(
95136
"sentry.seer.anomaly_detection.get_anomaly_data.SEER_ANOMALY_DETECTION_CONNECTION_POOL.urlopen"
96137
)
97-
def test_passes_medium(self, mock_seer_request: mock.MagicMock) -> None:
98-
seer_return_value: DetectAnomaliesResponse = {
99-
"success": True,
100-
"timeseries": [
101-
{
102-
"anomaly": {
103-
"anomaly_score": 0.2,
104-
"anomaly_type": AnomalyType.LOW_CONFIDENCE,
105-
},
106-
"timestamp": 1,
107-
"value": 10,
108-
}
109-
],
110-
}
111-
mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
138+
def test_does_not_trigger(self, mock_seer_request: mock.MagicMock) -> None:
139+
mock_seer_request.return_value = HTTPResponse(
140+
orjson.dumps(self.low_confidence_seer_response), status=200
141+
)
142+
assert (
143+
self.dc.evaluate_value(self.data_packet.packet.values) == DetectorPriorityLevel.OK.value
144+
)
145+
assert mock_seer_request.call_args.args[0] == "POST"
146+
assert mock_seer_request.call_args.args[1] == SEER_ANOMALY_DETECTION_ENDPOINT_URL
147+
deserialized_body = orjson.loads(mock_seer_request.call_args.kwargs["body"])
148+
self.assert_seer_call(deserialized_body)
149+
150+
@mock.patch(
151+
"sentry.seer.anomaly_detection.get_anomaly_data.SEER_ANOMALY_DETECTION_CONNECTION_POOL.urlopen"
152+
)
153+
def test_triggers_performance_detector(self, mock_seer_request: mock.MagicMock) -> None:
154+
self.snuba_query.update(time_window=15 * 60, dataset=Dataset.Transactions)
155+
156+
# ensure that it triggers
157+
mock_seer_request.return_value = HTTPResponse(
158+
orjson.dumps(self.high_confidence_seer_response), status=200
159+
)
160+
assert (
161+
self.dc.evaluate_value(self.data_packet.packet.values)
162+
== DetectorPriorityLevel.HIGH.value
163+
)
164+
assert mock_seer_request.call_args.args[0] == "POST"
165+
assert mock_seer_request.call_args.args[1] == SEER_ANOMALY_DETECTION_ENDPOINT_URL
166+
deserialized_body = orjson.loads(mock_seer_request.call_args.kwargs["body"])
167+
self.assert_seer_call(deserialized_body)
168+
169+
# ensure that it resolves
170+
mock_seer_request.return_value = HTTPResponse(
171+
orjson.dumps(self.low_confidence_seer_response), status=200
172+
)
112173
assert (
113174
self.dc.evaluate_value(self.data_packet.packet.values) == DetectorPriorityLevel.OK.value
114175
)
176+
assert mock_seer_request.call_args.args[0] == "POST"
177+
assert mock_seer_request.call_args.args[1] == SEER_ANOMALY_DETECTION_ENDPOINT_URL
178+
deserialized_body = orjson.loads(mock_seer_request.call_args.kwargs["body"])
179+
self.assert_seer_call(deserialized_body)
115180

116181
@mock.patch(
117182
"sentry.seer.anomaly_detection.get_anomaly_data.SEER_ANOMALY_DETECTION_CONNECTION_POOL.urlopen"

tests/sentry/incidents/subscription_processor/test_subscription_processor_aci.py

Lines changed: 127 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,13 @@
11
import copy
22
from datetime import timedelta
33
from functools import cached_property
4-
from unittest.mock import call, patch
4+
from unittest.mock import MagicMock, call, patch
55

66
from django.utils import timezone
77

8+
from sentry.constants import ObjectStatus
9+
from sentry.incidents.subscription_processor import SubscriptionProcessor
10+
from sentry.snuba.dataset import Dataset
811
from sentry.testutils.factories import DEFAULT_EVENT_DATA
912
from sentry.workflow_engine.models.data_condition import Condition, DataCondition
1013
from sentry.workflow_engine.types import DetectorPriorityLevel
@@ -18,80 +21,158 @@ class ProcessUpdateTest(ProcessUpdateBaseClass):
1821
Test early return scenarios + simple cases.
1922
"""
2023

21-
# TODO: tests for early return scenarios. These will need to be added once
22-
# we've decoupled the subscription processor from the alert rule model.
23-
2424
def test_simple(self) -> None:
2525
"""
2626
Verify that an alert can trigger.
2727
"""
2828
self.send_update(self.critical_threshold + 1)
2929
assert self.get_detector_state(self.metric_detector) == DetectorPriorityLevel.HIGH
3030

31+
@patch("sentry.incidents.subscription_processor.metrics")
32+
def test_missing_project(self, mock_metrics: MagicMock) -> None:
33+
metrics_call = "incidents.alert_rules.ignore_deleted_project"
34+
35+
self.sub.project.update(status=ObjectStatus.PENDING_DELETION)
36+
self.sub.project.save()
37+
38+
assert self.send_update(self.critical_threshold + 1) is False
39+
mock_metrics.incr.assert_has_calls([call(metrics_call)])
40+
41+
mock_metrics.reset_mock()
42+
43+
self.sub.project.delete()
44+
assert self.send_update(self.critical_threshold + 1) is False
45+
mock_metrics.incr.assert_has_calls([call(metrics_call)])
46+
47+
@patch("sentry.incidents.subscription_processor.metrics")
48+
def test_has_downgraded_incidents(self, mock_metrics: MagicMock) -> None:
49+
processor = SubscriptionProcessor(self.sub)
50+
message = self.build_subscription_update(
51+
self.sub, value=self.critical_threshold + 1, time_delta=timedelta()
52+
)
53+
54+
with self.capture_on_commit_callbacks(execute=True):
55+
assert processor.process_update(message) is False
56+
mock_metrics.incr.assert_has_calls(
57+
[call("incidents.alert_rules.ignore_update_missing_incidents")]
58+
)
59+
60+
@patch("sentry.incidents.subscription_processor.metrics")
61+
def test_has_downgraded_incidents_performance(self, mock_metrics: MagicMock) -> None:
62+
snuba_query = self.get_snuba_query(self.detector)
63+
snuba_query.update(time_window=15 * 60, dataset=Dataset.Transactions.value)
64+
snuba_query.save()
65+
66+
processor = SubscriptionProcessor(self.sub)
67+
message = self.build_subscription_update(
68+
self.sub, value=self.critical_threshold + 1, time_delta=timedelta()
69+
)
70+
71+
with self.capture_on_commit_callbacks(execute=True):
72+
assert processor.process_update(message) is False
73+
mock_metrics.incr.assert_has_calls(
74+
[call("incidents.alert_rules.ignore_update_missing_incidents_performance")]
75+
)
76+
77+
@patch("sentry.incidents.subscription_processor.metrics")
78+
def test_has_downgraded_on_demand(self, mock_metrics: MagicMock) -> None:
79+
snuba_query = self.get_snuba_query(self.detector)
80+
snuba_query.update(time_window=15 * 60, dataset=Dataset.PerformanceMetrics.value)
81+
snuba_query.save()
82+
83+
processor = SubscriptionProcessor(self.sub)
84+
message = self.build_subscription_update(
85+
self.sub, value=self.critical_threshold + 1, time_delta=timedelta()
86+
)
87+
88+
with self.capture_on_commit_callbacks(execute=True):
89+
assert processor.process_update(message) is False
90+
mock_metrics.incr.assert_has_calls(
91+
[call("incidents.alert_rules.ignore_update_missing_on_demand")]
92+
)
93+
94+
@patch("sentry.incidents.subscription_processor.metrics")
95+
def test_skip_already_processed_update(self, mock_metrics: MagicMock) -> None:
96+
assert self.send_update(value=self.critical_threshold + 1) is True
97+
mock_metrics.incr.reset_mock()
98+
assert self.send_update(value=self.critical_threshold + 1) is False
99+
100+
mock_metrics.incr.assert_called_once_with(
101+
"incidents.alert_rules.skipping_already_processed_update"
102+
)
103+
mock_metrics.incr.reset_mock()
104+
assert (
105+
self.send_update(value=self.critical_threshold + 1, time_delta=timedelta(hours=1))
106+
is True
107+
)
108+
109+
mock_metrics.incr.assert_called_once_with("incidents.alert_rules.process_update.start")
110+
31111
def test_resolve(self) -> None:
32-
detector = self.metric_detector
33112
self.send_update(self.critical_threshold + 1, timedelta(minutes=-2))
34-
assert self.get_detector_state(detector) == DetectorPriorityLevel.HIGH
113+
assert self.get_detector_state(self.detector) == DetectorPriorityLevel.HIGH
35114

36115
self.send_update(self.resolve_threshold - 1, timedelta(minutes=-1))
37-
assert self.get_detector_state(detector) == DetectorPriorityLevel.OK
116+
assert self.get_detector_state(self.detector) == DetectorPriorityLevel.OK
38117

39118
def test_resolve_percent_boundary(self) -> None:
40-
detector = self.metric_detector
41-
self.update_threshold(detector, DetectorPriorityLevel.HIGH, 0.5)
42-
self.update_threshold(detector, DetectorPriorityLevel.OK, 0.5)
119+
self.update_threshold(self.detector, DetectorPriorityLevel.HIGH, 0.5)
120+
self.update_threshold(self.detector, DetectorPriorityLevel.OK, 0.5)
43121
self.send_update(self.critical_threshold + 0.1, timedelta(minutes=-2))
44-
assert self.get_detector_state(detector) == DetectorPriorityLevel.HIGH
122+
assert self.get_detector_state(self.detector) == DetectorPriorityLevel.HIGH
45123

46124
self.send_update(self.resolve_threshold, timedelta(minutes=-1))
47-
assert self.get_detector_state(detector) == DetectorPriorityLevel.OK
125+
assert self.get_detector_state(self.detector) == DetectorPriorityLevel.OK
48126

49127
def test_reversed(self) -> None:
50128
"""
51129
Test that resolutions work when the threshold is reversed.
52130
"""
53-
detector = self.metric_detector
54-
DataCondition.objects.filter(condition_group=detector.workflow_condition_group).delete()
55-
self.set_up_data_conditions(detector, Condition.LESS, 100, None, 100)
131+
DataCondition.objects.filter(
132+
condition_group=self.detector.workflow_condition_group
133+
).delete()
134+
self.set_up_data_conditions(self.detector, Condition.LESS, 100, None, 100)
56135
self.send_update(self.critical_threshold - 1, timedelta(minutes=-2))
57-
assert self.get_detector_state(detector) == DetectorPriorityLevel.HIGH
136+
assert self.get_detector_state(self.detector) == DetectorPriorityLevel.HIGH
58137

59138
self.send_update(self.resolve_threshold, timedelta(minutes=-1))
60-
assert self.get_detector_state(detector) == DetectorPriorityLevel.OK
139+
assert self.get_detector_state(self.detector) == DetectorPriorityLevel.OK
61140

62141
def test_multiple_triggers(self) -> None:
63-
detector = self.metric_detector
64-
DataCondition.objects.filter(condition_group=detector.workflow_condition_group).delete()
65-
self.set_up_data_conditions(detector, Condition.GREATER, 100, 50, 50)
142+
DataCondition.objects.filter(
143+
condition_group=self.detector.workflow_condition_group
144+
).delete()
145+
self.set_up_data_conditions(self.detector, Condition.GREATER, 100, 50, 50)
66146

67147
self.send_update(self.warning_threshold + 1, timedelta(minutes=-5))
68-
assert self.get_detector_state(detector) == DetectorPriorityLevel.MEDIUM
148+
assert self.get_detector_state(self.detector) == DetectorPriorityLevel.MEDIUM
69149

70150
self.send_update(self.critical_threshold + 1, timedelta(minutes=-4))
71-
assert self.get_detector_state(detector) == DetectorPriorityLevel.HIGH
151+
assert self.get_detector_state(self.detector) == DetectorPriorityLevel.HIGH
72152

73153
self.send_update(self.critical_threshold - 1, timedelta(minutes=-3))
74-
assert self.get_detector_state(detector) == DetectorPriorityLevel.MEDIUM
154+
assert self.get_detector_state(self.detector) == DetectorPriorityLevel.MEDIUM
75155

76156
self.send_update(self.warning_threshold - 1, timedelta(minutes=-2))
77-
assert self.get_detector_state(detector) == DetectorPriorityLevel.OK
157+
assert self.get_detector_state(self.detector) == DetectorPriorityLevel.OK
78158

79159
def test_multiple_triggers_reversed(self) -> None:
80-
detector = self.metric_detector
81-
DataCondition.objects.filter(condition_group=detector.workflow_condition_group).delete()
82-
self.set_up_data_conditions(detector, Condition.LESS, 50, 100, 100)
160+
DataCondition.objects.filter(
161+
condition_group=self.detector.workflow_condition_group
162+
).delete()
163+
self.set_up_data_conditions(self.detector, Condition.LESS, 50, 100, 100)
83164

84165
self.send_update(self.warning_threshold - 1, timedelta(minutes=-5))
85-
assert self.get_detector_state(detector) == DetectorPriorityLevel.MEDIUM
166+
assert self.get_detector_state(self.detector) == DetectorPriorityLevel.MEDIUM
86167

87168
self.send_update(self.critical_threshold - 1, timedelta(minutes=-4))
88-
assert self.get_detector_state(detector) == DetectorPriorityLevel.HIGH
169+
assert self.get_detector_state(self.detector) == DetectorPriorityLevel.HIGH
89170

90171
self.send_update(self.critical_threshold + 1, timedelta(minutes=-3))
91-
assert self.get_detector_state(detector) == DetectorPriorityLevel.MEDIUM
172+
assert self.get_detector_state(self.detector) == DetectorPriorityLevel.MEDIUM
92173

93174
self.send_update(self.warning_threshold + 1, timedelta(minutes=-2))
94-
assert self.get_detector_state(detector) == DetectorPriorityLevel.OK
175+
assert self.get_detector_state(self.detector) == DetectorPriorityLevel.OK
95176

96177
# TODO: the subscription processor has a 10 minute cooldown period for creating new incidents
97178
# We probably need similar logic within workflow engine.
@@ -100,25 +181,25 @@ def test_multiple_triggers_reversed(self) -> None:
100181
class ProcessUpdateComparisonAlertTest(ProcessUpdateBaseClass):
101182
@cached_property
102183
def comparison_detector_above(self):
103-
detector = self.metric_detector
104-
detector.config.update({"comparison_delta": 60 * 60})
105-
detector.save()
106-
self.update_threshold(detector, DetectorPriorityLevel.HIGH, 150)
107-
self.update_threshold(detector, DetectorPriorityLevel.OK, 150)
108-
snuba_query = self.get_snuba_query(detector)
109-
snuba_query.update(time_window=60 * 60)
110-
return detector
184+
self.detector.config.update({"comparison_delta": 60 * 60})
185+
self.detector.save()
186+
self.update_threshold(self.detector, DetectorPriorityLevel.HIGH, 150)
187+
self.update_threshold(self.detector, DetectorPriorityLevel.OK, 150)
188+
self.snuba_query = self.get_snuba_query(self.detector)
189+
self.snuba_query.update(time_window=60 * 60)
190+
return self.detector
111191

112192
@cached_property
113193
def comparison_detector_below(self):
114-
detector = self.metric_detector
115-
detector.config.update({"comparison_delta": 60 * 60})
116-
detector.save()
117-
DataCondition.objects.filter(condition_group=detector.workflow_condition_group).delete()
118-
self.set_up_data_conditions(detector, Condition.LESS, 50, None, 50)
119-
snuba_query = self.get_snuba_query(detector)
194+
self.detector.config.update({"comparison_delta": 60 * 60})
195+
self.detector.save()
196+
DataCondition.objects.filter(
197+
condition_group=self.detector.workflow_condition_group
198+
).delete()
199+
self.set_up_data_conditions(self.detector, Condition.LESS, 50, None, 50)
200+
snuba_query = self.get_snuba_query(self.detector)
120201
snuba_query.update(time_window=60 * 60)
121-
return detector
202+
return self.detector
122203

123204
@patch("sentry.incidents.utils.process_update_helpers.metrics")
124205
def test_comparison_alert_above(self, helper_metrics):

0 commit comments

Comments
 (0)