fix(alerts): Pass cur_window and parse response correctly (#77004)
Fix `cur_window` so it is sent to Seer as a single `TimeSeriesPoint` rather than a
one-element list, and fix parsing of the Seer response, which nests its results under `timeseries` instead of `anomalies`.
ceorourke authored Sep 5, 2024
1 parent 77f99af commit 4c161d1
Showing 3 changed files with 30 additions and 29 deletions.
12 changes: 6 additions & 6 deletions src/sentry/incidents/subscription_processor.py
@@ -654,7 +654,7 @@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None:
# before the next one then we might alert twice.
self.update_alert_rule_stats()

-def has_anomaly(self, anomaly, label: str, has_fake_anomalies: bool) -> bool:
+def has_anomaly(self, anomaly: TimeSeriesPoint, label: str, has_fake_anomalies: bool) -> bool:
"""
Helper function to determine whether we care about an anomaly based on the
anomaly type and trigger type.
@@ -670,7 +670,7 @@ def has_anomaly(self, anomaly, label: str, has_fake_anomalies: bool) -> bool:
return True
return False

-def anomaly_has_confidence(self, anomaly) -> bool:
+def anomaly_has_confidence(self, anomaly: TimeSeriesPoint) -> bool:
"""
Helper function to determine whether we have the 7+ days of data necessary
to detect anomalies/send alerts for dynamic alert rules.
@@ -687,9 +687,9 @@ def get_anomaly_data_from_seer(self, aggregation_value: float | None):
)
context = AlertInSeer(
id=self.alert_rule.id,
-cur_window=[
-    TimeSeriesPoint(timestamp=self.last_update.timestamp(), value=aggregation_value)
-],
+cur_window=TimeSeriesPoint(
+    timestamp=self.last_update.timestamp(), value=aggregation_value
+),
)
detect_anomalies_request = DetectAnomaliesRequest(
organization_id=self.subscription.project.organization.id,
@@ -724,7 +724,7 @@ def get_anomaly_data_from_seer(self, aggregation_value: float | None):
return None

try:
-results = json.loads(response.data.decode("utf-8")).get("anomalies")
+results = json.loads(response.data.decode("utf-8")).get("timeseries")
if not results:
logger.warning(
"Seer anomaly detection response returned no potential anomalies",
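Taken together, the two fixes in this file change both the payload sent to Seer and the key read back from it. The sketch below shows the corrected flow in isolation — the TypedDicts mirror `types.py` below, while `build_context` and `parse_timeseries` are illustrative helpers, not Sentry's actual functions:

import json
from typing import NotRequired, TypedDict  # NotRequired needs Python 3.11+


class Anomaly(TypedDict):
    anomaly_type: str
    anomaly_score: float


class TimeSeriesPoint(TypedDict):
    timestamp: float
    value: float
    anomaly: NotRequired[Anomaly]


def build_context(alert_rule_id: int, timestamp: float, value: float) -> dict:
    # After the fix: cur_window is a single TimeSeriesPoint, not a one-element list.
    return {
        "id": alert_rule_id,
        "cur_window": TimeSeriesPoint(timestamp=timestamp, value=value),
    }


def parse_timeseries(raw: bytes) -> list[TimeSeriesPoint] | None:
    # After the fix: results live under "timeseries". The old .get("anomalies")
    # always came back None, so every response hit the warning branch above.
    results = json.loads(raw.decode("utf-8")).get("timeseries")
    return results or None
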
3 changes: 2 additions & 1 deletion src/sentry/seer/anomaly_detection/types.py
@@ -4,12 +4,13 @@

class Anomaly(TypedDict):
anomaly_type: str
-anomaly_value: float
+anomaly_score: float


class TimeSeriesPoint(TypedDict):
timestamp: float
value: float
+anomaly: NotRequired[Anomaly]


class AlertInSeer(TypedDict):
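With the renamed `anomaly_score` field and the now-optional `anomaly` member, a response point can be spelled out as below. This reuses the TypedDicts sketched above; the `anomaly_type` string is an assumed value for `AnomalyType.HIGH_CONFIDENCE.value`, not taken from this diff:

point: TimeSeriesPoint = {
    "timestamp": 1,
    "value": 10,
    "anomaly": {"anomaly_score": 0.9, "anomaly_type": "anomaly_higher_confidence"},
}

# "anomaly" is NotRequired, so a bare point type-checks too:
bare_point: TimeSeriesPoint = {"timestamp": 2, "value": 7}
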
44 changes: 22 additions & 22 deletions tests/sentry/incidents/test_subscription_processor.py
@@ -52,7 +52,7 @@
)
from sentry.incidents.utils.types import AlertRuleActivationConditionType
from sentry.models.project import Project
-from sentry.seer.anomaly_detection.types import AnomalyType
+from sentry.seer.anomaly_detection.types import AnomalyType, TimeSeriesPoint
from sentry.seer.anomaly_detection.utils import translate_direction
from sentry.sentry_metrics.configuration import UseCaseKey
from sentry.sentry_metrics.indexer.postgres.models import MetricsKeyIndexer
@@ -485,7 +485,7 @@ def test_seer_call(self, mock_seer_request: MagicMock):
str(self.user.id),
)
seer_return_value_1 = {
"anomalies": [
"timeseries": [
{
"anomaly": {
"anomaly_score": 0.7,
@@ -510,7 +510,7 @@ def test_seer_call(self, mock_seer_request: MagicMock):
assert deserialized_body["config"]["expected_seasonality"] == rule.seasonality.value
assert deserialized_body["config"]["direction"] == translate_direction(rule.threshold_type)
assert deserialized_body["context"]["id"] == rule.id
-assert deserialized_body["context"]["cur_window"][0]["value"] == 5
+assert deserialized_body["context"]["cur_window"]["value"] == 5

self.assert_trigger_counts(processor, trigger, 0, 0)
self.assert_trigger_counts(processor, warning_trigger, 0, 0)
@@ -525,7 +525,7 @@ def test_seer_call(self, mock_seer_request: MagicMock):

# trigger critical
seer_return_value_2 = {
"anomalies": [
"timeseries": [
{
"anomaly": {
"anomaly_score": 0.9,
@@ -550,7 +550,7 @@ def test_seer_call(self, mock_seer_request: MagicMock):
assert deserialized_body["config"]["expected_seasonality"] == rule.seasonality.value
assert deserialized_body["config"]["direction"] == translate_direction(rule.threshold_type)
assert deserialized_body["context"]["id"] == rule.id
-assert deserialized_body["context"]["cur_window"][0]["value"] == 10
+assert deserialized_body["context"]["cur_window"]["value"] == 10

self.assert_trigger_counts(processor, trigger, 0, 0)
self.assert_trigger_counts(processor, warning_trigger, 0, 0)
@@ -565,7 +565,7 @@ def test_seer_call(self, mock_seer_request: MagicMock):

# trigger a resolution
seer_return_value_3 = {
"anomalies": [
"timeseries": [
{
"anomaly": {"anomaly_score": 0.5, "anomaly_type": AnomalyType.NONE.value},
"timestamp": 1,
@@ -587,7 +587,7 @@ def test_seer_call(self, mock_seer_request: MagicMock):
assert deserialized_body["config"]["expected_seasonality"] == rule.seasonality.value
assert deserialized_body["config"]["direction"] == translate_direction(rule.threshold_type)
assert deserialized_body["context"]["id"] == rule.id
-assert deserialized_body["context"]["cur_window"][0]["value"] == 1
+assert deserialized_body["context"]["cur_window"]["value"] == 1

self.assert_trigger_counts(processor, self.trigger, 0, 0)
self.assert_trigger_counts(processor, warning_trigger, 0, 0)
@@ -609,7 +609,7 @@ def test_seer_call_performance_rule(self, mock_seer_request: MagicMock):
throughput_rule.snuba_query.update(time_window=15 * 60, dataset=Dataset.Transactions)
# trigger critical
seer_return_value = {
"anomalies": [
"timeseries": [
{
"anomaly": {
"anomaly_score": 0.9,
@@ -639,7 +639,7 @@ def test_seer_call_performance_rule(self, mock_seer_request: MagicMock):
throughput_rule.threshold_type
)
assert deserialized_body["context"]["id"] == throughput_rule.id
-assert deserialized_body["context"]["cur_window"][0]["value"] == 10
+assert deserialized_body["context"]["cur_window"]["value"] == 10

self.assert_trigger_counts(processor, self.trigger, 0, 0)
incident = self.assert_active_incident(throughput_rule)
@@ -652,7 +652,7 @@ def test_seer_call_performance_rule(self, mock_seer_request: MagicMock):

# trigger a resolution
seer_return_value_2 = {
"anomalies": [
"timeseries": [
{
"anomaly": {"anomaly_score": 0.5, "anomaly_type": AnomalyType.NONE.value},
"timestamp": 1,
@@ -679,7 +679,7 @@ def test_seer_call_performance_rule(self, mock_seer_request: MagicMock):
throughput_rule.threshold_type
)
assert deserialized_body["context"]["id"] == throughput_rule.id
-assert deserialized_body["context"]["cur_window"][0]["value"] == 1
+assert deserialized_body["context"]["cur_window"]["value"] == 1

self.assert_trigger_counts(processor, self.trigger, 0, 0)
self.assert_no_active_incident(throughput_rule)
@@ -691,19 +691,19 @@ def test_seer_call_performance_rule(self, mock_seer_request: MagicMock):
def test_has_anomaly(self):
rule = self.dynamic_rule
# test alert ABOVE
-anomaly1 = {
+anomaly1: TimeSeriesPoint = {
"anomaly": {"anomaly_score": 0.9, "anomaly_type": AnomalyType.HIGH_CONFIDENCE.value},
"timestamp": 1,
"value": 10,
}

-anomaly2 = {
+anomaly2: TimeSeriesPoint = {
"anomaly": {"anomaly_score": 0.6, "anomaly_type": AnomalyType.LOW_CONFIDENCE.value},
"timestamp": 1,
"value": 10,
}

-not_anomaly = {
+not_anomaly: TimeSeriesPoint = {
"anomaly": {"anomaly_score": 0.2, "anomaly_type": AnomalyType.NONE.value},
"timestamp": 1,
"value": 10,
@@ -723,7 +723,7 @@ def test_has_anomaly(self):
assert not processor.has_anomaly(not_anomaly, warning_label, False)

def test_fake_anomaly(self):
-anomaly = {
+anomaly: TimeSeriesPoint = {
"anomaly": {"anomaly_score": 0.2, "anomaly_type": AnomalyType.NONE.value},
"timestamp": 1,
"value": 10,
@@ -771,7 +771,7 @@ def test_dynamic_alert_rule_not_enough_data(self, mock_seer_request):

# test that we don't activate if we get "no_data"
seer_return_value = {
"anomalies": [
"timeseries": [
{
"anomaly": {
"anomaly_score": 0.0,
@@ -804,7 +804,7 @@ def test_enable_dynamic_alert_rule(self, mock_seer_request):

# test that we activate but don't fire an alert if we get "none"
seer_return_value = {
"anomalies": [
"timeseries": [
{
"anomaly": {
"anomaly_score": 0.2,
@@ -837,7 +837,7 @@ def test_enable_dynamic_alert_rule_and_fire(self, mock_seer_request):

# test that we can activate and fire an alert
seer_return_value = {
"anomalies": [
"timeseries": [
{
"anomaly": {
"anomaly_score": 0.7,
@@ -877,7 +877,7 @@ def test_fire_dynamic_alert_rule_fake_anomaly(self, mock_seer_request):
rule = self.dynamic_rule
value = 10
seer_return_value = {
"anomalies": [
"timeseries": [
{
"anomaly": {
"anomaly_score": 0.0,
@@ -910,7 +910,7 @@ def test_fire_dynamic_alert_rule_fake_anomaly(self, mock_seer_request):
@mock.patch("sentry.incidents.subscription_processor.logger")
def test_seer_call_empty_list(self, mock_logger, mock_seer_request):
processor = SubscriptionProcessor(self.sub)
-seer_return_value: dict[str, list] = {"anomalies": []}
+seer_return_value: dict[str, list] = {"timeseries": []}
mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
result = processor.get_anomaly_data_from_seer(10)
assert mock_logger.warning.call_args[0] == (
@@ -3046,7 +3046,7 @@ def test_dynamic_crash_rate_alert_for_sessions_with_auto_resolve_critical(

# Send Critical Update
seer_return_value = {
"anomalies": [
"timeseries": [
{
"anomaly": {
"anomaly_score": 0.7,
@@ -3077,7 +3077,7 @@ def test_dynamic_crash_rate_alert_for_sessions_with_auto_resolve_critical(

# Close the metric alert
seer_return_value = {
"anomalies": [
"timeseries": [
{
"anomaly": {
"anomaly_score": 0.2,
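For reference, the `test_has_anomaly` and fake-anomaly fixtures above exercise the contract `has_anomaly` is expected to honor: a high-confidence anomaly fires any trigger, a low-confidence one fires only the warning trigger, and fake anomalies (used while a dynamic rule is still accumulating its 7+ days of history) always count. A sketch of that contract inferred from the tests — the label and `AnomalyType` strings are assumptions, and this is not Sentry's verbatim implementation:

HIGH_CONFIDENCE = "anomaly_higher_confidence"  # assumed AnomalyType values
LOW_CONFIDENCE = "anomaly_lower_confidence"
WARNING_LABEL = "warning"  # assumed warning-trigger label


def has_anomaly(point: dict, label: str, has_fake_anomalies: bool) -> bool:
    # Fake anomalies are injected while a rule lacks enough data to score.
    if has_fake_anomalies:
        return True
    anomaly_type = point.get("anomaly", {}).get("anomaly_type")
    # High confidence fires every trigger; low confidence only the warning one.
    return anomaly_type == HIGH_CONFIDENCE or (
        label == WARNING_LABEL and anomaly_type == LOW_CONFIDENCE
    )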
