diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py
index 8473ea53ea99df..48a5588bac6636 100644
--- a/src/sentry/api/urls.py
+++ b/src/sentry/api/urls.py
@@ -80,6 +80,9 @@
 from sentry.incidents.endpoints.organization_alert_rule_activations import (
     OrganizationAlertRuleActivationsEndpoint,
 )
+from sentry.incidents.endpoints.organization_alert_rule_anomalies import (
+    OrganizationAlertRuleAnomaliesEndpoint,
+)
 from sentry.incidents.endpoints.organization_alert_rule_available_action_index import (
     OrganizationAlertRuleAvailableActionIndexEndpoint,
 )
@@ -1169,6 +1172,11 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
         OrganizationAlertRuleActivationsEndpoint.as_view(),
         name="sentry-api-0-organization-alert-rule-activations",
     ),
+    re_path(
+        r"^(?P<organization_id_or_slug>[^\/]+)/alert-rules/(?P<alert_rule_id>[^\/]+)/anomalies/$",
+        OrganizationAlertRuleAnomaliesEndpoint.as_view(),
+        name="sentry-api-0-organization-alert-rule-anomalies",
+    ),
     re_path(  # fetch combined metric and issue alert rules
         r"^(?P<organization_id_or_slug>[^\/]+)/combined-rules/$",
         OrganizationCombinedRuleIndexEndpoint.as_view(),
diff --git a/src/sentry/apidocs/examples/metric_alert_examples.py b/src/sentry/apidocs/examples/metric_alert_examples.py
index a9f95d7d9ca604..6061b1e411868a 100644
--- a/src/sentry/apidocs/examples/metric_alert_examples.py
+++ b/src/sentry/apidocs/examples/metric_alert_examples.py
@@ -239,3 +239,19 @@ class MetricAlertExamples:
             ],
         )
     ]
+
+    GET_METRIC_ALERT_ANOMALIES = [
+        OpenApiExample(
+            "Fetch a list of anomalies for a metric alert rule",
+            value=[
+                {
+                    "timestamp": 0.1,
+                    "value": 100.0,
+                    "anomaly": {
+                        "anomaly_type": "high",
+                        "anomaly_value": 100,
+                    },
+                }
+            ],
+        )
+    ]
diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_anomalies.py b/src/sentry/incidents/endpoints/organization_alert_rule_anomalies.py
new file mode 100644
index 00000000000000..7e96dfe42c5b97
--- /dev/null
+++ b/src/sentry/incidents/endpoints/organization_alert_rule_anomalies.py
@@ -0,0 +1,78 @@
+from drf_spectacular.utils import extend_schema
+from pydantic import BaseModel
+from rest_framework.request import Request
+from rest_framework.response import Response
+
+from sentry.api.api_owners import ApiOwner
+from sentry.api.api_publish_status import ApiPublishStatus
+from sentry.api.base import region_silo_endpoint
+from sentry.api.paginator import OffsetPaginator
+from sentry.api.serializers.base import serialize
+from sentry.apidocs.constants import (
+    RESPONSE_BAD_REQUEST,
+    RESPONSE_FORBIDDEN,
+    RESPONSE_NOT_FOUND,
+    RESPONSE_UNAUTHORIZED,
+)
+from sentry.apidocs.examples.metric_alert_examples import MetricAlertExamples
+from sentry.apidocs.parameters import GlobalParams, MetricAlertParams
+from sentry.apidocs.utils import inline_sentry_response_serializer
+from sentry.incidents.endpoints.bases import OrganizationAlertRuleEndpoint
+from sentry.incidents.models.alert_rule import AlertRule
+from sentry.models.organization import Organization
+from sentry.seer.anomaly_detection.get_historical_anomalies import (
+    get_historical_anomaly_data_from_seer,
+)
+from sentry.seer.anomaly_detection.types import TimeSeriesPoint
+
+
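+# Pydantic model used only to generate the OpenAPI response schema for this endpoint.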
+class DetectAnomaliesResponse(BaseModel):
+    timeseries: list[TimeSeriesPoint]
+
+
+@region_silo_endpoint
+class OrganizationAlertRuleAnomaliesEndpoint(OrganizationAlertRuleEndpoint):
+    owner = ApiOwner.ALERTS_NOTIFICATIONS
+    publish_status = {
+        "GET": ApiPublishStatus.PUBLIC,
+    }
+
+    @extend_schema(
+        operation_id="Retrieve anomalies for a Metric Alert Rule",
+        parameters=[GlobalParams.ORG_ID_OR_SLUG, MetricAlertParams.METRIC_RULE_ID],
+        responses={
+            200: inline_sentry_response_serializer(
+                "ListAlertRuleAnomalies", DetectAnomaliesResponse
+            ),
+            400: RESPONSE_BAD_REQUEST,
+            401: RESPONSE_UNAUTHORIZED,
+            403: RESPONSE_FORBIDDEN,
+            404: RESPONSE_NOT_FOUND,
+        },
+        examples=MetricAlertExamples.GET_METRIC_ALERT_ANOMALIES,
+    )
+    def get(self, request: Request, organization: Organization, alert_rule: AlertRule) -> Response:
+        """
+        Return a list of anomalies for a metric alert rule.
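+
+        The `start` and `end` query parameters are required and are parsed as
+        ISO 8601 timestamps.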
+        """
+        project = alert_rule.projects.first()
+        start = request.GET.get("start", None)
+        end = request.GET.get("end", None)
+
+        if not project or start is None or end is None:
+            return Response(
+                "Unable to get historical anomaly data: missing required argument(s) project, start, and/or end",
+                status=400,
+            )
+
+        anomalies = get_historical_anomaly_data_from_seer(alert_rule, project, start, end)
+        if anomalies is None:
+            return Response("Unable to get historical anomaly data", status=400)
+        return self.paginate(
+            request=request,
+            queryset=anomalies,
+            paginator_cls=OffsetPaginator,
+            on_results=lambda x: serialize(x, request.user),
+        )
diff --git a/src/sentry/seer/anomaly_detection/get_historical_anomalies.py b/src/sentry/seer/anomaly_detection/get_historical_anomalies.py
new file mode 100644
index 00000000000000..80d194132603f9
--- /dev/null
+++ b/src/sentry/seer/anomaly_detection/get_historical_anomalies.py
@@ -0,0 +1,163 @@
+import logging
+from datetime import datetime
+
+from django.conf import settings
+from django.core.exceptions import ValidationError
+from urllib3.exceptions import MaxRetryError, TimeoutError
+
+from sentry.conf.server import SEER_ANOMALY_DETECTION_ENDPOINT_URL
+from sentry.incidents.models.alert_rule import AlertRule, AlertRuleStatus
+from sentry.models.project import Project
+from sentry.net.http import connection_from_url
+from sentry.search.events.types import SnubaParams
+from sentry.seer.anomaly_detection.types import AnomalyDetectionConfig
+from sentry.seer.anomaly_detection.utils import format_historical_data, translate_direction
+from sentry.seer.signed_seer_api import make_signed_seer_api_request
+from sentry.snuba.models import SnubaQuery
+from sentry.snuba.referrer import Referrer
+from sentry.snuba.utils import get_dataset
+from sentry.utils import json
+from sentry.utils.json import JSONDecodeError
+from sentry.utils.snuba import SnubaTSResult
+
+logger = logging.getLogger(__name__)
+
+seer_anomaly_detection_connection_pool = connection_from_url(
+    settings.SEER_ANOMALY_DETECTION_URL,
+    timeout=settings.SEER_ANOMALY_DETECTION_TIMEOUT,
+)
+
+
+def get_historical_anomaly_data_from_seer(
+    alert_rule: AlertRule, project: Project, start_string: str, end_string: str
+) -> list | None:
+    """
+    Send time series data to Seer and return anomaly detection response.
+    """
+    if alert_rule.status == AlertRuleStatus.NOT_ENOUGH_DATA.value:
+        return []
+    # this shouldn't be possible, but the check satisfies mypy
+    if not alert_rule.snuba_query:
+        raise ValidationError("No snuba query associated with alert rule.")
+    subscription = alert_rule.snuba_query.subscriptions.first()
+    # likewise unexpected, but the check satisfies mypy
+    if not subscription:
+        raise ValidationError("No subscription associated with alert rule.")
+    snuba_query = SnubaQuery.objects.get(id=alert_rule.snuba_query_id)
+    window_min = int(snuba_query.time_window / 60)
+    start = datetime.fromisoformat(start_string)
+    end = datetime.fromisoformat(end_string)
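+    # Query Snuba for the rule's historical time series over the requested window.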
+    historical_data = fetch_historical_data(
+        alert_rule=alert_rule, snuba_query=snuba_query, project=project, start=start, end=end
+    )
+
+    if not historical_data:
+        raise ValidationError("No historical data available.")
+    formatted_data = format_historical_data(historical_data)
+    if (
+        not alert_rule.sensitivity
+        or not alert_rule.seasonality
+        or alert_rule.threshold_type is None
+        or alert_rule.organization is None
+    ):
+        # this won't happen because we've already gone through the serializer, but mypy insists
+        raise ValidationError("Missing expected configuration for a dynamic alert.")
+
+    anomaly_detection_config = AnomalyDetectionConfig(
+        time_period=window_min,
+        sensitivity=alert_rule.sensitivity,
+        direction=translate_direction(alert_rule.threshold_type),
+        expected_seasonality=alert_rule.seasonality,
+    )
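+    # Send the formatted time series and detection config to Seer for anomaly detection.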
+    try:
+        response = make_signed_seer_api_request(
+            seer_anomaly_detection_connection_pool,
+            SEER_ANOMALY_DETECTION_ENDPOINT_URL,
+            json.dumps(
+                {
+                    "organization_id": alert_rule.organization.id,
+                    "project_id": project.id,
+                    "config": anomaly_detection_config,
+                    "context": formatted_data,
+                }
+            ).encode("utf-8"),
+        )
+    except (TimeoutError, MaxRetryError):
+        logger.warning(
+            "Timeout error when hitting anomaly detection endpoint",
+            extra={
+                "subscription_id": subscription.id,
+                "dataset": alert_rule.snuba_query.dataset,
+                "organization_id": alert_rule.organization.id,
+                "project_id": project.id,
+                "alert_rule_id": alert_rule.id,
+            },
+        )
+        return None
+
+    if response.status != 200:
+        logger.error(
+            f"Received {response.status} when calling Seer endpoint {SEER_ANOMALY_DETECTION_ENDPOINT_URL}.",  # noqa
+            extra={"response_data": response.data},
+        )
+        return None
+
+    try:
+        results = json.loads(response.data.decode("utf-8")).get("anomalies")
+        if not results:
+            logger.warning(
+                "Seer anomaly detection response returned no potential anomalies",
+                extra={
+                    "ad_config": anomaly_detection_config,
+                    "context": formatted_data,
+                    "response_data": response.data,
+                    "reponse_code": response.status,
+                },
+            )
+            return None
+        return results
+    except (
+        AttributeError,
+        UnicodeError,
+        JSONDecodeError,
+    ):
+        logger.exception(
+            "Failed to parse Seer anomaly detection response",
+            extra={
+                "ad_config": anomaly_detection_config,
+                "context": formatted_data,
+                "response_data": response.data,
+                "reponse_code": response.status,
+            },
+        )
+        return None
+
+
+def fetch_historical_data(
+    alert_rule: AlertRule, snuba_query: SnubaQuery, project: Project, start: datetime, end: datetime
+) -> SnubaTSResult | None:
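+    # snuba_query.time_window is stored in seconds and is used directly as the Snuba rollup.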
+    granularity = snuba_query.time_window
+
+    dataset_label = snuba_query.dataset
+    if dataset_label == "events":
+        # DATASET_OPTIONS expects the name 'errors'
+        dataset_label = "errors"
+    dataset = get_dataset(dataset_label)
+
+    if not project or not dataset or not alert_rule.organization:
+        return None
+
+    historical_data = dataset.timeseries_query(
+        selected_columns=[snuba_query.aggregate],
+        query=snuba_query.query,
+        snuba_params=SnubaParams(
+            organization=alert_rule.organization,
+            projects=[project],
+            start=start,
+            end=end,
+        ),
+        rollup=granularity,
+        referrer=Referrer.ANOMALY_DETECTION_RETURN_HISTORICAL_ANOMALIES.value,
+        zerofill_results=True,
+    )
+    return historical_data
diff --git a/src/sentry/seer/anomaly_detection/store_data.py b/src/sentry/seer/anomaly_detection/store_data.py
index 4d8cfd61e1091a..37e8b07ca3e425 100644
--- a/src/sentry/seer/anomaly_detection/store_data.py
+++ b/src/sentry/seer/anomaly_detection/store_data.py
@@ -15,9 +15,8 @@
     AlertInSeer,
     AnomalyDetectionConfig,
     StoreDataRequest,
-    TimeSeriesPoint,
 )
-from sentry.seer.anomaly_detection.utils import translate_direction
+from sentry.seer.anomaly_detection.utils import format_historical_data, translate_direction
 from sentry.seer.signed_seer_api import make_signed_seer_api_request
 from sentry.snuba.models import SnubaQuery
 from sentry.snuba.referrer import Referrer
@@ -33,22 +32,6 @@
 )
 
 
-def format_historical_data(data: SnubaTSResult) -> list[TimeSeriesPoint]:
-    """
-    Format Snuba data into the format the Seer API expects.
-    If there are no results, it's just the timestamp
-    {'time': 1719012000}, {'time': 1719018000}, {'time': 1719024000}
-
-    If there are results, the count is added
-    {'time': 1721300400, 'count': 2}
-    """
-    formatted_data = []
-    for datum in data.data.get("data", []):
-        ts_point = TimeSeriesPoint(timestamp=datum.get("time"), value=datum.get("count", 0))
-        formatted_data.append(ts_point)
-    return formatted_data
-
-
 def _get_start_and_end_indices(data: SnubaTSResult) -> tuple[int, int]:
     """
     Helper to return the first and last data points that have event counts.
diff --git a/src/sentry/seer/anomaly_detection/types.py b/src/sentry/seer/anomaly_detection/types.py
index 9da123cc51b846..31475809434c99 100644
--- a/src/sentry/seer/anomaly_detection/types.py
+++ b/src/sentry/seer/anomaly_detection/types.py
@@ -6,6 +6,11 @@ class AlertInSeer(TypedDict):
     id: int
 
 
+class Anomaly(TypedDict):
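+    # A single anomaly entry as returned by Seer; anomaly_type is expected to
+    # correspond to an AnomalyType value (e.g. "high").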
+    anomaly_type: str
+    anomaly_value: float
+
+
 class TimeSeriesPoint(TypedDict):
     timestamp: float
     value: float
diff --git a/src/sentry/seer/anomaly_detection/utils.py b/src/sentry/seer/anomaly_detection/utils.py
index 3eb8b92fa1ec64..720dc4b4a94b7b 100644
--- a/src/sentry/seer/anomaly_detection/utils.py
+++ b/src/sentry/seer/anomaly_detection/utils.py
@@ -1,4 +1,6 @@
 from sentry.incidents.models.alert_rule import AlertRuleThresholdType
+from sentry.seer.anomaly_detection.types import TimeSeriesPoint
+from sentry.utils.snuba import SnubaTSResult
 
 
 def translate_direction(direction: int) -> str:
@@ -11,3 +13,19 @@ def translate_direction(direction: int) -> str:
         AlertRuleThresholdType.ABOVE_AND_BELOW: "both",
     }
     return direction_map[AlertRuleThresholdType(direction)]
+
+
+def format_historical_data(data: SnubaTSResult) -> list[TimeSeriesPoint]:
+    """
+    Format Snuba data into the format the Seer API expects.
+    If there are no results, it's just the timestamp
+    {'time': 1719012000}, {'time': 1719018000}, {'time': 1719024000}
+
+    If there are results, the count is added
+    {'time': 1721300400, 'count': 2}
+    """
+    formatted_data = []
+    for datum in data.data.get("data", []):
+        ts_point = TimeSeriesPoint(timestamp=datum.get("time"), value=datum.get("count", 0))
+        formatted_data.append(ts_point)
+    return formatted_data
diff --git a/src/sentry/snuba/referrer.py b/src/sentry/snuba/referrer.py
index cdbcfef67384bd..145be18468daf5 100644
--- a/src/sentry/snuba/referrer.py
+++ b/src/sentry/snuba/referrer.py
@@ -13,6 +13,9 @@ class Referrer(Enum):
     ALERTRULESERIALIZER_TEST_QUERY_PRIMARY = "alertruleserializer.test_query.primary"
     ALERTRULESERIALIZER_TEST_QUERY = "alertruleserializer.test_query"
     ANOMALY_DETECTION_HISTORICAL_DATA_QUERY = "anomaly_detection_historical_data_query"
+    ANOMALY_DETECTION_RETURN_HISTORICAL_ANOMALIES = (
+        "anomaly_detection_get_historical_anomalies_query"
+    )
     API_ALERTS_ALERT_RULE_CHART_METRICS_ENHANCED = "api.alerts.alert-rule-chart.metrics-enhanced"
     API_ALERTS_ALERT_RULE_CHART = "api.alerts.alert-rule-chart"
     API_ALERTS_CHARTCUTERIE = "api.alerts.chartcuterie"
diff --git a/tests/sentry/incidents/endpoints/test_organization_alert_rule_anomalies.py b/tests/sentry/incidents/endpoints/test_organization_alert_rule_anomalies.py
new file mode 100644
index 00000000000000..e884ceb9c4d0ce
--- /dev/null
+++ b/tests/sentry/incidents/endpoints/test_organization_alert_rule_anomalies.py
@@ -0,0 +1,143 @@
+from datetime import timedelta
+from unittest.mock import patch
+
+import orjson
+import pytest
+from urllib3.response import HTTPResponse
+
+from sentry.conf.server import SEER_ANOMALY_DETECTION_ENDPOINT_URL
+from sentry.incidents.models.alert_rule import (
+    AlertRuleDetectionType,
+    AlertRuleSeasonality,
+    AlertRuleSensitivity,
+)
+from sentry.seer.anomaly_detection.types import AnomalyType
+from sentry.testutils.cases import SnubaTestCase
+from sentry.testutils.factories import EventType
+from sentry.testutils.helpers.datetime import before_now, freeze_time, iso_format
+from sentry.testutils.helpers.features import with_feature
+from sentry.testutils.outbox import outbox_runner
+from sentry.testutils.skips import requires_snuba
+from tests.sentry.incidents.endpoints.test_organization_alert_rule_index import AlertRuleBase
+
+pytestmark = [pytest.mark.sentry_metrics, requires_snuba]
+
+
+@freeze_time()
+class AlertRuleAnomalyEndpointTest(AlertRuleBase, SnubaTestCase):
+    endpoint = "sentry-api-0-organization-alert-rule-anomalies"
+
+    method = "get"
+
+    @with_feature("organizations:anomaly-detection-alerts")
+    @with_feature("organizations:incidents")
+    @patch(
+        "sentry.seer.anomaly_detection.store_data.seer_anomaly_detection_connection_pool.urlopen"
+    )
+    @patch(
+        "sentry.seer.anomaly_detection.get_historical_anomalies.seer_anomaly_detection_connection_pool.urlopen"
+    )
+    def test_simple(self, mock_seer_request, mock_seer_store_request):
+        self.create_team(organization=self.organization, members=[self.user])
+        two_weeks_ago = before_now(days=14).replace(hour=10, minute=0, second=0, microsecond=0)
+        with self.options({"issues.group_attributes.send_kafka": True}):
+            self.store_event(
+                data={
+                    "event_id": "a" * 32,
+                    "message": "super duper bad",
+                    "timestamp": iso_format(two_weeks_ago + timedelta(minutes=1)),
+                    "fingerprint": ["group1"],
+                    "tags": {"sentry:user": self.user.email},
+                },
+                event_type=EventType.ERROR,
+                project_id=self.project.id,
+            )
+            self.store_event(
+                data={
+                    "event_id": "b" * 32,
+                    "message": "super bad",
+                    "timestamp": iso_format(two_weeks_ago + timedelta(days=10)),
+                    "fingerprint": ["group2"],
+                    "tags": {"sentry:user": self.user.email},
+                },
+                event_type=EventType.ERROR,
+                project_id=self.project.id,
+            )
+
+        alert_rule = self.create_alert_rule(
+            time_window=15,
+            sensitivity=AlertRuleSensitivity.MEDIUM,
+            seasonality=AlertRuleSeasonality.AUTO,
+            detection_type=AlertRuleDetectionType.DYNAMIC,
+        )
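+        # A dynamic rule is required so sensitivity, seasonality, and threshold type
+        # are set when the endpoint builds the Seer request.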
+
+        self.login_as(self.user)
+
+        seer_return_value = {
+            "anomalies": [
+                {
+                    "anomaly": {
+                        "anomaly_score": 0.0,
+                        "anomaly_type": AnomalyType.NONE.value,
+                    },
+                    "timestamp": 1,
+                    "value": 1,
+                },
+                {
+                    "anomaly": {
+                        "anomaly_score": 0.0,
+                        "anomaly_type": AnomalyType.NONE.value,
+                    },
+                    "timestamp": 2,
+                    "value": 1,
+                },
+            ]
+        }
+        mock_seer_store_request.return_value = HTTPResponse(status=200)
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
+        with outbox_runner():
+            resp = self.get_success_response(
+                self.organization.slug,
+                alert_rule.id,
+                qs_params={
+                    "start": str(two_weeks_ago),
+                    "end": str(two_weeks_ago + timedelta(days=12)),
+                },
+                status_code=200,
+            )
+
+        assert mock_seer_store_request.call_count == 1
+        assert mock_seer_request.call_count == 1
+        assert mock_seer_request.call_args.args[0] == "POST"
+        assert mock_seer_request.call_args.args[1] == SEER_ANOMALY_DETECTION_ENDPOINT_URL
+        assert resp.data == seer_return_value["anomalies"]
+
+    @with_feature("organizations:anomaly-detection-alerts")
+    @with_feature("organizations:incidents")
+    @patch(
+        "sentry.seer.anomaly_detection.store_data.seer_anomaly_detection_connection_pool.urlopen"
+    )
+    def test_alert_not_enough_data(self, mock_seer_store_request):
+        self.create_team(organization=self.organization, members=[self.user])
+        two_weeks_ago = before_now(days=14).replace(hour=10, minute=0, second=0, microsecond=0)
+        alert_rule = self.create_alert_rule(
+            time_window=15,
+            sensitivity=AlertRuleSensitivity.MEDIUM,
+            seasonality=AlertRuleSeasonality.AUTO,
+            detection_type=AlertRuleDetectionType.DYNAMIC,
+        )
+
+        self.login_as(self.user)
+        mock_seer_store_request.return_value = HTTPResponse(status=200)
+        with outbox_runner():
+            resp = self.get_success_response(
+                self.organization.slug,
+                alert_rule.id,
+                qs_params={
+                    "start": str(two_weeks_ago),
+                    "end": str(two_weeks_ago + timedelta(days=12)),
+                },
+                status_code=200,
+            )
+
+        assert resp.data == []
diff --git a/tests/sentry/seer/anomaly_detection/test_store_data.py b/tests/sentry/seer/anomaly_detection/test_store_data.py
index 929d925affca7e..844c94040c99a7 100644
--- a/tests/sentry/seer/anomaly_detection/test_store_data.py
+++ b/tests/sentry/seer/anomaly_detection/test_store_data.py
@@ -1,6 +1,7 @@
 from datetime import datetime
 
-from sentry.seer.anomaly_detection.store_data import fetch_historical_data, format_historical_data
+from sentry.seer.anomaly_detection.store_data import fetch_historical_data
+from sentry.seer.anomaly_detection.utils import format_historical_data
 from sentry.snuba.models import SnubaQuery
 from sentry.testutils.cases import SnubaTestCase
 from sentry.testutils.factories import EventType