Skip to content

Commit

Permalink
Merge branch 'master' into rc-processing-split-pt1
Browse files Browse the repository at this point in the history
  • Loading branch information
kneeyo1 committed Oct 18, 2024
2 parents ff63406 + c3c7968 commit 9a95123
Show file tree
Hide file tree
Showing 147 changed files with 3,863 additions and 1,003 deletions.
26 changes: 25 additions & 1 deletion src/sentry/api/endpoints/organization_dashboard_details.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from django.db.models import F
from django.utils import timezone
from drf_spectacular.utils import extend_schema
from rest_framework.permissions import BasePermission
from rest_framework.request import Request
from rest_framework.response import Response

Expand Down Expand Up @@ -30,9 +31,32 @@
READ_FEATURE = "organizations:dashboards-basic"


class DashboardPermissions(BasePermission):
    """
    DRF permission class enforcing the edit restrictions stored on a
    dashboard's ``DashboardPermissions`` model.

    Object-level checks only apply when the
    ``organizations:dashboards-edit-access`` feature is enabled for the
    dashboard's organization; otherwise this class imposes no restriction.
    """

    # Every verb requires at least org:read; finer-grained edit control is
    # applied per-object in has_object_permission below.
    scope_map = {
        "GET": ["org:read", "org:write", "org:admin"],
        "POST": ["org:read", "org:write", "org:admin"],
        "PUT": ["org:read", "org:write", "org:admin"],
        "DELETE": ["org:read", "org:write", "org:admin"],
    }

    def has_object_permission(self, request: Request, view, obj):
        if isinstance(obj, Dashboard) and features.has(
            "organizations:dashboards-edit-access", obj.organization, actor=request.user
        ):
            # Check if user has permissions to edit this dashboard; dashboards
            # without an explicit permissions row are editable by anyone.
            if hasattr(obj, "permissions"):
                return obj.permissions.has_edit_permissions(request.user.id)
            return True
        # Bug fix: the original fell through here and implicitly returned
        # None, which DRF treats as "deny" — blocking all dashboard access
        # whenever the feature flag is disabled or obj is not a Dashboard.
        # This permission class must be a no-op in those cases.
        return True


class OrganizationDashboardBase(OrganizationEndpoint):
owner = ApiOwner.PERFORMANCE
permission_classes = (OrganizationDashboardsPermission,)
permission_classes = (OrganizationDashboardsPermission, DashboardPermissions)

def convert_args(
self, request: Request, organization_id_or_slug, dashboard_id, *args, **kwargs
Expand Down
44 changes: 25 additions & 19 deletions src/sentry/api/endpoints/organization_traces.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,13 +97,24 @@ class OrganizationTracesSerializer(serializers.Serializer):
query = serializers.ListField(
required=False, allow_empty=True, child=serializers.CharField(allow_blank=True)
)
sort = serializers.CharField(required=False)

def validate_dataset(self, value):
    """Translate the serializer's `dataset` string into a Snuba dataset.

    Raises ParseError (HTTP 400) for any value other than the two
    supported dataset names.
    """
    dataset_by_name = {
        "spans": Dataset.EventsAnalyticsPlatform,
        "spansIndexed": Dataset.SpansIndexed,
    }
    try:
        return dataset_by_name[value]
    except KeyError:
        raise ParseError(detail=f"Unsupported dataset: {value}")

def validate(self, data):
    """Cross-field validation: restrict `sort` for the EAP dataset.

    Only `timestamp` / `-timestamp` sorts are accepted when querying
    Dataset.EventsAnalyticsPlatform; any other sort raises ParseError.
    """
    if data["dataset"] != Dataset.EventsAnalyticsPlatform:
        return data

    sort = data.get("sort")
    if sort is None:
        return data

    field = sort.removeprefix("-")
    if field != "timestamp":
        raise ParseError(detail=f"Unsupported sort: {sort}")
    return data


@contextmanager
Expand Down Expand Up @@ -151,6 +162,7 @@ def get(self, request: Request, organization: Organization) -> Response:
dataset=serialized["dataset"],
snuba_params=snuba_params,
user_queries=serialized.get("query", []),
sort=serialized.get("sort"),
metrics_max=serialized.get("metricsMax"),
metrics_min=serialized.get("metricsMin"),
metrics_operation=serialized.get("metricsOp"),
Expand Down Expand Up @@ -202,7 +214,7 @@ def validate_dataset(self, value):
return Dataset.EventsAnalyticsPlatform
if value == "spansIndexed":
return Dataset.SpansIndexed
raise NotImplementedError
raise ParseError(detail=f"Unsupported dataset: {value}")


@region_silo_endpoint
Expand Down Expand Up @@ -267,7 +279,7 @@ def validate_dataset(self, value):
return Dataset.EventsAnalyticsPlatform
if value == "spansIndexed":
return Dataset.SpansIndexed
raise NotImplementedError
raise ParseError(detail=f"Unsupported dataset: {value}")


@region_silo_endpoint
Expand Down Expand Up @@ -344,6 +356,7 @@ def __init__(
dataset: Dataset,
snuba_params: SnubaParams,
user_queries: list[str],
sort: str | None,
metrics_max: float | None,
metrics_min: float | None,
metrics_operation: str | None,
Expand All @@ -356,6 +369,7 @@ def __init__(
self.dataset = dataset
self.snuba_params = snuba_params
self.user_queries = process_user_queries(snuba_params, user_queries, dataset)
self.sort = sort
self.metrics_max = metrics_max
self.metrics_min = metrics_min
self.metrics_operation = metrics_operation
Expand Down Expand Up @@ -427,6 +441,9 @@ def _execute(self):
traces_breakdown_projects_results=traces_breakdown_projects_results,
)

ordering = {trace_id: i for i, trace_id in enumerate(trace_ids)}
data.sort(key=lambda trace: ordering[trace["trace"]])

return data

def refine_params(self, min_timestamp: datetime, max_timestamp: datetime):
Expand Down Expand Up @@ -607,24 +624,24 @@ def get_traces_matching_span_conditions_in_traces(
def get_traces_matching_span_conditions_query(
self,
snuba_params: SnubaParams,
sort: str | None = None,
) -> tuple[BaseQueryBuilder, str]:
if self.dataset == Dataset.EventsAnalyticsPlatform:
return self.get_traces_matching_span_conditions_query_eap(snuba_params, sort)
return self.get_traces_matching_span_conditions_query_indexed(snuba_params, sort)
return self.get_traces_matching_span_conditions_query_eap(snuba_params)
return self.get_traces_matching_span_conditions_query_indexed(snuba_params)

def get_traces_matching_span_conditions_query_eap(
self,
snuba_params: SnubaParams,
sort: str | None = None,
) -> tuple[BaseQueryBuilder, str]:
if len(self.user_queries) < 2:
timestamp_column = "timestamp"
else:
timestamp_column = "min(timestamp)"

if sort == "-timestamp":
if self.sort == "-timestamp":
orderby = [f"-{timestamp_column}"]
elif self.sort == "timestamp":
orderby = [timestamp_column]
else:
# The orderby is intentionally `None` here as this query is much faster
# if we let Clickhouse decide which order to return the results in.
Expand Down Expand Up @@ -694,21 +711,12 @@ def get_traces_matching_span_conditions_query_eap(
def get_traces_matching_span_conditions_query_indexed(
self,
snuba_params: SnubaParams,
sort: str | None = None,
) -> tuple[BaseQueryBuilder, str]:
if len(self.user_queries) < 2:
timestamp_column = "timestamp"
else:
timestamp_column = "min(timestamp)"

if sort == "-timestamp":
orderby = [f"-{timestamp_column}"]
else:
# The orderby is intentionally `None` here as this query is much faster
# if we let Clickhouse decide which order to return the results in.
# This also means we cannot order by any columns or paginate.
orderby = None

if len(self.user_queries) < 2:
# Optimization: If there is only a condition for a single span,
# we can take the fast path and query without using aggregates.
Expand All @@ -718,7 +726,6 @@ def get_traces_matching_span_conditions_query_indexed(
snuba_params=snuba_params,
query=None,
selected_columns=["trace", timestamp_column],
orderby=orderby,
limit=self.limit,
limitby=("trace", 1),
config=QueryBuilderConfig(
Expand All @@ -735,7 +742,6 @@ def get_traces_matching_span_conditions_query_indexed(
snuba_params=snuba_params,
query=None,
selected_columns=["trace", timestamp_column],
orderby=orderby,
limit=self.limit,
config=QueryBuilderConfig(
auto_aggregations=True,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ def get_time_params(start: datetime, end: datetime) -> MappedParams:
@region_silo_endpoint
class OrganizationTransactionAnomalyDetectionEndpoint(OrganizationEventsEndpointBase):
publish_status = {
"GET": ApiPublishStatus.UNKNOWN,
"GET": ApiPublishStatus.PRIVATE,
}

def has_feature(self, organization, request):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,9 +58,9 @@ def validate(self, data):
@region_silo_endpoint
class ProjectTransactionThresholdOverrideEndpoint(OrganizationEventsV2EndpointBase):
publish_status = {
"DELETE": ApiPublishStatus.UNKNOWN,
"GET": ApiPublishStatus.UNKNOWN,
"POST": ApiPublishStatus.UNKNOWN,
"DELETE": ApiPublishStatus.PRIVATE,
"GET": ApiPublishStatus.PRIVATE,
"POST": ApiPublishStatus.PRIVATE,
}
permission_classes = (ProjectTransactionThresholdOverridePermission,)

Expand Down
6 changes: 6 additions & 0 deletions src/sentry/api/serializers/models/organization.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
SAFE_FIELDS_DEFAULT,
SCRAPE_JAVASCRIPT_DEFAULT,
SENSITIVE_FIELDS_DEFAULT,
TARGET_SAMPLE_RATE_DEFAULT,
UPTIME_AUTODETECTION,
ObjectStatus,
)
Expand Down Expand Up @@ -612,6 +613,11 @@ def serialize( # type: ignore[explicit-override, override]
obj.get_option("sentry:uptime_autodetection", UPTIME_AUTODETECTION)
)

if features.has("organizations:dynamic-sampling-custom", obj, actor=user):
context["targetSampleRate"] = float(
obj.get_option("sentry:target_sample_rate", TARGET_SAMPLE_RATE_DEFAULT)
)

trusted_relays_raw = obj.get_option("sentry:trusted-relays") or []
# serialize trusted relays info into their external form
context["trustedRelays"] = [TrustedRelaySerializer(raw).data for raw in trusted_relays_raw]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
"/api/0/{var}/{issue_id}/events/{event_id}/": {"GET"},
"/api/0/{var}/{issue_id}/{var}/": {"GET", "POST"},
"/api/0/{var}/{issue_id}/{var}/{note_id}/": {"DELETE", "PUT"},
"/api/0/{var}/{issue_id}/hashes/": {"GET", "DELETE"},
"/api/0/{var}/{issue_id}/hashes/": {"GET", "DELETE", "PUT"},
"/api/0/{var}/{issue_id}/reprocessing/": {"POST"},
"/api/0/{var}/{issue_id}/stats/": {"GET"},
"/api/0/{var}/{issue_id}/tags/": {"GET"},
Expand Down Expand Up @@ -82,6 +82,7 @@
"/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/hashes/": {
"GET",
"DELETE",
"PUT",
},
"/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/reprocessing/": {"POST"},
"/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/stats/": {"GET"},
Expand Down
32 changes: 22 additions & 10 deletions src/sentry/data_secrecy/api/waive_data_secrecy.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import logging
from collections.abc import Mapping
from typing import Any

Expand All @@ -20,6 +21,8 @@
from sentry.data_secrecy.models import DataSecrecyWaiver
from sentry.models.organization import Organization

logger = logging.getLogger("sentry.data_secrecy")


class WaiveDataSecrecyPermission(OrganizationPermission):
scope_map = {
Expand Down Expand Up @@ -124,20 +127,29 @@ def delete(self, request: Request, organization: Organization):
Reinstates data secrecy for an organization.
"""
try:
logger.info("Reinstating data secrecy for organization %s", organization.id)
ds = DataSecrecyWaiver.objects.get(organization=organization)
ds.delete()

self.create_audit_entry(
request=request,
organization=organization,
event=audit_log.get_event_id("DATA_SECRECY_REINSTATED"),
)
return Response(
{"detail": "Data secrecy has been reinstated."},
status=status.HTTP_204_NO_CONTENT,
logger.info(
"Data secrecy waiver found for organization %s",
organization.id,
extra={"ds": ds.id},
)
except DataSecrecyWaiver.DoesNotExist:
logger.info("No data secrecy waiver found for organization %s", organization.id)
return Response(
{"detail": "No data secrecy waiver found for this organization."},
status=status.HTTP_404_NOT_FOUND,
)

ds.delete()
logger.info("Data secrecy waiver deleted for organization %s", organization.id)

self.create_audit_entry(
request=request,
organization=organization,
event=audit_log.get_event_id("DATA_SECRECY_REINSTATED"),
)
return Response(
{"detail": "Data secrecy has been reinstated."},
status=status.HTTP_204_NO_CONTENT,
)
6 changes: 5 additions & 1 deletion src/sentry/deletions/defaults/group.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from collections.abc import Mapping, Sequence
from typing import Any

from sentry_sdk import set_tag
from snuba_sdk import DeleteQuery, Request

from sentry import eventstore, eventstream, features, models, nodestore
Expand Down Expand Up @@ -202,7 +203,7 @@ def delete_events_from_snuba(self) -> None:
for project_id, group_ids in self.project_groups.items():
query = DeleteQuery(
self.dataset.value,
column_conditions={"project_id": [project_id], "group_id": group_ids},
column_conditions={"project_id": [project_id], "group_id": list(group_ids)},
)
request = Request(
dataset=self.dataset.value,
Expand Down Expand Up @@ -268,6 +269,9 @@ def _delete_children(self, instance_list: Sequence[Group]) -> None:
)

if issue_platform_groups:
# This helps with creating custom Sentry alerts;
# remove when #proj-snuba-lightweight_delets is done
set_tag("issue_platform_deletion", True)
params = {"groups": issue_platform_groups}
child_relations.append(
BaseRelation(params=params, task=IssuePlatformEventsDeletionTask)
Expand Down
4 changes: 0 additions & 4 deletions src/sentry/features/temporary.py
Original file line number Diff line number Diff line change
Expand Up @@ -497,14 +497,10 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:transaction-name-normalize", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, default=True, api_expose=False)
# Sanitize transaction names in the ingestion pipeline. # Deprecated
manager.add("organizations:transaction-name-sanitization", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enables creation and full updating of uptime monitors via the api
manager.add("organizations:uptime-api-create-update", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enables automatic hostname detection in uptime
manager.add("organizations:uptime-automatic-hostname-detection", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enables automatic subscription creation in uptime
manager.add("organizations:uptime-automatic-subscription-creation", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enabled returning uptime monitors from the rule api
manager.add("organizations:uptime-rule-api", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enable creating issues via the issue platform
manager.add("organizations:uptime-create-issues", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enables uptime related settings for projects and orgs
Expand Down
Loading

0 comments on commit 9a95123

Please sign in to comment.