feat(cra-metrics): Adds crash rate alerts over metrics [INGEST-629] [INGEST-632] (#30400)

* feat(cra-metrics): Adds crash rate alerts over metrics

Adds the ability to create crash rate alerts over metrics
for sessions. Adds an `EntitySubscription` class that
serves as an abstraction layer for all entity subscriptions.
Ahmed Etefy · 3 years ago · commit 6457270942
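
A minimal usage sketch (not part of the commit) showing how the pieces below fit together; the IDs and query are placeholders:

    # Hypothetical caller, mirroring logic.py / tasks.py in this diff:
    from sentry.snuba.entity_subscription import map_aggregate_to_entity_subscription
    from sentry.snuba.models import QueryDatasets
    from sentry.snuba.tasks import build_snuba_filter

    entity_subscription = map_aggregate_to_entity_subscription(
        dataset=QueryDatasets.SESSIONS,
        aggregate="percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate",
        extra_fields={"org_id": 1, "event_types": None},
    )
    snuba_filter = build_snuba_filter(
        entity_subscription,
        "release:1.0",  # query (placeholder)
        None,  # environment
        params={"project_id": [1]},
    )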

+ 1 - 0
mypy.ini

@@ -59,6 +59,7 @@ files = src/sentry/api/bases/external_actor.py,
        src/sentry/shared_integrations/constants.py,
        src/sentry/snuba/outcomes.py,
        src/sentry/snuba/query_subscription_consumer.py,
+        src/sentry/snuba/entity_subscription.py,
        src/sentry/spans/,
        src/sentry/tasks/app_store_connect.py,
        src/sentry/tasks/low_priority_symbolication.py,

+ 1 - 1
src/sentry/api/serializers/models/incident.py

@@ -11,8 +11,8 @@ from sentry.incidents.models import (
     IncidentSeen,
     IncidentSubscription,
 )
+from sentry.snuba.entity_subscription import apply_dataset_query_conditions
 from sentry.snuba.models import QueryDatasets
-from sentry.snuba.tasks import apply_dataset_query_conditions


 @register(Incident)

+ 8 - 0
src/sentry/exceptions.py

@@ -69,3 +69,11 @@ class InvalidSearchQuery(Exception):

 class UnableToAcceptMemberInvitationException(Exception):
     pass
+
+
+class UnsupportedQuerySubscription(Exception):
+    pass
+
+
+class InvalidQuerySubscription(Exception):
+    pass

+ 50 - 17
src/sentry/incidents/endpoints/serializers.py

@@ -1,18 +1,20 @@
 import logging
 import operator
+from copy import copy
 from datetime import timedelta

 from django.db import transaction
 from django.utils import timezone
 from django.utils.encoding import force_text
 from rest_framework import serializers
+from snuba_sdk.legacy import json_to_snql

 from sentry import analytics
 from sentry.api.fields.actor import ActorField
 from sentry.api.serializers.rest_framework.base import CamelSnakeModelSerializer
 from sentry.api.serializers.rest_framework.environment import EnvironmentField
 from sentry.api.serializers.rest_framework.project import ProjectField
-from sentry.exceptions import InvalidSearchQuery
+from sentry.exceptions import InvalidSearchQuery, UnsupportedQuerySubscription
 from sentry.incidents.logic import (
     CRITICAL_TRIGGER_LABEL,
     WARNING_TRIGGER_LABEL,
@@ -43,10 +45,12 @@ from sentry.mediators import alert_rule_actions
 from sentry.models import OrganizationMember, SentryAppInstallation, Team, User
 from sentry.shared_integrations.exceptions import ApiRateLimitedError
 from sentry.snuba.dataset import Dataset
+from sentry.snuba.entity_subscription import map_aggregate_to_entity_subscription
 from sentry.snuba.models import QueryDatasets, SnubaQueryEventType
 from sentry.snuba.tasks import build_snuba_filter
+from sentry.utils import json
 from sentry.utils.compat import zip
-from sentry.utils.snuba import raw_query
+from sentry.utils.snuba import raw_snql_query

 logger = logging.getLogger(__name__)

@@ -443,7 +447,8 @@ class AlertRuleSerializer(CamelSnakeModelSerializer):
             )

     def validate_event_types(self, event_types):
-        if self.initial_data.get("dataset") == Dataset.Sessions.value:
+        dataset = self.initial_data.get("dataset")
+        if dataset not in [Dataset.Events.value, Dataset.Transactions.value]:
             return []
         try:
             return [SnubaQueryEventType.EventType[event_type.upper()] for event_type in event_types]
@@ -477,13 +482,24 @@ class AlertRuleSerializer(CamelSnakeModelSerializer):
             # the query. We don't use the returned data anywhere, so it doesn't
             # matter which.
             project_id = list(self.context["organization"].project_set.all()[:1])
+
+        try:
+            entity_subscription = map_aggregate_to_entity_subscription(
+                dataset=QueryDatasets(data["dataset"]),
+                aggregate=data["aggregate"],
+                extra_fields={
+                    "org_id": project_id[0].organization_id,
+                    "event_types": data.get("event_types"),
+                },
+            )
+        except UnsupportedQuerySubscription as e:
+            raise serializers.ValidationError(f"{e}")
+
         try:
             snuba_filter = build_snuba_filter(
-                data["dataset"],
+                entity_subscription,
                 data["query"],
-                data["aggregate"],
                 data.get("environment"),
-                data.get("event_types"),
                 params={
                     "project_id": [p.id for p in project_id],
                     "start": timezone.now() - timedelta(minutes=10),
@@ -503,18 +519,35 @@ class AlertRuleSerializer(CamelSnakeModelSerializer):
             dataset = Dataset(data["dataset"].value)
             self._validate_time_window(dataset, data.get("time_window"))

+            conditions = copy(snuba_filter.conditions)
+            time_col = entity_subscription.time_col
+            conditions += [
+                [time_col, ">=", snuba_filter.start],
+                [time_col, "<", snuba_filter.end],
+            ]
+
+            body = {
+                "project": project_id[0].id,
+                "project_id": project_id[0].id,
+                "aggregations": snuba_filter.aggregations,
+                "conditions": conditions,
+                "filter_keys": snuba_filter.filter_keys,
+                "having": snuba_filter.having,
+                "dataset": dataset.value,
+                "limit": 1,
+                **entity_subscription.get_entity_extra_params(),
+            }
+
             try:
-                raw_query(
-                    aggregations=snuba_filter.aggregations,
-                    start=snuba_filter.start,
-                    end=snuba_filter.end,
-                    conditions=snuba_filter.conditions,
-                    filter_keys=snuba_filter.filter_keys,
-                    having=snuba_filter.having,
-                    dataset=dataset,
-                    limit=1,
-                    referrer="alertruleserializer.test_query",
+                snql_query = json_to_snql(body, entity_subscription.entity_key.value)
+                snql_query.validate()
+            except Exception as e:
+                raise serializers.ValidationError(
+                    str(e), params={"params": json.dumps(body), "dataset": data["dataset"].value}
                 )
+
+            try:
+                raw_snql_query(snql_query, referrer="alertruleserializer.test_query")
             except Exception:
                 logger.exception("Error while validating snuba alert rule query")
                 raise serializers.ValidationError(
@@ -582,7 +615,7 @@ class AlertRuleSerializer(CamelSnakeModelSerializer):

     @staticmethod
     def _validate_time_window(dataset, time_window):
-        if dataset == Dataset.Sessions:
+        if dataset in [Dataset.Sessions, Dataset.Metrics]:
             # Validate time window
             if time_window not in CRASH_RATE_ALERTS_ALLOWED_TIME_WINDOWS:
                 raise serializers.ValidationError(
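
The serializer now validates the test query by translating a legacy-style body into SnQL with `json_to_snql`, validating it locally, and only then executing it via `raw_snql_query`. A hedged sketch of that round trip (field values are placeholders, not taken from this diff):

    # Translate a legacy Snuba body into SnQL and validate it before querying.
    from snuba_sdk.legacy import json_to_snql

    body = {
        "project": 1,
        "project_id": 1,
        "aggregations": [["count()", "", "count"]],
        "conditions": [["type", "=", "error"]],
        "filter_keys": {"project_id": [1]},
        "dataset": "events",
        "limit": 1,
    }
    snql_query = json_to_snql(body, "events")  # second argument is the entity key
    snql_query.validate()  # raises locally on a malformed query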

+ 7 - 3
src/sentry/incidents/logic.py

@@ -35,6 +35,7 @@ from sentry.search.events.fields import resolve_field
 from sentry.search.events.filter import get_filter
 from sentry.shared_integrations.exceptions import DuplicateDisplayNameError
 from sentry.snuba.dataset import Dataset
+from sentry.snuba.entity_subscription import map_aggregate_to_entity_subscription
 from sentry.snuba.models import QueryDatasets
 from sentry.snuba.subscriptions import (
     bulk_create_snuba_subscriptions,
@@ -288,12 +289,15 @@ def build_incident_query_params(incident, start=None, end=None, windowed_stats=F
         params["project_id"] = project_ids
         params["project_id"] = project_ids
 
 
     snuba_query = incident.alert_rule.snuba_query
     snuba_query = incident.alert_rule.snuba_query
+    entity_subscription = map_aggregate_to_entity_subscription(
+        dataset=QueryDatasets(snuba_query.dataset),
+        aggregate=snuba_query.aggregate,
+        extra_fields={"org_id": incident.organization.id, "event_types": snuba_query.event_types},
+    )
     snuba_filter = build_snuba_filter(
-        QueryDatasets(snuba_query.dataset),
+        entity_subscription,
         snuba_query.query,
-        snuba_query.aggregate,
         snuba_query.environment,
-        snuba_query.event_types,
         params=params,
     )

+ 10 - 4
src/sentry/incidents/subscription_processor.py

@@ -32,7 +32,7 @@ from sentry.models import Project
 from sentry.release_health.metrics import reverse_tag_value, tag_key
 from sentry.snuba.dataset import Dataset
 from sentry.snuba.models import QueryDatasets
-from sentry.snuba.tasks import build_snuba_filter
+from sentry.snuba.tasks import build_snuba_filter, map_aggregate_to_entity_subscription
 from sentry.utils import metrics, redis
 from sentry.utils.compat import zip
 from sentry.utils.dates import to_datetime, to_timestamp
@@ -176,13 +176,19 @@ class SubscriptionProcessor:
         snuba_query = self.subscription.snuba_query
         start = end - timedelta(seconds=snuba_query.time_window)

+        entity_subscription = map_aggregate_to_entity_subscription(
+            dataset=QueryDatasets(snuba_query.dataset),
+            aggregate=snuba_query.aggregate,
+            extra_fields={
+                "org_id": self.subscription.project.organization_id,
+                "event_types": snuba_query.event_types,
+            },
+        )
         try:
             snuba_filter = build_snuba_filter(
-                QueryDatasets(snuba_query.dataset),
+                entity_subscription,
                 snuba_query.query,
-                snuba_query.aggregate,
                 snuba_query.environment,
-                snuba_query.event_types,
                 params={
                     "project_id": [self.subscription.project_id],
                     "start": start,

+ 3 - 0
src/sentry/snuba/dataset.py

@@ -14,6 +14,9 @@ class Dataset(Enum):

 @unique
 class EntityKey(Enum):
+    Events = "events"
+    Sessions = "sessions"
+    Transactions = "transactions"
     MetricsSets = "metrics_sets"
     MetricsSets = "metrics_sets"
     MetricsCounters = "metrics_counters"
     MetricsCounters = "metrics_counters"
     MetricsDistributions = "metrics_distributions"
     MetricsDistributions = "metrics_distributions"

+ 300 - 0
src/sentry/snuba/entity_subscription.py

@@ -0,0 +1,300 @@
+import re
+from abc import ABC, abstractmethod
+from copy import copy
+from dataclasses import dataclass
+from typing import Any, List, Mapping, Optional, TypedDict
+
+from sentry.constants import CRASH_RATE_ALERT_SESSION_COUNT_ALIAS
+from sentry.eventstore import Filter
+from sentry.exceptions import InvalidQuerySubscription, UnsupportedQuerySubscription
+from sentry.models import Environment
+from sentry.release_health.metrics import get_tag_values_list, metric_id, tag_key, tag_value
+from sentry.search.events.fields import resolve_field_list
+from sentry.search.events.filter import get_filter
+from sentry.sentry_metrics.sessions import SessionMetricKey
+from sentry.snuba.dataset import EntityKey
+from sentry.snuba.models import QueryDatasets, SnubaQueryEventType
+from sentry.utils.snuba import Dataset, resolve_column, resolve_snuba_aliases
+
+# TODO: If we want to support security events here we'll need a way to
+# differentiate within the dataset. For now we can just assume all subscriptions
+# created within this dataset are just for errors.
+DATASET_CONDITIONS: Mapping[QueryDatasets, str] = {
+    QueryDatasets.EVENTS: "event.type:error",
+    QueryDatasets.TRANSACTIONS: "event.type:transaction",
+}
+ENTITY_TIME_COLUMNS: Mapping[EntityKey, str] = {
+    EntityKey.Events: "timestamp",
+    EntityKey.Sessions: "started",
+    EntityKey.Transactions: "finish_ts",
+    EntityKey.MetricsCounters: "timestamp",
+}
+CRASH_RATE_ALERT_AGGREGATE_RE = (
+    r"^percentage\([ ]*(sessions_crashed|users_crashed)[ ]*\,[ ]*(sessions|users)[ ]*\)"
+)
+
+
+def apply_dataset_query_conditions(
+    dataset: QueryDatasets,
+    query: str,
+    event_types: Optional[List[SnubaQueryEventType]],
+    discover: bool = False,
+) -> str:
+    """
+    Applies query dataset conditions to a query. This essentially turns a query like
+    'release:123 or release:456' into '(event.type:error) AND (release:123 or release:456)'.
+    :param dataset: The `QueryDataset` that the query applies to
+    :param query: A string containing the query to apply conditions to
+    :param event_types: A list of EventType(s) to apply to the query
+    :param discover: Whether this is intended for use with the discover dataset or not.
+    When False, we won't modify queries for `QueryDatasets.TRANSACTIONS` at all. This is
+    because the discover dataset requires that we always specify `event.type` so we can
+    differentiate between errors and transactions, but the TRANSACTIONS dataset doesn't
+    need it specified, and `event.type` ends up becoming a tag search.
+    """
+    if not discover and dataset == QueryDatasets.TRANSACTIONS:
+        return query
+
+    if event_types:
+        event_type_conditions = " OR ".join(
+            f"event.type:{event_type.name.lower()}" for event_type in event_types
+        )
+    elif dataset in DATASET_CONDITIONS:
+        event_type_conditions = DATASET_CONDITIONS[dataset]
+    else:
+        return query
+
+    if query:
+        return f"({event_type_conditions}) AND ({query})"
+
+    return event_type_conditions
+
+
+class _EntitySpecificParams(TypedDict, total=False):
+    org_id: int
+    event_types: Optional[List[SnubaQueryEventType.EventType]]
+
+
+@dataclass
+class _EntitySubscription:
+    entity_key: EntityKey
+    dataset: QueryDatasets
+    time_col: str
+
+
+class BaseEntitySubscription(ABC, _EntitySubscription):
+    """
+    An abstraction layer for all different entity subscriptions. It is important to note that
+    this abstraction layer was added because the subscription logic was too coupled to the
+    events and transactions entities, which was fine initially but now as we are adding more
+    entities to support subscriptions (alerts), we need to decouple this logic.
+    """
+
+    def __init__(self, aggregate: str, extra_fields: Optional[_EntitySpecificParams] = None):
+        self.time_col = ENTITY_TIME_COLUMNS[self.entity_key]
+
+    @abstractmethod
+    def build_snuba_filter(
+        self,
+        query: str,
+        environment: Optional[Environment],
+        params: Optional[Mapping[str, Any]] = None,
+    ) -> Filter:
+        raise NotImplementedError
+
+    @abstractmethod
+    def get_entity_extra_params(self) -> Mapping[str, Any]:
+        raise NotImplementedError
+
+
+class BaseEventsAndTransactionEntitySubscription(BaseEntitySubscription, ABC):
+    def __init__(self, aggregate: str, extra_fields: Optional[_EntitySpecificParams] = None):
+        super().__init__(aggregate, extra_fields)
+        self.aggregate = aggregate
+        self.event_types = None
+        if extra_fields:
+            self.event_types = extra_fields.get("event_types")
+
+    def build_snuba_filter(
+        self,
+        query: str,
+        environment: Optional[Environment],
+        params: Optional[Mapping[str, Any]] = None,
+    ) -> Filter:
+        resolve_func = resolve_column(Dataset(self.dataset.value))
+
+        query = apply_dataset_query_conditions(QueryDatasets(self.dataset), query, self.event_types)
+        snuba_filter = get_filter(query, params=params)
+        snuba_filter.update_with(
+            resolve_field_list([self.aggregate], snuba_filter, auto_fields=False)
+        )
+        snuba_filter = resolve_snuba_aliases(snuba_filter, resolve_func)[0]
+        if snuba_filter.group_ids:
+            snuba_filter.conditions.append(
+                ["group_id", "IN", list(map(int, snuba_filter.group_ids))]
+            )
+        if environment:
+            snuba_filter.conditions.append(["environment", "=", environment.name])
+        return snuba_filter
+
+    def get_entity_extra_params(self) -> Mapping[str, Any]:
+        return {}
+
+
+class EventsEntitySubscription(BaseEventsAndTransactionEntitySubscription):
+    dataset = QueryDatasets.EVENTS
+    entity_key = EntityKey.Events
+
+
+class TransactionsEntitySubscription(BaseEventsAndTransactionEntitySubscription):
+    dataset = QueryDatasets.TRANSACTIONS
+    entity_key = EntityKey.Transactions
+
+
+class SessionsEntitySubscription(BaseEntitySubscription):
+    dataset = QueryDatasets.SESSIONS
+    entity_key = EntityKey.Sessions
+
+    def __init__(self, aggregate: str, extra_fields: Optional[_EntitySpecificParams] = None):
+        super().__init__(aggregate, extra_fields)
+        self.aggregate = aggregate
+        if not extra_fields or "org_id" not in extra_fields:
+            raise InvalidQuerySubscription(
+                "org_id is a required param when "
+                "building snuba filter for a metrics subscription"
+            )
+        self.org_id = extra_fields["org_id"]
+
+    def build_snuba_filter(
+        self,
+        query: str,
+        environment: Optional[Environment],
+        params: Optional[Mapping[str, Any]] = None,
+    ) -> Filter:
+        resolve_func = resolve_column(Dataset(self.dataset.value))
+        aggregations = [self.aggregate]
+        # This aggregation is added to return the total number of sessions in
+        # crash rate alerts, which is used to identify whether we are below a
+        # general minimum alert threshold
+        count_col = re.search(r"(sessions|users)", self.aggregate)
+        if not count_col:
+            raise UnsupportedQuerySubscription(
+                "Only crash free percentage queries are supported for subscriptions "
+                "over the sessions dataset"
+            )
+        count_col_matched = count_col.group()
+
+        aggregations += [f"identity({count_col_matched}) AS {CRASH_RATE_ALERT_SESSION_COUNT_ALIAS}"]
+        functions_acl = ["identity"]
+        snuba_filter = get_filter(query, params=params)
+        snuba_filter.update_with(
+            resolve_field_list(
+                aggregations, snuba_filter, auto_fields=False, functions_acl=functions_acl
+            )
+        )
+        snuba_filter = resolve_snuba_aliases(snuba_filter, resolve_func)[0]
+        if environment:
+            snuba_filter.conditions.append(["environment", "=", environment.name])
+        return snuba_filter
+
+    def get_entity_extra_params(self) -> Mapping[str, Any]:
+        return {"organization": self.org_id}
+
+
+class MetricsCountersEntitySubscription(BaseEntitySubscription):
+    dataset = QueryDatasets.METRICS
+    entity_key = EntityKey.MetricsCounters
+
+    def __init__(self, aggregate: str, extra_fields: Optional[_EntitySpecificParams] = None):
+        super().__init__(aggregate, extra_fields)
+        self.aggregate = aggregate
+        if not extra_fields or "org_id" not in extra_fields:
+            raise InvalidQuerySubscription(
+                "org_id is a required param when "
+                "building snuba filter for a metrics subscription"
+            )
+        self.org_id = extra_fields["org_id"]
+        self.session_status = tag_key(self.org_id, "session.status")
+
+    def get_query_groupby(self) -> List[str]:
+        return [self.session_status]
+
+    def build_snuba_filter(
+        self,
+        query: str,
+        environment: Optional[Environment],
+        params: Optional[Mapping[str, Any]] = None,
+    ) -> Filter:
+        snuba_filter = get_filter(query, params=params)
+        conditions = copy(snuba_filter.conditions)
+        session_status_tag_values = get_tag_values_list(self.org_id, ["crashed", "init"])
+        snuba_filter.update_with(
+            {
+                "aggregations": [["sum(value)", None, "value"]],
+                "conditions": [
+                    ["metric_id", "=", metric_id(self.org_id, SessionMetricKey.SESSION)],
+                    [self.session_status, "IN", session_status_tag_values],
+                ],
+                "groupby": self.get_query_groupby(),
+            }
+        )
+        if environment:
+            snuba_filter.conditions.append(
+                [tag_key(self.org_id, "environment"), "=", tag_value(self.org_id, environment.name)]
+            )
+        if query and len(conditions) > 0:
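+            # get_filter() parses e.g. "release:foo" into a plain
+            # ["release", "=", "foo"] condition; rewrite it onto the
+            # indexer-resolved tag key/value pair the metrics dataset expects.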
+            release_conditions = [
+                condition for condition in conditions if condition[0] == "release"
+            ]
+
+            for release_condition in release_conditions:
+                snuba_filter.conditions.append(
+                    [
+                        tag_key(self.org_id, release_condition[0]),
+                        release_condition[1],
+                        tag_value(self.org_id, release_condition[2]),
+                    ]
+                )
+
+        return snuba_filter
+
+    def get_entity_extra_params(self) -> Mapping[str, Any]:
+        return {"organization": self.org_id, "groupby": self.get_query_groupby()}
+
+
+def map_aggregate_to_entity_subscription(
+    dataset: QueryDatasets, aggregate: str, extra_fields: Optional[_EntitySpecificParams] = None
+) -> BaseEntitySubscription:
+    """
+    Routes to the correct `EntitySubscription` instance based on the dataset.
+    For the sessions and metrics datasets, it additionally validates the
+    aggregate before returning the instance.
+    """
+    entity_subscription: BaseEntitySubscription
+    if dataset == QueryDatasets.SESSIONS:
+        match = re.match(CRASH_RATE_ALERT_AGGREGATE_RE, aggregate)
+        if not match:
+            raise UnsupportedQuerySubscription(
+                "Only crash free percentage queries are supported for subscriptions "
+                "over the sessions dataset"
+            )
+        entity_subscription = SessionsEntitySubscription(aggregate, extra_fields)
+    elif dataset == QueryDatasets.TRANSACTIONS:
+        entity_subscription = TransactionsEntitySubscription(aggregate, extra_fields)
+    elif dataset == QueryDatasets.METRICS:
+        match = re.match(CRASH_RATE_ALERT_AGGREGATE_RE, aggregate)
+        if not match:
+            raise UnsupportedQuerySubscription(
+                "Only crash free percentage queries are supported for subscriptions "
+                "over the metrics dataset"
+            )
+
+        count_col_matched = match.group(2)
+        if count_col_matched == "sessions":
+            entity_subscription = MetricsCountersEntitySubscription(aggregate, extra_fields)
+        else:
+            raise UnsupportedQuerySubscription(
+                "Crash Free Users subscriptions are not supported yet"
+            )
+    else:
+        entity_subscription = EventsEntitySubscription(aggregate, extra_fields)
+    return entity_subscription
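
A hedged illustration of the routing above (org_id is a placeholder; the metrics path additionally assumes the relevant tag keys are recorded in the metrics indexer, as the tests below arrange):

    # metrics dataset + sessions-based crash rate -> MetricsCountersEntitySubscription
    sub = map_aggregate_to_entity_subscription(
        dataset=QueryDatasets.METRICS,
        aggregate="percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate",
        extra_fields={"org_id": 1},
    )
    assert isinstance(sub, MetricsCountersEntitySubscription)
    assert sub.get_entity_extra_params() == {
        "organization": 1,
        "groupby": sub.get_query_groupby(),
    }
    # A users-based crash rate over metrics raises UnsupportedQuerySubscription
    # ("Crash Free Users subscriptions are not supported yet"), and any
    # non-crash-rate aggregate over sessions/metrics is rejected the same way.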

+ 27 - 97
src/sentry/snuba/tasks.py

@@ -1,72 +1,27 @@
 import logging
-import re
 from datetime import timedelta
+from typing import Any, Mapping, Optional

 import sentry_sdk
 from django.utils import timezone
 from snuba_sdk.legacy import json_to_snql

-from sentry.constants import CRASH_RATE_ALERT_SESSION_COUNT_ALIAS
-from sentry.search.events.fields import resolve_field_list
-from sentry.search.events.filter import get_filter
+from sentry.eventstore import Filter
+from sentry.models import Environment
+from sentry.snuba.entity_subscription import (
+    BaseEntitySubscription,
+    map_aggregate_to_entity_subscription,
+)
 from sentry.snuba.models import QueryDatasets, QuerySubscription
 from sentry.tasks.base import instrumented_task
 from sentry.utils import json, metrics
-from sentry.utils.snuba import (
-    Dataset,
-    SnubaError,
-    _snuba_pool,
-    resolve_column,
-    resolve_snuba_aliases,
-)
+from sentry.utils.snuba import SnubaError, _snuba_pool

 logger = logging.getLogger(__name__)


-# TODO: If we want to support security events here we'll need a way to
-# differentiate within the dataset. For now we can just assume all subscriptions
-# created within this dataset are just for errors.
-DATASET_CONDITIONS = {
-    QueryDatasets.EVENTS: "event.type:error",
-    QueryDatasets.TRANSACTIONS: "event.type:transaction",
-}
 SUBSCRIPTION_STATUS_MAX_AGE = timedelta(minutes=10)


-def apply_dataset_query_conditions(dataset, query, event_types, discover=False):
-    """
-    Applies query dataset conditions to a query. This essentially turns a query like
-    'release:123 or release:456' into '(event.type:error) AND (release:123 or release:456)'.
-    :param dataset: The `QueryDataset` that the query applies to
-    :param query: A string containing query to apply conditions to
-    :param event_types: A list of EventType(s) to apply to the query
-    :param discover: Whether this is intended for use with the discover dataset or not.
-    When False, we won't modify queries for `QueryDatasets.TRANSACTIONS` at all. This is
-    because the discover dataset requires that we always specify `event.type` so we can
-    differentiate between errors and transactions, but the TRANSACTIONS dataset doesn't
-    need it specified, and `event.type` ends up becoming a tag search.
-    """
-    if not discover and dataset == QueryDatasets.TRANSACTIONS:
-        return query
-
-    if dataset == QueryDatasets.SESSIONS:
-        return query
-
-    if event_types:
-        event_type_conditions = " OR ".join(
-            f"event.type:{event_type.name.lower()}" for event_type in event_types
-        )
-    elif dataset in DATASET_CONDITIONS:
-        event_type_conditions = DATASET_CONDITIONS[dataset]
-    else:
-        return query
-
-    if query:
-        return f"({event_type_conditions}) AND ({query})"
-
-    return event_type_conditions
-
-
 @instrumented_task(
     name="sentry.snuba.tasks.create_subscription_in_snuba",
     queue="subscriptions",
@@ -173,48 +128,29 @@ def delete_subscription_from_snuba(query_subscription_id, **kwargs):
         subscription.update(subscription_id=None)


-def build_snuba_filter(dataset, query, aggregate, environment, event_types, params=None):
-    resolve_func = {
-        QueryDatasets.EVENTS: resolve_column(Dataset.Events),
-        QueryDatasets.SESSIONS: resolve_column(Dataset.Sessions),
-        QueryDatasets.TRANSACTIONS: resolve_column(Dataset.Transactions),
-    }[dataset]
-
-    functions_acl = None
-
-    aggregations = [aggregate]
-    if dataset == QueryDatasets.SESSIONS:
-        # This aggregation is added to return the total number of sessions in crash
-        # rate alerts that is used to identify if we are below a general minimum alert threshold
-        count_col = re.search(r"(sessions|users)", aggregate)
-        count_col_matched = count_col.group()
-
-        aggregations += [f"identity({count_col_matched}) AS {CRASH_RATE_ALERT_SESSION_COUNT_ALIAS}"]
-        functions_acl = ["identity"]
-
-    query = apply_dataset_query_conditions(dataset, query, event_types)
-    snuba_filter = get_filter(query, params=params)
-    snuba_filter.update_with(
-        resolve_field_list(
-            aggregations, snuba_filter, auto_fields=False, functions_acl=functions_acl
-        )
-    )
-    snuba_filter = resolve_snuba_aliases(snuba_filter, resolve_func)[0]
-    if snuba_filter.group_ids:
-        snuba_filter.conditions.append(["group_id", "IN", list(map(int, snuba_filter.group_ids))])
-    if environment:
-        snuba_filter.conditions.append(["environment", "=", environment.name])
-    return snuba_filter
+def build_snuba_filter(
+    entity_subscription: BaseEntitySubscription,
+    query: str,
+    environment: Optional[Environment],
+    params: Optional[Mapping[str, Any]] = None,
+) -> Filter:
+    return entity_subscription.build_snuba_filter(query, environment, params)


-def _create_in_snuba(subscription):
+def _create_in_snuba(subscription: QuerySubscription) -> str:
     snuba_query = subscription.snuba_query
+    entity_subscription = map_aggregate_to_entity_subscription(
+        dataset=QueryDatasets(snuba_query.dataset),
+        aggregate=snuba_query.aggregate,
+        extra_fields={
+            "org_id": subscription.project.organization_id,
+            "event_types": snuba_query.event_types,
+        },
+    )
     snuba_filter = build_snuba_filter(
-        QueryDatasets(snuba_query.dataset),
+        entity_subscription,
         snuba_query.query,
-        snuba_query.aggregate,
         snuba_query.environment,
-        snuba_query.event_types,
     )

     body = {
@@ -225,18 +161,12 @@ def _create_in_snuba(subscription):
         "aggregations": snuba_filter.aggregations,
         "aggregations": snuba_filter.aggregations,
         "time_window": snuba_query.time_window,
         "time_window": snuba_query.time_window,
         "resolution": snuba_query.resolution,
         "resolution": snuba_query.resolution,
+        **entity_subscription.get_entity_extra_params(),
     }
     }
 
 
-    if Dataset(snuba_query.dataset) == Dataset.Sessions:
-        body.update(
-            {
-                "organization": subscription.project.organization_id,
-            }
-        )
-
     try:
         metrics.incr("snuba.snql.subscription.create", tags={"dataset": snuba_query.dataset})
-        snql_query = json_to_snql(body, snuba_query.dataset)
+        snql_query = json_to_snql(body, entity_subscription.entity_key.value)
         snql_query.validate()
         body["query"] = str(snql_query)
         body["type"] = "delegate"  # mark this as a combined subscription
@@ -249,7 +179,7 @@ def _create_in_snuba(subscription):

     response = _snuba_pool.urlopen(
         "POST",
-        f"/{snuba_query.dataset}/subscriptions",
+        f"/{snuba_query.dataset}/{entity_subscription.entity_key.value}/subscriptions",
         body=json.dumps(body),
     )
     if response.status != 202:
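
Net effect in `_create_in_snuba`: the request body gains entity-specific extras and the create endpoint is now scoped by entity key. An illustrative body shape for a metrics crash-rate subscription (placeholder values; real tag keys come from the metrics indexer):

    # POSTed to /metrics/metrics_counters/subscriptions
    # (previously POST /metrics/subscriptions):
    body = {
        "aggregations": snuba_filter.aggregations,
        "time_window": snuba_query.time_window,
        "resolution": snuba_query.resolution,
        # merged in via get_entity_extra_params() for metrics counters:
        "organization": 1,       # org id (placeholder)
        "groupby": ["tags[8]"],  # indexer-resolved "session.status" tag key
    }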

+ 41 - 0
tests/sentry/incidents/endpoints/test_project_alert_rule_index.py

@@ -8,6 +8,9 @@ from freezegun import freeze_time
 from sentry.api.serializers import serialize
 from sentry.incidents.models import AlertRule, AlertRuleTrigger, AlertRuleTriggerAction
 from sentry.models import Integration
+from sentry.sentry_metrics import indexer
+from sentry.sentry_metrics.sessions import SessionMetricKey
+from sentry.snuba.dataset import Dataset
 from sentry.snuba.models import QueryDatasets
 from sentry.testutils import APITestCase
 from sentry.testutils.helpers.datetime import before_now
@@ -636,6 +639,18 @@ class AlertRuleCreateEndpointTestCrashRateAlert(APITestCase):
             "30min, 1h, 2h, 4h, 12h and 24h"
             "30min, 1h, 2h, 4h, 12h and 24h"
         )
         )
 
 
+    def test_simple_crash_rate_alerts_for_non_supported_aggregate(self):
+        self.valid_alert_rule.update({"aggregate": "count(sessions)"})
+        with self.feature(["organizations:incidents", "organizations:performance-view"]):
+            resp = self.get_valid_response(
+                self.organization.slug, self.project.slug, status_code=400, **self.valid_alert_rule
+            )
+        assert (
+            resp.data["nonFieldErrors"][0]
+            == "Only crash free percentage queries are supported for subscriptions "
+            f"over the {self.valid_alert_rule['dataset']} dataset"
+        )
+
     @patch(
         "sentry.integrations.slack.utils.channel.get_channel_id_with_timeout",
         return_value=("#", None, True),
@@ -680,3 +695,29 @@ class AlertRuleCreateEndpointTestCrashRateAlert(APITestCase):
             "user_id": self.user.id,
             "user_id": self.user.id,
         }
         }
         mock_find_channel_id_for_alert_rule.assert_called_once_with(kwargs=kwargs)
         mock_find_channel_id_for_alert_rule.assert_called_once_with(kwargs=kwargs)
+
+
+@freeze_time()
+class MetricsCrashRateAlertCreationTest(AlertRuleCreateEndpointTestCrashRateAlert):
+    endpoint = "sentry-api-0-project-alert-rules"
+    method = "post"
+
+    def setUp(self):
+        super().setUp()
+        self.valid_alert_rule["dataset"] = Dataset.Metrics.value
+        for tag in [SessionMetricKey.SESSION.value, "session.status", "init", "crashed"]:
+            indexer.record(tag)
+
+    def test_simple_crash_rate_alerts_for_users(self):
+        self.valid_alert_rule.update(
+            {
+                "aggregate": "percentage(users_crashed, users) AS _crash_rate_alert_aggregate",
+            }
+        )
+        with self.feature(["organizations:incidents", "organizations:performance-view"]):
+            resp = self.get_valid_response(
+                self.organization.slug, self.project.slug, status_code=400, **self.valid_alert_rule
+            )
+        assert (
+            resp.data["nonFieldErrors"][0] == "Crash Free Users subscriptions are not supported yet"
+        )

Some files were not shown because too many files changed in this diff