Browse Source

refs(metric_alerts): Consolidate `QueryDatasets` and `Dataset` (#36894) (#36935)

Putting this up again since the previous attempt broke getsentry. The `QueryDatasets` name is being
left in place temporarily so that getsentry won't break.

This refactor PR migrates all usage from `QueryDatasets` to `Dataset` (with `QueryDatasets` itself
left in place temporarily, per the note above). `QueryDatasets` existed before `Dataset`, but
`Dataset` is now more widely used and is more up to date. The values here are the same; `Dataset`
just supports a few more datasets.

We already make sure that only datasets that are valid for alerts can be passed to the alert rules
api, so this won't allow people to attempt to create alerts on datasets that don't support them.
Dan Fuller 2 years ago
parent
commit
7ba826ce52

+ 4 - 6
src/sentry/incidents/charts.py

@@ -15,8 +15,9 @@ from sentry.charts.types import ChartSize, ChartType
 from sentry.incidents.logic import translate_aggregate_field
 from sentry.incidents.models import AlertRule, Incident, User
 from sentry.models import ApiKey, Organization
+from sentry.snuba.dataset import Dataset
 from sentry.snuba.entity_subscription import apply_dataset_query_conditions
-from sentry.snuba.models import QueryDatasets, SnubaQuery
+from sentry.snuba.models import SnubaQuery
 
 CRASH_FREE_SESSIONS = "percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate"
 CRASH_FREE_USERS = "percentage(users_crashed, users) AS _crash_rate_alert_aggregate"
@@ -156,7 +157,7 @@ def build_metric_alert_chart(
 ) -> Optional[str]:
     """Builds the dataset required for metric alert chart the same way the frontend would"""
     snuba_query: SnubaQuery = alert_rule.snuba_query
-    dataset = QueryDatasets(snuba_query.dataset)
+    dataset = Dataset(snuba_query.dataset)
     query_type = SnubaQuery.Type(snuba_query.type)
     is_crash_free_alert = query_type == SnubaQuery.Type.CRASH_RATE
     style = (
@@ -218,10 +219,7 @@ def build_metric_alert_chart(
             user,
         )
     else:
-        if (
-            query_type == SnubaQuery.Type.PERFORMANCE
-            and dataset == QueryDatasets.PERFORMANCE_METRICS
-        ):
+        if query_type == SnubaQuery.Type.PERFORMANCE and dataset == Dataset.PerformanceMetrics:
             query_params["dataset"] = "metrics"
         else:
             query_params["dataset"] = "discover"

+ 14 - 13
src/sentry/incidents/logic.py

@@ -37,13 +37,14 @@ from sentry.models import Integration, PagerDutyService, Project, SentryApp
 from sentry.search.events.builder import QueryBuilder
 from sentry.search.events.fields import resolve_field
 from sentry.shared_integrations.exceptions import DuplicateDisplayNameError
+from sentry.snuba.dataset import Dataset
 from sentry.snuba.entity_subscription import (
     ENTITY_TIME_COLUMNS,
     EntitySubscription,
     get_entity_key_from_query_builder,
     get_entity_subscription_from_snuba_query,
 )
-from sentry.snuba.models import QueryDatasets, SnubaQuery
+from sentry.snuba.models import SnubaQuery
 from sentry.snuba.subscriptions import (
     bulk_create_snuba_subscriptions,
     bulk_delete_snuba_subscriptions,
@@ -419,14 +420,14 @@ DEFAULT_ALERT_RULE_RESOLUTION = 1
 DEFAULT_CMP_ALERT_RULE_RESOLUTION = 2
 
 
-# Temporary mapping of `QueryDatasets` to `AlertRule.Type`. In the future, `Performance` will be
+# Temporary mapping of `Dataset` to `AlertRule.Type`. In the future, `Performance` will be
 # able to be run on `METRICS` as well.
 query_datasets_to_type = {
-    QueryDatasets.EVENTS: SnubaQuery.Type.ERROR,
-    QueryDatasets.TRANSACTIONS: SnubaQuery.Type.PERFORMANCE,
-    QueryDatasets.PERFORMANCE_METRICS: SnubaQuery.Type.PERFORMANCE,
-    QueryDatasets.SESSIONS: SnubaQuery.Type.CRASH_RATE,
-    QueryDatasets.METRICS: SnubaQuery.Type.CRASH_RATE,
+    Dataset.Events: SnubaQuery.Type.ERROR,
+    Dataset.Transactions: SnubaQuery.Type.PERFORMANCE,
+    Dataset.PerformanceMetrics: SnubaQuery.Type.PERFORMANCE,
+    Dataset.Sessions: SnubaQuery.Type.CRASH_RATE,
+    Dataset.Metrics: SnubaQuery.Type.CRASH_RATE,
 }
 
 
@@ -445,7 +446,7 @@ def create_alert_rule(
     include_all_projects=False,
     excluded_projects=None,
     query_type: SnubaQuery.Type = SnubaQuery.Type.ERROR,
-    dataset=QueryDatasets.EVENTS,
+    dataset=Dataset.Events,
     user=None,
     event_types=None,
     comparison_delta: Optional[int] = None,
@@ -486,10 +487,10 @@ def create_alert_rule(
         # Since comparison alerts make twice as many queries, run the queries less frequently.
         resolution = DEFAULT_CMP_ALERT_RULE_RESOLUTION
         comparison_delta = int(timedelta(minutes=comparison_delta).total_seconds())
-    if dataset == QueryDatasets.SESSIONS and features.has(
+    if dataset == Dataset.Sessions and features.has(
         "organizations:alert-crash-free-metrics", organization, actor=user
     ):
-        dataset = QueryDatasets.METRICS
+        dataset = Dataset.Metrics
     with transaction.atomic():
         snuba_query = create_snuba_query(
             query_type,
@@ -652,10 +653,10 @@ def update_alert_rule(
     if include_all_projects is not None:
         updated_fields["include_all_projects"] = include_all_projects
     if dataset is not None:
-        if dataset == QueryDatasets.SESSIONS and features.has(
+        if dataset == Dataset.Sessions and features.has(
             "organizations:alert-crash-free-metrics", alert_rule.organization, actor=user
         ):
-            dataset = QueryDatasets.METRICS
+            dataset = Dataset.Metrics
 
         if dataset.value != alert_rule.snuba_query.dataset:
             updated_query_fields["dataset"] = dataset
@@ -689,7 +690,7 @@ def update_alert_rule(
         if updated_query_fields or environment != alert_rule.snuba_query.environment:
             snuba_query = alert_rule.snuba_query
             updated_query_fields.setdefault("query_type", SnubaQuery.Type(snuba_query.type))
-            updated_query_fields.setdefault("dataset", QueryDatasets(snuba_query.dataset))
+            updated_query_fields.setdefault("dataset", Dataset(snuba_query.dataset))
             updated_query_fields.setdefault("query", snuba_query.query)
             updated_query_fields.setdefault("aggregate", snuba_query.aggregate)
             updated_query_fields.setdefault(

+ 5 - 4
src/sentry/incidents/serializers/__init__.py

@@ -1,5 +1,6 @@
 from sentry.incidents.models import AlertRuleTriggerAction
-from sentry.snuba.models import QueryDatasets, SnubaQuery, SnubaQueryEventType
+from sentry.snuba.dataset import Dataset
+from sentry.snuba.models import SnubaQuery, SnubaQueryEventType
 
 __all__ = (
     "AlertRuleSerializer",
@@ -29,9 +30,9 @@ QUERY_TYPE_VALID_EVENT_TYPES = {
     SnubaQuery.Type.PERFORMANCE: {SnubaQueryEventType.EventType.TRANSACTION},
 }
 QUERY_TYPE_VALID_DATASETS = {
-    SnubaQuery.Type.ERROR: {QueryDatasets.EVENTS},
-    SnubaQuery.Type.PERFORMANCE: {QueryDatasets.TRANSACTIONS, QueryDatasets.PERFORMANCE_METRICS},
-    SnubaQuery.Type.CRASH_RATE: {QueryDatasets.METRICS, QueryDatasets.SESSIONS},
+    SnubaQuery.Type.ERROR: {Dataset.Events},
+    SnubaQuery.Type.PERFORMANCE: {Dataset.Transactions, Dataset.PerformanceMetrics},
+    SnubaQuery.Type.CRASH_RATE: {Dataset.Metrics, Dataset.Sessions},
 }
 
 # TODO(davidenwang): eventually we should pass some form of these to the event_search parser to raise an error

+ 5 - 5
src/sentry/incidents/serializers/alert_rule.py

@@ -32,7 +32,7 @@ from sentry.snuba.entity_subscription import (
     get_entity_key_from_query_builder,
     get_entity_subscription,
 )
-from sentry.snuba.models import QueryDatasets, QuerySubscription, SnubaQuery, SnubaQueryEventType
+from sentry.snuba.models import QuerySubscription, SnubaQuery, SnubaQueryEventType
 from sentry.snuba.tasks import build_query_builder
 
 from . import (
@@ -151,10 +151,10 @@ class AlertRuleSerializer(CamelSnakeModelSerializer):
 
     def validate_dataset(self, dataset):
         try:
-            return QueryDatasets(dataset)
+            return Dataset(dataset)
         except ValueError:
             raise serializers.ValidationError(
-                "Invalid dataset, valid values are %s" % [item.value for item in QueryDatasets]
+                "Invalid dataset, valid values are %s" % [item.value for item in Dataset]
             )
 
     def validate_event_types(self, event_types):
@@ -229,7 +229,7 @@ class AlertRuleSerializer(CamelSnakeModelSerializer):
         return data
 
     def _validate_query(self, data):
-        dataset = data.setdefault("dataset", QueryDatasets.EVENTS)
+        dataset = data.setdefault("dataset", Dataset.Events)
         query_type = data.setdefault("query_type", query_datasets_to_type[dataset])
 
         valid_datasets = QUERY_TYPE_VALID_DATASETS[query_type]
@@ -245,7 +245,7 @@ class AlertRuleSerializer(CamelSnakeModelSerializer):
                 self.context["organization"],
                 actor=self.context.get("user", None),
             )
-            and dataset == QueryDatasets.PERFORMANCE_METRICS
+            and dataset == Dataset.PerformanceMetrics
             and query_type == SnubaQuery.Type.PERFORMANCE
         ):
             raise serializers.ValidationError(

+ 5 - 5
src/sentry/incidents/subscription_processor.py

@@ -30,13 +30,13 @@ from sentry.incidents.models import (
 )
 from sentry.incidents.tasks import handle_trigger_action
 from sentry.models import Project
+from sentry.snuba.dataset import Dataset
 from sentry.snuba.entity_subscription import (
     ENTITY_TIME_COLUMNS,
     BaseCrashRateMetricsEntitySubscription,
     get_entity_key_from_query_builder,
     get_entity_subscription_from_snuba_query,
 )
-from sentry.snuba.models import QueryDatasets
 from sentry.snuba.tasks import build_query_builder
 from sentry.utils import metrics, redis
 from sentry.utils.dates import to_datetime, to_timestamp
@@ -365,9 +365,9 @@ class SubscriptionProcessor:
         return aggregation_value
 
     def get_aggregation_value(self, subscription_update):
-        if self.subscription.snuba_query.dataset == QueryDatasets.SESSIONS.value:
+        if self.subscription.snuba_query.dataset == Dataset.Sessions.value:
             aggregation_value = self.get_crash_rate_alert_aggregation_value(subscription_update)
-        elif self.subscription.snuba_query.dataset == QueryDatasets.METRICS.value:
+        elif self.subscription.snuba_query.dataset == Dataset.Metrics.value:
             aggregation_value = self.get_crash_rate_alert_metrics_aggregation_value(
                 subscription_update
             )
@@ -424,7 +424,7 @@ class SubscriptionProcessor:
 
         if (
             len(subscription_update["values"]["data"]) > 1
-            and self.subscription.snuba_query.dataset != QueryDatasets.METRICS.value
+            and self.subscription.snuba_query.dataset != Dataset.Metrics.value
         ):
             logger.warning(
                 "Subscription returned more than 1 row of data",
@@ -437,7 +437,7 @@ class SubscriptionProcessor:
             )
 
         aggregation_value = self.get_aggregation_value(subscription_update)
-        if self.subscription.snuba_query.dataset == QueryDatasets.SESSIONS.value:
+        if self.subscription.snuba_query.dataset == Dataset.Sessions.value:
             try:
                 # Temporarily logging results from session updates for comparison with data from metric
                 # updates

+ 2 - 2
src/sentry/incidents/tasks.py

@@ -16,7 +16,7 @@ from sentry.incidents.models import (
     IncidentStatusMethod,
 )
 from sentry.models import Project
-from sentry.snuba.models import QueryDatasets
+from sentry.snuba.dataset import Dataset
 from sentry.snuba.query_subscription_consumer import register_subscriber
 from sentry.tasks.base import instrumented_task
 from sentry.utils import metrics
@@ -113,7 +113,7 @@ def handle_subscription_metrics_logger(subscription_update, subscription):
     from sentry.incidents.subscription_processor import SubscriptionProcessor
 
     try:
-        if subscription.snuba_query.dataset == QueryDatasets.METRICS.value:
+        if subscription.snuba_query.dataset == Dataset.Metrics.value:
             processor = SubscriptionProcessor(subscription)
             # XXX: Temporary hack so that we can extract these values without raising an exception
             processor.reset_trigger_counts = lambda *arg, **kwargs: None

+ 4 - 3
src/sentry/migrations/0233_recreate_subscriptions_in_snuba.py

@@ -4,7 +4,8 @@ import logging
 
 from django.db import migrations
 
-from sentry.snuba.models import QueryDatasets, SnubaQueryEventType
+from sentry.snuba.dataset import Dataset
+from sentry.snuba.models import SnubaQueryEventType
 from sentry.snuba.tasks import _create_in_snuba, _delete_from_snuba
 from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
 
@@ -33,14 +34,14 @@ def migrate_subscriptions(apps, schema_editor):
 
             try:
                 _delete_from_snuba(
-                    QueryDatasets(subscription.snuba_query.dataset),
+                    Dataset(subscription.snuba_query.dataset),
                     subscription.subscription_id,
                 )
             except Exception as e:
                 try:
                     # Delete the subscription we just created to avoid orphans
                     _delete_from_snuba(
-                        QueryDatasets(subscription.snuba_query.dataset),
+                        Dataset(subscription.snuba_query.dataset),
                         subscription_id,
                     )
                 except Exception as oe:

+ 4 - 3
src/sentry/migrations/0237_recreate_subscriptions_in_snuba.py

@@ -4,7 +4,8 @@ import logging
 
 from django.db import migrations
 
-from sentry.snuba.models import QueryDatasets, SnubaQueryEventType
+from sentry.snuba.dataset import Dataset
+from sentry.snuba.models import SnubaQueryEventType
 from sentry.snuba.tasks import _create_in_snuba, _delete_from_snuba
 from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
 
@@ -33,14 +34,14 @@ def migrate_subscriptions(apps, schema_editor):
 
             try:
                 _delete_from_snuba(
-                    QueryDatasets(subscription.snuba_query.dataset),
+                    Dataset(subscription.snuba_query.dataset),
                     subscription.subscription_id,
                 )
             except Exception as e:
                 try:
                     # Delete the subscription we just created to avoid orphans
                     _delete_from_snuba(
-                        QueryDatasets(subscription.snuba_query.dataset),
+                        Dataset(subscription.snuba_query.dataset),
                         subscription_id,
                     )
                 except Exception as oe:

+ 11 - 12
src/sentry/migrations/0292_migrate_sessions_subs_user_counts.py

@@ -7,8 +7,7 @@ import re
 from django.db import migrations
 
 from sentry.new_migrations.migrations import CheckedMigration
-from sentry.snuba.dataset import EntityKey
-from sentry.snuba.models import QueryDatasets
+from sentry.snuba.dataset import Dataset, EntityKey
 from sentry.snuba.tasks import _create_in_snuba, _delete_from_snuba
 from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
 
@@ -22,19 +21,19 @@ def create_subscription_in_snuba(subscription):
     subscription.save()
 
 
-def map_aggregate_to_entity_key(dataset: QueryDatasets, aggregate: str) -> EntityKey:
-    if dataset == QueryDatasets.EVENTS:
+def map_aggregate_to_entity_key(dataset: Dataset, aggregate: str) -> EntityKey:
+    if dataset == Dataset.Events:
         entity_key = EntityKey.Events
-    elif dataset == QueryDatasets.TRANSACTIONS:
+    elif dataset == Dataset.Transactions:
         entity_key = EntityKey.Transactions
-    elif dataset in [QueryDatasets.METRICS, QueryDatasets.SESSIONS]:
+    elif dataset in [Dataset.Metrics, Dataset.Sessions]:
         match = re.match(CRASH_RATE_ALERT_AGGREGATE_RE, aggregate)
         if not match:
             raise Exception(
                 f"Only crash free percentage queries are supported for subscriptions"
                 f"over the {dataset.value} dataset"
             )
-        if dataset == QueryDatasets.METRICS:
+        if dataset == Dataset.Metrics:
             count_col_matched = match.group(2)
             if count_col_matched == "sessions":
                 entity_key = EntityKey.MetricsCounters
@@ -47,7 +46,7 @@ def map_aggregate_to_entity_key(dataset: QueryDatasets, aggregate: str) -> Entit
     return entity_key
 
 
-def delete_subscription_from_snuba(subscription, query_dataset: QueryDatasets):
+def delete_subscription_from_snuba(subscription, query_dataset: Dataset):
     entity_key: EntityKey = map_aggregate_to_entity_key(
         query_dataset, subscription.snuba_query.aggregate
     )
@@ -66,9 +65,9 @@ def event_types(self):
 def update_metrics_subscriptions(apps, schema_editor):
     QuerySubscription = apps.get_model("sentry", "QuerySubscription")
     for subscription in RangeQuerySetWrapperWithProgressBar(
-        QuerySubscription.objects.filter(
-            snuba_query__dataset=QueryDatasets.METRICS.value
-        ).select_related("snuba_query")
+        QuerySubscription.objects.filter(snuba_query__dataset=Dataset.Metrics.value).select_related(
+            "snuba_query"
+        )
     ):
         old_subscription_id = subscription.subscription_id
         if old_subscription_id is not None:
@@ -76,7 +75,7 @@ def update_metrics_subscriptions(apps, schema_editor):
                 # The migration apps don't build this property, so patch it here:
                 subscription.snuba_query.event_types = event_types
                 create_subscription_in_snuba(subscription)
-                delete_subscription_from_snuba(subscription, QueryDatasets.METRICS)
+                delete_subscription_from_snuba(subscription, Dataset.Metrics)
             except Exception:
                 logging.exception(
                     "Failed to recreate metrics subscription in snuba",

+ 11 - 12
src/sentry/migrations/0293_restore_metrics_based_alerts.py

@@ -7,8 +7,7 @@ import re
 from django.db import migrations
 
 from sentry.new_migrations.migrations import CheckedMigration
-from sentry.snuba.dataset import EntityKey
-from sentry.snuba.models import QueryDatasets
+from sentry.snuba.dataset import Dataset, EntityKey
 from sentry.snuba.tasks import _create_in_snuba, _delete_from_snuba
 from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
 
@@ -27,19 +26,19 @@ def event_types(self):
     return [type.event_type for type in self.snubaqueryeventtype_set.all()]
 
 
-def map_aggregate_to_entity_key(dataset: QueryDatasets, aggregate: str) -> EntityKey:
-    if dataset == QueryDatasets.EVENTS:
+def map_aggregate_to_entity_key(dataset: Dataset, aggregate: str) -> EntityKey:
+    if dataset == Dataset.Events:
         entity_key = EntityKey.Events
-    elif dataset == QueryDatasets.TRANSACTIONS:
+    elif dataset == Dataset.Transactions:
         entity_key = EntityKey.Transactions
-    elif dataset in [QueryDatasets.METRICS, QueryDatasets.SESSIONS]:
+    elif dataset in [Dataset.Metrics, Dataset.Sessions]:
         match = re.match(CRASH_RATE_ALERT_AGGREGATE_RE, aggregate)
         if not match:
             raise Exception(
                 f"Only crash free percentage queries are supported for subscriptions"
                 f"over the {dataset.value} dataset"
             )
-        if dataset == QueryDatasets.METRICS:
+        if dataset == Dataset.Metrics:
             count_col_matched = match.group(2)
             if count_col_matched == "sessions":
                 entity_key = EntityKey.MetricsCounters
@@ -55,9 +54,9 @@ def map_aggregate_to_entity_key(dataset: QueryDatasets, aggregate: str) -> Entit
 def update_metrics_subscriptions(apps, schema_editor):
     QuerySubscription = apps.get_model("sentry", "QuerySubscription")
     for subscription in RangeQuerySetWrapperWithProgressBar(
-        QuerySubscription.objects.filter(
-            snuba_query__dataset=QueryDatasets.METRICS.value
-        ).select_related("snuba_query")
+        QuerySubscription.objects.filter(snuba_query__dataset=Dataset.Metrics.value).select_related(
+            "snuba_query"
+        )
     ):
         old_subscription_id = subscription.subscription_id
         if old_subscription_id is not None:
@@ -66,11 +65,11 @@ def update_metrics_subscriptions(apps, schema_editor):
                 subscription.snuba_query.event_types = event_types
                 create_subscription_in_snuba(subscription)
                 entity_key: EntityKey = map_aggregate_to_entity_key(
-                    QueryDatasets.METRICS, subscription.snuba_query.aggregate
+                    Dataset.Metrics, subscription.snuba_query.aggregate
                 )
                 # Delete the old subscription ID:
                 _delete_from_snuba(
-                    QueryDatasets.METRICS,
+                    Dataset.Metrics,
                     old_subscription_id,
                     entity_key,
                 )

Some files were not shown because too many files changed in this diff