
feat(mep): Add a param that prevents aggregate conditions (#32896)

- This adds a preventMetricAggregates query param that disables metric aggregate
  conditions, so we can disable them on the performance home page.
William Mak · commit ecf72a9491
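
For context, a minimal sketch of how a client might combine the new flag with metricsEnhanced. Only the query parameter names are taken from the diff below; the endpoint URL, organization slug, and auth handling are assumptions for illustration.

    import requests

    # Hypothetical values; only the query parameter names come from this commit.
    BASE_URL = "https://sentry.example.com/api/0/organizations/my-org/eventsv2/"

    response = requests.get(
        BASE_URL,
        headers={"Authorization": "Bearer <token>"},
        params={
            "field": ["transaction", "p50(transaction.duration)"],
            "query": "event.type:transaction",
            "metricsEnhanced": "1",
            # New in this commit: opt out of metric aggregate conditions, so queries
            # that still contain an aggregate filter are not served from metrics.
            "preventMetricAggregates": "1",
        },
    )
    print(response.status_code, response.json().get("meta", {}).get("isMetricsData"))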

+ 2 - 0
src/sentry/api/endpoints/organization_events.py

@@ -56,6 +56,7 @@ class OrganizationEventsV2Endpoint(OrganizationEventsV2EndpointBase):
             "organizations:performance-use-metrics", organization=organization, actor=request.user
         )
         metrics_enhanced = request.GET.get("metricsEnhanced") == "1" and performance_use_metrics
+        allow_metric_aggregates = request.GET.get("preventMetricAggregates") != "1"
         referrer = (
             referrer if referrer in ALLOWED_EVENTS_V2_REFERRERS else "api.organization-events-v2"
         )
@@ -74,6 +75,7 @@ class OrganizationEventsV2Endpoint(OrganizationEventsV2EndpointBase):
                 auto_fields=True,
                 auto_aggregations=True,
                 use_aggregate_conditions=True,
+                allow_metric_aggregates=allow_metric_aggregates,
                 use_snql=features.has(
                     "organizations:discover-use-snql", organization, actor=request.user
                 ),

+ 2 - 0
src/sentry/api/endpoints/organization_events_stats.py

@@ -141,6 +141,7 @@ class OrganizationEventsStatsEndpoint(OrganizationEventsV2EndpointBase):  # type
             )
 
             metrics_enhanced = request.GET.get("metricsEnhanced") == "1" and performance_use_metrics
+            allow_metric_aggregates = request.GET.get("preventMetricAggregates") != "1"
             sentry_sdk.set_tag("performance.use_metrics", metrics_enhanced)
 
         def get_event_stats(
@@ -177,6 +178,7 @@ class OrganizationEventsStatsEndpoint(OrganizationEventsV2EndpointBase):  # type
                 referrer=referrer,
                 zerofill_results=zerofill_results,
                 comparison_delta=comparison_delta,
+                allow_metric_aggregates=allow_metric_aggregates,
                 use_snql=discover_snql,
             )
 

+ 22 - 1
src/sentry/search/events/builder.py

@@ -1447,11 +1447,12 @@ class HistogramQueryBuilder(QueryBuilder):
 
 
 class MetricsQueryBuilder(QueryBuilder):
-    def __init__(self, *args: Any, **kwargs: Any):
+    def __init__(self, *args: Any, allow_metric_aggregates: Optional[bool] = False, **kwargs: Any):
         self.distributions: List[CurriedFunction] = []
         self.sets: List[CurriedFunction] = []
         self.counters: List[CurriedFunction] = []
         self.metric_ids: List[int] = []
+        self.allow_metric_aggregates = allow_metric_aggregates
         super().__init__(
             # Dataset is always Metrics
             Dataset.Metrics,
@@ -1522,6 +1523,24 @@ class MetricsQueryBuilder(QueryBuilder):
                 Condition(Column("metric_id"), Op.IN, sorted(self.metric_ids))
             )
 
+    def resolve_having(
+        self, parsed_terms: ParsedTerms, use_aggregate_conditions: bool
+    ) -> List[WhereType]:
+        if not self.allow_metric_aggregates:
+            # Regardless of use_aggregate_conditions, check if any having_conditions exist
+            having_conditions = super().resolve_having(parsed_terms, True)
+            if len(having_conditions) > 0:
+                raise IncompatibleMetricsQuery(
+                    "Aggregate conditions were disabled, but included in filter"
+                )
+
+            # Don't resolve having conditions again if we don't have to
+            if use_aggregate_conditions:
+                return having_conditions
+            else:
+                return []
+        return super().resolve_having(parsed_terms, use_aggregate_conditions)
+
     def resolve_limit(self, limit: Optional[int]) -> Limit:
         """Impose a max limit, since we may need to create a large condition based on the group by values when the query
         is run"""
@@ -1766,12 +1785,14 @@ class TimeseriesMetricQueryBuilder(MetricsQueryBuilder):
         interval: int,
         query: Optional[str] = None,
         selected_columns: Optional[List[str]] = None,
+        allow_metric_aggregates: Optional[bool] = False,
         functions_acl: Optional[List[str]] = None,
     ):
         super().__init__(
             params=params,
             query=query,
             selected_columns=selected_columns,
+            allow_metric_aggregates=allow_metric_aggregates,
             auto_fields=False,
             functions_acl=functions_acl,
         )

+ 4 - 0
src/sentry/snuba/discover.py

@@ -209,6 +209,7 @@ def query(
     referrer=None,
     auto_fields=False,
     auto_aggregations=False,
+    allow_metric_aggregates=False,
     use_aggregate_conditions=False,
     conditions=None,
     extra_snql_condition=None,
@@ -236,6 +237,7 @@ def query(
     auto_fields (bool) Set to true to have project + eventid fields automatically added.
     auto_aggregations (bool) Whether aggregates should be added automatically if they're used
                     in conditions, and there's at least one aggregate already.
+    allow_metric_aggregates (bool) Ignored here, only used in metric enhanced performance
     use_aggregate_conditions (bool) Set to true if aggregates conditions should be used at all.
     conditions (Sequence[any]) List of conditions that are passed directly to snuba without
                     any additional processing.
@@ -483,6 +485,7 @@ def timeseries_query(
     zerofill_results: bool = True,
     comparison_delta: Optional[timedelta] = None,
     functions_acl: Optional[Sequence[str]] = None,
+    allow_metric_aggregates=False,
     use_snql: Optional[bool] = False,
 ):
     """
@@ -506,6 +509,7 @@ def timeseries_query(
     comparison_delta: A timedelta used to convert this into a comparison query. We make a second
     query time-shifted back by comparison_delta, and compare the results to get the % change for each
     time bucket. Requires that we only pass
+    allow_metric_aggregates (bool) Ignored here, only used in metric enhanced performance
     use_snql (bool) Whether to directly build the query in snql, instead of using the older
                     json construction
     """

+ 4 - 0
src/sentry/snuba/metrics_enhanced_performance.py

@@ -44,6 +44,7 @@ def query(
     auto_fields=False,
     auto_aggregations=False,
     use_aggregate_conditions=False,
+    allow_metric_aggregates=True,
     conditions=None,
     extra_snql_condition=None,
     functions_acl=None,
@@ -64,6 +65,7 @@ def query(
                 auto_fields=False,
                 auto_aggregations=auto_aggregations,
                 use_aggregate_conditions=use_aggregate_conditions,
+                allow_metric_aggregates=allow_metric_aggregates,
                 functions_acl=functions_acl,
                 limit=limit,
                 offset=offset,
@@ -116,6 +118,7 @@ def timeseries_query(
     rollup: int,
     referrer: str,
     zerofill_results: bool = True,
+    allow_metric_aggregates=True,
     comparison_delta: Optional[timedelta] = None,
     functions_acl: Optional[List[str]] = None,
     use_snql: Optional[bool] = False,
@@ -137,6 +140,7 @@ def timeseries_query(
                 query=query,
                 selected_columns=columns,
                 functions_acl=functions_acl,
+                allow_metric_aggregates=allow_metric_aggregates,
             )
             result = metrics_query.run_query(referrer + ".metrics-enhanced")
             result = discover.transform_results(result, metrics_query.function_alias_map, {}, None)

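The endpoint tests below assert a 200 response with isMetricsData set to false when an aggregate condition is combined with preventMetricAggregates=1. A rough, self-contained sketch of that fallback pattern (an assumption for illustration, not the actual code in metrics_enhanced_performance.py):

    class IncompatibleMetricsQuery(Exception):
        """Stand-in for Sentry's exception of the same name."""

    def query_with_optional_metrics(run_metrics_query, run_discover_query):
        # Try the metrics-enhanced path first; the metrics builder raises
        # IncompatibleMetricsQuery when allow_metric_aggregates is False and the
        # query still contains an aggregate condition.
        try:
            result = run_metrics_query()
            result.setdefault("meta", {})["isMetricsData"] = True
        except IncompatibleMetricsQuery:
            # Fall back to the plain discover dataset.
            result = run_discover_query()
            result.setdefault("meta", {})["isMetricsData"] = False
        return result
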
+ 78 - 0
tests/sentry/search/events/test_builder.py

@@ -1264,6 +1264,55 @@ class MetricQueryBuilderTest(MetricBuilderBaseTest):
             "95", "transaction.duration", "test"
         )
 
+    def test_error_if_aggregates_disallowed(self):
+        def run_query(query, use_aggregate_conditions):
+            with self.assertRaises(IncompatibleMetricsQuery):
+                MetricsQueryBuilder(
+                    self.params,
+                    selected_columns=[
+                        "transaction",
+                        "p95()",
+                        "count_unique(user)",
+                    ],
+                    query=query,
+                    allow_metric_aggregates=False,
+                    use_aggregate_conditions=use_aggregate_conditions,
+                )
+
+        queries = [
+            "p95():>5s",
+            "count_unique(user):>0",
+            "transaction:foo_transaction AND (!transaction:bar_transaction OR p95():>5s)",
+        ]
+        for query in queries:
+            for use_aggregate_conditions in [True, False]:
+                run_query(query, use_aggregate_conditions)
+
+    def test_no_error_if_aggregates_disallowed_but_no_aggregates_included(self):
+        MetricsQueryBuilder(
+            self.params,
+            selected_columns=[
+                "transaction",
+                "p95()",
+                "count_unique(user)",
+            ],
+            query="transaction:foo_transaction",
+            allow_metric_aggregates=False,
+            use_aggregate_conditions=True,
+        )
+
+        MetricsQueryBuilder(
+            self.params,
+            selected_columns=[
+                "transaction",
+                "p95()",
+                "count_unique(user)",
+            ],
+            query="transaction:foo_transaction",
+            allow_metric_aggregates=False,
+            use_aggregate_conditions=False,
+        )
+
 
 class TimeseriesMetricQueryBuilderTest(MetricBuilderBaseTest):
     def test_get_query(self):
@@ -1419,6 +1468,7 @@ class TimeseriesMetricQueryBuilderTest(MetricBuilderBaseTest):
             interval=900,
             query="p50(transaction.duration):>100",
             selected_columns=["p50(transaction.duration)", "count_unique(user)"],
+            allow_metric_aggregates=True,
         )
         # Aggregate conditions should be dropped
         assert query.having == []
@@ -1593,3 +1643,31 @@ class TimeseriesMetricQueryBuilderTest(MetricBuilderBaseTest):
                 {"name": "p50_transaction_duration", "type": "Float64"},
             ],
         )
+
+    def test_error_if_aggregates_disallowed(self):
+        def run_query(query):
+            with self.assertRaises(IncompatibleMetricsQuery):
+                TimeseriesMetricQueryBuilder(
+                    self.params,
+                    interval=900,
+                    query=query,
+                    selected_columns=["p50(transaction.duration)"],
+                    allow_metric_aggregates=False,
+                )
+
+        queries = [
+            "p95():>5s",
+            "count_unique(user):>0",
+            "transaction:foo_transaction AND (!transaction:bar_transaction OR p95():>5s)",
+        ]
+        for query in queries:
+            run_query(query)
+
+    def test_no_error_if_aggregates_disallowed_but_no_aggregates_included(self):
+        TimeseriesMetricQueryBuilder(
+            self.params,
+            interval=900,
+            selected_columns=["p50(transaction.duration)"],
+            query="transaction:foo_transaction",
+            allow_metric_aggregates=False,
+        )

+ 16 - 0
tests/snuba/api/endpoints/test_organization_events_stats.py

@@ -1156,6 +1156,22 @@ class OrganizationEventsStatsMetricsEnhancedPerformanceEndpointTest(
             "event.type:transaction OR transaction:foo_transaction"
         ), "boolean with mep filter"
 
+    def test_having_condition_with_preventing_aggregates(self):
+        response = self.do_request(
+            data={
+                "project": self.project.id,
+                "start": iso_format(self.day_ago),
+                "end": iso_format(self.day_ago + timedelta(hours=2)),
+                "interval": "1h",
+                "query": "p95():<5s",
+                "yAxis": ["epm()"],
+                "metricsEnhanced": "1",
+                "preventMetricAggregates": "1",
+            },
+        )
+        assert response.status_code == 200, response.content
+        assert not response.data["isMetricsData"]
+
     def test_explicit_not_mep(self):
         response = self.do_request(
             data={

+ 30 - 0
tests/snuba/api/endpoints/test_organization_events_v2.py

@@ -5111,6 +5111,36 @@ class OrganizationEventsMetricsEnhancedPerformanceEndpointTest(MetricsEnhancedPe
         assert meta["project"] == "string"
         assert meta["p50_transaction_duration"] == "duration"
 
+    def test_having_condition_with_preventing_aggregates(self):
+        self.store_metric(
+            1,
+            tags={"environment": "staging", "transaction": "foo_transaction"},
+            timestamp=self.min_ago,
+        )
+        self.store_metric(
+            100,
+            tags={"environment": "staging", "transaction": "bar_transaction"},
+            timestamp=self.min_ago,
+        )
+
+        response = self.do_request(
+            {
+                "field": ["transaction", "project", "p50(transaction.duration)"],
+                "query": "event.type:transaction p50(transaction.duration):<50",
+                "metricsEnhanced": "1",
+                "preventMetricAggregates": "1",
+                "per_page": 50,
+            }
+        )
+        assert response.status_code == 200, response.content
+        assert len(response.data["data"]) == 0
+        meta = response.data["meta"]
+
+        assert not meta["isMetricsData"]
+        assert meta["transaction"] == "string"
+        assert meta["project"] == "string"
+        assert meta["p50_transaction_duration"] == "duration"
+
     def test_having_condition_not_selected(self):
         self.store_metric(
             1,