
Revert "feat(spans-metrics): Extend avg_if and avg_compare to frame metrics (#69680)"

This reverts commit fdb8878e9deff45d3531f5814885e07210b28c34.

Co-authored-by: narsaynorath <22846452+narsaynorath@users.noreply.github.com>
getsentry-bot committed 10 months ago
commit a1131eb4d3
+ 1 - 11
src/sentry/search/events/constants.py

@@ -331,10 +331,6 @@ SPAN_METRICS_MAP = {
     "http.decoded_response_content_length": "d:spans/http.decoded_response_content_length@byte",
     "http.decoded_response_content_length": "d:spans/http.decoded_response_content_length@byte",
     "http.response_transfer_size": "d:spans/http.response_transfer_size@byte",
     "http.response_transfer_size": "d:spans/http.response_transfer_size@byte",
     "cache.item_size": "d:spans/cache.item_size@byte",
     "cache.item_size": "d:spans/cache.item_size@byte",
-    "mobile.slow_frames": "g:spans/mobile.slow_frames@none",
-    "mobile.frozen_frames": "g:spans/mobile.frozen_frames@none",
-    "mobile.total_frames": "g:spans/mobile.total_frames@none",
-    "mobile.frames_delay": "g:spans/mobile.frames_delay@second",
 }
 SELF_TIME_LIGHT = "d:spans/exclusive_time_light@millisecond"
 # 50 to match the size of tables in the UI + 1 for pagination reasons
@@ -356,13 +352,7 @@ METRIC_DURATION_COLUMNS = {
 SPAN_METRIC_DURATION_COLUMNS = {
     key
     for key, value in SPAN_METRICS_MAP.items()
-    if (value.endswith("@millisecond") and value.startswith("d:"))
-    or (value.endswith("@second") and value.startswith("g:"))
-}
-SPAN_METRIC_COUNT_COLUMNS = {
-    key
-    for key, value in SPAN_METRICS_MAP.items()
-    if value.endswith("@none") and value.startswith("g:")
+    if value.endswith("@millisecond") and value.startswith("d:")
 }
 SPAN_METRIC_BYTES_COLUMNS = {
     key
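Net effect of the constants.py hunks: the four mobile frame entries leave SPAN_METRICS_MAP, SPAN_METRIC_COUNT_COLUMNS is removed entirely, and SPAN_METRIC_DURATION_COLUMNS again selects only millisecond distribution metrics. A minimal sketch of the restored comprehension on a reduced map (the span.self_time entry is an assumed example, not taken from this diff):

# Reduced map for illustration; cache.item_size comes from the diff context
# above, span.self_time is an assumed example entry.
SPAN_METRICS_MAP = {
    "span.self_time": "d:spans/exclusive_time@millisecond",
    "cache.item_size": "d:spans/cache.item_size@byte",
    "mobile.slow_frames": "g:spans/mobile.slow_frames@none",  # gauge entry removed by this revert
}

# Restored comprehension: only distribution ("d:") metrics measured in
# milliseconds qualify, so gauge-based frame metrics are excluded.
SPAN_METRIC_DURATION_COLUMNS = {
    key
    for key, value in SPAN_METRICS_MAP.items()
    if value.endswith("@millisecond") and value.startswith("d:")
}

print(SPAN_METRIC_DURATION_COLUMNS)  # {'span.self_time'}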

+ 2 - 4
src/sentry/search/events/datasets/spans_metrics.py

@@ -173,8 +173,7 @@ class SpansMetricsDatasetConfig(DatasetConfig):
                     required_args=[
                         fields.MetricArg(
                             "column",
-                            allowed_columns=constants.SPAN_METRIC_DURATION_COLUMNS
-                            | constants.SPAN_METRIC_COUNT_COLUMNS,
+                            allowed_columns=constants.SPAN_METRIC_DURATION_COLUMNS,
                         ),
                         fields.MetricArg(
                             "if_col",
@@ -496,8 +495,7 @@ class SpansMetricsDatasetConfig(DatasetConfig):
                     required_args=[
                         fields.MetricArg(
                             "column",
-                            allowed_columns=constants.SPAN_METRIC_DURATION_COLUMNS
-                            | constants.SPAN_METRIC_COUNT_COLUMNS,
+                            allowed_columns=constants.SPAN_METRIC_DURATION_COLUMNS,
                             allow_custom_measurements=False,
                         ),
                         fields.MetricArg(

+ 0 - 47
tests/snuba/api/endpoints/test_organization_events_span_metrics.py

@@ -1479,53 +1479,6 @@ class OrganizationEventsMetricsEnhancedPerformanceEndpointTest(MetricsEnhancedPe
             {"count_op(queue.submit.celery)": 1, "count_op(queue.task.celery)": 1},
             {"count_op(queue.submit.celery)": 1, "count_op(queue.task.celery)": 1},
         ]
         ]
 
 
-    def test_frames_metrics(self):
-        for index, release in enumerate(["foo", "bar"]):
-            self.store_span_metric(
-                1 + 10 * index,
-                internal_metric=constants.SPAN_METRICS_MAP["mobile.slow_frames"],
-                timestamp=self.six_min_ago,
-                tags={"release": release},
-            )
-            self.store_span_metric(
-                2 + 10 * index,
-                internal_metric=constants.SPAN_METRICS_MAP["mobile.frozen_frames"],
-                timestamp=self.six_min_ago,
-                tags={"release": release},
-            )
-            self.store_span_metric(
-                3 + 10 * index,
-                internal_metric=constants.SPAN_METRICS_MAP["mobile.frames_delay"],
-                timestamp=self.six_min_ago,
-                tags={"release": release},
-            )
-
-        response = self.do_request(
-            {
-                "field": [
-                    "avg_if(mobile.slow_frames,release,foo)",
-                    "avg_if(mobile.frozen_frames,release,bar)",
-                    "avg_compare(mobile.slow_frames,release,foo,bar)",
-                    "avg_if(mobile.frames_delay,release,foo)",
-                ],
-                "query": "",
-                "project": self.project.id,
-                "dataset": "spansMetrics",
-                "statsPeriod": "1h",
-            }
-        )
-
-        assert response.status_code == 200, response.content
-        data = response.data["data"]
-        assert data == [
-            {
-                "avg_compare(mobile.slow_frames,release,foo,bar)": 10.0,
-                "avg_if(mobile.slow_frames,release,foo)": 1.0,
-                "avg_if(mobile.frames_delay,release,foo)": 3.0,
-                "avg_if(mobile.frozen_frames,release,bar)": 12.0,
-            }
-        ]
-
 
 class OrganizationEventsMetricsEnhancedPerformanceEndpointTestWithMetricLayer(
     OrganizationEventsMetricsEnhancedPerformanceEndpointTest