
ref(discover): Remove use-snql feature checks (#33805)

- This removes all checks for the discover-use-snql, performance-use-snql,
  and trends-use-snql feature flags (see the sketch after this list)
- This defaults SnQL usage to on everywhere (it has already been on in
  production for quite a while now)
- This removes the wip-snql suffix from referrers, and adds an old-json marker
  to the older queries so we can track down anything that's still using the
  JSON syntax somehow
- This fixes a bug where flagr would fail a feature check and fall back to the
  JSON syntax, in which count_web_vitals doesn't exist
- Step 1 of a longer process to burn down our old JSON code
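
For context, a minimal, hypothetical sketch of the per-request gating pattern this commit deletes and the unconditional path that replaces it. This is not Sentry's actual helper code: features.has and discover.query are the real call sites touched in the diff below, but the wrapper functions, their parameters, and the example columns and referrers are illustrative only.

    from sentry import features
    from sentry.snuba import discover

    def run_discover_query_before(organization, request, params):
        # Old pattern: every endpoint resolved the flag itself and threaded
        # a use_snql kwarg down into the discover layer.
        use_snql = features.has(
            "organizations:discover-use-snql", organization, actor=request.user
        )
        return discover.query(
            selected_columns=["id"],  # illustrative
            query="",                 # illustrative
            params=params,
            # SnQL branches used a wip-snql referrer suffix (illustrative name)
            referrer="api.example.wip-snql" if use_snql else "api.example",
            use_snql=use_snql,
        )

    def run_discover_query_after(params):
        # New pattern: no feature check and no use_snql kwarg; SnQL is always
        # used, and remaining legacy JSON callers get an old-json marker on
        # their referrer so they can be tracked down and removed.
        return discover.query(
            selected_columns=["id"],  # illustrative
            query="",                 # illustrative
            params=params,
            referrer="api.example",
        )

In the diff below, this shows up as deleted features.has(...) checks and deleted use_snql=... keyword arguments at each endpoint.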
William Mak authored 2 years ago
commit 1436e3ffed

+ 0 - 6
src/sentry/api/endpoints/organization_events.py

@@ -81,9 +81,6 @@ class OrganizationEventsV2Endpoint(OrganizationEventsV2EndpointBase):
                 "auto_aggregations": True,
                 "use_aggregate_conditions": True,
                 "allow_metric_aggregates": allow_metric_aggregates,
-                "use_snql": features.has(
-                    "organizations:discover-use-snql", organization, actor=request.user
-                ),
             }
             if not metrics_enhanced and performance_dry_run_mep:
                 sentry_sdk.set_tag("query.mep_compatible", False)
@@ -147,9 +144,6 @@ class OrganizationEventsGeoEndpoint(OrganizationEventsV2EndpointBase):
                 referrer=referrer,
                 use_aggregate_conditions=True,
                 orderby=self.get_orderby(request) or maybe_aggregate,
-                use_snql=features.has(
-                    "organizations:discover-use-snql", organization, actor=request.user
-                ),
             )
 
         with self.handle_query_errors():

+ 1 - 4
src/sentry/api/endpoints/organization_events_facets.py

@@ -4,7 +4,7 @@ import sentry_sdk
 from rest_framework.request import Request
 from rest_framework.response import Response
 
-from sentry import features, tagstore
+from sentry import tagstore
 from sentry.api.bases import NoProjects, OrganizationEventsV2EndpointBase
 from sentry.snuba import discover
 
@@ -25,9 +25,6 @@ class OrganizationEventsFacetsEndpoint(OrganizationEventsV2EndpointBase):
                     query=request.GET.get("query"),
                     params=params,
                     referrer="api.organization-events-facets.top-tags",
-                    use_snql=features.has(
-                        "organizations:discover-use-snql", organization, actor=request.user
-                    ),
                 )
 
         with sentry_sdk.start_span(op="discover.endpoint", description="populate_results") as span:

+ 0 - 17
src/sentry/api/endpoints/organization_events_facets_performance.py

@@ -74,10 +74,6 @@ class OrganizationEventsFacetsPerformanceEndpoint(OrganizationEventsFacetsPerfor
         if tag_key in TAG_ALIASES:
             tag_key = TAG_ALIASES.get(tag_key)
 
-        use_snql = features.has(
-            "organizations:performance-use-snql", organization, actor=request.user
-        )
-
         def data_fn(offset, limit):
             with sentry_sdk.start_span(op="discover.endpoint", description="discover_query"):
                 referrer = "api.organization-events-facets-performance.top-tags"
@@ -85,7 +81,6 @@ class OrganizationEventsFacetsPerformanceEndpoint(OrganizationEventsFacetsPerfor
                     filter_query=filter_query,
                     aggregate_column=aggregate_column,
                     referrer=referrer,
-                    use_snql=use_snql,
                     params=params,
                 )
 
@@ -162,10 +157,6 @@ class OrganizationEventsFacetsPerformanceHistogramEndpoint(
         if tag_key in TAG_ALIASES:
             tag_key = TAG_ALIASES.get(tag_key)
 
-        use_snql = features.has(
-            "organizations:performance-use-snql", organization, actor=request.user
-        )
-
         def data_fn(offset, limit, raw_limit):
             with sentry_sdk.start_span(op="discover.endpoint", description="discover_query"):
                 referrer = "api.organization-events-facets-performance-histogram"
@@ -178,7 +169,6 @@ class OrganizationEventsFacetsPerformanceHistogramEndpoint(
                     orderby=self.get_orderby(request),
                     offset=offset,
                     referrer=referrer,
-                    use_snql=use_snql,
                 )
 
                 if not top_tags:
@@ -193,7 +183,6 @@ class OrganizationEventsFacetsPerformanceHistogramEndpoint(
                     filter_query=filter_query,
                     aggregate_column=aggregate_column,
                     referrer=referrer,
-                    use_snql=use_snql,
                     params=params,
                     limit=raw_limit,
                     num_buckets_per_key=num_buckets_per_key,
@@ -252,7 +241,6 @@ class HistogramPaginator(GenericOffsetPaginator):
 def query_tag_data(
     params: Mapping[str, str],
     referrer: str,
-    use_snql: bool,
     filter_query: Optional[str] = None,
     aggregate_column: Optional[str] = None,
 ) -> Optional[Dict]:
@@ -287,7 +275,6 @@ def query_tag_data(
             query=filter_query,
             params=params,
             referrer=f"{referrer}.all_transactions",
-            use_snql=use_snql,
             limit=1,
         )
 
@@ -310,7 +297,6 @@ def query_top_tags(
     tag_key: str,
     limit: int,
     referrer: str,
-    use_snql: bool,
     orderby: Optional[List[str]],
     offset: Optional[int] = None,
     aggregate_column: Optional[str] = None,
@@ -365,7 +351,6 @@ def query_top_tags(
             ],
             functions_acl=["array_join"],
             referrer=f"{referrer}.top_tags",
-            use_snql=use_snql,
             limit=limit,
             offset=offset,
         )
@@ -499,7 +484,6 @@ def query_facet_performance_key_histogram(
     num_buckets_per_key: int,
     limit: int,
     referrer: str,
-    use_snql: bool,
     aggregate_column: Optional[str] = None,
     filter_query: Optional[str] = None,
 ) -> Dict:
@@ -526,7 +510,6 @@ def query_facet_performance_key_histogram(
         ],
         histogram_rows=limit,
         referrer="api.organization-events-facets-performance-histogram",
-        use_snql=use_snql,
         normalize_results=False,
     )
     return results

+ 0 - 4
src/sentry/api/endpoints/organization_events_has_measurements.py

@@ -7,7 +7,6 @@ from rest_framework import serializers
 from rest_framework.request import Request
 from rest_framework.response import Response
 
-from sentry import features
 from sentry.api.bases import NoProjects, OrganizationEventsV2EndpointBase
 from sentry.snuba import discover
 from sentry.utils.hashlib import md5_text
@@ -106,9 +105,6 @@ class OrganizationEventsHasMeasurementsEndpoint(OrganizationEventsV2EndpointBase
                     auto_fields=True,
                     auto_aggregations=False,
                     use_aggregate_conditions=False,
-                    use_snql=features.has(
-                        "organizations:performance-use-snql", organization, actor=request.user
-                    ),
                 )
             has_measurements = len(results["data"]) > 0
 

+ 0 - 3
src/sentry/api/endpoints/organization_events_histogram.py

@@ -67,9 +67,6 @@ class OrganizationEventsHistogramEndpoint(OrganizationEventsV2EndpointBase):
                         max_value=data.get("max"),
                         data_filter=data.get("dataFilter"),
                         referrer="api.organization-events-histogram",
-                        use_snql=features.has(
-                            "organizations:performance-use-snql", organization, actor=request.user
-                        ),
                     )
 
                 return Response(results)

+ 1 - 4
src/sentry/api/endpoints/organization_events_meta.py

@@ -5,7 +5,7 @@ from rest_framework.exceptions import ParseError
 from rest_framework.request import Request
 from rest_framework.response import Response
 
-from sentry import features, search
+from sentry import search
 from sentry.api.base import EnvironmentMixin
 from sentry.api.bases import NoProjects, OrganizationEventsEndpointBase
 from sentry.api.event_search import parse_search_query
@@ -28,9 +28,6 @@ class OrganizationEventsMetaEndpoint(OrganizationEventsEndpointBase):
                 params=params,
                 query=request.query_params.get("query"),
                 referrer="api.organization-events-meta",
-                use_snql=features.has(
-                    "organizations:discover-use-snql", organization, actor=request.user
-                ),
             )
 
         return Response({"count": result["data"][0]["count"]})

+ 0 - 4
src/sentry/api/endpoints/organization_events_stats.py

@@ -75,7 +75,6 @@ class OrganizationEventsStatsEndpoint(OrganizationEventsV2EndpointBase):  # type
     def get_features(self, organization: Organization, request: Request) -> Mapping[str, bool]:
         feature_names = [
             "organizations:performance-chart-interpolation",
-            "organizations:discover-use-snql",
             "organizations:performance-use-metrics",
             "organizations:performance-dry-run-mep",
         ]
@@ -135,7 +134,6 @@ class OrganizationEventsStatsEndpoint(OrganizationEventsV2EndpointBase):  # type
                 else "api.organization-event-stats"
             )
             batch_features = self.get_features(organization, request)
-            discover_snql = batch_features.get("organizations:discover-use-snql", False)
             has_chart_interpolation = batch_features.get(
                 "organizations:performance-chart-interpolation", False
             )
@@ -173,7 +171,6 @@ class OrganizationEventsStatsEndpoint(OrganizationEventsV2EndpointBase):  # type
                     allow_empty=False,
                     zerofill_results=zerofill_results,
                     include_other=include_other,
-                    use_snql=discover_snql,
                 )
             dataset = discover if not metrics_enhanced else metrics_enhanced_performance
             query_details = {
@@ -185,7 +182,6 @@ class OrganizationEventsStatsEndpoint(OrganizationEventsV2EndpointBase):  # type
                 "zerofill_results": zerofill_results,
                 "comparison_delta": comparison_delta,
                 "allow_metric_aggregates": allow_metric_aggregates,
-                "use_snql": discover_snql,
             }
             if not metrics_enhanced and performance_dry_run_mep:
                 sentry_sdk.set_tag("query.mep_compatible", False)

+ 18 - 95
src/sentry/api/endpoints/organization_events_trace.py

@@ -32,7 +32,7 @@ from sentry.models import Organization
 from sentry.search.events.builder import QueryBuilder
 from sentry.snuba import discover
 from sentry.utils.numbers import format_grouped_length
-from sentry.utils.snuba import Dataset, SnubaQueryParams, bulk_raw_query, bulk_snql_query
+from sentry.utils.snuba import Dataset, bulk_snql_query
 from sentry.utils.validators import INVALID_ID_DETAILS, is_event_id
 
 logger: logging.Logger = logging.getLogger(__name__)
@@ -206,67 +206,14 @@ def child_sort_key(item: TraceEvent) -> List[int]:
 
 
 def query_trace_data(
-    trace_id: str, params: Mapping[str, str], use_snql: bool = False
+    trace_id: str, params: Mapping[str, str]
 ) -> Tuple[Sequence[SnubaTransaction], Sequence[SnubaError]]:
-    sentry_sdk.set_tag("discover.use_snql", use_snql)
     transaction_query: Union[QueryBuilder, discover.PreparedQuery]
     error_query: Union[QueryBuilder, discover.PreparedQuery]
-    if use_snql:
-        transaction_query = QueryBuilder(
-            Dataset.Transactions,
-            params,
-            query=f"trace:{trace_id}",
-            selected_columns=[
-                "id",
-                "transaction.status",
-                "transaction.op",
-                "transaction.duration",
-                "transaction",
-                "timestamp",
-                "project",
-                "project.id",
-                "trace.span",
-                "trace.parent_span",
-                'to_other(trace.parent_span, "", 0, 1) AS root',
-            ],
-            # We want to guarantee at least getting the root, and hopefully events near it with timestamp
-            # id is just for consistent results
-            orderby=["-root", "timestamp", "id"],
-            limit=MAX_TRACE_SIZE,
-        )
-        error_query = QueryBuilder(
-            Dataset.Events,
-            params,
-            query=f"trace:{trace_id}",
-            selected_columns=[
-                "id",
-                "project",
-                "project.id",
-                "timestamp",
-                "trace.span",
-                "transaction",
-                "issue",
-                "title",
-                "tags[level]",
-            ],
-            # Don't add timestamp to this orderby as snuba will have to split the time range up and make multiple queries
-            orderby=["id"],
-            auto_fields=False,
-            limit=MAX_TRACE_SIZE,
-        )
-        results = bulk_snql_query(
-            [transaction_query.get_snql_query(), error_query.get_snql_query()],
-            referrer="api.trace-view.get-events.wip-snql",
-        )
-        transformed_results = [
-            discover.transform_results(result, query.function_alias_map, {}, None)["data"]
-            for result, query in zip(results, [transaction_query, error_query])
-        ]
-        return cast(Sequence[SnubaTransaction], transformed_results[0]), cast(
-            Sequence[SnubaError], transformed_results[1]
-        )
-
-    transaction_query = discover.prepare_discover_query(
+    transaction_query = QueryBuilder(
+        Dataset.Transactions,
+        params,
+        query=f"trace:{trace_id}",
         selected_columns=[
             "id",
             "transaction.status",
@@ -274,8 +221,8 @@ def query_trace_data(
             "transaction.duration",
             "transaction",
             "timestamp",
-            # project gets the slug, and project.id gets added automatically
             "project",
+            "project.id",
             "trace.span",
             "trace.parent_span",
             'to_other(trace.parent_span, "", 0, 1) AS root',
@@ -283,13 +230,16 @@ def query_trace_data(
         # We want to guarantee at least getting the root, and hopefully events near it with timestamp
         # id is just for consistent results
         orderby=["-root", "timestamp", "id"],
-        params=params,
-        query=f"event.type:transaction trace:{trace_id}",
+        limit=MAX_TRACE_SIZE,
     )
-    error_query = discover.prepare_discover_query(
+    error_query = QueryBuilder(
+        Dataset.Events,
+        params,
+        query=f"trace:{trace_id}",
         selected_columns=[
             "id",
             "project",
+            "project.id",
             "timestamp",
             "trace.span",
             "transaction",
@@ -299,34 +249,15 @@ def query_trace_data(
         ],
         # Don't add timestamp to this orderby as snuba will have to split the time range up and make multiple queries
         orderby=["id"],
-        params=params,
-        query=f"!event.type:transaction trace:{trace_id}",
         auto_fields=False,
+        limit=MAX_TRACE_SIZE,
     )
-    snuba_params = [
-        SnubaQueryParams(
-            dataset=Dataset.Discover,
-            start=snuba_filter.start,
-            end=snuba_filter.end,
-            groupby=snuba_filter.groupby,
-            conditions=snuba_filter.conditions,
-            filter_keys=snuba_filter.filter_keys,
-            aggregations=snuba_filter.aggregations,
-            selected_columns=snuba_filter.selected_columns,
-            having=snuba_filter.having,
-            orderby=snuba_filter.orderby,
-            limit=MAX_TRACE_SIZE,
-        )
-        for snuba_filter in [transaction_query.filter, error_query.filter]
-    ]
-    results = bulk_raw_query(
-        snuba_params,
+    results = bulk_snql_query(
+        [transaction_query.get_snql_query(), error_query.get_snql_query()],
         referrer="api.trace-view.get-events",
     )
     transformed_results = [
-        discover.transform_results(result, query.fields["functions"], query.columns, query.filter)[
-            "data"
-        ]
+        discover.transform_results(result, query.function_alias_map, {}, None)["data"]
         for result, query in zip(results, [transaction_query, error_query])
     ]
     return cast(Sequence[SnubaTransaction], transformed_results[0]), cast(
@@ -340,11 +271,6 @@ class OrganizationEventsTraceEndpointBase(OrganizationEventsV2EndpointBase):  #
             features.has("organizations:performance-view", organization, actor=request.user)
         )
 
-    def has_snql_feature(self, organization: Organization, request: HttpRequest) -> bool:
-        return bool(
-            features.has("organizations:performance-use-snql", organization, actor=request.user)
-        )
-
     @staticmethod
     def serialize_error(event: SnubaError) -> TraceError:
         return {
@@ -421,9 +347,7 @@ class OrganizationEventsTraceEndpointBase(OrganizationEventsV2EndpointBase):  #
             return Response({"detail": INVALID_ID_DETAILS.format("Event ID")}, status=400)
 
         with self.handle_query_errors():
-            transactions, errors = query_trace_data(
-                trace_id, params, self.has_snql_feature(organization, request)
-            )
+            transactions, errors = query_trace_data(trace_id, params)
             if len(transactions) == 0:
                 return Response(status=404)
             self.record_analytics(transactions, trace_id, self.request.user.id, organization.id)
@@ -774,7 +698,6 @@ class OrganizationEventsTraceMetaEndpoint(OrganizationEventsTraceEndpointBase):
                 query=f"trace:{trace_id}",
                 limit=1,
                 referrer="api.trace-view.get-meta",
-                use_snql=self.has_snql_feature(organization, request),
             )
             if len(result["data"]) == 0:
                 return Response(status=404)

+ 29 - 60
src/sentry/api/endpoints/organization_events_trends.py

@@ -415,14 +415,9 @@ class OrganizationEventsTrendsEndpointBase(OrganizationEventsV2EndpointBase):
     def has_feature(self, organization, request):
         return features.has("organizations:performance-view", organization, actor=request.user)
 
-    def has_snql_feature(self, organization, request):
-        return features.has("organizations:trends-use-snql", organization, actor=request.user)
-
     def get(self, request: Request, organization) -> Response:
         if not self.has_feature(organization, request):
             return Response(status=404)
-        use_snql = self.has_snql_feature(organization, request)
-        sentry_sdk.set_tag("discover.use_snql", use_snql)
 
         try:
             params = self.get_snuba_params(request, organization)
@@ -465,59 +460,37 @@ class OrganizationEventsTrendsEndpointBase(OrganizationEventsV2EndpointBase):
         orderby = self.get_orderby(request)
         query = request.GET.get("query")
 
-        if use_snql:
-            with self.handle_query_errors():
-                trend_query = TrendQueryBuilder(
-                    dataset=Dataset.Discover,
-                    params=params,
-                    selected_columns=selected_columns,
-                    auto_fields=False,
-                    auto_aggregations=True,
-                    use_aggregate_conditions=True,
-                )
-                snql_trend_columns = self.resolve_trend_columns(
-                    trend_query, function, column, middle
-                )
-                trend_query.columns.extend(snql_trend_columns.values())
-                trend_query.aggregates.extend(snql_trend_columns.values())
-                trend_query.params["aliases"] = self.get_snql_function_aliases(
-                    snql_trend_columns, trend_type
-                )
-                # Both orderby and conditions need to be resolved after the columns because of aliasing
-                trend_query.orderby = trend_query.resolve_orderby(orderby)
-                trend_query.groupby = trend_query.resolve_groupby()
-                where, having = trend_query.resolve_conditions(query, use_aggregate_conditions=True)
-                trend_query.where += where
-                trend_query.having += having
-        else:
-            params["aliases"] = self.get_function_aliases(trend_type)
-            trend_columns = self.get_trend_columns(function, column, middle)
+        with self.handle_query_errors():
+            trend_query = TrendQueryBuilder(
+                dataset=Dataset.Discover,
+                params=params,
+                selected_columns=selected_columns,
+                auto_fields=False,
+                auto_aggregations=True,
+                use_aggregate_conditions=True,
+            )
+            snql_trend_columns = self.resolve_trend_columns(trend_query, function, column, middle)
+            trend_query.columns.extend(snql_trend_columns.values())
+            trend_query.aggregates.extend(snql_trend_columns.values())
+            trend_query.params["aliases"] = self.get_snql_function_aliases(
+                snql_trend_columns, trend_type
+            )
+            # Both orderby and conditions need to be resolved after the columns because of aliasing
+            trend_query.orderby = trend_query.resolve_orderby(orderby)
+            trend_query.groupby = trend_query.resolve_groupby()
+            where, having = trend_query.resolve_conditions(query, use_aggregate_conditions=True)
+            trend_query.where += where
+            trend_query.having += having
 
         def data_fn(offset, limit):
-            if use_snql:
-                trend_query.offset = Offset(offset)
-                trend_query.limit = Limit(limit)
-                result = raw_snql_query(
-                    trend_query.get_snql_query(),
-                    referrer="api.trends.get-percentage-change.wip-snql",
-                )
-                result = discover.transform_results(
-                    result, trend_query.function_alias_map, {}, None
-                )
-                return result
-            else:
-                return discover.query(
-                    selected_columns=selected_columns + trend_columns,
-                    query=query,
-                    params=params,
-                    orderby=orderby,
-                    offset=offset,
-                    limit=limit,
-                    referrer="api.trends.get-percentage-change",
-                    auto_fields=True,
-                    auto_aggregations=True,
-                    use_aggregate_conditions=True,
-                )
+            trend_query.offset = Offset(offset)
+            trend_query.limit = Limit(limit)
+            result = raw_snql_query(
+                trend_query.get_snql_query(),
+                referrer="api.trends.get-percentage-change",
+            )
+            result = discover.transform_results(result, trend_query.function_alias_map, {}, None)
+            return result
 
         with self.handle_query_errors():
             return self.paginate(
@@ -531,7 +504,6 @@ class OrganizationEventsTrendsEndpointBase(OrganizationEventsV2EndpointBase):
                     selected_columns,
                     orderby,
                     query,
-                    use_snql,
                 ),
                 default_per_page=5,
                 max_per_page=5,
@@ -548,7 +520,6 @@ class OrganizationEventsTrendsStatsEndpoint(OrganizationEventsTrendsEndpointBase
         selected_columns,
         orderby,
         query,
-        use_snql,
     ):
         def on_results(events_results):
             def get_event_stats(query_columns, query, params, rollup, zerofill_results, _=None):
@@ -564,7 +535,6 @@ class OrganizationEventsTrendsStatsEndpoint(OrganizationEventsTrendsEndpointBase
                     top_events=events_results,
                     referrer="api.trends.get-event-stats",
                     zerofill_results=zerofill_results,
-                    use_snql=use_snql,
                 )
 
             stats_results = (
@@ -601,7 +571,6 @@ class OrganizationEventsTrendsEndpoint(OrganizationEventsTrendsEndpointBase):
         selected_columns,
         orderby,
         query,
-        use_snql,
     ):
         return lambda events_results: self.handle_results_with_meta(
             request, organization, params["project_id"], events_results

+ 0 - 4
src/sentry/api/endpoints/organization_events_vitals.py

@@ -3,7 +3,6 @@ from rest_framework.exceptions import ParseError
 from rest_framework.request import Request
 from rest_framework.response import Response
 
-from sentry import features
 from sentry.api.bases import NoProjects, OrganizationEventsV2EndpointBase
 from sentry.search.events.fields import get_function_alias
 from sentry.snuba import discover
@@ -58,9 +57,6 @@ class OrganizationEventsVitalsEndpoint(OrganizationEventsV2EndpointBase):
                 auto_fields=True,
                 auto_aggregations=False,
                 use_aggregate_conditions=False,
-                use_snql=features.has(
-                    "organizations:performance-use-snql", organization, actor=request.user
-                ),
             )
 
         results = {}

Some files were not shown because too many files changed in this diff