Просмотр исходного кода

feat(search): adds useGroupSnubaDataset query param to use group attribute search (#67273)

We can use the query param `useGroupSnubaDataset` to start using
`GroupAttributesPostgresSnubaQueryExecutor`
Stephen Cefali 11 месяцев назад
Родитель
Commit
3f3738df9c

+ 1 - 0
src/sentry/eventstream/kafka/backend.py

@@ -117,6 +117,7 @@ class KafkaEventStream(SnubaProtocolEventStream):
         group_states: GroupStates | None = None,
         **kwargs: Any,
     ) -> None:
+
         event_type = self._get_event_type(event)
         if event.get_tag("sample_event"):
             logger.info(

+ 4 - 0
src/sentry/issues/endpoints/organization_group_index.py

@@ -166,6 +166,10 @@ class OrganizationGroupIndexEndpoint(OrganizationEndpoint):
                 result = inbox_search(**query_kwargs)
             else:
                 query_kwargs["referrer"] = "search.group_index"
+                # optional argument to use the group snuba dataset
+                if request.GET.get("useGroupSnubaDataset"):
+                    query_kwargs["use_group_snuba_dataset"] = True
+
                 result = search.query(**query_kwargs)
             return result, query_kwargs
 

+ 7 - 0
src/sentry/search/snuba/backend.py

@@ -476,6 +476,7 @@ class SnubaSearchBackendBase(SearchBackend, metaclass=ABCMeta):
         referrer: str | None = None,
         actor: Any | None = None,
         aggregate_kwargs: TrendsSortWeights | None = None,
+        use_group_snuba_dataset: bool = False,
     ) -> CursorResult[Group]:
         search_filters = search_filters if search_filters is not None else []
         # ensure projects are from same org
@@ -508,6 +509,7 @@ class SnubaSearchBackendBase(SearchBackend, metaclass=ABCMeta):
             search_filters=search_filters,
             date_from=date_from,
             date_to=date_to,
+            use_group_snuba_dataset=use_group_snuba_dataset,
         )
 
         # ensure sort strategy is supported by executor
@@ -649,6 +651,7 @@ class SnubaSearchBackendBase(SearchBackend, metaclass=ABCMeta):
         search_filters: Sequence[SearchFilter],
         date_from: datetime | None,
         date_to: datetime | None,
+        use_group_snuba_dataset: bool,
     ) -> AbstractQueryExecutor:
         """This method should return an implementation of the AbstractQueryExecutor
         We will end up calling .query() on the class returned by this method"""
@@ -657,6 +660,10 @@ class SnubaSearchBackendBase(SearchBackend, metaclass=ABCMeta):
 
 class EventsDatasetSnubaSearchBackend(SnubaSearchBackendBase):
     def _get_query_executor(self, *args: Any, **kwargs: Any) -> AbstractQueryExecutor:
+        if kwargs.get("use_group_snuba_dataset"):
+            from sentry.search.snuba.executors import GroupAttributesPostgresSnubaQueryExecutor
+
+            return GroupAttributesPostgresSnubaQueryExecutor()
         return PostgresSnubaQueryExecutor()
 
     def _get_queryset_conditions(

+ 3 - 0
src/sentry/search/snuba/executors.py

@@ -192,6 +192,7 @@ class AbstractQueryExecutor(metaclass=ABCMeta):
         referrer: str | None = None,
         actor: Any | None = None,
         aggregate_kwargs: TrendsSortWeights | None = None,
+        use_group_snuba_dataset: bool = False,
     ) -> CursorResult[Group]:
         """This function runs your actual query and returns the results
         We usually return a paginator object, which contains the results and the number of hits"""
@@ -1230,6 +1231,7 @@ class CdcPostgresSnubaQueryExecutor(PostgresSnubaQueryExecutor):
         referrer: str | None = None,
         actor: Any | None = None,
         aggregate_kwargs: TrendsSortWeights | None = None,
+        use_group_snuba_dataset: bool = False,
     ) -> CursorResult[Group]:
         if not validate_cdc_search_filters(search_filters):
             raise InvalidQueryForExecutor("Search filters invalid for this query executor")
@@ -1425,6 +1427,7 @@ class GroupAttributesPostgresSnubaQueryExecutor(PostgresSnubaQueryExecutor):
         referrer: str | None = None,
         actor: Any | None = None,
         aggregate_kwargs: TrendsSortWeights | None = None,
+        use_group_snuba_dataset: bool = False,
     ) -> CursorResult[Group]:
         if not self.validate_search_filters(search_filters):
             raise InvalidQueryForExecutor("Search filters invalid for this query executor")

+ 18 - 0
tests/sentry/issues/endpoints/test_organization_group_index.py

@@ -43,11 +43,13 @@ from sentry.search.events.constants import (
     SEMVER_BUILD_ALIAS,
     SEMVER_PACKAGE_ALIAS,
 )
+from sentry.search.snuba.executors import GroupAttributesPostgresSnubaQueryExecutor
 from sentry.silo import SiloMode
 from sentry.testutils.cases import APITestCase, SnubaTestCase
 from sentry.testutils.helpers import parse_link_header
 from sentry.testutils.helpers.datetime import before_now, iso_format
 from sentry.testutils.helpers.features import with_feature
+from sentry.testutils.helpers.options import override_options
 from sentry.testutils.silo import assume_test_silo_mode, region_silo_test
 from sentry.types.activity import ActivityType
 from sentry.types.group import GroupSubStatus, PriorityLevel
@@ -2308,6 +2310,22 @@ class GroupListTest(APITestCase, SnubaTestCase):
             == []
         )
 
+    @override_options({"issues.group_attributes.send_kafka": True})
+    @patch(
+        "sentry.search.snuba.executors.GroupAttributesPostgresSnubaQueryExecutor.query",
+        side_effect=GroupAttributesPostgresSnubaQueryExecutor.query,
+        autospec=True,
+    )
+    def test_use_group_snuba_dataset(self, mock_query):
+        self.store_event(
+            data={"timestamp": iso_format(before_now(seconds=500)), "fingerprint": ["group-1"]},
+            project_id=self.project.id,
+        )
+        self.login_as(user=self.user)
+        response = self.get_success_response(qs_params={"query": "", "useGroupSnubaDataset": "1"})
+        assert len(response.data) == 1
+        assert mock_query.call_count == 1
+
 
 @region_silo_test
 class GroupUpdateTest(APITestCase, SnubaTestCase):