feat(search): run an async query against the joined events -> group_attributes dataset (#54519)

The intent of this PR is to gauge the performance of joining the `events` and
`group_attributes` datasets. When an issue search query is applied, we'll also
run an asynchronous query against Snuba that joins `events` and
`group_attributes`.
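
For reference, a minimal sketch of what such a join looks like in SnQL via `snuba_sdk`. The entity names and the `"attributes"` relationship are taken from the executor added below; the `tenant_ids` value is illustrative, and a real query would also carry `where` conditions and a time range:

```python
from snuba_sdk import Column, Entity, Join, Query, Relationship, Request

# Join the events entity to group_attributes through the "attributes"
# relationship, selecting the group ids that have matching events.
events = Entity("events", alias="e")
attrs = Entity("group_attributes", alias="g")

query = Query(
    match=Join([Relationship(events, "attributes", attrs)]),
    select=[Column("group_id", attrs)],
)
request = Request(
    dataset="events",
    app_id="group_attributes",
    query=query,
    tenant_ids={"organization_id": 1},  # illustrative tenant
)
```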

Things to note:
* We don't care much about the results of the join query; the purpose of this
PR is to simulate real traffic while also running a join query.
* Instead of running the joined query serially with the normal issue search
query, the join query runs asynchronously in a separate thread (see the
sketch below). This avoids adding latency to the regular issue search query.
* The query building for the join query is a bit of a mess; it is essentially
a copy-paste of the CDC query. For the purposes of measuring performance,
this should be fine.
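
A minimal sketch of the fire-and-forget pattern this PR uses (the helper name `submit_side_query` is illustrative; the real implementation lives in `src/sentry/search/snuba/backend.py` below):

```python
import atexit
import logging
from concurrent.futures import ThreadPoolExecutor

# Module-level pool so worker threads are reused across requests;
# wait=False keeps interpreter shutdown from blocking on pending futures.
_pool = ThreadPoolExecutor(max_workers=10)
atexit.register(_pool.shutdown, False)

def submit_side_query(run_query):
    def _task():
        try:
            run_query()
        except Exception:
            # A failed side query must never affect the main search path.
            logging.warning("side query failed", exc_info=True)
        finally:
            # Django opens one DB connection per thread; close it explicitly
            # so pooled threads don't leave connections lingering.
            from django.db import connection

            connection.close()

    try:
        _pool.submit(_task)
    except Exception:
        logging.exception("failed to submit side query to pool")
```
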
Gilbert Szeto · 1 year ago · parent commit 250dff0d1b

+ 2 - 0
src/sentry/conf/server.py

@@ -1431,6 +1431,8 @@ SENTRY_FEATURES = {
     "organizations:issue-search-use-cdc-secondary": False,
     # Adds search suggestions to the issue search bar
     "organizations:issue-search-shortcuts": False,
+    # Whether to make a side/parallel query against events -> group_attributes when searching issues
+    "organizations:issue-search-group-attributes-side-query": False,
     # Enable metric alert charts in email/slack
     "organizations:metric-alert-chartcuterie": False,
     # Extract metrics for sessions during ingestion.

+ 1 - 0
src/sentry/features/__init__.py

@@ -103,6 +103,7 @@ default_manager.add("organizations:issue-search-allow-postgres-only-search", Org
 default_manager.add("organizations:issue-search-use-cdc-primary", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
 default_manager.add("organizations:issue-search-use-cdc-secondary", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
 default_manager.add("organizations:issue-search-shortcuts", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
+default_manager.add("organizations:issue-search-group-attributes-side-query", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
 default_manager.add("organizations:js-sdk-dynamic-loader", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
 default_manager.add("organizations:large-debug-files", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
 default_manager.add("organizations:mep-rollout-flag", OrganizationFeature, FeatureHandlerStrategy.REMOTE)

+ 193 - 19
src/sentry/search/snuba/backend.py

@@ -1,8 +1,11 @@
 from __future__ import annotations
 
+import atexit
 import functools
+import logging
 from abc import ABCMeta, abstractmethod
 from collections import defaultdict
+from concurrent.futures import ThreadPoolExecutor
 from datetime import datetime, timedelta
 from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence
 
@@ -10,8 +13,9 @@ from django.db.models import Q, QuerySet
 from django.utils import timezone
 from django.utils.functional import SimpleLazyObject
 
-from sentry import quotas
+from sentry import features, quotas
 from sentry.api.event_search import SearchFilter
+from sentry.db.models import BaseQuerySet
 from sentry.exceptions import InvalidSearchQuery
 from sentry.issues.grouptype import ErrorGroupType
 from sentry.models import (
@@ -39,6 +43,7 @@ from sentry.search.snuba.executors import (
     PostgresSnubaQueryExecutor,
     PrioritySortWeights,
 )
+from sentry.utils import metrics
 from sentry.utils.cursors import Cursor, CursorResult
 
 
@@ -250,6 +255,129 @@ def regressed_in_release_filter(versions: Sequence[str], projects: Sequence[Proj
     )
 
 
+_side_query_pool = ThreadPoolExecutor(max_workers=10)
+
+atexit.register(_side_query_pool.shutdown, False)
+
+
+def _group_attributes_side_query(
+    events_only_search_results: CursorResult[Group],
+    builder: Callable[[], BaseQuerySet],
+    projects: Sequence[Project],
+    retention_window_start: Optional[datetime],
+    group_queryset: BaseQuerySet,
+    environments: Optional[Sequence[Environment]] = None,
+    sort_by: str = "date",
+    limit: int = 100,
+    cursor: Optional[Cursor] = None,
+    count_hits: bool = False,
+    paginator_options: Optional[Mapping[str, Any]] = None,
+    search_filters: Optional[Sequence[SearchFilter]] = None,
+    date_from: Optional[datetime] = None,
+    date_to: Optional[datetime] = None,
+    max_hits: Optional[int] = None,
+    referrer: Optional[str] = None,
+    actor: Optional[Any] = None,
+    aggregate_kwargs: Optional[PrioritySortWeights] = None,
+) -> None:
+    def __run_joined_query_and_log_metric(
+        events_only_search_results: CursorResult[Group],
+        builder: Callable[[], BaseQuerySet],
+        projects: Sequence[Project],
+        retention_window_start: Optional[datetime],
+        group_queryset: BaseQuerySet,
+        environments: Optional[Sequence[Environment]] = None,
+        sort_by: str = "date",
+        limit: int = 100,
+        cursor: Optional[Cursor] = None,
+        count_hits: bool = False,
+        paginator_options: Optional[Mapping[str, Any]] = None,
+        search_filters: Optional[Sequence[SearchFilter]] = None,
+        date_from: Optional[datetime] = None,
+        date_to: Optional[datetime] = None,
+        max_hits: Optional[int] = None,
+        referrer: Optional[str] = None,
+        actor: Optional[Any] = None,
+        aggregate_kwargs: Optional[PrioritySortWeights] = None,
+    ):
+        try:
+            from sentry.search.snuba.executors import GroupAttributesPostgresSnubaQueryExecutor
+            from sentry.utils import metrics
+
+            executor = GroupAttributesPostgresSnubaQueryExecutor()
+            with metrics.timer("snuba.search.group_attributes_joined.duration"):
+                cursor_results = executor.query(
+                    projects,
+                    retention_window_start,
+                    builder(),
+                    environments,
+                    sort_by,
+                    limit,
+                    cursor,
+                    count_hits,
+                    paginator_options,
+                    search_filters,
+                    date_from,
+                    date_to,
+                    max_hits,
+                    referrer,
+                    actor,
+                    aggregate_kwargs,
+                )
+            joined_hits = len(cursor_results.results)
+            events_only_search_hits = len(events_only_search_results.results)
+            if events_only_search_hits > 0:
+                if joined_hits == events_only_search_hits:
+                    comparison = "equal"
+                elif joined_hits > events_only_search_hits:
+                    comparison = "greater"
+                else:
+                    # the joined query shouldn't have fewer hits since the query is deliberately less restrictive
+                    comparison = "less"
+
+                metrics.incr(
+                    "snuba.search.group_attributes_joined.events_compared",
+                    tags={"comparison": comparison},
+                )
+        except Exception:
+            logging.warning(
+                "failed to load side query from _group_attributes_side_query", exc_info=True
+            )
+        finally:
+            # since this code runs in a thread and Django establishes a connection per thread, we need to
+            # explicitly close the connection assigned to this thread to avoid lingering connections
+            from django.db import connection
+
+            connection.close()
+
+    try:
+        _side_query_pool.submit(
+            __run_joined_query_and_log_metric,
+            events_only_search_results,
+            builder,
+            projects,
+            retention_window_start,
+            group_queryset,
+            environments,
+            sort_by,
+            limit,
+            cursor,
+            count_hits,
+            paginator_options,
+            search_filters,
+            date_from,
+            date_to,
+            max_hits,
+            referrer,
+            actor,
+            aggregate_kwargs,
+        )
+    except Exception:
+        logging.exception(
+            "failed to submit group-attributes search side-query to pool"
+        )
+
+
 class Condition:
     """\
     Adds a single filter to a ``QuerySet`` object. Used with
@@ -374,24 +502,70 @@ class SnubaSearchBackendBase(SearchBackend, metaclass=ABCMeta):
         if not query_executor.has_sort_strategy(sort_by):
             raise InvalidSearchQuery(f"Sort key '{sort_by}' not supported.")
 
-        return query_executor.query(
-            projects=projects,
-            retention_window_start=retention_window_start,
-            group_queryset=group_queryset,
-            environments=environments,
-            sort_by=sort_by,
-            limit=limit,
-            cursor=cursor,
-            count_hits=count_hits,
-            paginator_options=paginator_options,
-            search_filters=search_filters,
-            date_from=date_from,
-            date_to=date_to,
-            max_hits=max_hits,
-            referrer=referrer,
-            actor=actor,
-            aggregate_kwargs=aggregate_kwargs,
-        )
+        with metrics.timer("snuba.search.postgres_snuba.duration"):
+            query_results = query_executor.query(
+                projects=projects,
+                retention_window_start=retention_window_start,
+                group_queryset=group_queryset,
+                environments=environments,
+                sort_by=sort_by,
+                limit=limit,
+                cursor=cursor,
+                count_hits=count_hits,
+                paginator_options=paginator_options,
+                search_filters=search_filters,
+                date_from=date_from,
+                date_to=date_to,
+                max_hits=max_hits,
+                referrer=referrer,
+                actor=actor,
+                aggregate_kwargs=aggregate_kwargs,
+            )
+
+        if len(projects) > 0 and features.has(
+            "organizations:issue-search-group-attributes-side-query", projects[0].organization
+        ):
+            new_group_queryset = self._build_group_queryset(
+                projects=projects,
+                environments=environments,
+                search_filters=search_filters,
+                retention_window_start=retention_window_start,
+                date_from=date_from,
+                date_to=date_to,
+            )
+
+            builder = functools.partial(
+                self._build_group_queryset,
+                projects=projects,
+                environments=environments,
+                search_filters=search_filters,
+                retention_window_start=retention_window_start,
+                date_from=date_from,
+                date_to=date_to,
+            )
+
+            _group_attributes_side_query(
+                events_only_search_results=query_results,
+                builder=builder,
+                projects=projects,
+                retention_window_start=retention_window_start,
+                group_queryset=new_group_queryset,
+                environments=environments,
+                sort_by=sort_by,
+                limit=limit,
+                cursor=cursor,
+                count_hits=count_hits,
+                paginator_options=paginator_options,
+                search_filters=search_filters,
+                date_from=date_from,
+                date_to=date_to,
+                max_hits=max_hits,
+                referrer=referrer,
+                actor=actor,
+                aggregate_kwargs=aggregate_kwargs,
+            )
+
+        return query_results
 
     def _build_group_queryset(
         self,

+ 203 - 1
src/sentry/search/snuba/executors.py

@@ -47,7 +47,7 @@ from sentry.issues.search import (
 )
 from sentry.models import Environment, Group, Organization, Project
 from sentry.search.events.filter import convert_search_filter_to_snuba_query, format_search_filter
-from sentry.search.utils import validate_cdc_search_filters
+from sentry.search.utils import SupportedConditions, validate_cdc_search_filters
 from sentry.snuba.dataset import Dataset
 from sentry.utils import json, metrics, snuba
 from sentry.utils.cursors import Cursor, CursorResult
@@ -1331,3 +1331,205 @@ class CdcPostgresSnubaQueryExecutor(PostgresSnubaQueryExecutor):
 
         # TODO: Add types to paginators and remove this
         return cast(CursorResult[Group], paginator_results)
+
+
+class GroupAttributesPostgresSnubaQueryExecutor(PostgresSnubaQueryExecutor):
+    entities = {
+        "event": Entity("events", alias="e"),
+        "attrs": Entity("group_attributes", alias="g"),
+    }
+
+    supported_cdc_conditions = [
+        SupportedConditions("status", frozenset(["IN"])),
+    ]
+    supported_cdc_conditions_lookup = {
+        condition.field_name: condition for condition in supported_cdc_conditions
+    }
+
+    last_seen_aggregation = Function(
+        "ifNull",
+        [
+            Function(
+                "multiply",
+                [
+                    Function(
+                        "toUInt64", [Function("max", [Column("timestamp", entities["event"])])]
+                    ),
+                    1000,
+                ],
+            ),
+            0,
+        ],
+    )
+
+    def calculate_start_end(
+        self,
+        retention_window_start: Optional[datetime],
+        search_filters: Optional[Sequence[SearchFilter]],
+        date_from: Optional[datetime],
+        date_to: Optional[datetime],
+    ) -> Tuple[datetime, datetime, datetime]:
+        now = timezone.now()
+        end = None
+        end_params = [_f for _f in [date_to, get_search_filter(search_filters, "date", "<")] if _f]
+        if end_params:
+            end = min(end_params)
+
+        if not end:
+            end = now + ALLOWED_FUTURE_DELTA
+
+        retention_date = max(_f for _f in [retention_window_start, now - timedelta(days=90)] if _f)
+        start_params = [date_from, retention_date, get_search_filter(search_filters, "date", ">")]
+        start = max(_f for _f in start_params if _f)
+        end = max([retention_date, end])
+        return start, end, retention_date
+
+    def validate_cdc_search_filters(self, search_filters: Optional[Sequence[SearchFilter]]) -> bool:
+        """
+        Validates whether a set of search filters can be handled by this query executor.
+        """
+        for search_filter in search_filters or ():
+            supported_condition = self.supported_cdc_conditions_lookup.get(search_filter.key.name)
+            if not supported_condition:
+                return False
+            if (
+                supported_condition.operators
+                and search_filter.operator not in supported_condition.operators
+            ):
+                return False
+        return True
+
+    def query(
+        self,
+        projects: Sequence[Project],
+        retention_window_start: Optional[datetime],
+        group_queryset: BaseQuerySet,
+        environments: Optional[Sequence[Environment]],
+        sort_by: str,
+        limit: int,
+        cursor: Optional[Cursor],
+        count_hits: bool,
+        paginator_options: Optional[Mapping[str, Any]],
+        search_filters: Optional[Sequence[SearchFilter]],
+        date_from: Optional[datetime],
+        date_to: Optional[datetime],
+        max_hits: Optional[int] = None,
+        referrer: Optional[str] = None,
+        actor: Optional[Any] = None,
+        aggregate_kwargs: Optional[PrioritySortWeights] = None,
+    ) -> CursorResult[Group]:
+        if not self.validate_cdc_search_filters(search_filters):
+            raise InvalidQueryForExecutor("Search filters invalid for this query executor")
+
+        start, end, retention_date = self.calculate_start_end(
+            retention_window_start, search_filters, date_from, date_to
+        )
+
+        if start == retention_date and end == retention_date:
+            # Both `start` and `end` must have been trimmed to `retention_date`,
+            # so this entire search was against a time range that is outside of
+            # retention. We'll return empty results to maintain backwards compatibility
+            # with Django search (for now).
+            return self.empty_result
+
+        if start >= end:
+            # TODO: This maintains backwards compatibility with Django search, but
+            # in the future we should find a way to notify the user that their search
+            # is invalid.
+            return self.empty_result
+
+        event_entity = self.entities["event"]
+        attr_entity = self.entities["attrs"]
+
+        where_conditions = [
+            Condition(Column("project_id", event_entity), Op.IN, [p.id for p in projects]),
+            Condition(Column("project_id", attr_entity), Op.IN, [p.id for p in projects]),
+            Condition(Column("timestamp", event_entity), Op.GTE, start),
+            Condition(Column("timestamp", event_entity), Op.LT, end),
+        ]
+        # TODO: This is still basically only handling status, handle this better once we introduce
+        # more conditions.
+        for search_filter in search_filters or ():
+            where_conditions.append(
+                Condition(
+                    Column(search_filter.key.name, attr_entity),
+                    Op.IN,
+                    search_filter.value.raw_value,
+                )
+            )
+
+        if environments:
+            # TODO: Should this be handled via filter_keys, once we have a snql compatible version?
+            where_conditions.append(
+                Condition(
+                    Column("environment", event_entity), Op.IN, [e.name for e in environments]
+                )
+            )
+
+        sort_func = self.last_seen_aggregation
+
+        having = []
+        if cursor is not None:
+            op = Op.GTE if cursor.is_prev else Op.LTE
+            having.append(Condition(sort_func, op, cursor.value))
+
+        tenant_ids = {"organization_id": projects[0].organization_id} if projects else None
+
+        query = Query(
+            match=Join([Relationship(event_entity, "attributes", attr_entity)]),
+            select=[
+                Column("group_id", attr_entity),
+                replace(sort_func, alias="score"),
+            ],
+            where=where_conditions,
+            groupby=[Column("group_id", attr_entity)],
+            having=having,
+            orderby=[OrderBy(sort_func, direction=Direction.DESC)],
+            limit=Limit(limit + 1),
+        )
+        request = Request(
+            dataset="events",
+            app_id="group_attributes",
+            query=query,
+            tenant_ids=tenant_ids,
+        )
+        data = snuba.raw_snql_query(request, referrer="search.snuba.group_attributes_search.query")[
+            "data"
+        ]
+
+        hits_query = Query(
+            match=Join([Relationship(event_entity, "attributes", attr_entity)]),
+            select=[
+                Function("uniq", [Column("group_id", attr_entity)], alias="count"),
+            ],
+            where=where_conditions,
+        )
+        hits = None
+        if count_hits:
+            request = Request(
+                dataset="events", app_id="group_attributes", query=hits_query, tenant_ids=tenant_ids
+            )
+            hits = snuba.raw_snql_query(
+                request, referrer="search.snuba.group_attributes_search.hits"
+            )["data"][0]["count"]
+
+        paginator_options = paginator_options or {}
+        paginator_results = SequencePaginator(
+            [(row["score"], row["g.group_id"]) for row in data],
+            reverse=True,
+            **paginator_options,
+        ).get_result(limit, cursor, known_hits=hits, max_hits=max_hits)
+
+        # We filter against `group_queryset` here so that we recheck all conditions in Postgres.
+        # Since replication from Postgres to Clickhouse can lag, we might get back results that
+        # have changed state in Postgres. By rechecking them we guarantee that any returned results
+        # have the correct state.
+        # TODO: This can result in us returning less than a full page of results, but shouldn't
+        # affect cursors. If we want to, we can iterate and query snuba until we manage to get a
+        # full page. In practice, this will likely only skip a couple of results at worst, and
+        # probably not be noticeable to the user, so holding off for now to reduce complexity.
+
+        groups = group_queryset.in_bulk(paginator_results.results)
+        paginator_results.results = [groups[k] for k in paginator_results.results if k in groups]
+        # TODO: Add types to paginators and remove this
+        return cast(CursorResult[Group], paginator_results)

+ 69 - 9
tests/snuba/search/test_backend.py

@@ -1,3 +1,4 @@
+import time
 import uuid
 from datetime import datetime, timedelta
 from typing import Any
@@ -5,7 +6,9 @@ from unittest import mock
 
 import pytest
 import pytz
+import urllib3
 from django.utils import timezone
+from sentry_kafka_schemas.schema_types.group_attributes_v1 import GroupAttributesSnapshot
 
 from sentry import options
 from sentry.api.issue_search import convert_query_values, issue_search_config, parse_search_query
@@ -39,11 +42,12 @@ from sentry.search.snuba.backend import (
 )
 from sentry.search.snuba.executors import InvalidQueryForExecutor, PrioritySortWeights
 from sentry.snuba.dataset import Dataset
-from sentry.testutils.cases import SnubaTestCase, TestCase
-from sentry.testutils.helpers import Feature
+from sentry.testutils.cases import SnubaTestCase, TestCase, TransactionTestCase
+from sentry.testutils.helpers import Feature, apply_feature_flag_on_cls
 from sentry.testutils.helpers.datetime import before_now, iso_format
 from sentry.testutils.skips import xfail_if_not_postgres
 from sentry.types.group import GroupSubStatus
+from sentry.utils import json
 from sentry.utils.snuba import SENTRY_SNUBA_MAP, SnubaError
 from tests.sentry.issues.test_utils import OccurrenceTestMixin
 
@@ -52,7 +56,7 @@ def date_to_query_format(date):
     return date.strftime("%Y-%m-%dT%H:%M:%S")
 
 
-class SharedSnubaTest(TestCase, SnubaTestCase):
+class SharedSnubaMixin(SnubaTestCase):
     @property
     def backend(self) -> SnubaSearchBackendBase:
         raise NotImplementedError(self)
@@ -113,7 +117,7 @@ class SharedSnubaTest(TestCase, SnubaTestCase):
         return event
 
 
-class EventsSnubaSearchTest(SharedSnubaTest):
+class EventsDatasetTestSetup(SharedSnubaMixin):
     @property
     def backend(self):
         return EventsDatasetSnubaSearchBackend()
@@ -286,6 +290,8 @@ class EventsSnubaSearchTest(SharedSnubaTest):
             results = self.make_query(search_filter_query=f"!{query}", user=user)
             assert sorted(results, key=sort_key) == sorted(expected_negative_groups, key=sort_key)
 
+
+class EventsSnubaSearchTestCases(EventsDatasetTestSetup):
     def test_query(self):
         results = self.make_query(search_filter_query="foo")
         assert set(results) == {self.group1}
@@ -2407,7 +2413,7 @@ class EventsSnubaSearchTest(SharedSnubaTest):
             try:
                 self.make_query(search_filter_query=query)
             except SnubaError as e:
-                self.fail(f"Query {query} errored. Error info: {e}")
+                self.fail(f"Query {query} errored. Error info: {e}")  # type:ignore[attr-defined]
 
         for key in SENTRY_SNUBA_MAP:
             if key in ["project.id", "issue.id", "performance.issue_ids"]:
@@ -2568,7 +2574,61 @@ class EventsSnubaSearchTest(SharedSnubaTest):
         assert len(results) == 0
 
 
-class EventsPriorityTest(SharedSnubaTest, OccurrenceTestMixin):
+class EventsSnubaSearchTest(TestCase, EventsSnubaSearchTestCases):
+    pass
+
+
+@apply_feature_flag_on_cls("organizations:issue-search-group-attributes-side-query")
+class EventsJoinedGroupAttributesSnubaSearchTest(TransactionTestCase, EventsSnubaSearchTestCases):
+    def setUp(self):
+        def post_insert(snapshot: GroupAttributesSnapshot):
+            from sentry.utils import snuba
+
+            try:
+                resp = snuba._snuba_pool.urlopen(
+                    "POST",
+                    "/tests/entities/group_attributes/insert",
+                    body=json.dumps([snapshot]),
+                    headers={},
+                )
+                if resp.status != 200:
+                    raise snuba.SnubaError(
+                        f"HTTP {resp.status} response from Snuba! {json.loads(resp.data)}"
+                    )
+                return None
+            except urllib3.exceptions.HTTPError as err:
+                raise snuba.SnubaError(err)
+
+        with self.options({"issues.group_attributes.send_kafka": True}), mock.patch(
+            "sentry.issues.attributes.produce_snapshot_to_kafka", post_insert
+        ):
+            super().setUp()
+
+    @mock.patch("sentry.utils.metrics.timer")
+    @mock.patch("sentry.utils.metrics.incr")
+    def test_empty_query_logs_metric(self, metrics_incr, metrics_timer):
+        results = self.make_query()
+        assert set(results) == {self.group1, self.group2}
+
+        # introduce a slight delay so the async future has time to run and log the metric
+        time.sleep(0.10)
+
+        metrics_incr_called = False
+        for call in metrics_incr.call_args_list:
+            args, kwargs = call
+            if "snuba.search.group_attributes_joined.events_compared" in set(args):
+                metrics_incr_called = True
+        assert metrics_incr_called
+
+        metrics_timer_called = False
+        for call in metrics_timer.call_args_list:
+            args, kwargs = call
+            if "snuba.search.group_attributes_joined.duration" in set(args):
+                metrics_timer_called = True
+        assert metrics_timer_called
+
+
+class EventsPriorityTest(TestCase, SharedSnubaMixin, OccurrenceTestMixin):
     @property
     def backend(self):
         return EventsDatasetSnubaSearchBackend()
@@ -2973,7 +3033,7 @@ class EventsPriorityTest(SharedSnubaTest, OccurrenceTestMixin):
         assert profile_group_score > 0
 
 
-class EventsTransactionsSnubaSearchTest(SharedSnubaTest):
+class EventsTransactionsSnubaSearchTest(TestCase, SharedSnubaMixin):
     @property
     def backend(self):
         return EventsDatasetSnubaSearchBackend()
@@ -3344,7 +3404,7 @@ class EventsTransactionsSnubaSearchTest(SharedSnubaTest):
         assert set(error_and_perf_issues) > set(error_issues_only)
 
 
-class EventsGenericSnubaSearchTest(SharedSnubaTest, OccurrenceTestMixin):
+class EventsGenericSnubaSearchTest(TestCase, SharedSnubaMixin, OccurrenceTestMixin):
     @property
     def backend(self):
         return EventsDatasetSnubaSearchBackend()
@@ -3630,7 +3690,7 @@ class EventsGenericSnubaSearchTest(SharedSnubaTest, OccurrenceTestMixin):
             )
 
 
-class CdcEventsSnubaSearchTest(SharedSnubaTest):
+class CdcEventsSnubaSearchTest(TestCase, SharedSnubaMixin):
     @property
     def backend(self):
         return CdcEventsDatasetSnubaSearchBackend()