
feat(perf_issues): Fix `GroupTagKeyDetailsEndpoint` to work for performance issues (#38860)

This allows `GroupTagKeyDetailsEndpoint` to return results for performance issues.
Dan Fuller · 2 years ago · commit 72e3510821

+ 2 - 4
src/sentry/api/endpoints/group_tagkey_details.py

@@ -33,9 +33,7 @@ class GroupTagKeyDetailsEndpoint(GroupEndpoint, EnvironmentMixin):
             raise ResourceDoesNotExist
 
         try:
-            group_tag_key = tagstore.get_group_tag_key(
-                group.project_id, group.id, environment_id, lookup_key
-            )
+            group_tag_key = tagstore.get_group_tag_key(group, environment_id, lookup_key)
         except tagstore.GroupTagKeyNotFound:
             raise ResourceDoesNotExist
 
@@ -46,7 +44,7 @@ class GroupTagKeyDetailsEndpoint(GroupEndpoint, EnvironmentMixin):
 
         if group_tag_key.top_values is None:
             group_tag_key.top_values = tagstore.get_top_group_tag_values(
-                group.project_id, group.id, environment_id, lookup_key
+                group, environment_id, lookup_key
             )
 
         return Response(serialize(group_tag_key, request.user))

+ 2 - 1
src/sentry/api/endpoints/project_event_details.py

@@ -8,6 +8,7 @@ from sentry import eventstore, features
 from sentry.api.base import region_silo_endpoint
 from sentry.api.bases.project import ProjectEndpoint
 from sentry.api.serializers import DetailedEventSerializer, serialize
+from sentry.issues.query import apply_performance_conditions
 
 
 @region_silo_endpoint
@@ -49,7 +50,7 @@ class ProjectEventDetailsEndpoint(ProjectEndpoint):
                 features.has("organizations:performance-issues", project.organization)
                 and event.get_event_type() == "transaction"
             ):
-                conditions = [[["has", ["group_ids", group_id]], "=", 1]]
+                conditions = apply_performance_conditions([], event.group)
                 _filter = eventstore.Filter(
                     conditions=conditions,
                     project_ids=[event.project_id],

+ 2 - 1
src/sentry/api/helpers/events.py

@@ -7,6 +7,7 @@ from rest_framework.response import Response
 
 from sentry import eventstore
 from sentry.api.serializers import serialize
+from sentry.issues.query import apply_performance_conditions
 from sentry.search.events.filter import get_filter
 from sentry.snuba.dataset import Dataset
 from sentry.types.issues import GroupCategory
@@ -49,6 +50,6 @@ def get_filter_for_group(
         snuba_filter.group_ids = [group.id]
     elif group.issue_category == GroupCategory.PERFORMANCE:
         dataset = Dataset.Transactions
-        snuba_filter.conditions.append([["has", ["group_ids", group.id]], "=", 1])
+        apply_performance_conditions(snuba_filter.conditions, group)
 
     return snuba_filter, dataset

+ 0 - 0
src/sentry/issues/__init__.py


+ 3 - 0
src/sentry/issues/query.py

@@ -0,0 +1,3 @@
+def apply_performance_conditions(conditions, group):
+    conditions.append([["has", ["group_ids", group.id]], "=", 1])
+    return conditions
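
The new helper just centralizes the Snuba clause that the call sites above previously built inline: a `has(group_ids, <group id>)` match. A minimal, self-contained sketch of what it produces (the `SimpleNamespace` object is an illustrative stand-in, not `sentry.models.Group`):

    from types import SimpleNamespace


    def apply_performance_conditions(conditions, group):
        # Match rows whose group_ids array contains this group's id.
        conditions.append([["has", ["group_ids", group.id]], "=", 1])
        return conditions


    # Illustrative stand-in for a Group; only `id` is needed here.
    group = SimpleNamespace(id=123)
    assert apply_performance_conditions([], group) == [[["has", ["group_ids", 123]], "=", 1]]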

+ 2 - 1
src/sentry/models/group.py

@@ -31,6 +31,7 @@ from sentry.db.models import (
     sane_repr,
 )
 from sentry.eventstore.models import GroupEvent
+from sentry.issues.query import apply_performance_conditions
 from sentry.models.grouphistory import record_group_history_from_activity_type
 from sentry.snuba.dataset import Dataset
 from sentry.types.activity import ActivityType
@@ -192,7 +193,7 @@ def get_oldest_or_latest_event_for_environments(
         features.has("organizations:performance-issues", group.organization)
         and group.issue_category == GroupCategory.PERFORMANCE
     ):
-        conditions.append([["has", ["group_ids", group.id]], "=", 1])
+        apply_performance_conditions(conditions, group)
         _filter = eventstore.Filter(
             conditions=conditions,
             project_ids=[group.project_id],

+ 11 - 15
src/sentry/tagstore/base.py

@@ -156,15 +156,15 @@ class TagStorage(Service):
         raise NotImplementedError
 
     @raises([GroupTagKeyNotFound])
-    def get_group_tag_key(self, project_id, group_id, environment_id, key):
+    def get_group_tag_key(self, group, environment_id, key):
         """
-        >>> get_group_tag_key(1, 2, 3, "key1")
+        >>> get_group_tag_key(group, 3, "key1")
         """
         raise NotImplementedError
 
     def get_group_tag_keys(self, group, environment_ids, limit=None, keys=None):
         """
-        >>> get_group_tag_key(1, 2, [3])
+        >>> get_group_tag_key(group, 2, [3])
         """
         raise NotImplementedError
 
@@ -175,9 +175,9 @@ class TagStorage(Service):
         """
         raise NotImplementedError
 
-    def get_group_tag_values(self, project_id, group_id, environment_id, key):
+    def get_group_tag_values(self, group, environment_id, key):
         """
-        >>> get_group_tag_values(1, 2, 3, "key1")
+        >>> get_group_tag_values(group, 3, "key1")
         """
         raise NotImplementedError
 
@@ -262,17 +262,15 @@ class TagStorage(Service):
     ):
         raise NotImplementedError
 
-    def get_group_tag_value_count(self, project_id, group_id, environment_id, key):
+    def get_group_tag_value_count(self, group, environment_id, key):
         """
-        >>> get_group_tag_value_count(1, 2, 3, 'key1')
+        >>> get_group_tag_value_count(group, 3, 'key1')
         """
         raise NotImplementedError
 
-    def get_top_group_tag_values(
-        self, project_id, group_id, environment_id, key, limit=TOP_VALUES_DEFAULT_LIMIT
-    ):
+    def get_top_group_tag_values(self, group, environment_id, key, limit=TOP_VALUES_DEFAULT_LIMIT):
         """
-        >>> get_top_group_tag_values(1, 2, 3, 'key1')
+        >>> get_top_group_tag_values(group, 3, 'key1')
         """
         raise NotImplementedError
 
@@ -325,12 +323,10 @@ class TagStorage(Service):
         environment_id = environment_ids[0] if environment_ids else None
         for tk in tag_keys:
             tk.top_values = self.get_top_group_tag_values(
-                group.project_id, group.id, environment_id, tk.key, limit=value_limit
+                group, environment_id, tk.key, limit=value_limit
             )
             if tk.count is None:
-                tk.count = self.get_group_tag_value_count(
-                    group.project_id, group.id, environment_id, tk.key
-                )
+                tk.count = self.get_group_tag_value_count(group, environment_id, tk.key)
 
         return tag_keys
 

+ 38 - 32
src/sentry/tagstore/snuba/backend.py

@@ -10,6 +10,7 @@ from pytz import UTC
 from sentry_relay.consts import SPAN_STATUS_CODE_TO_NAME
 
 from sentry.api.utils import default_start_end_dates
+from sentry.issues.query import apply_performance_conditions
 from sentry.models import (
     Group,
     Project,
@@ -106,19 +107,19 @@ class SnubaTagStorage(TagStorage):
                 return GroupTagKey(group_id=group_id, **data)
 
     def __get_tag_key_and_top_values(
-        self, project_id, group_id, environment_id, key, limit=3, raise_on_empty=True, **kwargs
+        self, project_id, group, environment_id, key, limit=3, raise_on_empty=True, **kwargs
     ):
         tag = f"tags[{key}]"
         filters = {"project_id": get_project_list(project_id)}
         if environment_id:
             filters["environment"] = [environment_id]
-        if group_id is not None:
-            filters["group_id"] = [group_id]
         conditions = kwargs.get("conditions", [])
         aggregations = kwargs.get("aggregations", [])
 
+        dataset, conditions, filters = self.apply_group_filters_conditions(
+            group, conditions, filters
+        )
         conditions.append([tag, "!=", ""])
-        conditions.append(DEFAULT_TYPE_CONDITION)
         aggregations += [
             ["uniq", tag, "values_seen"],
             ["count()", "", "count"],
@@ -127,7 +128,7 @@ class SnubaTagStorage(TagStorage):
         ]
 
         result, totals = snuba.query(
-            dataset=Dataset.Events,
+            dataset=dataset,
             start=kwargs.get("start"),
             end=kwargs.get("end"),
             groupby=[tag],
@@ -141,14 +142,14 @@ class SnubaTagStorage(TagStorage):
         )
 
         if raise_on_empty and (not result or totals.get("count", 0) == 0):
-            raise TagKeyNotFound if group_id is None else GroupTagKeyNotFound
+            raise TagKeyNotFound if group is None else GroupTagKeyNotFound
         else:
-            if group_id is None:
+            if group is None:
                 key_ctor = TagKey
                 value_ctor = TagValue
             else:
-                key_ctor = functools.partial(GroupTagKey, group_id=group_id)
-                value_ctor = functools.partial(GroupTagValue, group_id=group_id)
+                key_ctor = functools.partial(GroupTagKey, group_id=group.id)
+                value_ctor = functools.partial(GroupTagValue, group_id=group.id)
 
             top_values = [
                 value_ctor(
@@ -240,12 +241,9 @@ class SnubaTagStorage(TagStorage):
         if group is not None:
             # We override dataset changes from `include_transactions` here. They aren't relevant
             # when filtering by a group.
-            dataset = Dataset.Events
-            if group.issue_category == GroupCategory.PERFORMANCE:
-                dataset = Dataset.Transactions
-                conditions.append([["has", ["group_ids", group.id]], "=", 1])
-            else:
-                filters["group_id"] = [group.id]
+            dataset, conditions, filters = self.apply_group_filters_conditions(
+                group, conditions, filters
+            )
 
         if keys is not None:
             filters["tags_key"] = sorted(keys)
@@ -405,9 +403,9 @@ class SnubaTagStorage(TagStorage):
         )
         return set(key.top_values)
 
-    def get_group_tag_key(self, project_id, group_id, environment_id, key):
+    def get_group_tag_key(self, group, environment_id, key):
         return self.__get_tag_key_and_top_values(
-            project_id, group_id, environment_id, key, limit=TOP_VALUES_DEFAULT_LIMIT
+            group.project_id, group, environment_id, key, limit=TOP_VALUES_DEFAULT_LIMIT
         )
 
     def get_group_tag_keys(self, group, environment_ids, limit=None, keys=None, **kwargs):
@@ -426,11 +424,11 @@ class SnubaTagStorage(TagStorage):
     def get_group_tag_value(self, project_id, group_id, environment_id, key, value):
         return self.__get_tag_value(project_id, group_id, environment_id, key, value)
 
-    def get_group_tag_values(self, project_id, group_id, environment_id, key):
+    def get_group_tag_values(self, group, environment_id, key):
         # NB this uses a 'top' values function, but the limit is None so it should
         # return all values for this key.
         key = self.__get_tag_key_and_top_values(
-            project_id, group_id, environment_id, key, limit=None, raise_on_empty=False
+            group.project_id, group, environment_id, key, limit=None, raise_on_empty=False
         )
         return set(key.top_values)
 
@@ -515,26 +513,37 @@ class SnubaTagStorage(TagStorage):
 
         return {issue: fix_tag_value_data(data) for issue, data in result.items()}
 
-    def get_group_tag_value_count(self, project_id, group_id, environment_id, key):
+    def apply_group_filters_conditions(self, group: Group, conditions, filters):
+        dataset = Dataset.Events
+        if group:
+            if group.issue_category == GroupCategory.PERFORMANCE:
+                dataset = Dataset.Transactions
+                apply_performance_conditions(conditions, group)
+            else:
+                filters["group_id"] = [group.id]
+        return dataset, conditions, filters
+
+    def get_group_tag_value_count(self, group, environment_id, key):
         tag = f"tags[{key}]"
-        filters = {"project_id": get_project_list(project_id), "group_id": [group_id]}
+        filters = {"project_id": get_project_list(group.project_id)}
         if environment_id:
             filters["environment"] = [environment_id]
         conditions = [[tag, "!=", ""]]
         aggregations = [["count()", "", "count"]]
+        dataset, conditions, filters = self.apply_group_filters_conditions(
+            group, conditions, filters
+        )
 
         return snuba.query(
-            dataset=Dataset.Events,
+            dataset=dataset,
             conditions=conditions,
             filter_keys=filters,
            aggregations=aggregations,
            referrer="tagstore.get_group_tag_value_count",
        )
 
-    def get_top_group_tag_values(
-        self, project_id, group_id, environment_id, key, limit=TOP_VALUES_DEFAULT_LIMIT
-    ):
-        tag = self.__get_tag_key_and_top_values(project_id, group_id, environment_id, key, limit)
+    def get_top_group_tag_values(self, group, environment_id, key, limit=TOP_VALUES_DEFAULT_LIMIT):
+        tag = self.__get_tag_key_and_top_values(group.project_id, group, environment_id, key, limit)
         return tag.top_values
 
     def get_group_tag_keys_and_top_values(
@@ -561,12 +570,9 @@ class SnubaTagStorage(TagStorage):
             filters["environment"] = environment_ids
         if keys is not None:
             filters["tags_key"] = keys
-        dataset = Dataset.Events
-        if group.issue_category == GroupCategory.PERFORMANCE:
-            dataset = Dataset.Transactions
-            conditions.append([["has", ["group_ids", group.id]], "=", 1])
-        else:
-            filters["group_id"] = [group.id]
+        dataset, conditions, filters = self.apply_group_filters_conditions(
+            group, conditions, filters
+        )
         aggregations = kwargs.get("aggregations", [])
        aggregations += [
            ["count()", "", "count"],

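The repeated dataset/filter branching in this backend is now funneled through `apply_group_filters_conditions`: error groups keep filtering by `group_id` on the events dataset, while performance groups switch to the transactions dataset and match the group via `group_ids`. A hedged, self-contained sketch of that routing, with stand-ins for `Group`, `GroupCategory`, and `Dataset`:

    from dataclasses import dataclass
    from enum import Enum


    class Category(Enum):
        # Stand-in for sentry.types.issues.GroupCategory.
        ERROR = 1
        PERFORMANCE = 2


    @dataclass
    class FakeGroup:
        # Stand-in for sentry.models.Group; only the fields used here.
        id: int
        issue_category: Category


    def apply_performance_conditions(conditions, group):
        conditions.append([["has", ["group_ids", group.id]], "=", 1])
        return conditions


    def apply_group_filters_conditions(group, conditions, filters):
        dataset = "events"  # Dataset.Events in the real code
        if group:
            if group.issue_category == Category.PERFORMANCE:
                dataset = "transactions"  # Dataset.Transactions
                apply_performance_conditions(conditions, group)
            else:
                filters["group_id"] = [group.id]
        return dataset, conditions, filters


    # Error group: stays on the events dataset and filters by group_id.
    print(apply_group_filters_conditions(FakeGroup(1, Category.ERROR), [], {}))
    # Performance group: moves to transactions and matches group_ids.
    print(apply_group_filters_conditions(FakeGroup(2, Category.PERFORMANCE), [], {}))
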
+ 62 - 0
tests/sentry/api/endpoints/test_group_tagkey_details.py

@@ -1,7 +1,11 @@
+from unittest import mock
+
+from sentry.event_manager import _pull_out_data
 from sentry.models import Group
 from sentry.testutils import APITestCase, SnubaTestCase
 from sentry.testutils.helpers.datetime import before_now, iso_format
 from sentry.testutils.silo import region_silo_test
+from sentry.types.issues import GroupType
 
 
 @region_silo_test
@@ -26,3 +30,61 @@ class GroupTagDetailsTest(APITestCase, SnubaTestCase):
         assert response.status_code == 200, response.content
         assert response.data["key"] == "foo"
         assert response.data["totalValues"] == 3
+
+    def test_simple_perf(self):
+        transaction_event_data = {
+            "message": "hello",
+            "type": "transaction",
+            "culprit": "app/components/events/eventEntries in map",
+            "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
+        }
+
+        def hack_pull_out_data(jobs, projects):
+            _pull_out_data(jobs, projects)
+            for job in jobs:
+                job["event"].groups = [perf_group]
+            return jobs, projects
+
+        perf_group = self.create_group(type=GroupType.PERFORMANCE_SLOW_SPAN.value)
+
+        with mock.patch("sentry.event_manager._pull_out_data", hack_pull_out_data):
+            self.store_event(
+                data={
+                    **transaction_event_data,
+                    "event_id": "a" * 32,
+                    "timestamp": iso_format(before_now(minutes=1)),
+                    "start_timestamp": iso_format(before_now(minutes=1)),
+                    "tags": {"foo": "bar", "biz": "baz"},
+                    "release": "releaseme",
+                },
+                project_id=self.project.id,
+            )
+            self.store_event(
+                data={
+                    **transaction_event_data,
+                    "event_id": "b" * 32,
+                    "timestamp": iso_format(before_now(minutes=2)),
+                    "start_timestamp": iso_format(before_now(minutes=2)),
+                    "tags": {"foo": "quux"},
+                    "release": "releaseme",
+                },
+                project_id=self.project.id,
+            )
+
+        for i in range(3):
+            self.store_event(
+                data={
+                    "tags": {"foo": "bar"},
+                    "fingerprint": ["group1"],
+                    "timestamp": iso_format(before_now(seconds=1)),
+                },
+                project_id=self.project.id,
+            )
+
+        self.login_as(user=self.user)
+
+        url = f"/api/0/issues/{perf_group.id}/tags/foo/"
+        response = self.client.get(url, format="json")
+        assert response.status_code == 200, response.content
+        assert response.data["key"] == "foo"
+        assert response.data["totalValues"] == 2

+ 2 - 2
tests/sentry/eventstore/snuba/test_backend.py

@@ -4,6 +4,7 @@ from sentry.event_manager import _pull_out_data
 from sentry.eventstore.base import Filter
 from sentry.eventstore.models import Event
 from sentry.eventstore.snuba.backend import SnubaEventStorage
+from sentry.issues.query import apply_performance_conditions
 from sentry.testutils import SnubaTestCase, TestCase
 from sentry.testutils.helpers.datetime import before_now, iso_format
 from sentry.testutils.silo import region_silo_test
@@ -282,10 +283,9 @@ class SnubaEventStorageTest(TestCase, SnubaTestCase):
         assert next_event is None
 
     def test_transaction_get_next_prev_event_id(self):
-
         _filter = Filter(
             project_ids=[self.project2.id],
-            conditions=[[["has", ["group_ids", self.group.id]], "=", 1]],
+            conditions=apply_performance_conditions([], self.group),
         )
         with self.feature("organizations:performance-issues"):
             event = self.eventstore.get_event_by_id(self.project2.id, "f" * 32)

Some files were not shown because too many files changed in this diff