
ref(grouptype): Update usages of GroupType (#44004)

Replace usages of the `GroupType` enum, `GROUP_TYPE_TO_CATEGORY`,
`GROUP_TYPE_TO_TEXT`, `PERFORMANCE_TYPES`, `PROFILE_TYPES`, and
`GROUP_CATEGORY_TO_TYPES` mappings with the newer dataclass version(s).

See https://github.com/getsentry/sentry/issues/43484 for the full
context behind this change, but the tl;dr is that this makes it simpler to
add a new group type, without needing to update several separate variables.
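
For illustration only (not part of this change), defining a new group type under
the registry would look roughly like the sketch below; the class name, type_id,
slug, and other values are made up, and only the field names come from the
`grouptype.py` diff in this commit:

```python
from dataclasses import dataclass

from sentry.grouptype.grouptype import GroupType
from sentry.types.issues import GroupCategory


# Hypothetical group type: subclassing GroupType is enough to register it,
# since __init_subclass__ adds the class to the type_id/slug/category lookups.
@dataclass(frozen=True)
class MyHypotheticalGroupType(GroupType):
    type_id = 9999  # made-up id for illustration
    slug = "my_hypothetical_issue"
    description = "My Hypothetical Issue"
    category = GroupCategory.PERFORMANCE.value
    ignore_limit = 10  # occurrences before an issue is created; defaults to 3
```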

---------

Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com>
Colleen O'Rourke committed 2 years ago
commit 78177a9fb3

+ 1 - 0
mypy.ini

@@ -66,6 +66,7 @@ files = fixtures/mypy-stubs,
         src/sentry/eventstream/kafka/state.py,
         src/sentry/eventstream/kafka/synchronized.py,
         src/sentry/features/,
+        src/sentry/grouptype/grouptype.py,
         src/sentry/grouping/result.py,
         src/sentry/grouping/strategies/base.py,
         src/sentry/grouping/strategies/legacy.py,

+ 1 - 1
src/sentry/api/endpoints/event_grouping_info.py

@@ -53,7 +53,7 @@ class EventGroupingInfoEndpoint(ProjectEndpoint):
                 # TODO: Generate more unique keys, in case this event has more than
                 # one problem of a given type
                 variants = {
-                    problem.problem.type.name: PerformanceProblemVariant(problem)
+                    problem.problem.type.slug: PerformanceProblemVariant(problem)
                     for problem in problems
                     if problem
                 }

+ 2 - 2
src/sentry/api/endpoints/issue_occurrence.py

@@ -8,9 +8,9 @@ from rest_framework.response import Response
 
 from sentry.api.base import Endpoint, region_silo_endpoint
 from sentry.api.permissions import SuperuserPermission
+from sentry.grouptype.grouptype import ProfileBlockedThreadGroupType
 from sentry.models.project import Project
 from sentry.services.hybrid_cloud.user import user_service
-from sentry.types.issues import GroupType
 from sentry.utils import json
 from sentry.utils.dates import ensure_aware
 from sentry.utils.kafka_config import get_kafka_producer_cluster_options
@@ -111,7 +111,7 @@ class IssueOccurrenceEndpoint(Endpoint):
                         "important": False,
                     },
                 ],
-                "type": GroupType.PROFILE_BLOCKED_THREAD.value,
+                "type": ProfileBlockedThreadGroupType.type_id,
                 "detection_time": ensure_aware(datetime.now()),
                 "event": event,
             }

+ 6 - 5
src/sentry/api/issue_search.py

@@ -11,6 +11,7 @@ from sentry.api.event_search import (
 )
 from sentry.api.event_search import parse_search_query as base_parse_query
 from sentry.exceptions import InvalidSearchQuery
+from sentry.grouptype.grouptype import get_group_type_by_slug, get_group_types_by_category
 from sentry.models import Environment, Organization, Project, Team, User
 from sentry.models.group import STATUS_QUERY_CHOICES, GroupStatus
 from sentry.search.events.constants import EQUALITY_OPERATORS
@@ -21,7 +22,7 @@ from sentry.search.utils import (
     parse_status_value,
     parse_user_value,
 )
-from sentry.types.issues import GROUP_CATEGORY_TO_TYPES, GroupCategory, GroupType
+from sentry.types.issues import GroupCategory
 
 is_filter_translation = {
     "assigned": ("unassigned", False),
@@ -143,12 +144,12 @@ def convert_category_value(
     environments: Optional[Sequence[Environment]],
 ) -> List[int]:
     """Convert a value like 'error' or 'performance' to the GroupType value for issue lookup"""
-    results = []
+    results: List[int] = []
     for category in value:
         group_category = getattr(GroupCategory, category.upper(), None)
         if not group_category:
             raise InvalidSearchQuery(f"Invalid category value of '{category}'")
-        results.extend([type.value for type in GROUP_CATEGORY_TO_TYPES.get(group_category, [])])
+        results.extend(get_group_types_by_category(group_category.value))
     return results
 
 
@@ -161,10 +162,10 @@ def convert_type_value(
     """Convert a value like 'error' or 'performance_n_plus_one_db_queries' to the GroupType value for issue lookup"""
     results = []
     for type in value:
-        group_type = getattr(GroupType, type.upper(), None)
+        group_type = get_group_type_by_slug(type)
         if not group_type:
             raise InvalidSearchQuery(f"Invalid type value of '{type}'")
-        results.append(group_type.value)
+        results.append(group_type.type_id)
     return results
 
 

+ 4 - 6
src/sentry/api/serializers/models/event.py

@@ -9,10 +9,11 @@ from sentry_relay import meta_with_chunks
 
 from sentry.api.serializers import Serializer, register, serialize
 from sentry.eventstore.models import Event, GroupEvent
+from sentry.grouptype.grouptype import get_group_type_by_type_id, get_group_types_by_category
 from sentry.models import EventAttachment, EventError, GroupHash, Release, User, UserReport
 from sentry.sdk_updates import SdkSetupState, get_suggested_updates
 from sentry.search.utils import convert_user_tag_to_query
-from sentry.types.issues import GROUP_CATEGORY_TO_TYPES, GroupCategory, GroupType
+from sentry.types.issues import GroupCategory
 from sentry.utils.json import prune_empty_keys
 from sentry.utils.performance_issues.performance_detection import EventPerformanceProblem
 from sentry.utils.safe import get_path
@@ -113,7 +114,7 @@ def get_problems(item_list: Sequence[Event | GroupEvent]):
         group_hash.group_id: group_hash
         for group_hash in GroupHash.objects.filter(
             group__id__in={e.group_id for e in item_list if getattr(e, "group_id", None)},
-            group__type__in=[gt.value for gt in GROUP_CATEGORY_TO_TYPES[GroupCategory.PERFORMANCE]],
+            group__type__in=get_group_types_by_category(GroupCategory.PERFORMANCE.value),
         )
     }
     return EventPerformanceProblem.fetch_multi(
@@ -358,10 +359,7 @@ class DetailedEventSerializer(EventSerializer):
             return None
         converted_problem = convert_dict_key_case(perf_problem, snake_to_camel_case)
         issue_type = perf_problem.get("type")
-        if issue_type in [type.value for type in GroupType]:
-            converted_problem["issueType"] = GroupType(issue_type).name.lower()
-        else:
-            converted_problem["issueType"] = "Issue"
+        converted_problem["issueType"] = get_group_type_by_type_id(issue_type).slug
         return converted_problem
 
     def serialize(self, obj, attrs, user):

+ 1 - 1
src/sentry/api/serializers/models/group.py

@@ -353,7 +353,7 @@ class GroupSerializerBase(Serializer, ABC):
             "subscriptionDetails": subscription_details,
             "hasSeen": attrs["has_seen"],
             "annotations": attrs["annotations"],
-            "issueType": obj.issue_type.name.lower(),
+            "issueType": obj.issue_type.slug,
             "issueCategory": obj.issue_category.name.lower(),
         }
 

+ 8 - 15
src/sentry/event_manager.py

@@ -76,6 +76,7 @@ from sentry.grouping.api import (
     load_grouping_config,
 )
 from sentry.grouping.result import CalculatedHashes
+from sentry.grouptype.grouptype import GroupType
 from sentry.ingest.inbound_filters import FilterStatKeys
 from sentry.killswitches import killswitch_matches_context
 from sentry.lang.native.utils import STORE_CRASH_REPORTS_ALL, convert_crashreport_count
@@ -126,7 +127,7 @@ from sentry.tasks.integrations import kick_off_status_syncs
 from sentry.tasks.process_buffer import buffer_incr
 from sentry.tasks.relay import schedule_invalidate_project_config
 from sentry.types.activity import ActivityType
-from sentry.types.issues import GROUP_TYPE_TO_TEXT, GroupCategory, GroupType
+from sentry.types.issues import GroupCategory
 from sentry.utils import json, metrics, redis
 from sentry.utils.cache import cache_key_for_event
 from sentry.utils.canonical import CanonicalKeyDict
@@ -156,14 +157,6 @@ issue_rate_limiter = RedisSlidingWindowRateLimiter(
 )
 PERFORMANCE_ISSUE_QUOTA = Quota(3600, 60, 5)
 
-DEFAULT_GROUPHASH_IGNORE_LIMIT = 3
-GROUPHASH_IGNORE_LIMIT_MAP = {
-    GroupType.PERFORMANCE_N_PLUS_ONE_DB_QUERIES: 3,
-    GroupType.PERFORMANCE_SLOW_DB_QUERY: 100,
-    GroupType.PERFORMANCE_CONSECUTIVE_DB_QUERIES: 15,
-    GroupType.PERFORMANCE_UNCOMPRESSED_ASSETS: 100,
-}
-
 
 @dataclass
 class GroupInfo:
@@ -2332,12 +2325,12 @@ def _save_aggregate_performance(jobs: Sequence[PerformanceJob], projects: Projec
                         problem = performance_problems_by_hash[new_grouphash]
 
                         span.set_tag("create_group_transaction.outcome", "no_group")
-                        span.set_tag("group_type", problem.type.name)
+                        span.set_tag("group_type", problem.type.slug)
                         metric_tags["create_group_transaction.outcome"] = "no_group"
-                        metric_tags["group_type"] = problem.type.name
+                        metric_tags["group_type"] = problem.type.slug.upper()
 
                         group_kwargs = kwargs.copy()
-                        group_kwargs["type"] = problem.type.value
+                        group_kwargs["type"] = problem.type.type_id
 
                         group_kwargs["data"]["metadata"] = inject_performance_problem_metadata(
                             group_kwargs["data"]["metadata"], problem
@@ -2418,16 +2411,16 @@ def should_create_group(client: Any, grouphash: str, type: GroupType, project: P
         "performance.performance_issue.grouphash_counted",
         tags={
             "times_seen": times_seen,
-            "group_type": GROUP_TYPE_TO_TEXT.get(type, "Unknown Type"),
+            "group_type": type.description,
         },
         sample_rate=1.0,
     )
 
-    if times_seen >= GROUPHASH_IGNORE_LIMIT_MAP.get(type, DEFAULT_GROUPHASH_IGNORE_LIMIT):
+    if times_seen >= type.ignore_limit:
         client.delete(grouphash)
         metrics.incr(
             "performance.performance_issue.issue_will_be_created",
-            tags={"group_type": type.name},
+            tags={"group_type": type.slug},
             sample_rate=1.0,
         )
 

+ 2 - 2
src/sentry/eventstore/models.py

@@ -21,7 +21,7 @@ from sentry.issues.issue_occurrence import IssueOccurrence
 from sentry.models import EventDict
 from sentry.snuba.events import Column, Columns
 from sentry.spans.grouping.api import load_span_grouping_config
-from sentry.types.issues import GROUP_TYPE_TO_TEXT, GroupCategory
+from sentry.types.issues import GroupCategory
 from sentry.utils import json
 from sentry.utils.cache import memoize
 from sentry.utils.canonical import CanonicalKeyView
@@ -785,7 +785,7 @@ class EventSubjectTemplateData:
                 else self.event.title
             )
         elif name == "issueType":
-            return cast(str, GROUP_TYPE_TO_TEXT.get(self.event.group.issue_type, "Issue"))
+            return self.event.group.issue_type.description
         raise KeyError
 
 

+ 0 - 0
src/sentry/grouptype/__init__.py


+ 28 - 11
src/sentry/grouptype/grouptype.py

@@ -1,8 +1,14 @@
+from __future__ import annotations
+
+from collections import defaultdict
 from dataclasses import dataclass
+from typing import Any, Dict, Set, Type
 
 from sentry.types.issues import GroupCategory
 
-_group_type_registry = {}
+_group_type_registry: Dict[int, Type[GroupType]] = {}
+_slug_lookup: Dict[str, Type[GroupType]] = {}
+_category_lookup: Dict[int, Set[int]] = defaultdict(set)
 
 
 @dataclass(frozen=True)
@@ -11,31 +17,42 @@ class GroupType:
     slug: str
     description: str
     category: int
-    ignore_limit: int = 3  # CEO temp fix - this is the value of DEFAULT_GROUPHASH_IGNORE_LIMIT
+    ignore_limit: int = 3
 
-    def __init_subclass__(cls, **kwargs):
+    def __init_subclass__(cls: Type[GroupType], **kwargs: Any) -> None:
         super().__init_subclass__(**kwargs)
         if _group_type_registry.get(cls.type_id):
             raise ValueError(
                 f"A group type with the type_id {cls.type_id} has already been registered."
             )
         _group_type_registry[cls.type_id] = cls
+        _slug_lookup[cls.slug] = cls
+        _category_lookup[cls.category].add(cls.type_id)
 
-    def __post_init__(self):
+    def __post_init__(self) -> None:
         valid_categories = [category.value for category in GroupCategory]
         if self.category not in valid_categories:
             raise ValueError(f"Category must be one of {valid_categories} from GroupCategory.")
 
 
-def get_group_types_by_category(category):
-    return [child.type_id for child in GroupType.__subclasses__() if child.category == category]
+def get_all_group_type_ids() -> Set[int]:
+    return {type.type_id for type in _group_type_registry.values()}
+
+
+def get_group_types_by_category(category: int) -> Set[int]:
+    return _category_lookup[category]
+
+
+def get_group_type_by_slug(slug: str) -> Type[GroupType]:
+    if slug not in _slug_lookup:
+        raise ValueError(f"No group type with the slug {slug} is registered.")
+    return _slug_lookup[slug]
 
 
-def get_group_type_by_slug(slug):
-    for group_type in _group_type_registry.values():
-        if group_type.slug == slug:
-            return group_type
-    raise ValueError(f"No group type with the slug {slug} is registered.")
+def get_group_type_by_type_id(id: int) -> Type[GroupType]:
+    if id not in _group_type_registry:
+        raise ValueError(f"No group type with the id {id} is registered.")
+    return _group_type_registry[id]
 
 
 @dataclass(frozen=True)

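As a rough usage sketch of the lookup helpers added above (the
`profile_blocked_thread` slug is an assumption, not taken from this diff):

```python
from sentry.grouptype.grouptype import (
    get_all_group_type_ids,
    get_group_type_by_slug,
    get_group_types_by_category,
)
from sentry.types.issues import GroupCategory

# Resolve a registered group type class by slug; this replaces
# getattr(GroupType, value.upper(), None) style lookups.
profile_type = get_group_type_by_slug("profile_blocked_thread")
print(profile_type.type_id, profile_type.description)

# Category value -> set of registered type_ids
# (replaces GROUP_CATEGORY_TO_TYPES / PERFORMANCE_TYPES).
performance_ids = get_group_types_by_category(GroupCategory.PERFORMANCE.value)

# All registered type_ids (replaces iterating over the GroupType enum).
all_ids = get_all_group_type_ids()
```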
Some files were not shown because too many files changed in this diff