Просмотр исходного кода

chore(issue-platform): Remove `write_occurrence_to_platform` and related options (#49497)

Also removes a few remaining references to
`performance.issues.create_issues_through_platform` in tests
Dan Fuller 1 год назад
Родитель
Commit
4d9807f43d

+ 16 - 18
src/sentry/event_manager.py

@@ -81,7 +81,6 @@ from sentry.ingest.inbound_filters import FilterStatKeys
 from sentry.issues.grouptype import GroupCategory
 from sentry.issues.issue_occurrence import IssueOccurrence
 from sentry.issues.producer import produce_occurrence_to_kafka
-from sentry.issues.utils import write_occurrence_to_platform
 from sentry.killswitches import killswitch_matches_context
 from sentry.lang.native.utils import STORE_CRASH_REPORTS_ALL, convert_crashreport_count
 from sentry.locks import locks
@@ -2311,24 +2310,23 @@ def _send_occurrence_to_platform(jobs: Sequence[Job], projects: ProjectsMapping)
 
         performance_problems = job["performance_problems"]
         for problem in performance_problems:
-            if write_occurrence_to_platform(problem, project):
-                occurrence = IssueOccurrence(
-                    id=uuid.uuid4().hex,
-                    resource_id=None,
-                    project_id=project.id,
-                    event_id=event_id,
-                    fingerprint=[problem.fingerprint],
-                    type=problem.type,
-                    issue_title=problem.title,
-                    subtitle=problem.desc,
-                    culprit=event.transaction,
-                    evidence_data=problem.evidence_data,
-                    evidence_display=problem.evidence_display,
-                    detection_time=event.datetime,
-                    level=job["level"],
-                )
+            occurrence = IssueOccurrence(
+                id=uuid.uuid4().hex,
+                resource_id=None,
+                project_id=project.id,
+                event_id=event_id,
+                fingerprint=[problem.fingerprint],
+                type=problem.type,
+                issue_title=problem.title,
+                subtitle=problem.desc,
+                culprit=event.transaction,
+                evidence_data=problem.evidence_data,
+                evidence_display=problem.evidence_display,
+                detection_time=event.datetime,
+                level=job["level"],
+            )
 
-                produce_occurrence_to_kafka(occurrence)
+            produce_occurrence_to_kafka(occurrence)
 
 
 @metrics.wraps("event_manager.save_transaction_events")

+ 0 - 18
src/sentry/issues/utils.py

@@ -1,18 +0,0 @@
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-from sentry import options
-from sentry.issues.grouptype import GroupCategory
-from sentry.utils.performance_issues.performance_problem import PerformanceProblem
-
-if TYPE_CHECKING:
-    from sentry.models import Project
-
-
-def write_occurrence_to_platform(performance_problem: PerformanceProblem, project: Project) -> bool:
-    return bool(
-        performance_problem.type.category == GroupCategory.PERFORMANCE.value
-        # system-wide option
-        and options.get("performance.issues.send_to_issues_platform", False)
-    )

+ 0 - 3
src/sentry/options/defaults.py

@@ -4,7 +4,6 @@ from sentry.logging import LoggingFormat
 from sentry.options import (
     FLAG_ALLOW_EMPTY,
     FLAG_IMMUTABLE,
-    FLAG_MODIFIABLE_BOOL,
     FLAG_MODIFIABLE_RATE,
     FLAG_NOSTORE,
     FLAG_PRIORITIZE_DISK,
@@ -677,8 +676,6 @@ register("performance.issues.consecutive_http.consecutive_count_threshold", defa
 register("performance.issues.consecutive_http.span_duration_threshold", default=1000)
 register("performance.issues.large_http_payload.size_threshold", default=1000000)  # 1MB
 
-# System-wide option for sending occurrences to the issues platform
-register("performance.issues.send_to_issues_platform", default=False, flags=FLAG_MODIFIABLE_BOOL)
 
 # Dynamic Sampling system wide options
 # Killswitch to disable new dynamic sampling behavior specifically new dynamic sampling biases

+ 1 - 6
src/sentry/testutils/cases.py

@@ -537,12 +537,7 @@ class PerformanceIssueTestCase(BaseTestCase):
         ), mock.patch.object(
             issue_type, "noise_config", new=NoiseConfig(noise_limit, timedelta(minutes=1))
         ), override_options(
-            {
-                "performance.issues.all.problem-detection": 1.0,
-                detector_option: 1.0,
-                "performance.issues.send_to_issues_platform": True,
-                "performance.issues.create_issues_through_platform": True,
-            }
+            {"performance.issues.all.problem-detection": 1.0, detector_option: 1.0}
         ), self.feature(
             [
                 "projects:performance-suspect-spans-ingestion",

+ 1 - 2
src/sentry/web/frontend/debug/mail.py

@@ -49,7 +49,6 @@ from sentry.notifications.notifications.base import BaseNotification
 from sentry.notifications.notifications.digest import DigestNotification
 from sentry.notifications.types import GroupSubscriptionReason
 from sentry.notifications.utils import get_group_settings_link, get_interface_list, get_rules
-from sentry.testutils.helpers import override_options
 from sentry.testutils.helpers.datetime import before_now
 from sentry.testutils.helpers.notifications import SAMPLE_TO_OCCURRENCE_MAP, TEST_ISSUE_OCCURRENCE
 from sentry.utils import json, loremipsum
@@ -201,7 +200,7 @@ def make_performance_event(project, sample_name: str):
     perf_data["event_id"] = event_id
     perf_data["project_id"] = project.id
 
-    with override_options({"performance.issues.send_to_issues_platform": True}), mock.patch.object(
+    with mock.patch.object(
         PerformanceNPlusOneGroupType, "noise_config", new=NoiseConfig(0, timedelta(minutes=1))
     ):
         occurrence, group_info = process_event_and_issue_occurrence(

+ 0 - 4
tests/acceptance/test_performance_issues.py

@@ -81,8 +81,6 @@ class PerformanceIssuesTest(AcceptanceTestCase, SnubaTestCase, PerformanceIssueT
             PerformanceNPlusOneGroupType,
             "noise_config",
             new=NoiseConfig(0, timedelta(minutes=1)),
-        ), self.options(
-            {"performance.issues.send_to_issues_platform": True}
         ), self.feature(
             "organizations:issue-platform"
         ):
@@ -120,8 +118,6 @@ class PerformanceIssuesTest(AcceptanceTestCase, SnubaTestCase, PerformanceIssueT
             PerformanceNPlusOneAPICallsGroupType,
             "noise_config",
             new=NoiseConfig(0, timedelta(minutes=1)),
-        ), self.options(
-            {"performance.issues.send_to_issues_platform": True}
         ), self.feature(
             "organizations:issue-platform"
         ):

+ 1 - 3
tests/sentry/issues/test_escalating.py

@@ -90,9 +90,7 @@ class HistoricGroupCounts(
         )
         assert len(Group.objects.all()) == 2
 
-        with self.options({"performance.issues.send_to_issues_platform": True}):
-            perf_event = self.create_performance_issue()
-
+        perf_event = self.create_performance_issue()
         error_event = self._create_events_for_group()
 
         # store_search_issue created two groups

+ 2 - 3
tests/sentry/models/test_groupsnooze.py

@@ -148,9 +148,8 @@ class GroupSnoozeTest(
     @freeze_time()
     def test_rate_reached_perf_issue(self):
         """Test when a performance issue is ignored until it happens 10 times in a day"""
-        with self.options({"performance.issues.send_to_issues_platform": True}):
-            for i in range(0, 10):
-                event = self.create_performance_issue()
+        for i in range(0, 10):
+            event = self.create_performance_issue()
         snooze = GroupSnooze.objects.create(group=event.group, count=10, window=24 * 60)
         assert not snooze.is_valid(test_rates=True)
 

+ 0 - 17
tests/snuba/api/endpoints/test_group_events.py

@@ -435,23 +435,6 @@ class GroupEventsTest(APITestCase, SnubaTestCase, SearchIssueTestMixin, Performa
             [str(event_1.event_id), str(event_2.event_id)]
         )
 
-    def test_perf_issue_on_issue_platform(self):
-        # Just a duplicate of `test_perf_issue` to verify that perf issues read from
-        # the issue platform correctly here. Remove once we kill the related flags.
-        with self.options({"performance.issues.send_to_issues_platform": True}):
-            event_1 = self.create_performance_issue()
-            event_2 = self.create_performance_issue()
-
-        self.login_as(user=self.user)
-
-        url = f"/api/0/issues/{event_1.group.id}/events/"
-        response = self.do_request(url)
-
-        assert response.status_code == 200, response.content
-        assert sorted(map(lambda x: x["eventID"], response.data)) == sorted(
-            [str(event_1.event_id), str(event_2.event_id)]
-        )
-
     def test_generic_issue(self):
         event_1, _, group_info = self.store_search_issue(
             self.project.id,

+ 3 - 6
tests/snuba/api/endpoints/test_organization_events.py

@@ -6021,8 +6021,7 @@ class OrganizationEventsIssuePlatformDatasetEndpointTest(
             "query": f"issue.id:{event.group.id}",
             "dataset": "issuePlatform",
         }
-        with self.options({"performance.issues.create_issues_through_platform": True}):
-            response = self.do_request(query)
+        response = self.do_request(query)
         assert response.status_code == 200, response.content
         assert response.data["data"][0]["count()"] == 1
 
@@ -6088,8 +6087,7 @@ class OrganizationEventsIssuePlatformDatasetEndpointTest(
             "query": f"project:{event.group.project.slug} issue:{event.group.qualified_short_id}",
             "dataset": "issuePlatform",
         }
-        with self.options({"performance.issues.create_issues_through_platform": True}):
-            response = self.do_request(query)
+        response = self.do_request(query)
         assert response.status_code == 200, response.content
         assert response.data["data"][0]["count()"] == 1
 
@@ -6103,8 +6101,7 @@ class OrganizationEventsIssuePlatformDatasetEndpointTest(
             "query": f"project:{event1.group.project.slug} issue:[{event1.group.qualified_short_id},{event2.group.qualified_short_id}]",
             "dataset": "issuePlatform",
         }
-        with self.options({"performance.issues.create_issues_through_platform": True}):
-            response = self.do_request(query)
+        response = self.do_request(query)
         assert response.status_code == 200, response.content
         assert response.data["data"][0]["count()"] == 2
 

Некоторые файлы не были показаны из-за большого количества измененных файлов