ref(transactions): clean up post_process_transaction

Joshua Ferge · 3 months ago · commit 0be2a7aa35

+ 3 - 10
src/sentry/event_manager.py

@@ -2518,9 +2518,6 @@ def _detect_performance_problems(
 
 @sentry_sdk.tracing.trace
 def _sample_transactions_in_save(jobs: Sequence[Job], projects: ProjectsMapping) -> None:
-    if not options.get("save_event_transactions.sample_transactions_in_save"):
-        return
-
     for job in jobs:
         project = job["event"].project
         record_transaction_name_for_clustering(project, job["event"].data)
@@ -2528,9 +2525,6 @@ def _sample_transactions_in_save(jobs: Sequence[Job], projects: ProjectsMapping)
 
 @sentry_sdk.tracing.trace
 def _send_transaction_processed_signals(jobs: Sequence[Job], projects: ProjectsMapping) -> None:
-    if not options.get("save_event_transactions.sample_transactions_in_save"):
-        return
-
     for job in jobs:
         project = job["event"].project
         with sentry_sdk.start_span(op="tasks.post_process_group.transaction_processed_signal"):
@@ -2656,10 +2650,9 @@ def save_transaction_events(jobs: Sequence[Job], projects: ProjectsMapping) -> S
 
     with metrics.timer("save_transaction_events.eventstream_insert_many"):
         for job in jobs:
-            if options.get("save_event_transactions.post_process_cleanup"):
-                # we don't need to send transactions to post process
-                # so set raw so we skip post_process
-                job["raw"] = True
+            # we don't need to send transactions to post process
+            # so set raw so we skip post_process
+            job["raw"] = True
 
         _eventstream_insert_many(jobs)
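
With the option gates gone, clustering and the processed signal always run during save, and every transaction job is marked raw. A post-image sketch of the two helpers, reconstructed from the hunks above; the import paths are inferred from names used elsewhere in this commit, and the `send_robust()` arguments are an assumption modeled on the block removed from post_process.py below:

```python
# Reconstructed post-image sketch; signatures are untyped here for
# self-containment (the real functions annotate Sequence[Job] and
# ProjectsMapping, both defined in src/sentry/event_manager.py).
import sentry_sdk

from sentry.ingest.transaction_clusterer.datasource.redis import (
    record_transaction_name as record_transaction_name_for_clustering,
)
from sentry.signals import transaction_processed


@sentry_sdk.tracing.trace
def _sample_transactions_in_save(jobs, projects):
    # Every saved transaction now feeds the transaction-name clusterer;
    # the option check that gated this is deleted.
    for job in jobs:
        project = job["event"].project
        record_transaction_name_for_clustering(project, job["event"].data)


@sentry_sdk.tracing.trace
def _send_transaction_processed_signals(jobs, projects):
    # The transaction_processed signal likewise always fires from save
    # rather than from the post_process task.
    for job in jobs:
        project = job["event"].project
        with sentry_sdk.start_span(op="tasks.post_process_group.transaction_processed_signal"):
            transaction_processed.send_robust(
                sender=_send_transaction_processed_signals,  # assumed sender
                project=project,
                event=job["event"],
            )
```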
 

+ 7 - 33
src/sentry/tasks/post_process.py

@@ -13,8 +13,7 @@ from django.db.models.signals import post_save
 from django.utils import timezone
 from google.api_core.exceptions import ServiceUnavailable
 
-from sentry import features, options, projectoptions
-from sentry.eventstream.types import EventStreamEventType
+from sentry import features, projectoptions
 from sentry.exceptions import PluginError
 from sentry.issues.grouptype import GroupCategory
 from sentry.issues.issue_occurrence import IssueOccurrence
@@ -23,7 +22,7 @@ from sentry.replays.lib.event_linking import transform_event_for_linking_payload
 from sentry.replays.lib.kafka import initialize_replays_publisher
 from sentry.sentry_metrics.client import generic_metrics_backend
 from sentry.sentry_metrics.use_case_id_registry import UseCaseID
-from sentry.signals import event_processed, issue_unignored, transaction_processed
+from sentry.signals import event_processed, issue_unignored
 from sentry.silo.base import SiloMode
 from sentry.tasks.base import instrumented_task
 from sentry.types.group import GroupSubStatus
@@ -471,10 +470,9 @@ def should_retry_fetch(attempt: int, e: Exception) -> bool:
 fetch_retry_policy = ConditionalRetryPolicy(should_retry_fetch, exponential_delay(1.00))
 
 
-def should_update_escalating_metrics(event: Event, is_transaction_event: bool) -> bool:
+def should_update_escalating_metrics(event: Event) -> bool:
     return (
         features.has("organizations:escalating-metrics-backend", event.project.organization)
-        and not is_transaction_event
         and event.group is not None
         and event.group.issue_type.should_detect_escalation()
     )
@@ -505,22 +503,14 @@ def post_process_group(
 
     with snuba.options_override({"consistent": True}):
         from sentry import eventstore
-        from sentry.eventstore.processing import (
-            event_processing_store,
-            transaction_processing_store,
-        )
-        from sentry.ingest.transaction_clusterer.datasource.redis import (
-            record_transaction_name as record_transaction_name_for_clustering,
-        )
+        from sentry.eventstore.processing import event_processing_store
         from sentry.issues.occurrence_consumer import EventLookupError
         from sentry.models.organization import Organization
         from sentry.models.project import Project
         from sentry.reprocessing2 import is_reprocessed_event
 
-        if eventstream_type == EventStreamEventType.Transaction.value:
-            processing_store = transaction_processing_store
-        else:
-            processing_store = event_processing_store
+        processing_store = event_processing_store
+
         if occurrence_id is None:
             # We use the data being present/missing in the processing store
             # to ensure that we don't duplicate work should the forwarding consumers
@@ -606,22 +596,6 @@ def post_process_group(
         is_reprocessed = is_reprocessed_event(event.data)
         sentry_sdk.set_tag("is_reprocessed", is_reprocessed)
 
-        is_transaction_event = event.get_event_type() == "transaction"
-
-        # Simplified post processing for transaction events.
-        # This should eventually be completely removed and transactions
-        # will not go through any post processing.
-        if is_transaction_event and not options.get(
-            "save_event_transactions.sample_transactions_in_save"
-        ):
-            record_transaction_name_for_clustering(event.project, event.data)
-            with sentry_sdk.start_span(op="tasks.post_process_group.transaction_processed_signal"):
-                transaction_processed.send_robust(
-                    sender=post_process_group,
-                    project=event.project,
-                    event=event,
-                )
-
         metric_tags = {}
         if group_id:
             group_state: GroupState = {
@@ -634,7 +608,7 @@ def post_process_group(
             group_event = update_event_group(event, group_state)
             bind_organization_context(event.project.organization)
             _capture_event_stats(event)
-            if should_update_escalating_metrics(event, is_transaction_event):
+            if should_update_escalating_metrics(event):
                 _update_escalating_metrics(event)
 
             group_event.occurrence = occurrence
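
The upshot in post_process.py: a single processing store, no transaction short-circuit before group processing, and a one-argument `should_update_escalating_metrics`. A post-image sketch of that helper, assembled directly from the hunk above (the `Event` import path is taken from the test file below):

```python
from sentry import features
from sentry.eventstore.models import Event


def should_update_escalating_metrics(event: Event) -> bool:
    # The is_transaction_event parameter is dropped: transactions are
    # marked raw at save time and never reach post_process_group, so only
    # error and issue events are considered here.
    return (
        features.has("organizations:escalating-metrics-backend", event.project.organization)
        and event.group is not None
        and event.group.issue_type.should_detect_escalation()
    )
```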

+ 1 - 3
src/sentry/tasks/store.py

@@ -582,9 +582,7 @@ def _do_save_event(
             raise
 
         finally:
-            if consumer_type == ConsumerType.Transactions and options.get(
-                "save_event_transactions.post_process_cleanup"
-            ):
+            if consumer_type == ConsumerType.Transactions:
                 # we won't use the transaction data in post_process
                 # so we can delete it from the cache now.
                 if cache_key:
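
The cleanup now applies to every transaction save, not only behind the option. The body under `if cache_key:` sits outside the hunk, so the deletion call in the sketch below is an assumption; the name `delete_by_key` is inferred from the mock referenced in tests/sentry/tasks/test_store.py further down:

```python
from sentry.eventstore.processing import transaction_processing_store
from sentry.ingest.types import ConsumerType  # import path assumed


def _do_save_event_tail_sketch(consumer_type, cache_key):
    # Hypothetical wrapper showing the shape of _do_save_event's
    # finally-block after this change.
    try:
        ...  # save work elided
    finally:
        if consumer_type == ConsumerType.Transactions:
            # we won't use the transaction data in post_process
            # so we can delete it from the cache now.
            if cache_key:
                # Assumed call; the real line is outside the hunk shown.
                transaction_processing_store.delete_by_key(cache_key)
```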

+ 0 - 7
tests/sentry/event_manager/test_event_manager.py

@@ -1541,7 +1541,6 @@ class EventManagerTest(TestCase, SnubaTestCase, EventManagerTestMixin, Performan
         # the basic strategy is to simply use the description
         assert spans == [{"hash": hash_values([span["description"]])} for span in data["spans"]]
 
-    @override_options({"save_event_transactions.sample_transactions_in_save": True})
     def test_transaction_sampler_and_recieve(self) -> None:
         # make sure with the option on we don't get any errors
         manager = EventManager(
@@ -1600,15 +1599,12 @@ class EventManagerTest(TestCase, SnubaTestCase, EventManagerTestMixin, Performan
         manager.normalize()
         manager.save(self.project.id)
 
-    @override_options({"save_event_transactions.sample_transactions_in_save": True})
     @patch("sentry.signals.transaction_processed.send_robust")
     @patch("sentry.ingest.transaction_clusterer.datasource.redis._record_sample")
-    @mock.patch("sentry.event_manager.eventstream.backend.insert")
     def test_transaction_sampler_and_recieve_mock_called(
         self,
         transaction_processed_signal_mock: mock.MagicMock,
         mock_record_sample: mock.MagicMock,
-        eventstream_insert: mock.MagicMock,
     ) -> None:
         manager = EventManager(
             make_event(
@@ -1669,10 +1665,7 @@ class EventManagerTest(TestCase, SnubaTestCase, EventManagerTestMixin, Performan
         assert mock_record_sample.mock_calls == [
             mock.call(ClustererNamespace.TRANSACTIONS, self.project, "wait")
         ]
-        eventstream_insert.assert_called_once()
-        assert "skip_consume" not in eventstream_insert.call_args.kwargs
 
-    @override_options({"save_event_transactions.post_process_cleanup": True})
     @mock.patch("sentry.event_manager.eventstream.backend.insert")
     def test_transaction_event_stream_insert_with_raw(
         self, eventstream_insert: mock.MagicMock
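
Since `job["raw"] = True` is now unconditional, the surviving `test_transaction_event_stream_insert_with_raw` covers the eventstream path and the removed non-raw assertions are obsolete. A hypothetical assertion of the new expectation (the test body is not shown in this diff):

```python
# Hypothetical: with raw events, the insert is expected to tell the
# eventstream to skip the post-process consumers.
eventstream_insert.assert_called_once()
assert eventstream_insert.call_args.kwargs.get("skip_consume") is True
```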

+ 0 - 75
tests/sentry/tasks/test_post_process.py

@@ -19,7 +19,6 @@ from sentry.eventstore.models import Event
 from sentry.eventstore.processing import event_processing_store
 from sentry.eventstream.types import EventStreamEventType
 from sentry.feedback.usecases.create_feedback import FeedbackCreationSource
-from sentry.ingest.transaction_clusterer import ClustererNamespace
 from sentry.integrations.models.integration import Integration
 from sentry.integrations.source_code_management.commit_context import CommitInfo, FileBlameInfo
 from sentry.issues.grouptype import (
@@ -63,7 +62,6 @@ from sentry.tasks.post_process import (
     feedback_filter_decorator,
     locks,
     post_process_group,
-    process_event,
     run_post_process_job,
 )
 from sentry.testutils.cases import BaseTestCase, PerformanceIssueTestCase, SnubaTestCase, TestCase
@@ -2697,79 +2695,6 @@ class PostProcessGroupAggregateEventTest(
         return cache_key
 
 
-class TransactionClustererTestCase(TestCase, SnubaTestCase):
-    @patch("sentry.ingest.transaction_clusterer.datasource.redis._record_sample")
-    def test_process_transaction_event_clusterer(
-        self,
-        mock_store_transaction_name,
-    ):
-        min_ago = before_now(minutes=1)
-        event = process_event(
-            data={
-                "project": self.project.id,
-                "event_id": "b" * 32,
-                "transaction": "foo",
-                "start_timestamp": str(min_ago),
-                "timestamp": str(min_ago),
-                "type": "transaction",
-                "transaction_info": {
-                    "source": "url",
-                },
-                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
-            },
-            group_id=0,
-        )
-        cache_key = write_event_to_cache(event)
-        post_process_group(
-            is_new=False,
-            is_regression=False,
-            is_new_group_environment=False,
-            cache_key=cache_key,
-            group_id=None,
-            project_id=self.project.id,
-            eventstream_type=EventStreamEventType.Transaction,
-        )
-
-        assert mock_store_transaction_name.mock_calls == [
-            mock.call(ClustererNamespace.TRANSACTIONS, self.project, "foo")
-        ]
-
-    @patch("sentry.ingest.transaction_clusterer.datasource.redis._record_sample")
-    @override_options({"save_event_transactions.sample_transactions_in_save": True})
-    def test_process_transaction_event_clusterer_flag_off(
-        self,
-        mock_store_transaction_name,
-    ):
-        min_ago = before_now(minutes=1)
-        event = process_event(
-            data={
-                "project": self.project.id,
-                "event_id": "b" * 32,
-                "transaction": "foo",
-                "start_timestamp": str(min_ago),
-                "timestamp": str(min_ago),
-                "type": "transaction",
-                "transaction_info": {
-                    "source": "url",
-                },
-                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
-            },
-            group_id=0,
-        )
-        cache_key = write_event_to_cache(event)
-        post_process_group(
-            is_new=False,
-            is_regression=False,
-            is_new_group_environment=False,
-            cache_key=cache_key,
-            group_id=None,
-            project_id=self.project.id,
-            eventstream_type=EventStreamEventType.Transaction,
-        )
-
-        assert mock_store_transaction_name.mock_calls == []
-
-
 class PostProcessGroupGenericTest(
     TestCase,
     SnubaTestCase,

+ 0 - 4
tests/sentry/tasks/test_store.py

@@ -14,7 +14,6 @@ from sentry.tasks.store import (
     save_event,
     save_event_transaction,
 )
-from sentry.testutils.helpers.options import override_options
 from sentry.testutils.pytest.fixtures import django_db_all
 
 EVENT_ID = "cc3e6c2bb6b6498097f336d1e6979f4b"
@@ -402,11 +401,9 @@ def test_store_consumer_type(
 
 
 @django_db_all
-@override_options({"save_event_transactions.post_process_cleanup": True})
 def test_store_consumer_type_transactions_cleanup(
     default_project,
     mock_save_event,
-    mock_save_event_transaction,
     register_plugin,
     mock_event_processing_store,
     mock_transaction_processing_store,
@@ -447,7 +444,6 @@ def test_store_consumer_type_transactions_cleanup(
     }
 
     mock_transaction_processing_store.get.return_value = transaction_data
-    # mock_transaction_processing_store.delete_by_key = mock.Mock()
 
     mock_transaction_processing_store.store.return_value = "tx:3"
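
Given the unconditional cleanup in store.py above, a natural ending for this test (its remainder is truncated here) is an assertion that the cached payload is removed after save. Hypothetical, assuming the cleanup call is `delete_by_key` per the mock name the removed comment referenced:

```python
# Hypothetical closing assertion for
# test_store_consumer_type_transactions_cleanup: after the transaction is
# saved, its cached payload should be deleted from the processing store.
mock_transaction_processing_store.delete_by_key.assert_called_once_with("tx:3")
```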