Browse Source

ref(reports): Filter to error level (#67704)

Filter key errors to only include "error" level events; it's not very
useful to have "info"- or "warning"-level errors surfaced. This will
affect both weekly reports and daily summary data, and I've confirmed
with product that this is the desired behavior.

From a feedback request
[here](https://www.notion.so/sentry/Filter-top-issues-to-error-level-issues-9da8b7d21d74439bb334f08387cabc68?pvs=4)
Colleen O'Rourke 11 months ago
parent
commit
d0129eb6bb

+ 2 - 0
src/sentry/tasks/summaries/utils.py

@@ -176,6 +176,7 @@ def project_key_errors(
                 Condition(Column("timestamp"), Op.GTE, ctx.start),
                 Condition(Column("timestamp"), Op.LT, ctx.end + timedelta(days=1)),
                 Condition(Column("project_id"), Op.EQ, project.id),
+                Condition(Column("level"), Op.EQ, "error"),
             ],
             groupby=[Column("group_id")],
             orderby=[OrderBy(Function("count", []), Direction.DESC)],
@@ -215,6 +216,7 @@ def project_key_errors(
                         Op.IN,
                         GroupStatus.UNRESOLVED,
                     ),
+                    Condition(Column("level", entity=events_entity), Op.EQ, "error"),
                 ],
                 groupby=[Column("group_id", entity=events_entity)],
                 orderby=[OrderBy(Function("count", []), Direction.DESC)],

+ 54 - 1
tests/sentry/tasks/test_daily_summary.py

@@ -44,13 +44,21 @@ class DailySummaryTest(
     OutcomesSnubaTest, SnubaTestCase, PerformanceIssueTestCase, SlackActivityNotificationTest
 ):
     def store_event_and_outcomes(
-        self, project_id, timestamp, fingerprint, category, release=None, resolve=True
+        self,
+        project_id,
+        timestamp,
+        fingerprint,
+        category,
+        release=None,
+        resolve=True,
+        level="error",
     ):
         if category == DataCategory.ERROR:
             data = {
                 "timestamp": iso_format(timestamp),
                 "stacktrace": copy.deepcopy(DEFAULT_EVENT_DATA["stacktrace"]),
                 "fingerprint": [fingerprint],
+                "level": level,
                 "exception": {
                     "values": [
                         {
@@ -325,6 +333,51 @@ class DailySummaryTest(
         assert (group1, None, 3) in project_context_map.key_errors
         assert (group2, None, 3) in project_context_map.key_errors
 
+    @with_feature("organizations:snql-join-reports")
+    def test_build_summary_data_filter_to_error_level(self):
+        """Test that non-error level issues are filtered out of the results"""
+        with self.options({"issues.group_attributes.send_kafka": True}):
+            for _ in range(3):
+                group1 = self.store_event_and_outcomes(
+                    self.project.id,
+                    self.now,
+                    fingerprint="group-1",
+                    category=DataCategory.ERROR,
+                    resolve=False,
+                    level="info",
+                )
+                group2 = self.store_event_and_outcomes(
+                    self.project.id,
+                    self.now,
+                    fingerprint="group-2",
+                    category=DataCategory.ERROR,
+                    resolve=False,
+                )
+                group3 = self.store_event_and_outcomes(
+                    self.project.id,
+                    self.now,
+                    fingerprint="group-3",
+                    category=DataCategory.ERROR,
+                    resolve=False,
+                )
+
+        summary = build_summary_data(
+            timestamp=self.now.timestamp(),
+            duration=ONE_DAY,
+            organization=self.organization,
+            daily=True,
+        )
+        project_id = self.project.id
+        project_context_map = cast(
+            DailySummaryProjectContext, summary.projects_context_map[project_id]
+        )
+        assert project_context_map.total_today == 9  # total outcomes from today
+        assert project_context_map.comparison_period_avg == 0
+        assert len(project_context_map.key_errors) == 2
+        assert (group1, None, 3) not in project_context_map.key_errors
+        assert (group2, None, 3) in project_context_map.key_errors
+        assert (group3, None, 3) in project_context_map.key_errors
+
     def test_build_summary_data_dedupes_groups(self):
         """
         Test that if a group has multiple escalated and/or regressed activity rows, we only use the group once

+ 72 - 0
tests/sentry/tasks/test_weekly_reports.py

@@ -439,6 +439,7 @@ class WeeklyReportsTest(OutcomesSnubaTest, SnubaTestCase, PerformanceIssueTestCa
     @mock.patch("sentry.analytics.record")
     @mock.patch("sentry.tasks.summaries.weekly_reports.MessageBuilder")
     def test_message_builder_simple_snql_join(self, message_builder, record):
+        """Test that with the snql-join flag enabled we filter resolved issues out of key errors"""
         now = timezone.now()
         three_days_ago = now - timedelta(days=3)
 
@@ -529,6 +530,77 @@ class WeeklyReportsTest(OutcomesSnubaTest, SnubaTestCase, PerformanceIssueTestCa
             user_project_count=1,
         )
 
+    @mock.patch("sentry.tasks.summaries.weekly_reports.MessageBuilder")
+    def test_message_builder_filter_to_error_level(self, message_builder):
+        """Test that we filter non-error level issues out of key errors"""
+        now = timezone.now()
+        three_days_ago = now - timedelta(days=3)
+
+        user = self.create_user()
+        self.create_member(teams=[self.team], user=user, organization=self.organization)
+        with self.options({"issues.group_attributes.send_kafka": True}):
+            self.store_event(
+                data={
+                    "event_id": "a" * 32,
+                    "message": "message",
+                    "timestamp": iso_format(three_days_ago),
+                    "stacktrace": copy.deepcopy(DEFAULT_EVENT_DATA["stacktrace"]),
+                    "fingerprint": ["group-1"],
+                    "level": "info",
+                },
+                project_id=self.project.id,
+            )
+
+            self.store_event(
+                data={
+                    "event_id": "b" * 32,
+                    "message": "message",
+                    "timestamp": iso_format(three_days_ago),
+                    "stacktrace": copy.deepcopy(DEFAULT_EVENT_DATA["stacktrace"]),
+                    "fingerprint": ["group-2"],
+                    "level": "error",
+                },
+                project_id=self.project.id,
+            )
+            self.store_outcomes(
+                {
+                    "org_id": self.organization.id,
+                    "project_id": self.project.id,
+                    "outcome": Outcome.ACCEPTED,
+                    "category": DataCategory.ERROR,
+                    "timestamp": three_days_ago,
+                    "key_id": 1,
+                },
+                num_times=2,
+            )
+
+            self.store_outcomes(
+                {
+                    "org_id": self.organization.id,
+                    "project_id": self.project.id,
+                    "outcome": Outcome.ACCEPTED,
+                    "category": DataCategory.TRANSACTION,
+                    "timestamp": three_days_ago,
+                    "key_id": 1,
+                },
+                num_times=10,
+            )
+        prepare_organization_report(now.timestamp(), ONE_DAY * 7, self.organization.id)
+
+        for call_args in message_builder.call_args_list:
+            message_params = call_args.kwargs
+            context = message_params["context"]
+
+            assert context["organization"] == self.organization
+            assert context["issue_summary"] == {
+                "escalating_substatus_count": 0,
+                "new_substatus_count": 0,
+                "ongoing_substatus_count": 2,
+                "regression_substatus_count": 0,
+                "total_substatus_count": 2,
+            }
+            assert len(context["key_errors"]) == 1
+
     @mock.patch("sentry.analytics.record")
     @mock.patch("sentry.tasks.summaries.weekly_reports.MessageBuilder")
     def test_message_builder_multiple_users_prevent_resend(self, message_builder, record):