
feat(metric-alerts): Support `is:<status>` in metric alerts (#60496)

This adds support for `is:<status>` filters to metric alerts and to the errors
dataset in Discover.
Dan Fuller · 1 year ago · commit f9f0e52082
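Because the diff plumbs `PARSER_CONFIG_OVERRIDES` into every errors-dataset query builder, callers get `is:<status>` support without changes of their own. A minimal usage sketch, mirroring the builder test added below (the project id is a placeholder):

    from sentry.search.events.builder import ErrorsQueryBuilder
    from sentry.search.events.types import QueryBuilderConfig
    from sentry.snuba.dataset import Dataset
    from sentry.snuba.errors import PARSER_CONFIG_OVERRIDES

    builder = ErrorsQueryBuilder(
        dataset=Dataset.Events,
        query="is:unresolved",
        selected_columns=["count()"],
        params={"project_id": [1]},  # placeholder project id
        offset=None,
        limit=None,
        config=QueryBuilderConfig(
            skip_time_conditions=True,
            parser_config_overrides=PARSER_CONFIG_OVERRIDES,
        ),
    )
    # The resulting SnQL joins events to group_attributes and filters on
    # group_status, as the builder test below asserts.
    snql_request = builder.get_snql_query()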

src/sentry/snuba/errors.py (+9 -0)

@@ -6,6 +6,7 @@ import sentry_sdk
 
 from sentry.discover.arithmetic import categorize_columns
 from sentry.exceptions import InvalidSearchQuery
+from sentry.models.group import STATUS_QUERY_CHOICES
 from sentry.search.events.builder import ErrorsQueryBuilder
 from sentry.search.events.builder.errors import ErrorsTimeseriesQueryBuilder
 from sentry.search.events.fields import get_json_meta_type
@@ -15,6 +16,11 @@ from sentry.snuba.discover import EventsResponse, transform_tips, zerofill
 from sentry.snuba.metrics.extraction import MetricSpecType
 from sentry.utils.snuba import SnubaTSResult, bulk_snql_query
 
+is_filter_translation = {}
+for status_key, status_value in STATUS_QUERY_CHOICES.items():
+    is_filter_translation[status_key] = ("status", status_value)
+PARSER_CONFIG_OVERRIDES = {"is_filter_translation": is_filter_translation}
+
 
 def query(
     selected_columns,
@@ -64,6 +70,7 @@ def query(
             has_metrics=has_metrics,
             transform_alias_to_input_format=transform_alias_to_input_format,
             skip_tag_resolution=skip_tag_resolution,
+            parser_config_overrides=PARSER_CONFIG_OVERRIDES,
         ),
     )
     if conditions is not None:
@@ -100,6 +107,7 @@ def timeseries_query(
             config=QueryBuilderConfig(
                 functions_acl=functions_acl,
                 has_metrics=has_metrics,
+                parser_config_overrides=PARSER_CONFIG_OVERRIDES,
             ),
         )
         query_list = [base_builder]
@@ -116,6 +124,7 @@ def timeseries_query(
                 query=query,
                 selected_columns=columns,
                 equations=equations,
+                config=QueryBuilderConfig(parser_config_overrides=PARSER_CONFIG_OVERRIDES),
             )
             query_list.append(comparison_builder)
 

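The override added above is a plain lookup from each `is:` token to a ("status", value) filter; written as a dict comprehension for clarity (the concrete GroupStatus values described in the comment are assumptions based on sentry.models.group):

    from sentry.models.group import STATUS_QUERY_CHOICES

    # STATUS_QUERY_CHOICES maps search tokens such as "unresolved", "resolved",
    # and "ignored" to GroupStatus values, so e.g. "is:unresolved" becomes a
    # filter on the synthetic "status" key that ErrorsQueryBuilder resolves
    # against the joined group_attributes entity.
    is_filter_translation = {
        status_key: ("status", status_value)
        for status_key, status_value in STATUS_QUERY_CHOICES.items()
    }
    PARSER_CONFIG_OVERRIDES = {"is_filter_translation": is_filter_translation}

Passing the same override to `query`, the base timeseries builder, and the comparison builder keeps `is:<status>` working for comparison-style (for example week-over-week) alert queries as well.
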
tests/sentry/search/events/builder/test_errors.py (+43 -0)

@@ -9,6 +9,7 @@ from snuba_sdk.function import Function
 from sentry.search.events.builder import ErrorsQueryBuilder
 from sentry.search.events.types import QueryBuilderConfig
 from sentry.snuba.dataset import Dataset
+from sentry.snuba.errors import PARSER_CONFIG_OVERRIDES
 from sentry.testutils.cases import TestCase
 
 pytestmark = pytest.mark.sentry_metrics
@@ -58,3 +59,45 @@ class ErrorsQueryBuilderTest(TestCase):
                 self.projects,
             ),
         ]
+
+    def test_is_status_simple_query(self):
+        with self.feature("organizations:metric-alert-ignore-archived"):
+            query = ErrorsQueryBuilder(
+                dataset=Dataset.Events,
+                query="is:unresolved",
+                selected_columns=["count_unique(user)"],
+                params={
+                    "project_id": self.projects,
+                },
+                offset=None,
+                limit=None,
+                config=QueryBuilderConfig(
+                    skip_time_conditions=True,
+                    parser_config_overrides=PARSER_CONFIG_OVERRIDES,
+                ),
+            ).get_snql_query()
+        query.validate()
+        e_entity = Entity(Dataset.Events.value, alias=Dataset.Events.value)
+        g_entity = Entity("group_attributes", alias="ga")
+
+        assert query.query.match == Join([Relationship(e_entity, "attributes", g_entity)])
+        assert query.query.select == [
+            Function(
+                function="uniq",
+                parameters=[Column(name="tags[sentry:user]", entity=e_entity)],
+                alias="count_unique_user",
+            )
+        ]
+        assert query.query.where == [
+            Condition(Column("group_status", entity=g_entity), Op.IN, [0]),
+            Condition(
+                Column("project_id", entity=Entity("events", alias="events")),
+                Op.IN,
+                self.projects,
+            ),
+            Condition(
+                Column("project_id", entity=g_entity),
+                Op.IN,
+                self.projects,
+            ),
+        ]

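The same translation covers the other STATUS_QUERY_CHOICES entries. A hypothetical spot check, not part of this commit (the concrete status value is an assumption based on GroupStatus):

    # With the identical builder config but query="is:ignored", the where clause
    # is expected to carry the ignored status instead, roughly:
    #     Condition(Column("group_status", entity=g_entity), Op.IN, [GroupStatus.IGNORED])
    # while an unsupported token such as "is:bogus" should be rejected by the
    # event search parser (InvalidSearchQuery) rather than silently dropped.
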
tests/snuba/api/endpoints/test_organization_events.py (+49 -0)

@@ -6080,6 +6080,55 @@ class OrganizationEventsErrorsDatasetEndpointTest(OrganizationEventsEndpointTest
             assert response.status_code == 200, response.content
             assert response.data["data"][0]["count()"] == 1
 
+    def test_is_status(self):
+        with self.options({"issues.group_attributes.send_kafka": True}):
+            self.store_event(
+                data={
+                    "event_id": "a" * 32,
+                    "timestamp": self.ten_mins_ago_iso,
+                    "fingerprint": ["group1"],
+                },
+                project_id=self.project.id,
+            ).group
+            group_2 = self.store_event(
+                data={
+                    "event_id": "b" * 32,
+                    "timestamp": self.ten_mins_ago_iso,
+                    "fingerprint": ["group2"],
+                },
+                project_id=self.project.id,
+            ).group
+            group_3 = self.store_event(
+                data={
+                    "event_id": "c" * 32,
+                    "timestamp": self.ten_mins_ago_iso,
+                    "fingerprint": ["group3"],
+                },
+                project_id=self.project.id,
+            ).group
+
+            query = {
+                "field": ["count()"],
+                "statsPeriod": "2h",
+                "query": "is:unresolved",
+                "dataset": "errors",
+            }
+            response = self.do_request(query)
+            assert response.status_code == 200, response.content
+            assert response.data["data"][0]["count()"] == 3
+            group_2.status = GroupStatus.IGNORED
+            group_2.substatus = GroupSubStatus.FOREVER
+            group_2.save(update_fields=["status", "substatus"])
+            group_3.status = GroupStatus.IGNORED
+            group_3.substatus = GroupSubStatus.FOREVER
+            group_3.save(update_fields=["status", "substatus"])
+            # XXX: Snuba caches query results, so change the time period so that the query
+            # changes enough to bust the cache.
+            query["statsPeriod"] = "3h"
+            response = self.do_request(query)
+            assert response.status_code == 200, response.content
+            assert response.data["data"][0]["count()"] == 1
+
     def test_short_group_id(self):
         group_1 = self.store_event(
             data={

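At the HTTP level, the endpoint test above corresponds roughly to the request below; a hypothetical sketch in which the host, organization slug, and token are placeholders (the test itself goes through `self.do_request`):

    import requests

    # Count error events whose issue is currently unresolved, using the errors
    # dataset and the new `is:` filter.
    response = requests.get(
        "https://sentry.example.com/api/0/organizations/my-org/events/",
        headers={"Authorization": "Bearer <api-token>"},
        params={
            "field": "count()",
            "statsPeriod": "2h",
            "query": "is:unresolved",
            "dataset": "errors",
        },
    )
    response.raise_for_status()
    # Expect a single row whose "count()" excludes events from ignored groups,
    # matching the assertions in test_is_status above.
    print(response.json()["data"][0]["count()"])
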
tests/snuba/api/endpoints/test_organization_events_meta.py (+1 -1)

@@ -152,7 +152,7 @@ class OrganizationEventsMetaEndpoint(APITestCase, SnubaTestCase, SearchIssueTest
             response = self.client.get(
                 url,
                 {
-                    "query": f"issue:{group_1.qualified_short_id} status:unresolved",
+                    "query": f"issue:{group_1.qualified_short_id} is:unresolved",
                     "dataset": "errors",
                 },
                 format="json",

tests/snuba/api/endpoints/test_organization_events_stats.py (+1 -1)

@@ -183,7 +183,7 @@ class OrganizationEventsStatsEndpointTest(APITestCase, SnubaTestCase, SearchIssu
                 "end": iso_format(self.day_ago + timedelta(hours=2)),
                 "interval": "1h",
                 "dataset": "errors",
-                "query": "status:unresolved",
+                "query": "is:unresolved",
             },
         )
         assert response.status_code == 200, response.content