Browse Source

feat(discover): Aggregate orderby support in SnQL (#27743)

Add support for ordering by aggregate functions
in SnQL.
Shruthi 3 years ago
parent
commit
a0ae8f6e63
2 changed files with 96 additions and 5 deletions
  1. src/sentry/search/events/fields.py — 9 additions, 5 deletions
  2. tests/sentry/snuba/test_discover.py — 87 additions, 0 deletions

+ 9 - 5
src/sentry/search/events/fields.py

@@ -2358,23 +2358,27 @@ class QueryFields(QueryBase):
 
         for orderby in orderby_columns:
             bare_orderby = orderby.lstrip("-")
-            resolved_orderby = self.resolve_field(bare_orderby)
+            try:
+                resolved_orderby = self.resolve_field(bare_orderby)
+            except NotImplementedError:
+                resolved_orderby = None
+
             direction = Direction.DESC if orderby.startswith("-") else Direction.ASC
 
+            if is_function(bare_orderby):
+                bare_orderby = resolved_orderby.alias
+
             for selected_column in self.columns:
                 if isinstance(selected_column, Column) and selected_column == resolved_orderby:
                     validated.append(OrderBy(selected_column, direction))
                     continue
+
                 elif (
                     isinstance(selected_column, Function) and selected_column.alias == bare_orderby
                 ):
                     validated.append(OrderBy(selected_column, direction))
                     continue
 
-            # TODO: orderby aggregations
-
-            # TODO: orderby field aliases
-
         if len(validated) == len(orderby_columns):
             return validated
 

+ 87 - 0
tests/sentry/snuba/test_discover.py

@@ -1298,6 +1298,93 @@ class QueryIntegrationTest(SnubaTestCase, TestCase):
         assert len(data[0]["exception_frames.filename"]) == len(expected_filenames)
         assert sorted(data[0]["exception_frames.filename"]) == expected_filenames
 
+    def test_orderby_field_alias(self):
+        data = load_data("android-ndk", timestamp=before_now(minutes=10))
+        events = (
+            ("a" * 32, "not handled", False),
+            ("b" * 32, "is handled", True),
+            ("c" * 32, "undefined", None),
+        )
+        for event in events:
+            data["event_id"] = event[0]
+            data["transaction"] = event[0]
+            data["message"] = event[1]
+            data["exception"]["values"][0]["value"] = event[1]
+            data["exception"]["values"][0]["mechanism"]["handled"] = event[2]
+            self.store_event(data=data, project_id=self.project.id)
+
+        queries = [
+            (["error.unhandled"], [0, 0, 1]),
+            ("error.unhandled", [0, 0, 1]),
+            (["-error.unhandled"], [1, 0, 0]),
+            ("-error.unhandled", [1, 0, 0]),
+        ]
+
+        for orderby, expected in queries:
+            for query_fn in [discover.query, discover.wip_snql_query]:
+                result = query_fn(
+                    selected_columns=["transaction", "error.unhandled"],
+                    query="",
+                    orderby=orderby,
+                    params={
+                        "organization_id": self.organization.id,
+                        "project_id": [self.project.id],
+                        "start": before_now(minutes=12),
+                        "end": before_now(minutes=8),
+                    },
+                )
+
+                data = result["data"]
+                assert [x["error.unhandled"] for x in data] == expected
+
+    def test_orderby_aggregate_function(self):
+        project = self.create_project()
+
+        data = load_data("transaction", timestamp=before_now(minutes=5))
+        data["transaction"] = "/failure_count/success"
+        self.store_event(data, project_id=project.id)
+
+        data = load_data("transaction", timestamp=before_now(minutes=5))
+        data["transaction"] = "/failure_count/unknown"
+        data["contexts"]["trace"]["status"] = "unknown_error"
+        self.store_event(data, project_id=project.id)
+
+        for i in range(6):
+            data = load_data("transaction", timestamp=before_now(minutes=5))
+            data["transaction"] = f"/failure_count/{i}"
+            data["contexts"]["trace"]["status"] = "unauthenticated"
+            self.store_event(data, project_id=project.id)
+
+        data = load_data("transaction", timestamp=before_now(minutes=5))
+        data["transaction"] = "/failure_count/0"
+        data["contexts"]["trace"]["status"] = "unauthenticated"
+        self.store_event(data, project_id=project.id)
+
+        orderbys = [
+            (["failure_count()"], [0, 0, 1, 1, 1, 1, 1, 2]),
+            ("failure_count()", [0, 0, 1, 1, 1, 1, 1, 2]),
+            (["-failure_count()"], [2, 1, 1, 1, 1, 1, 0, 0]),
+            ("-failure_count()", [2, 1, 1, 1, 1, 1, 0, 0]),
+            ("failure_count", [0, 0, 1, 1, 1, 1, 1, 2]),
+            ("-failure_count", [2, 1, 1, 1, 1, 1, 0, 0]),
+        ]
+
+        for orderby, expected in orderbys:
+            for query_fn in [discover.query, discover.wip_snql_query]:
+                result = query_fn(
+                    selected_columns=["transaction", "failure_count()"],
+                    query="",
+                    orderby=orderby,
+                    params={
+                        "start": before_now(minutes=10),
+                        "end": before_now(minutes=2),
+                        "project_id": [project.id],
+                    },
+                )
+                data = result["data"]
+
+                assert [x["failure_count"] for x in data] == expected
+
     def test_field_aliasing_in_selected_columns(self):
         result = discover.query(
             selected_columns=["project.id", "user", "release", "timestamp.to_hour"],