
feat(metrics): Add derived metric to get total count of transactions (#33111)

Iker Barriocanal · 2 years ago · commit 3fe3103b93

+ 23 - 3
src/sentry/sentry_metrics/transactions.py

@@ -1,11 +1,12 @@
-""" Base module for transactions-related metrics """
+"""Base module for transaction-related metrics"""
 from enum import Enum
 
 
 class TransactionMetricKey(Enum):
-    """Identifier for a transaction-related metric
+    """
+    Identifier for a transaction-related metric.
 
-    Values are metric names as submitted by Relay.
+    The values are the metric names submitted by Relay.
     """
 
     USER = "sentry.transactions.user"
@@ -32,3 +33,22 @@ class TransactionMetricKey(Enum):
     BREAKDOWNS_DB = "sentry.transactions.breakdowns.span_ops.db"
     BREAKDOWNS_BROWSER = "sentry.transactions.breakdowns.span_ops.browser"
     BREAKDOWNS_RESOURCE = "sentry.transactions.breakdowns.span_ops.resource"
+
+
+class TransactionTagsKey(Enum):
+    """Identifier for a transaction-related tag."""
+
+    TRANSACTION_STATUS = "transaction.status"
+
+
+class TransactionStatusTagValue(Enum):
+    """
+    Identifier value for a transaction status tag.
+
+    Note that this enum represents only a subset of the possible values.
+    """
+
+    OK = "ok"
+    CANCELLED = "cancelled"
+    UNKNOWN = "unknown"
+    ABORTED = "aborted"

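For orientation (a sketch, not part of the diff): the new enums hold plain strings that are resolved to integer IDs through the metrics indexer, exactly as the tests further down do. The org_id below is a placeholder.

from sentry.sentry_metrics import indexer
from sentry.sentry_metrics.transactions import (
    TransactionStatusTagValue,
    TransactionTagsKey,
)

org_id = 1  # placeholder organization ID
# Resolve the tag key and one of its values to indexed integer IDs,
# mirroring the test setup later in this commit.
status_tag_id = indexer.record(org_id, TransactionTagsKey.TRANSACTION_STATUS.value)
ok_value_id = indexer.record(org_id, TransactionStatusTagValue.OK.value)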
+ 12 - 0
src/sentry/snuba/metrics/fields/base.py

@@ -30,6 +30,7 @@ from sentry.api.utils import InvalidParams
 from sentry.models import Project
 from sentry.sentry_metrics import indexer
 from sentry.sentry_metrics.sessions import SessionMetricKey
+from sentry.sentry_metrics.transactions import TransactionMetricKey
 from sentry.sentry_metrics.utils import resolve_weak
 from sentry.snuba.dataset import Dataset, EntityKey
 from sentry.snuba.metrics.fields.histogram import ClickhouseHistogram, rebucket_histogram
@@ -38,6 +39,7 @@ from sentry.snuba.metrics.fields.snql import (
     abnormal_users,
     addition,
     all_sessions,
+    all_transactions,
     all_users,
     crashed_sessions,
     crashed_users,
@@ -754,6 +756,8 @@ class DerivedMetricKey(Enum):
     SESSION_CRASH_FREE_USER_RATE = "session.crash_free_user_rate"
     SESSION_DURATION = "session.duration"
 
+    TRANSACTION_ALL = "transaction.all"
+
 
 # ToDo(ahmed): Investigate dealing with derived metric keys as Enum objects rather than string
 #  values
@@ -900,6 +904,14 @@ DERIVED_METRICS: Mapping[str, DerivedMetricExpression] = {
             snql=lambda *args, org_id, metric_ids, alias=None: subtraction(*args, alias=alias),
             post_query_func=lambda *args: max(0, *args),
         ),
+        SingularEntityDerivedMetric(
+            metric_name=DerivedMetricKey.TRANSACTION_ALL.value,
+            metrics=[TransactionMetricKey.DURATION.value],
+            unit="transactions",
+            snql=lambda *_, org_id, metric_ids, alias=None: all_transactions(
+                org_id, metric_ids=metric_ids, alias=alias
+            ),
+        ),
     ]
 }
 

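To illustrate what the registration above resolves to (a sketch, not part of the diff): requesting transaction.all yields a SingularEntityDerivedMetric whose SnQL is built by the all_transactions helper added in snql.py below. Invoking that helper by hand with placeholder metric IDs:

from sentry.snuba.metrics.fields.snql import all_transactions

# Placeholder metric IDs; in practice they are the indexed IDs of
# "sentry.transactions.transaction.duration".
expr = all_transactions(org_id=1, metric_ids=[10, 11], alias="transaction.all")
# expr is roughly: countIf(value, metric_id IN (10, 11)) AS transaction.all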
+ 57 - 0
src/sentry/snuba/metrics/fields/snql.py

@@ -1,5 +1,8 @@
+from typing import List
+
 from snuba_sdk import Column, Function
 
+from sentry.sentry_metrics.transactions import TransactionTagsKey
 from sentry.sentry_metrics.utils import resolve_weak
 
 
@@ -45,6 +48,51 @@ def _set_uniq_aggregation_on_session_status_factory(
     )
 
 
+def _aggregation_on_tx_status_func_factory(aggregate):
+    def _get_snql_conditions(org_id, metric_ids, exclude_tx_statuses):
+        metric_match = Function("in", [Column("metric_id"), list(metric_ids)])
+        assert exclude_tx_statuses is not None
+        if len(exclude_tx_statuses) == 0:
+            return metric_match
+
+        tx_col = Column(
+            f"tags[{resolve_weak(org_id, TransactionTagsKey.TRANSACTION_STATUS.value)}]"
+        )
+        excluded_statuses = [resolve_weak(org_id, s) for s in exclude_tx_statuses]
+        exclude_tx_statuses = Function(
+            "notIn",
+            [
+                tx_col,
+                excluded_statuses,
+            ],
+        )
+
+        return Function(
+            "and",
+            [
+                metric_match,
+                exclude_tx_statuses,
+            ],
+        )
+
+    def _snql_on_tx_status_factory(org_id, exclude_tx_statuses: List[str], metric_ids, alias=None):
+        return Function(
+            aggregate,
+            [Column("value"), _get_snql_conditions(org_id, metric_ids, exclude_tx_statuses)],
+            alias,
+        )
+
+    return _snql_on_tx_status_factory
+
+
+def _dist_count_aggregation_on_tx_status_factory(
+    org_id, exclude_tx_statuses: List[str], metric_ids, alias=None
+):
+    return _aggregation_on_tx_status_func_factory("countIf")(
+        org_id, exclude_tx_statuses, metric_ids, alias
+    )
+
+
 def all_sessions(org_id: int, metric_ids, alias=None):
     return _counter_sum_aggregation_on_session_status_factory(
         org_id, session_status="init", metric_ids=metric_ids, alias=alias
@@ -110,6 +158,15 @@ def sessions_errored_set(metric_ids, alias=None):
     )
 
 
+def all_transactions(org_id, metric_ids, alias=None):
+    return _dist_count_aggregation_on_tx_status_factory(
+        org_id,
+        exclude_tx_statuses=[],
+        metric_ids=metric_ids,
+        alias=alias,
+    )
+
+
 def percentage(arg1_snql, arg2_snql, alias=None):
     return Function("minus", [1, Function("divide", [arg1_snql, arg2_snql])], alias)
 

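The factory is parameterized so future derived metrics can exclude specific transaction statuses; all_transactions simply passes an empty exclusion list. A hedged sketch of the non-empty case (the alias, IDs, and choice of excluded statuses are hypothetical, not defined in this commit):

from sentry.sentry_metrics.transactions import TransactionStatusTagValue
from sentry.snuba.metrics.fields.snql import _dist_count_aggregation_on_tx_status_factory

# Hypothetical: count only transactions whose status is not "ok", "cancelled",
# or "unknown". The generated condition is roughly
#   metric_id IN metric_ids AND tags[<status tag id>] NOT IN (<excluded value ids>)
expr = _dist_count_aggregation_on_tx_status_factory(
    org_id=1,
    exclude_tx_statuses=[
        TransactionStatusTagValue.OK.value,
        TransactionStatusTagValue.CANCELLED.value,
        TransactionStatusTagValue.UNKNOWN.value,
    ],
    metric_ids=[10, 11],  # placeholder metric IDs
    alias="transaction.failed_like",  # hypothetical alias
)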
+ 83 - 1
tests/sentry/api/endpoints/test_organization_metric_data.py

@@ -9,7 +9,11 @@ from freezegun import freeze_time
 
 from sentry.sentry_metrics import indexer
 from sentry.sentry_metrics.sessions import SessionMetricKey
-from sentry.sentry_metrics.transactions import TransactionMetricKey
+from sentry.sentry_metrics.transactions import (
+    TransactionMetricKey,
+    TransactionStatusTagValue,
+    TransactionTagsKey,
+)
 from sentry.testutils.cases import MetricsAPIBaseTestCase
 from sentry.utils.cursors import Cursor
 from tests.sentry.api.endpoints.test_organization_metrics import MOCKED_DERIVED_METRICS
@@ -982,6 +986,8 @@ class DerivedMetricsDataTest(MetricsAPIBaseTestCase):
         self.session_error_metric = indexer.record(org_id, SessionMetricKey.SESSION_ERROR.value)
         self.session_status_tag = indexer.record(org_id, "session.status")
         self.release_tag = indexer.record(self.organization.id, "release")
+        self.tx_metric = indexer.record(org_id, TransactionMetricKey.DURATION.value)
+        self.tx_status = indexer.record(org_id, TransactionTagsKey.TRANSACTION_STATUS.value)
         self.transaction_lcp_metric = indexer.record(
             self.organization.id, TransactionMetricKey.MEASUREMENTS_LCP.value
         )
@@ -1820,6 +1826,82 @@ class DerivedMetricsDataTest(MetricsAPIBaseTestCase):
         assert group["totals"]["session.healthy_user"] == 0
         assert group["series"]["session.healthy_user"] == [0]
 
+    def test_all_transactions(self):
+        user_ts = time.time()
+        self._send_buckets(
+            [
+                {
+                    "org_id": self.organization.id,
+                    "project_id": self.project.id,
+                    "metric_id": self.tx_metric,
+                    "timestamp": user_ts,
+                    "tags": {
+                        self.tx_status: indexer.record(
+                            self.organization.id, TransactionStatusTagValue.OK.value
+                        ),
+                    },
+                    "type": "d",
+                    "value": [3.4],
+                    "retention_days": 90,
+                },
+                {
+                    "org_id": self.organization.id,
+                    "project_id": self.project.id,
+                    "metric_id": self.tx_metric,
+                    "timestamp": user_ts,
+                    "tags": {
+                        self.tx_status: indexer.record(
+                            self.organization.id, TransactionStatusTagValue.CANCELLED.value
+                        ),
+                    },
+                    "type": "d",
+                    "value": [0.3],
+                    "retention_days": 90,
+                },
+                {
+                    "org_id": self.organization.id,
+                    "project_id": self.project.id,
+                    "metric_id": self.tx_metric,
+                    "timestamp": user_ts,
+                    "tags": {
+                        self.tx_status: indexer.record(
+                            self.organization.id, TransactionStatusTagValue.UNKNOWN.value
+                        ),
+                    },
+                    "type": "d",
+                    "value": [2.3],
+                    "retention_days": 90,
+                },
+                {
+                    "org_id": self.organization.id,
+                    "project_id": self.project.id,
+                    "metric_id": self.tx_metric,
+                    "timestamp": user_ts,
+                    "tags": {
+                        self.tx_status: indexer.record(
+                            self.organization.id, TransactionStatusTagValue.ABORTED.value
+                        ),
+                    },
+                    "type": "d",
+                    "value": [0.5],
+                    "retention_days": 90,
+                },
+            ],
+            entity="metrics_distributions",
+        )
+        response = self.get_success_response(
+            self.organization.slug,
+            field=["transaction.all"],
+            statsPeriod="1m",
+            interval="1m",
+        )
+
+        assert len(response.data["groups"]) == 1
+        group = response.data["groups"][0]
+        assert group["by"] == {}
+        assert group["totals"] == {"transaction.all": 4}
+        assert group["series"] == {"transaction.all": [4]}
+
     def test_request_private_derived_metric(self):
         for private_name in [
             "session.crashed_and_abnormal_user",

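For completeness, the test above goes through the organization metrics data endpoint; a rough sketch of the equivalent HTTP request (the URL path is an assumption based on the test base class, not shown in this diff):

GET /api/0/organizations/<org_slug>/metrics/data/?field=transaction.all&statsPeriod=1m&interval=1m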
+ 1 - 0
tests/sentry/snuba/metrics/fields/test_base.py

@@ -40,6 +40,7 @@ def get_entity_of_metric_mocked(_, metric_name):
         "sentry.sessions.session": EntityKey.MetricsCounters,
         "sentry.sessions.user": EntityKey.MetricsSets,
         "sentry.sessions.session.error": EntityKey.MetricsSets,
+        "sentry.transactions.transaction.duration": EntityKey.Transactions,
     }[metric_name]
 
 

+ 20 - 0
tests/sentry/snuba/metrics/test_snql.py

@@ -6,6 +6,7 @@ from sentry.snuba.metrics import (
     abnormal_users,
     addition,
     all_sessions,
+    all_transactions,
     all_users,
     crashed_sessions,
     crashed_users,
@@ -98,6 +99,25 @@ class DerivedMetricSnQLTestCase(TestCase):
             alias,
         )
 
+    def test_dist_count_aggregation_on_tx_status(self):
+        org_id = 1985
+        alias = "thefuture"
+        assert all_transactions(org_id, self.metric_ids, alias) == Function(
+            "countIf",
+            [
+                Column("value"),
+                Function(
+                    "in",
+                    [
+                        Column(name="metric_id"),
+                        list(self.metric_ids),
+                    ],
+                    alias=None,
+                ),
+            ],
+            alias=alias,
+        )
+
     def test_percentage_in_snql(self):
         org_id = 666
         alias = "foo.percentage"