Browse Source

feat(metrics): Implement two-level bucketing (#56369)

Riccardo Busetti 1 year ago
parent
commit
11b9d3d2cd

+ 34 - 68
src/minimetrics/core.py

@@ -10,7 +10,7 @@ import sentry_sdk
 from minimetrics.transport import MetricEnvelopeTransport, RelayStatsdEncoder
 from minimetrics.types import (
     BucketKey,
-    FlushedMetric,
+    FlushableBuckets,
     FlushedMetricValue,
     Metric,
     MetricTagsExternal,
@@ -140,8 +140,9 @@ class Aggregator:
     DEFAULT_SAMPLE_RATE = 1.0
 
     def __init__(self) -> None:
-        # Buckets holding the grouped metrics.
-        self.buckets: Dict[BucketKey, Metric[Any]] = {}
+        # Buckets holding the grouped metrics. The buckets are represented in two levels, in order to more efficiently
+        # perform locking.
+        self.buckets: Dict[int, Dict[BucketKey, Metric[Any]]] = {}
         # Stores the total weight of the in-memory buckets. Weight is determined on a per-metric-type basis and
         # represents how much weight there is to represent the metric (e.g., counter = 1, distribution = n).
         self._buckets_total_weight: int = 0
@@ -181,40 +182,43 @@ class Aggregator:
             self._flush_event.wait(5.0)
 
     def _flush(self):
+        flushable_buckets, _ = self._flushable_buckets()
+        if flushable_buckets:
+            # You should emit metrics to `metrics` only inside this method, since we know that if we received
+            # metrics the `sentry.utils.metrics` file was initialized. If we do it before, it will likely cause a
+            # circular dependency since the methods in the `sentry.utils.metrics` depend on the backend
+            # initialization, thus if you emit metrics when a backend is initialized Python will throw an error.
+            self._emit(flushable_buckets)
+
+    def _flushable_buckets(self) -> Tuple[FlushableBuckets, bool]:
         with self._lock:
-            buckets = self.buckets
             force_flush = self._force_flush
-            flushed_metrics: Optional[Iterable[FlushedMetric]] = None
+            cutoff = time.time() - self.ROLLUP_IN_SECONDS
+            flushable_buckets: Any = []
+            weight_to_remove = 0
 
             if force_flush:
-                flushed_metrics = buckets.items()
+                flushable_buckets = self.buckets.items()
                 self.buckets = {}
                 self._buckets_total_weight = 0
                 self._force_flush = False
-
             else:
-                cutoff = time.time() - self.ROLLUP_IN_SECONDS
-                weight_to_remove = 0
-                flushed_metrics = []
-                for bucket_key, metric in buckets.items():
-                    if bucket_key[0] > cutoff:
+                for buckets_timestamp, buckets in self.buckets.items():
+                    # If the timestamp of the bucket is newer than the cutoff, we want to skip it.
+                    if buckets_timestamp > cutoff:
                         continue
 
-                    flushed_metrics.append((bucket_key, metric))
-                    weight_to_remove += metric.weight
+                    flushable_buckets.append((buckets_timestamp, buckets))
 
-                # We remove all flushed buckets, in order to avoid memory leaks.
-                for bucket_key, _ in flushed_metrics:
-                    buckets.pop(bucket_key)
+                # We will clear the elements while holding the lock, in order to avoid requesting it downstream again.
+                for buckets_timestamp, buckets in flushable_buckets:
+                    for _, metric in buckets.items():
+                        weight_to_remove += metric.weight
+                    del self.buckets[buckets_timestamp]
 
                 self._buckets_total_weight -= weight_to_remove
 
-        if flushed_metrics:
-            # You should emit metrics to `metrics` only inside this method, since we know that if we received
-            # metrics the `sentry.utils.metrics` file was initialized. If we do it before, it will likely cause a
-            # circular dependency since the methods in the `sentry.utils.metrics` depend on the backend
-            # initialization, thus if you emit metrics when a backend is initialized Python will throw an error.
-            self._emit(flushed_metrics, force_flush)
+        return flushable_buckets, force_flush
 
     def add(
         self,
@@ -226,17 +230,15 @@ class Aggregator:
         timestamp: Optional[float],
     ) -> None:
         self._ensure_thread()
-        if in_minimetrics():
-            return
 
-        if self._flusher is None:
+        if in_minimetrics() or self._flusher is None:
             return
 
         if timestamp is None:
             timestamp = time.time()
 
+        bucket_timestamp = int((timestamp // self.ROLLUP_IN_SECONDS) * self.ROLLUP_IN_SECONDS)
         bucket_key = (
-            int((timestamp // self.ROLLUP_IN_SECONDS) * self.ROLLUP_IN_SECONDS),
             ty,
             key,
             unit,
@@ -246,12 +248,13 @@ class Aggregator:
         )
 
         with self._lock:
-            metric = self.buckets.get(bucket_key)
+            local_buckets = self.buckets.setdefault(bucket_timestamp, {})
+            metric = local_buckets.get(bucket_key)
             if metric is not None:
                 previous_weight = metric.weight
                 metric.add(value)
             else:
-                metric = self.buckets[bucket_key] = METRIC_TYPES[ty](value)
+                metric = local_buckets[bucket_key] = METRIC_TYPES[ty](value)
                 previous_weight = 0
 
             self._buckets_total_weight += metric.weight - previous_weight
@@ -288,50 +291,13 @@ class Aggregator:
             self._force_flush = True
             self._flush_event.set()
 
-    def _emit(self, flushed_metrics: Iterable[FlushedMetric], force_flush: bool) -> Any:
+    def _emit(self, flushable_buckets: FlushableBuckets) -> Any:
         if options.get("delightful_metrics.enable_envelope_forwarding"):
             try:
-                self._transport.send(flushed_metrics)
+                self._transport.send(flushable_buckets)
             except Exception as e:
                 sentry_sdk.capture_exception(e)
 
-        # We obtain the counts for each metric type of how many buckets we have and how much weight is in each
-        # bucket.
-        stats_by_type: Dict[MetricType, Tuple[int, int]] = {}
-        for bucket_key, metric in flushed_metrics:
-            (prev_buckets_count, prev_buckets_weight) = stats_by_type.get(bucket_key[1], (0, 0))
-            stats_by_type[bucket_key[1]] = (
-                prev_buckets_count + 1,
-                prev_buckets_weight + metric.weight,
-            )
-
-        for metric_type, (buckets_count, buckets_weight) in stats_by_type.items():
-            # We want to emit a metric on how many buckets and weight there was for a metric type.
-            metrics.timing(
-                key="minimetrics.flushed_buckets",
-                value=buckets_count,
-                tags={"metric_type": metric_type, "force_flush": force_flush},
-                sample_rate=self.DEFAULT_SAMPLE_RATE,
-            )
-            metrics.incr(
-                key="minimetrics.flushed_buckets_counter",
-                amount=buckets_count,
-                tags={"metric_type": metric_type, "force_flush": force_flush},
-                sample_rate=self.DEFAULT_SAMPLE_RATE,
-            )
-            metrics.timing(
-                key="minimetrics.flushed_buckets_weight",
-                value=buckets_weight,
-                tags={"metric_type": metric_type, "force_flush": force_flush},
-                sample_rate=self.DEFAULT_SAMPLE_RATE,
-            )
-            metrics.incr(
-                key="minimetrics.flushed_buckets_weight_counter",
-                amount=buckets_weight,
-                tags={"metric_type": metric_type, "force_flush": force_flush},
-                sample_rate=self.DEFAULT_SAMPLE_RATE,
-            )
-
     def _to_internal_metric_tags(self, tags: Optional[MetricTagsExternal]) -> MetricTagsInternal:
         rv = []
         for key, value in (tags or {}).items():

+ 49 - 7
src/minimetrics/transport.py

@@ -1,12 +1,12 @@
 import re
 from functools import partial
 from io import BytesIO
-from typing import Iterable
+from typing import Dict, Iterable, List, Tuple
 
 import sentry_sdk
 from sentry_sdk.envelope import Envelope, Item
 
-from minimetrics.types import FlushedMetric
+from minimetrics.types import FlushableBuckets, FlushableMetric, MetricType
 from sentry.utils import metrics
 
 
@@ -22,9 +22,9 @@ sanitize_value = partial(re.compile(r"[^a-zA-Z0-9_/.]").sub, "")
 
 
 class RelayStatsdEncoder:
-    def _encode(self, value: FlushedMetric, out: BytesIO):
+    def _encode(self, value: FlushableMetric, out: BytesIO):
         _write = out.write
-        (timestamp, metric_type, metric_name, metric_unit, metric_tags), metric = value
+        timestamp, (metric_type, metric_name, metric_unit, metric_tags), metric = value
         metric_name = sanitize_value(metric_name) or "invalid-metric-name"
         _write(f"{metric_name}@{metric_unit}".encode())
 
@@ -51,12 +51,14 @@ class RelayStatsdEncoder:
 
         _write(f"|T{timestamp}".encode("ascii"))
 
-    def encode_multiple(self, values: Iterable[FlushedMetric]) -> bytes:
+    def encode_multiple(self, values: Iterable[FlushableMetric]) -> bytes:
         out = BytesIO()
         _write = out.write
+
         for value in values:
             self._encode(value, out)
             _write(b"\n")
+
         return out.getvalue()
 
 
@@ -64,7 +66,7 @@ class MetricEnvelopeTransport:
     def __init__(self, encoder: RelayStatsdEncoder):
         self._encoder = encoder
 
-    def send(self, flushed_metrics: Iterable[FlushedMetric]):
+    def send(self, flushable_buckets: FlushableBuckets):
         client = sentry_sdk.Hub.current.client
         if client is None:
             return
@@ -73,7 +75,47 @@ class MetricEnvelopeTransport:
         if transport is None:
             return
 
-        encoded_metrics = self._encoder.encode_multiple(flushed_metrics)
+        flushable_metrics: List[FlushableMetric] = []
+        stats_by_type: Dict[MetricType, Tuple[int, int]] = {}
+        for buckets_timestamp, buckets in flushable_buckets:
+            for bucket_key, metric in buckets.items():
+                flushable_metric: FlushableMetric = (buckets_timestamp, bucket_key, metric)
+                flushable_metrics.append(flushable_metric)
+                (prev_buckets_count, prev_buckets_weight) = stats_by_type.get(bucket_key[0], (0, 0))
+                stats_by_type[bucket_key[0]] = (
+                    prev_buckets_count + 1,
+                    prev_buckets_weight + metric.weight,
+                )
+
+        encoded_metrics = self._encoder.encode_multiple(flushable_metrics)
+
+        for metric_type, (buckets_count, buckets_weight) in stats_by_type.items():
+            # We want to emit a metric on how many buckets and weight there was for a metric type.
+            metrics.timing(
+                key="minimetrics.flushed_buckets",
+                value=buckets_count,
+                tags={"metric_type": metric_type},
+                sample_rate=1.0,
+            )
+            metrics.incr(
+                key="minimetrics.flushed_buckets_counter",
+                amount=buckets_count,
+                tags={"metric_type": metric_type},
+                sample_rate=1.0,
+            )
+            metrics.timing(
+                key="minimetrics.flushed_buckets_weight",
+                value=buckets_weight,
+                tags={"metric_type": metric_type},
+                sample_rate=1.0,
+            )
+            metrics.incr(
+                key="minimetrics.flushed_buckets_weight_counter",
+                amount=buckets_weight,
+                tags={"metric_type": metric_type},
+                sample_rate=1.0,
+            )
+
         metrics.timing(
             key="minimetrics.encoded_metrics_size", value=len(encoded_metrics), sample_rate=1.0
         )

+ 16 - 5
src/minimetrics/types.py

@@ -1,4 +1,16 @@
-from typing import Generic, Iterable, List, Literal, Mapping, Tuple, TypeVar, Union
+from typing import (
+    Any,
+    Dict,
+    Generic,
+    Iterable,
+    List,
+    Literal,
+    Mapping,
+    Sequence,
+    Tuple,
+    TypeVar,
+    Union,
+)
 
 # Unit of the metrics.
 MetricUnit = Literal[
@@ -48,9 +60,7 @@ MetricTagsExternal = Mapping[MetricTagKey, MetricTagValueExternal]
 # Value inside the generator for the metric value.
 FlushedMetricValue = Union[int, float]
 
-
-BucketKey = Tuple[int, MetricType, str, MetricUnit, MetricTagsInternal]
-
+BucketKey = Tuple[MetricType, str, MetricUnit, MetricTagsInternal]
 
 T = TypeVar("T")
 
@@ -69,4 +79,5 @@ class Metric(Generic[T]):
         raise NotImplementedError()
 
 
-FlushedMetric = Tuple[BucketKey, Metric]
+FlushableMetric = Tuple[int, BucketKey, Metric[Any]]
+FlushableBuckets = Sequence[Tuple[int, Dict[BucketKey, Metric[Any]]]]

+ 92 - 122
tests/minimetrics/test_core.py

@@ -29,26 +29,30 @@ def test_client_incr(_emit):
     client.aggregator.stop()
 
     assert len(client.aggregator.buckets) == 0
-    extracted_metrics_arg = list(_emit.call_args.args[0])
-    assert len(extracted_metrics_arg) == 1
-    assert extracted_metrics_arg[0][0] == (
-        1693994400,
-        "c",
-        "button_clicked",
-        "nanosecond",
+    emit_args = list(_emit.call_args.args[0])
+    assert len(emit_args) == 1
+    assert emit_args[0][0] == 1693994400
+    keys = list(emit_args[0][1].keys())
+    assert keys == [
         (
-            ("browser", "Chrome"),
-            ("browser.version", "1.0"),
-            ("user.classes", "1"),
-            ("user.classes", "2"),
-            ("user.classes", "3"),
-            ("user.orgs", "apple"),
-            ("user.orgs", "google"),
-            ("user.orgs", "sentry"),
-        ),
-    )
-    assert isinstance(extracted_metrics_arg[0][1], CounterMetric)
-    assert list(extracted_metrics_arg[0][1].serialize_value()) == [1]
+            "c",
+            "button_clicked",
+            "nanosecond",
+            (
+                ("browser", "Chrome"),
+                ("browser.version", "1.0"),
+                ("user.classes", "1"),
+                ("user.classes", "2"),
+                ("user.classes", "3"),
+                ("user.orgs", "apple"),
+                ("user.orgs", "google"),
+                ("user.orgs", "sentry"),
+            ),
+        )
+    ]
+    values = list(emit_args[0][1].values())
+    assert isinstance(values[0], CounterMetric)
+    assert list(values[0].serialize_value()) == [1]
 
 
 @freeze_time("2023-09-06 10:00:00")
@@ -65,27 +69,30 @@ def test_client_timing(_emit):
     client.aggregator.stop()
 
     assert len(client.aggregator.buckets) == 0
-    extracted_metrics_arg = list(_emit.call_args.args[0])
-    assert len(extracted_metrics_arg) == 1
-    assert extracted_metrics_arg[0][0] == (
-        1693994400,
-        "d",
-        "execution_time",
-        "second",
+    emit_args = list(_emit.call_args.args[0])
+    assert len(emit_args) == 1
+    assert emit_args[0][0] == 1693994400
+    keys = list(emit_args[0][1].keys())
+    assert keys == [
         (
-            ("browser", "Chrome"),
-            ("browser.version", "1.0"),
-            ("user.classes", "1"),
-            ("user.classes", "2"),
-            ("user.classes", "3"),
-            ("user.orgs", "apple"),
-            ("user.orgs", "google"),
-            ("user.orgs", "sentry"),
-        ),
-    )
-    assert isinstance(extracted_metrics_arg[0][1], DistributionMetric)
-    assert list(extracted_metrics_arg[0][1].serialize_value()) == [1.0]
-    assert len(client.aggregator.buckets) == 0
+            "d",
+            "execution_time",
+            "second",
+            (
+                ("browser", "Chrome"),
+                ("browser.version", "1.0"),
+                ("user.classes", "1"),
+                ("user.classes", "2"),
+                ("user.classes", "3"),
+                ("user.orgs", "apple"),
+                ("user.orgs", "google"),
+                ("user.orgs", "sentry"),
+            ),
+        )
+    ]
+    values = list(emit_args[0][1].values())
+    assert isinstance(values[0], DistributionMetric)
+    assert list(values[0].serialize_value()) == [1.0]
 
 
 @freeze_time("2023-09-06 10:00:00")
@@ -102,27 +109,30 @@ def test_client_set(_emit):
     client.aggregator.stop()
 
     assert len(client.aggregator.buckets) == 0
-    extracted_metrics_arg = list(_emit.call_args.args[0])
-    assert len(extracted_metrics_arg) == 1
-    assert extracted_metrics_arg[0][0] == (
-        1693994400,
-        "s",
-        "user",
-        "none",
+    emit_args = list(_emit.call_args.args[0])
+    assert len(emit_args) == 1
+    assert emit_args[0][0] == 1693994400
+    keys = list(emit_args[0][1].keys())
+    assert keys == [
         (
-            ("browser", "Chrome"),
-            ("browser.version", "1.0"),
-            ("user.classes", "1"),
-            ("user.classes", "2"),
-            ("user.classes", "3"),
-            ("user.orgs", "apple"),
-            ("user.orgs", "google"),
-            ("user.orgs", "sentry"),
-        ),
-    )
-    assert isinstance(extracted_metrics_arg[0][1], SetMetric)
-    assert list(extracted_metrics_arg[0][1].serialize_value()) == [3455635177]
-    assert len(client.aggregator.buckets) == 0
+            "s",
+            "user",
+            "none",
+            (
+                ("browser", "Chrome"),
+                ("browser.version", "1.0"),
+                ("user.classes", "1"),
+                ("user.classes", "2"),
+                ("user.classes", "3"),
+                ("user.orgs", "apple"),
+                ("user.orgs", "google"),
+                ("user.orgs", "sentry"),
+            ),
+        )
+    ]
+    values = list(emit_args[0][1].values())
+    assert isinstance(values[0], SetMetric)
+    assert list(values[0].serialize_value()) == [3455635177]
 
 
 @freeze_time("2023-09-06 10:00:00")
@@ -139,67 +149,27 @@ def test_client_gauge_as_counter(_emit):
     client.aggregator.stop()
 
     assert len(client.aggregator.buckets) == 0
-    extracted_metrics_arg = list(_emit.call_args.args[0])
-    assert len(extracted_metrics_arg) == 1
-    assert extracted_metrics_arg[0][0] == (
-        1693994400,
-        "c",
-        "frontend_time",
-        "second",
+    emit_args = list(_emit.call_args.args[0])
+    assert len(emit_args) == 1
+    assert emit_args[0][0] == 1693994400
+    keys = list(emit_args[0][1].keys())
+    assert keys == [
         (
-            ("browser", "Chrome"),
-            ("browser.version", "1.0"),
-            ("user.classes", "1"),
-            ("user.classes", "2"),
-            ("user.classes", "3"),
-            ("user.orgs", "apple"),
-            ("user.orgs", "google"),
-            ("user.orgs", "sentry"),
-        ),
-    )
-    assert isinstance(extracted_metrics_arg[0][1], CounterMetric)
-    assert list(extracted_metrics_arg[0][1].serialize_value()) == [15.0]
-    assert len(client.aggregator.buckets) == 0
-
-
-# @freeze_time("2023-09-06 10:00:00")
-# @patch("minimetrics.core.Aggregator._emit")
-# def test_client_gauge(_emit):
-#     tags = {
-#         "browser": "Chrome",
-#         "browser.version": "1.0",
-#         "user.orgs": ["sentry", "google", "apple"],
-#         "user.classes": ["1", "2", "3"],
-#     }
-#     client = MiniMetricsClient()
-#     client.gauge("frontend_time", 15.0, tags=tags)  # type:ignore
-#     client.aggregator.stop()
-#
-#     assert len(client.aggregator.buckets) == 0
-#     extracted_metrics_arg = _emit.call_args.args[0]
-#     assert len(extracted_metrics_arg) == 1
-#     assert extracted_metrics_arg[0][0] == BucketKey(
-#         timestamp=1693994400,
-#         metric_type="g",
-#         metric_name="frontend_time",
-#         metric_unit="second",
-#         metric_tags=(
-#             ("browser", "Chrome"),
-#             ("browser.version", "1.0"),
-#             ("user.classes", "1"),
-#             ("user.classes", "2"),
-#             ("user.classes", "3"),
-#             ("user.orgs", "apple"),
-#             ("user.orgs", "google"),
-#             ("user.orgs", "sentry"),
-#         ),
-#     )
-#     assert isinstance(extracted_metrics_arg[0][1], GaugeMetric)
-#     assert extracted_metrics_arg[0][1].serialize_value() == {
-#         "last": 15.0,
-#         "min": 15.0,
-#         "max": 15.0,
-#         "sum": 15.0,
-#         "count": 1,
-#     }
-#     assert len(client.aggregator.buckets) == 0
+            "c",
+            "frontend_time",
+            "second",
+            (
+                ("browser", "Chrome"),
+                ("browser.version", "1.0"),
+                ("user.classes", "1"),
+                ("user.classes", "2"),
+                ("user.classes", "3"),
+                ("user.orgs", "apple"),
+                ("user.orgs", "google"),
+                ("user.orgs", "sentry"),
+            ),
+        )
+    ]
+    values = list(emit_args[0][1].values())
+    assert isinstance(values[0], CounterMetric)
+    assert list(values[0].serialize_value()) == [15.0]

+ 20 - 25
tests/minimetrics/test_transport.py

@@ -16,7 +16,6 @@ def encode_metric(value):
 
 def test_relay_encoder_with_counter():
     bucket_key: BucketKey = (
-        1693994400,
         "c",
         "button_click",
         "none",
@@ -26,7 +25,7 @@ def test_relay_encoder_with_counter():
         ),
     )
     metric = CounterMetric(first=2)
-    flushed_metric = (bucket_key, metric)
+    flushed_metric = (1693994400, bucket_key, metric)
 
     result = encode_metric(flushed_metric)
     assert result == "button_click@none:2|c|#browser:Chrome,browser.version:1.0|T1693994400"
@@ -34,7 +33,6 @@ def test_relay_encoder_with_counter():
 
 def test_relay_encoder_with_distribution():
     bucket_key: BucketKey = (
-        1693994400,
         "d",
         "execution_time",
         "second",
@@ -46,7 +44,7 @@ def test_relay_encoder_with_distribution():
     metric = DistributionMetric(first=1.0)
     metric.add(0.5)
     metric.add(3.0)
-    flushed_metric = (bucket_key, metric)
+    flushed_metric = (1693994400, bucket_key, metric)
 
     result = encode_metric(flushed_metric)
     assert (
@@ -57,7 +55,6 @@ def test_relay_encoder_with_distribution():
 
 def test_relay_encoder_with_set():
     bucket_key: BucketKey = (
-        1693994400,
         "s",
         "users",
         "none",
@@ -69,7 +66,7 @@ def test_relay_encoder_with_set():
     metric = SetMetric(first=123)
     metric.add(456)
     metric.add("riccardo")
-    flushed_metric = (bucket_key, metric)
+    flushed_metric = (1693994400, bucket_key, metric)
 
     result = encode_metric(flushed_metric)
     pieces = result.split("|")
@@ -85,7 +82,6 @@ def test_relay_encoder_with_set():
 
 def test_relay_encoder_with_gauge():
     bucket_key: BucketKey = (
-        1693994400,
         "g",
         "startup_time",
         "second",
@@ -97,7 +93,7 @@ def test_relay_encoder_with_gauge():
     metric = GaugeMetric(first=10.0)
     metric.add(5.0)
     metric.add(7.0)
-    flushed_metric = (bucket_key, metric)
+    flushed_metric = (1693994400, bucket_key, metric)
 
     result = encode_metric(flushed_metric)
     assert (
@@ -108,7 +104,6 @@ def test_relay_encoder_with_gauge():
 
 def test_relay_encoder_with_invalid_chars():
     bucket_key: BucketKey = (
-        1693994400,
         "c",
         "büttòn_click",
         "second",
@@ -126,7 +121,7 @@ def test_relay_encoder_with_invalid_chars():
         ),
     )
     metric = CounterMetric(first=1)
-    flushed_metric = (bucket_key, metric)
+    flushed_metric = (1693994400, bucket_key, metric)
 
     result = encode_metric(flushed_metric)
     assert (
@@ -135,14 +130,13 @@ def test_relay_encoder_with_invalid_chars():
     )
 
     bucket_key = (
-        1693994400,
         "c",
         "üòë",
         "second",
         (),
     )
     metric = CounterMetric(first=1)
-    flushed_metric = (bucket_key, metric)
+    flushed_metric = (1693994400, bucket_key, metric)
 
     assert encode_metric(flushed_metric) == "invalid-metric-name@second:1|c|T1693994400"
 
@@ -151,8 +145,8 @@ def test_relay_encoder_with_multiple_metrics():
     encoder = RelayStatsdEncoder()
 
     flushed_metric_1 = (
+        1693994400,
         (
-            1693994400,
             "g",
             "startup_time",
             "second",
@@ -165,8 +159,8 @@ def test_relay_encoder_with_multiple_metrics():
     )
 
     flushed_metric_2 = (
+        1693994400,
         (
-            1693994400,
             "c",
             "button_click",
             "none",
@@ -179,8 +173,8 @@ def test_relay_encoder_with_multiple_metrics():
     )
 
     flushed_metric_3 = (
+        1693994400,
         (
-            1693994400,
             "c",
             # This name will be completely scraped, resulting in an invalid metric.
             "öüâ",
@@ -206,17 +200,18 @@ def test_relay_encoder_with_multiple_metrics():
 @patch("minimetrics.transport.sentry_sdk")
 def test_send(sentry_sdk):
     flushed_metric = (
-        (
-            1693994400,
-            "c",
-            "button_click",
-            "none",
+        1693994400,
+        {
             (
-                ("browser", "Chrome"),
-                ("browser.version", "1.0"),
-            ),
-        ),
-        CounterMetric(first=1),
+                "c",
+                "button_click",
+                "none",
+                (
+                    ("browser", "Chrome"),
+                    ("browser.version", "1.0"),
+                ),
+            ): CounterMetric(first=1),
+        },
     )
 
     transport = MetricEnvelopeTransport(RelayStatsdEncoder())

+ 4 - 12
tests/sentry/metrics/test_minimetrics.py

@@ -9,38 +9,30 @@ class MiniMetricsMetricsBackendTest(TestCase):
         self.backend = MiniMetricsMetricsBackend(prefix="sentrytest.")
 
     @patch("minimetrics.core.Aggregator.ROLLUP_IN_SECONDS", 1.0)
-    @patch("minimetrics.core.metrics.timing")
-    def test_incr_called_with_no_tags(self, metrics_call):
+    def test_incr_called_with_no_tags(self):
         self.backend.incr(key="foo")
         self.backend.client.aggregator.stop()
 
         assert len(self.backend.client.aggregator.buckets) == 0
-        assert metrics_call.call_count == 2
 
     @patch("minimetrics.core.Aggregator.ROLLUP_IN_SECONDS", 1.0)
-    @patch("minimetrics.core.metrics.timing")
-    def test_incr_called_with_tag_value_as_list(self, metrics_call):
+    def test_incr_called_with_tag_value_as_list(self):
         # The minimetrics backend supports the list type.
         self.backend.incr(key="foo", tags={"foo": ["bar", "baz"]})  # type:ignore
         self.backend.client.aggregator.stop()
 
         assert len(self.backend.client.aggregator.buckets) == 0
-        assert metrics_call.call_count == 2
 
     @patch("minimetrics.core.Aggregator.ROLLUP_IN_SECONDS", 1.0)
-    @patch("minimetrics.core.metrics.timing")
-    def test_incr_not_called_after_flusher_stopped(self, metrics_call):
+    def test_incr_not_called_after_flusher_stopped(self):
         self.backend.client.aggregator.stop()
         self.backend.incr(key="foo")
 
         assert len(self.backend.client.aggregator.buckets) == 0
-        assert metrics_call.call_count == 0
 
     @patch("minimetrics.core.Aggregator.ROLLUP_IN_SECONDS", 1.0)
-    @patch("minimetrics.core.metrics.timing")
-    def test_stop_called_twice(self, metrics_call):
+    def test_stop_called_twice(self):
         self.backend.client.aggregator.stop()
         self.backend.client.aggregator.stop()
 
         assert len(self.backend.client.aggregator.buckets) == 0
-        assert metrics_call.call_count == 0