feat(minimetrics): Switch the backend over to sentry-sdk minimetrics (#56641)

Armin Ronacher · 1 year ago
commit c773d2ae0e

+ 0 - 1
Makefile

@@ -129,7 +129,6 @@ test-python-ci: create-db
 	@echo "--> Running CI Python tests"
 	pytest \
 		tests/integration \
-		tests/minimetrics \
 		tests/relay_integration \
 		tests/sentry \
 		tests/sentry_plugins \

+ 0 - 4
src/minimetrics/__init__.py

@@ -1,4 +0,0 @@
-from .core import MiniMetricsClient
-from .types import MetricTagsExternal
-
-__all__ = ["MiniMetricsClient", "MetricTagsExternal"]

+ 0 - 377
src/minimetrics/core.py

@@ -1,377 +0,0 @@
-import os
-import threading
-import time
-import zlib
-from functools import wraps
-from threading import Event, Lock, Thread
-from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Union
-
-import sentry_sdk
-
-from minimetrics.transport import MetricEnvelopeTransport, RelayStatsdEncoder
-from minimetrics.types import (
-    BucketKey,
-    FlushableBuckets,
-    FlushedMetricValue,
-    Metric,
-    MetricTagsExternal,
-    MetricTagsInternal,
-    MetricType,
-    MetricUnit,
-    MetricValue,
-)
-from sentry import options
-from sentry.utils import metrics
-
-thread_local = threading.local()
-
-
-def in_minimetrics():
-    try:
-        return thread_local.in_minimetrics
-    except AttributeError:
-        return False
-
-
-def minimetrics_noop(func):
-    @wraps(func)
-    def new_func(*args, **kwargs):
-        try:
-            in_minimetrics = thread_local.in_minimetrics
-        except AttributeError:
-            in_minimetrics = False
-        thread_local.in_minimetrics = True
-        try:
-            if not in_minimetrics:
-                return func(*args, **kwargs)
-        finally:
-            thread_local.in_minimetrics = in_minimetrics
-
-    return new_func
-
-
-class CounterMetric(Metric[float]):
-    __slots__ = ("value",)
-
-    def __init__(self, first: float) -> None:
-        self.value = first
-
-    @property
-    def weight(self) -> int:
-        return 1
-
-    def add(self, value: float) -> None:
-        self.value += value
-
-    def serialize_value(self) -> Iterable[FlushedMetricValue]:
-        return (self.value,)
-
-
-class GaugeMetric(Metric[float]):
-    __slots__ = (
-        "last",
-        "min",
-        "max",
-        "sum",
-        "count",
-    )
-
-    def __init__(self, first: float) -> None:
-        self.last = first
-        self.min = first
-        self.max = first
-        self.sum = first
-        self.count = 1
-
-    @property
-    def weight(self) -> int:
-        # A gauge serializes to five values: last, min, max, sum, and count.
-        return 5
-
-    def add(self, value: float) -> None:
-        self.last = value
-        self.min = min(self.min, value)
-        self.max = max(self.max, value)
-        self.sum += value
-        self.count += 1
-
-    def serialize_value(self) -> Iterable[FlushedMetricValue]:
-        return (
-            self.last,
-            self.min,
-            self.max,
-            self.sum,
-            self.count,
-        )
-
-
-class DistributionMetric(Metric[float]):
-    __slots__ = ("value",)
-
-    def __init__(self, first: float) -> None:
-        self.value: List[float] = [first]
-
-    @property
-    def weight(self) -> int:
-        return len(self.value)
-
-    def add(self, value: float) -> None:
-        self.value.append(float(value))
-
-    def serialize_value(self) -> Iterable[FlushedMetricValue]:
-        return self.value
-
-
-class SetMetric(Metric[Union[str, int]]):
-    __slots__ = ("value",)
-
-    def __init__(self, first: Union[str, int]) -> None:
-        self.value: Set[Union[str, int]] = {first}
-
-    @property
-    def weight(self) -> int:
-        return len(self.value)
-
-    def add(self, value: Union[str, int]) -> None:
-        self.value.add(value)
-
-    def serialize_value(self) -> Iterable[FlushedMetricValue]:
-        def _hash(x: Any) -> int:
-            if isinstance(x, str):
-                return zlib.crc32(x.encode("utf-8")) & 0xFFFFFFFF
-            return int(x)
-
-        return (_hash(value) for value in self.value)
-
-
-METRIC_TYPES: Dict[str, Callable[[Any], Metric[Any]]] = {
-    "c": CounterMetric,
-    "g": GaugeMetric,
-    "d": DistributionMetric,
-    "s": SetMetric,
-}
-
-
-class Aggregator:
-    ROLLUP_IN_SECONDS = 10.0
-    MAX_WEIGHT = 100000
-    DEFAULT_SAMPLE_RATE = 1.0
-
-    def __init__(self) -> None:
-        # Buckets holding the grouped metrics, keyed in two levels (bucket timestamp, then bucket key)
-        # so that locking can be performed more efficiently.
-        self.buckets: Dict[int, Dict[BucketKey, Metric[Any]]] = {}
-        # Stores the total weight of the in-memory buckets. Weight is defined per metric type and
-        # measures how much data is needed to represent the metric (e.g., counter = 1, distribution = n).
-        self._buckets_total_weight: int = 0
-        # Transport layer used to send metrics.
-        self._transport: MetricEnvelopeTransport = MetricEnvelopeTransport(RelayStatsdEncoder())
-        # Lock protecting concurrent access to shared state by the flusher and by callers of add() or stop().
-        self._lock: Lock = Lock()
-        # Signals whether the loop of the flusher is running.
-        self._running: bool = True
-        # Used to maintain synchronization between the flusher and external callers.
-        self._flush_event: Event = Event()
-        # Used to signal that the buckets should be flushed on the next loop iteration, irrespective of the cutoff.
-        self._force_flush: bool = False
-
-        # Thread handling the flushing loop.
-        self._flusher: Optional[Thread] = None
-        self._flusher_pid: Optional[int] = None
-        self._ensure_thread()
-
-    def _ensure_thread(self):
-        """For forking processes we might need to restart this thread.
-        This ensures that our process actually has that thread running.
-        """
-        pid = os.getpid()
-        if self._flusher_pid == pid:
-            return
-        with self._lock:
-            self._flusher_pid = pid
-            self._flusher = Thread(target=self._flush_loop)
-            self._flusher.daemon = True
-            self._flusher.start()
-
-    def _flush_loop(self) -> None:
-        thread_local.in_minimetrics = True
-        while self._running or self._force_flush:
-            self._flush()
-            self._flush_event.wait(5.0)
-
-    def _flush(self):
-        flushable_buckets, _ = self._flushable_buckets()
-        if flushable_buckets:
-            # Emit to `metrics` only inside this method: once metrics have been received, the
-            # `sentry.utils.metrics` module is guaranteed to be initialized. Emitting earlier would
-            # likely cause a circular dependency, since the functions in `sentry.utils.metrics`
-            # depend on backend initialization, and emitting while a backend initializes raises an error.
-            self._emit(flushable_buckets)
-
-    def _flushable_buckets(self) -> Tuple[FlushableBuckets, bool]:
-        with self._lock:
-            force_flush = self._force_flush
-            cutoff = time.time() - self.ROLLUP_IN_SECONDS
-            flushable_buckets: Any = []
-            weight_to_remove = 0
-
-            if force_flush:
-                flushable_buckets = self.buckets.items()
-                self.buckets = {}
-                self._buckets_total_weight = 0
-                self._force_flush = False
-            else:
-                for buckets_timestamp, buckets in self.buckets.items():
-                    # Skip buckets whose timestamp is newer than the cutoff; they are still accumulating.
-                    if buckets_timestamp > cutoff:
-                        continue
-
-                    flushable_buckets.append((buckets_timestamp, buckets))
-
-                # Clear the flushed buckets while still holding the lock, to avoid re-acquiring it downstream.
-                for buckets_timestamp, buckets in flushable_buckets:
-                    for _, metric in buckets.items():
-                        weight_to_remove += metric.weight
-                    del self.buckets[buckets_timestamp]
-
-                self._buckets_total_weight -= weight_to_remove
-
-        return flushable_buckets, force_flush
-
-    @minimetrics_noop
-    def add(
-        self,
-        ty: MetricType,
-        key: str,
-        value: MetricValue,
-        unit: MetricUnit,
-        tags: Optional[MetricTagsExternal],
-        timestamp: Optional[float],
-    ) -> None:
-        self._ensure_thread()
-
-        if self._flusher is None:
-            return
-
-        if timestamp is None:
-            timestamp = time.time()
-
-        bucket_timestamp = int((timestamp // self.ROLLUP_IN_SECONDS) * self.ROLLUP_IN_SECONDS)
-        bucket_key = (
-            ty,
-            key,
-            unit,
-            # Convert tags into the internal format: list values are flattened, since the
-            # bucket key must be hashable and cannot contain lists.
-            self._to_internal_metric_tags(tags),
-        )
-
-        with self._lock:
-            local_buckets = self.buckets.setdefault(bucket_timestamp, {})
-            metric = local_buckets.get(bucket_key)
-            if metric is not None:
-                previous_weight = metric.weight
-                metric.add(value)
-            else:
-                metric = local_buckets[bucket_key] = METRIC_TYPES[ty](value)
-                previous_weight = 0
-
-            self._buckets_total_weight += metric.weight - previous_weight
-
-        # Given the new weight we consider whether we want to force flush.
-        self.consider_force_flush()
-
-        # Track how many times metrics are added, so that we know the actual number of add() calls.
-        metrics.incr(
-            key="minimetrics.add",
-            amount=1,
-            tags={"metric_type": ty},
-            sample_rate=self.DEFAULT_SAMPLE_RATE,
-        )
-
-    def stop(self):
-        if self._flusher is None:
-            return
-
-        # First, tell the flusher that we want to force flush.
-        with self._lock:
-            self._force_flush = True
-            self._running = False
-
-        # Then wake the flusher and wait for it to finish.
-        self._flush_event.set()
-        self._flusher.join()
-        self._flusher = None
-
-    def consider_force_flush(self):
-        # It's important to acquire a lock around this method, since it will touch shared data structures.
-        total_weight = len(self.buckets) + self._buckets_total_weight
-        if total_weight >= self.MAX_WEIGHT:
-            self._force_flush = True
-            self._flush_event.set()
-
-    def _emit(self, flushable_buckets: FlushableBuckets) -> Any:
-        if options.get("delightful_metrics.enable_envelope_forwarding"):
-            try:
-                self._transport.send(flushable_buckets)
-            except Exception as e:
-                sentry_sdk.capture_exception(e)
-
-    def _to_internal_metric_tags(self, tags: Optional[MetricTagsExternal]) -> MetricTagsInternal:
-        rv = []
-        for key, value in (tags or {}).items():
-            # If the value is a collection, we want to flatten it.
-            if isinstance(value, (list, tuple)):
-                for inner_value in value:
-                    rv.append((key, inner_value))
-            else:
-                rv.append((key, value))
-
-        # It's very important to sort the tags in order to obtain the same bucket key.
-        return tuple(sorted(rv))
-
-
-class MiniMetricsClient:
-    def __init__(self) -> None:
-        self.aggregator = Aggregator()
-
-    def incr(
-        self,
-        key: str,
-        value: float,
-        unit: MetricUnit = "nanosecond",
-        tags: Optional[MetricTagsExternal] = None,
-        timestamp: Optional[float] = None,
-    ) -> None:
-        self.aggregator.add("c", key, value, unit, tags, timestamp)
-
-    def timing(
-        self,
-        key: str,
-        value: float,
-        unit: MetricUnit = "second",
-        tags: Optional[MetricTagsExternal] = None,
-        timestamp: Optional[float] = None,
-    ) -> None:
-        self.aggregator.add("d", key, value, unit, tags, timestamp)
-
-    def set(
-        self,
-        key: str,
-        value: Union[str, int],
-        unit: MetricUnit = "none",
-        tags: Optional[MetricTagsExternal] = None,
-        timestamp: Optional[float] = None,
-    ) -> None:
-        self.aggregator.add("s", key, value, unit, tags, timestamp)
-
-    def gauge(
-        self,
-        key: str,
-        value: float,
-        unit: MetricUnit = "second",
-        tags: Optional[MetricTagsExternal] = None,
-        timestamp: Optional[float] = None,
-    ) -> None:
-        # For now, we emit gauges as counts.
-        self.aggregator.add("c", key, value, unit, tags, timestamp)

+ 0 - 130
src/minimetrics/transport.py

@@ -1,130 +0,0 @@
-import re
-from functools import partial
-from io import BytesIO
-from typing import Dict, Iterable, List, Tuple
-
-import sentry_sdk
-from sentry_sdk.envelope import Envelope, Item
-
-from minimetrics.types import FlushableBuckets, FlushableMetric, MetricType
-from sentry import options
-from sentry.utils import metrics
-
-
-class EncodingError(Exception):
-    """
-    Raised when the encoding of a flushed metric encounters an error.
-    """
-
-    pass
-
-
-sanitize_value = partial(re.compile(r"[^a-zA-Z0-9_/.]").sub, "")
-
-
-class RelayStatsdEncoder:
-    def _encode(self, value: FlushableMetric, out: BytesIO):
-        _write = out.write
-        timestamp, (metric_type, metric_name, metric_unit, metric_tags), metric = value
-        metric_name = sanitize_value(metric_name) or "invalid-metric-name"
-        _write(f"{metric_name}@{metric_unit}".encode())
-
-        for serialized_value in metric.serialize_value():
-            _write(b":")
-            _write(str(serialized_value).encode("utf-8"))
-
-        _write(f"|{metric_type}".encode("ascii"))
-
-        if metric_tags:
-            _write(b"|#")
-            first = True
-            for tag_key, tag_value in metric_tags:
-                tag_key = sanitize_value(tag_key)
-                if not tag_key:
-                    continue
-                if first:
-                    first = False
-                else:
-                    _write(b",")
-                _write(tag_key.encode("utf-8"))
-                _write(b":")
-                _write(sanitize_value(tag_value).encode("utf-8"))
-
-        _write(f"|T{timestamp}".encode("ascii"))
-
-    def encode_multiple(self, values: Iterable[FlushableMetric]) -> bytes:
-        out = BytesIO()
-        _write = out.write
-
-        for value in values:
-            self._encode(value, out)
-            _write(b"\n")
-
-        return out.getvalue()
-
-
-class MetricEnvelopeTransport:
-    def __init__(self, encoder: RelayStatsdEncoder):
-        self._encoder = encoder
-
-    def send(self, flushable_buckets: FlushableBuckets):
-        client = sentry_sdk.Hub.current.client
-        if client is None:
-            return
-
-        transport = client.transport
-        if transport is None:
-            return
-
-        flushable_metrics: List[FlushableMetric] = []
-        stats_by_type: Dict[MetricType, Tuple[int, int]] = {}
-        for buckets_timestamp, buckets in flushable_buckets:
-            for bucket_key, metric in buckets.items():
-                flushable_metric: FlushableMetric = (buckets_timestamp, bucket_key, metric)
-                flushable_metrics.append(flushable_metric)
-                (prev_buckets_count, prev_buckets_weight) = stats_by_type.get(bucket_key[0], (0, 0))
-                stats_by_type[bucket_key[0]] = (
-                    prev_buckets_count + 1,
-                    prev_buckets_weight + metric.weight,
-                )
-
-        for metric_type, (buckets_count, buckets_weight) in stats_by_type.items():
-            # Emit metrics for the number of buckets and the total weight flushed, per metric type.
-            metrics.timing(
-                key="minimetrics.flushed_buckets",
-                value=buckets_count,
-                tags={"metric_type": metric_type},
-                sample_rate=1.0,
-            )
-            metrics.incr(
-                key="minimetrics.flushed_buckets_counter",
-                amount=buckets_count,
-                tags={"metric_type": metric_type},
-                sample_rate=1.0,
-            )
-            metrics.timing(
-                key="minimetrics.flushed_buckets_weight",
-                value=buckets_weight,
-                tags={"metric_type": metric_type},
-                sample_rate=1.0,
-            )
-            metrics.incr(
-                key="minimetrics.flushed_buckets_weight_counter",
-                amount=buckets_weight,
-                tags={"metric_type": metric_type},
-                sample_rate=1.0,
-            )
-
-        if options.get("delightful_metrics.enable_envelope_serialization"):
-            encoded_metrics = self._encoder.encode_multiple(flushable_metrics)
-            metrics.timing(
-                key="minimetrics.encoded_metrics_size", value=len(encoded_metrics), sample_rate=1.0
-            )
-            metric_item = Item(payload=encoded_metrics, type="statsd")
-            envelope = Envelope(
-                headers=None,
-                items=[metric_item],
-            )
-
-            if options.get("delightful_metrics.enable_capture_envelope"):
-                transport.capture_envelope(envelope)
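The encoder above produces Relay's statsd-style line protocol: name@unit, one ":value" per serialized value, "|type", optional "|#key:value,..." tags, and a "|T" timestamp. A minimal re-creation for a single-value counter (the helper name and sample values are invented):

    def encode_counter(name: str, unit: str, value: float, tags, timestamp: int) -> str:
        # Mirrors RelayStatsdEncoder._encode for a counter carrying one value.
        tag_part = ",".join(f"{key}:{val}" for key, val in tags)
        return f"{name}@{unit}:{value}|c|#{tag_part}|T{timestamp}"

    assert (
        encode_counter("button_clicked", "none", 1, [("browser", "Chrome")], 1693994400)
        == "button_clicked@none:1|c|#browser:Chrome|T1693994400"
    )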

+ 0 - 83
src/minimetrics/types.py

@@ -1,83 +0,0 @@
-from typing import (
-    Any,
-    Dict,
-    Generic,
-    Iterable,
-    List,
-    Literal,
-    Mapping,
-    Sequence,
-    Tuple,
-    TypeVar,
-    Union,
-)
-
-# Unit of the metrics.
-MetricUnit = Literal[
-    "none",
-    "nanosecond",
-    "microsecond",
-    "millisecond",
-    "second",
-    "minute",
-    "hour",
-    "day",
-    "week",
-    "bit",
-    "byte",
-    "kilobyte",
-    "kibibyte",
-    "mebibyte",
-    "gigabyte",
-    "terabyte",
-    "tebibyte",
-    "petabyte",
-    "pebibyte",
-    "exabyte",
-    "exbibyte",
-    "ratio",
-    "percent",
-]
-
-# Type of the metric.
-MetricType = Literal["d", "s", "g", "c"]
-
-# Value of the metric.
-MetricValue = Union[int, float, str]
-
-# Tag key of a metric.
-MetricTagKey = str
-
-# Internal representation of tags as a tuple of tuples (this is done in order to allow for the same key to exist
-# multiple times).
-MetricTagValueInternal = str
-MetricTagsInternal = Tuple[Tuple[MetricTagKey, MetricTagValueInternal], ...]
-
-# External representation of tags as a dictionary.
-MetricTagValueExternal = Union[str, List[str], Tuple[str, ...]]
-MetricTagsExternal = Mapping[MetricTagKey, MetricTagValueExternal]
-
-# Value yielded when a metric's value is serialized.
-FlushedMetricValue = Union[int, float]
-
-BucketKey = Tuple[MetricType, str, MetricUnit, MetricTagsInternal]
-
-T = TypeVar("T")
-
-
-class Metric(Generic[T]):
-    __slots__ = ()
-
-    @property
-    def weight(self) -> int:
-        raise NotImplementedError()
-
-    def add(self, value: T) -> None:
-        raise NotImplementedError()
-
-    def serialize_value(self) -> Iterable[FlushedMetricValue]:
-        raise NotImplementedError()
-
-
-FlushableMetric = Tuple[int, BucketKey, Metric[Any]]
-FlushableBuckets = Sequence[Tuple[int, Dict[BucketKey, Metric[Any]]]]
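To make the duplicate-key comment concrete: an external tag mapping with list values is flattened into the internal sorted tuple-of-tuples form, roughly like this (mirroring Aggregator._to_internal_metric_tags in the deleted core.py):

    external = {"user.orgs": ["sentry", "google"], "browser": "Chrome"}
    internal = tuple(sorted(
        (key, item)
        for key, value in external.items()
        # Flatten list/tuple values into repeated (key, value) pairs.
        for item in (value if isinstance(value, (list, tuple)) else [value])
    ))
    assert internal == (
        ("browser", "Chrome"),
        ("user.orgs", "google"),
        ("user.orgs", "sentry"),
    )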

+ 88 - 55
src/sentry/metrics/minimetrics.py

@@ -1,71 +1,105 @@
+# mypy: ignore-errors
+
 import random
-from typing import Any, Optional, Union
+from functools import wraps
+from typing import Any, Dict, Iterable, Optional, Tuple, Union
 
 import sentry_sdk
 
-from minimetrics import MetricTagsExternal, MiniMetricsClient
-from sentry.metrics.base import MetricsBackend, Tags
-
-
-def _to_minimetrics_external_metric_tags(tags: Optional[Tags]) -> Optional[MetricTagsExternal]:
-    # We remove all `None` values, since then the types will be compatible.
-    casted_tags: Any = None
-    if tags is not None:
-        casted_tags = {
-            tag_key: str(tag_value) for tag_key, tag_value in tags.items() if tag_value is not None
-        }
+try:
+    from sentry_sdk.metrics import Metric, MetricsAggregator  # type: ignore
 
-    return casted_tags
+    have_minimetrics = True
+except ImportError:
+    have_minimetrics = False
 
-
-# This is needed to pass data between the sdk patcher and the
-# minimetrics backend.  This is not super clean but it allows us to
-# initialize these things in arbitrary order.
-minimetrics_client: Optional[MiniMetricsClient] = None
+from sentry import options
+from sentry.metrics.base import MetricsBackend, Tags
+from sentry.utils import metrics
 
 
 def patch_sentry_sdk():
-    client = sentry_sdk.Hub.main.client
-    if client is None:
+    if not have_minimetrics:
         return
 
-    old_flush = client.flush
-
-    def new_flush(*args, **kwargs):
-        client = minimetrics_client
-        if client is not None:
-            client.aggregator.consider_force_flush()
-        return old_flush(*args, **kwargs)
-
-    client.flush = new_flush  # type:ignore
-
-    old_close = client.close
-
-    def new_close(*args, **kwargs):
-        client = minimetrics_client
-        if client is not None:
-            client.aggregator.stop()
-        return old_close(*args, **kwargs)
+    real_add = MetricsAggregator.add
+    real_emit = MetricsAggregator._emit
+
+    @wraps(real_add)
+    def tracked_add(self, ty, *args, **kwargs):
+        real_add(self, ty, *args, **kwargs)
+        metrics.incr(
+            key="minimetrics.add",
+            amount=1,
+            tags={"metric_type": ty},
+            sample_rate=1.0,
+        )
+
+    @wraps(real_emit)
+    def patched_emit(self, flushable_buckets: Iterable[Tuple[int, Dict[Any, Metric]]]):
+        flushable_metrics = []
+        stats_by_type: Any = {}
+        for buckets_timestamp, buckets in flushable_buckets:
+            for bucket_key, metric in buckets.items():
+                flushable_metric = (buckets_timestamp, bucket_key, metric)
+                flushable_metrics.append(flushable_metric)
+                (prev_buckets_count, prev_buckets_weight) = stats_by_type.get(bucket_key[0], (0, 0))
+                stats_by_type[bucket_key[0]] = (
+                    prev_buckets_count + 1,
+                    prev_buckets_weight + metric.weight,
+                )
+
+        for metric_type, (buckets_count, buckets_weight) in stats_by_type.items():
+            metrics.timing(
+                key="minimetrics.flushed_buckets",
+                value=buckets_count,
+                tags={"metric_type": metric_type},
+                sample_rate=1.0,
+            )
+            metrics.incr(
+                key="minimetrics.flushed_buckets_counter",
+                amount=buckets_count,
+                tags={"metric_type": metric_type},
+                sample_rate=1.0,
+            )
+            metrics.timing(
+                key="minimetrics.flushed_buckets_weight",
+                value=buckets_weight,
+                tags={"metric_type": metric_type},
+                sample_rate=1.0,
+            )
+            metrics.incr(
+                key="minimetrics.flushed_buckets_weight_counter",
+                amount=buckets_weight,
+                tags={"metric_type": metric_type},
+                sample_rate=1.0,
+            )
 
-    client.close = new_close  # type:ignore
+        if options.get("delightful_metrics.enable_capture_envelope"):
+            envelope = real_emit(self, flushable_buckets)
+            metrics.timing(
+                key="minimetrics.encoded_metrics_size",
+                value=len(envelope.items[0].payload.get_bytes()),
+                sample_rate=1.0,
+            )
 
-    old_data_category = sentry_sdk.envelope.Item.data_category.fget  # type:ignore
+    MetricsAggregator.add = tracked_add  # type: ignore
+    MetricsAggregator._emit = patched_emit  # type: ignore
 
-    @property  # type:ignore
-    def data_category(self):
-        if self.headers.get("type") == "statsd":
-            return "statsd"
-        return old_data_category(self)
 
-    sentry_sdk.envelope.Item.data_category = data_category  # type:ignore
+def before_emit_metric(key: str, tags: Dict[str, Any]) -> bool:
+    if not options.get("delightful_metrics.enable_common_tags"):
+        tags.pop("transaction", None)
+        tags.pop("release", None)
+        tags.pop("environment", None)
+    return True
 
 
 class MiniMetricsMetricsBackend(MetricsBackend):
     def __init__(self, prefix: Optional[str] = None):
         super().__init__(prefix=prefix)
-        global minimetrics_client
-        self.client = MiniMetricsClient()
-        minimetrics_client = self.client
+        if not have_minimetrics:
+            raise RuntimeError("Sentry SDK too old (no minimetrics)")
 
     @staticmethod
     def _keep_metric(sample_rate: float) -> bool:
@@ -80,10 +114,10 @@ class MiniMetricsMetricsBackend(MetricsBackend):
         sample_rate: float = 1,
     ) -> None:
         if self._keep_metric(sample_rate):
-            self.client.incr(
+            sentry_sdk.metrics.incr(
                 key=self._get_key(key),
                 value=amount,
-                tags=_to_minimetrics_external_metric_tags(tags),
+                tags=tags,
             )
 
     def timing(
@@ -95,8 +129,8 @@ class MiniMetricsMetricsBackend(MetricsBackend):
         sample_rate: float = 1,
     ) -> None:
         if self._keep_metric(sample_rate):
-            self.client.timing(
-                key=self._get_key(key), value=value, tags=_to_minimetrics_external_metric_tags(tags)
+            sentry_sdk.metrics.distribution(
+                key=self._get_key(key), value=value, tags=tags, unit="second"
             )
 
     def gauge(
@@ -108,6 +142,5 @@ class MiniMetricsMetricsBackend(MetricsBackend):
         sample_rate: float = 1,
     ) -> None:
         if self._keep_metric(sample_rate):
-            self.client.gauge(
-                key=self._get_key(key), value=value, tags=_to_minimetrics_external_metric_tags(tags)
-            )
+            # XXX: make this into a gauge later
+            sentry_sdk.metrics.incr(key=self._get_key(key), value=value, tags=tags)
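The body of `_keep_metric` falls outside this hunk; given the retained `import random` and its call sites, a plausible sketch is plain client-side sampling (an assumption, not the verbatim implementation):

    import random

    def _keep_metric(sample_rate: float) -> bool:
        # Hypothetical: keep roughly sample_rate of calls, drop the rest client-side.
        return random.random() <= sample_rate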

+ 6 - 0
src/sentry/options/defaults.py

@@ -1581,6 +1581,12 @@ register(
     flags=FLAG_AUTOMATOR_MODIFIABLE,
 )
 
+register(
+    "delightful_metrics.enable_common_tags",
+    default=False,
+    flags=FLAG_AUTOMATOR_MODIFIABLE,
+)
+
 register(
     "outbox_replication.sentry_team.replication_version",
     type=Int,

+ 9 - 3
src/sentry/utils/sdk.py

@@ -473,6 +473,8 @@ def configure_sdk():
     from sentry_sdk.integrations.redis import RedisIntegration
     from sentry_sdk.integrations.threading import ThreadingIntegration
 
+    from sentry.metrics import minimetrics
+
     # exclude monitors with sub-minute schedules from using crons
     exclude_beat_tasks = [
         "flush-buffers",
@@ -480,6 +482,12 @@ def configure_sdk():
         "schedule-digests",
     ]
 
+    # turn on minimetrics
+    sdk_options.setdefault("_experiments", {}).update(
+        enable_metrics=True,
+        before_emit_metric=minimetrics.before_emit_metric,
+    )
+
     sentry_sdk.init(
         # set back the sentry4sentry_dsn popped above since we need a default dsn on the client
         # for dynamic sampling context public_key population
@@ -501,9 +509,7 @@ def configure_sdk():
         **sdk_options,
     )
 
-    from sentry.metrics.minimetrics import patch_sentry_sdk
-
-    patch_sentry_sdk()
+    minimetrics.patch_sentry_sdk()
 
 
 class RavenShim:
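As a standalone illustration of the wiring above, enabling the SDK's experimental metrics with a before-emit hook looked roughly like this at the time (the DSN is a placeholder; `_experiments` was an unstable, experimental sentry-sdk option):

    import sentry_sdk

    def before_emit_metric(key, tags):
        # Returning False would drop the metric entirely.
        return True

    sentry_sdk.init(
        dsn="https://publickey@example.ingest.sentry.io/0",  # placeholder DSN
        _experiments={
            "enable_metrics": True,
            "before_emit_metric": before_emit_metric,
        },
    )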

+ 0 - 0
tests/minimetrics/__init__.py


+ 0 - 174
tests/minimetrics/test_core.py

@@ -1,174 +0,0 @@
-from unittest.mock import patch
-
-from minimetrics.core import CounterMetric, DistributionMetric, MiniMetricsClient, SetMetric
-from sentry.testutils.helpers.datetime import freeze_time
-from sentry.testutils.pytest.fixtures import django_db_all
-
-
-@django_db_all
-def test_envelope_forwarding():
-    client = MiniMetricsClient()
-    client.incr("button_clicked", 1.0)
-    client.aggregator.stop()
-
-    assert len(client.aggregator.buckets) == 0
-
-
-@freeze_time("2023-09-06 10:00:00")
-@patch("minimetrics.core.Aggregator._emit")
-def test_client_incr(_emit):
-    tags = {
-        "browser": "Chrome",
-        "browser.version": "1.0",
-        "user.orgs": ["sentry", "google", "apple"],
-        "user.classes": ["1", "2", "3"],
-    }
-    client = MiniMetricsClient()
-    client.incr("button_clicked", 1.0, tags=tags)  # type:ignore
-    client.aggregator.stop()
-
-    assert len(client.aggregator.buckets) == 0
-    emit_args = list(_emit.call_args.args[0])
-    assert len(emit_args) == 1
-    assert emit_args[0][0] == 1693994400
-    keys = list(emit_args[0][1].keys())
-    assert keys == [
-        (
-            "c",
-            "button_clicked",
-            "nanosecond",
-            (
-                ("browser", "Chrome"),
-                ("browser.version", "1.0"),
-                ("user.classes", "1"),
-                ("user.classes", "2"),
-                ("user.classes", "3"),
-                ("user.orgs", "apple"),
-                ("user.orgs", "google"),
-                ("user.orgs", "sentry"),
-            ),
-        )
-    ]
-    values = list(emit_args[0][1].values())
-    assert isinstance(values[0], CounterMetric)
-    assert list(values[0].serialize_value()) == [1]
-
-
-@freeze_time("2023-09-06 10:00:00")
-@patch("minimetrics.core.Aggregator._emit")
-def test_client_timing(_emit):
-    tags = {
-        "browser": "Chrome",
-        "browser.version": "1.0",
-        "user.orgs": ["sentry", "google", "apple"],
-        "user.classes": ["1", "2", "3"],
-    }
-    client = MiniMetricsClient()
-    client.timing("execution_time", 1.0, tags=tags)  # type:ignore
-    client.aggregator.stop()
-
-    assert len(client.aggregator.buckets) == 0
-    emit_args = list(_emit.call_args.args[0])
-    assert len(emit_args) == 1
-    assert emit_args[0][0] == 1693994400
-    keys = list(emit_args[0][1].keys())
-    assert keys == [
-        (
-            "d",
-            "execution_time",
-            "second",
-            (
-                ("browser", "Chrome"),
-                ("browser.version", "1.0"),
-                ("user.classes", "1"),
-                ("user.classes", "2"),
-                ("user.classes", "3"),
-                ("user.orgs", "apple"),
-                ("user.orgs", "google"),
-                ("user.orgs", "sentry"),
-            ),
-        )
-    ]
-    values = list(emit_args[0][1].values())
-    assert isinstance(values[0], DistributionMetric)
-    assert list(values[0].serialize_value()) == [1.0]
-
-
-@freeze_time("2023-09-06 10:00:00")
-@patch("minimetrics.core.Aggregator._emit")
-def test_client_set(_emit):
-    tags = {
-        "browser": "Chrome",
-        "browser.version": "1.0",
-        "user.orgs": ["sentry", "google", "apple"],
-        "user.classes": ["1", "2", "3"],
-    }
-    client = MiniMetricsClient()
-    client.set("user", "riccardo", tags=tags)  # type:ignore
-    client.aggregator.stop()
-
-    assert len(client.aggregator.buckets) == 0
-    emit_args = list(_emit.call_args.args[0])
-    assert len(emit_args) == 1
-    assert emit_args[0][0] == 1693994400
-    keys = list(emit_args[0][1].keys())
-    assert keys == [
-        (
-            "s",
-            "user",
-            "none",
-            (
-                ("browser", "Chrome"),
-                ("browser.version", "1.0"),
-                ("user.classes", "1"),
-                ("user.classes", "2"),
-                ("user.classes", "3"),
-                ("user.orgs", "apple"),
-                ("user.orgs", "google"),
-                ("user.orgs", "sentry"),
-            ),
-        )
-    ]
-    values = list(emit_args[0][1].values())
-    assert isinstance(values[0], SetMetric)
-    assert list(values[0].serialize_value()) == [3455635177]
-
-
-@freeze_time("2023-09-06 10:00:00")
-@patch("minimetrics.core.Aggregator._emit")
-def test_client_gauge_as_counter(_emit):
-    tags = {
-        "browser": "Chrome",
-        "browser.version": "1.0",
-        "user.orgs": ["sentry", "google", "apple"],
-        "user.classes": ["1", "2", "3"],
-    }
-    client = MiniMetricsClient()
-    client.gauge("frontend_time", 15.0, tags=tags)  # type:ignore
-    client.aggregator.stop()
-
-    assert len(client.aggregator.buckets) == 0
-    emit_args = list(_emit.call_args.args[0])
-    assert len(emit_args) == 1
-    assert emit_args[0][0] == 1693994400
-    keys = list(emit_args[0][1].keys())
-    assert keys == [
-        (
-            "c",
-            "frontend_time",
-            "second",
-            (
-                ("browser", "Chrome"),
-                ("browser.version", "1.0"),
-                ("user.classes", "1"),
-                ("user.classes", "2"),
-                ("user.classes", "3"),
-                ("user.orgs", "apple"),
-                ("user.orgs", "google"),
-                ("user.orgs", "sentry"),
-            ),
-        )
-    ]
-    values = list(emit_args[0][1].values())
-    assert isinstance(values[0], CounterMetric)
-    assert list(values[0].serialize_value()) == [15.0]
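A note on the expected set value 3455635177 in test_client_set: SetMetric.serialize_value hashes string members with CRC32 (see core.py above), so the asserted number is simply the checksum of "riccardo":

    import zlib

    # Matches SetMetric.serialize_value's _hash for string members.
    assert zlib.crc32("riccardo".encode("utf-8")) & 0xFFFFFFFF == 3455635177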

Some files were not shown because too many files changed in this diff