
feat(dashboards): on demand metrics extraction (#54018)

Ogi committed 1 year ago (commit 7185777d8e)

src/sentry/api/serializers/rest_framework/dashboard.py (+25 -0)

@@ -4,6 +4,7 @@ from datetime import datetime, timedelta
 from django.db.models import Max
 from rest_framework import serializers
 
+from sentry import features
 from sentry.api.issue_search import parse_search_query
 from sentry.api.serializers.rest_framework import CamelSnakeSerializer, ListField
 from sentry.api.serializers.rest_framework.base import convert_dict_key_case, snake_to_camel_case
@@ -20,6 +21,7 @@ from sentry.models import (
 from sentry.search.events.builder import UnresolvedQuery
 from sentry.search.events.fields import is_function
 from sentry.snuba.dataset import Dataset
+from sentry.tasks.relay import schedule_invalidate_project_config
 from sentry.utils.dates import parse_stats_period
 
 AGGREGATE_PATTERN = r"^(\w+)\((.*)?\)$"
@@ -376,6 +378,8 @@ class DashboardDetailsSerializer(CamelSnakeSerializer):
 
         self.update_dashboard_filters(self.instance, validated_data)
 
+        schedule_update_project_configs(self.instance)
+
         return self.instance
 
     def update(self, instance, validated_data):
@@ -398,6 +402,8 @@ class DashboardDetailsSerializer(CamelSnakeSerializer):
 
         self.update_dashboard_filters(instance, validated_data)
 
+        schedule_update_project_configs(self.instance)
+
         return instance
 
     def update_widgets(self, instance, widget_data):
@@ -525,3 +531,22 @@ class DashboardDetailsSerializer(CamelSnakeSerializer):
 
 class DashboardSerializer(DashboardDetailsSerializer):
     title = serializers.CharField(required=True, max_length=255)
+
+
+def schedule_update_project_configs(dashboard: Dashboard):
+    """
+    Schedule a task to update project configs for all projects of an organization when a dashboard is updated.
+    """
+    org = dashboard.organization
+
+    on_demand_metrics = features.has("organizations:on-demand-metrics-extraction", org)
+    dashboard_on_demand_metrics = features.has(
+        "organizations:on-demand-metrics-extraction-experimental", org
+    )
+
+    if not on_demand_metrics or not dashboard_on_demand_metrics:
+        return
+
+    schedule_invalidate_project_config(
+        trigger="dashboards:create-on-demand-metric", organization_id=org.id
+    )
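
Note: the guard above means project configs are only invalidated when both
"organizations:on-demand-metrics-extraction" and
"organizations:on-demand-metrics-extraction-experimental" are enabled for the organization.
A minimal sketch of how this could be exercised in a test, assuming the existing Feature
helper and default_project fixture; the test name and mock target are illustrative and not
part of this change:

    from unittest import mock

    from sentry.api.serializers.rest_framework.dashboard import schedule_update_project_configs
    from sentry.models import Dashboard
    from sentry.testutils.helpers import Feature
    from sentry.testutils.pytest.fixtures import django_db_all


    @django_db_all
    def test_dashboard_update_schedules_project_config_invalidation(default_project):
        flags = {
            "organizations:on-demand-metrics-extraction": True,
            "organizations:on-demand-metrics-extraction-experimental": True,
        }
        with Feature(flags), mock.patch(
            # patch the name imported into dashboard.py so the call is intercepted
            "sentry.api.serializers.rest_framework.dashboard.schedule_invalidate_project_config"
        ) as mock_invalidate:
            dashboard = Dashboard.objects.create(
                organization=default_project.organization, created_by_id=1, title="On-demand"
            )
            schedule_update_project_configs(dashboard)

            mock_invalidate.assert_called_once_with(
                trigger="dashboards:create-on-demand-metric",
                organization_id=default_project.organization.id,
            )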

src/sentry/relay/config/metric_extraction.py (+106 -24)

@@ -7,17 +7,20 @@ from sentry.api.endpoints.project_transaction_threshold import DEFAULT_THRESHOLD
 from sentry.constants import DataCategory
 from sentry.incidents.models import AlertRule, AlertRuleStatus
 from sentry.models import (
+    DashboardWidgetQuery,
+    DashboardWidgetTypes,
     Project,
     ProjectTransactionThreshold,
     ProjectTransactionThresholdOverride,
     TransactionMetric,
 )
+from sentry.snuba.dataset import Dataset
 from sentry.snuba.metrics.extraction import (
     QUERY_HASH_KEY,
     MetricSpec,
     OndemandMetricSpec,
     RuleCondition,
-    is_on_demand_snuba_query,
+    is_on_demand_metric_query,
 )
 from sentry.snuba.models import SnubaQuery
 
@@ -28,9 +31,12 @@ logger = logging.getLogger(__name__)
 # Version of the metric extraction config.
 _METRIC_EXTRACTION_VERSION = 1
 
-# Maximum number of custom metrics that can be extracted for alert rules with
+# Maximum number of custom metrics that can be extracted for alerts and widgets with
 # advanced filter expressions.
-_MAX_ALERT_METRICS = 100
+# TODO(Ogi): remove this, or enforce limits for alerts and widgets separately.
+_MAX_ON_DEMAND_METRICS = 100
+
+HashMetricSpec = Tuple[str, MetricSpec]
 
 
 class MetricExtractionConfig(TypedDict):
@@ -46,26 +52,23 @@ def get_metric_extraction_config(project: Project) -> Optional[MetricExtractionC
 
     This requires respective feature flags to be enabled. At the moment, metrics
     for the following models are extracted:
-     - Performance alert rules which advanced filter expressions.
+     - Performance alert rules with advanced filter expressions.
+     - On-demand metrics widgets.
     """
-
     if not features.has("organizations:on-demand-metrics-extraction", project.organization):
         return None
 
-    alerts = (
-        AlertRule.objects.fetch_for_project(project)
-        .filter(status=AlertRuleStatus.PENDING.value)
-        .select_related("snuba_query")
-    )
+    alert_specs = _get_alert_metric_specs(project)
+    widget_specs = _get_widget_metric_specs(project)
 
-    metrics = _get_metric_specs(alerts)
+    metrics = _merge_metric_specs(alert_specs, widget_specs)
 
     if not metrics:
         return None
 
-    if len(metrics) > _MAX_ALERT_METRICS:
-        logger.error("Too many custom alert metrics for project")
-        metrics = metrics[:_MAX_ALERT_METRICS]
+    if len(metrics) > _MAX_ON_DEMAND_METRICS:
+        logger.error("Too many on demand metrics for project")
+        metrics = metrics[:_MAX_ON_DEMAND_METRICS]
 
     return {
         "version": _METRIC_EXTRACTION_VERSION,
@@ -73,27 +76,106 @@ def get_metric_extraction_config(project: Project) -> Optional[MetricExtractionC
     }
 
 
-def _get_metric_specs(alert_rules: Sequence[AlertRule]) -> List[MetricSpec]:
-    # We use a dict so that we can deduplicate metrics with the same query.
-    metrics: Dict[str, MetricSpec] = {}
+def _get_alert_metric_specs(project: Project) -> List[HashMetricSpec]:
+    alert_rules = AlertRule.objects.fetch_for_project(project).filter(
+        status=AlertRuleStatus.PENDING.value,
+        snuba_query__dataset=Dataset.PerformanceMetrics.value,
+    )
 
+    specs = []
     for alert in alert_rules:
-        if result := convert_query_to_metric(alert.snuba_query):
-            metrics[result[0]] = result[1]
+        if result := _convert_snuba_query_to_metric(alert.snuba_query):
+            specs.append(result)
+
+    return specs
+
 
-    return [spec for spec in metrics.values()]
+def _get_widget_metric_specs(project: Project) -> List[HashMetricSpec]:
+    if not features.has(
+        "organizations:on-demand-metrics-extraction-experimental", project.organization
+    ):
+        return []
+
+    # fetch the queries of all on-demand metrics widgets of this organization
+    widget_queries = DashboardWidgetQuery.objects.filter(
+        widget__dashboard__organization=project.organization,
+        widget__widget_type=DashboardWidgetTypes.DISCOVER,
+    )
+
+    specs = []
+    for widget in widget_queries:
+        for result in _convert_widget_query_to_metric(widget):
+            specs.append(result)
 
+    return specs
 
-def convert_query_to_metric(snuba_query: SnubaQuery) -> Optional[Tuple[str, MetricSpec]]:
+
+def _merge_metric_specs(
+    alert_specs: List[HashMetricSpec], widget_specs: List[HashMetricSpec]
+) -> List[MetricSpec]:
+    # We use a dict so that we can deduplicate metrics with the same hash.
+    metrics: Dict[str, MetricSpec] = {}
+    for query_hash, spec in alert_specs + widget_specs:
+        already_present = metrics.get(query_hash)
+        if already_present and already_present != spec:
+            logger.error(
+                "Duplicate metric spec found for hash %s with different specs: %s != %s",
+                query_hash,
+                already_present,
+                spec,
+            )
+            continue
+
+        metrics[query_hash] = spec
+
+    return [metric for metric in metrics.values()]
+
+
+def _convert_snuba_query_to_metric(snuba_query: SnubaQuery) -> Optional[HashMetricSpec]:
     """
     If the passed snuba_query is a valid query for on-demand metric extraction,
-    returns a MetricSpec for the query. Otherwise, returns None.
+    returns a tuple of (hash, MetricSpec) for the query. Otherwise, returns None.
     """
+    return _convert_aggregate_and_query_to_metric(
+        snuba_query.dataset,
+        snuba_query.aggregate,
+        snuba_query.query,
+    )
+
+
+def _convert_widget_query_to_metric(
+    widget_query: DashboardWidgetQuery,
+) -> Sequence[HashMetricSpec]:
+    """
+    Converts a passed metrics widget query to one or more MetricSpecs.
+    A widget query can result in multiple metric specs if it selects multiple fields.
+    """
+    metrics_specs: List[HashMetricSpec] = []
+
+    if not widget_query.aggregates:
+        return metrics_specs
+
+    for aggregate in widget_query.aggregates:
+        if result := _convert_aggregate_and_query_to_metric(
+            # there is an internal check to make sure we extract metrics only for the performance dataset;
+            # however, widgets do not have a dataset field, so we need to pass it explicitly
+            Dataset.PerformanceMetrics.value,
+            aggregate,
+            widget_query.conditions,
+        ):
+            metrics_specs.append(result)
+
+    return metrics_specs
+
+
+def _convert_aggregate_and_query_to_metric(
+    dataset: str, aggregate: str, query: str
+) -> Optional[HashMetricSpec]:
     try:
-        if not is_on_demand_snuba_query(snuba_query):
+        if not is_on_demand_metric_query(dataset, aggregate, query):
             return None
 
-        spec = OndemandMetricSpec(snuba_query.aggregate, snuba_query.query)
+        spec = OndemandMetricSpec(aggregate, query)
         query_hash = spec.query_hash()
 
         return query_hash, {
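
Note: per the tests below and _METRIC_EXTRACTION_VERSION above, the payload returned by
get_metric_extraction_config(project) for a single on-demand "count()" alert filtering on
transaction.duration:>=1000 looks roughly like the following; the query_hash value stands in
for the hash computed by OndemandMetricSpec and is illustrative only:

    {
        "version": 1,
        "metrics": [
            {
                "category": "transaction",
                "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
                "field": None,
                "mri": "c:transactions/on_demand@none",
                "tags": [{"key": "query_hash", "value": "<query hash>"}],
            }
        ],
    }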

src/sentry/snuba/metrics/extraction.py (+3 -3)

@@ -258,14 +258,14 @@ def _get_aggregate_fields(aggregate: str) -> Sequence[str]:
     Returns any fields referenced by the arguments of supported aggregate
     functions, otherwise ``None``.
     """
+    _SUPPORTED_AGG_FNS = ("count_if", "count_unique")
 
-    # count_if is currently the only supported function, exit early
-    if not aggregate.startswith("count_if("):
+    if not aggregate.startswith(_SUPPORTED_AGG_FNS):
         return []
 
     try:
         function, arguments, _ = fields.parse_function(aggregate)
-        if function == "count_if" and arguments:
+        if function in _SUPPORTED_AGG_FNS and arguments:
             return [arguments[0]]
     except InvalidSearchQuery:
         logger.error(f"Failed to parse aggregate: {aggregate}", exc_info=True)
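
Note: with count_unique now accepted, _get_aggregate_fields resolves the referenced field for
both supported functions. Roughly, assuming fields.parse_function splits the aggregate into a
function name and argument list as used above (argument values are illustrative):

    _get_aggregate_fields("count_if(transaction.duration, greater, 2000)")
    # -> ["transaction.duration"]
    _get_aggregate_fields("count_unique(user)")
    # -> ["user"]
    _get_aggregate_fields("avg(transaction.duration)")
    # -> [] (unsupported function, exits early)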

tests/sentry/relay/config/test_metric_extraction.py (+309 -54)

@@ -1,86 +1,341 @@
+from typing import Sequence
 from unittest.mock import ANY
 
-import sentry.relay.config.metric_extraction as extraction
 from sentry.incidents.models import AlertRule
+from sentry.models import (
+    Dashboard,
+    DashboardWidget,
+    DashboardWidgetDisplayTypes,
+    DashboardWidgetQuery,
+    DashboardWidgetTypes,
+    Project,
+)
+from sentry.relay.config.metric_extraction import get_metric_extraction_config
 from sentry.snuba.dataset import Dataset
-from sentry.snuba.models import SnubaQuery
+from sentry.snuba.models import QuerySubscription, SnubaQuery
+from sentry.testutils.helpers import Feature
+from sentry.testutils.pytest.fixtures import django_db_all
 
+ON_DEMAND_METRICS = "organizations:on-demand-metrics-extraction"
+ON_DEMAND_METRICS_WIDGETS = "organizations:on-demand-metrics-extraction-experimental"
 
-def create_alert(query: str) -> AlertRule:
-    snuba_query = SnubaQuery(
-        aggregate="count()",
+
+def create_alert(aggregate: str, query: str, project: Project) -> AlertRule:
+    snuba_query = SnubaQuery.objects.create(
+        aggregate=aggregate,
         query=query,
         dataset=Dataset.PerformanceMetrics.value,
+        time_window=300,
+        resolution=60,
+        environment=None,
+        type=SnubaQuery.Type.PERFORMANCE.value,
+    )
+
+    QuerySubscription.objects.create(
+        snuba_query=snuba_query,
+        project=project,
+    )
+
+    alert_rule = AlertRule.objects.create(
+        snuba_query=snuba_query, threshold_period=1, organization=project.organization
+    )
+
+    return alert_rule
+
+
+def create_widget(
+    aggregates: Sequence[str], query: str, project: Project, title="Dashboard"
+) -> DashboardWidgetQuery:
+    dashboard = Dashboard.objects.create(
+        organization=project.organization,
+        created_by_id=1,
+        title=title,
+    )
+
+    widget = DashboardWidget.objects.create(
+        dashboard=dashboard,
+        order=0,
+        widget_type=DashboardWidgetTypes.DISCOVER,
+        display_type=DashboardWidgetDisplayTypes.LINE_CHART,
     )
-    return AlertRule(snuba_query=snuba_query)
 
+    widget_query = DashboardWidgetQuery.objects.create(
+        aggregates=aggregates, conditions=query, order=0, widget=widget
+    )
+
+    return widget_query
+
+
+@django_db_all
+def test_get_metric_extraction_config_empty_no_alerts(default_project):
+    with Feature(ON_DEMAND_METRICS):
+        assert not get_metric_extraction_config(default_project)
+
+
+@django_db_all
+def test_get_metric_extraction_config_empty_feature_flag_off(default_project):
+    create_alert("count()", "transaction.duration:>=1000", default_project)
+
+    assert not get_metric_extraction_config(default_project)
+
+
+@django_db_all
+def test_get_metric_extraction_config_empty_standard_alerts(default_project):
+    with Feature(ON_DEMAND_METRICS):
+        # standard alerts are not included in the config
+        create_alert("count()", "", default_project)
+
+        assert not get_metric_extraction_config(default_project)
+
+
+@django_db_all
+def test_get_metric_extraction_config_single_alert(default_project):
+    with Feature(ON_DEMAND_METRICS):
+        create_alert("count()", "transaction.duration:>=1000", default_project)
+
+        config = get_metric_extraction_config(default_project)
+
+        assert config
+        assert len(config["metrics"]) == 1
+        assert config["metrics"][0] == {
+            "category": "transaction",
+            "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
+            "field": None,
+            "mri": "c:transactions/on_demand@none",
+            "tags": [{"key": "query_hash", "value": ANY}],
+        }
+
+
+@django_db_all
+def test_get_metric_extraction_config_multiple_alerts(default_project):
+    with Feature(ON_DEMAND_METRICS):
+        create_alert("count()", "transaction.duration:>=1000", default_project)
+        create_alert("count()", "transaction.duration:>=2000", default_project)
+
+        config = get_metric_extraction_config(default_project)
+
+        assert config
+        assert len(config["metrics"]) == 2
+
+        first_hash = config["metrics"][0]["tags"][0]["value"]
+        second_hash = config["metrics"][1]["tags"][0]["value"]
+
+        assert first_hash != second_hash
+
+
+@django_db_all
+def test_get_metric_extraction_config_multiple_alerts_duplicated(default_project):
+    # alerts with the same query should be deduplicated
+    with Feature(ON_DEMAND_METRICS):
+        create_alert("count()", "transaction.duration:>=1000", default_project)
+        create_alert("count()", "transaction.duration:>=1000", default_project)
+
+        config = get_metric_extraction_config(default_project)
+
+        assert config
+        assert len(config["metrics"]) == 1
+
+
+@django_db_all
+def test_get_metric_extraction_config_single_standard_widget(default_project):
+    with Feature({ON_DEMAND_METRICS: True, ON_DEMAND_METRICS_WIDGETS: True}):
+        create_widget(["count()"], "", default_project)
+
+        assert not get_metric_extraction_config(default_project)
+
+
+@django_db_all
+def test_get_metric_extraction_config_single_widget(default_project):
+    with Feature({ON_DEMAND_METRICS: True, ON_DEMAND_METRICS_WIDGETS: True}):
+        create_widget(["count()"], "transaction.duration:>=1000", default_project)
+
+        config = get_metric_extraction_config(default_project)
+
+        assert config
+        assert len(config["metrics"]) == 1
+        assert config["metrics"][0] == {
+            "category": "transaction",
+            "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
+            "field": None,
+            "mri": "c:transactions/on_demand@none",
+            "tags": [{"key": "query_hash", "value": ANY}],
+        }
+
+
+@django_db_all
+def test_get_metric_extraction_config_single_widget_multiple_aggregates(default_project):
+    # widget with multiple fields should result in multiple metrics
+    with Feature({ON_DEMAND_METRICS: True, ON_DEMAND_METRICS_WIDGETS: True}):
+        create_widget(
+            ["count()", "avg(transaction.duration)"], "transaction.duration:>=1000", default_project
+        )
 
-def test_empty_query():
-    alert = create_alert("")
+        config = get_metric_extraction_config(default_project)
 
-    assert extraction.convert_query_to_metric(alert.snuba_query) is None
+        assert config
+        assert len(config["metrics"]) == 2
+        assert config["metrics"][0] == {
+            "category": "transaction",
+            "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
+            "field": None,
+            "mri": "c:transactions/on_demand@none",
+            "tags": [{"key": "query_hash", "value": ANY}],
+        }
+        assert config["metrics"][1] == {
+            "category": "transaction",
+            "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
+            "field": "event.duration",
+            "mri": "d:transactions/on_demand@none",
+            "tags": [{"key": "query_hash", "value": ANY}],
+        }
 
 
-def test_simple_query_count():
-    alert = create_alert("transaction.duration:>=1000")
+@django_db_all
+def test_get_metric_extraction_config_single_widget_multiple_count_if(default_project):
+    # widget with multiple count_if aggregates should result in multiple metrics
+    with Feature({ON_DEMAND_METRICS: True, ON_DEMAND_METRICS_WIDGETS: True}):
+        aggregates = [
+            "count()",
+            "count_if(transaction.duration, greater, 2000)",
+            "count_if(transaction.duration, greaterOrEquals, 1000)",
+        ]
+        create_widget(aggregates, "transaction.duration:>=1000", default_project)
 
-    metric = extraction.convert_query_to_metric(alert.snuba_query)
+        config = get_metric_extraction_config(default_project)
 
-    assert metric
-    assert metric[1] == {
-        "category": "transaction",
-        "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
-        "field": None,
-        "mri": "c:transactions/on_demand@none",
-        "tags": [{"key": "query_hash", "value": ANY}],
-    }
+        assert config
+        assert len(config["metrics"]) == 3
+        assert config["metrics"][0] == {
+            "category": "transaction",
+            "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
+            "field": None,
+            "mri": "c:transactions/on_demand@none",
+            "tags": [{"key": "query_hash", "value": ANY}],
+        }
+        assert config["metrics"][1] == {
+            "category": "transaction",
+            "condition": {
+                "inner": [
+                    {"name": "event.duration", "op": "gte", "value": 1000.0},
+                    {"name": "event.duration", "op": "gt", "value": 2000.0},
+                ],
+                "op": "and",
+            },
+            "field": None,
+            "mri": "c:transactions/on_demand@none",
+            "tags": [{"key": "query_hash", "value": ANY}],
+        }
+        assert config["metrics"][2] == {
+            "category": "transaction",
+            "condition": {
+                "inner": [
+                    {"name": "event.duration", "op": "gte", "value": 1000.0},
+                    {"name": "event.duration", "op": "gte", "value": 1000.0},
+                ],
+                "op": "and",
+            },
+            "field": None,
+            "mri": "c:transactions/on_demand@none",
+            "tags": [{"key": "query_hash", "value": ANY}],
+        }
 
 
-def test_get_metric_specs_empty():
-    assert len(extraction._get_metric_specs([])) == 0
+@django_db_all
+def test_get_metric_extraction_config_multiple_aggregates_single_field(default_project):
+    # widget with multiple aggregates on the same field should result in a single metric
+    with Feature({ON_DEMAND_METRICS: True, ON_DEMAND_METRICS_WIDGETS: True}):
+        create_widget(
+            ["sum(transaction.duration)", "avg(transaction.duration)"],
+            "transaction.duration:>=1000",
+            default_project,
+        )
 
+        config = get_metric_extraction_config(default_project)
 
-def test_get_metric_specs_single():
-    alert = create_alert("transaction.duration:>=1000")
+        assert config
+        assert len(config["metrics"]) == 1
+        assert config["metrics"][0] == {
+            "category": "transaction",
+            "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
+            "field": "event.duration",
+            "mri": "d:transactions/on_demand@none",
+            "tags": [{"key": "query_hash", "value": ANY}],
+        }
 
-    specs = extraction._get_metric_specs([alert])
 
-    assert len(specs) == 1
-    assert specs[0] == {
-        "category": "transaction",
-        "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
-        "field": None,
-        "mri": "c:transactions/on_demand@none",
-        "tags": [{"key": "query_hash", "value": ANY}],
-    }
+@django_db_all
+def test_get_metric_extraction_config_multiple_widgets_duplicated(default_project):
+    # metrics should be deduplicated across widgets
+    with Feature({ON_DEMAND_METRICS: True, ON_DEMAND_METRICS_WIDGETS: True}):
+        create_widget(
+            ["count()", "avg(transaction.duration)"], "transaction.duration:>=1000", default_project
+        )
+        create_widget(["count()"], "transaction.duration:>=1000", default_project, "Dashboard 2")
 
+        config = get_metric_extraction_config(default_project)
 
-def test_get_metric_specs_multiple():
-    alert_1 = create_alert("transaction.duration:>=1")
-    alert_2 = create_alert("transaction.duration:>=2")
+        assert config
+        assert len(config["metrics"]) == 2
+        assert config["metrics"][0] == {
+            "category": "transaction",
+            "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
+            "field": None,
+            "mri": "c:transactions/on_demand@none",
+            "tags": [{"key": "query_hash", "value": ANY}],
+        }
+        assert config["metrics"][1] == {
+            "category": "transaction",
+            "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
+            "field": "event.duration",
+            "mri": "d:transactions/on_demand@none",
+            "tags": [{"key": "query_hash", "value": ANY}],
+        }
 
-    specs = extraction._get_metric_specs([alert_1, alert_2])
 
-    assert len(specs) == 2
+@django_db_all
+def test_get_metric_extraction_config_alerts_and_widgets_off(default_project):
+    # widgets should be skipped if the feature is off
+    with Feature({ON_DEMAND_METRICS: True, ON_DEMAND_METRICS_WIDGETS: False}):
+        create_alert("count()", "transaction.duration:>=1000", default_project)
+        create_widget(["count()"], "transaction.duration:>=1000", default_project)
 
-    first_hash = specs[0]["tags"][0]["value"]
-    second_hash = specs[1]["tags"][0]["value"]
+        config = get_metric_extraction_config(default_project)
 
-    assert first_hash != second_hash
+        assert config
+        assert len(config["metrics"]) == 1
+        assert config["metrics"][0] == {
+            "category": "transaction",
+            "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
+            "field": None,
+            "mri": "c:transactions/on_demand@none",
+            "tags": [{"key": "query_hash", "value": ANY}],
+        }
 
 
-def test_get_metric_specs_multiple_duplicated():
-    alert_1 = create_alert("transaction.duration:>=1000")
-    alert_2 = create_alert("transaction.duration:>=1000")
-    alert_3 = create_alert("transaction.duration:>=1000")
+@django_db_all
+def test_get_metric_extraction_config_alerts_and_widgets(default_project):
+    # deduplication should work across alerts and widgets
+    with Feature({ON_DEMAND_METRICS: True, ON_DEMAND_METRICS_WIDGETS: True}):
+        create_alert("count()", "transaction.duration:>=1000", default_project)
+        create_widget(
+            ["count()", "avg(transaction.duration)"], "transaction.duration:>=1000", default_project
+        )
 
-    specs = extraction._get_metric_specs([alert_1, alert_2, alert_3])
+        config = get_metric_extraction_config(default_project)
 
-    assert len(specs) == 1
-    assert specs[0] == {
-        "category": "transaction",
-        "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
-        "field": None,
-        "mri": "c:transactions/on_demand@none",
-        "tags": [{"key": "query_hash", "value": ANY}],
-    }
+        assert config
+        assert len(config["metrics"]) == 2
+        assert config["metrics"][0] == {
+            "category": "transaction",
+            "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
+            "field": None,
+            "mri": "c:transactions/on_demand@none",
+            "tags": [{"key": "query_hash", "value": ANY}],
+        }
+        assert config["metrics"][1] == {
+            "category": "transaction",
+            "condition": {"name": "event.duration", "op": "gte", "value": 1000.0},
+            "field": "event.duration",
+            "mri": "d:transactions/on_demand@none",
+            "tags": [{"key": "query_hash", "value": ANY}],
+        }
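
Note: the truncation guarded by _MAX_ON_DEMAND_METRICS is not exercised by these tests. A
minimal sketch of such a test, reusing the create_alert helper above; the test name and the
number of alerts are illustrative:

    @django_db_all
    def test_get_metric_extraction_config_caps_metric_count(default_project):
        with Feature(ON_DEMAND_METRICS):
            # each alert uses a distinct query, so each produces its own spec
            for threshold in range(110):
                create_alert("count()", f"transaction.duration:>={threshold + 1000}", default_project)

            config = get_metric_extraction_config(default_project)

            assert config
            # expected to be capped at _MAX_ON_DEMAND_METRICS (100) specs
            assert len(config["metrics"]) == 100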