12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677 |
- from __future__ import annotations
- from datetime import timedelta
- from typing import Any
- from unittest import mock
- import pytest
- from django.urls import reverse
- from sentry.models.environment import Environment
- from sentry.sentry_metrics.use_case_id_registry import UseCaseID
- from sentry.snuba.metrics.extraction import MetricSpecType, OnDemandMetricSpec
- from sentry.testutils.cases import MetricsEnhancedPerformanceTestCase
- from sentry.testutils.helpers.datetime import before_now, iso_format
- from sentry.testutils.silo import region_silo_test
- pytestmark = pytest.mark.sentry_metrics
- @region_silo_test
- class OrganizationEventsStatsMetricsEnhancedPerformanceEndpointTest(
- MetricsEnhancedPerformanceTestCase
- ):
- endpoint = "sentry-api-0-organization-events-stats"
- METRIC_STRINGS = [
- "foo_transaction",
- "d:transactions/measurements.datacenter_memory@pebibyte",
- ]
- def setUp(self):
- super().setUp()
- self.login_as(user=self.user)
- self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
- self.DEFAULT_METRIC_TIMESTAMP = self.day_ago
- self.url = reverse(
- "sentry-api-0-organization-events-stats",
- kwargs={"organization_slug": self.project.organization.slug},
- )
- self.features = {
- "organizations:performance-use-metrics": True,
- }
- self.additional_params = dict()
- # These throughput tests should roughly match the ones in OrganizationEventsStatsEndpointTest
- def test_throughput_epm_hour_rollup(self):
- # Each of these denotes how many events to create in each hour
- event_counts = [6, 0, 6, 3, 0, 3]
- for hour, count in enumerate(event_counts):
- for minute in range(count):
- self.store_transaction_metric(
- 1, timestamp=self.day_ago + timedelta(hours=hour, minutes=minute)
- )
- for axis in ["epm()", "tpm()"]:
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=6)),
- "interval": "1h",
- "yAxis": axis,
- "project": self.project.id,
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 6
- assert response.data["isMetricsData"]
- rows = data[0:6]
- for test in zip(event_counts, rows):
- assert test[1][1][0]["count"] == test[0] / (3600.0 / 60.0)
- def test_throughput_epm_day_rollup(self):
- # Each of these denotes how many events to create in each minute
- event_counts = [6, 0, 6, 3, 0, 3]
- for hour, count in enumerate(event_counts):
- for minute in range(count):
- self.store_transaction_metric(
- 1, timestamp=self.day_ago + timedelta(hours=hour, minutes=minute)
- )
- for axis in ["epm()", "tpm()"]:
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=24)),
- "interval": "24h",
- "yAxis": axis,
- "project": self.project.id,
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- assert response.data["isMetricsData"]
- assert data[0][1][0]["count"] == sum(event_counts) / (86400.0 / 60.0)
- def test_throughput_epm_hour_rollup_offset_of_hour(self):
- # Each of these denotes how many events to create in each hour
- event_counts = [6, 0, 6, 3, 0, 3]
- for hour, count in enumerate(event_counts):
- for minute in range(count):
- self.store_transaction_metric(
- 1, timestamp=self.day_ago + timedelta(hours=hour, minutes=minute + 30)
- )
- for axis in ["tpm()", "epm()"]:
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago + timedelta(minutes=30)),
- "end": iso_format(self.day_ago + timedelta(hours=6, minutes=30)),
- "interval": "1h",
- "yAxis": axis,
- "project": self.project.id,
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 6
- assert response.data["isMetricsData"]
- rows = data[0:6]
- for test in zip(event_counts, rows):
- assert test[1][1][0]["count"] == test[0] / (3600.0 / 60.0)
- def test_throughput_eps_minute_rollup(self):
- # Each of these denotes how many events to create in each minute
- event_counts = [6, 0, 6, 3, 0, 3]
- for minute, count in enumerate(event_counts):
- for second in range(count):
- self.store_transaction_metric(
- 1, timestamp=self.day_ago + timedelta(minutes=minute, seconds=second)
- )
- for axis in ["eps()", "tps()"]:
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(minutes=6)),
- "interval": "1m",
- "yAxis": axis,
- "project": self.project.id,
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 6
- assert response.data["isMetricsData"]
- rows = data[0:6]
- for test in zip(event_counts, rows):
- assert test[1][1][0]["count"] == test[0] / 60.0
- def test_failure_rate(self):
- for hour in range(6):
- timestamp = self.day_ago + timedelta(hours=hour, minutes=30)
- self.store_transaction_metric(1, tags={"transaction.status": "ok"}, timestamp=timestamp)
- if hour < 3:
- self.store_transaction_metric(
- 1, tags={"transaction.status": "internal_error"}, timestamp=timestamp
- )
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=6)),
- "interval": "1h",
- "yAxis": ["failure_rate()"],
- "project": self.project.id,
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 6
- assert response.data["isMetricsData"]
- assert [attrs for time, attrs in response.data["data"]] == [
- [{"count": 0.5}],
- [{"count": 0.5}],
- [{"count": 0.5}],
- [{"count": 0}],
- [{"count": 0}],
- [{"count": 0}],
- ]
- def test_percentiles_multi_axis(self):
- for hour in range(6):
- timestamp = self.day_ago + timedelta(hours=hour, minutes=30)
- self.store_transaction_metric(111, timestamp=timestamp)
- self.store_transaction_metric(222, metric="measurements.lcp", timestamp=timestamp)
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=6)),
- "interval": "1h",
- "yAxis": ["p75(measurements.lcp)", "p75(transaction.duration)"],
- "project": self.project.id,
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- lcp = response.data["p75(measurements.lcp)"]
- duration = response.data["p75(transaction.duration)"]
- assert len(duration["data"]) == 6
- assert duration["isMetricsData"]
- assert len(lcp["data"]) == 6
- assert lcp["isMetricsData"]
- for item in duration["data"]:
- assert item[1][0]["count"] == 111
- for item in lcp["data"]:
- assert item[1][0]["count"] == 222
- @mock.patch("sentry.snuba.metrics_enhanced_performance.timeseries_query", return_value={})
- def test_multiple_yaxis_only_one_query(self, mock_query):
- self.do_request(
- data={
- "project": self.project.id,
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- "yAxis": ["epm()", "eps()", "tpm()", "p50(transaction.duration)"],
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert mock_query.call_count == 1
- def test_aggregate_function_user_count(self):
- self.store_transaction_metric(
- 1, metric="user", timestamp=self.day_ago + timedelta(minutes=30)
- )
- self.store_transaction_metric(
- 1, metric="user", timestamp=self.day_ago + timedelta(hours=1, minutes=30)
- )
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- "yAxis": "count_unique(user)",
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- assert response.data["isMetricsData"]
- assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 1}]]
- meta = response.data["meta"]
- assert meta["isMetricsData"] == response.data["isMetricsData"]
- def test_non_mep_query_fallsback(self):
- def get_mep(query):
- response = self.do_request(
- data={
- "project": self.project.id,
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- "query": query,
- "yAxis": ["epm()"],
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- return response.data["isMetricsData"]
- assert get_mep(""), "empty query"
- assert get_mep("event.type:transaction"), "event type transaction"
- assert not get_mep("event.type:error"), "event type error"
- assert not get_mep("transaction.duration:<15min"), "outlier filter"
- assert get_mep("epm():>0.01"), "throughput filter"
- assert not get_mep(
- "event.type:transaction OR event.type:error"
- ), "boolean with non-mep filter"
- assert get_mep(
- "event.type:transaction OR transaction:foo_transaction"
- ), "boolean with mep filter"
- def test_having_condition_with_preventing_aggregates(self):
- response = self.do_request(
- data={
- "project": self.project.id,
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- "query": "p95():<5s",
- "yAxis": ["epm()"],
- "dataset": "metricsEnhanced",
- "preventMetricAggregates": "1",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- assert not response.data["isMetricsData"]
- meta = response.data["meta"]
- assert meta["isMetricsData"] == response.data["isMetricsData"]
- def test_explicit_not_mep(self):
- response = self.do_request(
- data={
- "project": self.project.id,
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- # Should be a mep able query
- "query": "",
- "yAxis": ["epm()"],
- "metricsEnhanced": "0",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- assert not response.data["isMetricsData"]
- meta = response.data["meta"]
- assert meta["isMetricsData"] == response.data["isMetricsData"]
- def test_sum_transaction_duration(self):
- self.store_transaction_metric(123, timestamp=self.day_ago + timedelta(minutes=30))
- self.store_transaction_metric(456, timestamp=self.day_ago + timedelta(hours=1, minutes=30))
- self.store_transaction_metric(789, timestamp=self.day_ago + timedelta(hours=1, minutes=30))
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- "yAxis": "sum(transaction.duration)",
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- assert response.data["isMetricsData"]
- assert [attrs for time, attrs in response.data["data"]] == [
- [{"count": 123}],
- [{"count": 1245}],
- ]
- meta = response.data["meta"]
- assert meta["isMetricsData"] == response.data["isMetricsData"]
- assert meta["fields"] == {"time": "date", "sum_transaction_duration": "duration"}
- assert meta["units"] == {"time": None, "sum_transaction_duration": "millisecond"}
- def test_sum_transaction_duration_with_comparison(self):
- # We store the data for the previous day (in order to have values for the comparison).
- self.store_transaction_metric(
- 1, timestamp=self.day_ago - timedelta(days=1) + timedelta(minutes=30)
- )
- self.store_transaction_metric(
- 2, timestamp=self.day_ago - timedelta(days=1) + timedelta(minutes=30)
- )
- # We store the data for today.
- self.store_transaction_metric(123, timestamp=self.day_ago + timedelta(minutes=30))
- self.store_transaction_metric(456, timestamp=self.day_ago + timedelta(minutes=30))
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(days=1)),
- "interval": "1d",
- "yAxis": "sum(transaction.duration)",
- "comparisonDelta": 86400,
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- assert response.data["isMetricsData"]
- # For some reason, if all tests run, there is some shared state that makes this test have data in the second
- # time bucket, which is filled automatically by the zerofilling. In order to avoid this flaky failure, we will
- # only check that the first bucket contains the actual data.
- assert [attrs for time, attrs in response.data["data"]][0] == [
- {"comparisonCount": 3.0, "count": 579.0}
- ]
- meta = response.data["meta"]
- assert meta["isMetricsData"] == response.data["isMetricsData"]
- assert meta["fields"] == {"time": "date", "sum_transaction_duration": "duration"}
- assert meta["units"] == {"time": None, "sum_transaction_duration": "millisecond"}
- def test_custom_measurement(self):
- self.store_transaction_metric(
- 123,
- metric="measurements.bytes_transfered",
- internal_metric="d:transactions/measurements.datacenter_memory@pebibyte",
- entity="metrics_distributions",
- tags={"transaction": "foo_transaction"},
- timestamp=self.day_ago + timedelta(minutes=30),
- )
- self.store_transaction_metric(
- 456,
- metric="measurements.bytes_transfered",
- internal_metric="d:transactions/measurements.datacenter_memory@pebibyte",
- entity="metrics_distributions",
- tags={"transaction": "foo_transaction"},
- timestamp=self.day_ago + timedelta(hours=1, minutes=30),
- )
- self.store_transaction_metric(
- 789,
- metric="measurements.bytes_transfered",
- internal_metric="d:transactions/measurements.datacenter_memory@pebibyte",
- entity="metrics_distributions",
- tags={"transaction": "foo_transaction"},
- timestamp=self.day_ago + timedelta(hours=1, minutes=30),
- )
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- "yAxis": "sum(measurements.datacenter_memory)",
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- assert response.data["isMetricsData"]
- assert [attrs for time, attrs in response.data["data"]] == [
- [{"count": 123}],
- [{"count": 1245}],
- ]
- meta = response.data["meta"]
- assert meta["isMetricsData"] == response.data["isMetricsData"]
- assert meta["fields"] == {"time": "date", "sum_measurements_datacenter_memory": "size"}
- assert meta["units"] == {"time": None, "sum_measurements_datacenter_memory": "pebibyte"}
- def test_does_not_fallback_if_custom_metric_is_out_of_request_time_range(self):
- self.store_transaction_metric(
- 123,
- timestamp=self.day_ago + timedelta(hours=1),
- internal_metric="d:transactions/measurements.custom@kibibyte",
- entity="metrics_distributions",
- )
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- "yAxis": "p99(measurements.custom)",
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- meta = response.data["meta"]
- assert response.status_code == 200, response.content
- assert response.data["isMetricsData"]
- assert meta["isMetricsData"]
- assert meta["fields"] == {"time": "date", "p99_measurements_custom": "size"}
- assert meta["units"] == {"time": None, "p99_measurements_custom": "kibibyte"}
- def test_multi_yaxis_custom_measurement(self):
- self.store_transaction_metric(
- 123,
- metric="measurements.bytes_transfered",
- internal_metric="d:transactions/measurements.datacenter_memory@pebibyte",
- entity="metrics_distributions",
- tags={"transaction": "foo_transaction"},
- timestamp=self.day_ago + timedelta(minutes=30),
- )
- self.store_transaction_metric(
- 456,
- metric="measurements.bytes_transfered",
- internal_metric="d:transactions/measurements.datacenter_memory@pebibyte",
- entity="metrics_distributions",
- tags={"transaction": "foo_transaction"},
- timestamp=self.day_ago + timedelta(hours=1, minutes=30),
- )
- self.store_transaction_metric(
- 789,
- metric="measurements.bytes_transfered",
- internal_metric="d:transactions/measurements.datacenter_memory@pebibyte",
- entity="metrics_distributions",
- tags={"transaction": "foo_transaction"},
- timestamp=self.day_ago + timedelta(hours=1, minutes=30),
- )
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- "yAxis": [
- "sum(measurements.datacenter_memory)",
- "p50(measurements.datacenter_memory)",
- ],
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- sum_data = response.data["sum(measurements.datacenter_memory)"]
- p50_data = response.data["p50(measurements.datacenter_memory)"]
- assert sum_data["isMetricsData"]
- assert p50_data["isMetricsData"]
- assert [attrs for time, attrs in sum_data["data"]] == [
- [{"count": 123}],
- [{"count": 1245}],
- ]
- assert [attrs for time, attrs in p50_data["data"]] == [
- [{"count": 123}],
- [{"count": 622.5}],
- ]
- sum_meta = sum_data["meta"]
- assert sum_meta["isMetricsData"] == sum_data["isMetricsData"]
- assert sum_meta["fields"] == {
- "time": "date",
- "sum_measurements_datacenter_memory": "size",
- "p50_measurements_datacenter_memory": "size",
- }
- assert sum_meta["units"] == {
- "time": None,
- "sum_measurements_datacenter_memory": "pebibyte",
- "p50_measurements_datacenter_memory": "pebibyte",
- }
- p50_meta = p50_data["meta"]
- assert p50_meta["isMetricsData"] == p50_data["isMetricsData"]
- assert p50_meta["fields"] == {
- "time": "date",
- "sum_measurements_datacenter_memory": "size",
- "p50_measurements_datacenter_memory": "size",
- }
- assert p50_meta["units"] == {
- "time": None,
- "sum_measurements_datacenter_memory": "pebibyte",
- "p50_measurements_datacenter_memory": "pebibyte",
- }
- def test_dataset_metrics_does_not_fallback(self):
- self.store_transaction_metric(123, timestamp=self.day_ago + timedelta(minutes=30))
- self.store_transaction_metric(456, timestamp=self.day_ago + timedelta(hours=1, minutes=30))
- self.store_transaction_metric(789, timestamp=self.day_ago + timedelta(hours=1, minutes=30))
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- "query": "transaction.duration:<5s",
- "yAxis": "sum(transaction.duration)",
- "dataset": "metrics",
- **self.additional_params,
- },
- )
- assert response.status_code == 400, response.content
- def test_title_filter(self):
- self.store_transaction_metric(
- 123,
- tags={"transaction": "foo_transaction"},
- timestamp=self.day_ago + timedelta(minutes=30),
- )
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- "query": "title:foo_transaction",
- "yAxis": [
- "sum(transaction.duration)",
- ],
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert [attrs for time, attrs in data] == [
- [{"count": 123}],
- [{"count": 0}],
- ]
- def test_transaction_status_unknown_error(self):
- self.store_transaction_metric(
- 123,
- tags={"transaction.status": "unknown"},
- timestamp=self.day_ago + timedelta(minutes=30),
- )
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- "query": "transaction.status:unknown_error",
- "yAxis": [
- "sum(transaction.duration)",
- ],
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert [attrs for time, attrs in data] == [
- [{"count": 123}],
- [{"count": 0}],
- ]
- def test_custom_performance_metric_meta_contains_field_and_unit_data(self):
- self.store_transaction_metric(
- 123,
- timestamp=self.day_ago + timedelta(hours=1),
- internal_metric="d:transactions/measurements.custom@kibibyte",
- entity="metrics_distributions",
- )
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- "yAxis": "p99(measurements.custom)",
- "query": "",
- **self.additional_params,
- },
- )
- assert response.status_code == 200
- meta = response.data["meta"]
- assert meta["fields"] == {"time": "date", "p99_measurements_custom": "size"}
- assert meta["units"] == {"time": None, "p99_measurements_custom": "kibibyte"}
- def test_multi_series_custom_performance_metric_meta_contains_field_and_unit_data(self):
- self.store_transaction_metric(
- 123,
- timestamp=self.day_ago + timedelta(hours=1),
- internal_metric="d:transactions/measurements.custom@kibibyte",
- entity="metrics_distributions",
- )
- self.store_transaction_metric(
- 123,
- timestamp=self.day_ago + timedelta(hours=1),
- internal_metric="d:transactions/measurements.another.custom@pebibyte",
- entity="metrics_distributions",
- )
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- "yAxis": [
- "p95(measurements.custom)",
- "p99(measurements.custom)",
- "p99(measurements.another.custom)",
- ],
- "query": "",
- **self.additional_params,
- },
- )
- assert response.status_code == 200
- meta = response.data["p95(measurements.custom)"]["meta"]
- assert meta["fields"] == {
- "time": "date",
- "p95_measurements_custom": "size",
- "p99_measurements_custom": "size",
- "p99_measurements_another_custom": "size",
- }
- assert meta["units"] == {
- "time": None,
- "p95_measurements_custom": "kibibyte",
- "p99_measurements_custom": "kibibyte",
- "p99_measurements_another_custom": "pebibyte",
- }
- assert meta == response.data["p99(measurements.custom)"]["meta"]
- assert meta == response.data["p99(measurements.another.custom)"]["meta"]
- def test_no_top_events_with_project_field(self):
- project = self.create_project()
- response = self.do_request(
- data={
- # make sure to query the project with 0 events
- "project": project.id,
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=2)),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "project"],
- "topEvents": 5,
- "dataset": "metrics",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- # When there are no top events, we do not return an empty dict.
- # Instead, we return a single zero-filled series for an empty graph.
- data = response.data["data"]
- assert [attrs for time, attrs in data] == [[{"count": 0}], [{"count": 0}]]
- def test_top_events_with_transaction(self):
- transaction_spec = [("foo", 100), ("bar", 200), ("baz", 300)]
- for offset in range(5):
- for transaction, duration in transaction_spec:
- self.store_transaction_metric(
- duration,
- tags={"transaction": f"{transaction}_transaction"},
- timestamp=self.day_ago + timedelta(hours=offset, minutes=30),
- )
- response = self.do_request(
- data={
- # make sure to query the project with 0 events
- "project": self.project.id,
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=5)),
- "interval": "1h",
- "yAxis": "p75(transaction.duration)",
- "orderby": ["-p75(transaction.duration)"],
- "field": ["p75(transaction.duration)", "transaction"],
- "topEvents": 5,
- "dataset": "metrics",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- for position, (transaction, duration) in enumerate(transaction_spec):
- data = response.data[f"{transaction}_transaction"]
- chart_data = data["data"]
- assert data["order"] == 2 - position
- assert [attrs for time, attrs in chart_data] == [[{"count": duration}]] * 5
- def test_top_events_with_project(self):
- self.store_transaction_metric(
- 100,
- timestamp=self.day_ago + timedelta(hours=1, minutes=30),
- )
- response = self.do_request(
- data={
- # make sure to query the project with 0 events
- "project": self.project.id,
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=5)),
- "interval": "1h",
- "yAxis": "p75(transaction.duration)",
- "orderby": ["-p75(transaction.duration)"],
- "field": ["p75(transaction.duration)", "project"],
- "topEvents": 5,
- "dataset": "metrics",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- data = response.data[f"{self.project.slug}"]
- assert data["order"] == 0
@region_silo_test
class OrganizationEventsStatsMetricsEnhancedPerformanceEndpointTestWithMetricLayer(
    OrganizationEventsStatsMetricsEnhancedPerformanceEndpointTest
):
    """Re-runs the parent test suite with the metrics layer forced on, and
    adds coverage for standard/custom metric MRIs across metric types."""

    def setUp(self):
        super().setUp()
        # Force the metrics layer for every request made by the inherited tests.
        self.features["organizations:use-metrics-layer"] = True
        self.additional_params = {"forceMetricsLayer": "true"}

    def test_counter_standard_metric(self):
        mri = "c:transactions/usage@none"
        for index, value in enumerate((10, 20, 30, 40, 50, 60)):
            self.store_transaction_metric(
                value,
                metric=mri,
                internal_metric=mri,
                entity="metrics_counters",
                timestamp=self.day_ago + timedelta(minutes=index),
                use_case_id=UseCaseID.CUSTOM,
            )

        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=6)),
                "interval": "1m",
                "yAxis": [f"sum({mri})"],
                "project": self.project.id,
                "dataset": "metricsEnhanced",
                **self.additional_params,
            },
        )
        assert response.status_code == 200, response.content
        data = response.data["data"]
        for (_, value), expected_value in zip(data, [10, 20, 30, 40, 50, 60]):
            assert value[0]["count"] == expected_value  # type:ignore

    def test_counter_custom_metric(self):
        mri = "c:custom/sentry.process_profile.track_outcome@second"
        for index, value in enumerate((10, 20, 30, 40, 50, 60)):
            self.store_transaction_metric(
                value,
                metric=mri,
                internal_metric=mri,
                entity="metrics_counters",
                timestamp=self.day_ago + timedelta(hours=index),
                use_case_id=UseCaseID.CUSTOM,
            )

        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=6)),
                "interval": "1h",
                "yAxis": [f"sum({mri})"],
                "project": self.project.id,
                "dataset": "metricsEnhanced",
                **self.additional_params,
            },
        )
        assert response.status_code == 200, response.content
        data = response.data["data"]
        for (_, value), expected_value in zip(data, [10, 20, 30, 40, 50, 60]):
            assert value[0]["count"] == expected_value  # type:ignore

    def test_distribution_custom_metric(self):
        mri = "d:custom/sentry.process_profile.track_outcome@second"
        for index, value in enumerate((10, 20, 30, 40, 50, 60)):
            for multiplier in (1, 2, 3):
                self.store_transaction_metric(
                    value * multiplier,
                    metric=mri,
                    internal_metric=mri,
                    entity="metrics_distributions",
                    timestamp=self.day_ago + timedelta(hours=index),
                    use_case_id=UseCaseID.CUSTOM,
                )

        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=6)),
                "interval": "1h",
                "yAxis": [f"min({mri})", f"max({mri})", f"p90({mri})"],
                "project": self.project.id,
                "dataset": "metricsEnhanced",
                **self.additional_params,
            },
        )
        assert response.status_code == 200, response.content
        data = response.data
        min = data[f"min({mri})"]["data"]
        for (_, value), expected_value in zip(min, [10.0, 20.0, 30.0, 40.0, 50.0, 60.0]):
            assert value[0]["count"] == expected_value  # type:ignore
        max = data[f"max({mri})"]["data"]
        for (_, value), expected_value in zip(max, [30.0, 60.0, 90.0, 120.0, 150.0, 180.0]):
            assert value[0]["count"] == expected_value  # type:ignore
        p90 = data[f"p90({mri})"]["data"]
        for (_, value), expected_value in zip(p90, [28.0, 56.0, 84.0, 112.0, 140.0, 168.0]):
            assert value[0]["count"] == expected_value  # type:ignore
- def test_set_custom_metric(self):
- mri = "s:custom/sentry.process_profile.track_outcome@second"
- for index, value in enumerate((10, 20, 30, 40, 50, 60)):
- # We store each value a second time, since we want to check the de-duplication of sets.
- for i in range(0, 2):
- self.store_transaction_metric(
- value,
- metric=mri,
- internal_metric=mri,
- entity="metrics_sets",
- timestamp=self.day_ago + timedelta(hours=index),
- use_case_id=UseCaseID.CUSTOM,
- )
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=6)),
- "interval": "1h",
- "yAxis": [f"count_unique({mri})"],
- "project": self.project.id,
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- for (_, value), expected_value in zip(data, [1, 1, 1, 1, 1, 1]):
- assert value[0]["count"] == expected_value # type:ignore
- def test_gauge_custom_metric(self):
- mri = "g:custom/sentry.process_profile.track_outcome@second"
- for index, value in enumerate((10, 20, 30, 40, 50, 60)):
- for multiplier in (1, 3):
- self.store_transaction_metric(
- value * multiplier,
- metric=mri,
- internal_metric=mri,
- entity="metrics_gauges",
- # When multiple gauges are merged, in order to make the `last` merge work deterministically it's
- # better to have the gauges with different timestamps so that the last value is always the same.
- timestamp=self.day_ago + timedelta(hours=index, minutes=multiplier),
- use_case_id=UseCaseID.CUSTOM,
- )
- response = self.do_request(
- data={
- "start": iso_format(self.day_ago),
- "end": iso_format(self.day_ago + timedelta(hours=6)),
- "interval": "1h",
- "yAxis": [
- f"min({mri})",
- f"max({mri})",
- f"last({mri})",
- f"sum({mri})",
- f"count({mri})",
- ],
- "project": self.project.id,
- "dataset": "metricsEnhanced",
- **self.additional_params,
- },
- )
- assert response.status_code == 200, response.content
- data = response.data
- min = data[f"min({mri})"]["data"]
- for (_, value), expected_value in zip(min, [10.0, 20.0, 30.0, 40.0, 50.0, 60.0]):
- assert value[0]["count"] == expected_value # type:ignore
- max = data[f"max({mri})"]["data"]
- for (_, value), expected_value in zip(max, [30.0, 60.0, 90.0, 120.0, 150.0, 180.0]):
- assert value[0]["count"] == expected_value # type:ignore
- last = data[f"last({mri})"]["data"]
- for (_, value), expected_value in zip(last, [30.0, 60.0, 90.0, 120.0, 150.0, 180.0]):
- assert value[0]["count"] == expected_value # type:ignore
- sum = data[f"sum({mri})"]["data"]
- for (_, value), expected_value in zip(sum, [40.0, 80.0, 120.0, 160.0, 200.0, 240.0]):
- assert value[0]["count"] == expected_value # type:ignore
- count = data[f"count({mri})"]["data"]
- for (_, value), expected_value in zip(count, [40, 80, 120, 160, 200, 240]):
- assert value[0]["count"] == expected_value # type:ignore
@region_silo_test
class OrganizationEventsStatsMetricsEnhancedPerformanceEndpointTestWithOnDemandWidgets(
    MetricsEnhancedPerformanceTestCase
):
    """Tests the events-stats endpoint against on-demand extracted metrics
    (dashboard widgets): top-events grouping, extraction metadata flags,
    spec-to-metric-spec translation, and orderby semantics.

    ``setUp`` enables both on-demand extraction feature flags and pins
    ``self.day_ago`` (also used as the default metric timestamp) to a fixed
    hour so interval bucketing is deterministic.
    """

    endpoint = "sentry-api-0-organization-events-stats"

    def setUp(self):
        super().setUp()
        self.login_as(user=self.user)
        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        self.DEFAULT_METRIC_TIMESTAMP = self.day_ago

        Environment.get_or_create(self.project, "production")

        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_slug": self.project.organization.slug},
        )
        self.features = {
            "organizations:on-demand-metrics-extraction-widgets": True,
            "organizations:on-demand-metrics-extraction": True,
        }

    def test_top_events_wrong_on_demand_type(self):
        """An unknown ``onDemandType`` value is rejected with a 400."""
        query = "transaction.duration:>=100"
        yAxis = ["count()", "count_web_vitals(measurements.lcp, good)"]
        response = self.do_request(
            data={
                "project": self.project.id,
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "orderby": ["-count()"],
                "environment": "production",
                "query": query,
                "yAxis": yAxis,
                "field": [
                    "count()",
                ],
                "topEvents": 5,
                "dataset": "metrics",
                "useOnDemandMetrics": "true",
                "onDemandType": "not_real",
            },
        )

        assert response.status_code == 400, response.content

    def test_top_events_works_without_on_demand_type(self):
        """``useOnDemandMetrics`` alone (no ``onDemandType``) is accepted."""
        query = "transaction.duration:>=100"
        yAxis = ["count()", "count_web_vitals(measurements.lcp, good)"]
        response = self.do_request(
            data={
                "project": self.project.id,
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "orderby": ["-count()"],
                "environment": "production",
                "query": query,
                "yAxis": yAxis,
                "field": [
                    "count()",
                ],
                "topEvents": 5,
                "dataset": "metrics",
                "useOnDemandMetrics": "true",
            },
        )

        assert response.status_code == 200, response.content

    def test_top_events_with_transaction_on_demand(self):
        """Top-events over two on-demand specs groups by custom tags and flags extracted data."""
        field = "count()"
        field_two = "count_web_vitals(measurements.lcp, good)"
        groupbys = ["customtag1", "customtag2"]
        query = "transaction.duration:>=100"
        spec = OnDemandMetricSpec(
            field=field, groupbys=groupbys, query=query, spec_type=MetricSpecType.DYNAMIC_QUERY
        )
        spec_two = OnDemandMetricSpec(
            field=field_two, groupbys=groupbys, query=query, spec_type=MetricSpecType.DYNAMIC_QUERY
        )

        for hour in range(0, 5):
            # Each spec is stored under a different tag pair so the two
            # top-events groups ("foo,red" / "bar,blue") are disjoint.
            self.store_on_demand_metric(
                hour * 62 * 24,
                spec=spec,
                additional_tags={
                    "customtag1": "foo",
                    "customtag2": "red",
                    "environment": "production",
                },
                timestamp=self.day_ago + timedelta(hours=hour),
            )
            self.store_on_demand_metric(
                hour * 60 * 24,
                spec=spec_two,
                additional_tags={
                    "customtag1": "bar",
                    "customtag2": "blue",
                    "environment": "production",
                },
                timestamp=self.day_ago + timedelta(hours=hour),
            )

        yAxis = ["count()", "count_web_vitals(measurements.lcp, good)"]

        response = self.do_request(
            data={
                "project": self.project.id,
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "orderby": ["-count()"],
                "environment": "production",
                "query": query,
                "yAxis": yAxis,
                "field": [
                    "count()",
                    "count_web_vitals(measurements.lcp, good)",
                    "customtag1",
                    "customtag2",
                ],
                "topEvents": 5,
                "dataset": "metricsEnhanced",
                "useOnDemandMetrics": "true",
                "onDemandType": "dynamic_query",
            },
        )

        assert response.status_code == 200, response.content

        # (group key, aggregate, bucket-0 value, bucket-1 value); each group
        # only has data for its own aggregate, the other stays 0.
        groups = [
            ("foo,red", "count()", 0.0, 1488.0),
            ("foo,red", "count_web_vitals(measurements.lcp, good)", 0.0, 0.0),
            ("bar,blue", "count()", 0.0, 0.0),
            ("bar,blue", "count_web_vitals(measurements.lcp, good)", 0.0, 1440.0),
        ]
        assert len(response.data.keys()) == 2
        for group_count in groups:
            group, agg, row1, row2 = group_count
            row_data = response.data[group][agg]["data"][:2]
            assert [attrs for _, attrs in row_data] == [[{"count": row1}], [{"count": row2}]]

            assert response.data[group][agg]["meta"]["isMetricsExtractedData"]
            assert response.data[group]["isMetricsExtractedData"]

    def test_top_events_with_transaction_on_demand_and_no_environment(self):
        """Same as the previous test but without an environment filter in the request."""
        field = "count()"
        field_two = "count_web_vitals(measurements.lcp, good)"
        groupbys = ["customtag1", "customtag2"]
        query = "transaction.duration:>=100"
        spec = OnDemandMetricSpec(
            field=field, groupbys=groupbys, query=query, spec_type=MetricSpecType.DYNAMIC_QUERY
        )
        spec_two = OnDemandMetricSpec(
            field=field_two, groupbys=groupbys, query=query, spec_type=MetricSpecType.DYNAMIC_QUERY
        )

        for hour in range(0, 5):
            self.store_on_demand_metric(
                hour * 62 * 24,
                spec=spec,
                additional_tags={
                    "customtag1": "foo",
                    "customtag2": "red",
                    "environment": "production",
                },
                timestamp=self.day_ago + timedelta(hours=hour),
            )
            self.store_on_demand_metric(
                hour * 60 * 24,
                spec=spec_two,
                additional_tags={
                    "customtag1": "bar",
                    "customtag2": "blue",
                    "environment": "production",
                },
                timestamp=self.day_ago + timedelta(hours=hour),
            )

        yAxis = ["count()", "count_web_vitals(measurements.lcp, good)"]

        response = self.do_request(
            data={
                "project": self.project.id,
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "orderby": ["-count()"],
                "query": query,
                "yAxis": yAxis,
                "field": [
                    "count()",
                    "count_web_vitals(measurements.lcp, good)",
                    "customtag1",
                    "customtag2",
                ],
                "topEvents": 5,
                "dataset": "metricsEnhanced",
                "useOnDemandMetrics": "true",
                "onDemandType": "dynamic_query",
            },
        )

        assert response.status_code == 200, response.content

        groups = [
            ("foo,red", "count()", 0.0, 1488.0),
            ("foo,red", "count_web_vitals(measurements.lcp, good)", 0.0, 0.0),
            ("bar,blue", "count()", 0.0, 0.0),
            ("bar,blue", "count_web_vitals(measurements.lcp, good)", 0.0, 1440.0),
        ]
        assert len(response.data.keys()) == 2
        for group_count in groups:
            group, agg, row1, row2 = group_count
            row_data = response.data[group][agg]["data"][:2]
            assert [attrs for time, attrs in row_data] == [[{"count": row1}], [{"count": row2}]]

            assert response.data[group][agg]["meta"]["isMetricsExtractedData"]
            assert response.data[group]["isMetricsExtractedData"]

    def test_timeseries_on_demand_with_multiple_percentiles(self):
        """Two percentile specs in one timeseries query hash and resolve independently."""
        field = "p75(measurements.fcp)"
        field_two = "p75(measurements.lcp)"
        query = "transaction.duration:>=100"
        spec = OnDemandMetricSpec(field=field, query=query, spec_type=MetricSpecType.DYNAMIC_QUERY)
        spec_two = OnDemandMetricSpec(
            field=field_two, query=query, spec_type=MetricSpecType.DYNAMIC_QUERY
        )

        # Pin the hash inputs so an accidental spec-hashing change fails loudly.
        assert (
            spec._query_str_for_hash
            == "event.measurements.fcp.value;{'name': 'event.duration', 'op': 'gte', 'value': 100.0}"
        )
        assert (
            spec_two._query_str_for_hash
            == "event.measurements.lcp.value;{'name': 'event.duration', 'op': 'gte', 'value': 100.0}"
        )

        for count in range(0, 4):
            # Samples 0/100/200/300 and 0/200/400/600 — p75 is 225 and 450.
            self.store_on_demand_metric(
                count * 100,
                spec=spec,
                timestamp=self.day_ago + timedelta(hours=1),
            )
            self.store_on_demand_metric(
                count * 200.0,
                spec=spec_two,
                timestamp=self.day_ago + timedelta(hours=1),
            )

        yAxis = [field, field_two]

        response = self.do_request(
            data={
                "project": self.project.id,
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "orderby": [field],
                "query": query,
                "yAxis": yAxis,
                "dataset": "metricsEnhanced",
                "useOnDemandMetrics": "true",
                "onDemandType": "dynamic_query",
            },
        )

        assert response.status_code == 200, response.content

        assert response.data["p75(measurements.fcp)"]["meta"]["isMetricsExtractedData"]
        # NOTE(review): this line checks lcp while the surrounding pair is about
        # fcp — possibly meant `p75(measurements.fcp)`; confirm intent.
        assert response.data["p75(measurements.lcp)"]["meta"]["isMetricsData"]
        assert [attrs for time, attrs in response.data["p75(measurements.fcp)"]["data"]] == [
            [{"count": 0}],
            [{"count": 225.0}],
        ]

        assert response.data["p75(measurements.lcp)"]["meta"]["isMetricsExtractedData"]
        assert response.data["p75(measurements.lcp)"]["meta"]["isMetricsData"]
        assert [attrs for time, attrs in response.data["p75(measurements.lcp)"]["data"]] == [
            [{"count": 0}],
            [{"count": 450.0}],
        ]

    def test_apdex_issue(self):
        """apdex(300) over on-demand data: group_one is all 'tolerable' -> apdex 0.5."""
        field = "apdex(300)"
        groupbys = ["group_tag"]
        query = "transaction.duration:>=100"
        spec = OnDemandMetricSpec(
            field=field,
            groupbys=groupbys,
            query=query,
            spec_type=MetricSpecType.DYNAMIC_QUERY,
        )

        for hour in range(0, 5):
            self.store_on_demand_metric(
                1,
                spec=spec,
                additional_tags={
                    "group_tag": "group_one",
                    "environment": "production",
                    "satisfaction": "tolerable",
                },
                timestamp=self.day_ago + timedelta(hours=hour),
            )
            self.store_on_demand_metric(
                1,
                spec=spec,
                additional_tags={
                    "group_tag": "group_two",
                    "environment": "production",
                    "satisfaction": "satisfactory",
                },
                timestamp=self.day_ago + timedelta(hours=hour),
            )

        response = self.do_request(
            data={
                "dataset": "metricsEnhanced",
                "environment": "production",
                "excludeOther": 1,
                "field": [field, "group_tag"],
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "orderby": f"-{field}",
                "partial": 1,
                "project": self.project.id,
                "query": query,
                "topEvents": 5,
                "yAxis": field,
                "onDemandType": "dynamic_query",
                "useOnDemandMetrics": "true",
            },
        )

        assert response.status_code == 200, response.content
        assert response.data["group_one"]["meta"]["isMetricsExtractedData"] is True
        assert [attrs for time, attrs in response.data["group_one"]["data"]] == [
            [{"count": 0.5}],
            [{"count": 0.5}],
        ]

    def test_glob_http_referer_on_demand(self):
        """Glob filters on http.url / http.referer translate to glob conditions in the metric spec."""
        agg = "count()"
        network_id_tag = "networkId"
        url = "https://sentry.io"
        query = f'http.url:{url}/*/foo/bar/* http.referer:"{url}/*/bar/*" event.type:transaction'
        spec = OnDemandMetricSpec(
            field=agg,
            groupbys=[network_id_tag],
            query=query,
            spec_type=MetricSpecType.DYNAMIC_QUERY,
        )

        # Pin the exact Relay metric spec the query compiles to, including the
        # query hash and the glob conditions derived from the search filters.
        assert spec.to_metric_spec(self.project) == {
            "category": "transaction",
            "mri": "c:transactions/on_demand@none",
            "field": None,
            "tags": [
                {"key": "query_hash", "value": "ac241f56"},
                {"key": "networkId", "field": "event.tags.networkId"},
                {"key": "environment", "field": "event.environment"},
            ],
            "condition": {
                "op": "and",
                "inner": [
                    {
                        "op": "glob",
                        "name": "event.request.url",
                        "value": ["https://sentry.io/*/foo/bar/*"],
                    },
                    {
                        "op": "glob",
                        "name": "event.request.headers.Referer",
                        "value": ["https://sentry.io/*/bar/*"],
                    },
                ],
            },
        }

        for hour in range(0, 5):
            self.store_on_demand_metric(
                1,
                spec=spec,
                additional_tags={network_id_tag: "1234"},
                timestamp=self.day_ago + timedelta(hours=hour),
            )
            self.store_on_demand_metric(
                1,
                spec=spec,
                additional_tags={network_id_tag: "5678"},
                timestamp=self.day_ago + timedelta(hours=hour),
            )

        response = self.do_request(
            data={
                "dataset": "metricsEnhanced",
                "field": [network_id_tag, agg],
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=5)),
                "onDemandType": "dynamic_query",
                "orderby": f"-{agg}",
                "interval": "1d",
                "partial": 1,
                "query": query,
                "referrer": "api.dashboards.widget.bar-chart",
                "project": self.project.id,
                "topEvents": 2,
                "useOnDemandMetrics": "true",
                "yAxis": agg,
            },
        )

        assert response.status_code == 200, response.content
        for datum in response.data.values():
            assert datum["meta"] == {
                "dataset": "metricsEnhanced",
                "datasetReason": "unchanged",
                "fields": {},
                "isMetricsData": False,
                "isMetricsExtractedData": True,
                "tips": {},
                "units": {},
            }

    def _test_is_metrics_extracted_data(
        self, params: dict[str, Any], expected_on_demand_query: bool, dataset: str
    ) -> Any:
        """Store one on-demand metric, issue ``params``, and assert the response
        ``meta`` reports ``isMetricsExtractedData == expected_on_demand_query``
        and the given ``dataset``. Returns the response ``meta`` dict.
        """
        spec = OnDemandMetricSpec(
            field="count()",
            query="transaction.duration:>1s",
            spec_type=MetricSpecType.DYNAMIC_QUERY,
        )

        self.store_on_demand_metric(1, spec=spec)
        response = self.do_request(params)

        assert response.status_code == 200, response.content
        meta = response.data["meta"]
        # This is the main thing we want to test for
        assert meta.get("isMetricsExtractedData", False) is expected_on_demand_query
        assert meta["dataset"] == dataset

        return meta

    def test_is_metrics_extracted_data_is_included(self):
        """metricsEnhanced + useOnDemandMetrics marks the response as extracted data."""
        self._test_is_metrics_extracted_data(
            {
                "dataset": "metricsEnhanced",
                "query": "transaction.duration:>=91",
                "useOnDemandMetrics": "true",
                "yAxis": "count()",
            },
            expected_on_demand_query=True,
            dataset="metricsEnhanced",
        )

    def test_group_by_transaction(self):
        """Grouping an on-demand count() by the transaction tag yields per-transaction series."""
        field = "count()"
        groupbys = ["transaction"]
        query = "transaction.duration:>=100"
        spec = OnDemandMetricSpec(
            field=field,
            groupbys=groupbys,
            query=query,
            spec_type=MetricSpecType.DYNAMIC_QUERY,
        )

        for hour in range(0, 2):
            self.store_on_demand_metric(
                (hour + 1) * 5,
                spec=spec,
                additional_tags={
                    "transaction": "/performance",
                    "environment": "production",
                },
                timestamp=self.day_ago + timedelta(hours=hour),
            )

        response = self.do_request(
            data={
                "dataset": "metricsEnhanced",
                "environment": "production",
                "excludeOther": 1,
                "field": [field, "transaction"],
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "orderby": f"-{field}",
                "partial": 1,
                "project": self.project.id,
                "query": query,
                "topEvents": 5,
                "yAxis": field,
                "onDemandType": "dynamic_query",
                "useOnDemandMetrics": "true",
            },
        )

        assert response.status_code == 200, response.content
        assert response.data["/performance"]["meta"]["isMetricsExtractedData"] is True
        assert [attrs for time, attrs in response.data["/performance"]["data"]] == [
            [{"count": 5.0}],
            [{"count": 10.0}],
        ]

    def _setup_orderby_tests(self, query):
        """Seed count() and p95() on-demand metrics for networkId 1234 and 5678.

        Per hour (x5): networkId 1234 gets count 1 and p95 sample 100;
        networkId 5678 gets count 2 and p95 sample 200 — so orderby count()
        ranks 1234 (total 5) below 5678 (total 10).
        """
        count_spec = OnDemandMetricSpec(
            field="count()",
            groupbys=["networkId"],
            query=query,
            spec_type=MetricSpecType.DYNAMIC_QUERY,
        )
        p95_spec = OnDemandMetricSpec(
            field="p95(transaction.duration)",
            groupbys=["networkId"],
            query=query,
            spec_type=MetricSpecType.DYNAMIC_QUERY,
        )

        for hour in range(0, 5):
            self.store_on_demand_metric(
                1,
                spec=count_spec,
                additional_tags={"networkId": "1234"},
                timestamp=self.day_ago + timedelta(hours=hour),
            )
            self.store_on_demand_metric(
                100,
                spec=p95_spec,
                additional_tags={"networkId": "1234"},
                timestamp=self.day_ago + timedelta(hours=hour),
            )
            self.store_on_demand_metric(
                200,
                spec=p95_spec,
                additional_tags={"networkId": "5678"},
                timestamp=self.day_ago + timedelta(hours=hour),
            )
            # Store twice as many 5678 so orderby puts it later
            self.store_on_demand_metric(
                2,
                spec=count_spec,
                additional_tags={"networkId": "5678"},
                timestamp=self.day_ago + timedelta(hours=hour),
            )

    def test_order_by_aggregate_top_events_desc(self):
        """orderby -count() puts the bigger group (5678, total 10) first."""
        url = "https://sentry.io"
        query = f'http.url:{url}/*/foo/bar/* http.referer:"{url}/*/bar/*" event.type:transaction'
        self._setup_orderby_tests(query)
        response = self.do_request(
            data={
                "dataset": "metricsEnhanced",
                "field": ["networkId", "count()"],
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=5)),
                "onDemandType": "dynamic_query",
                "orderby": "-count()",
                "interval": "1d",
                "partial": 1,
                "query": query,
                "referrer": "api.dashboards.widget.bar-chart",
                "project": self.project.id,
                "topEvents": 2,
                "useOnDemandMetrics": "true",
                "yAxis": "count()",
            },
        )

        assert response.status_code == 200, response.content
        # Two top groups plus the "Other" series.
        assert len(response.data) == 3
        data1 = response.data["5678"]
        assert data1["order"] == 0
        assert data1["data"][0][1][0]["count"] == 10
        data2 = response.data["1234"]
        assert data2["order"] == 1
        assert data2["data"][0][1][0]["count"] == 5

        for datum in response.data.values():
            assert datum["meta"] == {
                "dataset": "metricsEnhanced",
                "datasetReason": "unchanged",
                "fields": {},
                "isMetricsData": False,
                "isMetricsExtractedData": True,
                "tips": {},
                "units": {},
            }

    def test_order_by_aggregate_top_events_asc(self):
        """orderby count() (ascending) puts the smaller group (1234, total 5) first."""
        url = "https://sentry.io"
        query = f'http.url:{url}/*/foo/bar/* http.referer:"{url}/*/bar/*" event.type:transaction'
        self._setup_orderby_tests(query)
        response = self.do_request(
            data={
                "dataset": "metricsEnhanced",
                "field": ["networkId", "count()"],
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=5)),
                "onDemandType": "dynamic_query",
                "orderby": "count()",
                "interval": "1d",
                "partial": 1,
                "query": query,
                "referrer": "api.dashboards.widget.bar-chart",
                "project": self.project.id,
                "topEvents": 2,
                "useOnDemandMetrics": "true",
                "yAxis": "count()",
            },
        )

        assert response.status_code == 200, response.content
        assert len(response.data) == 3
        data1 = response.data["1234"]
        assert data1["order"] == 0
        assert data1["data"][0][1][0]["count"] == 5
        data2 = response.data["5678"]
        assert data2["order"] == 1
        assert data2["data"][0][1][0]["count"] == 10

        for datum in response.data.values():
            assert datum["meta"] == {
                "dataset": "metricsEnhanced",
                "datasetReason": "unchanged",
                "fields": {},
                "isMetricsData": False,
                "isMetricsExtractedData": True,
                "tips": {},
                "units": {},
            }

    def test_order_by_aggregate_top_events_graph_different_aggregate(self):
        """Ordering by count() while graphing p95(): order follows count, values are p95."""
        url = "https://sentry.io"
        query = f'http.url:{url}/*/foo/bar/* http.referer:"{url}/*/bar/*" event.type:transaction'
        self._setup_orderby_tests(query)
        response = self.do_request(
            data={
                "dataset": "metricsEnhanced",
                "field": ["networkId", "count()"],
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=5)),
                "onDemandType": "dynamic_query",
                "orderby": "count()",
                "interval": "1d",
                "partial": 1,
                "query": query,
                "referrer": "api.dashboards.widget.bar-chart",
                "project": self.project.id,
                "topEvents": 2,
                "useOnDemandMetrics": "true",
                "yAxis": "p95(transaction.duration)",
            },
        )

        assert response.status_code == 200, response.content
        assert len(response.data) == 3
        data1 = response.data["1234"]
        assert data1["order"] == 0
        assert data1["data"][0][1][0]["count"] == 100
        data2 = response.data["5678"]
        assert data2["order"] == 1
        assert data2["data"][0][1][0]["count"] == 200

        for datum in response.data.values():
            assert datum["meta"] == {
                "dataset": "metricsEnhanced",
                "datasetReason": "unchanged",
                "fields": {},
                "isMetricsData": False,
                "isMetricsExtractedData": True,
                "tips": {},
                "units": {},
            }

    def test_cannot_order_by_tag(self):
        """Ordering top-events by a plain tag (networkId) is rejected with a 400."""
        url = "https://sentry.io"
        query = f'http.url:{url}/*/foo/bar/* http.referer:"{url}/*/bar/*" event.type:transaction'
        self._setup_orderby_tests(query)
        response = self.do_request(
            data={
                "dataset": "metrics",
                "field": ["networkId", "count()"],
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=5)),
                "onDemandType": "dynamic_query",
                "orderby": "-networkId",
                "interval": "1d",
                "partial": 1,
                "query": query,
                "referrer": "api.dashboards.widget.bar-chart",
                "project": self.project.id,
                "topEvents": 2,
                "useOnDemandMetrics": "true",
                "yAxis": "count()",
            },
        )

        assert response.status_code == 400, response.content

    def test_order_by_two_aggregates(self):
        """Ordering by two aggregates at once is rejected with a 400."""
        url = "https://sentry.io"
        query = f'http.url:{url}/*/foo/bar/* http.referer:"{url}/*/bar/*" event.type:transaction'
        self._setup_orderby_tests(query)
        response = self.do_request(
            data={
                "dataset": "metrics",
                "field": ["networkId", "count()", "p95(transaction.duration)"],
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=5)),
                "onDemandType": "dynamic_query",
                "orderby": ["count()", "p95(transaction.duration)"],
                "interval": "1d",
                "partial": 1,
                "query": query,
                "referrer": "api.dashboards.widget.bar-chart",
                "project": self.project.id,
                "topEvents": 2,
                "useOnDemandMetrics": "true",
                "yAxis": "p95(transaction.duration)",
            },
        )

        assert response.status_code == 400, response.content
|