- from datetime import timedelta
- import pytest
- from django.urls import reverse
- from sentry.search.events import constants
- from sentry.search.utils import map_device_class_level
- from sentry.testutils.cases import MetricsEnhancedPerformanceTestCase
- from sentry.testutils.helpers.datetime import before_now
- pytestmark = pytest.mark.sentry_metrics
- SPAN_DURATION_MRI = "d:spans/duration@millisecond"
- class OrganizationEventsMetricsEnhancedPerformanceEndpointTest(MetricsEnhancedPerformanceTestCase):
- viewname = "sentry-api-0-organization-events"
- # Transaction names registered with the metrics indexer for the tests below
- METRIC_STRINGS = [
- "foo_transaction",
- "bar_transaction",
- ]
- def setUp(self):
- super().setUp()
- self.min_ago = before_now(minutes=1)
- self.six_min_ago = before_now(minutes=6)
- self.two_min_ago = before_now(minutes=2)  # used by the regression_score and *_by_timestamp tests
- self.three_days_ago = before_now(days=3)
- self.features = {
- "organizations:starfish-view": True,
- }
- def do_request(self, query, features=None):
- if features is None:
- features = {"organizations:discover-basic": True}
- features.update(self.features)
- self.login_as(user=self.user)
- url = reverse(
- self.viewname,
- kwargs={"organization_id_or_slug": self.organization.slug},
- )
- with self.feature(features):
- return self.client.get(url, query, format="json")
- def test_p50_with_no_data(self):
- response = self.do_request(
- {
- "field": ["p50()"],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["p50()"] == 0
- assert meta["dataset"] == "spansMetrics"
- @pytest.mark.querybuilder
- def test_count(self):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.three_days_ago,
- )
- response = self.do_request(
- {
- "field": ["count()"],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "7d",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["count()"] == 1
- assert meta["dataset"] == "spansMetrics"
- def test_count_if(self):
- self.store_span_metric(
- 2,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.three_days_ago,
- tags={"release": "1.0.0"},
- )
- self.store_span_metric(
- 2,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.three_days_ago,
- tags={"release": "1.0.0"},
- )
- self.store_span_metric(
- 2,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.three_days_ago,
- tags={"release": "2.0.0"},
- )
- fieldRelease1 = "count_if(release,1.0.0)"
- fieldRelease2 = "count_if(release,2.0.0)"
- response = self.do_request(
- {
- "field": [fieldRelease1, fieldRelease2],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "7d",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0][field_release_1] == 2
- assert data[0][field_release_2] == 1
- assert meta["dataset"] == "spansMetrics"
- def test_division_if(self):
- self.store_span_metric(
- {
- "min": 1,
- "max": 1,
- "sum": 1,
- "count": 1,
- "last": 1,
- },
- entity="metrics_gauges",
- metric="mobile.slow_frames",
- timestamp=self.three_days_ago,
- tags={"release": "1.0.0"},
- )
- self.store_span_metric(
- {
- "min": 1,
- "max": 1,
- "sum": 15,
- "count": 15,
- "last": 1,
- },
- entity="metrics_gauges",
- metric="mobile.total_frames",
- timestamp=self.three_days_ago,
- tags={"release": "1.0.0"},
- )
- self.store_span_metric(
- {
- "min": 1,
- "max": 1,
- "sum": 2,
- "count": 2,
- "last": 1,
- },
- entity="metrics_gauges",
- metric="mobile.frozen_frames",
- timestamp=self.three_days_ago,
- tags={"release": "2.0.0"},
- )
- self.store_span_metric(
- {
- "min": 1,
- "max": 1,
- "sum": 10,
- "count": 10,
- "last": 1,
- },
- entity="metrics_gauges",
- metric="mobile.total_frames",
- timestamp=self.three_days_ago,
- tags={"release": "2.0.0"},
- )
- fieldRelease1 = "division_if(mobile.slow_frames,mobile.total_frames,release,1.0.0)"
- fieldRelease2 = "division_if(mobile.frozen_frames,mobile.total_frames,release,2.0.0)"
- response = self.do_request(
- {
- "field": [fieldRelease1, fieldRelease2],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "7d",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
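- # division_if divides the summed gauge values per release: 1 slow / 15 total frames for 1.0.0, 2 frozen / 10 total frames for 2.0.0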
- assert data[0][field_release_1] == 1 / 15
- assert data[0][field_release_2] == 2 / 10
- assert meta["dataset"] == "spansMetrics"
- def test_count_unique(self):
- self.store_span_metric(
- 1,
- "user",
- timestamp=self.min_ago,
- )
- self.store_span_metric(
- 2,
- "user",
- timestamp=self.min_ago,
- )
- response = self.do_request(
- {
- "field": ["count_unique(user)"],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["count_unique(user)"] == 2
- assert meta["dataset"] == "spansMetrics"
- def test_sum(self):
- self.store_span_metric(
- 321,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- )
- self.store_span_metric(
- 99,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- )
- response = self.do_request(
- {
- "field": ["sum(span.self_time)"],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["sum(span.self_time)"] == 420
- assert meta["dataset"] == "spansMetrics"
- def test_percentile(self):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- )
- response = self.do_request(
- {
- "field": ["percentile(span.self_time, 0.95)"],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["percentile(span.self_time, 0.95)"] == 1
- assert meta["dataset"] == "spansMetrics"
- def test_fixed_percentile_functions(self):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- )
- for function in ["p50()", "p75()", "p95()", "p99()", "p100()"]:
- response = self.do_request(
- {
- "field": [function],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0][function] == 1, function
- assert meta["dataset"] == "spansMetrics", function
- assert meta["fields"][function] == "duration", function
- def test_fixed_percentile_functions_with_duration(self):
- self.store_span_metric(
- 1,
- internal_metric=constants.SPAN_METRICS_MAP["span.duration"],
- timestamp=self.min_ago,
- )
- for function in [
- "p50(span.duration)",
- "p75(span.duration)",
- "p95(span.duration)",
- "p99(span.duration)",
- "p100(span.duration)",
- ]:
- response = self.do_request(
- {
- "field": [function],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1, function
- assert data[0][function] == 1, function
- assert meta["dataset"] == "spansMetrics", function
- assert meta["fields"][function] == "duration", function
- def test_avg(self):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- )
- response = self.do_request(
- {
- "field": ["avg()"],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["avg()"] == 1
- assert meta["dataset"] == "spansMetrics"
- def test_eps(self):
- for _ in range(6):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- )
- response = self.do_request(
- {
- "field": ["eps()", "sps()"],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "10m",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
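- # 6 spans over the 10 minute statsPeriod: 6 / 600s = 0.01 per second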
- assert data[0]["eps()"] == 0.01
- assert data[0]["sps()"] == 0.01
- assert meta["fields"]["eps()"] == "rate"
- assert meta["fields"]["sps()"] == "rate"
- assert meta["units"]["eps()"] == "1/second"
- assert meta["units"]["sps()"] == "1/second"
- assert meta["dataset"] == "spansMetrics"
- def test_epm(self):
- for _ in range(6):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- )
- response = self.do_request(
- {
- "field": ["epm()", "spm()"],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "10m",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
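- # 6 spans over the 10 minute statsPeriod: 6 / 10min = 0.6 per minute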
- assert data[0]["epm()"] == 0.6
- assert data[0]["spm()"] == 0.6
- assert meta["fields"]["epm()"] == "rate"
- assert meta["fields"]["spm()"] == "rate"
- assert meta["units"]["epm()"] == "1/minute"
- assert meta["units"]["spm()"] == "1/minute"
- assert meta["dataset"] == "spansMetrics"
- def test_time_spent_percentage(self):
- for _ in range(4):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- tags={"transaction": "foo_transaction"},
- timestamp=self.min_ago,
- )
- self.store_span_metric(
- 1,
- tags={"transaction": "foo_transaction"},
- timestamp=self.min_ago,
- )
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- tags={"transaction": "bar_transaction"},
- timestamp=self.min_ago,
- )
- self.store_span_metric(
- 1,
- tags={"transaction": "bar_transaction"},
- timestamp=self.min_ago,
- )
- response = self.do_request(
- {
- "field": ["transaction", "time_spent_percentage()"],
- "query": "",
- "orderby": ["-time_spent_percentage()"],
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "10m",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 2
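- # foo_transaction accounts for 4 of the 5 self_time spans (0.8), bar_transaction for 1 of 5 (0.2)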
- assert data[0]["time_spent_percentage()"] == 0.8
- assert data[0]["transaction"] == "foo_transaction"
- assert data[1]["time_spent_percentage()"] == 0.2
- assert data[1]["transaction"] == "bar_transaction"
- assert meta["dataset"] == "spansMetrics"
- def test_time_spent_percentage_local(self):
- response = self.do_request(
- {
- "field": ["time_spent_percentage(local)"],
- "query": "",
- "orderby": ["-time_spent_percentage(local)"],
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "10m",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["time_spent_percentage(local)"] is None
- assert meta["dataset"] == "spansMetrics"
- def test_time_spent_percentage_on_span_duration(self):
- for _ in range(4):
- self.store_span_metric(
- 1,
- internal_metric=constants.SPAN_METRICS_MAP["span.duration"],
- tags={"transaction": "foo_transaction"},
- timestamp=self.min_ago,
- )
- self.store_span_metric(
- 1,
- internal_metric=constants.SPAN_METRICS_MAP["span.duration"],
- tags={"transaction": "bar_transaction"},
- timestamp=self.min_ago,
- )
- response = self.do_request(
- {
- "field": ["transaction", "time_spent_percentage(app,span.duration)"],
- "query": "",
- "orderby": ["-time_spent_percentage(app,span.duration)"],
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "10m",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 2
- assert data[0]["time_spent_percentage(app,span.duration)"] == 0.8
- assert data[0]["transaction"] == "foo_transaction"
- assert data[1]["time_spent_percentage(app,span.duration)"] == 0.2
- assert data[1]["transaction"] == "bar_transaction"
- assert meta["dataset"] == "spansMetrics"
- def test_http_error_rate_and_count(self):
- for _ in range(4):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- tags={"span.status_code": "500"},
- timestamp=self.min_ago,
- )
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- tags={"span.status_code": "200"},
- timestamp=self.min_ago,
- )
- response = self.do_request(
- {
- "field": ["http_error_count()", "http_error_rate()"],
- "query": "",
- "orderby": ["-http_error_rate()"],
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "10m",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
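- # 4 of the 5 spans have a 5xx status code, so the error rate is 0.8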
- assert data[0]["http_error_rate()"] == 0.8
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["http_error_count()"] == "integer"
- assert meta["fields"]["http_error_rate()"] == "percentage"
- def test_ttid_rate_and_count(self):
- for _ in range(8):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- tags={"ttid": "ttid", "ttfd": "ttfd"},
- timestamp=self.min_ago,
- )
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- tags={"ttfd": "ttfd", "ttid": ""},
- timestamp=self.min_ago,
- )
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- tags={"ttfd": "", "ttid": ""},
- timestamp=self.min_ago,
- )
- response = self.do_request(
- {
- "field": [
- "ttid_contribution_rate()",
- "ttid_count()",
- "ttfd_contribution_rate()",
- "ttfd_count()",
- ],
- "query": "",
- "orderby": ["-ttid_contribution_rate()"],
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "10m",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
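- # 8 of the 10 spans have a non-empty ttid tag (0.8); 9 of 10 have a non-empty ttfd tag (0.9)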
- assert data[0]["ttid_contribution_rate()"] == 0.8
- assert data[0]["ttid_count()"] == 8
- assert data[0]["ttfd_contribution_rate()"] == 0.9
- assert data[0]["ttfd_count()"] == 9
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["ttid_count()"] == "integer"
- assert meta["fields"]["ttid_contribution_rate()"] == "percentage"
- assert meta["fields"]["ttfd_count()"] == "integer"
- assert meta["fields"]["ttfd_contribution_rate()"] == "percentage"
- def test_main_thread_count(self):
- for _ in range(8):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- tags={"span.main_thread": "true"},
- timestamp=self.min_ago,
- )
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- tags={},
- timestamp=self.min_ago,
- )
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- tags={"span.main_thread": ""},
- timestamp=self.min_ago,
- )
- response = self.do_request(
- {
- "field": [
- "main_thread_count()",
- ],
- "query": "",
- "orderby": ["-main_thread_count()"],
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "10m",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
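- # Only the 8 spans tagged span.main_thread:true are counted; missing or empty tags are excluded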
- assert data[0]["main_thread_count()"] == 8
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["main_thread_count()"] == "integer"
- def test_use_self_time_light(self):
- self.store_span_metric(
- 100,
- internal_metric=constants.SELF_TIME_LIGHT,
- tags={"transaction": "foo_transaction"},
- timestamp=self.min_ago,
- )
- response = self.do_request(
- {
- "field": ["p50(span.self_time)"],
- # Should be 0: filtering on transaction prevents using the self_time_light metric, and no other data was stored
- "query": "transaction:foo_transaction",
- "orderby": ["-p50(span.self_time)"],
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "10m",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["p50(span.self_time)"] == 0
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["p50(span.self_time)"] == "duration"
- response = self.do_request(
- {
- # Should return no rows since selecting the transaction column also prevents using the self_time_light metric
- "field": ["transaction", "p50(span.self_time)"],
- "query": "",
- "orderby": ["-p50(span.self_time)"],
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "10m",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 0
- response = self.do_request(
- {
- "field": ["p50(span.self_time)"],
- # Should be 100 since it's not filtering on transaction
- "query": "",
- "orderby": ["-p50(span.self_time)"],
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "10m",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["p50(span.self_time)"] == 100
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["p50(span.self_time)"] == "duration"
- def test_span_module(self):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.six_min_ago,
- tags={"span.category": "http", "span.description": "f"},
- )
- self.store_span_metric(
- 3,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.six_min_ago,
- tags={"span.category": "db", "span.description": "e"},
- )
- self.store_span_metric(
- 5,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.six_min_ago,
- tags={"span.category": "foobar", "span.description": "d"},
- )
- self.store_span_metric(
- 7,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.six_min_ago,
- tags={"span.category": "cache", "span.description": "c"},
- )
- self.store_span_metric(
- 9,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.six_min_ago,
- tags={"span.category": "db", "span.op": "db.redis", "span.description": "b"},
- )
- self.store_span_metric(
- 11,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.six_min_ago,
- tags={"span.category": "db", "span.op": "db.sql.room", "span.description": "a"},
- )
- response = self.do_request(
- {
- "field": ["span.module", "span.description", "p50(span.self_time)"],
- "query": "",
- "orderby": ["-p50(span.self_time)"],
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "10m",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 6
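- # span.module is derived from span.category/span.op: db.redis maps to cache, while db.sql.room and unrecognized categories map to other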
- assert data[0]["p50(span.self_time)"] == 11
- assert data[0]["span.module"] == "other"
- assert data[0]["span.description"] == "a"
- assert data[1]["p50(span.self_time)"] == 9
- assert data[1]["span.module"] == "cache"
- assert data[1]["span.description"] == "b"
- assert data[2]["p50(span.self_time)"] == 7
- assert data[2]["span.module"] == "cache"
- assert data[2]["span.description"] == "c"
- assert data[3]["p50(span.self_time)"] == 5
- assert data[3]["span.module"] == "other"
- assert data[3]["span.description"] == "d"
- assert data[4]["p50(span.self_time)"] == 3
- assert data[4]["span.module"] == "db"
- assert data[4]["span.description"] == "e"
- assert data[5]["p50(span.self_time)"] == 1
- assert data[5]["span.module"] == "http"
- assert data[5]["span.description"] == "f"
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["p50(span.self_time)"] == "duration"
- def test_tag_search(self):
- self.store_span_metric(
- 321,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.description": "foo"},
- )
- self.store_span_metric(
- 99,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.description": "bar"},
- )
- response = self.do_request(
- {
- "field": ["sum(span.self_time)"],
- "query": "span.description:bar",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["sum(span.self_time)"] == 99
- assert meta["dataset"] == "spansMetrics"
- def test_free_text_search(self):
- self.store_span_metric(
- 321,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.description": "foo"},
- )
- self.store_span_metric(
- 99,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.description": "bar"},
- )
- response = self.do_request(
- {
- "field": ["sum(span.self_time)"],
- "query": "foo",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["sum(span.self_time)"] == 321
- assert meta["dataset"] == "spansMetrics"
- def test_avg_compare(self):
- self.store_span_metric(
- 100,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"release": "foo"},
- )
- self.store_span_metric(
- 10,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"release": "bar"},
- )
- for function_name in [
- "avg_compare(span.self_time, release, foo, bar)",
- 'avg_compare(span.self_time, release, "foo", "bar")',
- ]:
- response = self.do_request(
- {
- "field": [function_name],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
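- # avg dropped from 100 (release foo) to 10 (release bar): (10 - 100) / 100 = -0.9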
- assert data[0][function_name] == -0.9
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"][function_name] == "percent_change"
- def test_avg_compare_invalid_column(self):
- response = self.do_request(
- {
- "field": ["avg_compare(span.self_time, transaction, foo, bar)"],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 400, response.content
- def test_span_domain_array(self):
- self.store_span_metric(
- 321,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.domain": ",sentry_table1,"},
- )
- self.store_span_metric(
- 21,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.domain": ",sentry_table1,sentry_table2,"},
- )
- response = self.do_request(
- {
- "field": ["span.domain", "p75(span.self_time)"],
- "query": "",
- "project": self.project.id,
- "orderby": ["-p75(span.self_time)"],
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 2
- assert data[0]["span.domain"] == ["sentry_table1"]
- assert data[1]["span.domain"] == ["sentry_table1", "sentry_table2"]
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["span.domain"] == "array"
- def test_span_domain_array_filter(self):
- self.store_span_metric(
- 321,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.domain": ",sentry_table1,"},
- )
- self.store_span_metric(
- 21,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.domain": ",sentry_table1,sentry_table2,"},
- )
- response = self.do_request(
- {
- "field": ["span.domain", "p75(span.self_time)"],
- "query": "span.domain:sentry_table2",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["span.domain"] == ["sentry_table1", "sentry_table2"]
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["span.domain"] == "array"
- def test_span_domain_array_filter_wildcard(self):
- self.store_span_metric(
- 321,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.domain": ",sentry_table1,"},
- )
- self.store_span_metric(
- 21,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.domain": ",sentry_table1,sentry_table2,"},
- )
- for query in ["sentry*2", "*table2", "sentry_table2*"]:
- response = self.do_request(
- {
- "field": ["span.domain", "p75(span.self_time)"],
- "query": f"span.domain:{query}",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1, query
- assert data[0]["span.domain"] == ["sentry_table1", "sentry_table2"], query
- assert meta["dataset"] == "spansMetrics", query
- assert meta["fields"]["span.domain"] == "array"
- def test_span_domain_array_has_filter(self):
- self.store_span_metric(
- 321,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.domain": ""},
- )
- self.store_span_metric(
- 21,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.domain": ",sentry_table1,sentry_table2,"},
- )
- response = self.do_request(
- {
- "field": ["span.domain", "p75(span.self_time)"],
- "query": "has:span.domain",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["span.domain"] == ["sentry_table1", "sentry_table2"]
- assert meta["dataset"] == "spansMetrics"
- response = self.do_request(
- {
- "field": ["span.domain", "p75(span.self_time)"],
- "query": "!has:span.domain",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["span.domain"] == "array"
- def test_unique_values_span_domain(self):
- self.store_span_metric(
- 321,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.domain": ",sentry_table1,"},
- )
- self.store_span_metric(
- 21,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.domain": ",sentry_table2,sentry_table3,"},
- )
- response = self.do_request(
- {
- "field": ["unique.span_domains", "count()"],
- "query": "",
- "orderby": "unique.span_domains",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 3
- assert data[0]["unique.span_domains"] == "sentry_table1"
- assert data[1]["unique.span_domains"] == "sentry_table2"
- assert data[2]["unique.span_domains"] == "sentry_table3"
- assert meta["fields"]["unique.span_domains"] == "string"
- def test_unique_values_span_domain_with_filter(self):
- self.store_span_metric(
- 321,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.domain": ",sentry_tible1,"},
- )
- self.store_span_metric(
- 21,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.domain": ",sentry_table2,sentry_table3,"},
- )
- response = self.do_request(
- {
- "field": ["unique.span_domains", "count()"],
- "query": "span.domain:sentry_tab*",
- "orderby": "unique.span_domains",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 2
- assert data[0]["unique.span_domains"] == "sentry_table2"
- assert data[1]["unique.span_domains"] == "sentry_table3"
- assert meta["fields"]["unique.span_domains"] == "string"
- def test_avg_if(self):
- self.store_span_metric(
- 100,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"release": "foo"},
- )
- self.store_span_metric(
- 200,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"release": "foo"},
- )
- self.store_span_metric(
- 10,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"release": "bar"},
- )
- self.store_span_metric(
- 300,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.op": "queue.process"},
- )
- response = self.do_request(
- {
- "field": [
- "avg_if(span.self_time, release, foo)",
- "avg_if(span.self_time, span.op, queue.process)",
- ],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
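- # avg of 100 and 200 for release foo is 150; the single queue.process span averages to 300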
- assert data[0]["avg_if(span.self_time, release, foo)"] == 150
- assert data[0]["avg_if(span.self_time, span.op, queue.process)"] == 300
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["avg_if(span.self_time, release, foo)"] == "duration"
- assert meta["fields"]["avg_if(span.self_time, span.op, queue.process)"] == "duration"
- def test_device_class(self):
- self.store_span_metric(
- 123,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"device.class": "1"},
- )
- self.store_span_metric(
- 678,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"device.class": "2"},
- )
- self.store_span_metric(
- 999,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"device.class": ""},
- )
- response = self.do_request(
- {
- "field": ["device.class", "p95()"],
- "query": "",
- "orderby": "p95()",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 3
- # Need to actually check the dict since the level for 1 isn't guaranteed to stay `low` or `medium`
- assert data[0]["device.class"] == map_device_class_level("1")
- assert data[1]["device.class"] == map_device_class_level("2")
- assert data[2]["device.class"] == "Unknown"
- assert meta["fields"]["device.class"] == "string"
- def test_device_class_filter(self):
- self.store_span_metric(
- 123,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"device.class": "1"},
- )
- # Need to actually check the dict since the level for 1 isn't guaranteed to stay `low`
- level = map_device_class_level("1")
- response = self.do_request(
- {
- "field": ["device.class", "count()"],
- "query": f"device.class:{level}",
- "orderby": "count()",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["device.class"] == level
- assert meta["fields"]["device.class"] == "string"
- def test_device_class_filter_unknown(self):
- self.store_span_metric(
- 123,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"device.class": ""},
- )
- response = self.do_request(
- {
- "field": ["device.class", "count()"],
- "query": "device.class:Unknown",
- "orderby": "count()",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["device.class"] == "Unknown"
- assert meta["fields"]["device.class"] == "string"
- def test_cache_hit_rate(self):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"cache.hit": "true"},
- )
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"cache.hit": "false"},
- )
- response = self.do_request(
- {
- "field": ["cache_hit_rate()"],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
- assert data[0]["cache_hit_rate()"] == 0.5
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["cache_hit_rate()"] == "percentage"
- def test_cache_miss_rate(self):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"cache.hit": "true"},
- )
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"cache.hit": "false"},
- )
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"cache.hit": "false"},
- )
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"cache.hit": "false"},
- )
- response = self.do_request(
- {
- "field": ["cache_miss_rate()"],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- meta = response.data["meta"]
- assert len(data) == 1
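- # 3 of the 4 spans are cache misses (cache.hit:false), so the miss rate is 0.75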
- assert data[0]["cache_miss_rate()"] == 0.75
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["cache_miss_rate()"] == "percentage"
- def test_http_response_rate(self):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.status_code": "200"},
- )
- self.store_span_metric(
- 3,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.status_code": "301"},
- )
- self.store_span_metric(
- 3,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.status_code": "404"},
- )
- self.store_span_metric(
- 4,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.status_code": "503"},
- )
- self.store_span_metric(
- 5,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"span.status_code": "501"},
- )
- response = self.do_request(
- {
- "field": [
- "http_response_rate(200)", # By exact code
- "http_response_rate(3)", # By code class
- "http_response_rate(4)",
- "http_response_rate(5)",
- ],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
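- # Of the 5 spans: one 200 (0.2), one 3xx (0.2), one 4xx (0.2), and two 5xx (0.4)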
- assert data[0]["http_response_rate(200)"] == 0.2
- assert data[0]["http_response_rate(3)"] == 0.2
- assert data[0]["http_response_rate(4)"] == 0.2
- assert data[0]["http_response_rate(5)"] == 0.4
- meta = response.data["meta"]
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["http_response_rate(200)"] == "percentage"
- def test_regression_score_regression(self):
- # This span increases in duration
- self.store_span_metric(
- 1,
- internal_metric=SPAN_DURATION_MRI,
- timestamp=self.six_min_ago,
- tags={"transaction": "/api/0/projects/", "span.description": "Regressed Span"},
- project=self.project.id,
- )
- self.store_span_metric(
- 100,
- internal_metric=SPAN_DURATION_MRI,
- timestamp=self.min_ago,
- tags={"transaction": "/api/0/projects/", "span.description": "Regressed Span"},
- project=self.project.id,
- )
- # This span stays the same
- self.store_span_metric(
- 1,
- internal_metric=SPAN_DURATION_MRI,
- timestamp=self.three_days_ago,
- tags={"transaction": "/api/0/projects/", "span.description": "Non-regressed"},
- project=self.project.id,
- )
- self.store_span_metric(
- 1,
- internal_metric=SPAN_DURATION_MRI,
- timestamp=self.min_ago,
- tags={"transaction": "/api/0/projects/", "span.description": "Non-regressed"},
- project=self.project.id,
- )
- response = self.do_request(
- {
- "field": [
- "span.description",
- f"regression_score(span.duration,{int(self.two_min_ago.timestamp())})",
- ],
- "query": "transaction:/api/0/projects/",
- "dataset": "spansMetrics",
- "orderby": [
- f"-regression_score(span.duration,{int(self.two_min_ago.timestamp())})"
- ],
- "start": (self.six_min_ago - timedelta(minutes=1)).isoformat(),
- "end": before_now(minutes=0),
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- assert [row["span.description"] for row in data] == ["Regressed Span", "Non-regressed"]
- def test_regression_score_added_span(self):
- # This span only exists after the breakpoint
- self.store_span_metric(
- 100,
- internal_metric=SPAN_DURATION_MRI,
- timestamp=self.min_ago,
- tags={"transaction": "/api/0/projects/", "span.description": "Added span"},
- project=self.project.id,
- )
- # This span stays the same
- self.store_span_metric(
- 1,
- internal_metric=SPAN_DURATION_MRI,
- timestamp=self.three_days_ago,
- tags={"transaction": "/api/0/projects/", "span.description": "Non-regressed"},
- project=self.project.id,
- )
- self.store_span_metric(
- 1,
- internal_metric=SPAN_DURATION_MRI,
- timestamp=self.min_ago,
- tags={"transaction": "/api/0/projects/", "span.description": "Non-regressed"},
- project=self.project.id,
- )
- response = self.do_request(
- {
- "field": [
- "span.description",
- f"regression_score(span.duration,{int(self.two_min_ago.timestamp())})",
- ],
- "query": "transaction:/api/0/projects/",
- "dataset": "spansMetrics",
- "orderby": [
- f"-regression_score(span.duration,{int(self.two_min_ago.timestamp())})"
- ],
- "start": (self.six_min_ago - timedelta(minutes=1)).isoformat(),
- "end": before_now(minutes=0),
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- assert [row["span.description"] for row in data] == ["Added span", "Non-regressed"]
- def test_regression_score_removed_span(self):
- # This span only exists before the breakpoint
- self.store_span_metric(
- 100,
- internal_metric=SPAN_DURATION_MRI,
- timestamp=self.six_min_ago,
- tags={"transaction": "/api/0/projects/", "span.description": "Removed span"},
- project=self.project.id,
- )
- # This span stays the same
- self.store_span_metric(
- 1,
- internal_metric=SPAN_DURATION_MRI,
- timestamp=self.three_days_ago,
- tags={"transaction": "/api/0/projects/", "span.description": "Non-regressed"},
- project=self.project.id,
- )
- self.store_span_metric(
- 1,
- internal_metric=SPAN_DURATION_MRI,
- timestamp=self.min_ago,
- tags={"transaction": "/api/0/projects/", "span.description": "Non-regressed"},
- project=self.project.id,
- )
- response = self.do_request(
- {
- "field": [
- "span.description",
- f"regression_score(span.duration,{int(self.two_min_ago.timestamp())})",
- ],
- "query": "transaction:/api/0/projects/",
- "dataset": "spansMetrics",
- "orderby": [
- f"-regression_score(span.duration,{int(self.two_min_ago.timestamp())})"
- ],
- "start": (self.six_min_ago - timedelta(minutes=1)).isoformat(),
- "end": before_now(minutes=0),
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- assert [row["span.description"] for row in data] == ["Non-regressed", "Removed span"]
- # The regression score is <0 for removed spans, this can act as
- # a way to filter out removed spans when necessary
- assert data[1][f"regression_score(span.duration,{int(self.two_min_ago.timestamp())})"] < 0
- def test_avg_self_time_by_timestamp(self):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.six_min_ago,
- tags={},
- )
- self.store_span_metric(
- 3,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={},
- )
- response = self.do_request(
- {
- "field": [
- f"avg_by_timestamp(span.self_time,less,{int(self.two_min_ago.timestamp())})",
- f"avg_by_timestamp(span.self_time,greater,{int(self.two_min_ago.timestamp())})",
- ],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "1h",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0] == {
- f"avg_by_timestamp(span.self_time,less,{int(self.two_min_ago.timestamp())})": 1.0,
- f"avg_by_timestamp(span.self_time,greater,{int(self.two_min_ago.timestamp())})": 3.0,
- }
- def test_avg_self_time_by_timestamp_invalid_condition(self):
- response = self.do_request(
- {
- "field": [
- f"avg_by_timestamp(span.self_time,INVALID_ARG,{int(self.two_min_ago.timestamp())})",
- ],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "1h",
- }
- )
- assert response.status_code == 400, response.content
- assert (
- response.data["detail"]
- == "avg_by_timestamp: condition argument invalid: string must be one of ['greater', 'less']"
- )
- def test_epm_by_timestamp(self):
- self.store_span_metric(
- 1,
- internal_metric=SPAN_DURATION_MRI,
- timestamp=self.six_min_ago,
- tags={},
- )
- # More events occur after the timestamp
- for _ in range(3):
- self.store_span_metric(
- 3,
- internal_metric=SPAN_DURATION_MRI,
- timestamp=self.min_ago,
- tags={},
- )
- response = self.do_request(
- {
- "field": [
- f"epm_by_timestamp(less,{int(self.two_min_ago.timestamp())})",
- f"epm_by_timestamp(greater,{int(self.two_min_ago.timestamp())})",
- ],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "1h",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0][f"epm_by_timestamp(less,{int(self.two_min_ago.timestamp())})"] < 1.0
- assert data[0][f"epm_by_timestamp(greater,{int(self.two_min_ago.timestamp())})"] > 1.0
- def test_epm_by_timestamp_invalid_condition(self):
- response = self.do_request(
- {
- "field": [
- f"epm_by_timestamp(INVALID_ARG,{int(self.two_min_ago.timestamp())})",
- ],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "1h",
- }
- )
- assert response.status_code == 400, response.content
- assert (
- response.data["detail"]
- == "epm_by_timestamp: condition argument invalid: string must be one of ['greater', 'less']"
- )
- def test_any_function(self):
- for char in "abc":
- for transaction in ["foo", "bar"]:
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.six_min_ago,
- tags={"span.description": char, "transaction": transaction},
- )
- response = self.do_request(
- {
- "field": [
- "transaction",
- "any(span.description)",
- ],
- "query": "",
- "orderby": ["transaction"],
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "1h",
- }
- )
- assert response.status_code == 200, response.content
- assert response.data["data"] == [
- {"transaction": "bar", "any(span.description)": "a"},
- {"transaction": "foo", "any(span.description)": "a"},
- ]
- def test_count_op(self):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.six_min_ago,
- tags={"span.op": "queue.publish"},
- )
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.six_min_ago,
- tags={"span.op": "queue.process"},
- )
- response = self.do_request(
- {
- "field": [
- "count_op(queue.publish)",
- "count_op(queue.process)",
- ],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "1h",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert data == [
- {"count_op(queue.publish)": 1, "count_op(queue.process)": 1},
- ]
- def test_project_mapping(self):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.six_min_ago,
- tags={},
- )
- # More events occur after the timestamp
- for _ in range(3):
- self.store_span_metric(
- 3,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={},
- )
- response = self.do_request(
- {
- "field": ["project", "project.name", "count()"],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "1h",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert data[0]["project"] == self.project.slug
- assert data[0]["project.name"] == self.project.slug
- def test_slow_frames_gauge_metric(self):
- self.store_span_metric(
- {
- "min": 5,
- "max": 5,
- "sum": 5,
- "count": 1,
- "last": 5,
- },
- entity="metrics_gauges",
- metric="mobile.slow_frames",
- timestamp=self.six_min_ago,
- tags={"release": "foo"},
- )
- self.store_span_metric(
- {
- "min": 10,
- "max": 10,
- "sum": 10,
- "count": 1,
- "last": 10,
- },
- entity="metrics_gauges",
- metric="mobile.slow_frames",
- timestamp=self.six_min_ago,
- tags={"release": "bar"},
- )
- response = self.do_request(
- {
- "field": [
- "avg_if(mobile.slow_frames,release,foo)",
- "avg_if(mobile.slow_frames,release,bar)",
- "avg_compare(mobile.slow_frames,release,foo,bar)",
- ],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "1h",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert data == [
- {
- "avg_compare(mobile.slow_frames,release,foo,bar)": 1.0,
- "avg_if(mobile.slow_frames,release,foo)": 5.0,
- "avg_if(mobile.slow_frames,release,bar)": 10.0,
- }
- ]
- def test_resolve_messaging_message_receive_latency_gauge(self):
- self.store_span_metric(
- {
- "min": 5,
- "max": 5,
- "sum": 5,
- "count": 1,
- "last": 5,
- },
- entity="metrics_gauges",
- metric="messaging.message.receive.latency",
- timestamp=self.six_min_ago,
- tags={"messaging.destination.name": "foo", "trace.status": "ok"},
- )
- self.store_span_metric(
- {
- "min": 10,
- "max": 10,
- "sum": 10,
- "count": 1,
- "last": 10,
- },
- entity="metrics_gauges",
- metric="messaging.message.receive.latency",
- timestamp=self.six_min_ago,
- tags={"messaging.destination.name": "bar", "trace.status": "ok"},
- )
- response = self.do_request(
- {
- "field": [
- "messaging.destination.name",
- "trace.status",
- "avg(messaging.message.receive.latency)",
- ],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "1h",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert data == [
- {
- "messaging.destination.name": "bar",
- "trace.status": "ok",
- "avg(messaging.message.receive.latency)": 10.0,
- },
- {
- "messaging.destination.name": "foo",
- "trace.status": "ok",
- "avg(messaging.message.receive.latency)": 5.0,
- },
- ]
- def test_messaging_does_not_exist_as_metric(self):
- self.store_span_metric(
- 100,
- internal_metric=constants.SPAN_METRICS_MAP["span.duration"],
- tags={"messaging.destination.name": "foo", "trace.status": "ok"},
- timestamp=self.min_ago,
- )
- response = self.do_request(
- {
- "field": [
- "messaging.destination.name",
- "trace.status",
- "avg(messaging.message.receive.latency)",
- "avg(span.duration)",
- ],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "1h",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert data == [
- {
- "messaging.destination.name": "foo",
- "trace.status": "ok",
- "avg(messaging.message.receive.latency)": None,
- "avg(span.duration)": 100,
- },
- ]
- meta = response.data["meta"]
- assert meta["fields"]["avg(messaging.message.receive.latency)"] == "null"
- def test_cache_item_size_does_not_exist_as_metric(self):
- self.store_span_metric(
- 100,
- internal_metric=constants.SPAN_METRICS_MAP["span.duration"],
- tags={"cache.item": "true"},
- timestamp=self.min_ago,
- )
- response = self.do_request(
- {
- "field": [
- "avg(cache.item_size)",
- "avg(span.duration)",
- ],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "1h",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert data == [
- {
- "avg(cache.item_size)": None,
- "avg(span.duration)": 100,
- },
- ]
- meta = response.data["meta"]
- assert meta["fields"]["avg(cache.item_size)"] == "null"
- def test_trace_status_rate(self):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"trace.status": "unknown"},
- )
- self.store_span_metric(
- 3,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"trace.status": "internal_error"},
- )
- self.store_span_metric(
- 3,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"trace.status": "unauthenticated"},
- )
- self.store_span_metric(
- 4,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"trace.status": "ok"},
- )
- self.store_span_metric(
- 5,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"trace.status": "ok"},
- )
- response = self.do_request(
- {
- "field": [
- "trace_status_rate(ok)",
- "trace_status_rate(unknown)",
- "trace_status_rate(internal_error)",
- "trace_status_rate(unauthenticated)",
- ],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- "statsPeriod": "1h",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
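- # 2 of the 5 spans have trace.status ok (0.4); unknown, internal_error and unauthenticated are each 1 of 5 (0.2)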
- assert data[0]["trace_status_rate(ok)"] == 0.4
- assert data[0]["trace_status_rate(unknown)"] == 0.2
- assert data[0]["trace_status_rate(internal_error)"] == 0.2
- assert data[0]["trace_status_rate(unauthenticated)"] == 0.2
- meta = response.data["meta"]
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["trace_status_rate(ok)"] == "percentage"
- assert meta["fields"]["trace_status_rate(unknown)"] == "percentage"
- assert meta["fields"]["trace_status_rate(internal_error)"] == "percentage"
- assert meta["fields"]["trace_status_rate(unauthenticated)"] == "percentage"
- def test_trace_error_rate(self):
- self.store_span_metric(
- 1,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"trace.status": "unknown"},
- )
- self.store_span_metric(
- 3,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"trace.status": "internal_error"},
- )
- self.store_span_metric(
- 3,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"trace.status": "unauthenticated"},
- )
- self.store_span_metric(
- 4,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"trace.status": "ok"},
- )
- self.store_span_metric(
- 5,
- internal_metric=constants.SELF_TIME_LIGHT,
- timestamp=self.min_ago,
- tags={"trace.status": "ok"},
- )
- response = self.do_request(
- {
- "field": [
- "trace_error_rate()",
- ],
- "query": "",
- "project": self.project.id,
- "dataset": "spansMetrics",
- }
- )
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
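- # internal_error and unauthenticated count as errors (2 of 5 = 0.4); ok and unknown do not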
- assert data[0]["trace_error_rate()"] == 0.4
- meta = response.data["meta"]
- assert meta["dataset"] == "spansMetrics"
- assert meta["fields"]["trace_error_rate()"] == "percentage"
- class OrganizationEventsMetricsEnhancedPerformanceEndpointTestWithMetricLayer(
- OrganizationEventsMetricsEnhancedPerformanceEndpointTest
- ):
- def setUp(self):
- super().setUp()
- self.features["organizations:use-metrics-layer"] = True
- @pytest.mark.xfail(reason="Not implemented")
- def test_time_spent_percentage(self):
- super().test_time_spent_percentage()
- @pytest.mark.xfail(reason="Not implemented")
- def test_time_spent_percentage_local(self):
- super().test_time_spent_percentage_local()
- @pytest.mark.xfail(reason="Not implemented")
- def test_time_spent_percentage_on_span_duration(self):
- super().test_time_spent_percentage_on_span_duration()
- @pytest.mark.xfail(reason="Cannot group by function 'if'")
- def test_span_module(self):
- super().test_span_module()
- @pytest.mark.xfail(reason="Cannot search by tags")
- def test_tag_search(self):
- super().test_tag_search()
- @pytest.mark.xfail(reason="Cannot search by tags")
- def test_free_text_search(self):
- super().test_free_text_search()
- @pytest.mark.xfail(reason="Not implemented")
- def test_avg_compare(self):
- super().test_avg_compare()
- @pytest.mark.xfail(reason="Not implemented")
- def test_span_domain_array(self):
- super().test_span_domain_array()
- @pytest.mark.xfail(reason="Not implemented")
- def test_span_domain_array_filter(self):
- super().test_span_domain_array_filter()
- @pytest.mark.xfail(reason="Not implemented")
- def test_span_domain_array_filter_wildcard(self):
- super().test_span_domain_array_filter_wildcard()
- @pytest.mark.xfail(reason="Not implemented")
- def test_span_domain_array_has_filter(self):
- super().test_span_domain_array_has_filter()
- @pytest.mark.xfail(reason="Not implemented")
- def test_unique_values_span_domain(self):
- super().test_unique_values_span_domain()
- @pytest.mark.xfail(reason="Not implemented")
- def test_unique_values_span_domain_with_filter(self):
- super().test_unique_values_span_domain_with_filter()
- @pytest.mark.xfail(reason="Not implemented")
- def test_avg_if(self):
- super().test_avg_if()
- @pytest.mark.xfail(reason="Not implemented")
- def test_device_class_filter(self):
- super().test_device_class_filter()
- @pytest.mark.xfail(reason="Not implemented")
- def test_device_class(self):
- super().test_device_class()
- @pytest.mark.xfail(reason="Not implemented")
- def test_count_op(self):
- super().test_count_op()