from __future__ import annotations

import uuid
from datetime import timedelta
from typing import Any, TypedDict
from unittest import mock
from uuid import uuid4

import pytest
from dateutil.parser import parse as parse_date
from django.urls import reverse
from snuba_sdk import Entity
from snuba_sdk.column import Column
from snuba_sdk.conditions import Condition, Op
from snuba_sdk.function import Function

from sentry.constants import MAX_TOP_EVENTS
from sentry.issues.grouptype import ProfileFileIOGroupType
from sentry.models.project import Project
from sentry.models.transaction_threshold import ProjectTransactionThreshold, TransactionMetric
from sentry.snuba.discover import OTHER_KEY
from sentry.testutils.cases import APITestCase, ProfilesSnubaTestCase, SnubaTestCase
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.utils.samples import load_data
from tests.sentry.issues.test_utils import SearchIssueTestMixin

pytestmark = pytest.mark.sentry_metrics


class _EventDataDict(TypedDict):
    data: dict[str, Any]
    project: Project
    count: int

class OrganizationEventsStatsEndpointTest(APITestCase, SnubaTestCase, SearchIssueTestMixin):
    endpoint = "sentry-api-0-organization-events-stats"

    def setUp(self):
        super().setUp()
        self.login_as(user=self.user)
        self.authed_user = self.user

        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)

        self.project = self.create_project()
        self.project2 = self.create_project()
        self.user = self.create_user()
        self.user2 = self.create_user()
        self.store_event(
            data={
                "event_id": "a" * 32,
                "message": "very bad",
                "timestamp": iso_format(self.day_ago + timedelta(minutes=1)),
                "fingerprint": ["group1"],
                "tags": {"sentry:user": self.user.email},
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "event_id": "b" * 32,
                "message": "oh my",
                "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=1)),
                "fingerprint": ["group2"],
                "tags": {"sentry:user": self.user2.email},
            },
            project_id=self.project2.id,
        )
        self.store_event(
            data={
                "event_id": "c" * 32,
                "message": "very bad",
                "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=2)),
                "fingerprint": ["group2"],
                "tags": {"sentry:user": self.user2.email},
            },
            project_id=self.project2.id,
        )
        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_id_or_slug": self.project.organization.slug},
        )
        self.features = {}

    def do_request(self, data, url=None, features=None):
        if features is None:
            features = {"organizations:discover-basic": True}
        features.update(self.features)
        with self.feature(features):
            return self.client.get(self.url if url is None else url, data=data, format="json")

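    # Note on the response shape: the endpoint returns "data" as a list of
    # (timestamp, attrs) rows, e.g. [(1715760000, [{"count": 1}]), ...], which
    # is why the assertions below unpack rows as
    # `for time, attrs in response.data["data"]`.
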
    @pytest.mark.querybuilder
    def test_simple(self):
        response = self.do_request(
            {
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]

    def test_generic_issue(self):
        _, _, group_info = self.store_search_issue(
            self.project.id,
            self.user.id,
            [f"{ProfileFileIOGroupType.type_id}-group1"],
            "prod",
            self.day_ago,
        )
        assert group_info is not None
        self.store_search_issue(
            self.project.id,
            self.user.id,
            [f"{ProfileFileIOGroupType.type_id}-group1"],
            "prod",
            self.day_ago + timedelta(hours=1, minutes=1),
        )
        self.store_search_issue(
            self.project.id,
            self.user.id,
            [f"{ProfileFileIOGroupType.type_id}-group1"],
            "prod",
            self.day_ago + timedelta(hours=1, minutes=2),
        )
        with self.feature(
            [
                "organizations:profiling",
            ]
        ):
            response = self.do_request(
                {
                    "start": self.day_ago,
                    "end": self.day_ago + timedelta(hours=2),
                    "interval": "1h",
                    "query": f"issue:{group_info.group.qualified_short_id}",
                    "dataset": "issuePlatform",
                },
            )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]

    def test_generic_issue_calculated_interval(self):
        """Test that a 4h interval returns the correct generic event stats.

        This follows a different code path than 1h or 1d, as the
        IssuePlatformTimeSeriesQueryBuilder does some calculation to create
        the time column.
        """
        _, _, group_info = self.store_search_issue(
            self.project.id,
            self.user.id,
            [f"{ProfileFileIOGroupType.type_id}-group1"],
            "prod",
            self.day_ago + timedelta(minutes=1),
        )
        assert group_info is not None
        self.store_search_issue(
            self.project.id,
            self.user.id,
            [f"{ProfileFileIOGroupType.type_id}-group1"],
            "prod",
            self.day_ago + timedelta(minutes=1),
        )
        self.store_search_issue(
            self.project.id,
            self.user.id,
            [f"{ProfileFileIOGroupType.type_id}-group1"],
            "prod",
            self.day_ago + timedelta(minutes=2),
        )
        with self.feature(
            [
                "organizations:profiling",
            ]
        ):
            response = self.do_request(
                {
                    "start": self.day_ago,
                    "end": self.day_ago + timedelta(hours=4),
                    "interval": "4h",
                    "query": f"issue:{group_info.group.qualified_short_id}",
                    "dataset": "issuePlatform",
                },
            )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 3}], [{"count": 0}]]

    def test_errors_dataset(self):
        response = self.do_request(
            {
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "dataset": "errors",
                "query": "is:unresolved",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]

    def test_errors_dataset_no_query(self):
        response = self.do_request(
            {
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "dataset": "errors",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]

    def test_misaligned_last_bucket(self):
        response = self.do_request(
            data={
                "start": self.day_ago - timedelta(minutes=30),
                "end": self.day_ago + timedelta(hours=1, minutes=30),
                "interval": "1h",
                "partial": "1",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 0}],
            [{"count": 1}],
            [{"count": 2}],
        ]

    def test_no_projects(self):
        org = self.create_organization(owner=self.user)
        self.login_as(user=self.user)

        url = reverse(
            "sentry-api-0-organization-events-stats", kwargs={"organization_id_or_slug": org.slug}
        )
        response = self.do_request({}, url)

        assert response.status_code == 200, response.content
        assert len(response.data["data"]) == 0

    def test_user_count(self):
        self.store_event(
            data={
                "event_id": "d" * 32,
                "message": "something",
                "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                "tags": {"sentry:user": self.user2.email},
                "fingerprint": ["group2"],
            },
            project_id=self.project2.id,
        )
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": "user_count",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 2}], [{"count": 1}]]

    def test_discover2_backwards_compatibility(self):
        response = self.do_request(
            data={
                "project": self.project.id,
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": "user_count",
            },
        )
        assert response.status_code == 200, response.content
        assert len(response.data["data"]) > 0

        response = self.do_request(
            data={
                "project": self.project.id,
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": "event_count",
            },
        )
        assert response.status_code == 200, response.content
        assert len(response.data["data"]) > 0

    def test_with_event_count_flag(self):
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": "event_count",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]

    def test_performance_view_feature(self):
        response = self.do_request(
            data={
                "end": before_now(),
                "start": before_now(hours=2),
                "query": "project_id:1",
                "interval": "30m",
                "yAxis": "count()",
            },
            features={
                "organizations:performance-view": True,
                "organizations:discover-basic": False,
            },
        )
        assert response.status_code == 200, response.content

    def test_apdex_divide_by_zero(self):
        ProjectTransactionThreshold.objects.create(
            project=self.project,
            organization=self.project.organization,
            threshold=600,
            metric=TransactionMetric.LCP.value,
        )

        # Shouldn't count towards apdex
        data = load_data(
            "transaction",
            start_timestamp=self.day_ago + timedelta(minutes=1),
            timestamp=self.day_ago + timedelta(minutes=3),
        )
        data["transaction"] = "/apdex/new/"
        data["user"] = {"email": "1@example.com"}
        data["measurements"] = {}
        self.store_event(data, project_id=self.project.id)

        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": "apdex()",
                "project": [self.project.id],
            },
        )
        assert response.status_code == 200, response.content

        assert len(response.data["data"]) == 2
        data = response.data["data"]
        # With 0 transactions carrying an LCP measurement, apdex is 0/0 and
        # should come back as 0 rather than an error
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 0}],
            [{"count": 0}],
        ]

    def test_aggregate_function_apdex(self):
        project1 = self.create_project()
        project2 = self.create_project()

        events = [
            ("one", 400, project1.id),
            ("one", 400, project1.id),
            ("two", 3000, project2.id),
            ("two", 1000, project2.id),
            ("three", 3000, project2.id),
        ]
        for idx, event in enumerate(events):
            data = load_data(
                "transaction",
                start_timestamp=self.day_ago + timedelta(minutes=(1 + idx)),
                timestamp=self.day_ago + timedelta(minutes=(1 + idx), milliseconds=event[1]),
            )
            data["event_id"] = f"{idx}" * 32
            data["transaction"] = f"/apdex/new/{event[0]}"
            data["user"] = {"email": f"{idx}@example.com"}
            self.store_event(data, project_id=event[2])

        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": "apdex()",
            },
        )
        assert response.status_code == 200, response.content

        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 0.3}],
            [{"count": 0}],
        ]

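        # apdex() = (satisfied + tolerated / 2) / total. With the default 300ms
        # threshold: 0 satisfied, 3 tolerated (400ms, 400ms, 1000ms are within
        # 4x the threshold) out of 5 transactions -> 1.5 / 5 = 0.3 above.
        # Lowering the threshold to 100ms below leaves only the two 400ms
        # transactions tolerated -> 1 / 5 = 0.2.
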
        ProjectTransactionThreshold.objects.create(
            project=project1,
            organization=project1.organization,
            threshold=100,
            metric=TransactionMetric.DURATION.value,
        )
        ProjectTransactionThreshold.objects.create(
            project=project2,
            organization=project1.organization,
            threshold=100,
            metric=TransactionMetric.DURATION.value,
        )
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": "apdex()",
            },
        )
        assert response.status_code == 200, response.content

        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 0.2}],
            [{"count": 0}],
        ]

        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": ["user_count", "apdex()"],
            },
        )
        assert response.status_code == 200, response.content

        assert response.data["user_count"]["order"] == 0
        assert [attrs for time, attrs in response.data["user_count"]["data"]] == [
            [{"count": 5}],
            [{"count": 0}],
        ]
        assert response.data["apdex()"]["order"] == 1
        assert [attrs for time, attrs in response.data["apdex()"]["data"]] == [
            [{"count": 0.2}],
            [{"count": 0}],
        ]

    def test_aggregate_function_count(self):
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": "count()",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]

    def test_invalid_aggregate(self):
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": "rubbish",
            },
        )
        assert response.status_code == 400, response.content

    def test_aggregate_function_user_count(self):
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": "count_unique(user)",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 1}]]

    def test_aggregate_invalid(self):
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": "nope(lol)",
            },
        )
        assert response.status_code == 400, response.content

    def test_throughput_epm_hour_rollup(self):
        project = self.create_project()
        # Each of these denotes how many events to create in each hour
        event_counts = [6, 0, 6, 3, 0, 3]
        for hour, count in enumerate(event_counts):
            for minute in range(count):
                self.store_event(
                    data={
                        "event_id": str(uuid.uuid1()),
                        "message": "very bad",
                        "timestamp": (
                            self.day_ago + timedelta(hours=hour, minutes=minute)
                        ).isoformat(),
                        "fingerprint": ["group1"],
                        "tags": {"sentry:user": self.user.email},
                    },
                    project_id=project.id,
                )

        for axis in ["epm()", "tpm()"]:
            response = self.do_request(
                data={
                    "start": self.day_ago,
                    "end": self.day_ago + timedelta(hours=6),
                    "interval": "1h",
                    "yAxis": axis,
                    "project": project.id,
                },
            )
            assert response.status_code == 200, response.content
            data = response.data["data"]
            assert len(data) == 6

            rows = data[0:6]
            for test in zip(event_counts, rows):
                assert test[1][1][0]["count"] == test[0] / (3600.0 / 60.0)

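    # epm() and eps() normalize each bucket to a per-minute / per-second rate:
    # above, an hourly bucket with N events yields N / (3600 / 60) = N / 60 for
    # epm(); the eps() tests below likewise divide a one-minute bucket by 60.
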
    def test_throughput_epm_day_rollup(self):
        project = self.create_project()
        # Each of these denotes how many events to create in each hour
        event_counts = [6, 0, 6, 3, 0, 3]
        for hour, count in enumerate(event_counts):
            for minute in range(count):
                self.store_event(
                    data={
                        "event_id": str(uuid.uuid1()),
                        "message": "very bad",
                        "timestamp": (
                            self.day_ago + timedelta(hours=hour, minutes=minute)
                        ).isoformat(),
                        "fingerprint": ["group1"],
                        "tags": {"sentry:user": self.user.email},
                    },
                    project_id=project.id,
                )

        for axis in ["epm()", "tpm()"]:
            response = self.do_request(
                data={
                    "start": self.day_ago,
                    "end": self.day_ago + timedelta(hours=24),
                    "interval": "24h",
                    "yAxis": axis,
                    "project": project.id,
                },
            )
            assert response.status_code == 200, response.content
            data = response.data["data"]
            assert len(data) == 2

            assert data[0][1][0]["count"] == sum(event_counts) / (86400.0 / 60.0)

    def test_throughput_eps_minute_rollup(self):
        project = self.create_project()
        # Each of these denotes how many events to create in each minute
        event_counts = [6, 0, 6, 3, 0, 3]
        for minute, count in enumerate(event_counts):
            for second in range(count):
                self.store_event(
                    data={
                        "event_id": str(uuid.uuid1()),
                        "message": "very bad",
                        "timestamp": (
                            self.day_ago + timedelta(minutes=minute, seconds=second)
                        ).isoformat(),
                        "fingerprint": ["group1"],
                        "tags": {"sentry:user": self.user.email},
                    },
                    project_id=project.id,
                )

        for axis in ["eps()", "tps()"]:
            response = self.do_request(
                data={
                    "start": self.day_ago,
                    "end": self.day_ago + timedelta(minutes=6),
                    "interval": "1m",
                    "yAxis": axis,
                    "project": project.id,
                },
            )
            assert response.status_code == 200, response.content
            data = response.data["data"]
            assert len(data) == 6

            rows = data[0:6]
            for test in zip(event_counts, rows):
                assert test[1][1][0]["count"] == test[0] / 60.0

    def test_throughput_eps_no_rollup(self):
        project = self.create_project()
        # Each of these denotes how many events to create in each minute
        event_counts = [6, 0, 6, 3, 0, 3]
        for minute, count in enumerate(event_counts):
            for second in range(count):
                self.store_event(
                    data={
                        "event_id": str(uuid.uuid1()),
                        "message": "very bad",
                        "timestamp": (
                            self.day_ago + timedelta(minutes=minute, seconds=second)
                        ).isoformat(),
                        "fingerprint": ["group1"],
                        "tags": {"sentry:user": self.user.email},
                    },
                    project_id=project.id,
                )

        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(minutes=1),
                "interval": "1s",
                "yAxis": "eps()",
                "project": project.id,
            },
        )
        assert response.status_code == 200, response.content
        data = response.data["data"]

        # expect 60 data points across the 60-second time span
        assert len(data) == 60

        rows = data[0:6]
        for row in rows:
            assert row[1][0]["count"] == 1

    def test_transaction_events(self):
        prototype = {
            "type": "transaction",
            "transaction": "api.issue.delete",
            "spans": [],
            "contexts": {"trace": {"op": "foobar", "trace_id": "a" * 32, "span_id": "a" * 16}},
            "tags": {"important": "yes"},
        }
        fixtures = (
            ("d" * 32, before_now(minutes=32)),
            ("e" * 32, before_now(hours=1, minutes=2)),
            ("f" * 32, before_now(hours=1, minutes=35)),
        )
        for fixture in fixtures:
            data = prototype.copy()
            data["event_id"] = fixture[0]
            data["timestamp"] = fixture[1].isoformat()
            data["start_timestamp"] = iso_format(fixture[1] - timedelta(seconds=1))
            self.store_event(data=data, project_id=self.project.id)

        for dataset in ["discover", "transactions"]:
            response = self.do_request(
                data={
                    "project": self.project.id,
                    "end": before_now(),
                    "start": before_now(hours=2),
                    "query": "event.type:transaction",
                    "interval": "30m",
                    "yAxis": "count()",
                    "dataset": dataset,
                },
            )
            assert response.status_code == 200, response.content
            items = [item for time, item in response.data["data"] if item]
            # We could get more results depending on where the 30 min
            # windows land.
            assert len(items) >= 3

    def test_project_id_query_filter(self):
        response = self.do_request(
            data={
                "end": before_now(),
                "start": before_now(hours=2),
                "query": "project_id:1",
                "interval": "30m",
                "yAxis": "count()",
            },
        )
        assert response.status_code == 200

    def test_latest_release_query_filter(self):
        response = self.do_request(
            data={
                "project": self.project.id,
                "end": before_now(),
                "start": before_now(hours=2),
                "query": "release:latest",
                "interval": "30m",
                "yAxis": "count()",
            },
        )
        assert response.status_code == 200

    def test_conditional_filter(self):
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "query": "id:{} OR id:{}".format("a" * 32, "b" * 32),
                "interval": "30m",
                "yAxis": "count()",
            },
        )
        assert response.status_code == 200, response.content
        data = response.data["data"]
        assert len(data) == 4
        assert data[0][1][0]["count"] == 1
        assert data[2][1][0]["count"] == 1

    def test_simple_multiple_yaxis(self):
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": ["user_count", "event_count"],
            },
        )
        assert response.status_code == 200, response.content
        assert response.data["user_count"]["order"] == 0
        assert [attrs for time, attrs in response.data["user_count"]["data"]] == [
            [{"count": 1}],
            [{"count": 1}],
        ]
        assert response.data["event_count"]["order"] == 1
        assert [attrs for time, attrs in response.data["event_count"]["data"]] == [
            [{"count": 1}],
            [{"count": 2}],
        ]

    def test_equation_yaxis(self):
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": ["equation|count() / 100"],
            },
        )
        assert response.status_code == 200, response.content
        assert len(response.data["data"]) == 2
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 0.01}],
            [{"count": 0.02}],
        ]

    def test_eps_equation(self):
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": ["equation|eps() * 2"],
            },
        )
        assert response.status_code == 200, response.content
        assert len(response.data["data"]) == 2
        assert pytest.approx(0.000556, abs=0.0001) == response.data["data"][0][1][0]["count"]
        assert pytest.approx(0.001112, abs=0.0001) == response.data["data"][1][1][0]["count"]

    def test_epm_equation(self):
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": ["equation|epm() * 2"],
            },
        )
        assert response.status_code == 200, response.content
        assert len(response.data["data"]) == 2
        assert pytest.approx(0.03334, abs=0.01) == response.data["data"][0][1][0]["count"]
        assert pytest.approx(0.06667, abs=0.01) == response.data["data"][1][1][0]["count"]

    def test_equation_mixed_multi_yaxis(self):
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": ["count()", "equation|count() * 100"],
            },
        )
        assert response.status_code == 200, response.content
        assert response.data["count()"]["order"] == 0
        assert [attrs for time, attrs in response.data["count()"]["data"]] == [
            [{"count": 1}],
            [{"count": 2}],
        ]
        assert response.data["equation|count() * 100"]["order"] == 1
        assert [attrs for time, attrs in response.data["equation|count() * 100"]["data"]] == [
            [{"count": 100}],
            [{"count": 200}],
        ]

    def test_equation_multi_yaxis(self):
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": ["equation|count() / 100", "equation|count() * 100"],
            },
        )
        assert response.status_code == 200, response.content
        assert response.data["equation|count() / 100"]["order"] == 0
        assert [attrs for time, attrs in response.data["equation|count() / 100"]["data"]] == [
            [{"count": 0.01}],
            [{"count": 0.02}],
        ]
        assert response.data["equation|count() * 100"]["order"] == 1
        assert [attrs for time, attrs in response.data["equation|count() * 100"]["data"]] == [
            [{"count": 100}],
            [{"count": 200}],
        ]

    def test_large_interval_no_drop_values(self):
        self.store_event(
            data={
                "event_id": "d" * 32,
                "message": "not good",
                "timestamp": iso_format(self.day_ago - timedelta(minutes=10)),
                "fingerprint": ["group3"],
            },
            project_id=self.project.id,
        )

        response = self.do_request(
            data={
                "project": self.project.id,
                "end": self.day_ago,
                "start": self.day_ago - timedelta(hours=24),
                "query": 'message:"not good"',
                "interval": "1d",
                "yAxis": "count()",
            },
        )
        assert response.status_code == 200
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 0}], [{"count": 1}]]

    @mock.patch("sentry.snuba.discover.timeseries_query", return_value={})
    def test_multiple_yaxis_only_one_query(self, mock_query):
        self.do_request(
            data={
                "project": self.project.id,
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": ["user_count", "event_count", "epm()", "eps()"],
            },
        )
        assert mock_query.call_count == 1

- @mock.patch("sentry.snuba.discover.bulk_snuba_queries", return_value=[{"data": []}])
- def test_invalid_interval(self, mock_query):
- self.do_request(
- data={
- "end": before_now(),
- "start": before_now(hours=24),
- "query": "",
- "interval": "1s",
- "yAxis": "count()",
- },
- )
- assert mock_query.call_count == 1
- # Should've reset to the default for 24h
- assert mock_query.mock_calls[0].args[0][0].query.granularity.granularity == 300
- self.do_request(
- data={
- "end": before_now(),
- "start": before_now(hours=24),
- "query": "",
- "interval": "0d",
- "yAxis": "count()",
- },
- )
- assert mock_query.call_count == 2
- # Should've reset to the default for 24h
- assert mock_query.mock_calls[1].args[0][0].query.granularity.granularity == 300
- def test_out_of_retention(self):
- with self.options({"system.event-retention-days": 10}):
- response = self.do_request(
- data={
- "start": before_now(days=20),
- "end": before_now(days=15),
- "query": "",
- "interval": "30m",
- "yAxis": "count()",
- },
- )
- assert response.status_code == 400
- @mock.patch("sentry.utils.snuba.quantize_time")
- def test_quantize_dates(self, mock_quantize):
- mock_quantize.return_value = before_now(days=1)
- # Don't quantize short time periods
- self.do_request(
- data={"statsPeriod": "1h", "query": "", "interval": "30m", "yAxis": "count()"},
- )
- # Don't quantize absolute date periods
- self.do_request(
- data={
- "start": before_now(days=20),
- "end": before_now(days=15),
- "query": "",
- "interval": "30m",
- "yAxis": "count()",
- },
- )
- assert len(mock_quantize.mock_calls) == 0
- # Quantize long date periods
- self.do_request(
- data={"statsPeriod": "90d", "query": "", "interval": "30m", "yAxis": "count()"},
- )
- assert len(mock_quantize.mock_calls) == 2
- def test_with_zerofill(self):
- response = self.do_request(
- data={
- "start": self.day_ago,
- "end": self.day_ago + timedelta(hours=2),
- "interval": "30m",
- },
- )
- assert response.status_code == 200, response.content
- assert [attrs for time, attrs in response.data["data"]] == [
- [{"count": 1}],
- [{"count": 0}],
- [{"count": 2}],
- [{"count": 0}],
- ]
    def test_without_zerofill(self):
        start = iso_format(self.day_ago)
        end = iso_format(self.day_ago + timedelta(hours=2))
        response = self.do_request(
            data={
                "start": start,
                "end": end,
                "interval": "30m",
                "withoutZerofill": "1",
            },
            features={
                "organizations:performance-chart-interpolation": True,
                "organizations:discover-basic": True,
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 1}],
            [{"count": 2}],
        ]
        assert response.data["start"] == parse_date(start).timestamp()
        assert response.data["end"] == parse_date(end).timestamp()

    def test_comparison_error_dataset(self):
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=1)),
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=2)),
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, hours=1, minutes=1)),
            },
            project_id=self.project2.id,
        )

        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "comparisonDelta": int(timedelta(days=1).total_seconds()),
                "dataset": "errors",
            }
        )
        assert response.status_code == 200, response.content

        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 1, "comparisonCount": 2}],
            [{"count": 2, "comparisonCount": 1}],
        ]

    def test_comparison(self):
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=1)),
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=2)),
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, hours=1, minutes=1)),
            },
            project_id=self.project2.id,
        )

        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "comparisonDelta": int(timedelta(days=1).total_seconds()),
            }
        )
        assert response.status_code == 200, response.content

        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 1, "comparisonCount": 2}],
            [{"count": 2, "comparisonCount": 1}],
        ]

    def test_comparison_invalid(self):
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "comparisonDelta": "17h",
            },
        )
        assert response.status_code == 400, response.content
        assert response.data["detail"] == "comparisonDelta must be an integer"

        start = before_now(days=85)
        end = start + timedelta(days=7)
        with self.options({"system.event-retention-days": 90}):
            response = self.do_request(
                data={
                    "start": start,
                    "end": end,
                    "interval": "1h",
                    "comparisonDelta": int(timedelta(days=7).total_seconds()),
                }
            )
            assert response.status_code == 400, response.content
            assert response.data["detail"] == "Comparison period is outside retention window"

    def test_equations_divide_by_zero(self):
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                # force a 0 in the denominator by doing 1 - 1,
                # since a 0 literal is illegal as the denominator
                "yAxis": ["equation|count() / (1-1)"],
            },
        )
        assert response.status_code == 200, response.content
        assert len(response.data["data"]) == 2
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": None}],
            [{"count": None}],
        ]

- @mock.patch("sentry.search.events.builder.base.raw_snql_query")
- def test_profiles_dataset_simple(self, mock_snql_query):
- mock_snql_query.side_effect = [{"meta": {}, "data": []}]
- query = {
- "yAxis": [
- "count()",
- "p75()",
- "p95()",
- "p99()",
- "p75(profile.duration)",
- "p95(profile.duration)",
- "p99(profile.duration)",
- ],
- "project": [self.project.id],
- "dataset": "profiles",
- }
- response = self.do_request(query, features={"organizations:profiling": True})
- assert response.status_code == 200, response.content
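    # A tag literally named "count" collides with the alias of the count()
    # aggregate; the conflicting-alias tests below verify that the tag and the
    # aggregate resolve independently of each other.
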
    def test_tag_with_conflicting_function_alias_simple(self):
        for _ in range(7):
            self.store_event(
                data={
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "tags": {"count": "9001"},
                },
                project_id=self.project2.id,
            )

        # Query for count and count()
        data = {
            "start": self.day_ago.isoformat(),
            "end": (self.day_ago + timedelta(minutes=3)).isoformat(),
            "interval": "1h",
            "yAxis": "count()",
            "orderby": ["-count()"],
            "field": ["count()", "count"],
            "partial": "1",
        }
        response = self.client.get(self.url, data, format="json")
        assert response.status_code == 200
        # Expect a count of 8: the 7 events stored above plus one event from setUp
        assert response.data["data"][0][1] == [{"count": 8}]

        data["query"] = "count:9001"
        response = self.client.get(self.url, data, format="json")
        assert response.status_code == 200
        assert response.data["data"][0][1] == [{"count": 7}]

        data["query"] = "count:abc"
        response = self.client.get(self.url, data, format="json")
        assert response.status_code == 200
        assert all([interval[1][0]["count"] == 0 for interval in response.data["data"]])

    def test_group_id_tag_simple(self):
        event_data: _EventDataDict = {
            "data": {
                "message": "poof",
                "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                "user": {"email": self.user.email},
                "tags": {"group_id": "testing"},
                "fingerprint": ["group1"],
            },
            "project": self.project2,
            "count": 7,
        }
        for i in range(event_data["count"]):
            event_data["data"]["event_id"] = f"a{i}" * 16
            self.store_event(event_data["data"], project_id=event_data["project"].id)

        data = {
            "start": self.day_ago.isoformat(),
            "end": (self.day_ago + timedelta(hours=2)).isoformat(),
            "interval": "1h",
            "yAxis": "count()",
            "orderby": ["-count()"],
            "field": ["count()", "group_id"],
            "partial": "1",
        }
        response = self.client.get(self.url, data, format="json")
        assert response.status_code == 200
        assert response.data["data"][0][1] == [{"count": 8}]

        data["query"] = "group_id:testing"
        response = self.client.get(self.url, data, format="json")
        assert response.status_code == 200
        assert response.data["data"][0][1] == [{"count": 7}]

        data["query"] = "group_id:abc"
        response = self.client.get(self.url, data, format="json")
        assert response.status_code == 200
        assert all([interval[1][0]["count"] == 0 for interval in response.data["data"]])

class OrganizationEventsStatsTopNEvents(APITestCase, SnubaTestCase):
    def setUp(self):
        super().setUp()
        self.login_as(user=self.user)

        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)

        self.project = self.create_project()
        self.project2 = self.create_project()
        self.user2 = self.create_user()
        transaction_data = load_data("transaction")
        transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
        transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=4))
        transaction_data["tags"] = {"shared-tag": "yup"}
        self.event_data: list[_EventDataDict] = [
            {
                "data": {
                    "message": "poof",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "user": {"email": self.user.email},
                    "tags": {"shared-tag": "yup"},
                    "fingerprint": ["group1"],
                },
                "project": self.project2,
                "count": 7,
            },
            {
                "data": {
                    "message": "voof",
                    "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=2)),
                    "fingerprint": ["group2"],
                    "user": {"email": self.user2.email},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project2,
                "count": 6,
            },
            {
                "data": {
                    "message": "very bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group3"],
                    "user": {"email": "foo@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 5,
            },
            {
                "data": {
                    "message": "oh no",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group4"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 4,
            },
            {"data": transaction_data, "project": self.project, "count": 3},
            # Not in the top 5
            {
                "data": {
                    "message": "sorta bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group5"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 2,
            },
            {
                "data": {
                    "message": "not so bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group6"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 1,
            },
        ]

        self.events = []
        for index, event_data in enumerate(self.event_data):
            data = event_data["data"].copy()
            for i in range(event_data["count"]):
                data["event_id"] = f"{index}{i}" * 16
                event = self.store_event(data, project_id=event_data["project"].id)
            self.events.append(event)
        self.transaction = self.events[4]

        self.enabled_features = {
            "organizations:discover-basic": True,
        }
        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_id_or_slug": self.project.organization.slug},
        )

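    # Top-events responses are keyed by the comma-joined values of the grouped
    # fields (e.g. "poof,foo@example.com"), plus an "Other" series aggregating
    # everything outside the top N, as the assertions below demonstrate.
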
    def test_no_top_events_with_project_field(self):
        project = self.create_project()
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    # make sure to query the project with 0 events
                    "project": str(project.id),
                    "start": self.day_ago.isoformat(),
                    "end": (self.day_ago + timedelta(hours=2)).isoformat(),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "project"],
                    "topEvents": "5",
                },
                format="json",
            )

        assert response.status_code == 200, response.content
        # When there are no top events, we do not return an empty dict.
        # Instead, we return a single zero-filled series for an empty graph.
        data = response.data["data"]
        assert [attrs for time, attrs in data] == [[{"count": 0}], [{"count": 0}]]

    def test_no_top_events(self):
        project = self.create_project()
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    # make sure to query the project with 0 events
                    "project": str(project.id),
                    "start": self.day_ago.isoformat(),
                    "end": (self.day_ago + timedelta(hours=2)).isoformat(),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "user.email"],
                    "topEvents": "5",
                },
                format="json",
            )

        data = response.data["data"]
        assert response.status_code == 200, response.content
        # When there are no top events, we do not return an empty dict.
        # Instead, we return a single zero-filled series for an empty graph.
        assert [attrs for time, attrs in data] == [[{"count": 0}], [{"count": 0}]]

    def test_no_top_events_with_multi_axis(self):
        project = self.create_project()
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    # make sure to query the project with 0 events
                    "project": str(project.id),
                    "start": self.day_ago.isoformat(),
                    "end": (self.day_ago + timedelta(hours=2)).isoformat(),
                    "interval": "1h",
                    "yAxis": ["count()", "count_unique(user)"],
                    "orderby": ["-count()"],
                    "field": ["count()", "count_unique(user)", "message", "user.email"],
                    "topEvents": "5",
                },
                format="json",
            )

        assert response.status_code == 200
        # With no top events and multiple axes, the series are keyed by the empty string
        data = response.data[""]
        assert [attrs for time, attrs in data["count()"]["data"]] == [
            [{"count": 0}],
            [{"count": 0}],
        ]
        assert [attrs for time, attrs in data["count_unique(user)"]["data"]] == [
            [{"count": 0}],
            [{"count": 0}],
        ]

    def test_simple_top_events(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": self.day_ago.isoformat(),
                    "end": (self.day_ago + timedelta(hours=2)).isoformat(),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "user.email"],
                    "topEvents": "5",
                },
                format="json",
            )

        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6

        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            results = data[
                ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
            ]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for _, attrs in results["data"]
            ]

        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 3}] in [attrs for _, attrs in other["data"]]

    def test_top_events_with_projects_other(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": self.day_ago.isoformat(),
                    "end": (self.day_ago + timedelta(hours=2)).isoformat(),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "project"],
                    "topEvents": "1",
                },
                format="json",
            )

        data = response.data
        assert response.status_code == 200, response.content
        assert set(data.keys()) == {"Other", self.project.slug}

        assert data[self.project.slug]["order"] == 0
        assert [attrs[0]["count"] for _, attrs in data[self.project.slug]["data"]] == [15, 0]

        assert data["Other"]["order"] == 1
        assert [attrs[0]["count"] for _, attrs in data["Other"]["data"]] == [7, 6]

    def test_top_events_with_projects_fields(self):
        # We need to handle the project name fields differently
        for project_field in ["project", "project.name"]:
            with self.feature(self.enabled_features):
                response = self.client.get(
                    self.url,
                    data={
                        "start": self.day_ago.isoformat(),
                        "end": (self.day_ago + timedelta(hours=2)).isoformat(),
                        "interval": "1h",
                        "yAxis": "count()",
                        "orderby": ["-count()"],
                        "field": ["count()", project_field],
                        "topEvents": "5",
                    },
                    format="json",
                )

            data = response.data
            assert response.status_code == 200, response.content

            assert data[self.project.slug]["order"] == 0, project_field
            assert [attrs[0]["count"] for _, attrs in data[self.project.slug]["data"]] == [
                15,
                0,
            ], project_field

            assert data[self.project2.slug]["order"] == 1, project_field
            assert [attrs[0]["count"] for _, attrs in data[self.project2.slug]["data"]] == [
                7,
                6,
            ], project_field

    def test_tag_with_conflicting_function_alias_simple(self):
        event_data: _EventDataDict = {
            "data": {
                "message": "poof",
                "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                "user": {"email": self.user.email},
                "tags": {"count": "9001"},
                "fingerprint": ["group1"],
            },
            "project": self.project2,
            "count": 7,
        }
        for i in range(event_data["count"]):
            event_data["data"]["event_id"] = f"a{i}" * 16
            self.store_event(event_data["data"], project_id=event_data["project"].id)

        # Query for count and count()
        data = {
            "start": self.day_ago.isoformat(),
            "end": (self.day_ago + timedelta(hours=2)).isoformat(),
            "interval": "1h",
            "yAxis": "count()",
            "orderby": ["-count()"],
            "field": ["count()", "count"],
            "topEvents": "5",
            "partial": "1",
        }
        with self.feature(self.enabled_features):
            response = self.client.get(self.url, data, format="json")
            assert response.status_code == 200
            assert response.data["9001"]["data"][0][1] == [{"count": 7}]

        data["query"] = "count:9001"
        with self.feature(self.enabled_features):
            response = self.client.get(self.url, data, format="json")
            assert response.status_code == 200
            assert response.data["9001"]["data"][0][1] == [{"count": 7}]

        data["query"] = "count:abc"
        with self.feature(self.enabled_features):
            response = self.client.get(self.url, data, format="json")
            assert response.status_code == 200
            assert all([interval[1][0]["count"] == 0 for interval in response.data["data"]])

    @pytest.mark.xfail(
        reason="response.data['Other'] returns 15 locally but 16 or 15 remotely."
    )
    def test_tag_with_conflicting_function_alias_with_other_single_grouping(self):
        event_data: list[_EventDataDict] = [
            {
                "data": {
                    "message": "poof",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "user": {"email": self.user.email},
                    "tags": {"count": "9001"},
                    "fingerprint": ["group1"],
                },
                "project": self.project2,
                "count": 7,
            },
            {
                "data": {
                    "message": "poof2",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "user": {"email": self.user.email},
                    "tags": {"count": "abc"},
                    "fingerprint": ["group1"],
                },
                "project": self.project2,
                "count": 3,
            },
        ]
        for index, event in enumerate(event_data):
            for i in range(event["count"]):
                event["data"]["event_id"] = f"{index}{i}" * 16
                self.store_event(event["data"], project_id=event["project"].id)

        # Query for count and count()
        data = {
            "start": self.day_ago.isoformat(),
            "end": (self.day_ago + timedelta(hours=1)).isoformat(),
            "interval": "1h",
            "yAxis": "count()",
            "orderby": ["-count"],
            "field": ["count()", "count"],
            "topEvents": "2",
            "partial": "1",
        }
        with self.feature(self.enabled_features):
            response = self.client.get(self.url, data, format="json")

        assert response.status_code == 200
        assert response.data["9001"]["data"][0][1] == [{"count": 7}]
        assert response.data["abc"]["data"][0][1] == [{"count": 3}]
        assert response.data["Other"]["data"][0][1] == [{"count": 16}]

- def test_tag_with_conflicting_function_alias_with_other_multiple_groupings(self):
- event_data: list[_EventDataDict] = [
- {
- "data": {
- "message": "abc",
- "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
- "user": {"email": self.user.email},
- "tags": {"count": "2"},
- "fingerprint": ["group1"],
- },
- "project": self.project2,
- "count": 3,
- },
- {
- "data": {
- "message": "def",
- "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
- "user": {"email": self.user.email},
- "tags": {"count": "9001"},
- "fingerprint": ["group1"],
- },
- "project": self.project2,
- "count": 7,
- },
- ]
- for index, event in enumerate(event_data):
- for i in range(event["count"]):
- event["data"]["event_id"] = f"{index}{i}" * 16
- self.store_event(event["data"], project_id=event["project"].id)
- # Query for count and count()
- data = {
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "2d",
- "yAxis": "count()",
- "orderby": ["-count"],
- "field": ["count()", "count", "message"],
- "topEvents": "2",
- "partial": "1",
- }
- with self.feature(self.enabled_features):
- response = self.client.get(self.url, data, format="json")
- assert response.status_code == 200
- assert response.data["abc,2"]["data"][0][1] == [{"count": 3}]
- assert response.data["def,9001"]["data"][0][1] == [{"count": 7}]
- assert response.data["Other"]["data"][0][1] == [{"count": 25}]
- def test_group_id_tag_simple(self):
- event_data: _EventDataDict = {
- "data": {
- "message": "poof",
- "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
- "user": {"email": self.user.email},
- "tags": {"group_id": "the tag"},
- "fingerprint": ["group1"],
- },
- "project": self.project2,
- "count": 7,
- }
- for i in range(event_data["count"]):
- event_data["data"]["event_id"] = f"a{i}" * 16
- self.store_event(event_data["data"], project_id=event_data["project"].id)
- data = {
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "group_id"],
- "topEvents": "5",
- "partial": "1",
- }
- with self.feature(self.enabled_features):
- response = self.client.get(self.url, data, format="json")
- assert response.status_code == 200, response.content
- assert response.data["the tag"]["data"][0][1] == [{"count": 7}]
- data["query"] = 'group_id:"the tag"'
- with self.feature(self.enabled_features):
- response = self.client.get(self.url, data, format="json")
- assert response.status_code == 200
- assert response.data["the tag"]["data"][0][1] == [{"count": 7}]
- data["query"] = "group_id:abc"
- with self.feature(self.enabled_features):
- response = self.client.get(self.url, data, format="json")
- assert response.status_code == 200
- assert all(interval[1][0]["count"] == 0 for interval in response.data["data"])
- def test_top_events_limits(self):
- data = {
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "message", "user.email"],
- }
- with self.feature(self.enabled_features):
- data["topEvents"] = str(MAX_TOP_EVENTS + 1)
- response = self.client.get(self.url, data, format="json")
- assert response.status_code == 400
- data["topEvents"] = "0"
- response = self.client.get(self.url, data, format="json")
- assert response.status_code == 400
- data["topEvents"] = "a"
- response = self.client.get(self.url, data, format="json")
- assert response.status_code == 400
- @pytest.mark.xfail(
- reason="The response is wrong whenever we have a top events timeseries on project + any other field + aggregation"
- )
- def test_top_events_with_projects(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "message", "project"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 6
- for index, event in enumerate(self.events[:5]):
- message = event.message or event.transaction
- results = data[",".join([message, event.project.slug])]
- assert results["order"] == index
- assert [{"count": self.event_data[index]["count"]}] in [
- attrs for time, attrs in results["data"]
- ]
- other = data["Other"]
- assert other["order"] == 5
- assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
- def test_top_events_with_issue(self):
- # delete a group to make sure that, if this happens, the value becomes "unknown"
- event_group = self.events[0].group
- event_group.delete()
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "message", "issue"],
- "topEvents": "5",
- "query": "!event.type:transaction",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 6
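- # only the first 4 events are checked; events[4] is a transaction, which the query excludes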
- for index, event in enumerate(self.events[:4]):
- message = event.message
- # Because we deleted the group for event 0
- if index == 0 or event.group is None:
- issue = "unknown"
- else:
- issue = event.group.qualified_short_id
- results = data[",".join([issue, message])]
- assert results["order"] == index
- assert [{"count": self.event_data[index]["count"]}] in [
- attrs for time, attrs in results["data"]
- ]
- other = data["Other"]
- assert other["order"] == 5
- assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
- def test_top_events_with_transaction_status(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "transaction.status"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 1
- assert "ok" in data
- @mock.patch("sentry.models.GroupManager.get_issues_mapping")
- def test_top_events_with_unknown_issue(self, mock_issues_mapping):
- event = self.events[0]
- event_data = self.event_data[0]
- # ensure that the issue mapping returns None for the issue
- mock_issues_mapping.return_value = {event.group.id: None}
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "issue"],
- "topEvents": "5",
- # narrow the search to just one issue
- "query": f"issue.id:{event.group.id}",
- },
- format="json",
- )
- assert response.status_code == 200, response.content
- data = response.data
- assert len(data) == 1
- results = data["unknown"]
- assert results["order"] == 0
- assert [{"count": event_data["count"]}] in [attrs for time, attrs in results["data"]]
- @mock.patch(
- "sentry.search.events.builder.base.raw_snql_query",
- side_effect=[{"data": [{"issue.id": 1}], "meta": []}, {"data": [], "meta": []}],
- )
- def test_top_events_with_issue_check_query_conditions(self, mock_query):
- """ "Intentionally separate from test_top_events_with_issue
- This is to test against a bug where the condition for issues wasn't included and we'd be missing data for
- the interval since we'd cap out the max rows. This was not caught by the previous test since the results
- would still be correct given the smaller interval & lack of data
- """
- with self.feature(self.enabled_features):
- self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "message", "issue"],
- "topEvents": "5",
- "query": "!event.type:transaction",
- },
- format="json",
- )
- assert (
- Condition(Function("coalesce", [Column("group_id"), 0], "issue.id"), Op.IN, [1])
- in mock_query.mock_calls[1].args[0].query.where
- )
- def test_top_events_with_functions(self):
- for dataset in ["transactions", "discover"]:
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-p99()"],
- "field": ["transaction", "avg(transaction.duration)", "p99()"],
- "topEvents": "5",
- "dataset": dataset,
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 1
- results = data[self.transaction.transaction]
- assert results["order"] == 0
- assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
- def test_top_events_with_functions_on_different_transactions(self):
- """Transaction2 has less events, but takes longer so order should be self.transaction then transaction2"""
- transaction_data = load_data("transaction")
- transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
- transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
- transaction_data["transaction"] = "/foo_bar/"
- transaction2 = self.store_event(transaction_data, project_id=self.project.id)
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-p90()"],
- "field": ["transaction", "avg(transaction.duration)", "p90()"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 2
- results = data[self.transaction.transaction]
- assert results["order"] == 1
- assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
- results = data[transaction2.transaction]
- assert results["order"] == 0
- assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
- def test_top_events_with_query(self):
- transaction_data = load_data("transaction")
- transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
- transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
- transaction_data["transaction"] = "/foo_bar/"
- self.store_event(transaction_data, project_id=self.project.id)
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-p99()"],
- "query": "transaction:/foo_bar/",
- "field": ["transaction", "avg(transaction.duration)", "p99()"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 1
- transaction2_data = data["/foo_bar/"]
- assert transaction2_data["order"] == 0
- assert [attrs for time, attrs in transaction2_data["data"]] == [
- [{"count": 1}],
- [{"count": 0}],
- ]
- def test_top_events_with_negated_condition(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "query": f"!message:{self.events[0].message}",
- "field": ["message", "count()"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 6
- for index, event in enumerate(self.events[1:5]):
- message = event.message or event.transaction
- results = data[message]
- assert results["order"] == index
- assert [{"count": self.event_data[index + 1]["count"]}] in [
- attrs for _, attrs in results["data"]
- ]
- other = data["Other"]
- assert other["order"] == 5
- assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
- def test_top_events_with_epm(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "epm()",
- "orderby": ["-count()"],
- "field": ["message", "user.email", "count()"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 6
- for index, event in enumerate(self.events[:5]):
- message = event.message or event.transaction
- results = data[
- ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
- ]
- assert results["order"] == index
- assert [{"count": self.event_data[index]["count"] / (3600.0 / 60.0)}] in [
- attrs for time, attrs in results["data"]
- ]
- other = data["Other"]
- assert other["order"] == 5
- assert [{"count": 0.05}] in [attrs for _, attrs in other["data"]]
- def test_top_events_with_multiple_yaxis(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": ["epm()", "count()"],
- "orderby": ["-count()"],
- "field": ["message", "user.email", "count()"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 6
- for index, event in enumerate(self.events[:5]):
- message = event.message or event.transaction
- results = data[
- ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
- ]
- assert results["order"] == index
- assert results["epm()"]["order"] == 0
- assert results["count()"]["order"] == 1
- assert [{"count": self.event_data[index]["count"] / (3600.0 / 60.0)}] in [
- attrs for time, attrs in results["epm()"]["data"]
- ]
- assert [{"count": self.event_data[index]["count"]}] in [
- attrs for time, attrs in results["count()"]["data"]
- ]
- other = data["Other"]
- assert other["order"] == 5
- assert other["epm()"]["order"] == 0
- assert other["count()"]["order"] == 1
- assert [{"count": 0.05}] in [attrs for _, attrs in other["epm()"]["data"]]
- assert [{"count": 3}] in [attrs for _, attrs in other["count()"]["data"]]
- def test_top_events_with_boolean(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "message", "device.charging"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 6
- for index, event in enumerate(self.events[:5]):
- message = event.message or event.transaction
- results = data[",".join(["False", message])]
- assert results["order"] == index
- assert [{"count": self.event_data[index]["count"]}] in [
- attrs for time, attrs in results["data"]
- ]
- other = data["Other"]
- assert other["order"] == 5
- assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
- def test_top_events_with_error_unhandled(self):
- self.login_as(user=self.user)
- project = self.create_project()
- prototype = load_data("android-ndk")
- prototype["event_id"] = "f" * 32
- prototype["logentry"] = {"formatted": "not handled"}
- prototype["exception"]["values"][0]["value"] = "not handled"
- prototype["exception"]["values"][0]["mechanism"]["handled"] = False
- prototype["timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
- self.store_event(data=prototype, project_id=project.id)
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "error.unhandled"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 2
- def test_top_events_with_timestamp(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "query": "event.type:default",
- "field": ["count()", "message", "timestamp"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 6
- # Transactions won't be in the results because of the query
- del self.events[4]
- del self.event_data[4]
- for index, event in enumerate(self.events[:5]):
- results = data[",".join([event.message, event.timestamp])]
- assert results["order"] == index
- assert [{"count": self.event_data[index]["count"]}] in [
- attrs for time, attrs in results["data"]
- ]
- other = data["Other"]
- assert other["order"] == 5
- assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
- def test_top_events_with_int(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "message", "transaction.duration"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 1
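- # transaction.duration (120000 ms, i.e. the 2-minute transaction) is joined into the result key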
- results = data[",".join([self.transaction.transaction, "120000"])]
- assert results["order"] == 0
- assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
- def test_top_events_with_user(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()", "user"],
- "field": ["user", "count()"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 5
- assert data["email:bar@example.com"]["order"] == 1
- assert [attrs for time, attrs in data["email:bar@example.com"]["data"]] == [
- [{"count": 7}],
- [{"count": 0}],
- ]
- assert [attrs for time, attrs in data["ip:127.0.0.1"]["data"]] == [
- [{"count": 3}],
- [{"count": 0}],
- ]
- def test_top_events_with_user_and_email(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()", "user"],
- "field": ["user", "user.email", "count()"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 5
- assert data["email:bar@example.com,bar@example.com"]["order"] == 1
- assert [attrs for time, attrs in data["email:bar@example.com,bar@example.com"]["data"]] == [
- [{"count": 7}],
- [{"count": 0}],
- ]
- assert [attrs for time, attrs in data["ip:127.0.0.1,None"]["data"]] == [
- [{"count": 3}],
- [{"count": 0}],
- ]
- def test_top_events_with_user_display(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["message", "user.display", "count()"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 6
- for index, event in enumerate(self.events[:5]):
- message = event.message or event.transaction
- user = self.event_data[index]["data"]["user"]
- results = data[
- ",".join([message, user.get("email", None) or user.get("ip_address", "None")])
- ]
- assert results["order"] == index
- assert [{"count": self.event_data[index]["count"]}] in [
- attrs for _, attrs in results["data"]
- ]
- other = data["Other"]
- assert other["order"] == 5
- assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
- @pytest.mark.skip(reason="A query with group_id will not return transactions")
- def test_top_events_none_filter(self):
- """When a field is None in one of the top events, make sure we filter by it
- In this case event[4] is a transaction and has no issue
- """
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "issue"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 5
- for index, event in enumerate(self.events[:5]):
- if event.group is None:
- issue = "unknown"
- else:
- issue = event.group.qualified_short_id
- results = data[issue]
- assert results["order"] == index
- assert [{"count": self.event_data[index]["count"]}] in [
- attrs for time, attrs in results["data"]
- ]
- @pytest.mark.skip(reason="Invalid query - transaction events don't have group_id field")
- def test_top_events_one_field_with_none(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "query": "event.type:transaction",
- "field": ["count()", "issue"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 1
- results = data["unknown"]
- assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
- assert results["order"] == 0
- def test_top_events_with_error_handled(self):
- data = self.event_data[0]
- data["data"]["level"] = "error"
- data["data"]["exception"] = {
- "values": [
- {
- "type": "ValidationError",
- "value": "Bad request",
- "mechanism": {"handled": True, "type": "generic"},
- }
- ]
- }
- self.store_event(data["data"], project_id=data["project"].id)
- data["data"]["exception"] = {
- "values": [
- {
- "type": "ValidationError",
- "value": "Bad request",
- "mechanism": {"handled": False, "type": "generic"},
- }
- ]
- }
- self.store_event(data["data"], project_id=data["project"].id)
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "error.handled"],
- "topEvents": "5",
- "query": "!event.type:transaction",
- },
- format="json",
- )
- assert response.status_code == 200, response.content
- res_data = response.data
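- # error.handled values serialize to "1" (handled) and "0" (unhandled) result keys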
- assert len(res_data) == 2
- results = res_data["1"]
- assert [attrs for time, attrs in results["data"]] == [[{"count": 20}], [{"count": 6}]]
- results = res_data["0"]
- assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
- def test_top_events_with_aggregate_condition(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["message", "count()"],
- "query": "count():>4",
- "topEvents": "5",
- },
- format="json",
- )
- assert response.status_code == 200, response.content
- data = response.data
- assert len(data) == 3
- for index, event in enumerate(self.events[:3]):
- message = event.message or event.transaction
- results = data[message]
- assert results["order"] == index
- assert [{"count": self.event_data[index]["count"]}] in [
- attrs for time, attrs in results["data"]
- ]
- @pytest.mark.xfail(reason="There's only 2 rows total, which mean there shouldn't be other")
- def test_top_events_with_to_other(self):
- version = "version -@'\" 1.2,3+(4)"
- version_escaped = "version -@'\\\" 1.2,3+(4)"
- # each special character is replaced with an underscore to build the alias
- version_alias = "version_______1_2_3__4_"
- # add an event in the current release
- event = self.event_data[0]
- event_data = event["data"].copy()
- event_data["event_id"] = uuid4().hex
- event_data["release"] = version
- self.store_event(event_data, project_id=event["project"].id)
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- # the double underscores around the version alias are due to the comma and quote
- "orderby": [f"-to_other_release__{version_alias}__others_current"],
- "field": [
- "count()",
- f'to_other(release,"{version_escaped}",others,current)',
- ],
- "topEvents": "2",
- },
- format="json",
- )
- assert response.status_code == 200, response.content
- data = response.data
- assert len(data) == 2
- current = data["current"]
- assert current["order"] == 1
- assert sum(attrs[0]["count"] for _, attrs in current["data"]) == 1
- others = data["others"]
- assert others["order"] == 0
- assert sum(attrs[0]["count"] for _, attrs in others["data"]) == sum(
- event_data["count"] for event_data in self.event_data
- )
- def test_top_events_with_equations(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "equation|count() / 100",
- "orderby": ["-count()"],
- "field": ["count()", "message", "user.email", "equation|count() / 100"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 6
- for index, event in enumerate(self.events[:5]):
- message = event.message or event.transaction
- results = data[
- ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
- ]
- assert results["order"] == index
- assert [{"count": self.event_data[index]["count"] / 100}] in [
- attrs for time, attrs in results["data"]
- ]
- other = data["Other"]
- assert other["order"] == 5
- assert [{"count": 0.03}] in [attrs for _, attrs in other["data"]]
- @mock.patch("sentry.snuba.discover.bulk_snuba_queries", return_value=[{"data": [], "meta": []}])
- @mock.patch(
- "sentry.search.events.builder.base.raw_snql_query",
- return_value={"data": [], "meta": []},
- )
- def test_invalid_interval(self, mock_raw_query, mock_bulk_query):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- format="json",
- data={
- "end": before_now().isoformat(),
- # 7,200 points for each event
- "start": before_now(seconds=7200).isoformat(),
- "field": ["count()", "issue"],
- "query": "",
- "interval": "1s",
- "yAxis": "count()",
- },
- )
- assert response.status_code == 200
- assert mock_bulk_query.call_count == 1
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- format="json",
- data={
- "end": before_now().isoformat(),
- "start": before_now(seconds=7200).isoformat(),
- "field": ["count()", "issue"],
- "query": "",
- "interval": "1s",
- "yAxis": "count()",
- # 7,200 points for each event * 2 series; the interval should be reset rather than erroring
- "topEvents": "2",
- },
- )
- assert response.status_code == 200
- assert mock_raw_query.call_count == 2
- # Should have reset to the default granularity for a 1h-24h range
- assert mock_raw_query.mock_calls[1].args[0].query.granularity.granularity == 300
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- format="json",
- data={
- "end": before_now().isoformat(),
- # 1999 points * 5 events should just be enough to not error
- "start": before_now(seconds=1999).isoformat(),
- "field": ["count()", "issue"],
- "query": "",
- "interval": "1s",
- "yAxis": "count()",
- "topEvents": "5",
- },
- )
- assert response.status_code == 200
- assert mock_raw_query.call_count == 4
- # Should've left the interval alone since we're just below the limit
- assert mock_raw_query.mock_calls[3].args[0].query.granularity.granularity == 1
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- format="json",
- data={
- "end": before_now().isoformat(),
- "start": before_now(hours=24).isoformat(),
- "field": ["count()", "issue"],
- "query": "",
- "interval": "0d",
- "yAxis": "count()",
- "topEvents": "5",
- },
- )
- assert response.status_code == 200
- assert mock_raw_query.call_count == 6
- # Should have defaulted to the 24h default of 5m (300s)
- assert mock_raw_query.mock_calls[5].args[0].query.granularity.granularity == 300
- def test_top_events_timestamp_fields(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- format="json",
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "timestamp", "timestamp.to_hour", "timestamp.to_day"],
- "topEvents": "5",
- },
- )
- assert response.status_code == 200
- data = response.data
- assert len(data) == 3
- # these are the timestamps corresponding to the events stored
- timestamps = [
- self.day_ago + timedelta(minutes=2),
- self.day_ago + timedelta(hours=1, minutes=2),
- self.day_ago + timedelta(minutes=4),
- ]
- timestamp_hours = [timestamp.replace(minute=0, second=0) for timestamp in timestamps]
- timestamp_days = [timestamp.replace(hour=0, minute=0, second=0) for timestamp in timestamps]
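- # timestamp.to_hour / timestamp.to_day truncate to the hour / day, mirroring the replace() calls above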
- for ts, ts_hr, ts_day in zip(timestamps, timestamp_hours, timestamp_days):
- key = f"{iso_format(ts)}+00:00,{iso_format(ts_day)}+00:00,{iso_format(ts_hr)}+00:00"
- count = sum(e["count"] for e in self.event_data if e["data"]["timestamp"] == ts)
- results = data[key]
- assert [{"count": count}] in [attrs for time, attrs in results["data"]]
- def test_top_events_other_with_matching_columns(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "tags[shared-tag]", "message"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 6
- for index, event in enumerate(self.events[:5]):
- message = event.message or event.transaction
- results = data[",".join([message, "yup"])]
- assert results["order"] == index
- assert [{"count": self.event_data[index]["count"]}] in [
- attrs for _, attrs in results["data"]
- ]
- other = data["Other"]
- assert other["order"] == 5
- assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
- def test_top_events_with_field_overlapping_other_key(self):
- transaction_data = load_data("transaction")
- transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
- transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
- transaction_data["transaction"] = OTHER_KEY
- for i in range(5):
- data = transaction_data.copy()
- data["event_id"] = "ab" + f"{i}" * 30
- data["contexts"]["trace"]["span_id"] = "ab" + f"{i}" * 14
- self.store_event(data, project_id=self.project.id)
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "message"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 6
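- # a genuine group whose value collides with OTHER_KEY is disambiguated with the field name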
- assert f"{OTHER_KEY} (message)" in data
- results = data[f"{OTHER_KEY} (message)"]
- assert [{"count": 5}] in [attrs for _, attrs in results["data"]]
- other = data["Other"]
- assert other["order"] == 5
- assert [{"count": 4}] in [attrs for _, attrs in other["data"]]
- def test_top_events_can_exclude_other_series(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["count()"],
- "field": ["count()", "message"],
- "topEvents": "5",
- "excludeOther": "1",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 5
- assert "Other" not in response.data
- @pytest.mark.xfail(reason="Started failing on ClickHouse 21.8")
- def test_top_events_with_equation_including_unselected_fields_passes_field_validation(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-equation[0]"],
- "field": ["count()", "message", "equation|count_unique(user) * 2"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 6
- other = data["Other"]
- assert other["order"] == 5
- assert [{"count": 4}] in [attrs for _, attrs in other["data"]]
- def test_top_events_boolean_condition_and_project_field(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["project", "count()"],
- "topEvents": "5",
- "query": "event.type:transaction (transaction:*a OR transaction:b*)",
- },
- format="json",
- )
- assert response.status_code == 200
- class OrganizationEventsStatsProfileFunctionDatasetEndpointTest(
- APITestCase, ProfilesSnubaTestCase, SearchIssueTestMixin
- ):
- endpoint = "sentry-api-0-organization-events-stats"
- def setUp(self):
- super().setUp()
- self.login_as(user=self.user)
- self.one_day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
- self.two_days_ago = before_now(days=2).replace(hour=10, minute=0, second=0, microsecond=0)
- self.three_days_ago = before_now(days=3).replace(hour=10, minute=0, second=0, microsecond=0)
- self.project = self.create_project()
- self.url = reverse(
- "sentry-api-0-organization-events-stats",
- kwargs={"organization_id_or_slug": self.project.organization.slug},
- )
- def test_functions_dataset_simple(self):
- transaction_function = self.store_functions(
- [
- {
- "self_times_ns": [100_000_000 for _ in range(100)],
- "package": "foo",
- "function": "bar",
- "in_app": True,
- },
- ],
- project=self.project,
- timestamp=self.two_days_ago - timedelta(hours=12),
- )
- continuous_timestamp = self.two_days_ago + timedelta(hours=12)
- continuous_function = self.store_functions_chunk(
- [
- {
- "self_times_ns": [200_000_000 for _ in range(100)],
- "package": "bar",
- "function": "bar",
- "thread_id": "1",
- "in_app": True,
- },
- ],
- project=self.project,
- timestamp=continuous_timestamp,
- )
- y_axes = [
- "cpm()",
- "p95(function.duration)",
- "all_examples()",
- ]
- data = {
- "dataset": "profileFunctions",
- "start": self.three_days_ago.isoformat(),
- "end": self.one_day_ago.isoformat(),
- "interval": "1d",
- "yAxis": y_axes,
- }
- response = self.client.get(self.url, data=data, format="json")
- assert response.status_code == 200, response.content
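- # cpm() = calls per minute: 200 samples (100 transaction-based + 100 continuous) over the 2-day window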
- assert sum(row[1][0]["count"] for row in response.data["cpm()"]["data"]) == pytest.approx(
- 200 / ((self.one_day_ago - self.three_days_ago).total_seconds() / 60), rel=1e-3
- )
- assert any(
- row[1][0]["count"] > 0 for row in response.data["p95(function.duration)"]["data"]
- )
- examples = [row[1][0]["count"] for row in response.data["all_examples()"]["data"]]
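- # transaction-based profiles are referenced by profile_id; continuous profile chunks by
- # profiler_id + thread_id and a start/end window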
- assert examples == [
- [
- {
- "profile_id": transaction_function["transaction"]["contexts"]["profile"][
- "profile_id"
- ],
- },
- ],
- [
- {
- "profiler_id": continuous_function["profiler_id"],
- "thread_id": "1",
- "start": continuous_timestamp.timestamp(),
- "end": (continuous_timestamp + timedelta(microseconds=200_000)).timestamp(),
- },
- ],
- ]
- for y_axis in y_axes:
- assert response.data[y_axis]["meta"]["fields"] == {
- "time": "date",
- "cpm": "number",
- "p95_function_duration": "duration",
- "all_examples": "string",
- }
- assert response.data[y_axis]["meta"]["units"] == {
- "time": None,
- "cpm": None,
- "p95_function_duration": "nanosecond",
- "all_examples": None,
- }
- class OrganizationEventsStatsTopNEventsProfileFunctionDatasetEndpointTest(
- APITestCase, ProfilesSnubaTestCase, SearchIssueTestMixin
- ):
- endpoint = "sentry-api-0-organization-events-stats"
- def setUp(self):
- super().setUp()
- self.login_as(user=self.user)
- self.one_day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
- self.two_days_ago = before_now(days=2).replace(hour=10, minute=0, second=0, microsecond=0)
- self.three_days_ago = before_now(days=3).replace(hour=10, minute=0, second=0, microsecond=0)
- self.project = self.create_project()
- self.url = reverse(
- "sentry-api-0-organization-events-stats",
- kwargs={"organization_id_or_slug": self.project.organization.slug},
- )
- def test_functions_dataset_simple(self):
- self.store_functions(
- [
- {
- "self_times_ns": [100 for _ in range(100)],
- "package": "pkg",
- "function": "foo",
- "in_app": True,
- },
- {
- "self_times_ns": [100 for _ in range(10)],
- "package": "pkg",
- "function": "bar",
- "in_app": True,
- },
- ],
- project=self.project,
- timestamp=self.two_days_ago,
- )
- data = {
- "dataset": "profileFunctions",
- "field": ["function", "count()"],
- "start": self.three_days_ago.isoformat(),
- "end": self.one_day_ago.isoformat(),
- "yAxis": ["cpm()", "p95(function.duration)"],
- "interval": "1d",
- "topEvents": "2",
- "excludeOther": "1",
- }
- response = self.client.get(self.url, data=data, format="json")
- assert response.status_code == 200, response.content
- assert sum(
- row[1][0]["count"] for row in response.data["foo"]["cpm()"]["data"]
- ) == pytest.approx(
- 100 / ((self.one_day_ago - self.three_days_ago).total_seconds() / 60), rel=1e-3
- )
- assert sum(
- row[1][0]["count"] for row in response.data["bar"]["cpm()"]["data"]
- ) == pytest.approx(
- 10 / ((self.one_day_ago - self.three_days_ago).total_seconds() / 60), rel=1e-3
- )
- assert any(
- row[1][0]["count"] > 0 for row in response.data["foo"]["p95(function.duration)"]["data"]
- )
- assert any(
- row[1][0]["count"] > 0 for row in response.data["bar"]["p95(function.duration)"]["data"]
- )
- class OrganizationEventsStatsTopNEventsErrors(APITestCase, SnubaTestCase):
- def setUp(self):
- super().setUp()
- self.login_as(user=self.user)
- self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
- self.project = self.create_project()
- self.project2 = self.create_project()
- self.user2 = self.create_user()
- self.event_data: list[_EventDataDict] = [
- {
- "data": {
- "message": "poof",
- "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
- "user": {"email": self.user.email},
- "tags": {"shared-tag": "yup"},
- "fingerprint": ["group1"],
- },
- "project": self.project2,
- "count": 7,
- },
- {
- "data": {
- "message": "voof",
- "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=2)),
- "fingerprint": ["group2"],
- "user": {"email": self.user2.email},
- "tags": {"shared-tag": "yup"},
- },
- "project": self.project2,
- "count": 6,
- },
- {
- "data": {
- "message": "very bad",
- "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
- "fingerprint": ["group3"],
- "user": {"email": "foo@example.com"},
- "tags": {"shared-tag": "yup"},
- },
- "project": self.project,
- "count": 5,
- },
- {
- "data": {
- "message": "oh no",
- "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
- "fingerprint": ["group4"],
- "user": {"email": "bar@example.com"},
- "tags": {"shared-tag": "yup"},
- },
- "project": self.project,
- "count": 4,
- },
- {
- "data": {
- "message": "kinda bad",
- "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
- "user": {"email": self.user.email},
- "tags": {"shared-tag": "yup"},
- "fingerprint": ["group7"],
- },
- "project": self.project,
- "count": 3,
- },
- # Not in the top 5
- {
- "data": {
- "message": "sorta bad",
- "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
- "fingerprint": ["group5"],
- "user": {"email": "bar@example.com"},
- "tags": {"shared-tag": "yup"},
- },
- "project": self.project,
- "count": 2,
- },
- {
- "data": {
- "message": "not so bad",
- "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
- "fingerprint": ["group6"],
- "user": {"email": "bar@example.com"},
- "tags": {"shared-tag": "yup"},
- },
- "project": self.project,
- "count": 1,
- },
- ]
- self.events = []
- for index, event_data in enumerate(self.event_data):
- data = event_data["data"].copy()
- for i in range(event_data["count"]):
- data["event_id"] = f"{index}{i}" * 16
- event = self.store_event(data, project_id=event_data["project"].id)
- self.events.append(event)
- self.enabled_features = {
- "organizations:discover-basic": True,
- }
- self.url = reverse(
- "sentry-api-0-organization-events-stats",
- kwargs={"organization_id_or_slug": self.project.organization.slug},
- )
- def test_simple_top_events(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "message", "user.email"],
- "dataset": "errors",
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 6
- for index, event in enumerate(self.events[:5]):
- message = event.message or event.transaction
- results = data[
- ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
- ]
- assert results["order"] == index
- assert [{"count": self.event_data[index]["count"]}] in [
- attrs for _, attrs in results["data"]
- ]
- other = data["Other"]
- assert other["order"] == 5
- assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
- def test_top_events_with_projects_other(self):
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "project"],
- "dataset": "errors",
- "topEvents": "1",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
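- # with topEvents=1, self.project (5+4+3+2+1 = 15 events in the first bucket) is the top
- # series; project2's 7 and 6 events per hour bucket are rolled into Other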
- assert set(data.keys()) == {"Other", self.project.slug}
- assert data[self.project.slug]["order"] == 0
- assert [attrs[0]["count"] for _, attrs in data[self.project.slug]["data"]] == [15, 0]
- assert data["Other"]["order"] == 1
- assert [attrs[0]["count"] for _, attrs in data["Other"]["data"]] == [7, 6]
- def test_top_events_with_issue(self):
- # delete a group to make sure that, if this happens, the value becomes "unknown"
- event_group = self.events[0].group
- event_group.delete()
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "message", "issue"],
- "topEvents": "5",
- "query": "",
- "dataset": "errors",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 6
- for index, event in enumerate(self.events[:4]):
- message = event.message
- # Because we deleted the group for event 0
- if index == 0 or event.group is None:
- issue = "unknown"
- else:
- issue = event.group.qualified_short_id
- results = data[",".join([issue, message])]
- assert results["order"] == index
- assert [{"count": self.event_data[index]["count"]}] in [
- attrs for time, attrs in results["data"]
- ]
- other = data["Other"]
- assert other["order"] == 5
- assert [attrs[0]["count"] for _, attrs in data["Other"]["data"]] == [3, 0]
- @mock.patch("sentry.models.GroupManager.get_issues_mapping")
- def test_top_events_with_unknown_issue(self, mock_issues_mapping):
- event = self.events[0]
- event_data = self.event_data[0]
- # ensure that the issue mapping returns None for the issue
- mock_issues_mapping.return_value = {event.group.id: None}
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "issue"],
- "topEvents": "5",
- # narrow the search to just one issue
- "query": f"issue.id:{event.group.id}",
- "dataset": "errors",
- },
- format="json",
- )
- assert response.status_code == 200, response.content
- data = response.data
- assert len(data) == 1
- results = data["unknown"]
- assert results["order"] == 0
- assert [{"count": event_data["count"]}] in [attrs for time, attrs in results["data"]]
- @mock.patch(
- "sentry.search.events.builder.base.raw_snql_query",
- side_effect=[{"data": [{"issue.id": 1}], "meta": []}, {"data": [], "meta": []}],
- )
- def test_top_events_with_issue_check_query_conditions(self, mock_query):
- """ "Intentionally separate from test_top_events_with_issue
- This is to test against a bug where the condition for issues wasn't included and we'd be missing data for
- the interval since we'd cap out the max rows. This was not caught by the previous test since the results
- would still be correct given the smaller interval & lack of data
- """
- with self.feature(self.enabled_features):
- self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "message", "issue"],
- "topEvents": "5",
- "query": "!event.type:transaction",
- "dataset": "errors",
- },
- format="json",
- )
- assert (
- Condition(
- Function(
- "coalesce",
- [Column("group_id", entity=Entity("events", alias="events")), 0],
- "issue.id",
- ),
- Op.IN,
- [1],
- )
- in mock_query.mock_calls[1].args[0].query.where
- )
- def test_group_id_tag_simple(self):
- event_data: _EventDataDict = {
- "data": {
- "message": "poof",
- "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
- "user": {"email": self.user.email},
- "tags": {"group_id": "the tag"},
- "fingerprint": ["group1"],
- },
- "project": self.project2,
- "count": 7,
- }
- for i in range(event_data["count"]):
- event_data["data"]["event_id"] = f"a{i}" * 16
- self.store_event(event_data["data"], project_id=event_data["project"].id)
- data = {
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "group_id"],
- "topEvents": "5",
- "partial": "1",
- }
- with self.feature(self.enabled_features):
- response = self.client.get(self.url, data, format="json")
- assert response.status_code == 200, response.content
- assert response.data["the tag"]["data"][0][1] == [{"count": 7}]
- data["query"] = 'group_id:"the tag"'
- with self.feature(self.enabled_features):
- response = self.client.get(self.url, data, format="json")
- assert response.status_code == 200
- assert response.data["the tag"]["data"][0][1] == [{"count": 7}]
- data["query"] = "group_id:abc"
- with self.feature(self.enabled_features):
- response = self.client.get(self.url, data, format="json")
- assert response.status_code == 200
- assert all(interval[1][0]["count"] == 0 for interval in response.data["data"])
- def test_top_events_with_error_unhandled(self):
- self.login_as(user=self.user)
- project = self.create_project()
- prototype = load_data("android-ndk")
- prototype["event_id"] = "f" * 32
- prototype["logentry"] = {"formatted": "not handled"}
- prototype["exception"]["values"][0]["value"] = "not handled"
- prototype["exception"]["values"][0]["mechanism"]["handled"] = False
- prototype["timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
- self.store_event(data=prototype, project_id=project.id)
- with self.feature(self.enabled_features):
- response = self.client.get(
- self.url,
- data={
- "start": self.day_ago.isoformat(),
- "end": (self.day_ago + timedelta(hours=2)).isoformat(),
- "interval": "1h",
- "yAxis": "count()",
- "orderby": ["-count()"],
- "field": ["count()", "error.unhandled"],
- "topEvents": "5",
- },
- format="json",
- )
- data = response.data
- assert response.status_code == 200, response.content
- assert len(data) == 2
|