- import math
- from base64 import b64encode
- from datetime import timedelta
- from unittest import mock
- import pytest
- from django.test import override_settings
- from django.urls import reverse
- from django.utils import timezone
- from freezegun import freeze_time
- from pytz import utc
- from snuba_sdk.column import Column
- from snuba_sdk.function import Function
- from sentry.discover.models import TeamKeyTransaction
- from sentry.models import ApiKey, ProjectTeam, ProjectTransactionThreshold, ReleaseStages
- from sentry.models.transaction_threshold import (
- ProjectTransactionThresholdOverride,
- TransactionMetric,
- )
- from sentry.search.events import constants
- from sentry.testutils import APITestCase, SnubaTestCase
- from sentry.testutils.helpers import parse_link_header
- from sentry.testutils.helpers.datetime import before_now, iso_format
- from sentry.testutils.skips import requires_not_arm64
- from sentry.utils import json
- from sentry.utils.samples import load_data
- from sentry.utils.snuba import QueryExecutionError, QueryIllegalTypeOfArgument, RateLimitExceeded
- MAX_QUERYABLE_TRANSACTION_THRESHOLDS = 1
- class OrganizationEventsEndpointTest(APITestCase, SnubaTestCase):
- viewname = "sentry-api-0-organization-events"
- referrer = "api.organization-events"
- def setUp(self):
- super().setUp()
- self.ten_mins_ago = iso_format(before_now(minutes=10))
- self.eleven_mins_ago = iso_format(before_now(minutes=11))
- self.transaction_data = load_data("transaction", timestamp=before_now(minutes=10))
- self.features = {}
- def client_get(self, *args, **kwargs):
- return self.client.get(*args, **kwargs)
- def reverse_url(self):
- return reverse(
- self.viewname,
- kwargs={"organization_slug": self.organization.slug},
- )
- def do_request(self, query, features=None):
- if features is None:
- features = {"organizations:discover-basic": True}
- features.update(self.features)
- self.login_as(user=self.user)
- with self.feature(features):
- return self.client_get(self.reverse_url(), query, format="json")
- def load_data(self, platform="transaction", timestamp=None, duration=None, **kwargs):
- if timestamp is None:
- timestamp = before_now(minutes=1)
- start_timestamp = None
- if duration is not None:
- start_timestamp = timestamp - duration
- start_timestamp = start_timestamp - timedelta(
- microseconds=start_timestamp.microsecond % 1000
- )
- return load_data(platform, timestamp=timestamp, start_timestamp=start_timestamp, **kwargs)
- def test_no_projects(self):
- response = self.do_request({})
- assert response.status_code == 200, response.content
- assert len(response.data) == 0
- def test_api_key_request(self):
- self.store_event(
- data={"event_id": "a" * 32, "environment": "staging", "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- )
- # Project ID cannot be inferred when using an org API key, so that must
- # be passed in the parameters
- api_key = ApiKey.objects.create(organization=self.organization, scope_list=["org:read"])
- query = {"field": ["project.name", "environment"], "project": [self.project.id]}
- url = self.reverse_url()
- response = self.client_get(
- url,
- query,
- format="json",
- HTTP_AUTHORIZATION=b"Basic " + b64encode(f"{api_key.key}:".encode()),
- )
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["project.name"] == self.project.slug
- def test_performance_view_feature(self):
- self.store_event(
- data={"event_id": "a" * 32, "timestamp": self.ten_mins_ago, "fingerprint": ["group1"]},
- project_id=self.project.id,
- )
- query = {"field": ["id", "project.id"], "project": [self.project.id]}
- response = self.do_request(query)
- assert response.status_code == 200
- assert len(response.data["data"]) == 1
- def test_multi_project_feature_gate_rejection(self):
- team = self.create_team(organization=self.organization, members=[self.user])
- project = self.create_project(organization=self.organization, teams=[team])
- project2 = self.create_project(organization=self.organization, teams=[team])
- query = {"field": ["id", "project.id"], "project": [project.id, project2.id]}
- response = self.do_request(query)
- assert response.status_code == 400
- assert "events from multiple projects" in response.data["detail"]
- def test_invalid_search_terms(self):
- self.create_project()
- query = {"field": ["id"], "query": "hi \n there"}
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- assert (
- response.data["detail"]
- == "Parse error at 'hi \n ther' (column 4). This is commonly caused by unmatched parentheses. Enclose any text in double quotes."
- )
- def test_invalid_trace_span(self):
- self.create_project()
- query = {"field": ["id"], "query": "trace.span:invalid"}
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- assert (
- response.data["detail"]
- == "trace.span must be a valid 16 character hex (containing only digits, or a-f characters)"
- )
- query = {"field": ["id"], "query": "trace.parent_span:invalid"}
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- assert (
- response.data["detail"]
- == "trace.parent_span must be a valid 16 character hex (containing only digits, or a-f characters)"
- )
- query = {"field": ["id"], "query": "trace.span:*"}
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- assert (
- response.data["detail"] == "Wildcard conditions are not permitted on `trace.span` field"
- )
- query = {"field": ["id"], "query": "trace.parent_span:*"}
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- assert (
- response.data["detail"]
- == "Wildcard conditions are not permitted on `trace.parent_span` field"
- )
- def test_has_trace_context(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "contexts": {
- "trace": {
- "span_id": "a" * 16,
- "trace_id": "b" * 32,
- },
- },
- },
- project_id=self.project.id,
- )
- query = {"field": ["id", "trace.parent_span"], "query": "has:trace.span"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["id"] == "a" * 32
- query = {"field": ["id"], "query": "has:trace.parent_span"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 0
- def test_not_has_trace_context(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "contexts": {
- "trace": {
- "span_id": "a" * 16,
- "trace_id": "b" * 32,
- },
- },
- },
- project_id=self.project.id,
- )
- query = {"field": ["id", "trace.parent_span"], "query": "!has:trace.span"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 0
- query = {"field": ["id"], "query": "!has:trace.parent_span"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["id"] == "a" * 32
- def test_parent_span_id_in_context(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "contexts": {
- "trace": {
- "span_id": "a" * 16,
- "trace_id": "b" * 32,
- "parent_span_id": "c" * 16,
- },
- },
- },
- project_id=self.project.id,
- )
- query = {"field": ["id"], "query": f"trace.parent_span:{'c' * 16}"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["id"] == "a" * 32
- @mock.patch("sentry.search.events.builder.raw_snql_query")
- def test_handling_snuba_errors(self, mock_snql_query):
- self.create_project()
- mock_snql_query.side_effect = RateLimitExceeded("test")
- query = {"field": ["id", "timestamp"], "orderby": ["-timestamp", "-id"]}
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- assert response.data["detail"] == constants.TIMEOUT_ERROR_MESSAGE
- mock_snql_query.side_effect = QueryExecutionError("test")
- query = {"field": ["id", "timestamp"], "orderby": ["-timestamp", "-id"]}
- response = self.do_request(query)
- assert response.status_code == 500, response.content
- assert response.data["detail"] == "Internal error. Your query failed to run."
- mock_snql_query.side_effect = QueryIllegalTypeOfArgument("test")
- query = {"field": ["id", "timestamp"], "orderby": ["-timestamp", "-id"]}
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- assert response.data["detail"] == "Invalid query. Argument to function is wrong type."
- def test_out_of_retention(self):
- self.create_project()
- with self.options({"system.event-retention-days": 10}):
- query = {
- "field": ["id", "timestamp"],
- "orderby": ["-timestamp", "-id"],
- "start": iso_format(before_now(days=20)),
- "end": iso_format(before_now(days=15)),
- }
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- assert response.data["detail"] == "Invalid date range. Please try a more recent date range."
- def test_raw_data(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "environment": "staging",
- "timestamp": self.eleven_mins_ago,
- "user": {"ip_address": "127.0.0.1", "email": "foo@example.com"},
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "b" * 32,
- "environment": "staging",
- "timestamp": self.ten_mins_ago,
- "user": {"ip_address": "127.0.0.1", "email": "foo@example.com"},
- },
- project_id=self.project.id,
- )
- query = {
- "field": ["id", "project.id", "user.email", "user.ip", "timestamp"],
- "orderby": "-timestamp",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- assert data[0]["id"] == "b" * 32
- assert data[0]["project.id"] == self.project.id
- assert data[0]["user.email"] == "foo@example.com"
- assert "project.name" not in data[0], "project.id does not auto select name"
- assert "project" not in data[0]
- meta = response.data["meta"]
- field_meta = meta["fields"]
- assert field_meta["id"] == "string"
- assert field_meta["user.email"] == "string"
- assert field_meta["user.ip"] == "string"
- assert field_meta["timestamp"] == "date"
- def test_project_name(self):
- self.store_event(
- data={"event_id": "a" * 32, "environment": "staging", "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- )
- query = {"field": ["project.name", "environment"]}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["project.name"] == self.project.slug
- assert "project.id" not in response.data["data"][0]
- assert response.data["data"][0]["environment"] == "staging"
- def test_project_without_name(self):
- self.store_event(
- data={"event_id": "a" * 32, "environment": "staging", "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- )
- query = {"field": ["project", "environment"]}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["project"] == self.project.slug
- assert response.data["meta"]["fields"]["project"] == "string"
- assert "project.id" not in response.data["data"][0]
- assert response.data["data"][0]["environment"] == "staging"
- def test_project_in_query(self):
- self.store_event(
- data={"event_id": "a" * 32, "environment": "staging", "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- )
- query = {
- "field": ["project", "count()"],
- "query": f'project:"{self.project.slug}"',
- "statsPeriod": "14d",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["project"] == self.project.slug
- assert "project.id" not in response.data["data"][0]
- def test_project_in_query_not_in_header(self):
- project = self.create_project()
- other_project = self.create_project()
- self.store_event(
- data={"event_id": "a" * 32, "environment": "staging", "timestamp": self.ten_mins_ago},
- project_id=project.id,
- )
- query = {
- "field": ["project", "count()"],
- "query": 'project:"%s"' % project.slug,
- "statsPeriod": "14d",
- "project": other_project.id,
- }
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- assert (
- response.data["detail"]
- == f"Invalid query. Project(s) {project.slug} do not exist or are not actively selected."
- )
- def test_project_in_query_does_not_exist(self):
- self.create_project()
- query = {"field": ["project", "count()"], "query": "project:morty", "statsPeriod": "14d"}
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- assert (
- response.data["detail"]
- == "Invalid query. Project(s) morty do not exist or are not actively selected."
- )
- def test_not_project_in_query_but_in_header(self):
- team = self.create_team(organization=self.organization, members=[self.user])
- project = self.create_project(organization=self.organization, teams=[team])
- project2 = self.create_project(organization=self.organization, teams=[team])
- self.store_event(
- data={"event_id": "a" * 32, "timestamp": self.ten_mins_ago, "fingerprint": ["group1"]},
- project_id=project.id,
- )
- self.store_event(
- data={"event_id": "b" * 32, "timestamp": self.ten_mins_ago, "fingerprint": ["group2"]},
- project_id=project2.id,
- )
- query = {
- "field": ["id", "project.id"],
- "project": [project.id],
- "query": f"!project:{project2.slug}",
- }
- response = self.do_request(query)
- assert response.status_code == 200
- assert response.data["data"] == [{"id": "a" * 32, "project.id": project.id}]
- def test_not_project_in_query_with_all_projects(self):
- team = self.create_team(organization=self.organization, members=[self.user])
- project = self.create_project(organization=self.organization, teams=[team])
- project2 = self.create_project(organization=self.organization, teams=[team])
- self.store_event(
- data={"event_id": "a" * 32, "timestamp": self.ten_mins_ago, "fingerprint": ["group1"]},
- project_id=project.id,
- )
- self.store_event(
- data={"event_id": "b" * 32, "timestamp": self.ten_mins_ago, "fingerprint": ["group2"]},
- project_id=project2.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": ["id", "project.id"],
- "project": [-1],
- "query": f"!project:{project2.slug}",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200
- assert response.data["data"] == [{"id": "a" * 32, "project.id": project.id}]
- def test_project_condition_used_for_automatic_filters(self):
- self.store_event(
- data={"event_id": "a" * 32, "environment": "staging", "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- )
- query = {
- "field": ["project", "count()"],
- "query": f'project:"{self.project.slug}"',
- "statsPeriod": "14d",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["project"] == self.project.slug
- assert "project.id" not in response.data["data"][0]
- def test_auto_insert_project_name_when_event_id_present(self):
- self.store_event(
- data={"event_id": "a" * 32, "environment": "staging", "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- )
- query = {"field": ["id"], "statsPeriod": "1h"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert response.data["data"] == [{"project.name": self.project.slug, "id": "a" * 32}]
- def test_auto_insert_project_name_when_event_id_present_with_aggregate(self):
- self.store_event(
- data={"event_id": "a" * 32, "environment": "staging", "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- )
- query = {"field": ["id", "count()"], "statsPeriod": "1h"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert response.data["data"] == [
- {"project.name": self.project.slug, "id": "a" * 32, "count()": 1}
- ]
- def test_event_id_with_in_search(self):
- self.store_event(
- data={"event_id": "a" * 32, "environment": "staging1", "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- )
- self.store_event(
- data={"event_id": "b" * 32, "environment": "staging2", "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- )
- # Should not show up
- self.store_event(
- data={"event_id": "c" * 32, "environment": "staging3", "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- )
- query = {
- "field": ["id", "environment"],
- "statsPeriod": "1h",
- "query": f"id:[{'a' * 32}, {'b' * 32}]",
- "orderby": "environment",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 2
- assert response.data["data"][0]["id"] == "a" * 32
- assert response.data["data"][1]["id"] == "b" * 32
- def test_user_search(self):
- self.transaction_data["user"] = {
- "email": "foo@example.com",
- "id": "123",
- "ip_address": "127.0.0.1",
- "username": "foo",
- }
- self.store_event(self.transaction_data, project_id=self.project.id)
- fields = {
- "email": "user.email",
- "id": "user.id",
- "ip_address": "user.ip",
- "username": "user.username",
- }
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- for key, value in self.transaction_data["user"].items():
- field = fields[key]
- query = {
- "field": ["project", "user"],
- "query": f"{field}:{value}",
- "statsPeriod": "14d",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["project"] == self.project.slug
- assert response.data["data"][0]["user"] == "id:123"
- def test_has_user(self):
- self.store_event(self.transaction_data, project_id=self.project.id)
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- for value in self.transaction_data["user"].values():
- query = {"field": ["project", "user"], "query": "has:user", "statsPeriod": "14d"}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["user"] == "ip:{}".format(
- self.transaction_data["user"]["ip_address"]
- )
- def test_team_param_no_access(self):
- org = self.create_organization(
- owner=self.user,  # org owner differs from the user logged in below
- name="foo",
- flags=0, # disable default allow_joinleave
- )
- project = self.create_project(name="baz", organization=org)
- user = self.create_user()
- self.login_as(user=user, superuser=False)
- team = self.create_team(organization=org, name="Team Bar")
- project.add_team(team)
- self.store_event(
- data={"event_id": "a" * 32, "timestamp": self.ten_mins_ago, "fingerprint": ["group1"]},
- project_id=project.id,
- )
- query = {"field": ["id", "project.id"], "project": [project.id], "team": [team.id]}
- response = self.do_request(query)
- assert response.status_code == 403, response.content
- assert response.data["detail"] == "You do not have permission to perform this action."
- def test_team_is_nan(self):
- query = {"field": ["id"], "project": [self.project.id], "team": [math.nan]}
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- assert response.data["detail"] == "Invalid Team ID: nan"
- def test_comparison_operators_on_numeric_field(self):
- event = self.store_event(
- {"timestamp": iso_format(before_now(minutes=1))}, project_id=self.project.id
- )
- query = {"field": ["issue"], "query": f"issue.id:>{event.group.id - 1}"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["issue"] == event.group.qualified_short_id
- query = {"field": ["issue"], "query": f"issue.id:>{event.group.id}"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 0
- def test_negation_on_numeric_field_excludes_issue(self):
- event = self.store_event({"timestamp": self.ten_mins_ago}, project_id=self.project.id)
- query = {"field": ["issue"], "query": f"issue.id:{event.group.id}"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["issue"] == event.group.qualified_short_id
- query = {"field": ["issue"], "query": f"!issue.id:{event.group.id}"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 0
- def test_negation_on_numeric_in_filter_excludes_issue(self):
- event = self.store_event({"timestamp": self.ten_mins_ago}, project_id=self.project.id)
- query = {"field": ["issue"], "query": f"issue.id:[{event.group.id}]"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["issue"] == event.group.qualified_short_id
- query = {"field": ["issue"], "query": f"!issue.id:[{event.group.id}]"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 0
- def test_negation_on_duration_filter_excludes_transaction(self):
- event = self.store_event(self.transaction_data, project_id=self.project.id)
- duration = int(event.data.get("timestamp") - event.data.get("start_timestamp")) * 1000
- query = {"field": ["transaction"], "query": f"transaction.duration:{duration}"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["id"] == event.event_id
- query = {"field": ["transaction"], "query": f"!transaction.duration:{duration}"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 0
- def test_has_issue(self):
- event = self.store_event({"timestamp": self.ten_mins_ago}, project_id=self.project.id)
- self.store_event(self.transaction_data, project_id=self.project.id)
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- # should only show 1 event of type default
- query = {"field": ["project", "issue"], "query": "has:issue", "statsPeriod": "14d"}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["issue"] == event.group.qualified_short_id
- # should only show 1 event of type default
- query = {
- "field": ["project", "issue"],
- "query": "event.type:default has:issue",
- "statsPeriod": "14d",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["issue"] == event.group.qualified_short_id
- # should show no results because the default event has an issue
- query = {
- "field": ["project", "issue"],
- "query": "event.type:default !has:issue",
- "statsPeriod": "14d",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 0
- # should show no results because no transactions have issues
- query = {
- "field": ["project", "issue"],
- "query": "event.type:transaction has:issue",
- "statsPeriod": "14d",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 0
- # should only show 1 event of type transaction since they don't have issues
- query = {
- "field": ["project", "issue"],
- "query": "event.type:transaction !has:issue",
- "statsPeriod": "14d",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["issue"] == "unknown"
- @pytest.mark.skip("Cannot look up group_id of transaction events")
- def test_unknown_issue(self):
- event = self.store_event({"timestamp": self.ten_mins_ago}, project_id=self.project.id)
- self.store_event(self.transaction_data, project_id=self.project.id)
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {"field": ["project", "issue"], "query": "issue:unknown", "statsPeriod": "14d"}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["issue"] == "unknown"
- query = {"field": ["project", "issue"], "query": "!issue:unknown", "statsPeriod": "14d"}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["issue"] == event.group.qualified_short_id
- def test_negative_user_search(self):
- user_data = {"email": "foo@example.com", "id": "123", "username": "foo"}
- # Load an event with data that shouldn't match
- data = self.transaction_data.copy()
- data["transaction"] = "/transactions/nomatch"
- event_user = user_data.copy()
- event_user["id"] = "undefined"
- data["user"] = event_user
- self.store_event(data, project_id=self.project.id)
- # Load a matching event
- data = self.transaction_data.copy()
- data["transaction"] = "/transactions/matching"
- data["user"] = user_data
- self.store_event(data, project_id=self.project.id)
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": ["project", "user"],
- "query": '!user:"id:undefined"',
- "statsPeriod": "14d",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["user"] == "id:{}".format(user_data["id"])
- assert "user.email" not in response.data["data"][0]
- assert "user.id" not in response.data["data"][0]
- def test_not_project_in_query(self):
- project1 = self.create_project()
- project2 = self.create_project()
- self.store_event(
- data={"event_id": "a" * 32, "environment": "staging", "timestamp": self.ten_mins_ago},
- project_id=project1.id,
- )
- self.store_event(
- data={"event_id": "b" * 32, "environment": "staging", "timestamp": self.ten_mins_ago},
- project_id=project2.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": ["project", "count()"],
- "query": '!project:"%s"' % project1.slug,
- "statsPeriod": "14d",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["project"] == project2.slug
- assert "project.id" not in response.data["data"][0]
- def test_error_handled_condition(self):
- prototype = self.load_data(platform="android-ndk")
- events = (
- ("a" * 32, "not handled", False),
- ("b" * 32, "was handled", True),
- ("c" * 32, "undefined", None),
- )
- for event in events:
- prototype["event_id"] = event[0]
- prototype["message"] = event[1]
- prototype["exception"]["values"][0]["value"] = event[1]
- prototype["exception"]["values"][0]["mechanism"]["handled"] = event[2]
- prototype["timestamp"] = self.ten_mins_ago
- self.store_event(data=prototype, project_id=self.project.id)
- with self.feature("organizations:discover-basic"):
- query = {
- "field": ["message", "error.handled"],
- "query": "error.handled:0",
- "orderby": "message",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.data
- assert 1 == len(response.data["data"])
- assert [0] == response.data["data"][0]["error.handled"]
- with self.feature("organizations:discover-basic"):
- query = {
- "field": ["message", "error.handled"],
- "query": "error.handled:1",
- "orderby": "message",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.data
- assert 2 == len(response.data["data"])
- assert [None] == response.data["data"][0]["error.handled"]
- assert [1] == response.data["data"][1]["error.handled"]
- def test_error_unhandled_condition(self):
- prototype = self.load_data(platform="android-ndk")
- events = (
- ("a" * 32, "not handled", False),
- ("b" * 32, "was handled", True),
- ("c" * 32, "undefined", None),
- )
- for event in events:
- prototype["event_id"] = event[0]
- prototype["message"] = event[1]
- prototype["exception"]["values"][0]["value"] = event[1]
- prototype["exception"]["values"][0]["mechanism"]["handled"] = event[2]
- prototype["timestamp"] = self.ten_mins_ago
- self.store_event(data=prototype, project_id=self.project.id)
- with self.feature("organizations:discover-basic"):
- query = {
- "field": ["message", "error.unhandled", "error.handled"],
- "query": "error.unhandled:true",
- "orderby": "message",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.data
- assert 1 == len(response.data["data"])
- assert [0] == response.data["data"][0]["error.handled"]
- assert 1 == response.data["data"][0]["error.unhandled"]
- with self.feature("organizations:discover-basic"):
- query = {
- "field": ["message", "error.handled", "error.unhandled"],
- "query": "error.unhandled:false",
- "orderby": "message",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.data
- assert 2 == len(response.data["data"])
- assert [None] == response.data["data"][0]["error.handled"]
- assert 0 == response.data["data"][0]["error.unhandled"]
- assert [1] == response.data["data"][1]["error.handled"]
- assert 0 == response.data["data"][1]["error.unhandled"]
- def test_implicit_groupby(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "timestamp": self.eleven_mins_ago,
- "fingerprint": ["group_1"],
- },
- project_id=self.project.id,
- )
- event1 = self.store_event(
- data={"event_id": "b" * 32, "timestamp": self.ten_mins_ago, "fingerprint": ["group_1"]},
- project_id=self.project.id,
- )
- event2 = self.store_event(
- data={"event_id": "c" * 32, "timestamp": self.ten_mins_ago, "fingerprint": ["group_2"]},
- project_id=self.project.id,
- )
- query = {"field": ["count(id)", "project.id", "issue.id"], "orderby": "issue.id"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 2
- data = response.data["data"]
- assert data[0] == {
- "project.id": self.project.id,
- "issue.id": event1.group_id,
- "count(id)": 2,
- }
- assert data[1] == {
- "project.id": self.project.id,
- "issue.id": event2.group_id,
- "count(id)": 1,
- }
- meta = response.data["meta"]["fields"]
- assert meta["count(id)"] == "integer"
- def test_orderby(self):
- self.store_event(
- data={"event_id": "a" * 32, "timestamp": self.eleven_mins_ago},
- project_id=self.project.id,
- )
- self.store_event(
- data={"event_id": "b" * 32, "timestamp": self.ten_mins_ago}, project_id=self.project.id
- )
- self.store_event(
- data={"event_id": "c" * 32, "timestamp": self.ten_mins_ago}, project_id=self.project.id
- )
- query = {"field": ["id", "timestamp"], "orderby": ["-timestamp", "-id"]}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert data[0]["id"] == "c" * 32
- assert data[1]["id"] == "b" * 32
- assert data[2]["id"] == "a" * 32
- def test_sort_title(self):
- self.store_event(
- data={"event_id": "a" * 32, "message": "zlast", "timestamp": self.eleven_mins_ago},
- project_id=self.project.id,
- )
- self.store_event(
- data={"event_id": "b" * 32, "message": "second", "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- )
- self.store_event(
- data={"event_id": "c" * 32, "message": "first", "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- )
- query = {"field": ["id", "title"], "sort": "title"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert data[0]["id"] == "c" * 32
- assert data[1]["id"] == "b" * 32
- assert data[2]["id"] == "a" * 32
- def test_sort_invalid(self):
- self.create_project()
- query = {"field": ["id"], "sort": "garbage"}
- response = self.do_request(query)
- assert response.status_code == 400
- assert "sort by" in response.data["detail"]
- def test_latest_release_alias(self):
- event1 = self.store_event(
- data={"event_id": "a" * 32, "timestamp": self.eleven_mins_ago, "release": "0.8"},
- project_id=self.project.id,
- )
- query = {"field": ["issue.id", "release"], "query": "release:latest"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert data[0]["issue.id"] == event1.group_id
- assert data[0]["release"] == "0.8"
- event2 = self.store_event(
- data={"event_id": "a" * 32, "timestamp": self.ten_mins_ago, "release": "0.9"},
- project_id=self.project.id,
- )
- query = {"field": ["issue.id", "release"], "query": "release:latest"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert data[0]["issue.id"] == event2.group_id
- assert data[0]["release"] == "0.9"
- def test_semver(self):
- release_1 = self.create_release(version="test@1.2.3")
- release_2 = self.create_release(version="test@1.2.4")
- release_3 = self.create_release(version="test@1.2.5")
- release_1_e_1 = self.store_event(
- data={"release": release_1.version, "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- ).event_id
- release_1_e_2 = self.store_event(
- data={"release": release_1.version, "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- ).event_id
- release_2_e_1 = self.store_event(
- data={"release": release_2.version, "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- ).event_id
- release_2_e_2 = self.store_event(
- data={"release": release_2.version, "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- ).event_id
- release_3_e_1 = self.store_event(
- data={"release": release_3.version, "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- ).event_id
- release_3_e_2 = self.store_event(
- data={"release": release_3.version, "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- ).event_id
- query = {"field": ["id"], "query": f"{constants.SEMVER_ALIAS}:>1.2.3"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert {r["id"] for r in response.data["data"]} == {
- release_2_e_1,
- release_2_e_2,
- release_3_e_1,
- release_3_e_2,
- }
- query = {"field": ["id"], "query": f"{constants.SEMVER_ALIAS}:>=1.2.3"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert {r["id"] for r in response.data["data"]} == {
- release_1_e_1,
- release_1_e_2,
- release_2_e_1,
- release_2_e_2,
- release_3_e_1,
- release_3_e_2,
- }
- query = {"field": ["id"], "query": f"{constants.SEMVER_ALIAS}:<1.2.4"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert {r["id"] for r in response.data["data"]} == {
- release_1_e_1,
- release_1_e_2,
- }
- query = {"field": ["id"], "query": f"{constants.SEMVER_ALIAS}:1.2.3"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert {r["id"] for r in response.data["data"]} == {
- release_1_e_1,
- release_1_e_2,
- }
- query = {"field": ["id"], "query": f"!{constants.SEMVER_ALIAS}:1.2.3"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert {r["id"] for r in response.data["data"]} == {
- release_2_e_1,
- release_2_e_2,
- release_3_e_1,
- release_3_e_2,
- }
- def test_release_stage(self):
- replaced_release = self.create_release(
- version="replaced_release",
- environments=[self.environment],
- adopted=timezone.now(),
- unadopted=timezone.now(),
- )
- adopted_release = self.create_release(
- version="adopted_release",
- environments=[self.environment],
- adopted=timezone.now(),
- )
- self.create_release(version="not_adopted_release", environments=[self.environment])
- adopted_release_e_1 = self.store_event(
- data={
- "release": adopted_release.version,
- "timestamp": self.ten_mins_ago,
- "environment": self.environment.name,
- },
- project_id=self.project.id,
- ).event_id
- adopted_release_e_2 = self.store_event(
- data={
- "release": adopted_release.version,
- "timestamp": self.ten_mins_ago,
- "environment": self.environment.name,
- },
- project_id=self.project.id,
- ).event_id
- replaced_release_e_1 = self.store_event(
- data={
- "release": replaced_release.version,
- "timestamp": self.ten_mins_ago,
- "environment": self.environment.name,
- },
- project_id=self.project.id,
- ).event_id
- replaced_release_e_2 = self.store_event(
- data={
- "release": replaced_release.version,
- "timestamp": self.ten_mins_ago,
- "environment": self.environment.name,
- },
- project_id=self.project.id,
- ).event_id
- query = {
- "field": ["id"],
- "query": f"{constants.RELEASE_STAGE_ALIAS}:{ReleaseStages.ADOPTED}",
- "environment": [self.environment.name],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert {r["id"] for r in response.data["data"]} == {
- adopted_release_e_1,
- adopted_release_e_2,
- }
- query = {
- "field": ["id"],
- "query": f"!{constants.RELEASE_STAGE_ALIAS}:{ReleaseStages.LOW_ADOPTION}",
- "environment": [self.environment.name],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert {r["id"] for r in response.data["data"]} == {
- adopted_release_e_1,
- adopted_release_e_2,
- replaced_release_e_1,
- replaced_release_e_2,
- }
- query = {
- "field": ["id"],
- "query": f"{constants.RELEASE_STAGE_ALIAS}:[{ReleaseStages.ADOPTED}, {ReleaseStages.REPLACED}]",
- "environment": [self.environment.name],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert {r["id"] for r in response.data["data"]} == {
- adopted_release_e_1,
- adopted_release_e_2,
- replaced_release_e_1,
- replaced_release_e_2,
- }
- def test_semver_package(self):
- release_1 = self.create_release(version="test@1.2.3")
- release_2 = self.create_release(version="test2@1.2.4")
- release_1_e_1 = self.store_event(
- data={"release": release_1.version, "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- ).event_id
- release_1_e_2 = self.store_event(
- data={"release": release_1.version, "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- ).event_id
- release_2_e_1 = self.store_event(
- data={"release": release_2.version, "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- ).event_id
- query = {"field": ["id"], "query": f"{constants.SEMVER_PACKAGE_ALIAS}:test"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert {r["id"] for r in response.data["data"]} == {
- release_1_e_1,
- release_1_e_2,
- }
- query = {"field": ["id"], "query": f"{constants.SEMVER_PACKAGE_ALIAS}:test2"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert {r["id"] for r in response.data["data"]} == {
- release_2_e_1,
- }
- def test_semver_build(self):
- release_1 = self.create_release(version="test@1.2.3+123")
- release_2 = self.create_release(version="test2@1.2.4+124")
- release_1_e_1 = self.store_event(
- data={"release": release_1.version, "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- ).event_id
- release_1_e_2 = self.store_event(
- data={"release": release_1.version, "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- ).event_id
- release_2_e_1 = self.store_event(
- data={"release": release_2.version, "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- ).event_id
- query = {"field": ["id"], "query": f"{constants.SEMVER_BUILD_ALIAS}:123"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert {r["id"] for r in response.data["data"]} == {
- release_1_e_1,
- release_1_e_2,
- }
- query = {"field": ["id"], "query": f"{constants.SEMVER_BUILD_ALIAS}:124"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert {r["id"] for r in response.data["data"]} == {
- release_2_e_1,
- }
- query = {"field": ["id"], "query": f"!{constants.SEMVER_BUILD_ALIAS}:124"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert {r["id"] for r in response.data["data"]} == {
- release_1_e_1,
- release_1_e_2,
- }
- def test_aliased_fields(self):
- event1 = self.store_event(
- data={
- "event_id": "a" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- "user": {"email": "foo@example.com"},
- },
- project_id=self.project.id,
- )
- event2 = self.store_event(
- data={
- "event_id": "b" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "foo@example.com"},
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "c" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "bar@example.com"},
- },
- project_id=self.project.id,
- )
- query = {"field": ["issue.id", "count(id)", "count_unique(user)"], "orderby": "issue.id"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 2
- data = response.data["data"]
- assert data[0]["issue.id"] == event1.group_id
- assert data[0]["count(id)"] == 1
- assert data[0]["count_unique(user)"] == 1
- assert "projectid" not in data[0]
- assert "project.id" not in data[0]
- assert data[1]["issue.id"] == event2.group_id
- assert data[1]["count(id)"] == 2
- assert data[1]["count_unique(user)"] == 2
- def test_aggregate_field_with_dotted_param(self):
- event1 = self.store_event(
- data={
- "event_id": "a" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- "user": {"id": "123", "email": "foo@example.com"},
- },
- project_id=self.project.id,
- )
- event2 = self.store_event(
- data={
- "event_id": "b" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"id": "123", "email": "foo@example.com"},
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "c" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"id": "456", "email": "bar@example.com"},
- },
- project_id=self.project.id,
- )
- query = {
- "field": ["issue.id", "issue_title", "count(id)", "count_unique(user.email)"],
- "orderby": "issue.id",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 2
- data = response.data["data"]
- assert data[0]["issue.id"] == event1.group_id
- assert data[0]["count(id)"] == 1
- assert data[0]["count_unique(user.email)"] == 1
- assert "projectid" not in data[0]
- assert "project.id" not in data[0]
- assert data[1]["issue.id"] == event2.group_id
- assert data[1]["count(id)"] == 2
- assert data[1]["count_unique(user.email)"] == 2
- def test_failure_rate_alias_field(self):
- data = self.transaction_data.copy()
- data["transaction"] = "/failure_rate/success"
- self.store_event(data, project_id=self.project.id)
- data = self.transaction_data.copy()
- data["transaction"] = "/failure_rate/unknown"
- data["contexts"]["trace"]["status"] = "unknown_error"
- self.store_event(data, project_id=self.project.id)
- for i in range(6):
- data = self.transaction_data.copy()
- data["transaction"] = f"/failure_rate/{i}"
- data["contexts"]["trace"]["status"] = "unauthenticated"
- self.store_event(data, project_id=self.project.id)
- query = {"field": ["failure_rate()"], "query": "event.type:transaction"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- data = response.data["data"]
- assert data[0]["failure_rate()"] == 0.75
- def test_count_miserable_alias_field(self):
- events = [
- ("one", 300),
- ("one", 300),
- ("two", 3000),
- ("two", 3000),
- ("three", 300),
- ("three", 3000),
- ]
- for idx, event in enumerate(events):
- data = self.load_data(
- timestamp=before_now(minutes=(10 + idx)),
- duration=timedelta(milliseconds=event[1]),
- )
- data["event_id"] = f"{idx}" * 32
- data["transaction"] = f"/count_miserable/horribilis/{idx}"
- data["user"] = {"email": f"{event[0]}@example.com"}
- self.store_event(data, project_id=self.project.id)
- query = {"field": ["count_miserable(user, 300)"], "query": "event.type:transaction"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- data = response.data["data"]
- assert data[0]["count_miserable(user, 300)"] == 2
- @mock.patch(
- "sentry.search.events.fields.MAX_QUERYABLE_TRANSACTION_THRESHOLDS",
- MAX_QUERYABLE_TRANSACTION_THRESHOLDS,
- )
- @mock.patch(
- "sentry.search.events.datasets.discover.MAX_QUERYABLE_TRANSACTION_THRESHOLDS",
- MAX_QUERYABLE_TRANSACTION_THRESHOLDS,
- )
- def test_too_many_transaction_thresholds(self):
- project_transaction_thresholds = []
- project_ids = []
- for i in range(MAX_QUERYABLE_TRANSACTION_THRESHOLDS + 1):
- project = self.create_project(name=f"bulk_txn_{i}")
- project_ids.append(project.id)
- project_transaction_thresholds.append(
- ProjectTransactionThreshold(
- organization=self.organization,
- project=project,
- threshold=400,
- metric=TransactionMetric.LCP.value,
- )
- )
- ProjectTransactionThreshold.objects.bulk_create(project_transaction_thresholds)
- query = {
- "field": [
- "transaction",
- "count_miserable(user)",
- ],
- "query": "event.type:transaction",
- "project": project_ids,
- }
- response = self.do_request(
- query,
- features={
- "organizations:discover-basic": True,
- "organizations:global-views": True,
- },
- )
- assert response.status_code == 400
- assert (
- response.data["detail"]
- == "Exceeded 1 configured transaction thresholds limit, try with fewer Projects."
- )
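- # The loop above creates MAX_QUERYABLE_TRANSACTION_THRESHOLDS + 1 thresholds, one more than the
- # patched limit, which is what triggers the 400; the "1" in the message suggests the module-level
- # test constant is 1.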
- def test_count_miserable_new_alias_field(self):
- ProjectTransactionThreshold.objects.create(
- project=self.project,
- organization=self.project.organization,
- threshold=400,
- metric=TransactionMetric.DURATION.value,
- )
- events = [
- ("one", 400),
- ("one", 400),
- ("two", 3000),
- ("two", 3000),
- ("three", 300),
- ("three", 3000),
- ]
- for idx, event in enumerate(events):
- data = self.load_data(
- timestamp=before_now(minutes=(10 + idx)),
- duration=timedelta(milliseconds=event[1]),
- )
- data["event_id"] = f"{idx}" * 32
- data["transaction"] = f"/count_miserable/horribilis/{event[0]}"
- data["user"] = {"email": f"{idx}@example.com"}
- self.store_event(data, project_id=self.project.id)
- query = {
- "field": [
- "transaction",
- "count_miserable(user)",
- ],
- "query": "event.type:transaction",
- "project": [self.project.id],
- "sort": "count_miserable_user",
- }
- response = self.do_request(
- query,
- )
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 3
- data = response.data["data"]
- assert data[0]["count_miserable(user)"] == 0
- assert data[1]["count_miserable(user)"] == 1
- assert data[2]["count_miserable(user)"] == 2
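- # Expected breakdown (assuming the 400ms project threshold makes a user miserable above 4 * 400 = 1600ms):
- # ".../one" has no miserable users, ".../three" has one (its 3000ms event), ".../two" has two,
- # and the rows come back sorted ascending on count_miserable_user.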
- query["query"] = "event.type:transaction count_miserable(user):>0"
- response = self.do_request(
- query,
- )
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 2
- data = response.data["data"]
- assert abs(data[0]["count_miserable(user)"]) == 1
- assert abs(data[1]["count_miserable(user)"]) == 2
- def test_user_misery_denominator(self):
- """This is to test against a bug where the denominator of misery(total unique users) was wrong
- This is because the total unique users for a LCP misery should only count users that have had a txn with lcp,
- and not count all transactions (ie. uniq_if(transaction has lcp) not just uniq())
- """
- ProjectTransactionThreshold.objects.create(
- project=self.project,
- organization=self.project.organization,
- threshold=600,
- metric=TransactionMetric.LCP.value,
- )
- lcps = [
- 400,
- 400,
- 300,
- 3000,
- 3000,
- 3000,
- ]
- for idx, lcp in enumerate(lcps):
- data = self.load_data(
- timestamp=before_now(minutes=(10 + idx)),
- )
- data["event_id"] = f"{idx}" * 32
- data["transaction"] = "/misery/new/"
- data["user"] = {"email": f"{idx}@example.com"}
- data["measurements"] = {
- "lcp": {"value": lcp},
- }
- self.store_event(data, project_id=self.project.id)
- # Shouldn't count towards misery
- data = self.load_data(timestamp=before_now(minutes=10), duration=timedelta(milliseconds=0))
- data["transaction"] = "/misery/new/"
- data["user"] = {"email": "7@example.com"}
- data["measurements"] = {}
- self.store_event(data, project_id=self.project.id)
- query = {
- "field": [
- "transaction",
- "user_misery()",
- ],
- "query": "event.type:transaction",
- "project": [self.project.id],
- "sort": "-user_misery",
- }
- response = self.do_request(
- query,
- )
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- data = response.data["data"]
- # user_misery = (3 miserable users + 5.8875) / (6 unique users with LCP + 117.75) ≈ 0.071818
- assert abs(data[0]["user_misery()"] - 0.071818) < 0.0001
- def test_user_misery_alias_field(self):
- events = [
- ("one", 300),
- ("one", 300),
- ("two", 3000),
- ("two", 3000),
- ("three", 300),
- ("three", 3000),
- ]
- for idx, event in enumerate(events):
- data = self.load_data(
- timestamp=before_now(minutes=(10 + idx)),
- duration=timedelta(milliseconds=event[1]),
- )
- data["event_id"] = f"{idx}" * 32
- data["transaction"] = f"/user_misery/{idx}"
- data["user"] = {"email": f"{event[0]}@example.com"}
- self.store_event(data, project_id=self.project.id)
- query = {"field": ["user_misery(300)"], "query": "event.type:transaction"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- data = response.data["data"]
- assert abs(data[0]["user_misery(300)"] - 0.0653) < 0.0001
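- # Expected value, roughly: 2 of the 3 unique users are miserable (some txn above 4 * 300 = 1200ms),
- # so (2 + 5.8875) / (3 + 117.75) ≈ 0.0653 with the same smoothing constants as noted above.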
- def test_apdex_denominator_correct(self):
- """This is to test against a bug where the denominator of apdex(total count) was wrong
- This is because the total_count for a LCP apdex should only count transactions that have lcp, and not count
- all transactions (ie. count_if(transaction has lcp) not just count())
- """
- ProjectTransactionThreshold.objects.create(
- project=self.project,
- organization=self.project.organization,
- threshold=600,
- metric=TransactionMetric.LCP.value,
- )
- lcps = [
- 400,
- 400,
- 300,
- 800,
- 3000,
- 3000,
- 3000,
- ]
- for idx, lcp in enumerate(lcps):
- data = self.load_data(
- timestamp=before_now(minutes=(10 + idx)),
- )
- data["event_id"] = f"{idx}" * 32
- data["transaction"] = "/apdex/new/"
- data["user"] = {"email": f"{idx}@example.com"}
- data["measurements"] = {
- "lcp": {"value": lcp},
- }
- self.store_event(data, project_id=self.project.id)
- # Shouldn't count towards apdex
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(milliseconds=0),
- )
- data["transaction"] = "/apdex/new/"
- data["user"] = {"email": "7@example.com"}
- data["measurements"] = {}
- self.store_event(data, project_id=self.project.id)
- query = {
- "field": [
- "transaction",
- "apdex()",
- ],
- "query": "event.type:transaction",
- "project": [self.project.id],
- "sort": "-apdex",
- }
- response = self.do_request(
- query,
- )
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- data = response.data["data"]
- # apdex = (satisfied + tolerated / 2) / total = (3 + 1 / 2) / 7 = 0.5
- assert data[0]["apdex()"] == 0.5
- def test_apdex_new_alias_field(self):
- ProjectTransactionThreshold.objects.create(
- project=self.project,
- organization=self.project.organization,
- threshold=400,
- metric=TransactionMetric.DURATION.value,
- )
- events = [
- ("one", 400),
- ("one", 400),
- ("two", 3000),
- ("two", 3000),
- ("three", 300),
- ("three", 3000),
- ]
- for idx, event in enumerate(events):
- data = self.load_data(
- timestamp=before_now(minutes=(10 + idx)),
- duration=timedelta(milliseconds=event[1]),
- )
- data["event_id"] = f"{idx}" * 32
- data["transaction"] = f"/apdex/new/{event[0]}"
- data["user"] = {"email": f"{idx}@example.com"}
- self.store_event(data, project_id=self.project.id)
- query = {
- "field": [
- "transaction",
- "apdex()",
- ],
- "query": "event.type:transaction",
- "project": [self.project.id],
- "sort": "-apdex",
- }
- response = self.do_request(
- query,
- )
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 3
- data = response.data["data"]
- assert data[0]["apdex()"] == 1.0
- assert data[1]["apdex()"] == 0.5
- assert data[2]["apdex()"] == 0.0
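- # Expected breakdown with the 400ms threshold (tolerable up to 4 * 400 = 1600ms): ".../one" is fully
- # satisfied (1.0), ".../three" has one satisfied and one frustrated event (0.5), ".../two" is fully
- # frustrated (0.0); rows are sorted descending on apdex.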
- query["query"] = "event.type:transaction apdex():>0.50"
- response = self.do_request(
- query,
- )
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- data = response.data["data"]
- assert data[0]["apdex()"] == 1.0
- def test_user_misery_alias_field_with_project_threshold(self):
- ProjectTransactionThreshold.objects.create(
- project=self.project,
- organization=self.project.organization,
- threshold=400,
- metric=TransactionMetric.DURATION.value,
- )
- events = [
- ("one", 400),
- ("one", 400),
- ("two", 3000),
- ("two", 3000),
- ("three", 300),
- ("three", 3000),
- ]
- for idx, event in enumerate(events):
- data = self.load_data(
- timestamp=before_now(minutes=(10 + idx)),
- duration=timedelta(milliseconds=event[1]),
- )
- data["event_id"] = f"{idx}" * 32
- data["transaction"] = f"/count_miserable/horribilis/{event[0]}"
- data["user"] = {"email": f"{idx}@example.com"}
- self.store_event(data, project_id=self.project.id)
- query = {
- "field": [
- "transaction",
- "user_misery()",
- ],
- "orderby": "user_misery()",
- "query": "event.type:transaction",
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 3
- data = response.data["data"]
- assert data[0]["user_misery()"] == pytest.approx(0.04916, rel=1e-3)
- assert data[1]["user_misery()"] == pytest.approx(0.05751, rel=1e-3)
- assert data[2]["user_misery()"] == pytest.approx(0.06586, rel=1e-3)
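- # Each transaction has 2 unique users, so with the smoothed formula the expected values are
- # (0 + 5.8875) / (2 + 117.75) ≈ 0.04916, (1 + 5.8875) / (2 + 117.75) ≈ 0.05751 and
- # (2 + 5.8875) / (2 + 117.75) ≈ 0.06586 for 0, 1 and 2 miserable users respectively.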
- query["query"] = "event.type:transaction user_misery():>0.050"
- response = self.do_request(
- query,
- )
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 2
- data = response.data["data"]
- assert data[0]["user_misery()"] == pytest.approx(0.05751, rel=1e-3)
- assert data[1]["user_misery()"] == pytest.approx(0.06586, rel=1e-3)
- def test_user_misery_alias_field_with_transaction_threshold(self):
- events = [
- ("one", 300),
- ("two", 300),
- ("one", 3000),
- ("two", 3000),
- ("three", 400),
- ("four", 4000),
- ]
- for idx, event in enumerate(events):
- data = self.load_data(
- timestamp=before_now(minutes=(10 + idx)),
- duration=timedelta(milliseconds=event[1]),
- )
- data["event_id"] = f"{idx}" * 32
- data["transaction"] = f"/count_miserable/horribilis/{idx}"
- data["user"] = {"email": f"{event[0]}@example.com"}
- self.store_event(data, project_id=self.project.id)
- if idx % 2:
- ProjectTransactionThresholdOverride.objects.create(
- transaction=f"/count_miserable/horribilis/{idx}",
- project=self.project,
- organization=self.project.organization,
- threshold=100 * idx,
- metric=TransactionMetric.DURATION.value,
- )
- query = {
- "field": [
- "transaction",
- "user_misery()",
- ],
- "query": "event.type:transaction",
- "orderby": "transaction",
- "project": [self.project.id],
- }
- response = self.do_request(
- query,
- )
- assert response.status_code == 200, response.content
- expected = [
- ("/count_miserable/horribilis/0", ["duration", 300], 0.049578),
- ("/count_miserable/horribilis/1", ["duration", 100], 0.049578),
- ("/count_miserable/horribilis/2", ["duration", 300], 0.058),
- ("/count_miserable/horribilis/3", ["duration", 300], 0.058),
- ("/count_miserable/horribilis/4", ["duration", 300], 0.049578),
- ("/count_miserable/horribilis/5", ["duration", 500], 0.058),
- ]
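- # Each transaction here has a single user, so user_misery() is either
- # (0 + 5.8875) / (1 + 117.75) ≈ 0.049578 (user not miserable under the effective threshold) or
- # (1 + 5.8875) / (1 + 117.75) ≈ 0.058 (user miserable).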
- assert len(response.data["data"]) == 6
- data = response.data["data"]
- for i, record in enumerate(expected):
- name, threshold_config, misery = record
- assert data[i]["transaction"] == name
- assert data[i]["project_threshold_config"] == threshold_config
- assert data[i]["user_misery()"] == pytest.approx(misery, rel=1e-3)
- query["query"] = "event.type:transaction user_misery():>0.050"
- response = self.do_request(
- query,
- )
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 3
- data = response.data["data"]
- assert data[0]["user_misery()"] == pytest.approx(0.058, rel=1e-3)
- assert data[1]["user_misery()"] == pytest.approx(0.058, rel=1e-3)
- assert data[2]["user_misery()"] == pytest.approx(0.058, rel=1e-3)
- def test_user_misery_alias_field_with_transaction_threshold_and_project_threshold(self):
- project = self.create_project()
- ProjectTransactionThreshold.objects.create(
- project=project,
- organization=project.organization,
- threshold=100,
- metric=TransactionMetric.DURATION.value,
- )
- events = [
- ("one", 300),
- ("two", 300),
- ("one", 3000),
- ("two", 3000),
- ("three", 400),
- ("four", 4000),
- ]
- for idx, event in enumerate(events):
- data = self.load_data(
- timestamp=before_now(minutes=(10 + idx)),
- duration=timedelta(milliseconds=event[1]),
- )
- data["event_id"] = f"{idx}" * 32
- data["transaction"] = f"/count_miserable/horribilis/{idx}"
- data["user"] = {"email": f"{event[0]}@example.com"}
- self.store_event(data, project_id=project.id)
- if idx % 2:
- ProjectTransactionThresholdOverride.objects.create(
- transaction=f"/count_miserable/horribilis/{idx}",
- project=project,
- organization=project.organization,
- threshold=100 * idx,
- metric=TransactionMetric.DURATION.value,
- )
- project2 = self.create_project()
- data = self.load_data(timestamp=before_now(minutes=1))
- data["transaction"] = "/count_miserable/horribilis/project2"
- data["user"] = {"email": "project2@example.com"}
- self.store_event(data, project_id=project2.id)
- query = {
- "field": [
- "transaction",
- "user_misery()",
- ],
- "query": "event.type:transaction",
- "orderby": "transaction",
- "project": [project.id, project2.id],
- }
- response = self.do_request(
- query,
- features={
- "organizations:discover-basic": True,
- "organizations:global-views": True,
- },
- )
- assert response.status_code == 200, response.content
- expected = [
- (
- "/count_miserable/horribilis/0",
- ["duration", 100],
- 0.049578,
- ), # Uses project threshold
- ("/count_miserable/horribilis/1", ["duration", 100], 0.049578), # Uses txn threshold
- ("/count_miserable/horribilis/2", ["duration", 100], 0.058), # Uses project threshold
- ("/count_miserable/horribilis/3", ["duration", 300], 0.058), # Uses txn threshold
- (
- "/count_miserable/horribilis/4",
- ["duration", 100],
- 0.049578,
- ), # Uses project threshold
- ("/count_miserable/horribilis/5", ["duration", 500], 0.058), # Uses txn threshold
- ("/count_miserable/horribilis/project2", ["duration", 300], 0.058), # Uses fallback
- ]
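- # The overrides give the odd-numbered transactions thresholds of 100 * idx (100, 300, 500), the
- # even-numbered ones fall back to the 100ms project threshold, and project2 has no configuration,
- # so it appears to use the 300ms default; the misery values follow the same single-user arithmetic
- # as in the previous test.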
- assert len(response.data["data"]) == 7
- data = response.data["data"]
- for i, record in enumerate(expected):
- name, threshold_config, misery = record
- assert data[i]["transaction"] == name
- assert data[i]["project_threshold_config"] == threshold_config
- assert data[i]["user_misery()"] == pytest.approx(misery, rel=1e-3)
- query["query"] = "event.type:transaction user_misery():>0.050"
- response = self.do_request(
- query,
- features={
- "organizations:discover-basic": True,
- "organizations:global-views": True,
- },
- )
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 4
- def test_aggregation(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- "user": {"email": "foo@example.com"},
- "environment": "prod",
- "tags": {"sub_customer.is-Enterprise-42": "1"},
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "b" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "foo@example.com"},
- "environment": "staging",
- "tags": {"sub_customer.is-Enterprise-42": "1"},
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "c" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "foo@example.com"},
- "environment": "prod",
- "tags": {"sub_customer.is-Enterprise-42": "0"},
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "d" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "foo@example.com"},
- "environment": "prod",
- "tags": {"sub_customer.is-Enterprise-42": "1"},
- },
- project_id=self.project.id,
- )
- query = {
- "field": ["sub_customer.is-Enterprise-42", "count(sub_customer.is-Enterprise-42)"],
- "orderby": "sub_customer.is-Enterprise-42",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 2
- data = response.data["data"]
- assert data[0]["count(sub_customer.is-Enterprise-42)"] == 1
- assert data[1]["count(sub_customer.is-Enterprise-42)"] == 3
- def test_aggregation_comparison(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- "user": {"email": "foo@example.com"},
- },
- project_id=self.project.id,
- )
- event = self.store_event(
- data={
- "event_id": "b" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "foo@example.com"},
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "c" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "bar@example.com"},
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "d" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_3"],
- "user": {"email": "bar@example.com"},
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "e" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_3"],
- "user": {"email": "bar@example.com"},
- },
- project_id=self.project.id,
- )
- query = {
- "field": ["issue.id", "count(id)", "count_unique(user)"],
- "query": "count(id):>1 count_unique(user):>1",
- "orderby": "issue.id",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- data = response.data["data"]
- assert data[0]["issue.id"] == event.group_id
- assert data[0]["count(id)"] == 2
- assert data[0]["count_unique(user)"] == 2
- def test_aggregation_alias_comparison(self):
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=5),
- )
- data["transaction"] = "/aggregates/1"
- self.store_event(data, project_id=self.project.id)
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=3),
- )
- data["transaction"] = "/aggregates/2"
- event = self.store_event(data, project_id=self.project.id)
- query = {
- "field": ["transaction", "p95()"],
- "query": "event.type:transaction p95():<4000",
- "orderby": ["transaction"],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- data = response.data["data"]
- assert data[0]["transaction"] == event.transaction
- assert data[0]["p95()"] == 3000
- def test_auto_aggregations(self):
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=5),
- )
- data["transaction"] = "/aggregates/1"
- self.store_event(data, project_id=self.project.id)
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=3),
- )
- data["transaction"] = "/aggregates/2"
- event = self.store_event(data, project_id=self.project.id)
- query = {
- "field": ["transaction", "p75()"],
- "query": "event.type:transaction p95():<4000",
- "orderby": ["transaction"],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- data = response.data["data"]
- assert data[0]["transaction"] == event.transaction
- query = {
- "field": ["transaction"],
- "query": "event.type:transaction p95():<4000",
- "orderby": ["transaction"],
- }
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- def test_aggregation_comparison_with_conditions(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- "user": {"email": "foo@example.com"},
- "environment": "prod",
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "b" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "foo@example.com"},
- "environment": "staging",
- },
- project_id=self.project.id,
- )
- event = self.store_event(
- data={
- "event_id": "c" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "foo@example.com"},
- "environment": "prod",
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "d" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "foo@example.com"},
- "environment": "prod",
- },
- project_id=self.project.id,
- )
- query = {
- "field": ["issue.id", "count(id)"],
- "query": "count(id):>1 user.email:foo@example.com environment:prod",
- "orderby": "issue.id",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- data = response.data["data"]
- assert data[0]["issue.id"] == event.group_id
- assert data[0]["count(id)"] == 2
- def test_aggregation_date_comparison_with_conditions(self):
- event = self.store_event(
- data={
- "event_id": "a" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- "user": {"email": "foo@example.com"},
- "environment": "prod",
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "b" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "foo@example.com"},
- "environment": "staging",
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "c" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "foo@example.com"},
- "environment": "prod",
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "d" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "foo@example.com"},
- "environment": "prod",
- },
- project_id=self.project.id,
- )
- query = {
- "field": ["issue.id", "max(timestamp)"],
- "query": "max(timestamp):>1 user.email:foo@example.com environment:prod",
- "orderby": "issue.id",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 2
- assert response.data["meta"]["fields"]["max(timestamp)"] == "date"
- data = response.data["data"]
- assert data[0]["issue.id"] == event.group_id
- def test_percentile_function(self):
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=5),
- )
- data["transaction"] = "/aggregates/1"
- event1 = self.store_event(data, project_id=self.project.id)
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=3),
- )
- data["transaction"] = "/aggregates/2"
- event2 = self.store_event(data, project_id=self.project.id)
- query = {
- "field": ["transaction", "percentile(transaction.duration, 0.95)"],
- "query": "event.type:transaction",
- "orderby": ["transaction"],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 2
- data = response.data["data"]
- assert data[0]["transaction"] == event1.transaction
- assert data[0]["percentile(transaction.duration, 0.95)"] == 5000
- assert data[1]["transaction"] == event2.transaction
- assert data[1]["percentile(transaction.duration, 0.95)"] == 3000
- def test_percentile_function_as_condition(self):
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=5),
- )
- data["transaction"] = "/aggregates/1"
- event1 = self.store_event(data, project_id=self.project.id)
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=3),
- )
- data["transaction"] = "/aggregates/2"
- self.store_event(data, project_id=self.project.id)
- query = {
- "field": ["transaction", "percentile(transaction.duration, 0.95)"],
- "query": "event.type:transaction percentile(transaction.duration, 0.95):>4000",
- "orderby": ["transaction"],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- data = response.data["data"]
- assert data[0]["transaction"] == event1.transaction
- assert data[0]["percentile(transaction.duration, 0.95)"] == 5000
- def test_epm_function(self):
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=5),
- )
- data["transaction"] = "/aggregates/1"
- event1 = self.store_event(data, project_id=self.project.id)
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=3),
- )
- data["transaction"] = "/aggregates/2"
- event2 = self.store_event(data, project_id=self.project.id)
- query = {
- "field": ["transaction", "epm()"],
- "query": "event.type:transaction",
- "orderby": ["transaction"],
- "start": iso_format(before_now(minutes=11)),
- "end": iso_format(before_now(minutes=9)),
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 2
- data = response.data["data"]
- assert data[0]["transaction"] == event1.transaction
- assert data[0]["epm()"] == 0.5
- assert data[1]["transaction"] == event2.transaction
- assert data[1]["epm()"] == 0.5
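- # Expected value: the explicit start/end span a 2 minute window and each transaction has one event,
- # so epm() works out to 1 event / 2 minutes = 0.5.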
- def test_nonexistent_fields(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- },
- project_id=self.project.id,
- )
- query = {"field": ["issue_world.id"]}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert response.data["data"][0]["issue_world.id"] == ""
- def test_no_requested_fields_or_grouping(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- },
- project_id=self.project.id,
- )
- query = {"query": "test"}
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- assert response.data["detail"] == "No columns selected"
- def test_condition_on_aggregate_misses(self):
- self.store_event(
- data={
- "event_id": "c" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "bar@example.com"},
- },
- project_id=self.project.id,
- )
- query = {"field": ["issue.id"], "query": "event_count:>0", "orderby": "issue.id"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 0
- def test_next_prev_link_headers(self):
- events = [("a", "group_1"), ("b", "group_2"), ("c", "group_2"), ("d", "group_2")]
- for e in events:
- self.store_event(
- data={
- "event_id": e[0] * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": [e[1]],
- "user": {"email": "foo@example.com"},
- "tags": {"language": "C++"},
- },
- project_id=self.project.id,
- )
- query = {
- "field": ["count(id)", "issue.id", "context.key"],
- "sort": "-count_id",
- "query": "language:C++",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- links = parse_link_header(response["Link"])
- for link in links:
- assert "field=issue.id" in link
- assert "field=count%28id%29" in link
- assert "field=context.key" in link
- assert "sort=-count_id" in link
- assert "query=language%3AC%2B%2B" in link
- assert len(response.data["data"]) == 2
- data = response.data["data"]
- assert data[0]["count(id)"] == 3
- assert data[1]["count(id)"] == 1
- def test_empty_count_query(self):
- event = self.store_event(
- data={
- "event_id": "a" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["1123581321"],
- "user": {"email": "foo@example.com"},
- "tags": {"language": "C++"},
- },
- project_id=self.project.id,
- )
- query = {
- "field": ["count()"],
- "query": f"issue.id:{event.group_id} timestamp:>{self.ten_mins_ago}",
- "statsPeriod": "14d",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["count()"] == 0
- def test_stack_wildcard_condition(self):
- data = self.load_data(platform="javascript")
- data["timestamp"] = self.ten_mins_ago
- self.store_event(data=data, project_id=self.project.id)
- query = {"field": ["stack.filename", "message"], "query": "stack.filename:*.js"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["meta"]["fields"]["message"] == "string"
- def test_email_wildcard_condition(self):
- data = self.load_data(platform="javascript")
- data["timestamp"] = self.ten_mins_ago
- self.store_event(data=data, project_id=self.project.id)
- query = {"field": ["stack.filename", "message"], "query": "user.email:*@example.org"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["meta"]["fields"]["message"] == "string"
- def test_release_wildcard_condition(self):
- release = self.create_release(version="test@1.2.3+123")
- self.store_event(
- data={"release": release.version, "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- )
- query = {"field": ["stack.filename", "release"], "query": "release:test*"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["release"] == release.version
- def test_transaction_event_type(self):
- self.store_event(data=self.transaction_data, project_id=self.project.id)
- query = {
- "field": ["transaction", "transaction.duration", "transaction.status"],
- "query": "event.type:transaction",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["meta"]["fields"]["transaction.duration"] == "duration"
- assert response.data["meta"]["fields"]["transaction.status"] == "string"
- assert response.data["meta"]["units"]["transaction.duration"] == "millisecond"
- assert response.data["data"][0]["transaction.status"] == "ok"
- def test_trace_columns(self):
- self.store_event(data=self.transaction_data, project_id=self.project.id)
- query = {"field": ["trace"], "query": "event.type:transaction"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["meta"]["fields"]["trace"] == "string"
- assert (
- response.data["data"][0]["trace"]
- == self.transaction_data["contexts"]["trace"]["trace_id"]
- )
- def test_issue_in_columns(self):
- project1 = self.create_project()
- project2 = self.create_project()
- event1 = self.store_event(
- data={
- "event_id": "a" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- },
- project_id=project1.id,
- )
- event2 = self.store_event(
- data={
- "event_id": "b" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- },
- project_id=project2.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {"field": ["id", "issue"], "orderby": ["id"]}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- assert data[0]["id"] == event1.event_id
- assert data[0]["issue.id"] == event1.group_id
- assert data[0]["issue"] == event1.group.qualified_short_id
- assert data[1]["id"] == event2.event_id
- assert data[1]["issue.id"] == event2.group_id
- assert data[1]["issue"] == event2.group.qualified_short_id
- def test_issue_in_search_and_columns(self):
- project1 = self.create_project()
- project2 = self.create_project()
- event1 = self.store_event(
- data={
- "event_id": "a" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- },
- project_id=project1.id,
- )
- self.store_event(
- data={
- "event_id": "b" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- },
- project_id=project2.id,
- )
- tests = [
- ("issue", "issue:%s" % event1.group.qualified_short_id),
- ("issue.id", "issue:%s" % event1.group.qualified_short_id),
- ("issue", "issue.id:%s" % event1.group_id),
- ("issue.id", "issue.id:%s" % event1.group_id),
- ]
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- for testdata in tests:
- query = {"field": [testdata[0]], "query": testdata[1]}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["id"] == event1.event_id
- assert data[0]["issue.id"] == event1.group_id
- if testdata[0] == "issue":
- assert data[0]["issue"] == event1.group.qualified_short_id
- else:
- assert data[0].get("issue", None) is None
- def test_issue_negation(self):
- project1 = self.create_project()
- project2 = self.create_project()
- event1 = self.store_event(
- data={
- "event_id": "a" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- },
- project_id=project1.id,
- )
- event2 = self.store_event(
- data={
- "event_id": "b" * 32,
- "transaction": "/example",
- "message": "go really fast plz",
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- },
- project_id=project2.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": ["title", "issue.id"],
- "query": f"!issue:{event1.group.qualified_short_id}",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["title"] == event2.title
- assert data[0]["issue.id"] == event2.group_id
- def test_search_for_nonexistent_issue(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- },
- project_id=self.project.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {"field": ["count()"], "query": "issue.id:112358"}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["count()"] == 0
- def test_issue_alias_inside_aggregate(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "b" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- },
- project_id=self.project.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": ["project", "count(id)", "count_unique(issue.id)", "count_unique(issue)"],
- "sort": "-count(id)",
- "statsPeriod": "24h",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["count(id)"] == 2
- assert data[0]["count_unique(issue.id)"] == 2
- assert data[0]["count_unique(issue)"] == 2
- def test_project_alias_inside_aggregate(self):
- project1 = self.create_project()
- project2 = self.create_project()
- self.store_event(
- data={
- "event_id": "a" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- },
- project_id=project1.id,
- )
- self.store_event(
- data={
- "event_id": "b" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- },
- project_id=project2.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": [
- "event.type",
- "count(id)",
- "count_unique(project.id)",
- "count_unique(project)",
- ],
- "sort": "-count(id)",
- "statsPeriod": "24h",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["count(id)"] == 2
- assert data[0]["count_unique(project.id)"] == 2
- assert data[0]["count_unique(project)"] == 2
- def test_user_display(self):
- project1 = self.create_project()
- project2 = self.create_project()
- self.store_event(
- data={
- "event_id": "a" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "user": {"email": "cathy@example.com"},
- },
- project_id=project1.id,
- )
- self.store_event(
- data={
- "event_id": "b" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "user": {"username": "catherine"},
- },
- project_id=project2.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": ["event.type", "user.display"],
- "query": "user.display:cath*",
- "statsPeriod": "24h",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- result = {r["user.display"] for r in data}
- assert result == {"catherine", "cathy@example.com"}
- def test_user_display_with_aggregates(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "user": {"email": "cathy@example.com"},
- },
- project_id=self.project.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": ["event.type", "user.display", "count_unique(title)"],
- "statsPeriod": "24h",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- result = {r["user.display"] for r in data}
- assert result == {"cathy@example.com"}
- query = {"field": ["event.type", "count_unique(user.display)"], "statsPeriod": "24h"}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["count_unique(user.display)"] == 1
- def test_orderby_user_display(self):
- project1 = self.create_project()
- project2 = self.create_project()
- self.store_event(
- data={
- "event_id": "a" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "user": {"email": "cathy@example.com"},
- },
- project_id=project1.id,
- )
- self.store_event(
- data={
- "event_id": "b" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "user": {"username": "catherine"},
- },
- project_id=project2.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": ["event.type", "user.display"],
- "query": "user.display:cath*",
- "statsPeriod": "24h",
- "orderby": "-user.display",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- result = [r["user.display"] for r in data]
- # because we're ordering by `-user.display`, we expect the results in reverse sorted order
- assert result == ["cathy@example.com", "catherine"]
- def test_orderby_user_display_with_aggregates(self):
- project1 = self.create_project()
- project2 = self.create_project()
- self.store_event(
- data={
- "event_id": "a" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "user": {"email": "cathy@example.com"},
- },
- project_id=project1.id,
- )
- self.store_event(
- data={
- "event_id": "b" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": self.ten_mins_ago,
- "user": {"username": "catherine"},
- },
- project_id=project2.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": ["event.type", "user.display", "count_unique(title)"],
- "query": "user.display:cath*",
- "statsPeriod": "24h",
- "orderby": "user.display",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- result = [r["user.display"] for r in data]
- # because we're ordering by `user.display`, we expect the results in sorted order
- assert result == ["catherine", "cathy@example.com"]
- def test_any_field_alias(self):
- day_ago = before_now(days=1).replace(hour=10, minute=11, second=12, microsecond=13)
- self.store_event(
- data={
- "event_id": "a" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": iso_format(day_ago),
- "user": {"email": "cathy@example.com"},
- },
- project_id=self.project.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": [
- "event.type",
- "any(user.display)",
- "any(timestamp.to_day)",
- "any(timestamp.to_hour)",
- ],
- "statsPeriod": "7d",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- result = {r["any(user.display)"] for r in data}
- assert result == {"cathy@example.com"}
- result = {r["any(timestamp.to_day)"][:19] for r in data}
- assert result == {iso_format(day_ago.replace(hour=0, minute=0, second=0, microsecond=0))}
- result = {r["any(timestamp.to_hour)"][:19] for r in data}
- assert result == {iso_format(day_ago.replace(minute=0, second=0, microsecond=0))}
- def test_field_aliases_in_conflicting_functions(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "transaction": "/example",
- "message": "how to make fast",
- "timestamp": iso_format(
- before_now(days=1).replace(hour=10, minute=11, second=12, microsecond=13)
- ),
- "user": {"email": "cathy@example.com"},
- },
- project_id=self.project.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- field_aliases = ["user.display", "timestamp.to_day", "timestamp.to_hour"]
- for alias in field_aliases:
- query = {
- "field": [alias, f"any({alias})"],
- "statsPeriod": "7d",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 400, response.content
- assert (
- response.data["detail"]
- == f"A single field cannot be used both inside and outside a function in the same query. To use {alias} you must first remove the function(s): any({alias})"
- )
- @pytest.mark.skip(
- """
- For some reason ClickHouse errors when a query contains two occurrences of the same string
- literal (in this case the empty string "") and one of them is in the prewhere clause.
- Does not affect production or ClickHouse versions > 20.4.
- """
- )
- def test_has_message(self):
- event = self.store_event(
- {"timestamp": self.ten_mins_ago, "message": "a"}, project_id=self.project.id
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {"field": ["project", "message"], "query": "has:message", "statsPeriod": "14d"}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["message"] == event.message
- query = {"field": ["project", "message"], "query": "!has:message", "statsPeriod": "14d"}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 0
- def test_has_transaction_status(self):
- self.store_event(self.transaction_data, project_id=self.project.id)
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": ["event.type", "count(id)"],
- "query": "event.type:transaction has:transaction.status",
- "sort": "-count(id)",
- "statsPeriod": "24h",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["count(id)"] == 1
- def test_not_has_transaction_status(self):
- self.store_event(self.transaction_data, project_id=self.project.id)
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": ["event.type", "count(id)"],
- "query": "event.type:transaction !has:transaction.status",
- "sort": "-count(id)",
- "statsPeriod": "24h",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["count(id)"] == 0
- def test_tag_that_looks_like_aggregation(self):
- data = {
- "message": "Failure state",
- "timestamp": self.ten_mins_ago,
- "tags": {"count_diff": 99},
- }
- self.store_event(data, project_id=self.project.id)
- query = {
- "field": ["message", "count_diff", "count()"],
- "query": "",
- "project": [self.project.id],
- "statsPeriod": "24h",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- meta = response.data["meta"]["fields"]
- assert "string" == meta["count_diff"], "tags should not be counted as integers"
- assert "string" == meta["message"]
- assert "integer" == meta["count()"]
- assert 1 == len(response.data["data"])
- data = response.data["data"][0]
- assert "99" == data["count_diff"]
- assert "Failure state" == data["message"]
- assert 1 == data["count()"]
- def test_aggregate_negation(self):
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=5),
- )
- self.store_event(data, project_id=self.project.id)
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": ["event.type", "count()"],
- "query": "event.type:transaction count():1",
- "statsPeriod": "24h",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- query = {
- "field": ["event.type", "count()"],
- "query": "event.type:transaction !count():1",
- "statsPeriod": "24h",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 0
- def test_all_aggregates_in_columns(self):
- data = self.load_data(
- timestamp=before_now(minutes=11),
- duration=timedelta(seconds=5),
- )
- data["transaction"] = "/failure_rate/1"
- self.store_event(data, project_id=self.project.id)
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=5),
- )
- data["transaction"] = "/failure_rate/1"
- data["contexts"]["trace"]["status"] = "unauthenticated"
- event = self.store_event(data, project_id=self.project.id)
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": [
- "event.type",
- "p50()",
- "p75()",
- "p95()",
- "p99()",
- "p100()",
- "percentile(transaction.duration, 0.99)",
- "apdex(300)",
- "count_miserable(user, 300)",
- "user_misery(300)",
- "failure_rate()",
- ],
- "query": "event.type:transaction",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- meta = response.data["meta"]["fields"]
- units = response.data["meta"]["units"]
- assert meta["p50()"] == "duration"
- assert meta["p75()"] == "duration"
- assert meta["p95()"] == "duration"
- assert meta["p99()"] == "duration"
- assert meta["p100()"] == "duration"
- assert meta["percentile(transaction.duration, 0.99)"] == "duration"
- assert meta["apdex(300)"] == "number"
- assert meta["failure_rate()"] == "percentage"
- assert meta["user_misery(300)"] == "number"
- assert meta["count_miserable(user, 300)"] == "integer"
- assert units["p50()"] == "millisecond"
- assert units["p75()"] == "millisecond"
- assert units["p95()"] == "millisecond"
- assert units["p99()"] == "millisecond"
- assert units["p100()"] == "millisecond"
- assert units["percentile(transaction.duration, 0.99)"] == "millisecond"
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["p50()"] == 5000
- assert data[0]["p75()"] == 5000
- assert data[0]["p95()"] == 5000
- assert data[0]["p99()"] == 5000
- assert data[0]["p100()"] == 5000
- assert data[0]["percentile(transaction.duration, 0.99)"] == 5000
- assert data[0]["apdex(300)"] == 0.0
- assert data[0]["count_miserable(user, 300)"] == 1
- assert data[0]["user_misery(300)"] == 0.058
- assert data[0]["failure_rate()"] == 0.5
- features = {
- "organizations:discover-basic": True,
- "organizations:global-views": True,
- }
- query = {
- "field": [
- "event.type",
- "p50()",
- "p75()",
- "p95()",
- "p99()",
- "p100()",
- "percentile(transaction.duration, 0.99)",
- "apdex(300)",
- "apdex()",
- "count_miserable(user, 300)",
- "user_misery(300)",
- "failure_rate()",
- "count_miserable(user)",
- "user_misery()",
- ],
- "query": "event.type:transaction",
- "project": [self.project.id],
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- meta = response.data["meta"]["fields"]
- units = response.data["meta"]["units"]
- assert meta["p50()"] == "duration"
- assert meta["p75()"] == "duration"
- assert meta["p95()"] == "duration"
- assert meta["p99()"] == "duration"
- assert meta["p100()"] == "duration"
- assert meta["percentile(transaction.duration, 0.99)"] == "duration"
- assert meta["apdex(300)"] == "number"
- assert meta["apdex()"] == "number"
- assert meta["failure_rate()"] == "percentage"
- assert meta["user_misery(300)"] == "number"
- assert meta["count_miserable(user, 300)"] == "integer"
- assert meta["project_threshold_config"] == "string"
- assert meta["user_misery()"] == "number"
- assert meta["count_miserable(user)"] == "integer"
- assert units["p50()"] == "millisecond"
- assert units["p75()"] == "millisecond"
- assert units["p95()"] == "millisecond"
- assert units["p99()"] == "millisecond"
- assert units["p100()"] == "millisecond"
- assert units["percentile(transaction.duration, 0.99)"] == "millisecond"
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["p50()"] == 5000
- assert data[0]["p75()"] == 5000
- assert data[0]["p95()"] == 5000
- assert data[0]["p99()"] == 5000
- assert data[0]["p100()"] == 5000
- assert data[0]["percentile(transaction.duration, 0.99)"] == 5000
- assert data[0]["apdex(300)"] == 0.0
- assert data[0]["apdex()"] == 0.0
- assert data[0]["count_miserable(user, 300)"] == 1
- assert data[0]["user_misery(300)"] == 0.058
- assert data[0]["failure_rate()"] == 0.5
- assert data[0]["project_threshold_config"] == ["duration", 300]
- assert data[0]["user_misery()"] == 0.058
- assert data[0]["count_miserable(user)"] == 1
- query = {
- "field": ["event.type", "last_seen()", "latest_event()"],
- "query": "event.type:transaction",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert self.ten_mins_ago[:-5] in data[0]["last_seen()"]
- assert data[0]["latest_event()"] == event.event_id
- query = {
- "field": [
- "event.type",
- "count()",
- "count(id)",
- "count_unique(project)",
- "min(transaction.duration)",
- "max(transaction.duration)",
- "avg(transaction.duration)",
- "stddev(transaction.duration)",
- "var(transaction.duration)",
- "cov(transaction.duration, transaction.duration)",
- "corr(transaction.duration, transaction.duration)",
- "sum(transaction.duration)",
- ],
- "query": "event.type:transaction",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["count()"] == 2
- assert data[0]["count(id)"] == 2
- assert data[0]["count_unique(project)"] == 1
- assert data[0]["min(transaction.duration)"] == 5000
- assert data[0]["max(transaction.duration)"] == 5000
- assert data[0]["avg(transaction.duration)"] == 5000
- assert data[0]["stddev(transaction.duration)"] == 0.0
- assert data[0]["var(transaction.duration)"] == 0.0
- assert data[0]["cov(transaction.duration, transaction.duration)"] == 0.0
- assert data[0]["corr(transaction.duration, transaction.duration)"] == 0.0
- assert data[0]["sum(transaction.duration)"] == 10000
- @requires_not_arm64
- def test_null_user_misery_returns_zero(self):
- self.transaction_data["user"] = None
- self.transaction_data["transaction"] = "/no_users/1"
- self.store_event(self.transaction_data, project_id=self.project.id)
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": ["user_misery(300)"],
- "query": "event.type:transaction",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- meta = response.data["meta"]["fields"]
- assert meta["user_misery(300)"] == "number"
- data = response.data["data"]
- assert data[0]["user_misery(300)"] == 0
- @requires_not_arm64
- def test_null_user_misery_new_returns_zero(self):
- self.transaction_data["user"] = None
- self.transaction_data["transaction"] = "/no_users/1"
- self.store_event(self.transaction_data, project_id=self.project.id)
- features = {
- "organizations:discover-basic": True,
- }
- query = {
- "field": ["user_misery()"],
- "query": "event.type:transaction",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- meta = response.data["meta"]["fields"]
- assert meta["user_misery()"] == "number"
- data = response.data["data"]
- assert data[0]["user_misery()"] == 0
- def test_all_aggregates_in_query(self):
- data = self.load_data(
- timestamp=before_now(minutes=11),
- duration=timedelta(seconds=5),
- )
- data["transaction"] = "/failure_rate/1"
- self.store_event(data, project_id=self.project.id)
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=5),
- )
- data["transaction"] = "/failure_rate/2"
- data["contexts"]["trace"]["status"] = "unauthenticated"
- self.store_event(data, project_id=self.project.id)
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": [
- "event.type",
- "p50()",
- "p75()",
- "p95()",
- "percentile(transaction.duration, 0.99)",
- "p100()",
- ],
- "query": "event.type:transaction p50():>100 p75():>1000 p95():>1000 p100():>1000 percentile(transaction.duration, 0.99):>1000",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["p50()"] == 5000
- assert data[0]["p75()"] == 5000
- assert data[0]["p95()"] == 5000
- assert data[0]["p100()"] == 5000
- assert data[0]["percentile(transaction.duration, 0.99)"] == 5000
- query = {
- "field": [
- "event.type",
- "apdex(300)",
- "count_miserable(user, 300)",
- "user_misery(300)",
- "failure_rate()",
- ],
- "query": "event.type:transaction apdex(300):>-1.0 failure_rate():>0.25",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["apdex(300)"] == 0.0
- assert data[0]["count_miserable(user, 300)"] == 1
- assert data[0]["user_misery(300)"] == 0.058
- assert data[0]["failure_rate()"] == 0.5
- query = {
- "field": ["event.type", "last_seen()", "latest_event()"],
- "query": "event.type:transaction last_seen():>1990-12-01T00:00:00",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- query = {
- "field": ["event.type", "count()", "count(id)", "count_unique(transaction)"],
- "query": "event.type:transaction count():>1 count(id):>1 count_unique(transaction):>1",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["count()"] == 2
- assert data[0]["count(id)"] == 2
- assert data[0]["count_unique(transaction)"] == 2
- query = {
- "field": [
- "event.type",
- "min(transaction.duration)",
- "max(transaction.duration)",
- "avg(transaction.duration)",
- "sum(transaction.duration)",
- "stddev(transaction.duration)",
- "var(transaction.duration)",
- "cov(transaction.duration, transaction.duration)",
- "corr(transaction.duration, transaction.duration)",
- ],
- "query": " ".join(
- [
- "event.type:transaction",
- "min(transaction.duration):>1000",
- "max(transaction.duration):>1000",
- "avg(transaction.duration):>1000",
- "sum(transaction.duration):>1000",
- "stddev(transaction.duration):>=0.0",
- "var(transaction.duration):>=0.0",
- "cov(transaction.duration, transaction.duration):>=0.0",
- # correlation is nan because variance is 0
- # "corr(transaction.duration, transaction.duration):>=0.0",
- ]
- ),
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["min(transaction.duration)"] == 5000
- assert data[0]["max(transaction.duration)"] == 5000
- assert data[0]["avg(transaction.duration)"] == 5000
- assert data[0]["sum(transaction.duration)"] == 10000
- assert data[0]["stddev(transaction.duration)"] == 0.0
- assert data[0]["var(transaction.duration)"] == 0.0
- assert data[0]["cov(transaction.duration, transaction.duration)"] == 0.0
- assert data[0]["corr(transaction.duration, transaction.duration)"] == 0.0
- query = {
- "field": ["event.type", "apdex(400)"],
- "query": "event.type:transaction apdex(400):0",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["apdex(400)"] == 0
- def test_functions_in_orderby(self):
- data = self.load_data(
- timestamp=before_now(minutes=11),
- duration=timedelta(seconds=5),
- )
- data["transaction"] = "/failure_rate/1"
- self.store_event(data, project_id=self.project.id)
- data = self.load_data(
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=5),
- )
- data["transaction"] = "/failure_rate/2"
- data["contexts"]["trace"]["status"] = "unauthenticated"
- event = self.store_event(data, project_id=self.project.id)
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": ["event.type", "p75()"],
- "sort": "-p75",
- "query": "event.type:transaction",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["p75()"] == 5000
- query = {
- "field": ["event.type", "percentile(transaction.duration, 0.99)"],
- "sort": "-percentile_transaction_duration_0_99",
- "query": "event.type:transaction",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["percentile(transaction.duration, 0.99)"] == 5000
- query = {
- "field": ["event.type", "apdex(300)"],
- "sort": "-apdex(300)",
- "query": "event.type:transaction",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["apdex(300)"] == 0.0
- query = {
- "field": ["event.type", "latest_event()"],
- "query": "event.type:transaction",
- "sort": "latest_event",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["latest_event()"] == event.event_id
- query = {
- "field": ["event.type", "count_unique(transaction)"],
- "query": "event.type:transaction",
- "sort": "-count_unique_transaction",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["count_unique(transaction)"] == 2
- query = {
- "field": ["event.type", "min(transaction.duration)"],
- "query": "event.type:transaction",
- "sort": "-min_transaction_duration",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["min(transaction.duration)"] == 5000
- def test_issue_alias_in_aggregate(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "timestamp": self.eleven_mins_ago,
- "fingerprint": ["group_1"],
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={"event_id": "b" * 32, "timestamp": self.ten_mins_ago, "fingerprint": ["group_2"]},
- project_id=self.project.id,
- )
- query = {"field": ["event.type", "count_unique(issue)"], "query": "count_unique(issue):>1"}
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["count_unique(issue)"] == 2
- def test_deleted_issue_in_results(self):
- event1 = self.store_event(
- data={
- "event_id": "a" * 32,
- "timestamp": self.eleven_mins_ago,
- "fingerprint": ["group_1"],
- },
- project_id=self.project.id,
- )
- event2 = self.store_event(
- data={"event_id": "b" * 32, "timestamp": self.ten_mins_ago, "fingerprint": ["group_2"]},
- project_id=self.project.id,
- )
- event2.group.delete()
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {"field": ["issue", "count()"], "sort": "issue.id"}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- assert data[0]["issue"] == event1.group.qualified_short_id
- assert data[1]["issue"] == "unknown"
- def test_last_seen_negative_duration(self):
- self.store_event(
- data={"event_id": "f" * 32, "timestamp": self.ten_mins_ago, "fingerprint": ["group_1"]},
- project_id=self.project.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {"field": ["id", "last_seen()"], "query": "last_seen():-30d"}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["id"] == "f" * 32
- def test_last_seen_aggregate_condition(self):
- self.store_event(
- data={"event_id": "f" * 32, "timestamp": self.ten_mins_ago, "fingerprint": ["group_1"]},
- project_id=self.project.id,
- )
- query = {
- "field": ["id", "last_seen()"],
- "query": f"last_seen():>{iso_format(before_now(days=30))}",
- }
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["id"] == "f" * 32
- def test_conditional_filter(self):
- for v in ["a", "b"]:
- self.store_event(
- data={
- "event_id": v * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- },
- project_id=self.project.id,
- )
- query = {
- "field": ["id"],
- "query": "id:{} OR id:{}".format("a" * 32, "b" * 32),
- "orderby": "id",
- }
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- assert data[0]["id"] == "a" * 32
- assert data[1]["id"] == "b" * 32
- def test_aggregation_comparison_with_conditional_filter(self):
- self.store_event(
- data={
- "event_id": "a" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- "user": {"email": "foo@example.com"},
- "environment": "prod",
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "b" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "foo@example.com"},
- "environment": "staging",
- },
- project_id=self.project.id,
- )
- event = self.store_event(
- data={
- "event_id": "c" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "foo@example.com"},
- "environment": "prod",
- },
- project_id=self.project.id,
- )
- self.store_event(
- data={
- "event_id": "d" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "user": {"email": "foo@example.com"},
- "environment": "canary",
- },
- project_id=self.project.id,
- )
- query = {
- "field": ["issue.id", "count(id)"],
- "query": "count(id):>1 user.email:foo@example.com AND (environment:prod OR environment:staging)",
- "orderby": "issue.id",
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- data = response.data["data"]
- assert data[0]["issue.id"] == event.group_id
- assert data[0]["count(id)"] == 2
- def run_test_in_query(self, query, expected_events, expected_negative_events=None):
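- # Helper: run the query and assert the returned ids match expected_events in order.
- # When expected_negative_events is given, also assert the negated query returns exactly those events.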
- params = {
- "field": ["id"],
- "query": query,
- "orderby": "id",
- }
- response = self.do_request(
- params, {"organizations:discover-basic": True, "organizations:global-views": True}
- )
- assert response.status_code == 200, response.content
- assert [row["id"] for row in response.data["data"]] == [e.event_id for e in expected_events]
- if expected_negative_events is not None:
- params["query"] = f"!{query}"
- response = self.do_request(
- params,
- {"organizations:discover-basic": True, "organizations:global-views": True},
- )
- assert response.status_code == 200, response.content
- assert [row["id"] for row in response.data["data"]] == [
- e.event_id for e in expected_negative_events
- ]
- def test_in_query_events(self):
- project_1 = self.create_project()
- event_1 = self.store_event(
- data={
- "event_id": "a" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- "message": "group1",
- "user": {"email": "hello@example.com"},
- "environment": "prod",
- "tags": {"random": "123"},
- "release": "1.0",
- },
- project_id=project_1.id,
- )
- project_2 = self.create_project()
- event_2 = self.store_event(
- data={
- "event_id": "b" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_2"],
- "message": "group2",
- "user": {"email": "bar@example.com"},
- "environment": "staging",
- "tags": {"random": "456"},
- "stacktrace": {"frames": [{"filename": "src/app/group2.py"}]},
- "release": "1.2",
- },
- project_id=project_2.id,
- )
- project_3 = self.create_project()
- event_3 = self.store_event(
- data={
- "event_id": "c" * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_3"],
- "message": "group3",
- "user": {"email": "foo@example.com"},
- "environment": "canary",
- "tags": {"random": "789"},
- },
- project_id=project_3.id,
- )
- self.run_test_in_query("environment:[prod, staging]", [event_1, event_2], [event_3])
- self.run_test_in_query("environment:[staging]", [event_2], [event_1, event_3])
- self.run_test_in_query(
- "user.email:[foo@example.com, hello@example.com]", [event_1, event_3], [event_2]
- )
- self.run_test_in_query("user.email:[foo@example.com]", [event_3], [event_1, event_2])
- self.run_test_in_query(
- "user.display:[foo@example.com, hello@example.com]", [event_1, event_3], [event_2]
- )
- self.run_test_in_query(
- 'message:["group2 src/app/group2.py in ?", group1]', [event_1, event_2], [event_3]
- )
- self.run_test_in_query(
- f"issue.id:[{event_1.group_id},{event_2.group_id}]", [event_1, event_2]
- )
- self.run_test_in_query(
- f"issue:[{event_1.group.qualified_short_id},{event_2.group.qualified_short_id}]",
- [event_1, event_2],
- )
- self.run_test_in_query(
- f"issue:[{event_1.group.qualified_short_id},{event_2.group.qualified_short_id}, unknown]",
- [event_1, event_2],
- )
- self.run_test_in_query(f"project_id:[{project_3.id},{project_2.id}]", [event_2, event_3])
- self.run_test_in_query(
- f"project.name:[{project_3.slug},{project_2.slug}]", [event_2, event_3]
- )
- self.run_test_in_query("random:[789,456]", [event_2, event_3], [event_1])
- self.run_test_in_query("tags[random]:[789,456]", [event_2, event_3], [event_1])
- self.run_test_in_query("release:[1.0,1.2]", [event_1, event_2], [event_3])
- def test_in_query_events_stack(self):
- test_js = self.store_event(
- self.load_data(
- platform="javascript",
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=5),
- ),
- project_id=self.project.id,
- )
- test_java = self.store_event(
- self.load_data(
- platform="java",
- timestamp=before_now(minutes=10),
- duration=timedelta(seconds=5),
- ),
- project_id=self.project.id,
- )
- self.run_test_in_query(
- "stack.filename:[../../sentry/scripts/views.js]", [test_js], [test_java]
- )
- def test_in_query_transactions(self):
- data = self.transaction_data.copy()
- data["event_id"] = "a" * 32
- data["contexts"]["trace"]["status"] = "ok"
- transaction_1 = self.store_event(data, project_id=self.project.id)
- data = self.transaction_data.copy()
- data["event_id"] = "b" * 32
- data["contexts"]["trace"]["status"] = "aborted"
- transaction_2 = self.store_event(data, project_id=self.project.id)
- data = self.transaction_data.copy()
- data["event_id"] = "c" * 32
- data["contexts"]["trace"]["status"] = "already_exists"
- transaction_3 = self.store_event(data, project_id=self.project.id)
- self.run_test_in_query(
- "transaction.status:[aborted, already_exists]",
- [transaction_2, transaction_3],
- [transaction_1],
- )
- def test_messed_up_function_values(self):
- # TODO (evanh): It would be nice if this surfaced an error to the user.
- # The problem: the && causes the parser to treat that term not as a bad
- # function call but as a valid raw search with parens in it. It's not trivial
- # to change the parser to recognize "bad function values" and surface them.
- for v in ["a", "b"]:
- self.store_event(
- data={
- "event_id": v * 32,
- "timestamp": self.ten_mins_ago,
- "fingerprint": ["group_1"],
- },
- project_id=self.project.id,
- )
- features = {"organizations:discover-basic": True, "organizations:global-views": True}
- query = {
- "field": [
- "transaction",
- "project",
- "epm()",
- "p50()",
- "p95()",
- "failure_rate()",
- "apdex(300)",
- "count_unique(user)",
- "user_misery(300)",
- "count_miserable(user, 300)",
- ],
- "query": "failure_rate():>0.003&& users:>10 event.type:transaction",
- "sort": "-failure_rate",
- "statsPeriod": "24h",
- }
- response = self.do_request(query, features=features)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 0
- def test_context_fields_between_datasets(self):
- event_data = self.load_data(platform="android")
- transaction_data = self.load_data()
- event_data["spans"] = transaction_data["spans"]
- event_data["contexts"]["trace"] = transaction_data["contexts"]["trace"]
- event_data["type"] = "transaction"
- event_data["transaction"] = "/failure_rate/1"
- event_data["timestamp"] = iso_format(before_now(minutes=10))
- event_data["start_timestamp"] = iso_format(before_now(minutes=10, seconds=5))
- event_data["user"]["geo"] = {"country_code": "US", "region": "CA", "city": "San Francisco"}
- self.store_event(event_data, project_id=self.project.id)
- event_data["type"] = "error"
- self.store_event(event_data, project_id=self.project.id)
- fields = [
- "os.build",
- "os.kernel_version",
- "device.arch",
- # TODO: battery level is not consistent across both datasets
- # "device.battery_level",
- "device.brand",
- "device.charging",
- "device.locale",
- "device.model_id",
- "device.name",
- "device.online",
- "device.orientation",
- "device.simulator",
- "device.uuid",
- ]
- data = [
- {"field": fields + ["location", "count()"], "query": "event.type:error"},
- {"field": fields + ["duration", "count()"], "query": "event.type:transaction"},
- ]
- for datum in data:
- response = self.do_request(datum)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1, datum
- results = response.data["data"]
- assert results[0]["count()"] == 1, datum
- for field in fields:
- key, value = field.split(".", 1)
- expected = str(event_data["contexts"][key][value])
- assert results[0][field] == expected, field + str(datum)
- def test_http_fields_between_datasets(self):
- event_data = self.load_data(platform="android")
- transaction_data = self.load_data()
- event_data["spans"] = transaction_data["spans"]
- event_data["contexts"]["trace"] = transaction_data["contexts"]["trace"]
- event_data["type"] = "transaction"
- event_data["transaction"] = "/failure_rate/1"
- event_data["timestamp"] = iso_format(before_now(minutes=10))
- event_data["start_timestamp"] = iso_format(before_now(minutes=10, seconds=5))
- event_data["user"]["geo"] = {"country_code": "US", "region": "CA", "city": "San Francisco"}
- event_data["request"] = transaction_data["request"]
- self.store_event(event_data, project_id=self.project.id)
- event_data["type"] = "error"
- self.store_event(event_data, project_id=self.project.id)
- fields = ["http.method", "http.referer", "http.url"]
- expected = ["GET", "fixtures.transaction", "http://countries:8010/country_by_code/"]
- data = [
- {"field": fields + ["location", "count()"], "query": "event.type:error"},
- {"field": fields + ["duration", "count()"], "query": "event.type:transaction"},
- ]
- for datum in data:
- response = self.do_request(datum)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1, datum
- results = response.data["data"]
- assert results[0]["count()"] == 1, datum
- for field, exp in zip(fields, expected):
- assert results[0][field] == exp, field + str(datum)
- def test_failure_count_alias_field(self):
- data = self.transaction_data.copy()
- data["transaction"] = "/failure_count/success"
- self.store_event(data, project_id=self.project.id)
- data = self.transaction_data.copy()
- data["transaction"] = "/failure_count/unknown"
- data["contexts"]["trace"]["status"] = "unknown_error"
- self.store_event(data, project_id=self.project.id)
- for i in range(6):
- data = self.transaction_data.copy()
- data["transaction"] = f"/failure_count/{i}"
- data["contexts"]["trace"]["status"] = "unauthenticated"
- self.store_event(data, project_id=self.project.id)
- query = {"field": ["count()", "failure_count()"], "query": "event.type:transaction"}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- data = response.data["data"]
- assert data[0]["count()"] == 8
- assert data[0]["failure_count()"] == 6
- @mock.patch("sentry.utils.snuba.quantize_time")
- def test_quantize_dates(self, mock_quantize):
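- # Short statsPeriods and absolute start/end ranges should not be quantized,
- # while a long statsPeriod (90d) should trigger quantize_time (twice here, presumably once per range boundary).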
- self.create_project()
- mock_quantize.return_value = before_now(days=1).replace(tzinfo=utc)
- # Don't quantize short time periods
- query = {"statsPeriod": "1h", "query": "", "field": ["id", "timestamp"]}
- self.do_request(query)
- # Don't quantize absolute date periods
- query = {
- "start": iso_format(before_now(days=20)),
- "end": iso_format(before_now(days=15)),
- "query": "",
- "field": ["id", "timestamp"],
- }
- self.do_request(query)
- assert len(mock_quantize.mock_calls) == 0
- # Quantize long date periods
- query = {"field": ["id", "timestamp"], "statsPeriod": "90d", "query": ""}
- self.do_request(query)
- assert len(mock_quantize.mock_calls) == 2
- @mock.patch("sentry.snuba.discover.query")
- def test_valid_referrer(self, mock):
- mock.return_value = {}
- query = {
- "field": ["user"],
- "referrer": "api.performance.transaction-summary",
- "project": [self.project.id],
- }
- self.do_request(query)
- _, kwargs = mock.call_args
- self.assertEqual(kwargs["referrer"], "api.performance.transaction-summary")
- @mock.patch("sentry.snuba.discover.query")
- def test_invalid_referrer(self, mock):
- mock.return_value = {}
- query = {
- "field": ["user"],
- "referrer": "api.performance.invalid",
- "project": [self.project.id],
- }
- self.do_request(query)
- _, kwargs = mock.call_args
- self.assertEqual(kwargs["referrer"], self.referrer)
- @mock.patch("sentry.snuba.discover.query")
- def test_empty_referrer(self, mock):
- mock.return_value = {}
- query = {
- "field": ["user"],
- "project": [self.project.id],
- }
- self.do_request(query)
- _, kwargs = mock.call_args
- self.assertEqual(kwargs["referrer"], self.referrer)
- @mock.patch("sentry.snuba.discover.query")
- def test_api_token_referrer(self, mock):
- mock.return_value = {}
- # Project ID cannot be inferred when using an org API key, so it must
- # be passed explicitly in the query parameters
- api_key = ApiKey.objects.create(organization=self.organization, scope_list=["org:read"])
- query = {
- "field": ["project.name", "environment"],
- "project": [self.project.id],
- }
- features = {"organizations:discover-basic": True}
- features.update(self.features)
- url = self.reverse_url()
- with self.feature(features):
- self.client_get(
- url,
- query,
- format="json",
- HTTP_AUTHORIZATION=b"Basic " + b64encode(f"{api_key.key}:".encode()),
- )
- _, kwargs = mock.call_args
- self.assertEqual(kwargs["referrer"], "api.auth-token.events")
- def test_limit_number_of_fields(self):
- self.create_project()
- for i in range(1, 25):
- response = self.do_request({"field": ["id"] * i})
- if i <= 20:
- assert response.status_code == 200
- else:
- assert response.status_code == 400
- assert (
- response.data["detail"]
- == "You can view up to 20 fields at a time. Please delete some and try again."
- )
- def test_percentile_function_meta_types(self):
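- # Known duration-based measurements should report "duration"/"millisecond" meta,
- # while cls and unrecognized measurements fall back to plain "number".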
- self.store_event(self.transaction_data, project_id=self.project.id)
- query = {
- "field": [
- "transaction",
- "percentile(transaction.duration, 0.95)",
- "percentile(measurements.fp, 0.95)",
- "percentile(measurements.fcp, 0.95)",
- "percentile(measurements.lcp, 0.95)",
- "percentile(measurements.fid, 0.95)",
- "percentile(measurements.ttfb, 0.95)",
- "percentile(measurements.ttfb.requesttime, 0.95)",
- "percentile(measurements.cls, 0.95)",
- "percentile(measurements.foo, 0.95)",
- "percentile(measurements.bar, 0.95)",
- ],
- "query": "",
- "orderby": ["transaction"],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- meta = response.data["meta"]["fields"]
- assert meta["percentile(transaction.duration, 0.95)"] == "duration"
- assert meta["percentile(measurements.fp, 0.95)"] == "duration"
- assert meta["percentile(measurements.fcp, 0.95)"] == "duration"
- assert meta["percentile(measurements.lcp, 0.95)"] == "duration"
- assert meta["percentile(measurements.fid, 0.95)"] == "duration"
- assert meta["percentile(measurements.ttfb, 0.95)"] == "duration"
- assert meta["percentile(measurements.ttfb.requesttime, 0.95)"] == "duration"
- assert meta["percentile(measurements.cls, 0.95)"] == "number"
- assert meta["percentile(measurements.foo, 0.95)"] == "number"
- assert meta["percentile(measurements.bar, 0.95)"] == "number"
- units = response.data["meta"]["units"]
- assert units["percentile(transaction.duration, 0.95)"] == "millisecond"
- assert units["percentile(measurements.fp, 0.95)"] == "millisecond"
- assert units["percentile(measurements.fcp, 0.95)"] == "millisecond"
- assert units["percentile(measurements.lcp, 0.95)"] == "millisecond"
- assert units["percentile(measurements.fid, 0.95)"] == "millisecond"
- assert units["percentile(measurements.ttfb, 0.95)"] == "millisecond"
- assert units["percentile(measurements.ttfb.requesttime, 0.95)"] == "millisecond"
- def test_count_at_least_query(self):
- self.store_event(self.transaction_data, self.project.id)
- response = self.do_request({"field": "count_at_least(measurements.fcp, 0)"})
- assert response.status_code == 200
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["count_at_least(measurements.fcp, 0)"] == 1
- # a value that's a little bigger than the stored fcp
- fcp = int(self.transaction_data["measurements"]["fcp"]["value"] + 1)
- response = self.do_request({"field": f"count_at_least(measurements.fcp, {fcp})"})
- assert response.status_code == 200
- assert len(response.data["data"]) == 1
- assert response.data["data"][0][f"count_at_least(measurements.fcp, {fcp})"] == 0
- def test_measurements_query(self):
- self.store_event(self.transaction_data, self.project.id)
- query = {
- "field": [
- "measurements.fp",
- "measurements.fcp",
- "measurements.lcp",
- "measurements.fid",
- ]
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- for field in query["field"]:
- measure = field.split(".", 1)[1]
- assert (
- response.data["data"][0][field]
- == self.transaction_data["measurements"][measure]["value"]
- )
- query = {
- "field": [
- "measurements.fP",
- "measurements.Fcp",
- "measurements.LcP",
- "measurements.FID",
- ]
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- for field in query["field"]:
- measure = field.split(".", 1)[1].lower()
- assert (
- response.data["data"][0][field]
- == self.transaction_data["measurements"][measure]["value"]
- )
- def test_measurements_aggregations(self):
- self.store_event(self.transaction_data, self.project.id)
- # Exercise all of the potential aggregates.
- # stddev and var are skipped since aggregating a single data point
- # results in nan.
- query = {
- "field": [
- "percentile(measurements.fcp, 0.5)",
- "count_unique(measurements.fcp)",
- "min(measurements.fcp)",
- "max(measurements.fcp)",
- "avg(measurements.fcp)",
- "sum(measurements.fcp)",
- ],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert (
- response.data["data"][0]["percentile(measurements.fcp, 0.5)"]
- == self.transaction_data["measurements"]["fcp"]["value"]
- )
- assert response.data["data"][0]["count_unique(measurements.fcp)"] == 1
- assert (
- response.data["data"][0]["min(measurements.fcp)"]
- == self.transaction_data["measurements"]["fcp"]["value"]
- )
- assert (
- response.data["data"][0]["max(measurements.fcp)"]
- == self.transaction_data["measurements"]["fcp"]["value"]
- )
- assert (
- response.data["data"][0]["avg(measurements.fcp)"]
- == self.transaction_data["measurements"]["fcp"]["value"]
- )
- assert (
- response.data["data"][0]["sum(measurements.fcp)"]
- == self.transaction_data["measurements"]["fcp"]["value"]
- )
- def get_measurement_condition_response(self, query_str, field):
- query = {
- "field": ["transaction", "count()"] + (field if field else []),
- "query": query_str,
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- return response
- def assert_measurement_condition_without_results(self, query_str, field=None):
- response = self.get_measurement_condition_response(query_str, field)
- assert len(response.data["data"]) == 0
- def assert_measurement_condition_with_results(self, query_str, field=None):
- response = self.get_measurement_condition_response(query_str, field)
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["transaction"] == self.transaction_data["metadata"]["title"]
- assert response.data["data"][0]["count()"] == 1
- def test_measurements_conditions(self):
- self.store_event(self.transaction_data, self.project.id)
- fcp = self.transaction_data["measurements"]["fcp"]["value"]
- # equality condition
- # Use json.dumps to preserve precision when converting the float to a string,
- # since equality comparisons on floating point values must be exact.
- self.assert_measurement_condition_with_results(f"measurements.fcp:{json.dumps(fcp)}")
- # greater than condition
- self.assert_measurement_condition_with_results(f"measurements.fcp:>{fcp - 1}")
- self.assert_measurement_condition_without_results(f"measurements.fcp:>{fcp + 1}")
- # less than condition
- self.assert_measurement_condition_with_results(f"measurements.fcp:<{fcp + 1}")
- self.assert_measurement_condition_without_results(f"measurements.fcp:<{fcp - 1}")
- # has condition
- self.assert_measurement_condition_with_results("has:measurements.fcp")
- self.assert_measurement_condition_without_results("!has:measurements.fcp")
- def test_measurements_aggregation_conditions(self):
- self.store_event(self.transaction_data, self.project.id)
- fcp = self.transaction_data["measurements"]["fcp"]["value"]
- functions = [
- "percentile(measurements.fcp, 0.5)",
- "min(measurements.fcp)",
- "max(measurements.fcp)",
- "avg(measurements.fcp)",
- "sum(measurements.fcp)",
- ]
- for function in functions:
- self.assert_measurement_condition_with_results(
- f"{function}:>{fcp - 1}", field=[function]
- )
- self.assert_measurement_condition_without_results(
- f"{function}:>{fcp + 1}", field=[function]
- )
- self.assert_measurement_condition_with_results(
- f"{function}:<{fcp + 1}", field=[function]
- )
- self.assert_measurement_condition_without_results(
- f"{function}:<{fcp - 1}", field=[function]
- )
- count_unique = "count_unique(measurements.fcp)"
- self.assert_measurement_condition_with_results(f"{count_unique}:1", field=[count_unique])
- self.assert_measurement_condition_without_results(f"{count_unique}:0", field=[count_unique])
- def test_compare_numeric_aggregate(self):
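- # compare_numeric_aggregate returns 1 when the comparison against the aggregate holds and 0 otherwise.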
- self.store_event(self.transaction_data, self.project.id)
- query = {
- "field": [
- "p75(measurements.fcp)",
- "compare_numeric_aggregate(p75_measurements_fcp,greater,0)",
- ],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert (
- response.data["data"][0]["compare_numeric_aggregate(p75_measurements_fcp,greater,0)"]
- == 1
- )
- query = {
- "field": ["p75()", "compare_numeric_aggregate(p75,equals,0)"],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["compare_numeric_aggregate(p75,equals,0)"] == 0
- def test_no_team_key_transactions(self):
- transactions = [
- "/blah_transaction/",
- "/foo_transaction/",
- "/zoo_transaction/",
- ]
- for transaction in transactions:
- self.transaction_data["transaction"] = transaction
- self.store_event(self.transaction_data, self.project.id)
- query = {
- "team": "myteams",
- "project": [self.project.id],
- # use the orderby to ensure a deterministic result order
- "orderby": "transaction",
- "field": [
- "team_key_transaction",
- "transaction",
- "transaction.status",
- "project",
- "epm()",
- "failure_rate()",
- "percentile(transaction.duration, 0.95)",
- ],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 3
- assert data[0]["team_key_transaction"] == 0
- assert data[0]["transaction"] == "/blah_transaction/"
- assert data[1]["team_key_transaction"] == 0
- assert data[1]["transaction"] == "/foo_transaction/"
- assert data[2]["team_key_transaction"] == 0
- assert data[2]["transaction"] == "/zoo_transaction/"
- def test_team_key_transactions_my_teams(self):
- team1 = self.create_team(organization=self.organization, name="Team A")
- self.create_team_membership(team1, user=self.user)
- self.project.add_team(team1)
- team2 = self.create_team(organization=self.organization, name="Team B")
- self.project.add_team(team2)
- transactions = ["/blah_transaction/"]
- key_transactions = [
- (team1, "/foo_transaction/"),
- (team2, "/zoo_transaction/"),
- ]
- for transaction in transactions:
- self.transaction_data["transaction"] = transaction
- self.store_event(self.transaction_data, self.project.id)
- for team, transaction in key_transactions:
- self.transaction_data["transaction"] = transaction
- self.store_event(self.transaction_data, self.project.id)
- TeamKeyTransaction.objects.create(
- organization=self.organization,
- transaction=transaction,
- project_team=ProjectTeam.objects.get(project=self.project, team=team),
- )
- query = {
- "team": "myteams",
- "project": [self.project.id],
- "field": [
- "team_key_transaction",
- "transaction",
- "transaction.status",
- "project",
- "epm()",
- "failure_rate()",
- "percentile(transaction.duration, 0.95)",
- ],
- }
- query["orderby"] = ["team_key_transaction", "transaction"]
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 3
- assert data[0]["team_key_transaction"] == 0
- assert data[0]["transaction"] == "/blah_transaction/"
- assert data[1]["team_key_transaction"] == 0
- assert data[1]["transaction"] == "/zoo_transaction/"
- assert data[2]["team_key_transaction"] == 1
- assert data[2]["transaction"] == "/foo_transaction/"
- # not specifying any teams should use my teams
- query = {
- "project": [self.project.id],
- "field": [
- "team_key_transaction",
- "transaction",
- "transaction.status",
- "project",
- "epm()",
- "failure_rate()",
- "percentile(transaction.duration, 0.95)",
- ],
- }
- query["orderby"] = ["team_key_transaction", "transaction"]
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 3
- assert data[0]["team_key_transaction"] == 0
- assert data[0]["transaction"] == "/blah_transaction/"
- assert data[1]["team_key_transaction"] == 0
- assert data[1]["transaction"] == "/zoo_transaction/"
- assert data[2]["team_key_transaction"] == 1
- assert data[2]["transaction"] == "/foo_transaction/"
- def test_team_key_transactions_orderby(self):
- team1 = self.create_team(organization=self.organization, name="Team A")
- team2 = self.create_team(organization=self.organization, name="Team B")
- transactions = ["/blah_transaction/"]
- key_transactions = [
- (team1, "/foo_transaction/"),
- (team2, "/zoo_transaction/"),
- ]
- for transaction in transactions:
- self.transaction_data["transaction"] = transaction
- self.store_event(self.transaction_data, self.project.id)
- for team, transaction in key_transactions:
- self.create_team_membership(team, user=self.user)
- self.project.add_team(team)
- self.transaction_data["transaction"] = transaction
- self.store_event(self.transaction_data, self.project.id)
- TeamKeyTransaction.objects.create(
- organization=self.organization,
- transaction=transaction,
- project_team=ProjectTeam.objects.get(project=self.project, team=team),
- )
- query = {
- "team": "myteams",
- "project": [self.project.id],
- "field": [
- "team_key_transaction",
- "transaction",
- "transaction.status",
- "project",
- "epm()",
- "failure_rate()",
- "percentile(transaction.duration, 0.95)",
- ],
- }
- # test ascending order
- query["orderby"] = ["team_key_transaction", "transaction"]
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 3
- assert data[0]["team_key_transaction"] == 0
- assert data[0]["transaction"] == "/blah_transaction/"
- assert data[1]["team_key_transaction"] == 1
- assert data[1]["transaction"] == "/foo_transaction/"
- assert data[2]["team_key_transaction"] == 1
- assert data[2]["transaction"] == "/zoo_transaction/"
- # test descending order
- query["orderby"] = ["-team_key_transaction", "-transaction"]
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 3
- assert data[0]["team_key_transaction"] == 1
- assert data[0]["transaction"] == "/zoo_transaction/"
- assert data[1]["team_key_transaction"] == 1
- assert data[1]["transaction"] == "/foo_transaction/"
- assert data[2]["team_key_transaction"] == 0
- assert data[2]["transaction"] == "/blah_transaction/"
- def test_team_key_transactions_query(self):
- team1 = self.create_team(organization=self.organization, name="Team A")
- team2 = self.create_team(organization=self.organization, name="Team B")
- transactions = ["/blah_transaction/"]
- key_transactions = [
- (team1, "/foo_transaction/"),
- (team2, "/zoo_transaction/"),
- ]
- for transaction in transactions:
- self.transaction_data["transaction"] = transaction
- self.store_event(self.transaction_data, self.project.id)
- for team, transaction in key_transactions:
- self.create_team_membership(team, user=self.user)
- self.project.add_team(team)
- self.transaction_data["transaction"] = transaction
- self.store_event(self.transaction_data, self.project.id)
- TeamKeyTransaction.objects.create(
- organization=self.organization,
- project_team=ProjectTeam.objects.get(
- project=self.project,
- team=team,
- ),
- transaction=transaction,
- )
- query = {
- "team": "myteams",
- "project": [self.project.id],
- # use the orderby to ensure a deterministic result order
- "orderby": "transaction",
- "field": [
- "team_key_transaction",
- "transaction",
- "transaction.status",
- "project",
- "epm()",
- "failure_rate()",
- "percentile(transaction.duration, 0.95)",
- ],
- }
- # key transactions
- query["query"] = "has:team_key_transaction"
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- assert data[0]["team_key_transaction"] == 1
- assert data[0]["transaction"] == "/foo_transaction/"
- assert data[1]["team_key_transaction"] == 1
- assert data[1]["transaction"] == "/zoo_transaction/"
- # key transactions
- query["query"] = "team_key_transaction:true"
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- assert data[0]["team_key_transaction"] == 1
- assert data[0]["transaction"] == "/foo_transaction/"
- assert data[1]["team_key_transaction"] == 1
- assert data[1]["transaction"] == "/zoo_transaction/"
- # not key transactions
- query["query"] = "!has:team_key_transaction"
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["team_key_transaction"] == 0
- assert data[0]["transaction"] == "/blah_transaction/"
- # not key transactions
- query["query"] = "team_key_transaction:false"
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["team_key_transaction"] == 0
- assert data[0]["transaction"] == "/blah_transaction/"
- def test_too_many_team_key_transactions(self):
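- # With MAX_QUERYABLE_TEAM_KEY_TRANSACTIONS patched down to 1, only one of the
- # two stored key transactions should be flagged as a team key transaction.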
- MAX_QUERYABLE_TEAM_KEY_TRANSACTIONS = 1
- with mock.patch(
- "sentry.search.events.fields.MAX_QUERYABLE_TEAM_KEY_TRANSACTIONS",
- MAX_QUERYABLE_TEAM_KEY_TRANSACTIONS,
- ):
- team = self.create_team(organization=self.organization, name="Team A")
- self.create_team_membership(team, user=self.user)
- self.project.add_team(team)
- project_team = ProjectTeam.objects.get(project=self.project, team=team)
- for i in range(MAX_QUERYABLE_TEAM_KEY_TRANSACTIONS + 1):
- transaction = f"transaction-{team.id}-{i}"
- self.transaction_data["transaction"] = transaction
- self.store_event(self.transaction_data, self.project.id)
- TeamKeyTransaction.objects.bulk_create(
- [
- TeamKeyTransaction(
- organization=self.organization,
- project_team=project_team,
- transaction=f"transaction-{team.id}-{i}",
- )
- for i in range(MAX_QUERYABLE_TEAM_KEY_TRANSACTIONS + 1)
- ]
- )
- query = {
- "team": "myteams",
- "project": [self.project.id],
- "orderby": "transaction",
- "field": [
- "team_key_transaction",
- "transaction",
- "transaction.status",
- "project",
- "epm()",
- "failure_rate()",
- "percentile(transaction.duration, 0.95)",
- ],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 2
- assert (
- sum(row["team_key_transaction"] for row in data)
- == MAX_QUERYABLE_TEAM_KEY_TRANSACTIONS
- )
- def test_no_pagination_param(self):
- self.store_event(
- data={"event_id": "a" * 32, "timestamp": self.ten_mins_ago, "fingerprint": ["group1"]},
- project_id=self.project.id,
- )
- query = {"field": ["id", "project.id"], "project": [self.project.id], "noPagination": True}
- response = self.do_request(query)
- assert response.status_code == 200
- assert len(response.data["data"]) == 1
- assert "Link" not in response
- def test_nan_result(self):
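- # apdex over a query that matches no events should come back as 0 rather than NaN.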
- query = {"field": ["apdex(300)"], "project": [self.project.id], "query": f"id:{'0' * 32}"}
- response = self.do_request(query)
- assert response.status_code == 200
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["apdex(300)"] == 0
- def test_equation_simple(self):
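- # "equation|" fields evaluate arithmetic over other fields; here the result should be spans.http / 3.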
- event_data = self.load_data(
- timestamp=before_now(minutes=10),
- )
- event_data["breakdowns"]["span_ops"]["ops.http"]["value"] = 1500
- self.store_event(data=event_data, project_id=self.project.id)
- query = {
- "field": ["spans.http", "equation|spans.http / 3"],
- "project": [self.project.id],
- "query": "event.type:transaction",
- }
- response = self.do_request(
- query,
- {
- "organizations:discover-basic": True,
- },
- )
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert (
- response.data["data"][0]["equation|spans.http / 3"]
- == event_data["breakdowns"]["span_ops"]["ops.http"]["value"] / 3
- )
- assert response.data["meta"]["fields"]["equation|spans.http / 3"] == "number"
- def test_equation_sort(self):
- event_data = self.transaction_data.copy()
- event_data["breakdowns"] = {"span_ops": {"ops.http": {"value": 1500}}}
- self.store_event(data=event_data, project_id=self.project.id)
- event_data2 = self.transaction_data.copy()
- event_data2["breakdowns"] = {"span_ops": {"ops.http": {"value": 2000}}}
- self.store_event(data=event_data2, project_id=self.project.id)
- query = {
- "field": ["spans.http", "equation|spans.http / 3"],
- "project": [self.project.id],
- "orderby": "equation|spans.http / 3",
- "query": "event.type:transaction",
- }
- response = self.do_request(
- query,
- {
- "organizations:discover-basic": True,
- },
- )
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 2
- assert (
- response.data["data"][0]["equation|spans.http / 3"]
- == event_data["breakdowns"]["span_ops"]["ops.http"]["value"] / 3
- )
- assert (
- response.data["data"][1]["equation|spans.http / 3"]
- == event_data2["breakdowns"]["span_ops"]["ops.http"]["value"] / 3
- )
- def test_equation_operation_limit(self):
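- # Equations with too many operations (11 multiplications here) should be rejected with a 400.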
- query = {
- "field": ["spans.http", f"equation|spans.http{' * 2' * 11}"],
- "project": [self.project.id],
- "query": "event.type:transaction",
- }
- response = self.do_request(
- query,
- {
- "organizations:discover-basic": True,
- },
- )
- assert response.status_code == 400
- @mock.patch("sentry.api.bases.organization_events.MAX_FIELDS", 2)
- def test_equation_field_limit(self):
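- # With MAX_FIELDS patched to 2, two fields plus an equation should exceed the limit and return a 400.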
- query = {
- "field": ["spans.http", "transaction.duration", "equation|5 * 2"],
- "project": [self.project.id],
- "query": "event.type:transaction",
- }
- response = self.do_request(
- query,
- {
- "organizations:discover-basic": True,
- },
- )
- assert response.status_code == 400
- def test_count_if(self):
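- # count_if counts only the events whose field or tag value satisfies the given condition.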
- unicode_phrase1 = "\u716e\u6211\u66f4\u591a\u7684\u98df\u7269\uff0c\u6211\u9913\u4e86"
- for i in range(5):
- data = self.load_data(
- timestamp=before_now(minutes=(10 + i)),
- duration=timedelta(milliseconds=100 if i < 3 else 200),
- )
- data["tags"] = {
- "sub_customer.is-Enterprise-42": "yes" if i == 0 else "no",
- "unicode-phrase": unicode_phrase1 if i == 0 else "no",
- }
- self.store_event(data, project_id=self.project.id)
- query = {
- "field": [
- "count_if(transaction.duration, less, 150)",
- "count_if(transaction.duration, greater, 150)",
- "count_if(sub_customer.is-Enterprise-42, equals, yes)",
- "count_if(sub_customer.is-Enterprise-42, notEquals, yes)",
- f"count_if(unicode-phrase, equals, {unicode_phrase1})",
- ],
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 200
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["count_if(transaction.duration, less, 150)"] == 3
- assert response.data["data"][0]["count_if(transaction.duration, greater, 150)"] == 2
- assert response.data["data"][0]["count_if(sub_customer.is-Enterprise-42, equals, yes)"] == 1
- assert (
- response.data["data"][0]["count_if(sub_customer.is-Enterprise-42, notEquals, yes)"] == 4
- )
- assert response.data["data"][0][f"count_if(unicode-phrase, equals, {unicode_phrase1})"] == 1
- def test_count_if_measurements_cls(self):
- data = self.transaction_data.copy()
- data["measurements"] = {"cls": {"value": 0.5}}
- self.store_event(data, project_id=self.project.id)
- data["measurements"] = {"cls": {"value": 0.1}}
- self.store_event(data, project_id=self.project.id)
- query = {
- "field": [
- "count_if(measurements.cls, greater, 0.05)",
- "count_if(measurements.cls, less, 0.3)",
- ],
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 200
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["count_if(measurements.cls, greater, 0.05)"] == 2
- assert response.data["data"][0]["count_if(measurements.cls, less, 0.3)"] == 1
- def test_count_if_filter(self):
- for i in range(5):
- data = self.load_data(
- timestamp=before_now(minutes=(10 + i)),
- duration=timedelta(milliseconds=100 if i < 3 else 200),
- )
- data["tags"] = {"sub_customer.is-Enterprise-42": "yes" if i == 0 else "no"}
- self.store_event(data, project_id=self.project.id)
- query = {
- "field": [
- "count_if(transaction.duration, less, 150)",
- ],
- "query": "count_if(transaction.duration, less, 150):>2",
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 200
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["count_if(transaction.duration, less, 150)"] == 3
- query = {
- "field": [
- "count_if(transaction.duration, less, 150)",
- ],
- "query": "count_if(transaction.duration, less, 150):<2",
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 200
- assert len(response.data["data"]) == 0
- def test_filters_with_escaped_asterisk(self):
- self.transaction_data["transaction"] = r"/:a*/:b-:c(\d\.\e+)"
- self.store_event(self.transaction_data, project_id=self.project.id)
- query = {
- "field": ["transaction", "transaction.duration"],
- # make sure to escape the asterisk so it's not treated as a wildcard
- "query": r'transaction:"/:a\*/:b-:c(\d\.\e+)"',
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 200
- assert len(response.data["data"]) == 1
- def test_filters_with_back_slashes(self):
- self.transaction_data["transaction"] = r"a\b\c@d"
- self.store_event(self.transaction_data, project_id=self.project.id)
- query = {
- "field": ["transaction", "transaction.duration"],
- "query": r'transaction:"a\b\c@d"',
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 200
- assert len(response.data["data"]) == 1
- def test_mobile_measurements(self):
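- # frames_slow_rate, frames_frozen_rate, and stall_percentage are derived from the raw frame and stall measurements rather than stored directly.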
- self.transaction_data["measurements"]["frames_total"] = {"value": 100}
- self.transaction_data["measurements"]["frames_slow"] = {"value": 10}
- self.transaction_data["measurements"]["frames_frozen"] = {"value": 5}
- self.transaction_data["measurements"]["stall_count"] = {"value": 2}
- self.transaction_data["measurements"]["stall_total_time"] = {"value": 12}
- self.transaction_data["measurements"]["stall_longest_time"] = {"value": 7}
- self.store_event(self.transaction_data, project_id=self.project.id)
- query = {
- "field": [
- "measurements.frames_total",
- "measurements.frames_slow",
- "measurements.frames_frozen",
- "measurements.frames_slow_rate",
- "measurements.frames_frozen_rate",
- "measurements.stall_count",
- "measurements.stall_total_time",
- "measurements.stall_longest_time",
- "measurements.stall_percentage",
- ],
- "query": "",
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 200
- data = response.data["data"]
- assert len(data) == 1
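- # derived values: frames_slow_rate = 10/100, frames_frozen_rate = 5/100; stall_percentage = stall_total_time / transaction.duration = 12/3000 (presumably the fixture's 3s transaction duration)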
- assert data[0]["measurements.frames_total"] == 100
- assert data[0]["measurements.frames_slow"] == 10
- assert data[0]["measurements.frames_frozen"] == 5
- assert data[0]["measurements.frames_slow_rate"] == 0.1
- assert data[0]["measurements.frames_frozen_rate"] == 0.05
- assert data[0]["measurements.stall_count"] == 2
- assert data[0]["measurements.stall_total_time"] == 12
- assert data[0]["measurements.stall_longest_time"] == 7
- assert data[0]["measurements.stall_percentage"] == 0.004
- meta = response.data["meta"]["fields"]
- assert meta["measurements.frames_total"] == "number"
- assert meta["measurements.frames_slow"] == "number"
- assert meta["measurements.frames_frozen"] == "number"
- assert meta["measurements.frames_slow_rate"] == "percentage"
- assert meta["measurements.frames_frozen_rate"] == "percentage"
- assert meta["measurements.stall_count"] == "number"
- assert meta["measurements.stall_total_time"] == "number"
- assert meta["measurements.stall_longest_time"] == "number"
- assert meta["measurements.stall_percentage"] == "percentage"
- query = {
- "field": [
- "p75(measurements.frames_slow_rate)",
- "p75(measurements.frames_frozen_rate)",
- "percentile(measurements.frames_slow_rate,0.5)",
- "percentile(measurements.frames_frozen_rate,0.5)",
- "p75(measurements.stall_percentage)",
- "percentile(measurements.stall_percentage,0.5)",
- ],
- "query": "",
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 200
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["p75(measurements.frames_slow_rate)"] == 0.1
- assert data[0]["p75(measurements.frames_frozen_rate)"] == 0.05
- assert data[0]["p75(measurements.stall_percentage)"] == 0.004
- assert data[0]["percentile(measurements.frames_slow_rate,0.5)"] == 0.1
- assert data[0]["percentile(measurements.frames_frozen_rate,0.5)"] == 0.05
- assert data[0]["percentile(measurements.stall_percentage,0.5)"] == 0.004
- meta = response.data["meta"]["fields"]
- assert meta["p75(measurements.frames_slow_rate)"] == "percentage"
- assert meta["p75(measurements.frames_frozen_rate)"] == "percentage"
- assert meta["p75(measurements.stall_percentage)"] == "percentage"
- assert meta["percentile(measurements.frames_slow_rate,0.5)"] == "percentage"
- assert meta["percentile(measurements.stall_percentage,0.5)"] == "percentage"
- def test_project_auto_fields(self):
- self.store_event(
- data={"event_id": "a" * 32, "environment": "staging", "timestamp": self.ten_mins_ago},
- project_id=self.project.id,
- )
- query = {"field": ["environment"]}
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0]["environment"] == "staging"
- assert response.data["data"][0]["project.name"] == self.project.slug
- def test_timestamp_different_from_params(self):
- fifteen_days_ago = iso_format(before_now(days=15))
- fifteen_days_later = iso_format(before_now(days=-15))
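- # timestamp filters that fall outside the 14d statsPeriod below should be rejected with a 400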
- for query_text in [
- f"timestamp:<{fifteen_days_ago}",
- f"timestamp:<={fifteen_days_ago}",
- f"timestamp:>{fifteen_days_later}",
- f"timestamp:>={fifteen_days_later}",
- ]:
- query = {
- "field": ["count()"],
- "query": query_text,
- "statsPeriod": "14d",
- "project": self.project.id,
- }
- response = self.do_request(query)
- assert response.status_code == 400, query_text
- @mock.patch("sentry.search.events.builder.raw_snql_query")
- def test_removes_unnecessary_default_project_and_transaction_thresholds(self, mock_snql_query):
- mock_snql_query.side_effect = [{"meta": {}, "data": []}]
- ProjectTransactionThreshold.objects.create(
- project=self.project,
- organization=self.organization,
- # these are the default values that we use
- threshold=constants.DEFAULT_PROJECT_THRESHOLD,
- metric=TransactionMetric.DURATION.value,
- )
- ProjectTransactionThresholdOverride.objects.create(
- transaction="transaction",
- project=self.project,
- organization=self.organization,
- # these are the default values that we use
- threshold=constants.DEFAULT_PROJECT_THRESHOLD,
- metric=TransactionMetric.DURATION.value,
- )
- query = {
- "field": ["apdex()", "user_misery()"],
- "query": "event.type:transaction",
- "project": [self.project.id],
- }
- response = self.do_request(
- query,
- features={
- "organizations:discover-basic": True,
- "organizations:global-views": True,
- },
- )
- assert response.status_code == 200, response.content
- assert mock_snql_query.call_count == 1
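- # with every stored threshold equal to the defaults, the builder should emit the plain default config tuple ("duration", 300) instead of a per-project lookup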
- assert (
- Function("tuple", ["duration", 300], "project_threshold_config")
- in mock_snql_query.call_args_list[0][0][0].query.select
- )
- @mock.patch("sentry.search.events.builder.raw_snql_query")
- def test_removes_unnecessary_default_project_and_transaction_thresholds_keeps_others(
- self, mock_snql_query
- ):
- mock_snql_query.side_effect = [{"meta": {}, "data": []}]
- ProjectTransactionThreshold.objects.create(
- project=self.project,
- organization=self.organization,
- # these are the default values that we use
- threshold=constants.DEFAULT_PROJECT_THRESHOLD,
- metric=TransactionMetric.DURATION.value,
- )
- ProjectTransactionThresholdOverride.objects.create(
- transaction="transaction",
- project=self.project,
- organization=self.organization,
- # these are the default values that we use
- threshold=constants.DEFAULT_PROJECT_THRESHOLD,
- metric=TransactionMetric.DURATION.value,
- )
- project = self.create_project()
- ProjectTransactionThreshold.objects.create(
- project=project,
- organization=self.organization,
- threshold=100,
- metric=TransactionMetric.LCP.value,
- )
- ProjectTransactionThresholdOverride.objects.create(
- transaction="transaction",
- project=project,
- organization=self.organization,
- threshold=200,
- metric=TransactionMetric.LCP.value,
- )
- query = {
- "field": ["apdex()", "user_misery()"],
- "query": "event.type:transaction",
- "project": [self.project.id, project.id],
- }
- response = self.do_request(
- query,
- features={
- "organizations:discover-basic": True,
- "organizations:global-views": True,
- },
- )
- assert response.status_code == 200, response.content
- assert mock_snql_query.call_count == 1
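- # expected: only the second project's non-default thresholds (lcp 100 / lcp 200) survive in the config, while self.project falls back to the default ("duration", 300)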
- project_threshold_override_config_index = Function(
- "indexOf",
- [
- # only 1 transaction override is present here
- # because the other uses the default values
- [(Function("toUInt64", [project.id]), "transaction")],
- (Column("project_id"), Column("transaction")),
- ],
- "project_threshold_override_config_index",
- )
- project_threshold_config_index = Function(
- "indexOf",
- [
- # only 1 project override is present here
- # because the other uses the default values
- [Function("toUInt64", [project.id])],
- Column("project_id"),
- ],
- "project_threshold_config_index",
- )
- assert (
- Function(
- "if",
- [
- Function("equals", [project_threshold_override_config_index, 0]),
- Function(
- "if",
- [
- Function("equals", [project_threshold_config_index, 0]),
- ("duration", 300),
- Function(
- "arrayElement", [[("lcp", 100)], project_threshold_config_index]
- ),
- ],
- ),
- Function(
- "arrayElement",
- [[("lcp", 200)], project_threshold_override_config_index],
- ),
- ],
- "project_threshold_config",
- )
- in mock_snql_query.call_args_list[0][0][0].query.select
- )
- def test_count_web_vitals(self):
- # Good
- self.transaction_data["measurements"] = {
- "lcp": {"value": constants.VITAL_THRESHOLDS["lcp"]["meh"] - 100},
- }
- self.store_event(self.transaction_data, self.project.id)
- # Meh
- self.transaction_data["measurements"] = {
- "lcp": {"value": constants.VITAL_THRESHOLDS["lcp"]["meh"] + 100},
- }
- self.store_event(self.transaction_data, self.project.id)
- self.store_event(self.transaction_data, self.project.id)
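- # one event below the "meh" LCP threshold counts as good, the two above it count as meh, and none reach "poor"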
- query = {
- "field": [
- "count_web_vitals(measurements.lcp, poor)",
- "count_web_vitals(measurements.lcp, meh)",
- "count_web_vitals(measurements.lcp, good)",
- ]
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- assert len(response.data["data"]) == 1
- assert response.data["data"][0] == {
- "count_web_vitals(measurements.lcp, poor)": 0,
- "count_web_vitals(measurements.lcp, meh)": 2,
- "count_web_vitals(measurements.lcp, good)": 1,
- }
- def test_count_web_vitals_invalid_vital(self):
- query = {
- "field": [
- "count_web_vitals(measurements.foo, poor)",
- ],
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- query = {
- "field": [
- "count_web_vitals(tags[lcp], poor)",
- ],
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- query = {
- "field": [
- "count_web_vitals(transaction.duration, poor)",
- ],
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- query = {
- "field": [
- "count_web_vitals(measurements.lcp, bad)",
- ],
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 400, response.content
- def test_tag_that_looks_like_aggregate(self):
- data = self.load_data(
- timestamp=before_now(minutes=1),
- )
- data["tags"] = {"p95": "<5k"}
- self.store_event(data, project_id=self.project.id)
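- # "p95" here is a plain tag, so both the selected column and the filter should resolve to the tag rather than the p95() aggregate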
- query = {
- "field": ["p95"],
- "query": "p95:<5k",
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- data = response.data["data"]
- assert len(data) == 1
- assert data[0]["p95"] == "<5k"
- def test_chained_or_query_meta_tip(self):
- query = {
- "field": ["transaction"],
- "query": "transaction:a OR transaction:b",
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- meta = response.data["meta"]
- assert meta["tips"] == {
- "query": "Did you know you can replace chained or conditions like `field:a OR field:b OR field:c` with `field:[a,b,c]`",
- "columns": None,
- }
- @override_settings(SENTRY_SELF_HOSTED=False)
- def test_ratelimit(self):
- query = {
- "field": ["transaction"],
- "project": [self.project.id],
- }
- with freeze_time("2000-01-01"):
- for _ in range(15):
- self.do_request(query, features={"organizations:discover-events-rate-limit": True})
- response = self.do_request(
- query, features={"organizations:discover-events-rate-limit": True}
- )
- assert response.status_code == 429, response.content
- @override_settings(SENTRY_SELF_HOSTED=False)
- def test_no_ratelimit(self):
- query = {
- "field": ["transaction"],
- "project": [self.project.id],
- }
- with freeze_time("2000-01-01"):
- for _ in range(15):
- self.do_request(query)
- response = self.do_request(query)
- assert response.status_code == 200, response.content
- def test_transaction_source(self):
- query = {
- "field": ["transaction"],
- "query": "transaction.source:task",
- "project": [self.project.id],
- }
- response = self.do_request(query)
- assert response.status_code == 200, response.content