test_organization_events_stats.py 126 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380
  1. from __future__ import annotations
  2. import uuid
  3. from datetime import timedelta
  4. from typing import Any, TypedDict
  5. from unittest import mock
  6. from uuid import uuid4
  7. import pytest
  8. from dateutil.parser import parse as parse_date
  9. from django.urls import reverse
  10. from snuba_sdk import Entity
  11. from snuba_sdk.column import Column
  12. from snuba_sdk.conditions import Condition, Op
  13. from snuba_sdk.function import Function
  14. from sentry.constants import MAX_TOP_EVENTS
  15. from sentry.issues.grouptype import ProfileFileIOGroupType
  16. from sentry.models.project import Project
  17. from sentry.models.transaction_threshold import ProjectTransactionThreshold, TransactionMetric
  18. from sentry.snuba.discover import OTHER_KEY
  19. from sentry.testutils.cases import APITestCase, ProfilesSnubaTestCase, SnubaTestCase
  20. from sentry.testutils.helpers.datetime import before_now
  21. from sentry.utils.samples import load_data
  22. from tests.sentry.issues.test_utils import SearchIssueTestMixin
# Apply the sentry_metrics marker to every test in this module.
pytestmark = pytest.mark.sentry_metrics
class _EventDataDict(TypedDict):
    """Fixture entry pairing an event payload with its project and a count."""

    # Raw event payload, in the shape accepted by the event-storage helpers.
    data: dict[str, Any]
    # Project the event is stored under.
    project: Project
    # Number of matching events (NOTE(review): consumed outside this chunk — verify usage there).
    count: int
  28. class OrganizationEventsStatsEndpointTest(APITestCase, SnubaTestCase, SearchIssueTestMixin):
  29. endpoint = "sentry-api-0-organization-events-stats"
    def setUp(self):
        """Log the fixture user in and seed two projects with three error events.

        Relative to ``self.day_ago`` (hour 10 of yesterday, minutes/seconds
        zeroed): one event lands in the first hour bucket (project 1) and two
        in the second hour bucket (project 2). The hourly-count assertions in
        this class depend on that layout.
        """
        super().setUp()
        self.login_as(user=self.user)
        # Keep a handle on the authenticated user: ``self.user`` is re-bound
        # below to a freshly created, data-only user whose email is used in
        # event tags.
        self.authed_user = self.user
        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        self.project = self.create_project()
        self.project2 = self.create_project()
        self.user = self.create_user()
        self.user2 = self.create_user()
        # Hour 0: one event in self.project, tagged with the first user.
        self.store_event(
            data={
                "event_id": "a" * 32,
                "message": "very bad",
                "timestamp": (self.day_ago + timedelta(minutes=1)).isoformat(),
                "fingerprint": ["group1"],
                "tags": {"sentry:user": self.user.email},
            },
            project_id=self.project.id,
        )
        # Hour 1: two events in self.project2, both tagged with the second user.
        self.store_event(
            data={
                "event_id": "b" * 32,
                "message": "oh my",
                "timestamp": (self.day_ago + timedelta(hours=1, minutes=1)).isoformat(),
                "fingerprint": ["group2"],
                "tags": {"sentry:user": self.user2.email},
            },
            project_id=self.project2.id,
        )
        self.store_event(
            data={
                "event_id": "c" * 32,
                "message": "very bad",
                "timestamp": (self.day_ago + timedelta(hours=1, minutes=2)).isoformat(),
                "fingerprint": ["group2"],
                "tags": {"sentry:user": self.user2.email},
            },
            project_id=self.project2.id,
        )
        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_id_or_slug": self.project.organization.slug},
        )
        # Extra feature flags merged into every do_request call.
        self.features = {}
  74. def do_request(self, data, url=None, features=None):
  75. if features is None:
  76. features = {"organizations:discover-basic": True}
  77. features.update(self.features)
  78. with self.feature(features):
  79. return self.client.get(self.url if url is None else url, data=data, format="json")
  80. @pytest.mark.querybuilder
  81. def test_simple(self):
  82. response = self.do_request(
  83. {
  84. "start": self.day_ago,
  85. "end": self.day_ago + timedelta(hours=2),
  86. "interval": "1h",
  87. },
  88. )
  89. assert response.status_code == 200, response.content
  90. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  91. def test_generic_issue(self):
  92. _, _, group_info = self.store_search_issue(
  93. self.project.id,
  94. self.user.id,
  95. [f"{ProfileFileIOGroupType.type_id}-group1"],
  96. "prod",
  97. self.day_ago,
  98. )
  99. assert group_info is not None
  100. self.store_search_issue(
  101. self.project.id,
  102. self.user.id,
  103. [f"{ProfileFileIOGroupType.type_id}-group1"],
  104. "prod",
  105. self.day_ago + timedelta(hours=1, minutes=1),
  106. )
  107. self.store_search_issue(
  108. self.project.id,
  109. self.user.id,
  110. [f"{ProfileFileIOGroupType.type_id}-group1"],
  111. "prod",
  112. self.day_ago + timedelta(hours=1, minutes=2),
  113. )
  114. with self.feature(
  115. [
  116. "organizations:profiling",
  117. ]
  118. ):
  119. response = self.do_request(
  120. {
  121. "start": self.day_ago,
  122. "end": self.day_ago + timedelta(hours=2),
  123. "interval": "1h",
  124. "query": f"issue:{group_info.group.qualified_short_id}",
  125. "dataset": "issuePlatform",
  126. },
  127. )
  128. assert response.status_code == 200, response.content
  129. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  130. def test_generic_issue_calculated_interval(self):
  131. """Test that a 4h interval returns the correct generic event stats.
  132. This follows a different code path than 1h or 1d as the IssuePlatformTimeSeriesQueryBuilder
  133. does some calculation to create the time column."""
  134. _, _, group_info = self.store_search_issue(
  135. self.project.id,
  136. self.user.id,
  137. [f"{ProfileFileIOGroupType.type_id}-group1"],
  138. "prod",
  139. self.day_ago + timedelta(minutes=1),
  140. )
  141. assert group_info is not None
  142. self.store_search_issue(
  143. self.project.id,
  144. self.user.id,
  145. [f"{ProfileFileIOGroupType.type_id}-group1"],
  146. "prod",
  147. self.day_ago + timedelta(minutes=1),
  148. )
  149. self.store_search_issue(
  150. self.project.id,
  151. self.user.id,
  152. [f"{ProfileFileIOGroupType.type_id}-group1"],
  153. "prod",
  154. self.day_ago + timedelta(minutes=2),
  155. )
  156. with self.feature(
  157. [
  158. "organizations:profiling",
  159. ]
  160. ):
  161. response = self.do_request(
  162. {
  163. "start": self.day_ago,
  164. "end": self.day_ago + timedelta(hours=4),
  165. "interval": "4h",
  166. "query": f"issue:{group_info.group.qualified_short_id}",
  167. "dataset": "issuePlatform",
  168. },
  169. )
  170. assert response.status_code == 200, response.content
  171. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 3}], [{"count": 0}]]
  172. def test_errors_dataset(self):
  173. response = self.do_request(
  174. {
  175. "start": self.day_ago,
  176. "end": self.day_ago + timedelta(hours=2),
  177. "interval": "1h",
  178. "dataset": "errors",
  179. "query": "is:unresolved",
  180. },
  181. )
  182. assert response.status_code == 200, response.content
  183. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  184. def test_errors_dataset_no_query(self):
  185. response = self.do_request(
  186. {
  187. "start": self.day_ago,
  188. "end": self.day_ago + timedelta(hours=2),
  189. "interval": "1h",
  190. "dataset": "errors",
  191. },
  192. )
  193. assert response.status_code == 200, response.content
  194. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  195. def test_misaligned_last_bucket(self):
  196. response = self.do_request(
  197. data={
  198. "start": self.day_ago - timedelta(minutes=30),
  199. "end": self.day_ago + timedelta(hours=1, minutes=30),
  200. "interval": "1h",
  201. "partial": "1",
  202. },
  203. )
  204. assert response.status_code == 200, response.content
  205. assert [attrs for time, attrs in response.data["data"]] == [
  206. [{"count": 0}],
  207. [{"count": 1}],
  208. [{"count": 2}],
  209. ]
  210. def test_no_projects(self):
  211. org = self.create_organization(owner=self.user)
  212. self.login_as(user=self.user)
  213. url = reverse(
  214. "sentry-api-0-organization-events-stats", kwargs={"organization_id_or_slug": org.slug}
  215. )
  216. response = self.do_request({}, url)
  217. assert response.status_code == 200, response.content
  218. assert len(response.data["data"]) == 0
  219. def test_user_count(self):
  220. self.store_event(
  221. data={
  222. "event_id": "d" * 32,
  223. "message": "something",
  224. "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
  225. "tags": {"sentry:user": self.user2.email},
  226. "fingerprint": ["group2"],
  227. },
  228. project_id=self.project2.id,
  229. )
  230. response = self.do_request(
  231. data={
  232. "start": self.day_ago,
  233. "end": self.day_ago + timedelta(hours=2),
  234. "interval": "1h",
  235. "yAxis": "user_count",
  236. },
  237. )
  238. assert response.status_code == 200, response.content
  239. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 2}], [{"count": 1}]]
  240. def test_discover2_backwards_compatibility(self):
  241. response = self.do_request(
  242. data={
  243. "project": self.project.id,
  244. "start": self.day_ago,
  245. "end": self.day_ago + timedelta(hours=2),
  246. "interval": "1h",
  247. "yAxis": "user_count",
  248. },
  249. )
  250. assert response.status_code == 200, response.content
  251. assert len(response.data["data"]) > 0
  252. response = self.do_request(
  253. data={
  254. "project": self.project.id,
  255. "start": self.day_ago,
  256. "end": self.day_ago + timedelta(hours=2),
  257. "interval": "1h",
  258. "yAxis": "event_count",
  259. },
  260. )
  261. assert response.status_code == 200, response.content
  262. assert len(response.data["data"]) > 0
  263. def test_with_event_count_flag(self):
  264. response = self.do_request(
  265. data={
  266. "start": self.day_ago,
  267. "end": self.day_ago + timedelta(hours=2),
  268. "interval": "1h",
  269. "yAxis": "event_count",
  270. },
  271. )
  272. assert response.status_code == 200, response.content
  273. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  274. def test_performance_view_feature(self):
  275. response = self.do_request(
  276. data={
  277. "end": before_now(),
  278. "start": before_now(hours=2),
  279. "query": "project_id:1",
  280. "interval": "30m",
  281. "yAxis": "count()",
  282. },
  283. features={
  284. "organizations:performance-view": True,
  285. "organizations:discover-basic": False,
  286. },
  287. )
  288. assert response.status_code == 200, response.content
  289. def test_apdex_divide_by_zero(self):
  290. ProjectTransactionThreshold.objects.create(
  291. project=self.project,
  292. organization=self.project.organization,
  293. threshold=600,
  294. metric=TransactionMetric.LCP.value,
  295. )
  296. # Shouldn't count towards apdex
  297. data = load_data(
  298. "transaction",
  299. start_timestamp=self.day_ago + timedelta(minutes=(1)),
  300. timestamp=self.day_ago + timedelta(minutes=(3)),
  301. )
  302. data["transaction"] = "/apdex/new/"
  303. data["user"] = {"email": "1@example.com"}
  304. data["measurements"] = {}
  305. self.store_event(data, project_id=self.project.id)
  306. response = self.do_request(
  307. data={
  308. "start": self.day_ago,
  309. "end": self.day_ago + timedelta(hours=2),
  310. "interval": "1h",
  311. "yAxis": "apdex()",
  312. "project": [self.project.id],
  313. },
  314. )
  315. assert response.status_code == 200, response.content
  316. assert len(response.data["data"]) == 2
  317. data = response.data["data"]
  318. # 0 transactions with LCP 0/0
  319. assert [attrs for time, attrs in response.data["data"]] == [
  320. [{"count": 0}],
  321. [{"count": 0}],
  322. ]
    def test_aggregate_function_apdex(self):
        """apdex() series: default threshold, per-project thresholds, multi-axis."""
        project1 = self.create_project()
        project2 = self.create_project()
        # (transaction name suffix, duration in ms, target project id)
        events = [
            ("one", 400, project1.id),
            ("one", 400, project1.id),
            ("two", 3000, project2.id),
            ("two", 1000, project2.id),
            ("three", 3000, project2.id),
        ]
        for idx, event in enumerate(events):
            data = load_data(
                "transaction",
                start_timestamp=self.day_ago + timedelta(minutes=(1 + idx)),
                timestamp=self.day_ago + timedelta(minutes=(1 + idx), milliseconds=event[1]),
            )
            data["event_id"] = f"{idx}" * 32
            data["transaction"] = f"/apdex/new/{event[0]}"
            data["user"] = {"email": f"{idx}@example.com"}
            self.store_event(data, project_id=event[2])
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": "apdex()",
            },
        )
        assert response.status_code == 200, response.content
        # Under the organization-default threshold the five transactions above
        # score an apdex of 0.3 in the first hour; the second hour is empty.
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 0.3}],
            [{"count": 0}],
        ]
        # Tighten both projects to a 100ms duration threshold...
        ProjectTransactionThreshold.objects.create(
            project=project1,
            organization=project1.organization,
            threshold=100,
            metric=TransactionMetric.DURATION.value,
        )
        ProjectTransactionThreshold.objects.create(
            project=project2,
            organization=project1.organization,
            threshold=100,
            metric=TransactionMetric.DURATION.value,
        )
        # ...which drops the first-hour apdex to 0.2.
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": "apdex()",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 0.2}],
            [{"count": 0}],
        ]
        # Multiple yAxis values: each series is keyed by its axis name and
        # carries an "order" matching its position in the request list.
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "yAxis": ["user_count", "apdex()"],
            },
        )
        assert response.status_code == 200, response.content
        assert response.data["user_count"]["order"] == 0
        assert [attrs for time, attrs in response.data["user_count"]["data"]] == [
            [{"count": 5}],
            [{"count": 0}],
        ]
        assert response.data["apdex()"]["order"] == 1
        assert [attrs for time, attrs in response.data["apdex()"]["data"]] == [
            [{"count": 0.2}],
            [{"count": 0}],
        ]
  400. def test_aggregate_function_count(self):
  401. response = self.do_request(
  402. data={
  403. "start": self.day_ago,
  404. "end": self.day_ago + timedelta(hours=2),
  405. "interval": "1h",
  406. "yAxis": "count()",
  407. },
  408. )
  409. assert response.status_code == 200, response.content
  410. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  411. def test_invalid_aggregate(self):
  412. response = self.do_request(
  413. data={
  414. "start": self.day_ago,
  415. "end": self.day_ago + timedelta(hours=2),
  416. "interval": "1h",
  417. "yAxis": "rubbish",
  418. },
  419. )
  420. assert response.status_code == 400, response.content
  421. def test_aggregate_function_user_count(self):
  422. response = self.do_request(
  423. data={
  424. "start": self.day_ago,
  425. "end": self.day_ago + timedelta(hours=2),
  426. "interval": "1h",
  427. "yAxis": "count_unique(user)",
  428. },
  429. )
  430. assert response.status_code == 200, response.content
  431. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 1}]]
  432. def test_aggregate_invalid(self):
  433. response = self.do_request(
  434. data={
  435. "start": self.day_ago,
  436. "end": self.day_ago + timedelta(hours=2),
  437. "interval": "1h",
  438. "yAxis": "nope(lol)",
  439. },
  440. )
  441. assert response.status_code == 400, response.content
  442. def test_throughput_meta(self):
  443. project = self.create_project()
  444. # Each of these denotes how many events to create in each hour
  445. event_counts = [6, 0, 6, 3, 0, 3]
  446. for hour, count in enumerate(event_counts):
  447. for minute in range(count):
  448. self.store_event(
  449. data={
  450. "event_id": str(uuid.uuid1()),
  451. "message": "very bad",
  452. "timestamp": (
  453. self.day_ago + timedelta(hours=hour, minutes=minute)
  454. ).isoformat(),
  455. "fingerprint": ["group1"],
  456. "tags": {"sentry:user": self.user.email},
  457. },
  458. project_id=project.id,
  459. )
  460. for axis in ["epm()", "tpm()"]:
  461. response = self.do_request(
  462. data={
  463. "transformAliasToInputFormat": 1,
  464. "start": self.day_ago,
  465. "end": self.day_ago + timedelta(hours=6),
  466. "interval": "1h",
  467. "yAxis": axis,
  468. "project": project.id,
  469. },
  470. )
  471. meta = response.data["meta"]
  472. assert meta["fields"] == {
  473. "time": "date",
  474. axis: "rate",
  475. }
  476. assert meta["units"] == {"time": None, axis: "1/minute"}
  477. data = response.data["data"]
  478. assert len(data) == 6
  479. rows = data[0:6]
  480. for test in zip(event_counts, rows):
  481. assert test[1][1][0]["count"] == test[0] / (3600.0 / 60.0)
  482. for axis in ["eps()", "tps()"]:
  483. response = self.do_request(
  484. data={
  485. "transformAliasToInputFormat": 1,
  486. "start": self.day_ago,
  487. "end": self.day_ago + timedelta(hours=6),
  488. "interval": "1h",
  489. "yAxis": axis,
  490. "project": project.id,
  491. },
  492. )
  493. meta = response.data["meta"]
  494. assert meta["fields"] == {
  495. "time": "date",
  496. axis: "rate",
  497. }
  498. assert meta["units"] == {"time": None, axis: "1/second"}
  499. def test_throughput_epm_hour_rollup(self):
  500. project = self.create_project()
  501. # Each of these denotes how many events to create in each hour
  502. event_counts = [6, 0, 6, 3, 0, 3]
  503. for hour, count in enumerate(event_counts):
  504. for minute in range(count):
  505. self.store_event(
  506. data={
  507. "event_id": str(uuid.uuid1()),
  508. "message": "very bad",
  509. "timestamp": (
  510. self.day_ago + timedelta(hours=hour, minutes=minute)
  511. ).isoformat(),
  512. "fingerprint": ["group1"],
  513. "tags": {"sentry:user": self.user.email},
  514. },
  515. project_id=project.id,
  516. )
  517. for axis in ["epm()", "tpm()"]:
  518. response = self.do_request(
  519. data={
  520. "start": self.day_ago,
  521. "end": self.day_ago + timedelta(hours=6),
  522. "interval": "1h",
  523. "yAxis": axis,
  524. "project": project.id,
  525. },
  526. )
  527. assert response.status_code == 200, response.content
  528. data = response.data["data"]
  529. assert len(data) == 6
  530. rows = data[0:6]
  531. for test in zip(event_counts, rows):
  532. assert test[1][1][0]["count"] == test[0] / (3600.0 / 60.0)
  533. def test_throughput_epm_day_rollup(self):
  534. project = self.create_project()
  535. # Each of these denotes how many events to create in each minute
  536. event_counts = [6, 0, 6, 3, 0, 3]
  537. for hour, count in enumerate(event_counts):
  538. for minute in range(count):
  539. self.store_event(
  540. data={
  541. "event_id": str(uuid.uuid1()),
  542. "message": "very bad",
  543. "timestamp": (
  544. self.day_ago + timedelta(hours=hour, minutes=minute)
  545. ).isoformat(),
  546. "fingerprint": ["group1"],
  547. "tags": {"sentry:user": self.user.email},
  548. },
  549. project_id=project.id,
  550. )
  551. for axis in ["epm()", "tpm()"]:
  552. response = self.do_request(
  553. data={
  554. "start": self.day_ago,
  555. "end": self.day_ago + timedelta(hours=24),
  556. "interval": "24h",
  557. "yAxis": axis,
  558. "project": project.id,
  559. },
  560. )
  561. assert response.status_code == 200, response.content
  562. data = response.data["data"]
  563. assert len(data) == 2
  564. assert data[0][1][0]["count"] == sum(event_counts) / (86400.0 / 60.0)
  565. def test_throughput_eps_minute_rollup(self):
  566. project = self.create_project()
  567. # Each of these denotes how many events to create in each minute
  568. event_counts = [6, 0, 6, 3, 0, 3]
  569. for minute, count in enumerate(event_counts):
  570. for second in range(count):
  571. self.store_event(
  572. data={
  573. "event_id": str(uuid.uuid1()),
  574. "message": "very bad",
  575. "timestamp": (
  576. self.day_ago + timedelta(minutes=minute, seconds=second)
  577. ).isoformat(),
  578. "fingerprint": ["group1"],
  579. "tags": {"sentry:user": self.user.email},
  580. },
  581. project_id=project.id,
  582. )
  583. for axis in ["eps()", "tps()"]:
  584. response = self.do_request(
  585. data={
  586. "start": self.day_ago,
  587. "end": self.day_ago + timedelta(minutes=6),
  588. "interval": "1m",
  589. "yAxis": axis,
  590. "project": project.id,
  591. },
  592. )
  593. assert response.status_code == 200, response.content
  594. data = response.data["data"]
  595. assert len(data) == 6
  596. rows = data[0:6]
  597. for test in zip(event_counts, rows):
  598. assert test[1][1][0]["count"] == test[0] / 60.0
  599. def test_throughput_eps_no_rollup(self):
  600. project = self.create_project()
  601. # Each of these denotes how many events to create in each minute
  602. event_counts = [6, 0, 6, 3, 0, 3]
  603. for minute, count in enumerate(event_counts):
  604. for second in range(count):
  605. self.store_event(
  606. data={
  607. "event_id": str(uuid.uuid1()),
  608. "message": "very bad",
  609. "timestamp": (
  610. self.day_ago + timedelta(minutes=minute, seconds=second)
  611. ).isoformat(),
  612. "fingerprint": ["group1"],
  613. "tags": {"sentry:user": self.user.email},
  614. },
  615. project_id=project.id,
  616. )
  617. response = self.do_request(
  618. data={
  619. "start": self.day_ago,
  620. "end": self.day_ago + timedelta(minutes=1),
  621. "interval": "1s",
  622. "yAxis": "eps()",
  623. "project": project.id,
  624. },
  625. )
  626. assert response.status_code == 200, response.content
  627. data = response.data["data"]
  628. # expect 60 data points between time span of 0 and 60 seconds
  629. assert len(data) == 60
  630. rows = data[0:6]
  631. for row in rows:
  632. assert row[1][0]["count"] == 1
  633. def test_transaction_events(self):
  634. prototype = {
  635. "type": "transaction",
  636. "transaction": "api.issue.delete",
  637. "spans": [],
  638. "contexts": {"trace": {"op": "foobar", "trace_id": "a" * 32, "span_id": "a" * 16}},
  639. "tags": {"important": "yes"},
  640. }
  641. fixtures = (
  642. ("d" * 32, before_now(minutes=32)),
  643. ("e" * 32, before_now(hours=1, minutes=2)),
  644. ("f" * 32, before_now(hours=1, minutes=35)),
  645. )
  646. for fixture in fixtures:
  647. data = prototype.copy()
  648. data["event_id"] = fixture[0]
  649. data["timestamp"] = fixture[1].isoformat()
  650. data["start_timestamp"] = (fixture[1] - timedelta(seconds=1)).isoformat()
  651. self.store_event(data=data, project_id=self.project.id)
  652. for dataset in ["discover", "transactions"]:
  653. response = self.do_request(
  654. data={
  655. "project": self.project.id,
  656. "end": before_now(),
  657. "start": before_now(hours=2),
  658. "query": "event.type:transaction",
  659. "interval": "30m",
  660. "yAxis": "count()",
  661. "dataset": dataset,
  662. },
  663. )
  664. assert response.status_code == 200, response.content
  665. items = [item for time, item in response.data["data"] if item]
  666. # We could get more results depending on where the 30 min
  667. # windows land.
  668. assert len(items) >= 3
  669. def test_project_id_query_filter(self):
  670. response = self.do_request(
  671. data={
  672. "end": before_now(),
  673. "start": before_now(hours=2),
  674. "query": "project_id:1",
  675. "interval": "30m",
  676. "yAxis": "count()",
  677. },
  678. )
  679. assert response.status_code == 200
  680. def test_latest_release_query_filter(self):
  681. response = self.do_request(
  682. data={
  683. "project": self.project.id,
  684. "end": before_now(),
  685. "start": before_now(hours=2),
  686. "query": "release:latest",
  687. "interval": "30m",
  688. "yAxis": "count()",
  689. },
  690. )
  691. assert response.status_code == 200
  692. def test_conditional_filter(self):
  693. response = self.do_request(
  694. data={
  695. "start": self.day_ago,
  696. "end": self.day_ago + timedelta(hours=2),
  697. "query": "id:{} OR id:{}".format("a" * 32, "b" * 32),
  698. "interval": "30m",
  699. "yAxis": "count()",
  700. },
  701. )
  702. assert response.status_code == 200, response.content
  703. data = response.data["data"]
  704. assert len(data) == 4
  705. assert data[0][1][0]["count"] == 1
  706. assert data[2][1][0]["count"] == 1
  707. def test_simple_multiple_yaxis(self):
  708. response = self.do_request(
  709. data={
  710. "start": self.day_ago,
  711. "end": self.day_ago + timedelta(hours=2),
  712. "interval": "1h",
  713. "yAxis": ["user_count", "event_count"],
  714. },
  715. )
  716. assert response.status_code == 200, response.content
  717. assert response.data["user_count"]["order"] == 0
  718. assert [attrs for time, attrs in response.data["user_count"]["data"]] == [
  719. [{"count": 1}],
  720. [{"count": 1}],
  721. ]
  722. assert response.data["event_count"]["order"] == 1
  723. assert [attrs for time, attrs in response.data["event_count"]["data"]] == [
  724. [{"count": 1}],
  725. [{"count": 2}],
  726. ]
  727. def test_equation_yaxis(self):
  728. response = self.do_request(
  729. data={
  730. "start": self.day_ago,
  731. "end": self.day_ago + timedelta(hours=2),
  732. "interval": "1h",
  733. "yAxis": ["equation|count() / 100"],
  734. },
  735. )
  736. assert response.status_code == 200, response.content
  737. assert len(response.data["data"]) == 2
  738. assert [attrs for time, attrs in response.data["data"]] == [
  739. [{"count": 0.01}],
  740. [{"count": 0.02}],
  741. ]
  742. def test_eps_equation(self):
  743. response = self.do_request(
  744. data={
  745. "start": self.day_ago,
  746. "end": self.day_ago + timedelta(hours=2),
  747. "interval": "1h",
  748. "yAxis": ["equation|eps() * 2"],
  749. },
  750. )
  751. assert response.status_code == 200, response.content
  752. assert len(response.data["data"]) == 2
  753. assert pytest.approx(0.000556, abs=0.0001) == response.data["data"][0][1][0]["count"]
  754. assert pytest.approx(0.001112, abs=0.0001) == response.data["data"][1][1][0]["count"]
  755. def test_epm_equation(self):
  756. response = self.do_request(
  757. data={
  758. "start": self.day_ago,
  759. "end": self.day_ago + timedelta(hours=2),
  760. "interval": "1h",
  761. "yAxis": ["equation|epm() * 2"],
  762. },
  763. )
  764. assert response.status_code == 200, response.content
  765. assert len(response.data["data"]) == 2
  766. assert pytest.approx(0.03334, abs=0.01) == response.data["data"][0][1][0]["count"]
  767. assert pytest.approx(0.06667, abs=0.01) == response.data["data"][1][1][0]["count"]
  768. def test_equation_mixed_multi_yaxis(self):
  769. response = self.do_request(
  770. data={
  771. "start": self.day_ago,
  772. "end": self.day_ago + timedelta(hours=2),
  773. "interval": "1h",
  774. "yAxis": ["count()", "equation|count() * 100"],
  775. },
  776. )
  777. assert response.status_code == 200, response.content
  778. assert response.data["count()"]["order"] == 0
  779. assert [attrs for time, attrs in response.data["count()"]["data"]] == [
  780. [{"count": 1}],
  781. [{"count": 2}],
  782. ]
  783. assert response.data["equation|count() * 100"]["order"] == 1
  784. assert [attrs for time, attrs in response.data["equation|count() * 100"]["data"]] == [
  785. [{"count": 100}],
  786. [{"count": 200}],
  787. ]
  788. def test_equation_multi_yaxis(self):
  789. response = self.do_request(
  790. data={
  791. "start": self.day_ago,
  792. "end": self.day_ago + timedelta(hours=2),
  793. "interval": "1h",
  794. "yAxis": ["equation|count() / 100", "equation|count() * 100"],
  795. },
  796. )
  797. assert response.status_code == 200, response.content
  798. assert response.data["equation|count() / 100"]["order"] == 0
  799. assert [attrs for time, attrs in response.data["equation|count() / 100"]["data"]] == [
  800. [{"count": 0.01}],
  801. [{"count": 0.02}],
  802. ]
  803. assert response.data["equation|count() * 100"]["order"] == 1
  804. assert [attrs for time, attrs in response.data["equation|count() * 100"]["data"]] == [
  805. [{"count": 100}],
  806. [{"count": 200}],
  807. ]
  808. def test_large_interval_no_drop_values(self):
  809. self.store_event(
  810. data={
  811. "event_id": "d" * 32,
  812. "message": "not good",
  813. "timestamp": (self.day_ago - timedelta(minutes=10)).isoformat(),
  814. "fingerprint": ["group3"],
  815. },
  816. project_id=self.project.id,
  817. )
  818. response = self.do_request(
  819. data={
  820. "project": self.project.id,
  821. "end": self.day_ago,
  822. "start": self.day_ago - timedelta(hours=24),
  823. "query": 'message:"not good"',
  824. "interval": "1d",
  825. "yAxis": "count()",
  826. },
  827. )
  828. assert response.status_code == 200
  829. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 0}], [{"count": 1}]]
  830. @mock.patch("sentry.snuba.discover.timeseries_query", return_value={})
  831. def test_multiple_yaxis_only_one_query(self, mock_query):
  832. self.do_request(
  833. data={
  834. "project": self.project.id,
  835. "start": self.day_ago,
  836. "end": self.day_ago + timedelta(hours=2),
  837. "interval": "1h",
  838. "yAxis": ["user_count", "event_count", "epm()", "eps()"],
  839. },
  840. )
  841. assert mock_query.call_count == 1
  842. @mock.patch("sentry.snuba.discover.bulk_snuba_queries", return_value=[{"data": []}])
  843. def test_invalid_interval(self, mock_query):
  844. self.do_request(
  845. data={
  846. "end": before_now(),
  847. "start": before_now(hours=24),
  848. "query": "",
  849. "interval": "1s",
  850. "yAxis": "count()",
  851. },
  852. )
  853. assert mock_query.call_count == 1
  854. # Should've reset to the default for 24h
  855. assert mock_query.mock_calls[0].args[0][0].query.granularity.granularity == 300
  856. self.do_request(
  857. data={
  858. "end": before_now(),
  859. "start": before_now(hours=24),
  860. "query": "",
  861. "interval": "0d",
  862. "yAxis": "count()",
  863. },
  864. )
  865. assert mock_query.call_count == 2
  866. # Should've reset to the default for 24h
  867. assert mock_query.mock_calls[1].args[0][0].query.granularity.granularity == 300
  868. def test_out_of_retention(self):
  869. with self.options({"system.event-retention-days": 10}):
  870. response = self.do_request(
  871. data={
  872. "start": before_now(days=20),
  873. "end": before_now(days=15),
  874. "query": "",
  875. "interval": "30m",
  876. "yAxis": "count()",
  877. },
  878. )
  879. assert response.status_code == 400
    @mock.patch("sentry.utils.snuba.quantize_time")
    def test_quantize_dates(self, mock_quantize):
        """Only long relative stats periods get their date bounds quantized."""
        mock_quantize.return_value = before_now(days=1)
        # Don't quantize short time periods
        self.do_request(
            data={"statsPeriod": "1h", "query": "", "interval": "30m", "yAxis": "count()"},
        )
        # Don't quantize absolute date periods
        self.do_request(
            data={
                "start": before_now(days=20),
                "end": before_now(days=15),
                "query": "",
                "interval": "30m",
                "yAxis": "count()",
            },
        )
        assert len(mock_quantize.mock_calls) == 0
        # Quantize long date periods
        self.do_request(
            data={"statsPeriod": "90d", "query": "", "interval": "30m", "yAxis": "count()"},
        )
        # Two calls: presumably one for the start bound and one for the end — confirm.
        assert len(mock_quantize.mock_calls) == 2
  903. def test_with_zerofill(self):
  904. response = self.do_request(
  905. data={
  906. "start": self.day_ago,
  907. "end": self.day_ago + timedelta(hours=2),
  908. "interval": "30m",
  909. },
  910. )
  911. assert response.status_code == 200, response.content
  912. assert [attrs for time, attrs in response.data["data"]] == [
  913. [{"count": 1}],
  914. [{"count": 0}],
  915. [{"count": 2}],
  916. [{"count": 0}],
  917. ]
    def test_without_zerofill(self):
        """withoutZerofill=1 drops empty buckets and echoes the effective bounds."""
        start = self.day_ago.isoformat()
        end = (self.day_ago + timedelta(hours=2)).isoformat()
        response = self.do_request(
            data={
                "start": start,
                "end": end,
                "interval": "30m",
                "withoutZerofill": "1",
            },
            # NOTE(review): the interpolation flag appears to gate withoutZerofill — confirm.
            features={
                "organizations:performance-chart-interpolation": True,
                "organizations:discover-basic": True,
            },
        )
        assert response.status_code == 200, response.content
        # Only the two buckets that actually contain events are returned.
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 1}],
            [{"count": 2}],
        ]
        # The effective range is reported as epoch timestamps.
        assert response.data["start"] == parse_date(start).timestamp()
        assert response.data["end"] == parse_date(end).timestamp()
  940. def test_comparison_error_dataset(self):
  941. self.store_event(
  942. data={
  943. "timestamp": (self.day_ago + timedelta(days=-1, minutes=1)).isoformat(),
  944. },
  945. project_id=self.project.id,
  946. )
  947. self.store_event(
  948. data={
  949. "timestamp": (self.day_ago + timedelta(days=-1, minutes=2)).isoformat(),
  950. },
  951. project_id=self.project.id,
  952. )
  953. self.store_event(
  954. data={
  955. "timestamp": (self.day_ago + timedelta(days=-1, hours=1, minutes=1)).isoformat(),
  956. },
  957. project_id=self.project2.id,
  958. )
  959. response = self.do_request(
  960. data={
  961. "start": self.day_ago,
  962. "end": self.day_ago + timedelta(hours=2),
  963. "interval": "1h",
  964. "comparisonDelta": int(timedelta(days=1).total_seconds()),
  965. "dataset": "errors",
  966. }
  967. )
  968. assert response.status_code == 200, response.content
  969. assert [attrs for time, attrs in response.data["data"]] == [
  970. [{"count": 1, "comparisonCount": 2}],
  971. [{"count": 2, "comparisonCount": 1}],
  972. ]
  973. def test_comparison(self):
  974. self.store_event(
  975. data={
  976. "timestamp": (self.day_ago + timedelta(days=-1, minutes=1)).isoformat(),
  977. },
  978. project_id=self.project.id,
  979. )
  980. self.store_event(
  981. data={
  982. "timestamp": (self.day_ago + timedelta(days=-1, minutes=2)).isoformat(),
  983. },
  984. project_id=self.project.id,
  985. )
  986. self.store_event(
  987. data={
  988. "timestamp": (self.day_ago + timedelta(days=-1, hours=1, minutes=1)).isoformat(),
  989. },
  990. project_id=self.project2.id,
  991. )
  992. response = self.do_request(
  993. data={
  994. "start": self.day_ago,
  995. "end": self.day_ago + timedelta(hours=2),
  996. "interval": "1h",
  997. "comparisonDelta": int(timedelta(days=1).total_seconds()),
  998. }
  999. )
  1000. assert response.status_code == 200, response.content
  1001. assert [attrs for time, attrs in response.data["data"]] == [
  1002. [{"count": 1, "comparisonCount": 2}],
  1003. [{"count": 2, "comparisonCount": 1}],
  1004. ]
    def test_comparison_invalid(self):
        """Invalid comparisonDelta values are rejected with descriptive 400 errors."""
        # A non-integer delta is rejected outright.
        response = self.do_request(
            data={
                "start": self.day_ago,
                "end": self.day_ago + timedelta(hours=2),
                "interval": "1h",
                "comparisonDelta": "17h",
            },
        )
        assert response.status_code == 400, response.content
        assert response.data["detail"] == "comparisonDelta must be an integer"
        # A delta that pushes the comparison window past retention is rejected.
        start = before_now(days=85)
        end = start + timedelta(days=7)
        with self.options({"system.event-retention-days": 90}):
            response = self.do_request(
                data={
                    "start": start,
                    "end": end,
                    "interval": "1h",
                    "comparisonDelta": int(timedelta(days=7).total_seconds()),
                }
            )
            assert response.status_code == 400, response.content
            assert response.data["detail"] == "Comparison period is outside retention window"
  1029. def test_equations_divide_by_zero(self):
  1030. response = self.do_request(
  1031. data={
  1032. "start": self.day_ago,
  1033. "end": self.day_ago + timedelta(hours=2),
  1034. "interval": "1h",
  1035. # force a 0 in the denominator by doing 1 - 1
  1036. # since a 0 literal is illegal as the denominator
  1037. "yAxis": ["equation|count() / (1-1)"],
  1038. },
  1039. )
  1040. assert response.status_code == 200, response.content
  1041. assert len(response.data["data"]) == 2
  1042. assert [attrs for time, attrs in response.data["data"]] == [
  1043. [{"count": None}],
  1044. [{"count": None}],
  1045. ]
  1046. @mock.patch("sentry.search.events.builder.base.raw_snql_query")
  1047. def test_profiles_dataset_simple(self, mock_snql_query):
  1048. mock_snql_query.side_effect = [{"meta": {}, "data": []}]
  1049. query = {
  1050. "yAxis": [
  1051. "count()",
  1052. "p75()",
  1053. "p95()",
  1054. "p99()",
  1055. "p75(profile.duration)",
  1056. "p95(profile.duration)",
  1057. "p99(profile.duration)",
  1058. ],
  1059. "project": [self.project.id],
  1060. "dataset": "profiles",
  1061. }
  1062. response = self.do_request(query, features={"organizations:profiling": True})
  1063. assert response.status_code == 200, response.content
  1064. def test_tag_with_conflicting_function_alias_simple(self):
  1065. for _ in range(7):
  1066. self.store_event(
  1067. data={
  1068. "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
  1069. "tags": {"count": "9001"},
  1070. },
  1071. project_id=self.project2.id,
  1072. )
  1073. # Query for count and count()
  1074. data = {
  1075. "start": self.day_ago.isoformat(),
  1076. "end": (self.day_ago + timedelta(minutes=3)).isoformat(),
  1077. "interval": "1h",
  1078. "yAxis": "count()",
  1079. "orderby": ["-count()"],
  1080. "field": ["count()", "count"],
  1081. "partial": "1",
  1082. }
  1083. response = self.client.get(self.url, data, format="json")
  1084. assert response.status_code == 200
  1085. # Expect a count of 8 because one event from setUp
  1086. assert response.data["data"][0][1] == [{"count": 8}]
  1087. data["query"] = "count:9001"
  1088. response = self.client.get(self.url, data, format="json")
  1089. assert response.status_code == 200
  1090. assert response.data["data"][0][1] == [{"count": 7}]
  1091. data["query"] = "count:abc"
  1092. response = self.client.get(self.url, data, format="json")
  1093. assert response.status_code == 200
  1094. assert all([interval[1][0]["count"] == 0 for interval in response.data["data"]])
  1095. def test_group_id_tag_simple(self):
  1096. event_data: _EventDataDict = {
  1097. "data": {
  1098. "message": "poof",
  1099. "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
  1100. "user": {"email": self.user.email},
  1101. "tags": {"group_id": "testing"},
  1102. "fingerprint": ["group1"],
  1103. },
  1104. "project": self.project2,
  1105. "count": 7,
  1106. }
  1107. for i in range(event_data["count"]):
  1108. event_data["data"]["event_id"] = f"a{i}" * 16
  1109. self.store_event(event_data["data"], project_id=event_data["project"].id)
  1110. data = {
  1111. "start": self.day_ago.isoformat(),
  1112. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1113. "interval": "1h",
  1114. "yAxis": "count()",
  1115. "orderby": ["-count()"],
  1116. "field": ["count()", "group_id"],
  1117. "partial": "1",
  1118. }
  1119. response = self.client.get(self.url, data, format="json")
  1120. assert response.status_code == 200
  1121. assert response.data["data"][0][1] == [{"count": 8}]
  1122. data["query"] = "group_id:testing"
  1123. response = self.client.get(self.url, data, format="json")
  1124. assert response.status_code == 200
  1125. assert response.data["data"][0][1] == [{"count": 7}]
  1126. data["query"] = "group_id:abc"
  1127. response = self.client.get(self.url, data, format="json")
  1128. assert response.status_code == 200
  1129. assert all([interval[1][0]["count"] == 0 for interval in response.data["data"]])
  1130. class OrganizationEventsStatsTopNEvents(APITestCase, SnubaTestCase):
    def setUp(self):
        """Create two projects of fixture events with strictly decreasing counts.

        Counts 7..1 make ``orderby=-count()`` deterministic for top-N tests; the
        fifth fixture is a transaction, and the last two fall outside the top 5.
        """
        super().setUp()
        self.login_as(user=self.user)
        # Fixed reference time: 10:00 on the previous day.
        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        self.project = self.create_project()
        self.project2 = self.create_project()
        self.user2 = self.create_user()
        # One transaction event sharing the tag used by all the error fixtures.
        transaction_data = load_data("transaction")
        transaction_data["start_timestamp"] = (self.day_ago + timedelta(minutes=2)).isoformat()
        transaction_data["timestamp"] = (self.day_ago + timedelta(minutes=4)).isoformat()
        transaction_data["tags"] = {"shared-tag": "yup"}
        self.event_data: list[_EventDataDict] = [
            {
                "data": {
                    "message": "poof",
                    "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
                    "user": {"email": self.user.email},
                    "tags": {"shared-tag": "yup"},
                    "fingerprint": ["group1"],
                },
                "project": self.project2,
                "count": 7,
            },
            {
                "data": {
                    "message": "voof",
                    "timestamp": (self.day_ago + timedelta(hours=1, minutes=2)).isoformat(),
                    "fingerprint": ["group2"],
                    "user": {"email": self.user2.email},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project2,
                "count": 6,
            },
            {
                "data": {
                    "message": "very bad",
                    "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
                    "fingerprint": ["group3"],
                    "user": {"email": "foo@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 5,
            },
            {
                "data": {
                    "message": "oh no",
                    "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
                    "fingerprint": ["group4"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 4,
            },
            {"data": transaction_data, "project": self.project, "count": 3},
            # Not in the top 5
            {
                "data": {
                    "message": "sorta bad",
                    "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
                    "fingerprint": ["group5"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 2,
            },
            {
                "data": {
                    "message": "not so bad",
                    "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
                    "fingerprint": ["group6"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 1,
            },
        ]
        # Store each fixture "count" times with unique event ids; self.events
        # keeps one representative event per fixture (the last stored copy).
        self.events = []
        for index, event_data in enumerate(self.event_data):
            data = event_data["data"].copy()
            for i in range(event_data["count"]):
                data["event_id"] = f"{index}{i}" * 16
                event = self.store_event(data, project_id=event_data["project"].id)
            self.events.append(event)
        # The fifth fixture above is the transaction event.
        self.transaction = self.events[4]
        self.enabled_features = {
            "organizations:discover-basic": True,
        }
        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_id_or_slug": self.project.organization.slug},
        )
  1227. def test_no_top_events_with_project_field(self):
  1228. project = self.create_project()
  1229. with self.feature(self.enabled_features):
  1230. response = self.client.get(
  1231. self.url,
  1232. data={
  1233. # make sure to query the project with 0 events
  1234. "project": str(project.id),
  1235. "start": self.day_ago.isoformat(),
  1236. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1237. "interval": "1h",
  1238. "yAxis": "count()",
  1239. "orderby": ["-count()"],
  1240. "field": ["count()", "project"],
  1241. "topEvents": "5",
  1242. },
  1243. format="json",
  1244. )
  1245. assert response.status_code == 200, response.content
  1246. # When there are no top events, we do not return an empty dict.
  1247. # Instead, we return a single zero-filled series for an empty graph.
  1248. data = response.data["data"]
  1249. assert [attrs for time, attrs in data] == [[{"count": 0}], [{"count": 0}]]
  1250. def test_no_top_events(self):
  1251. project = self.create_project()
  1252. with self.feature(self.enabled_features):
  1253. response = self.client.get(
  1254. self.url,
  1255. data={
  1256. # make sure to query the project with 0 events
  1257. "project": str(project.id),
  1258. "start": self.day_ago.isoformat(),
  1259. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1260. "interval": "1h",
  1261. "yAxis": "count()",
  1262. "orderby": ["-count()"],
  1263. "field": ["count()", "message", "user.email"],
  1264. "topEvents": "5",
  1265. },
  1266. format="json",
  1267. )
  1268. data = response.data["data"]
  1269. assert response.status_code == 200, response.content
  1270. # When there are no top events, we do not return an empty dict.
  1271. # Instead, we return a single zero-filled series for an empty graph.
  1272. assert [attrs for time, attrs in data] == [[{"count": 0}], [{"count": 0}]]
  1273. def test_no_top_events_with_multi_axis(self):
  1274. project = self.create_project()
  1275. with self.feature(self.enabled_features):
  1276. response = self.client.get(
  1277. self.url,
  1278. data={
  1279. # make sure to query the project with 0 events
  1280. "project": str(project.id),
  1281. "start": self.day_ago.isoformat(),
  1282. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1283. "interval": "1h",
  1284. "yAxis": ["count()", "count_unique(user)"],
  1285. "orderby": ["-count()"],
  1286. "field": ["count()", "count_unique(user)", "message", "user.email"],
  1287. "topEvents": "5",
  1288. },
  1289. format="json",
  1290. )
  1291. assert response.status_code == 200
  1292. data = response.data[""]
  1293. assert [attrs for time, attrs in data["count()"]["data"]] == [
  1294. [{"count": 0}],
  1295. [{"count": 0}],
  1296. ]
  1297. assert [attrs for time, attrs in data["count_unique(user)"]["data"]] == [
  1298. [{"count": 0}],
  1299. [{"count": 0}],
  1300. ]
    def test_simple_top_events(self):
        """Top-5 series are keyed by comma-joined field values, plus an "Other" bucket."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": self.day_ago.isoformat(),
                    "end": (self.day_ago + timedelta(hours=2)).isoformat(),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "user.email"],
                    "topEvents": "5",
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        # 5 top series plus the "Other" series.
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            # Series keys join the grouped field values; missing email maps to "None".
            results = data[
                ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
            ]
            assert results["order"] == index
            # The fixture's count appears in one of this series' buckets.
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for _, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
  1331. def test_top_events_with_projects_other(self):
  1332. with self.feature(self.enabled_features):
  1333. response = self.client.get(
  1334. self.url,
  1335. data={
  1336. "start": self.day_ago.isoformat(),
  1337. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1338. "interval": "1h",
  1339. "yAxis": "count()",
  1340. "orderby": ["-count()"],
  1341. "field": ["count()", "project"],
  1342. "topEvents": "1",
  1343. },
  1344. format="json",
  1345. )
  1346. data = response.data
  1347. assert response.status_code == 200, response.content
  1348. assert set(data.keys()) == {"Other", self.project.slug}
  1349. assert data[self.project.slug]["order"] == 0
  1350. assert [attrs[0]["count"] for _, attrs in data[self.project.slug]["data"]] == [15, 0]
  1351. assert data["Other"]["order"] == 1
  1352. assert [attrs[0]["count"] for _, attrs in data["Other"]["data"]] == [7, 6]
  1353. def test_top_events_with_projects_fields(self):
  1354. # We need to handle the project name fields differently
  1355. for project_field in ["project", "project.name"]:
  1356. with self.feature(self.enabled_features):
  1357. response = self.client.get(
  1358. self.url,
  1359. data={
  1360. "start": self.day_ago.isoformat(),
  1361. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1362. "interval": "1h",
  1363. "yAxis": "count()",
  1364. "orderby": ["-count()"],
  1365. "field": ["count()", project_field],
  1366. "topEvents": "5",
  1367. },
  1368. format="json",
  1369. )
  1370. data = response.data
  1371. assert response.status_code == 200, response.content
  1372. assert data[self.project.slug]["order"] == 0, project_field
  1373. assert [attrs[0]["count"] for _, attrs in data[self.project.slug]["data"]] == [
  1374. 15,
  1375. 0,
  1376. ], project_field
  1377. assert data[self.project2.slug]["order"] == 1, project_field
  1378. assert [attrs[0]["count"] for _, attrs in data[self.project2.slug]["data"]] == [
  1379. 7,
  1380. 6,
  1381. ], project_field
  1382. def test_tag_with_conflicting_function_alias_simple(self):
  1383. event_data: _EventDataDict = {
  1384. "data": {
  1385. "message": "poof",
  1386. "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
  1387. "user": {"email": self.user.email},
  1388. "tags": {"count": "9001"},
  1389. "fingerprint": ["group1"],
  1390. },
  1391. "project": self.project2,
  1392. "count": 7,
  1393. }
  1394. for i in range(event_data["count"]):
  1395. event_data["data"]["event_id"] = f"a{i}" * 16
  1396. self.store_event(event_data["data"], project_id=event_data["project"].id)
  1397. # Query for count and count()
  1398. data = {
  1399. "start": self.day_ago.isoformat(),
  1400. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1401. "interval": "1h",
  1402. "yAxis": "count()",
  1403. "orderby": ["-count()"],
  1404. "field": ["count()", "count"],
  1405. "topEvents": "5",
  1406. "partial": "1",
  1407. }
  1408. with self.feature(self.enabled_features):
  1409. response = self.client.get(self.url, data, format="json")
  1410. assert response.status_code == 200
  1411. assert response.data["9001"]["data"][0][1] == [{"count": 7}]
  1412. data["query"] = "count:9001"
  1413. with self.feature(self.enabled_features):
  1414. response = self.client.get(self.url, data, format="json")
  1415. assert response.status_code == 200
  1416. assert response.data["9001"]["data"][0][1] == [{"count": 7}]
  1417. data["query"] = "count:abc"
  1418. with self.feature(self.enabled_features):
  1419. response = self.client.get(self.url, data, format="json")
  1420. assert response.status_code == 200
  1421. assert all([interval[1][0]["count"] == 0 for interval in response.data["data"]])
  1422. @pytest.mark.xfail(
  1423. reason="The response.data[Other] returns 15 locally and returns 16 or 15 remotely."
  1424. )
  1425. def test_tag_with_conflicting_function_alias_with_other_single_grouping(self):
  1426. event_data: list[_EventDataDict] = [
  1427. {
  1428. "data": {
  1429. "message": "poof",
  1430. "timestamp": self.day_ago + timedelta(minutes=2),
  1431. "user": {"email": self.user.email},
  1432. "tags": {"count": "9001"},
  1433. "fingerprint": ["group1"],
  1434. },
  1435. "project": self.project2,
  1436. "count": 7,
  1437. },
  1438. {
  1439. "data": {
  1440. "message": "poof2",
  1441. "timestamp": self.day_ago + timedelta(minutes=2),
  1442. "user": {"email": self.user.email},
  1443. "tags": {"count": "abc"},
  1444. "fingerprint": ["group1"],
  1445. },
  1446. "project": self.project2,
  1447. "count": 3,
  1448. },
  1449. ]
  1450. for index, event in enumerate(event_data):
  1451. for i in range(event["count"]):
  1452. event["data"]["event_id"] = f"{index}{i}" * 16
  1453. self.store_event(event["data"], project_id=event["project"].id)
  1454. # Query for count and count()
  1455. data = {
  1456. "start": self.day_ago.isoformat(),
  1457. "end": (self.day_ago + timedelta(hours=1)).isoformat(),
  1458. "interval": "1h",
  1459. "yAxis": "count()",
  1460. "orderby": ["-count"],
  1461. "field": ["count()", "count"],
  1462. "topEvents": "2",
  1463. "partial": "1",
  1464. }
  1465. with self.feature(self.enabled_features):
  1466. response = self.client.get(self.url, data, format="json")
  1467. assert response.status_code == 200
  1468. assert response.data["9001"]["data"][0][1] == [{"count": 7}]
  1469. assert response.data["abc"]["data"][0][1] == [{"count": 3}]
  1470. assert response.data["Other"]["data"][0][1] == [{"count": 16}]
  1471. def test_tag_with_conflicting_function_alias_with_other_multiple_groupings(self):
  1472. event_data: list[_EventDataDict] = [
  1473. {
  1474. "data": {
  1475. "message": "abc",
  1476. "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
  1477. "user": {"email": self.user.email},
  1478. "tags": {"count": "2"},
  1479. "fingerprint": ["group1"],
  1480. },
  1481. "project": self.project2,
  1482. "count": 3,
  1483. },
  1484. {
  1485. "data": {
  1486. "message": "def",
  1487. "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
  1488. "user": {"email": self.user.email},
  1489. "tags": {"count": "9001"},
  1490. "fingerprint": ["group1"],
  1491. },
  1492. "project": self.project2,
  1493. "count": 7,
  1494. },
  1495. ]
  1496. for index, event in enumerate(event_data):
  1497. for i in range(event["count"]):
  1498. event["data"]["event_id"] = f"{index}{i}" * 16
  1499. self.store_event(event["data"], project_id=event["project"].id)
  1500. # Query for count and count()
  1501. data = {
  1502. "start": self.day_ago.isoformat(),
  1503. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1504. "interval": "2d",
  1505. "yAxis": "count()",
  1506. "orderby": ["-count"],
  1507. "field": ["count()", "count", "message"],
  1508. "topEvents": "2",
  1509. "partial": "1",
  1510. }
  1511. with self.feature(self.enabled_features):
  1512. response = self.client.get(self.url, data, format="json")
  1513. assert response.status_code == 200
  1514. assert response.data["abc,2"]["data"][0][1] == [{"count": 3}]
  1515. assert response.data["def,9001"]["data"][0][1] == [{"count": 7}]
  1516. assert response.data["Other"]["data"][0][1] == [{"count": 25}]
  1517. def test_group_id_tag_simple(self):
  1518. event_data: _EventDataDict = {
  1519. "data": {
  1520. "message": "poof",
  1521. "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
  1522. "user": {"email": self.user.email},
  1523. "tags": {"group_id": "the tag"},
  1524. "fingerprint": ["group1"],
  1525. },
  1526. "project": self.project2,
  1527. "count": 7,
  1528. }
  1529. for i in range(event_data["count"]):
  1530. event_data["data"]["event_id"] = f"a{i}" * 16
  1531. self.store_event(event_data["data"], project_id=event_data["project"].id)
  1532. data = {
  1533. "start": self.day_ago.isoformat(),
  1534. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1535. "interval": "1h",
  1536. "yAxis": "count()",
  1537. "orderby": ["-count()"],
  1538. "field": ["count()", "group_id"],
  1539. "topEvents": "5",
  1540. "partial": "1",
  1541. }
  1542. with self.feature(self.enabled_features):
  1543. response = self.client.get(self.url, data, format="json")
  1544. assert response.status_code == 200, response.content
  1545. assert response.data["the tag"]["data"][0][1] == [{"count": 7}]
  1546. data["query"] = 'group_id:"the tag"'
  1547. with self.feature(self.enabled_features):
  1548. response = self.client.get(self.url, data, format="json")
  1549. assert response.status_code == 200
  1550. assert response.data["the tag"]["data"][0][1] == [{"count": 7}]
  1551. data["query"] = "group_id:abc"
  1552. with self.feature(self.enabled_features):
  1553. response = self.client.get(self.url, data, format="json")
  1554. assert response.status_code == 200
  1555. assert all([interval[1][0]["count"] == 0 for interval in response.data["data"]])
  1556. def test_top_events_limits(self):
  1557. data = {
  1558. "start": self.day_ago.isoformat(),
  1559. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1560. "interval": "1h",
  1561. "yAxis": "count()",
  1562. "orderby": ["-count()"],
  1563. "field": ["count()", "message", "user.email"],
  1564. }
  1565. with self.feature(self.enabled_features):
  1566. data["topEvents"] = str(MAX_TOP_EVENTS + 1)
  1567. response = self.client.get(self.url, data, format="json")
  1568. assert response.status_code == 400
  1569. data["topEvents"] = "0"
  1570. response = self.client.get(self.url, data, format="json")
  1571. assert response.status_code == 400
  1572. data["topEvents"] = "a"
  1573. response = self.client.get(self.url, data, format="json")
  1574. assert response.status_code == 400
  1575. @pytest.mark.xfail(
  1576. reason="The response is wrong whenever we have a top events timeseries on project + any other field + aggregation"
  1577. )
  1578. def test_top_events_with_projects(self):
  1579. with self.feature(self.enabled_features):
  1580. response = self.client.get(
  1581. self.url,
  1582. data={
  1583. "start": self.day_ago.isoformat(),
  1584. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1585. "interval": "1h",
  1586. "yAxis": "count()",
  1587. "orderby": ["-count()"],
  1588. "field": ["count()", "message", "project"],
  1589. "topEvents": "5",
  1590. },
  1591. format="json",
  1592. )
  1593. data = response.data
  1594. assert response.status_code == 200, response.content
  1595. assert len(data) == 6
  1596. for index, event in enumerate(self.events[:5]):
  1597. message = event.message or event.transaction
  1598. results = data[",".join([message, event.project.slug])]
  1599. assert results["order"] == index
  1600. assert [{"count": self.event_data[index]["count"]}] in [
  1601. attrs for time, attrs in results["data"]
  1602. ]
  1603. other = data["Other"]
  1604. assert other["order"] == 5
  1605. assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
  1606. def test_top_events_with_issue(self):
  1607. # delete a group to make sure if this happens the value becomes unknown
  1608. event_group = self.events[0].group
  1609. event_group.delete()
  1610. with self.feature(self.enabled_features):
  1611. response = self.client.get(
  1612. self.url,
  1613. data={
  1614. "start": self.day_ago.isoformat(),
  1615. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1616. "interval": "1h",
  1617. "yAxis": "count()",
  1618. "orderby": ["-count()"],
  1619. "field": ["count()", "message", "issue"],
  1620. "topEvents": "5",
  1621. "query": "!event.type:transaction",
  1622. },
  1623. format="json",
  1624. )
  1625. data = response.data
  1626. assert response.status_code == 200, response.content
  1627. assert len(data) == 6
  1628. for index, event in enumerate(self.events[:4]):
  1629. message = event.message
  1630. # Because we deleted the group for event 0
  1631. if index == 0 or event.group is None:
  1632. issue = "unknown"
  1633. else:
  1634. issue = event.group.qualified_short_id
  1635. results = data[",".join([issue, message])]
  1636. assert results["order"] == index
  1637. assert [{"count": self.event_data[index]["count"]}] in [
  1638. attrs for time, attrs in results["data"]
  1639. ]
  1640. other = data["Other"]
  1641. assert other["order"] == 5
  1642. assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
  1643. def test_transactions_top_events_with_issue(self):
  1644. # delete a group to make sure if this happens the value becomes unknown
  1645. event_group = self.events[0].group
  1646. event_group.delete()
  1647. with self.feature(self.enabled_features):
  1648. response = self.client.get(
  1649. self.url,
  1650. data={
  1651. "start": self.day_ago.isoformat(),
  1652. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1653. "interval": "1h",
  1654. "yAxis": "count()",
  1655. "orderby": ["-count()"],
  1656. "field": ["count()", "message", "issue"],
  1657. "topEvents": "5",
  1658. "query": "!event.type:transaction",
  1659. "dataset": "transactions",
  1660. },
  1661. format="json",
  1662. )
  1663. assert response.status_code == 200, response.content
  1664. # Just asserting that this doesn't fail, issue on transactions dataset doesn't mean anything
  1665. def test_top_events_with_transaction_status(self):
  1666. with self.feature(self.enabled_features):
  1667. response = self.client.get(
  1668. self.url,
  1669. data={
  1670. "start": self.day_ago.isoformat(),
  1671. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1672. "interval": "1h",
  1673. "yAxis": "count()",
  1674. "orderby": ["-count()"],
  1675. "field": ["count()", "transaction.status"],
  1676. "topEvents": "5",
  1677. },
  1678. format="json",
  1679. )
  1680. data = response.data
  1681. assert response.status_code == 200, response.content
  1682. assert len(data) == 1
  1683. assert "ok" in data
  1684. @mock.patch("sentry.models.GroupManager.get_issues_mapping")
  1685. def test_top_events_with_unknown_issue(self, mock_issues_mapping):
  1686. event = self.events[0]
  1687. event_data = self.event_data[0]
  1688. # ensure that the issue mapping returns None for the issue
  1689. mock_issues_mapping.return_value = {event.group.id: None}
  1690. with self.feature(self.enabled_features):
  1691. response = self.client.get(
  1692. self.url,
  1693. data={
  1694. "start": self.day_ago.isoformat(),
  1695. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1696. "interval": "1h",
  1697. "yAxis": "count()",
  1698. "orderby": ["-count()"],
  1699. "field": ["count()", "issue"],
  1700. "topEvents": "5",
  1701. # narrow the search to just one issue
  1702. "query": f"issue.id:{event.group.id}",
  1703. },
  1704. format="json",
  1705. )
  1706. assert response.status_code == 200, response.content
  1707. data = response.data
  1708. assert len(data) == 1
  1709. results = data["unknown"]
  1710. assert results["order"] == 0
  1711. assert [{"count": event_data["count"]}] in [attrs for time, attrs in results["data"]]
    @mock.patch(
        "sentry.search.events.builder.base.raw_snql_query",
        # First call: the "top events" discovery query returns one issue id.
        # Second call: the timeseries query returns nothing.
        side_effect=[{"data": [{"issue.id": 1}], "meta": []}, {"data": [], "meta": []}],
    )
    def test_top_events_with_issue_check_query_conditions(self, mock_query):
        """Intentionally separate from test_top_events_with_issue.

        This is to test against a bug where the condition for issues wasn't included and we'd be missing data for
        the interval since we'd cap out the max rows. This was not caught by the previous test since the results
        would still be correct given the smaller interval & lack of data
        """
        with self.feature(self.enabled_features):
            self.client.get(
                self.url,
                data={
                    "start": self.day_ago.isoformat(),
                    "end": (self.day_ago + timedelta(hours=2)).isoformat(),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "issue"],
                    "topEvents": "5",
                    "query": "!event.type:transaction",
                },
                format="json",
            )
        # The second snuba query (the timeseries) must be scoped to the issue
        # ids returned by the first (top events) query, i.e. issue.id IN [1].
        assert (
            Condition(Function("coalesce", [Column("group_id"), 0], "issue.id"), Op.IN, [1])
            in mock_query.mock_calls[1].args[0].query.where
        )
  1741. def test_top_events_with_functions(self):
  1742. for dataset in ["transactions", "discover"]:
  1743. with self.feature(self.enabled_features):
  1744. response = self.client.get(
  1745. self.url,
  1746. data={
  1747. "start": self.day_ago.isoformat(),
  1748. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1749. "interval": "1h",
  1750. "yAxis": "count()",
  1751. "orderby": ["-p99()"],
  1752. "field": ["transaction", "avg(transaction.duration)", "p99()"],
  1753. "topEvents": "5",
  1754. "dataset": dataset,
  1755. },
  1756. format="json",
  1757. )
  1758. data = response.data
  1759. assert response.status_code == 200, response.content
  1760. assert len(data) == 1
  1761. results = data[self.transaction.transaction]
  1762. assert results["order"] == 0
  1763. assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
  1764. def test_top_events_with_functions_on_different_transactions(self):
  1765. """Transaction2 has less events, but takes longer so order should be self.transaction then transaction2"""
  1766. transaction_data = load_data("transaction")
  1767. transaction_data["start_timestamp"] = (self.day_ago + timedelta(minutes=2)).isoformat()
  1768. transaction_data["timestamp"] = (self.day_ago + timedelta(minutes=6)).isoformat()
  1769. transaction_data["transaction"] = "/foo_bar/"
  1770. transaction2 = self.store_event(transaction_data, project_id=self.project.id)
  1771. with self.feature(self.enabled_features):
  1772. response = self.client.get(
  1773. self.url,
  1774. data={
  1775. "start": self.day_ago.isoformat(),
  1776. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1777. "interval": "1h",
  1778. "yAxis": "count()",
  1779. "orderby": ["-p90()"],
  1780. "field": ["transaction", "avg(transaction.duration)", "p90()"],
  1781. "topEvents": "5",
  1782. },
  1783. format="json",
  1784. )
  1785. data = response.data
  1786. assert response.status_code == 200, response.content
  1787. assert len(data) == 2
  1788. results = data[self.transaction.transaction]
  1789. assert results["order"] == 1
  1790. assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
  1791. results = data[transaction2.transaction]
  1792. assert results["order"] == 0
  1793. assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
  1794. def test_top_events_with_query(self):
  1795. transaction_data = load_data("transaction")
  1796. transaction_data["start_timestamp"] = (self.day_ago + timedelta(minutes=2)).isoformat()
  1797. transaction_data["timestamp"] = (self.day_ago + timedelta(minutes=6)).isoformat()
  1798. transaction_data["transaction"] = "/foo_bar/"
  1799. self.store_event(transaction_data, project_id=self.project.id)
  1800. with self.feature(self.enabled_features):
  1801. response = self.client.get(
  1802. self.url,
  1803. data={
  1804. "start": self.day_ago.isoformat(),
  1805. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1806. "interval": "1h",
  1807. "yAxis": "count()",
  1808. "orderby": ["-p99()"],
  1809. "query": "transaction:/foo_bar/",
  1810. "field": ["transaction", "avg(transaction.duration)", "p99()"],
  1811. "topEvents": "5",
  1812. },
  1813. format="json",
  1814. )
  1815. data = response.data
  1816. assert response.status_code == 200, response.content
  1817. assert len(data) == 1
  1818. transaction2_data = data["/foo_bar/"]
  1819. assert transaction2_data["order"] == 0
  1820. assert [attrs for time, attrs in transaction2_data["data"]] == [
  1821. [{"count": 1}],
  1822. [{"count": 0}],
  1823. ]
  1824. def test_top_events_with_negated_condition(self):
  1825. with self.feature(self.enabled_features):
  1826. response = self.client.get(
  1827. self.url,
  1828. data={
  1829. "start": self.day_ago.isoformat(),
  1830. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1831. "interval": "1h",
  1832. "yAxis": "count()",
  1833. "orderby": ["-count()"],
  1834. "query": f"!message:{self.events[0].message}",
  1835. "field": ["message", "count()"],
  1836. "topEvents": "5",
  1837. },
  1838. format="json",
  1839. )
  1840. data = response.data
  1841. assert response.status_code == 200, response.content
  1842. assert len(data) == 6
  1843. for index, event in enumerate(self.events[1:5]):
  1844. message = event.message or event.transaction
  1845. results = data[message]
  1846. assert results["order"] == index
  1847. assert [{"count": self.event_data[index + 1]["count"]}] in [
  1848. attrs for _, attrs in results["data"]
  1849. ]
  1850. other = data["Other"]
  1851. assert other["order"] == 5
  1852. assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
  1853. def test_top_events_with_epm(self):
  1854. with self.feature(self.enabled_features):
  1855. response = self.client.get(
  1856. self.url,
  1857. data={
  1858. "start": self.day_ago.isoformat(),
  1859. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1860. "interval": "1h",
  1861. "yAxis": "epm()",
  1862. "orderby": ["-count()"],
  1863. "field": ["message", "user.email", "count()"],
  1864. "topEvents": "5",
  1865. },
  1866. format="json",
  1867. )
  1868. data = response.data
  1869. assert response.status_code == 200, response.content
  1870. assert len(data) == 6
  1871. for index, event in enumerate(self.events[:5]):
  1872. message = event.message or event.transaction
  1873. results = data[
  1874. ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
  1875. ]
  1876. assert results["order"] == index
  1877. assert [{"count": self.event_data[index]["count"] / (3600.0 / 60.0)}] in [
  1878. attrs for time, attrs in results["data"]
  1879. ]
  1880. other = data["Other"]
  1881. assert other["order"] == 5
  1882. assert [{"count": 0.05}] in [attrs for _, attrs in other["data"]]
  1883. def test_top_events_with_multiple_yaxis(self):
  1884. with self.feature(self.enabled_features):
  1885. response = self.client.get(
  1886. self.url,
  1887. data={
  1888. "start": self.day_ago.isoformat(),
  1889. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1890. "interval": "1h",
  1891. "yAxis": ["epm()", "count()"],
  1892. "orderby": ["-count()"],
  1893. "field": ["message", "user.email", "count()"],
  1894. "topEvents": "5",
  1895. },
  1896. format="json",
  1897. )
  1898. data = response.data
  1899. assert response.status_code == 200, response.content
  1900. assert len(data) == 6
  1901. for index, event in enumerate(self.events[:5]):
  1902. message = event.message or event.transaction
  1903. results = data[
  1904. ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
  1905. ]
  1906. assert results["order"] == index
  1907. assert results["epm()"]["order"] == 0
  1908. assert results["count()"]["order"] == 1
  1909. assert [{"count": self.event_data[index]["count"] / (3600.0 / 60.0)}] in [
  1910. attrs for time, attrs in results["epm()"]["data"]
  1911. ]
  1912. assert [{"count": self.event_data[index]["count"]}] in [
  1913. attrs for time, attrs in results["count()"]["data"]
  1914. ]
  1915. other = data["Other"]
  1916. assert other["order"] == 5
  1917. assert other["epm()"]["order"] == 0
  1918. assert other["count()"]["order"] == 1
  1919. assert [{"count": 0.05}] in [attrs for _, attrs in other["epm()"]["data"]]
  1920. assert [{"count": 3}] in [attrs for _, attrs in other["count()"]["data"]]
  1921. def test_top_events_with_boolean(self):
  1922. with self.feature(self.enabled_features):
  1923. response = self.client.get(
  1924. self.url,
  1925. data={
  1926. "start": self.day_ago.isoformat(),
  1927. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1928. "interval": "1h",
  1929. "yAxis": "count()",
  1930. "orderby": ["-count()"],
  1931. "field": ["count()", "message", "device.charging"],
  1932. "topEvents": "5",
  1933. },
  1934. format="json",
  1935. )
  1936. data = response.data
  1937. assert response.status_code == 200, response.content
  1938. assert len(data) == 6
  1939. for index, event in enumerate(self.events[:5]):
  1940. message = event.message or event.transaction
  1941. results = data[",".join(["False", message])]
  1942. assert results["order"] == index
  1943. assert [{"count": self.event_data[index]["count"]}] in [
  1944. attrs for time, attrs in results["data"]
  1945. ]
  1946. other = data["Other"]
  1947. assert other["order"] == 5
  1948. assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
  1949. def test_top_events_with_error_unhandled(self):
  1950. self.login_as(user=self.user)
  1951. project = self.create_project()
  1952. prototype = load_data("android-ndk")
  1953. prototype["event_id"] = "f" * 32
  1954. prototype["logentry"] = {"formatted": "not handled"}
  1955. prototype["exception"]["values"][0]["value"] = "not handled"
  1956. prototype["exception"]["values"][0]["mechanism"]["handled"] = False
  1957. prototype["timestamp"] = (self.day_ago + timedelta(minutes=2)).isoformat()
  1958. self.store_event(data=prototype, project_id=project.id)
  1959. with self.feature(self.enabled_features):
  1960. response = self.client.get(
  1961. self.url,
  1962. data={
  1963. "start": self.day_ago.isoformat(),
  1964. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1965. "interval": "1h",
  1966. "yAxis": "count()",
  1967. "orderby": ["-count()"],
  1968. "field": ["count()", "error.unhandled"],
  1969. "topEvents": "5",
  1970. },
  1971. format="json",
  1972. )
  1973. data = response.data
  1974. assert response.status_code == 200, response.content
  1975. assert len(data) == 2
  1976. def test_top_events_with_timestamp(self):
  1977. with self.feature(self.enabled_features):
  1978. response = self.client.get(
  1979. self.url,
  1980. data={
  1981. "start": self.day_ago.isoformat(),
  1982. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  1983. "interval": "1h",
  1984. "yAxis": "count()",
  1985. "orderby": ["-count()"],
  1986. "query": "event.type:default",
  1987. "field": ["count()", "message", "timestamp"],
  1988. "topEvents": "5",
  1989. },
  1990. format="json",
  1991. )
  1992. data = response.data
  1993. assert response.status_code == 200, response.content
  1994. assert len(data) == 6
  1995. # Transactions won't be in the results because of the query
  1996. del self.events[4]
  1997. del self.event_data[4]
  1998. for index, event in enumerate(self.events[:5]):
  1999. results = data[",".join([event.message, event.timestamp])]
  2000. assert results["order"] == index
  2001. assert [{"count": self.event_data[index]["count"]}] in [
  2002. attrs for time, attrs in results["data"]
  2003. ]
  2004. other = data["Other"]
  2005. assert other["order"] == 5
  2006. assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
  2007. def test_top_events_with_int(self):
  2008. with self.feature(self.enabled_features):
  2009. response = self.client.get(
  2010. self.url,
  2011. data={
  2012. "start": self.day_ago.isoformat(),
  2013. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2014. "interval": "1h",
  2015. "yAxis": "count()",
  2016. "orderby": ["-count()"],
  2017. "field": ["count()", "message", "transaction.duration"],
  2018. "topEvents": "5",
  2019. },
  2020. format="json",
  2021. )
  2022. data = response.data
  2023. assert response.status_code == 200, response.content
  2024. assert len(data) == 1
  2025. results = data[",".join([self.transaction.transaction, "120000"])]
  2026. assert results["order"] == 0
  2027. assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
  2028. def test_top_events_with_user(self):
  2029. with self.feature(self.enabled_features):
  2030. response = self.client.get(
  2031. self.url,
  2032. data={
  2033. "start": self.day_ago.isoformat(),
  2034. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2035. "interval": "1h",
  2036. "yAxis": "count()",
  2037. "orderby": ["-count()", "user"],
  2038. "field": ["user", "count()"],
  2039. "topEvents": "5",
  2040. },
  2041. format="json",
  2042. )
  2043. data = response.data
  2044. assert response.status_code == 200, response.content
  2045. assert len(data) == 5
  2046. assert data["email:bar@example.com"]["order"] == 1
  2047. assert [attrs for time, attrs in data["email:bar@example.com"]["data"]] == [
  2048. [{"count": 7}],
  2049. [{"count": 0}],
  2050. ]
  2051. assert [attrs for time, attrs in data["ip:127.0.0.1"]["data"]] == [
  2052. [{"count": 3}],
  2053. [{"count": 0}],
  2054. ]
  2055. def test_top_events_with_user_and_email(self):
  2056. with self.feature(self.enabled_features):
  2057. response = self.client.get(
  2058. self.url,
  2059. data={
  2060. "start": self.day_ago.isoformat(),
  2061. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2062. "interval": "1h",
  2063. "yAxis": "count()",
  2064. "orderby": ["-count()", "user"],
  2065. "field": ["user", "user.email", "count()"],
  2066. "topEvents": "5",
  2067. },
  2068. format="json",
  2069. )
  2070. data = response.data
  2071. assert response.status_code == 200, response.content
  2072. assert len(data) == 5
  2073. assert data["email:bar@example.com,bar@example.com"]["order"] == 1
  2074. assert [attrs for time, attrs in data["email:bar@example.com,bar@example.com"]["data"]] == [
  2075. [{"count": 7}],
  2076. [{"count": 0}],
  2077. ]
  2078. assert [attrs for time, attrs in data["ip:127.0.0.1,None"]["data"]] == [
  2079. [{"count": 3}],
  2080. [{"count": 0}],
  2081. ]
  2082. def test_top_events_with_user_display(self):
  2083. with self.feature(self.enabled_features):
  2084. response = self.client.get(
  2085. self.url,
  2086. data={
  2087. "start": self.day_ago.isoformat(),
  2088. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2089. "interval": "1h",
  2090. "yAxis": "count()",
  2091. "orderby": ["-count()"],
  2092. "field": ["message", "user.display", "count()"],
  2093. "topEvents": "5",
  2094. },
  2095. format="json",
  2096. )
  2097. data = response.data
  2098. assert response.status_code == 200, response.content
  2099. assert len(data) == 6
  2100. for index, event in enumerate(self.events[:5]):
  2101. message = event.message or event.transaction
  2102. user = self.event_data[index]["data"]["user"]
  2103. results = data[
  2104. ",".join([message, user.get("email", None) or user.get("ip_address", "None")])
  2105. ]
  2106. assert results["order"] == index
  2107. assert [{"count": self.event_data[index]["count"]}] in [
  2108. attrs for _, attrs in results["data"]
  2109. ]
  2110. other = data["Other"]
  2111. assert other["order"] == 5
  2112. assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
  2113. @pytest.mark.skip(reason="A query with group_id will not return transactions")
  2114. def test_top_events_none_filter(self):
  2115. """When a field is None in one of the top events, make sure we filter by it
  2116. In this case event[4] is a transaction and has no issue
  2117. """
  2118. with self.feature(self.enabled_features):
  2119. response = self.client.get(
  2120. self.url,
  2121. data={
  2122. "start": self.day_ago.isoformat(),
  2123. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2124. "interval": "1h",
  2125. "yAxis": "count()",
  2126. "orderby": ["-count()"],
  2127. "field": ["count()", "issue"],
  2128. "topEvents": "5",
  2129. },
  2130. format="json",
  2131. )
  2132. data = response.data
  2133. assert response.status_code == 200, response.content
  2134. assert len(data) == 5
  2135. for index, event in enumerate(self.events[:5]):
  2136. if event.group is None:
  2137. issue = "unknown"
  2138. else:
  2139. issue = event.group.qualified_short_id
  2140. results = data[issue]
  2141. assert results["order"] == index
  2142. assert [{"count": self.event_data[index]["count"]}] in [
  2143. attrs for time, attrs in results["data"]
  2144. ]
  2145. @pytest.mark.skip(reason="Invalid query - transaction events don't have group_id field")
  2146. def test_top_events_one_field_with_none(self):
  2147. with self.feature(self.enabled_features):
  2148. response = self.client.get(
  2149. self.url,
  2150. data={
  2151. "start": self.day_ago.isoformat(),
  2152. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2153. "interval": "1h",
  2154. "yAxis": "count()",
  2155. "orderby": ["-count()"],
  2156. "query": "event.type:transaction",
  2157. "field": ["count()", "issue"],
  2158. "topEvents": "5",
  2159. },
  2160. format="json",
  2161. )
  2162. data = response.data
  2163. assert response.status_code == 200, response.content
  2164. assert len(data) == 1
  2165. results = data["unknown"]
  2166. assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
  2167. assert results["order"] == 0
  2168. def test_top_events_with_error_handled(self):
  2169. data = self.event_data[0]
  2170. data["data"]["level"] = "error"
  2171. data["data"]["exception"] = {
  2172. "values": [
  2173. {
  2174. "type": "ValidationError",
  2175. "value": "Bad request",
  2176. "mechanism": {"handled": True, "type": "generic"},
  2177. }
  2178. ]
  2179. }
  2180. self.store_event(data["data"], project_id=data["project"].id)
  2181. data["data"]["exception"] = {
  2182. "values": [
  2183. {
  2184. "type": "ValidationError",
  2185. "value": "Bad request",
  2186. "mechanism": {"handled": False, "type": "generic"},
  2187. }
  2188. ]
  2189. }
  2190. self.store_event(data["data"], project_id=data["project"].id)
  2191. with self.feature(self.enabled_features):
  2192. response = self.client.get(
  2193. self.url,
  2194. data={
  2195. "start": self.day_ago.isoformat(),
  2196. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2197. "interval": "1h",
  2198. "yAxis": "count()",
  2199. "orderby": ["-count()"],
  2200. "field": ["count()", "error.handled"],
  2201. "topEvents": "5",
  2202. "query": "!event.type:transaction",
  2203. },
  2204. format="json",
  2205. )
  2206. assert response.status_code == 200, response.content
  2207. res_data = response.data
  2208. assert len(res_data) == 2
  2209. results = res_data["1"]
  2210. assert [attrs for time, attrs in results["data"]] == [[{"count": 20}], [{"count": 6}]]
  2211. results = res_data["0"]
  2212. assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
  2213. def test_top_events_with_aggregate_condition(self):
  2214. with self.feature(self.enabled_features):
  2215. response = self.client.get(
  2216. self.url,
  2217. data={
  2218. "start": self.day_ago.isoformat(),
  2219. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2220. "interval": "1h",
  2221. "yAxis": "count()",
  2222. "orderby": ["-count()"],
  2223. "field": ["message", "count()"],
  2224. "query": "count():>4",
  2225. "topEvents": "5",
  2226. },
  2227. format="json",
  2228. )
  2229. assert response.status_code == 200, response.content
  2230. data = response.data
  2231. assert len(data) == 3
  2232. for index, event in enumerate(self.events[:3]):
  2233. message = event.message or event.transaction
  2234. results = data[message]
  2235. assert results["order"] == index
  2236. assert [{"count": self.event_data[index]["count"]}] in [
  2237. attrs for time, attrs in results["data"]
  2238. ]
    @pytest.mark.xfail(reason="There's only 2 rows total, which mean there shouldn't be other")
    def test_top_events_with_to_other(self):
        """to_other() splits series into a "current" release bucket and an "others" bucket.

        Marked xfail: with only two result rows there should be no additional
        "Other" rollup series.
        """
        version = "version -@'\" 1.2,3+(4)"
        # Same release string, escaped for embedding inside the to_other() call below.
        version_escaped = "version -@'\\\" 1.2,3+(4)"
        # every symbol in the release is replaced with an underscore to build the alias
        version_alias = "version_______1_2_3__4_"
        # add one event in the current release so the "current" bucket is non-empty
        event = self.event_data[0]
        event_data = event["data"].copy()
        event_data["event_id"] = uuid4().hex
        event_data["release"] = version
        self.store_event(event_data, project_id=event["project"].id)
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": self.day_ago.isoformat(),
                    "end": (self.day_ago + timedelta(hours=2)).isoformat(),
                    "interval": "1h",
                    "yAxis": "count()",
                    # the double underscores around the version alias is because of a comma and quote
                    "orderby": [f"-to_other_release__{version_alias}__others_current"],
                    "field": [
                        "count()",
                        f'to_other(release,"{version_escaped}",others,current)',
                    ],
                    "topEvents": "2",
                },
                format="json",
            )
        assert response.status_code == 200, response.content
        data = response.data
        # Exactly the two to_other() buckets: "current" and "others".
        assert len(data) == 2

        # Only the single event stored above is in the current release.
        current = data["current"]
        assert current["order"] == 1
        assert sum(attrs[0]["count"] for _, attrs in current["data"]) == 1

        # Everything stored in setUp lands in the "others" bucket.
        others = data["others"]
        assert others["order"] == 0
        assert sum(attrs[0]["count"] for _, attrs in others["data"]) == sum(
            event_data["count"] for event_data in self.event_data
        )
  2280. def test_top_events_with_equations(self):
  2281. with self.feature(self.enabled_features):
  2282. response = self.client.get(
  2283. self.url,
  2284. data={
  2285. "start": self.day_ago.isoformat(),
  2286. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2287. "interval": "1h",
  2288. "yAxis": "equation|count() / 100",
  2289. "orderby": ["-count()"],
  2290. "field": ["count()", "message", "user.email", "equation|count() / 100"],
  2291. "topEvents": "5",
  2292. },
  2293. format="json",
  2294. )
  2295. data = response.data
  2296. assert response.status_code == 200, response.content
  2297. assert len(data) == 6
  2298. for index, event in enumerate(self.events[:5]):
  2299. message = event.message or event.transaction
  2300. results = data[
  2301. ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
  2302. ]
  2303. assert results["order"] == index
  2304. assert [{"count": self.event_data[index]["count"] / 100}] in [
  2305. attrs for time, attrs in results["data"]
  2306. ]
  2307. other = data["Other"]
  2308. assert other["order"] == 5
  2309. assert [{"count": 0.03}] in [attrs for _, attrs in other["data"]]
    @mock.patch("sentry.snuba.discover.bulk_snuba_queries", return_value=[{"data": [], "meta": []}])
    @mock.patch(
        "sentry.search.events.builder.base.raw_snql_query",
        return_value={"data": [], "meta": []},
    )
    def test_invalid_interval(self, mock_raw_query, mock_bulk_query):
        """Intervals that would yield too many buckets get reset to a sane default.

        Snuba is mocked out, so the assertions inspect the granularity of the
        queries the endpoint builds (via the mocks' recorded calls) rather than
        any response payload.
        """
        # Case 1: no topEvents — a 1s interval over 2h is allowed through the
        # bulk-query path.
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": before_now().isoformat(),
                    # 7,200 points for each event
                    "start": before_now(seconds=7200).isoformat(),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                },
            )
        assert response.status_code == 200
        assert mock_bulk_query.call_count == 1

        # Case 2: same window with topEvents=2 would exceed the point limit, so
        # the interval should be clamped instead of erroring.
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": before_now().isoformat(),
                    "start": before_now(seconds=7200).isoformat(),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                    # 7,200 points for each event * 2, should error
                    "topEvents": "2",
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 2
        # Should've reset to the default for between 1 and 24h
        assert mock_raw_query.mock_calls[1].args[0].query.granularity.granularity == 300

        # Case 3: just under the point limit — interval must be left untouched.
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": before_now().isoformat(),
                    # 1999 points * 5 events should just be enough to not error
                    "start": before_now(seconds=1999).isoformat(),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                    "topEvents": "5",
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 4
        # Should've left the interval alone since we're just below the limit
        assert mock_raw_query.mock_calls[3].args[0].query.granularity.granularity == 1

        # Case 4: a nonsensical 0d interval falls back to the 24h default.
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": before_now().isoformat(),
                    "start": before_now(hours=24).isoformat(),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "0d",
                    "yAxis": "count()",
                    "topEvents": "5",
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 6
        # Should've default to 24h's default of 5m
        assert mock_raw_query.mock_calls[5].args[0].query.granularity.granularity == 300
    def test_top_events_timestamp_fields(self):
        """Grouping by timestamp, timestamp.to_hour, and timestamp.to_day keys
        each series by the comma-joined ISO values of those fields."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "start": self.day_ago.isoformat(),
                    "end": (self.day_ago + timedelta(hours=2)).isoformat(),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "timestamp", "timestamp.to_hour", "timestamp.to_day"],
                    "topEvents": "5",
                },
            )
        assert response.status_code == 200
        data = response.data
        # Events were stored at three distinct timestamps, so three series.
        assert len(data) == 3
        # these are the timestamps corresponding to the events stored
        timestamps = [
            self.day_ago + timedelta(minutes=2),
            self.day_ago + timedelta(hours=1, minutes=2),
            self.day_ago + timedelta(minutes=4),
        ]
        timestamp_hours = [timestamp.replace(minute=0, second=0) for timestamp in timestamps]
        timestamp_days = [timestamp.replace(hour=0, minute=0, second=0) for timestamp in timestamps]
        for ts, ts_hr, ts_day in zip(timestamps, timestamp_hours, timestamp_days):
            # NOTE(review): the key orders the values as timestamp, to_day,
            # to_hour — not the order the fields were requested in; presumably
            # this mirrors the response's internal field ordering. Confirm
            # against the endpoint implementation.
            key = f"{ts.isoformat()},{ts_day.isoformat()},{ts_hr.isoformat()}"
            # Sum of counts for all stored groups sharing this exact timestamp.
            count = sum(e["count"] for e in self.event_data if e["data"]["timestamp"] == ts)
            results = data[key]
            assert [{"count": count}] in [attrs for time, attrs in results["data"]]
  2419. def test_top_events_other_with_matching_columns(self):
  2420. with self.feature(self.enabled_features):
  2421. response = self.client.get(
  2422. self.url,
  2423. data={
  2424. "start": self.day_ago.isoformat(),
  2425. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2426. "interval": "1h",
  2427. "yAxis": "count()",
  2428. "orderby": ["-count()"],
  2429. "field": ["count()", "tags[shared-tag]", "message"],
  2430. "topEvents": "5",
  2431. },
  2432. format="json",
  2433. )
  2434. data = response.data
  2435. assert response.status_code == 200, response.content
  2436. assert len(data) == 6
  2437. for index, event in enumerate(self.events[:5]):
  2438. message = event.message or event.transaction
  2439. results = data[",".join([message, "yup"])]
  2440. assert results["order"] == index
  2441. assert [{"count": self.event_data[index]["count"]}] in [
  2442. attrs for _, attrs in results["data"]
  2443. ]
  2444. other = data["Other"]
  2445. assert other["order"] == 5
  2446. assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
  2447. def test_top_events_with_field_overlapping_other_key(self):
  2448. transaction_data = load_data("transaction")
  2449. transaction_data["start_timestamp"] = (self.day_ago + timedelta(minutes=2)).isoformat()
  2450. transaction_data["timestamp"] = (self.day_ago + timedelta(minutes=6)).isoformat()
  2451. transaction_data["transaction"] = OTHER_KEY
  2452. for i in range(5):
  2453. data = transaction_data.copy()
  2454. data["event_id"] = "ab" + f"{i}" * 30
  2455. data["contexts"]["trace"]["span_id"] = "ab" + f"{i}" * 14
  2456. self.store_event(data, project_id=self.project.id)
  2457. with self.feature(self.enabled_features):
  2458. response = self.client.get(
  2459. self.url,
  2460. data={
  2461. "start": self.day_ago.isoformat(),
  2462. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2463. "interval": "1h",
  2464. "yAxis": "count()",
  2465. "orderby": ["-count()"],
  2466. "field": ["count()", "message"],
  2467. "topEvents": "5",
  2468. },
  2469. format="json",
  2470. )
  2471. data = response.data
  2472. assert response.status_code == 200, response.content
  2473. assert len(data) == 6
  2474. assert f"{OTHER_KEY} (message)" in data
  2475. results = data[f"{OTHER_KEY} (message)"]
  2476. assert [{"count": 5}] in [attrs for _, attrs in results["data"]]
  2477. other = data["Other"]
  2478. assert other["order"] == 5
  2479. assert [{"count": 4}] in [attrs for _, attrs in other["data"]]
  2480. def test_top_events_can_exclude_other_series(self):
  2481. with self.feature(self.enabled_features):
  2482. response = self.client.get(
  2483. self.url,
  2484. data={
  2485. "start": self.day_ago.isoformat(),
  2486. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2487. "interval": "1h",
  2488. "yAxis": "count()",
  2489. "orderby": ["count()"],
  2490. "field": ["count()", "message"],
  2491. "topEvents": "5",
  2492. "excludeOther": "1",
  2493. },
  2494. format="json",
  2495. )
  2496. data = response.data
  2497. assert response.status_code == 200, response.content
  2498. assert len(data) == 5
  2499. assert "Other" not in response.data
    @pytest.mark.xfail(reason="Started failing on ClickHouse 21.8")
    def test_top_events_with_equation_including_unselected_fields_passes_field_validation(self):
        """Ordering by equation[0] whose equation uses count_unique(user) — a
        field not selected on its own — should still pass field validation."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": self.day_ago.isoformat(),
                    "end": (self.day_ago + timedelta(hours=2)).isoformat(),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-equation[0]"],
                    "field": ["count()", "message", "equation|count_unique(user) * 2"],
                    "topEvents": "5",
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        # Five top series plus the "Other" rollup.
        assert len(data) == 6
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 4}] in [attrs for _, attrs in other["data"]]
  2522. def test_top_events_boolean_condition_and_project_field(self):
  2523. with self.feature(self.enabled_features):
  2524. response = self.client.get(
  2525. self.url,
  2526. data={
  2527. "start": self.day_ago.isoformat(),
  2528. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2529. "interval": "1h",
  2530. "yAxis": "count()",
  2531. "orderby": ["-count()"],
  2532. "field": ["project", "count()"],
  2533. "topEvents": "5",
  2534. "query": "event.type:transaction (transaction:*a OR transaction:b*)",
  2535. },
  2536. format="json",
  2537. )
  2538. assert response.status_code == 200
class OrganizationEventsStatsProfileFunctionDatasetEndpointTest(
    APITestCase, ProfilesSnubaTestCase, SearchIssueTestMixin
):
    """Events-stats coverage for the ``profileFunctions`` dataset (no topEvents)."""

    endpoint = "sentry-api-0-organization-events-stats"

    def setUp(self):
        super().setUp()
        self.login_as(user=self.user)
        # Anchor all reference times at 10:00 so 1d interval buckets are
        # deterministic regardless of when the test runs.
        self.one_day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        self.two_days_ago = before_now(days=2).replace(hour=10, minute=0, second=0, microsecond=0)
        self.three_days_ago = before_now(days=3).replace(hour=10, minute=0, second=0, microsecond=0)
        self.project = self.create_project()
        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_id_or_slug": self.project.organization.slug},
        )

    def test_functions_dataset_simple(self):
        """cpm(), p95(function.duration) and all_examples() over one
        transaction-based profile and one continuous-profile chunk, each
        landing in a different daily bucket."""
        # Transaction-based profile: 100 samples, first daily bucket.
        transaction_function = self.store_functions(
            [
                {
                    "self_times_ns": [100_000_000 for _ in range(100)],
                    "package": "foo",
                    "function": "bar",
                    "in_app": True,
                },
            ],
            project=self.project,
            timestamp=self.two_days_ago - timedelta(hours=12),
        )

        # Continuous-profile chunk: 100 samples, second daily bucket.
        continuous_timestamp = self.two_days_ago + timedelta(hours=12)
        continuous_function = self.store_functions_chunk(
            [
                {
                    "self_times_ns": [200_000_000 for _ in range(100)],
                    "package": "bar",
                    "function": "bar",
                    "thread_id": "1",
                    "in_app": True,
                },
            ],
            project=self.project,
            timestamp=continuous_timestamp,
        )

        y_axes = [
            "cpm()",
            "p95(function.duration)",
            "all_examples()",
        ]

        data = {
            "dataset": "profileFunctions",
            "start": self.three_days_ago.isoformat(),
            "end": self.one_day_ago.isoformat(),
            "interval": "1d",
            "yAxis": y_axes,
        }

        response = self.client.get(self.url, data=data, format="json")
        assert response.status_code == 200, response.content

        # 200 samples total across the 2-day window, expressed per minute.
        assert sum(row[1][0]["count"] for row in response.data["cpm()"]["data"]) == pytest.approx(
            200 / ((self.one_day_ago - self.three_days_ago).total_seconds() / 60), rel=1e-3
        )
        assert any(
            row[1][0]["count"] > 0 for row in response.data["p95(function.duration)"]["data"]
        )

        # One example per daily bucket: a profile_id for the transaction-based
        # profile, and a profiler_id + time range for the continuous chunk.
        examples = [row[1][0]["count"] for row in response.data["all_examples()"]["data"]]
        assert examples == [
            [
                {
                    "profile_id": transaction_function["transaction"]["contexts"]["profile"][
                        "profile_id"
                    ],
                },
            ],
            [
                {
                    "profiler_id": continuous_function["profiler_id"],
                    "thread_id": "1",
                    "start": continuous_timestamp.timestamp(),
                    "end": (continuous_timestamp + timedelta(microseconds=200_000)).timestamp(),
                },
            ],
        ]

        # Every yAxis response carries the same field/unit metadata.
        for y_axis in y_axes:
            assert response.data[y_axis]["meta"]["fields"] == {
                "time": "date",
                "cpm": "number",
                "p95_function_duration": "duration",
                "all_examples": "string",
            }
            assert response.data[y_axis]["meta"]["units"] == {
                "time": None,
                "cpm": None,
                "p95_function_duration": "nanosecond",
                "all_examples": None,
            }
class OrganizationEventsStatsTopNEventsProfileFunctionDatasetEndpointTest(
    APITestCase, ProfilesSnubaTestCase, SearchIssueTestMixin
):
    """Top-N events-stats coverage for the ``profileFunctions`` dataset."""

    endpoint = "sentry-api-0-organization-events-stats"

    def setUp(self):
        super().setUp()
        self.login_as(user=self.user)
        # Anchor all reference times at 10:00 so 1d interval buckets are
        # deterministic regardless of when the test runs.
        self.one_day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        self.two_days_ago = before_now(days=2).replace(hour=10, minute=0, second=0, microsecond=0)
        self.three_days_ago = before_now(days=3).replace(hour=10, minute=0, second=0, microsecond=0)
        self.project = self.create_project()
        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_id_or_slug": self.project.organization.slug},
        )

    def test_functions_dataset_simple(self):
        """topEvents=2 over two functions: each function becomes its own series
        keyed by function name, with per-series yAxis results and metadata."""
        # Two functions in the same profile: "foo" with 100 samples and "bar"
        # with 10, so their cpm() values differ by 10x.
        self.store_functions(
            [
                {
                    "self_times_ns": [100 for _ in range(100)],
                    "package": "pkg",
                    "function": "foo",
                    "in_app": True,
                },
                {
                    "self_times_ns": [100 for _ in range(10)],
                    "package": "pkg",
                    "function": "bar",
                    "in_app": True,
                },
            ],
            project=self.project,
            timestamp=self.two_days_ago,
        )

        y_axes = [
            "cpm()",
            "p95(function.duration)",
            "all_examples()",
        ]

        data = {
            "dataset": "profileFunctions",
            "field": ["function", "count()"],
            "start": self.three_days_ago.isoformat(),
            "end": self.one_day_ago.isoformat(),
            "yAxis": y_axes,
            "interval": "1d",
            "topEvents": "2",
            "excludeOther": "1",
        }

        response = self.client.get(self.url, data=data, format="json")
        assert response.status_code == 200, response.content

        # cpm() = samples per minute over the 2-day window.
        assert sum(
            row[1][0]["count"] for row in response.data["foo"]["cpm()"]["data"]
        ) == pytest.approx(
            100 / ((self.one_day_ago - self.three_days_ago).total_seconds() / 60), rel=1e-3
        )
        assert sum(
            row[1][0]["count"] for row in response.data["bar"]["cpm()"]["data"]
        ) == pytest.approx(
            10 / ((self.one_day_ago - self.three_days_ago).total_seconds() / 60), rel=1e-3
        )

        assert any(
            row[1][0]["count"] > 0 for row in response.data["foo"]["p95(function.duration)"]["data"]
        )
        assert any(
            row[1][0]["count"] > 0 for row in response.data["bar"]["p95(function.duration)"]["data"]
        )

        # Unit metadata is identical for every series/yAxis combination.
        for func in ["foo", "bar"]:
            for y_axis in y_axes:
                assert response.data[func][y_axis]["meta"]["units"] == {
                    "time": None,
                    "count": None,
                    "cpm": None,
                    "function": None,
                    "p95_function_duration": "nanosecond",
                    "all_examples": None,
                }
  2709. class OrganizationEventsStatsTopNEventsErrors(APITestCase, SnubaTestCase):
    def setUp(self):
        """Store seven error groups with descending counts (7..1) across two projects.

        The first five entries are the expected "top 5" series; the last two
        (counts 2 and 1) fall into the "Other" rollup in the tests below.
        """
        super().setUp()
        self.login_as(user=self.user)
        # Fixed 10:00 anchor so hourly buckets are deterministic.
        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        self.project = self.create_project()
        self.project2 = self.create_project()
        self.user2 = self.create_user()
        # Each entry is stored `count` times. Entries are ordered by descending
        # count, so index i is the i-th ranked series under -count() ordering.
        self.event_data: list[_EventDataDict] = [
            {
                "data": {
                    "message": "poof",
                    "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
                    "user": {"email": self.user.email},
                    "tags": {"shared-tag": "yup"},
                    "fingerprint": ["group1"],
                },
                "project": self.project2,
                "count": 7,
            },
            {
                "data": {
                    "message": "voof",
                    # Only group2 lands in the second hourly bucket.
                    "timestamp": (self.day_ago + timedelta(hours=1, minutes=2)).isoformat(),
                    "fingerprint": ["group2"],
                    "user": {"email": self.user2.email},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project2,
                "count": 6,
            },
            {
                "data": {
                    "message": "very bad",
                    "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
                    "fingerprint": ["group3"],
                    "user": {"email": "foo@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 5,
            },
            {
                "data": {
                    "message": "oh no",
                    "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
                    "fingerprint": ["group4"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 4,
            },
            {
                "data": {
                    "message": "kinda bad",
                    "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
                    "user": {"email": self.user.email},
                    "tags": {"shared-tag": "yup"},
                    "fingerprint": ["group7"],
                },
                "project": self.project,
                "count": 3,
            },
            # Not in the top 5
            {
                "data": {
                    "message": "sorta bad",
                    "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
                    "fingerprint": ["group5"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 2,
            },
            {
                "data": {
                    "message": "not so bad",
                    "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
                    "fingerprint": ["group6"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 1,
            },
        ]
        self.events = []
        for index, event_data in enumerate(self.event_data):
            data = event_data["data"].copy()
            for i in range(event_data["count"]):
                data["event_id"] = f"{index}{i}" * 16
                event = self.store_event(data, project_id=event_data["project"].id)
            # NOTE(review): indentation reconstructed — the append sits outside
            # the inner loop so only the *last* stored event per group is kept
            # and self.events[i] pairs 1:1 with self.event_data[i], which the
            # sibling tests rely on. Confirm against upstream.
            self.events.append(event)
        self.enabled_features = {
            "organizations:discover-basic": True,
        }
        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_id_or_slug": self.project.organization.slug},
        )
  2811. def test_simple_top_events(self):
  2812. with self.feature(self.enabled_features):
  2813. response = self.client.get(
  2814. self.url,
  2815. data={
  2816. "start": self.day_ago.isoformat(),
  2817. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2818. "interval": "1h",
  2819. "yAxis": "count()",
  2820. "orderby": ["-count()"],
  2821. "field": ["count()", "message", "user.email"],
  2822. "dataset": "errors",
  2823. "topEvents": "5",
  2824. },
  2825. format="json",
  2826. )
  2827. data = response.data
  2828. assert response.status_code == 200, response.content
  2829. assert len(data) == 6
  2830. for index, event in enumerate(self.events[:5]):
  2831. message = event.message or event.transaction
  2832. results = data[
  2833. ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
  2834. ]
  2835. assert results["order"] == index
  2836. assert [{"count": self.event_data[index]["count"]}] in [
  2837. attrs for _, attrs in results["data"]
  2838. ]
  2839. other = data["Other"]
  2840. assert other["order"] == 5
  2841. assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
  2842. def test_top_events_with_projects_other(self):
  2843. with self.feature(self.enabled_features):
  2844. response = self.client.get(
  2845. self.url,
  2846. data={
  2847. "start": self.day_ago.isoformat(),
  2848. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2849. "interval": "1h",
  2850. "yAxis": "count()",
  2851. "orderby": ["-count()"],
  2852. "field": ["count()", "project"],
  2853. "dataset": "errors",
  2854. "topEvents": "1",
  2855. },
  2856. format="json",
  2857. )
  2858. data = response.data
  2859. assert response.status_code == 200, response.content
  2860. assert set(data.keys()) == {"Other", self.project.slug}
  2861. assert data[self.project.slug]["order"] == 0
  2862. assert [attrs[0]["count"] for _, attrs in data[self.project.slug]["data"]] == [15, 0]
  2863. assert data["Other"]["order"] == 1
  2864. assert [attrs[0]["count"] for _, attrs in data["Other"]["data"]] == [7, 6]
    def test_top_events_with_issue(self):
        """Issue series use the group's qualified short id, falling back to
        "unknown" when the group no longer exists."""
        # delete a group to make sure if this happens the value becomes unknown
        event_group = self.events[0].group
        event_group.delete()
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": self.day_ago.isoformat(),
                    "end": (self.day_ago + timedelta(hours=2)).isoformat(),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "issue"],
                    "topEvents": "5",
                    "query": "",
                    "dataset": "errors",
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        # Five top series plus the "Other" rollup.
        assert len(data) == 6
        # Only the first four stored groups are checked explicitly here.
        for index, event in enumerate(self.events[:4]):
            message = event.message
            # Because we deleted the group for event 0
            if index == 0 or event.group is None:
                issue = "unknown"
            else:
                issue = event.group.qualified_short_id
            results = data[",".join([issue, message])]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        # "Other" aggregates the remaining groups' counts in the first hour.
        assert [attrs[0]["count"] for _, attrs in data["Other"]["data"]] == [3, 0]
    @mock.patch("sentry.models.GroupManager.get_issues_mapping")
    def test_top_events_with_unknown_issue(self, mock_issues_mapping):
        """A group whose issue mapping resolves to None is keyed as "unknown"."""
        event = self.events[0]
        event_data = self.event_data[0]
        # ensure that the issue mapping returns None for the issue
        mock_issues_mapping.return_value = {event.group.id: None}
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": self.day_ago.isoformat(),
                    "end": (self.day_ago + timedelta(hours=2)).isoformat(),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "issue"],
                    "topEvents": "5",
                    # narrow the search to just one issue
                    "query": f"issue.id:{event.group.id}",
                    "dataset": "errors",
                },
                format="json",
            )
        assert response.status_code == 200, response.content
        data = response.data
        # Single series, keyed "unknown" because the mapping had no short id.
        assert len(data) == 1
        results = data["unknown"]
        assert results["order"] == 0
        assert [{"count": event_data["count"]}] in [attrs for time, attrs in results["data"]]
    @mock.patch(
        "sentry.search.events.builder.base.raw_snql_query",
        side_effect=[{"data": [{"issue.id": 1}], "meta": []}, {"data": [], "meta": []}],
    )
    def test_top_events_with_issue_check_query_conditions(self, mock_query):
        """Intentionally separate from test_top_events_with_issue.

        This is to test against a bug where the condition for issues wasn't
        included and we'd be missing data for the interval since we'd cap out
        the max rows. This was not caught by the previous test since the
        results would still be correct given the smaller interval & lack of
        data.
        """
        with self.feature(self.enabled_features):
            self.client.get(
                self.url,
                data={
                    "start": self.day_ago.isoformat(),
                    "end": (self.day_ago + timedelta(hours=2)).isoformat(),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "issue"],
                    "topEvents": "5",
                    "query": "!event.type:transaction",
                    "dataset": "errors",
                },
                format="json",
            )
        # The second (timeseries) query must be constrained to the top issue
        # ids returned by the first query.
        assert (
            Condition(
                Function(
                    "coalesce",
                    [Column("group_id", entity=Entity("events", alias="events")), 0],
                    "issue.id",
                ),
                Op.IN,
                [1],
            )
            in mock_query.mock_calls[1].args[0].query.where
        )
  2970. def test_group_id_tag_simple(self):
  2971. event_data: _EventDataDict = {
  2972. "data": {
  2973. "message": "poof",
  2974. "timestamp": (self.day_ago + timedelta(minutes=2)).isoformat(),
  2975. "user": {"email": self.user.email},
  2976. "tags": {"group_id": "the tag"},
  2977. "fingerprint": ["group1"],
  2978. },
  2979. "project": self.project2,
  2980. "count": 7,
  2981. }
  2982. for i in range(event_data["count"]):
  2983. event_data["data"]["event_id"] = f"a{i}" * 16
  2984. self.store_event(event_data["data"], project_id=event_data["project"].id)
  2985. data = {
  2986. "start": self.day_ago.isoformat(),
  2987. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  2988. "interval": "1h",
  2989. "yAxis": "count()",
  2990. "orderby": ["-count()"],
  2991. "field": ["count()", "group_id"],
  2992. "topEvents": "5",
  2993. "partial": "1",
  2994. }
  2995. with self.feature(self.enabled_features):
  2996. response = self.client.get(self.url, data, format="json")
  2997. assert response.status_code == 200, response.content
  2998. assert response.data["the tag"]["data"][0][1] == [{"count": 7}]
  2999. data["query"] = 'group_id:"the tag"'
  3000. with self.feature(self.enabled_features):
  3001. response = self.client.get(self.url, data, format="json")
  3002. assert response.status_code == 200
  3003. assert response.data["the tag"]["data"][0][1] == [{"count": 7}]
  3004. data["query"] = "group_id:abc"
  3005. with self.feature(self.enabled_features):
  3006. response = self.client.get(self.url, data, format="json")
  3007. assert response.status_code == 200
  3008. assert all([interval[1][0]["count"] == 0 for interval in response.data["data"]])
  3009. def test_top_events_with_error_unhandled(self):
  3010. self.login_as(user=self.user)
  3011. project = self.create_project()
  3012. prototype = load_data("android-ndk")
  3013. prototype["event_id"] = "f" * 32
  3014. prototype["logentry"] = {"formatted": "not handled"}
  3015. prototype["exception"]["values"][0]["value"] = "not handled"
  3016. prototype["exception"]["values"][0]["mechanism"]["handled"] = False
  3017. prototype["timestamp"] = (self.day_ago + timedelta(minutes=2)).isoformat()
  3018. self.store_event(data=prototype, project_id=project.id)
  3019. with self.feature(self.enabled_features):
  3020. response = self.client.get(
  3021. self.url,
  3022. data={
  3023. "start": self.day_ago.isoformat(),
  3024. "end": (self.day_ago + timedelta(hours=2)).isoformat(),
  3025. "interval": "1h",
  3026. "yAxis": "count()",
  3027. "orderby": ["-count()"],
  3028. "field": ["count()", "error.unhandled"],
  3029. "topEvents": "5",
  3030. },
  3031. format="json",
  3032. )
  3033. data = response.data
  3034. assert response.status_code == 200, response.content
  3035. assert len(data) == 2