test_organization_events_stats.py 95 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497
  1. import uuid
  2. from datetime import timedelta
  3. from unittest import mock
  4. from uuid import uuid4
  5. import pytest
  6. from dateutil.parser import parse as parse_date
  7. from django.urls import reverse
  8. from pytz import utc
  9. from snuba_sdk.column import Column
  10. from snuba_sdk.conditions import Condition, Op
  11. from snuba_sdk.function import Function
  12. from sentry.constants import MAX_TOP_EVENTS
  13. from sentry.models.transaction_threshold import ProjectTransactionThreshold, TransactionMetric
  14. from sentry.snuba.discover import OTHER_KEY
  15. from sentry.testutils import APITestCase, MetricsEnhancedPerformanceTestCase, SnubaTestCase
  16. from sentry.testutils.helpers.datetime import before_now, iso_format
  17. from sentry.utils.samples import load_data
  18. class OrganizationEventsStatsEndpointTest(APITestCase, SnubaTestCase):
  19. endpoint = "sentry-api-0-organization-events-stats"
  20. def setUp(self):
  21. super().setUp()
  22. self.login_as(user=self.user)
  23. self.authed_user = self.user
  24. self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
  25. self.project = self.create_project()
  26. self.project2 = self.create_project()
  27. self.user = self.create_user()
  28. self.user2 = self.create_user()
  29. self.store_event(
  30. data={
  31. "event_id": "a" * 32,
  32. "message": "very bad",
  33. "timestamp": iso_format(self.day_ago + timedelta(minutes=1)),
  34. "fingerprint": ["group1"],
  35. "tags": {"sentry:user": self.user.email},
  36. },
  37. project_id=self.project.id,
  38. )
  39. self.store_event(
  40. data={
  41. "event_id": "b" * 32,
  42. "message": "oh my",
  43. "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=1)),
  44. "fingerprint": ["group2"],
  45. "tags": {"sentry:user": self.user2.email},
  46. },
  47. project_id=self.project2.id,
  48. )
  49. self.store_event(
  50. data={
  51. "event_id": "c" * 32,
  52. "message": "very bad",
  53. "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=2)),
  54. "fingerprint": ["group2"],
  55. "tags": {"sentry:user": self.user2.email},
  56. },
  57. project_id=self.project2.id,
  58. )
  59. self.url = reverse(
  60. "sentry-api-0-organization-events-stats",
  61. kwargs={"organization_slug": self.project.organization.slug},
  62. )
  63. self.features = {}
  64. def do_request(self, data, url=None, features=None):
  65. if features is None:
  66. features = {"organizations:discover-basic": True}
  67. features.update(self.features)
  68. with self.feature(features):
  69. return self.client.get(self.url if url is None else url, data=data, format="json")
  70. def test_simple(self):
  71. response = self.do_request(
  72. {
  73. "start": iso_format(self.day_ago),
  74. "end": iso_format(self.day_ago + timedelta(hours=2)),
  75. "interval": "1h",
  76. },
  77. )
  78. assert response.status_code == 200, response.content
  79. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  80. def test_misaligned_last_bucket(self):
  81. response = self.do_request(
  82. data={
  83. "start": iso_format(self.day_ago - timedelta(minutes=30)),
  84. "end": iso_format(self.day_ago + timedelta(hours=1, minutes=30)),
  85. "interval": "1h",
  86. "partial": "1",
  87. },
  88. )
  89. assert response.status_code == 200, response.content
  90. assert [attrs for time, attrs in response.data["data"]] == [
  91. [{"count": 0}],
  92. [{"count": 1}],
  93. [{"count": 2}],
  94. ]
  95. def test_no_projects(self):
  96. org = self.create_organization(owner=self.user)
  97. self.login_as(user=self.user)
  98. url = reverse(
  99. "sentry-api-0-organization-events-stats", kwargs={"organization_slug": org.slug}
  100. )
  101. response = self.do_request({}, url)
  102. assert response.status_code == 200, response.content
  103. assert len(response.data["data"]) == 0
  104. def test_user_count(self):
  105. self.store_event(
  106. data={
  107. "event_id": "d" * 32,
  108. "message": "something",
  109. "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
  110. "tags": {"sentry:user": self.user2.email},
  111. "fingerprint": ["group2"],
  112. },
  113. project_id=self.project2.id,
  114. )
  115. response = self.do_request(
  116. data={
  117. "start": iso_format(self.day_ago),
  118. "end": iso_format(self.day_ago + timedelta(hours=2)),
  119. "interval": "1h",
  120. "yAxis": "user_count",
  121. },
  122. )
  123. assert response.status_code == 200, response.content
  124. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 2}], [{"count": 1}]]
  125. def test_discover2_backwards_compatibility(self):
  126. response = self.do_request(
  127. data={
  128. "project": self.project.id,
  129. "start": iso_format(self.day_ago),
  130. "end": iso_format(self.day_ago + timedelta(hours=2)),
  131. "interval": "1h",
  132. "yAxis": "user_count",
  133. },
  134. )
  135. assert response.status_code == 200, response.content
  136. assert len(response.data["data"]) > 0
  137. response = self.do_request(
  138. data={
  139. "project": self.project.id,
  140. "start": iso_format(self.day_ago),
  141. "end": iso_format(self.day_ago + timedelta(hours=2)),
  142. "interval": "1h",
  143. "yAxis": "event_count",
  144. },
  145. )
  146. assert response.status_code == 200, response.content
  147. assert len(response.data["data"]) > 0
  148. def test_with_event_count_flag(self):
  149. response = self.do_request(
  150. data={
  151. "start": iso_format(self.day_ago),
  152. "end": iso_format(self.day_ago + timedelta(hours=2)),
  153. "interval": "1h",
  154. "yAxis": "event_count",
  155. },
  156. )
  157. assert response.status_code == 200, response.content
  158. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  159. def test_performance_view_feature(self):
  160. response = self.do_request(
  161. data={
  162. "end": iso_format(before_now()),
  163. "start": iso_format(before_now(hours=2)),
  164. "query": "project_id:1",
  165. "interval": "30m",
  166. "yAxis": "count()",
  167. },
  168. features={
  169. "organizations:performance-view": True,
  170. "organizations:discover-basic": False,
  171. },
  172. )
  173. assert response.status_code == 200
  174. def test_aggregate_function_apdex(self):
  175. project1 = self.create_project()
  176. project2 = self.create_project()
  177. events = [
  178. ("one", 400, project1.id),
  179. ("one", 400, project1.id),
  180. ("two", 3000, project2.id),
  181. ("two", 1000, project2.id),
  182. ("three", 3000, project2.id),
  183. ]
  184. for idx, event in enumerate(events):
  185. data = load_data(
  186. "transaction",
  187. start_timestamp=self.day_ago + timedelta(minutes=(1 + idx)),
  188. timestamp=self.day_ago + timedelta(minutes=(1 + idx), milliseconds=event[1]),
  189. )
  190. data["event_id"] = f"{idx}" * 32
  191. data["transaction"] = f"/apdex/new/{event[0]}"
  192. data["user"] = {"email": f"{idx}@example.com"}
  193. self.store_event(data, project_id=event[2])
  194. response = self.do_request(
  195. data={
  196. "start": iso_format(self.day_ago),
  197. "end": iso_format(self.day_ago + timedelta(hours=2)),
  198. "interval": "1h",
  199. "yAxis": "apdex()",
  200. },
  201. )
  202. assert response.status_code == 200, response.content
  203. assert [attrs for time, attrs in response.data["data"]] == [
  204. [{"count": 0.3}],
  205. [{"count": 0}],
  206. ]
  207. ProjectTransactionThreshold.objects.create(
  208. project=project1,
  209. organization=project1.organization,
  210. threshold=100,
  211. metric=TransactionMetric.DURATION.value,
  212. )
  213. ProjectTransactionThreshold.objects.create(
  214. project=project2,
  215. organization=project1.organization,
  216. threshold=100,
  217. metric=TransactionMetric.DURATION.value,
  218. )
  219. response = self.do_request(
  220. data={
  221. "start": iso_format(self.day_ago),
  222. "end": iso_format(self.day_ago + timedelta(hours=2)),
  223. "interval": "1h",
  224. "yAxis": "apdex()",
  225. },
  226. )
  227. assert response.status_code == 200, response.content
  228. assert [attrs for time, attrs in response.data["data"]] == [
  229. [{"count": 0.2}],
  230. [{"count": 0}],
  231. ]
  232. response = self.do_request(
  233. data={
  234. "start": iso_format(self.day_ago),
  235. "end": iso_format(self.day_ago + timedelta(hours=2)),
  236. "interval": "1h",
  237. "yAxis": ["user_count", "apdex()"],
  238. },
  239. )
  240. assert response.status_code == 200, response.content
  241. assert response.data["user_count"]["order"] == 0
  242. assert [attrs for time, attrs in response.data["user_count"]["data"]] == [
  243. [{"count": 5}],
  244. [{"count": 0}],
  245. ]
  246. assert response.data["apdex()"]["order"] == 1
  247. assert [attrs for time, attrs in response.data["apdex()"]["data"]] == [
  248. [{"count": 0.2}],
  249. [{"count": 0}],
  250. ]
  251. def test_aggregate_function_count(self):
  252. response = self.do_request(
  253. data={
  254. "start": iso_format(self.day_ago),
  255. "end": iso_format(self.day_ago + timedelta(hours=2)),
  256. "interval": "1h",
  257. "yAxis": "count()",
  258. },
  259. )
  260. assert response.status_code == 200, response.content
  261. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  262. def test_invalid_aggregate(self):
  263. response = self.do_request(
  264. data={
  265. "start": iso_format(self.day_ago),
  266. "end": iso_format(self.day_ago + timedelta(hours=2)),
  267. "interval": "1h",
  268. "yAxis": "rubbish",
  269. },
  270. )
  271. assert response.status_code == 400, response.content
  272. def test_aggregate_function_user_count(self):
  273. response = self.do_request(
  274. data={
  275. "start": iso_format(self.day_ago),
  276. "end": iso_format(self.day_ago + timedelta(hours=2)),
  277. "interval": "1h",
  278. "yAxis": "count_unique(user)",
  279. },
  280. )
  281. assert response.status_code == 200, response.content
  282. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 1}]]
  283. def test_aggregate_invalid(self):
  284. response = self.do_request(
  285. data={
  286. "start": iso_format(self.day_ago),
  287. "end": iso_format(self.day_ago + timedelta(hours=2)),
  288. "interval": "1h",
  289. "yAxis": "nope(lol)",
  290. },
  291. )
  292. assert response.status_code == 400, response.content
  293. def test_throughput_epm_hour_rollup(self):
  294. project = self.create_project()
  295. # Each of these denotes how many events to create in each hour
  296. event_counts = [6, 0, 6, 3, 0, 3]
  297. for hour, count in enumerate(event_counts):
  298. for minute in range(count):
  299. self.store_event(
  300. data={
  301. "event_id": str(uuid.uuid1()),
  302. "message": "very bad",
  303. "timestamp": iso_format(
  304. self.day_ago + timedelta(hours=hour, minutes=minute)
  305. ),
  306. "fingerprint": ["group1"],
  307. "tags": {"sentry:user": self.user.email},
  308. },
  309. project_id=project.id,
  310. )
  311. for axis in ["epm()", "tpm()"]:
  312. response = self.do_request(
  313. data={
  314. "start": iso_format(self.day_ago),
  315. "end": iso_format(self.day_ago + timedelta(hours=6)),
  316. "interval": "1h",
  317. "yAxis": axis,
  318. "project": project.id,
  319. },
  320. )
  321. assert response.status_code == 200, response.content
  322. data = response.data["data"]
  323. assert len(data) == 6
  324. rows = data[0:6]
  325. for test in zip(event_counts, rows):
  326. assert test[1][1][0]["count"] == test[0] / (3600.0 / 60.0)
  327. def test_throughput_epm_day_rollup(self):
  328. project = self.create_project()
  329. # Each of these denotes how many events to create in each minute
  330. event_counts = [6, 0, 6, 3, 0, 3]
  331. for hour, count in enumerate(event_counts):
  332. for minute in range(count):
  333. self.store_event(
  334. data={
  335. "event_id": str(uuid.uuid1()),
  336. "message": "very bad",
  337. "timestamp": iso_format(
  338. self.day_ago + timedelta(hours=hour, minutes=minute)
  339. ),
  340. "fingerprint": ["group1"],
  341. "tags": {"sentry:user": self.user.email},
  342. },
  343. project_id=project.id,
  344. )
  345. for axis in ["epm()", "tpm()"]:
  346. response = self.do_request(
  347. data={
  348. "start": iso_format(self.day_ago),
  349. "end": iso_format(self.day_ago + timedelta(hours=24)),
  350. "interval": "24h",
  351. "yAxis": axis,
  352. "project": project.id,
  353. },
  354. )
  355. assert response.status_code == 200, response.content
  356. data = response.data["data"]
  357. assert len(data) == 2
  358. assert data[0][1][0]["count"] == sum(event_counts) / (86400.0 / 60.0)
  359. def test_throughput_eps_minute_rollup(self):
  360. project = self.create_project()
  361. # Each of these denotes how many events to create in each minute
  362. event_counts = [6, 0, 6, 3, 0, 3]
  363. for minute, count in enumerate(event_counts):
  364. for second in range(count):
  365. self.store_event(
  366. data={
  367. "event_id": str(uuid.uuid1()),
  368. "message": "very bad",
  369. "timestamp": iso_format(
  370. self.day_ago + timedelta(minutes=minute, seconds=second)
  371. ),
  372. "fingerprint": ["group1"],
  373. "tags": {"sentry:user": self.user.email},
  374. },
  375. project_id=project.id,
  376. )
  377. for axis in ["eps()", "tps()"]:
  378. response = self.do_request(
  379. data={
  380. "start": iso_format(self.day_ago),
  381. "end": iso_format(self.day_ago + timedelta(minutes=6)),
  382. "interval": "1m",
  383. "yAxis": axis,
  384. "project": project.id,
  385. },
  386. )
  387. assert response.status_code == 200, response.content
  388. data = response.data["data"]
  389. assert len(data) == 6
  390. rows = data[0:6]
  391. for test in zip(event_counts, rows):
  392. assert test[1][1][0]["count"] == test[0] / 60.0
  393. def test_throughput_eps_no_rollup(self):
  394. project = self.create_project()
  395. # Each of these denotes how many events to create in each minute
  396. event_counts = [6, 0, 6, 3, 0, 3]
  397. for minute, count in enumerate(event_counts):
  398. for second in range(count):
  399. self.store_event(
  400. data={
  401. "event_id": str(uuid.uuid1()),
  402. "message": "very bad",
  403. "timestamp": iso_format(
  404. self.day_ago + timedelta(minutes=minute, seconds=second)
  405. ),
  406. "fingerprint": ["group1"],
  407. "tags": {"sentry:user": self.user.email},
  408. },
  409. project_id=project.id,
  410. )
  411. response = self.do_request(
  412. data={
  413. "start": iso_format(self.day_ago),
  414. "end": iso_format(self.day_ago + timedelta(minutes=1)),
  415. "interval": "1s",
  416. "yAxis": "eps()",
  417. "project": project.id,
  418. },
  419. )
  420. assert response.status_code == 200, response.content
  421. data = response.data["data"]
  422. # expect 60 data points between time span of 0 and 60 seconds
  423. assert len(data) == 60
  424. rows = data[0:6]
  425. for row in rows:
  426. assert row[1][0]["count"] == 1
  427. def test_transaction_events(self):
  428. prototype = {
  429. "type": "transaction",
  430. "transaction": "api.issue.delete",
  431. "spans": [],
  432. "contexts": {"trace": {"op": "foobar", "trace_id": "a" * 32, "span_id": "a" * 16}},
  433. "tags": {"important": "yes"},
  434. }
  435. fixtures = (
  436. ("d" * 32, before_now(minutes=32)),
  437. ("e" * 32, before_now(hours=1, minutes=2)),
  438. ("f" * 32, before_now(hours=1, minutes=35)),
  439. )
  440. for fixture in fixtures:
  441. data = prototype.copy()
  442. data["event_id"] = fixture[0]
  443. data["timestamp"] = iso_format(fixture[1])
  444. data["start_timestamp"] = iso_format(fixture[1] - timedelta(seconds=1))
  445. self.store_event(data=data, project_id=self.project.id)
  446. response = self.do_request(
  447. data={
  448. "project": self.project.id,
  449. "end": iso_format(before_now()),
  450. "start": iso_format(before_now(hours=2)),
  451. "query": "event.type:transaction",
  452. "interval": "30m",
  453. "yAxis": "count()",
  454. },
  455. )
  456. assert response.status_code == 200, response.content
  457. items = [item for time, item in response.data["data"] if item]
  458. # We could get more results depending on where the 30 min
  459. # windows land.
  460. assert len(items) >= 3
  461. def test_project_id_query_filter(self):
  462. response = self.do_request(
  463. data={
  464. "end": iso_format(before_now()),
  465. "start": iso_format(before_now(hours=2)),
  466. "query": "project_id:1",
  467. "interval": "30m",
  468. "yAxis": "count()",
  469. },
  470. )
  471. assert response.status_code == 200
  472. def test_latest_release_query_filter(self):
  473. response = self.do_request(
  474. data={
  475. "project": self.project.id,
  476. "end": iso_format(before_now()),
  477. "start": iso_format(before_now(hours=2)),
  478. "query": "release:latest",
  479. "interval": "30m",
  480. "yAxis": "count()",
  481. },
  482. )
  483. assert response.status_code == 200
  484. def test_conditional_filter(self):
  485. response = self.do_request(
  486. data={
  487. "start": iso_format(self.day_ago),
  488. "end": iso_format(self.day_ago + timedelta(hours=2)),
  489. "query": "id:{} OR id:{}".format("a" * 32, "b" * 32),
  490. "interval": "30m",
  491. "yAxis": "count()",
  492. },
  493. )
  494. assert response.status_code == 200, response.content
  495. data = response.data["data"]
  496. assert len(data) == 4
  497. assert data[0][1][0]["count"] == 1
  498. assert data[2][1][0]["count"] == 1
  499. def test_simple_multiple_yaxis(self):
  500. response = self.do_request(
  501. data={
  502. "start": iso_format(self.day_ago),
  503. "end": iso_format(self.day_ago + timedelta(hours=2)),
  504. "interval": "1h",
  505. "yAxis": ["user_count", "event_count"],
  506. },
  507. )
  508. assert response.status_code == 200, response.content
  509. assert response.data["user_count"]["order"] == 0
  510. assert [attrs for time, attrs in response.data["user_count"]["data"]] == [
  511. [{"count": 1}],
  512. [{"count": 1}],
  513. ]
  514. assert response.data["event_count"]["order"] == 1
  515. assert [attrs for time, attrs in response.data["event_count"]["data"]] == [
  516. [{"count": 1}],
  517. [{"count": 2}],
  518. ]
  519. def test_equation_yaxis(self):
  520. response = self.do_request(
  521. data={
  522. "start": iso_format(self.day_ago),
  523. "end": iso_format(self.day_ago + timedelta(hours=2)),
  524. "interval": "1h",
  525. "yAxis": ["equation|count() / 100"],
  526. },
  527. )
  528. assert response.status_code == 200, response.content
  529. assert len(response.data["data"]) == 2
  530. assert [attrs for time, attrs in response.data["data"]] == [
  531. [{"count": 0.01}],
  532. [{"count": 0.02}],
  533. ]
  534. def test_equation_mixed_multi_yaxis(self):
  535. response = self.do_request(
  536. data={
  537. "start": iso_format(self.day_ago),
  538. "end": iso_format(self.day_ago + timedelta(hours=2)),
  539. "interval": "1h",
  540. "yAxis": ["count()", "equation|count() * 100"],
  541. },
  542. )
  543. assert response.status_code == 200, response.content
  544. assert response.data["count()"]["order"] == 0
  545. assert [attrs for time, attrs in response.data["count()"]["data"]] == [
  546. [{"count": 1}],
  547. [{"count": 2}],
  548. ]
  549. assert response.data["equation|count() * 100"]["order"] == 1
  550. assert [attrs for time, attrs in response.data["equation|count() * 100"]["data"]] == [
  551. [{"count": 100}],
  552. [{"count": 200}],
  553. ]
  554. def test_equation_multi_yaxis(self):
  555. response = self.do_request(
  556. data={
  557. "start": iso_format(self.day_ago),
  558. "end": iso_format(self.day_ago + timedelta(hours=2)),
  559. "interval": "1h",
  560. "yAxis": ["equation|count() / 100", "equation|count() * 100"],
  561. },
  562. )
  563. assert response.status_code == 200, response.content
  564. assert response.data["equation|count() / 100"]["order"] == 0
  565. assert [attrs for time, attrs in response.data["equation|count() / 100"]["data"]] == [
  566. [{"count": 0.01}],
  567. [{"count": 0.02}],
  568. ]
  569. assert response.data["equation|count() * 100"]["order"] == 1
  570. assert [attrs for time, attrs in response.data["equation|count() * 100"]["data"]] == [
  571. [{"count": 100}],
  572. [{"count": 200}],
  573. ]
  574. def test_large_interval_no_drop_values(self):
  575. self.store_event(
  576. data={
  577. "event_id": "d" * 32,
  578. "message": "not good",
  579. "timestamp": iso_format(self.day_ago - timedelta(minutes=10)),
  580. "fingerprint": ["group3"],
  581. },
  582. project_id=self.project.id,
  583. )
  584. response = self.do_request(
  585. data={
  586. "project": self.project.id,
  587. "end": iso_format(self.day_ago),
  588. "start": iso_format(self.day_ago - timedelta(hours=24)),
  589. "query": 'message:"not good"',
  590. "interval": "1d",
  591. "yAxis": "count()",
  592. },
  593. )
  594. assert response.status_code == 200
  595. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 0}], [{"count": 1}]]
  596. @mock.patch("sentry.snuba.discover.timeseries_query", return_value={})
  597. def test_multiple_yaxis_only_one_query(self, mock_query):
  598. self.do_request(
  599. data={
  600. "project": self.project.id,
  601. "start": iso_format(self.day_ago),
  602. "end": iso_format(self.day_ago + timedelta(hours=2)),
  603. "interval": "1h",
  604. "yAxis": ["user_count", "event_count", "epm()", "eps()"],
  605. },
  606. )
  607. assert mock_query.call_count == 1
  608. @mock.patch("sentry.snuba.discover.bulk_raw_query", return_value=[{"data": []}])
  609. def test_invalid_interval(self, mock_query):
  610. self.do_request(
  611. data={
  612. "end": iso_format(before_now()),
  613. "start": iso_format(before_now(hours=24)),
  614. "query": "",
  615. "interval": "1s",
  616. "yAxis": "count()",
  617. },
  618. )
  619. assert mock_query.call_count == 1
  620. # Should've reset to the default for 24h
  621. assert mock_query.mock_calls[0].args[0][0].rollup == 300
  622. self.do_request(
  623. data={
  624. "end": iso_format(before_now()),
  625. "start": iso_format(before_now(hours=24)),
  626. "query": "",
  627. "interval": "0d",
  628. "yAxis": "count()",
  629. },
  630. )
  631. assert mock_query.call_count == 2
  632. # Should've reset to the default for 24h
  633. assert mock_query.mock_calls[0].args[0][0].rollup == 300
  634. def test_out_of_retention(self):
  635. with self.options({"system.event-retention-days": 10}):
  636. response = self.do_request(
  637. data={
  638. "start": iso_format(before_now(days=20)),
  639. "end": iso_format(before_now(days=15)),
  640. "query": "",
  641. "interval": "30m",
  642. "yAxis": "count()",
  643. },
  644. )
  645. assert response.status_code == 400
  646. @mock.patch("sentry.utils.snuba.quantize_time")
  647. def test_quantize_dates(self, mock_quantize):
  648. mock_quantize.return_value = before_now(days=1).replace(tzinfo=utc)
  649. # Don't quantize short time periods
  650. self.do_request(
  651. data={"statsPeriod": "1h", "query": "", "interval": "30m", "yAxis": "count()"},
  652. )
  653. # Don't quantize absolute date periods
  654. self.do_request(
  655. data={
  656. "start": iso_format(before_now(days=20)),
  657. "end": iso_format(before_now(days=15)),
  658. "query": "",
  659. "interval": "30m",
  660. "yAxis": "count()",
  661. },
  662. )
  663. assert len(mock_quantize.mock_calls) == 0
  664. # Quantize long date periods
  665. self.do_request(
  666. data={"statsPeriod": "90d", "query": "", "interval": "30m", "yAxis": "count()"},
  667. )
  668. assert len(mock_quantize.mock_calls) == 2
  669. def test_with_zerofill(self):
  670. response = self.do_request(
  671. data={
  672. "start": iso_format(self.day_ago),
  673. "end": iso_format(self.day_ago + timedelta(hours=2)),
  674. "interval": "30m",
  675. },
  676. )
  677. assert response.status_code == 200, response.content
  678. assert [attrs for time, attrs in response.data["data"]] == [
  679. [{"count": 1}],
  680. [{"count": 0}],
  681. [{"count": 2}],
  682. [{"count": 0}],
  683. ]
  684. def test_without_zerofill(self):
  685. start = iso_format(self.day_ago)
  686. end = iso_format(self.day_ago + timedelta(hours=2))
  687. response = self.do_request(
  688. data={
  689. "start": start,
  690. "end": end,
  691. "interval": "30m",
  692. "withoutZerofill": "1",
  693. },
  694. features={
  695. "organizations:performance-chart-interpolation": True,
  696. "organizations:discover-basic": True,
  697. },
  698. )
  699. assert response.status_code == 200, response.content
  700. assert [attrs for time, attrs in response.data["data"]] == [
  701. [{"count": 1}],
  702. [{"count": 2}],
  703. ]
  704. assert response.data["start"] == parse_date(start).timestamp()
  705. assert response.data["end"] == parse_date(end).timestamp()
  706. def test_comparison(self):
  707. self.store_event(
  708. data={
  709. "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=1)),
  710. },
  711. project_id=self.project.id,
  712. )
  713. self.store_event(
  714. data={
  715. "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=2)),
  716. },
  717. project_id=self.project.id,
  718. )
  719. self.store_event(
  720. data={
  721. "timestamp": iso_format(self.day_ago + timedelta(days=-1, hours=1, minutes=1)),
  722. },
  723. project_id=self.project2.id,
  724. )
  725. response = self.do_request(
  726. data={
  727. "start": iso_format(self.day_ago),
  728. "end": iso_format(self.day_ago + timedelta(hours=2)),
  729. "interval": "1h",
  730. "comparisonDelta": int(timedelta(days=1).total_seconds()),
  731. }
  732. )
  733. assert response.status_code == 200, response.content
  734. assert [attrs for time, attrs in response.data["data"]] == [
  735. [{"count": 1, "comparisonCount": 2}],
  736. [{"count": 2, "comparisonCount": 1}],
  737. ]
  738. def test_comparison_invalid(self):
  739. response = self.do_request(
  740. data={
  741. "start": iso_format(self.day_ago),
  742. "end": iso_format(self.day_ago + timedelta(hours=2)),
  743. "interval": "1h",
  744. "comparisonDelta": "17h",
  745. },
  746. )
  747. assert response.status_code == 400, response.content
  748. assert response.data["detail"] == "comparisonDelta must be an integer"
  749. start = before_now(days=85)
  750. end = start + timedelta(days=7)
  751. with self.options({"system.event-retention-days": 90}):
  752. response = self.do_request(
  753. data={
  754. "start": iso_format(start),
  755. "end": iso_format(end),
  756. "interval": "1h",
  757. "comparisonDelta": int(timedelta(days=7).total_seconds()),
  758. }
  759. )
  760. assert response.status_code == 400, response.content
  761. assert response.data["detail"] == "Comparison period is outside retention window"
  762. class OrganizationEventsStatsEndpointTestWithSnql(OrganizationEventsStatsEndpointTest):
  763. def setUp(self):
  764. super().setUp()
  765. self.features["organizations:discover-use-snql"] = True
  766. # Separate test for now to keep the patching simpler
  767. @mock.patch("sentry.snuba.discover.bulk_snql_query", return_value=[{"data": []}])
  768. def test_invalid_interval(self, mock_query):
  769. self.do_request(
  770. data={
  771. "end": iso_format(before_now()),
  772. "start": iso_format(before_now(hours=24)),
  773. "query": "",
  774. "interval": "1s",
  775. "yAxis": "count()",
  776. },
  777. )
  778. assert mock_query.call_count == 1
  779. # Should've reset to the default for 24h
  780. assert mock_query.mock_calls[0].args[0][0].granularity.granularity == 300
  781. self.do_request(
  782. data={
  783. "end": iso_format(before_now()),
  784. "start": iso_format(before_now(hours=24)),
  785. "query": "",
  786. "interval": "0d",
  787. "yAxis": "count()",
  788. },
  789. )
  790. assert mock_query.call_count == 2
  791. # Should've reset to the default for 24h
  792. assert mock_query.mock_calls[1].args[0][0].granularity.granularity == 300
  793. def test_equations_divide_by_zero(self):
  794. response = self.do_request(
  795. data={
  796. "start": iso_format(self.day_ago),
  797. "end": iso_format(self.day_ago + timedelta(hours=2)),
  798. "interval": "1h",
  799. # force a 0 in the denominator by doing 1 - 1
  800. # since a 0 literal is illegal as the denominator
  801. "yAxis": ["equation|count() / (1-1)"],
  802. },
  803. )
  804. assert response.status_code == 200, response.content
  805. assert len(response.data["data"]) == 2
  806. assert [attrs for time, attrs in response.data["data"]] == [
  807. [{"count": None}],
  808. [{"count": None}],
  809. ]
  810. class OrganizationEventsStatsMetricsEnhancedPerformanceEndpointTest(
  811. MetricsEnhancedPerformanceTestCase
  812. ):
  813. endpoint = "sentry-api-0-organization-events-stats"
  814. METRIC_STRINGS = ["foo_transaction"]
  815. def setUp(self):
  816. super().setUp()
  817. self.login_as(user=self.user)
  818. self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
  819. self.DEFAULT_METRIC_TIMESTAMP = self.day_ago
  820. self.url = reverse(
  821. "sentry-api-0-organization-events-stats",
  822. kwargs={"organization_slug": self.project.organization.slug},
  823. )
  824. self.features = {
  825. "organizations:performance-use-metrics": True,
  826. "organizations:discover-use-snql": True,
  827. }
  828. def do_request(self, data, url=None, features=None):
  829. if features is None:
  830. features = {"organizations:discover-basic": True}
  831. features.update(self.features)
  832. with self.feature(features):
  833. return self.client.get(self.url if url is None else url, data=data, format="json")
  834. # These throughput tests should roughly match the ones in OrganizationEventsStatsEndpointTest
  835. def test_throughput_epm_hour_rollup(self):
  836. # Each of these denotes how many events to create in each hour
  837. event_counts = [6, 0, 6, 3, 0, 3]
  838. for hour, count in enumerate(event_counts):
  839. for minute in range(count):
  840. self.store_metric(1, timestamp=self.day_ago + timedelta(hours=hour, minutes=minute))
  841. for axis in ["epm()", "tpm()"]:
  842. response = self.do_request(
  843. data={
  844. "start": iso_format(self.day_ago),
  845. "end": iso_format(self.day_ago + timedelta(hours=6)),
  846. "interval": "1h",
  847. "yAxis": axis,
  848. "project": self.project.id,
  849. "metricsEnhanced": "1",
  850. },
  851. )
  852. assert response.status_code == 200, response.content
  853. data = response.data["data"]
  854. assert len(data) == 6
  855. assert response.data["isMetricsData"]
  856. rows = data[0:6]
  857. for test in zip(event_counts, rows):
  858. assert test[1][1][0]["count"] == test[0] / (3600.0 / 60.0)
  859. def test_throughput_epm_day_rollup(self):
  860. # Each of these denotes how many events to create in each minute
  861. event_counts = [6, 0, 6, 3, 0, 3]
  862. for hour, count in enumerate(event_counts):
  863. for minute in range(count):
  864. self.store_metric(1, timestamp=self.day_ago + timedelta(hours=hour, minutes=minute))
  865. for axis in ["epm()", "tpm()"]:
  866. response = self.do_request(
  867. data={
  868. "start": iso_format(self.day_ago),
  869. "end": iso_format(self.day_ago + timedelta(hours=24)),
  870. "interval": "24h",
  871. "yAxis": axis,
  872. "project": self.project.id,
  873. "metricsEnhanced": "1",
  874. },
  875. )
  876. assert response.status_code == 200, response.content
  877. data = response.data["data"]
  878. assert len(data) == 2
  879. assert response.data["isMetricsData"]
  880. assert data[0][1][0]["count"] == sum(event_counts) / (86400.0 / 60.0)
  881. def test_throughput_epm_hour_rollup_offset_of_hour(self):
  882. # Each of these denotes how many events to create in each hour
  883. event_counts = [6, 0, 6, 3, 0, 3]
  884. for hour, count in enumerate(event_counts):
  885. for minute in range(count):
  886. self.store_metric(
  887. 1, timestamp=self.day_ago + timedelta(hours=hour, minutes=minute + 30)
  888. )
  889. for axis in ["tpm()", "epm()"]:
  890. response = self.do_request(
  891. data={
  892. "start": iso_format(self.day_ago + timedelta(minutes=30)),
  893. "end": iso_format(self.day_ago + timedelta(hours=6, minutes=30)),
  894. "interval": "1h",
  895. "yAxis": axis,
  896. "project": self.project.id,
  897. "metricsEnhanced": "1",
  898. },
  899. )
  900. assert response.status_code == 200, response.content
  901. data = response.data["data"]
  902. assert len(data) == 6
  903. assert response.data["isMetricsData"]
  904. rows = data[0:6]
  905. for test in zip(event_counts, rows):
  906. assert test[1][1][0]["count"] == test[0] / (3600.0 / 60.0)
  907. def test_throughput_eps_minute_rollup(self):
  908. # Each of these denotes how many events to create in each minute
  909. event_counts = [6, 0, 6, 3, 0, 3]
  910. for minute, count in enumerate(event_counts):
  911. for second in range(count):
  912. self.store_metric(
  913. 1, timestamp=self.day_ago + timedelta(minutes=minute, seconds=second)
  914. )
  915. for axis in ["eps()", "tps()"]:
  916. response = self.do_request(
  917. data={
  918. "start": iso_format(self.day_ago),
  919. "end": iso_format(self.day_ago + timedelta(minutes=6)),
  920. "interval": "1m",
  921. "yAxis": axis,
  922. "project": self.project.id,
  923. "metricsEnhanced": "1",
  924. },
  925. )
  926. assert response.status_code == 200, response.content
  927. data = response.data["data"]
  928. assert len(data) == 6
  929. assert response.data["isMetricsData"]
  930. rows = data[0:6]
  931. for test in zip(event_counts, rows):
  932. assert test[1][1][0]["count"] == test[0] / 60.0
  933. def test_failure_rate(self):
  934. for hour in range(6):
  935. timestamp = self.day_ago + timedelta(hours=hour, minutes=30)
  936. self.store_metric(1, tags={"transaction.status": "ok"}, timestamp=timestamp)
  937. if hour < 3:
  938. self.store_metric(
  939. 1, tags={"transaction.status": "internal_error"}, timestamp=timestamp
  940. )
  941. response = self.do_request(
  942. data={
  943. "start": iso_format(self.day_ago),
  944. "end": iso_format(self.day_ago + timedelta(hours=6)),
  945. "interval": "1h",
  946. "yAxis": ["failure_rate()"],
  947. "project": self.project.id,
  948. "metricsEnhanced": "1",
  949. },
  950. )
  951. assert response.status_code == 200, response.content
  952. data = response.data["data"]
  953. assert len(data) == 6
  954. assert response.data["isMetricsData"]
  955. assert [attrs for time, attrs in response.data["data"]] == [
  956. [{"count": 0.5}],
  957. [{"count": 0.5}],
  958. [{"count": 0.5}],
  959. [{"count": 0}],
  960. [{"count": 0}],
  961. [{"count": 0}],
  962. ]
  963. def test_percentiles_multi_axis(self):
  964. for hour in range(6):
  965. timestamp = self.day_ago + timedelta(hours=hour, minutes=30)
  966. self.store_metric(111, timestamp=timestamp)
  967. self.store_metric(222, metric="measurements.lcp", timestamp=timestamp)
  968. response = self.do_request(
  969. data={
  970. "start": iso_format(self.day_ago),
  971. "end": iso_format(self.day_ago + timedelta(hours=6)),
  972. "interval": "1h",
  973. "yAxis": ["p75(measurements.lcp)", "p75(transaction.duration)"],
  974. "project": self.project.id,
  975. "metricsEnhanced": "1",
  976. },
  977. )
  978. assert response.status_code == 200, response.content
  979. lcp = response.data["p75(measurements.lcp)"]
  980. duration = response.data["p75(transaction.duration)"]
  981. assert len(duration["data"]) == 6
  982. assert duration["isMetricsData"]
  983. assert len(lcp["data"]) == 6
  984. assert lcp["isMetricsData"]
  985. for item in duration["data"]:
  986. assert item[1][0]["count"] == 111
  987. for item in lcp["data"]:
  988. assert item[1][0]["count"] == 222
  989. @mock.patch("sentry.snuba.metrics_enhanced_performance.timeseries_query", return_value={})
  990. def test_multiple_yaxis_only_one_query(self, mock_query):
  991. self.do_request(
  992. data={
  993. "project": self.project.id,
  994. "start": iso_format(self.day_ago),
  995. "end": iso_format(self.day_ago + timedelta(hours=2)),
  996. "interval": "1h",
  997. "yAxis": ["epm()", "eps()", "tpm()", "p50(transaction.duration)"],
  998. "metricsEnhanced": "1",
  999. },
  1000. )
  1001. assert mock_query.call_count == 1
  1002. def test_aggregate_function_user_count(self):
  1003. self.store_metric(1, metric="user", timestamp=self.day_ago + timedelta(minutes=30))
  1004. self.store_metric(1, metric="user", timestamp=self.day_ago + timedelta(hours=1, minutes=30))
  1005. response = self.do_request(
  1006. data={
  1007. "start": iso_format(self.day_ago),
  1008. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1009. "interval": "1h",
  1010. "yAxis": "count_unique(user)",
  1011. "metricsEnhanced": "1",
  1012. },
  1013. )
  1014. assert response.status_code == 200, response.content
  1015. assert response.data["isMetricsData"]
  1016. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 1}]]
  1017. def test_non_mep_query_fallsback(self):
  1018. def get_mep(query):
  1019. response = self.do_request(
  1020. data={
  1021. "project": self.project.id,
  1022. "start": iso_format(self.day_ago),
  1023. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1024. "interval": "1h",
  1025. "query": query,
  1026. "yAxis": ["epm()"],
  1027. "metricsEnhanced": "1",
  1028. },
  1029. )
  1030. assert response.status_code == 200, response.content
  1031. return response.data["isMetricsData"]
  1032. assert get_mep(""), "empty query"
  1033. assert get_mep("event.type:transaction"), "event type transaction"
  1034. assert not get_mep("event.type:error"), "event type error"
  1035. assert not get_mep("transaction.duration:<15min"), "outlier filter"
  1036. assert get_mep("epm():>0.01"), "throughput filter"
  1037. assert not get_mep(
  1038. "event.type:transaction OR event.type:error"
  1039. ), "boolean with non-mep filter"
  1040. assert get_mep(
  1041. "event.type:transaction OR transaction:foo_transaction"
  1042. ), "boolean with mep filter"
  1043. def test_explicit_not_mep(self):
  1044. response = self.do_request(
  1045. data={
  1046. "project": self.project.id,
  1047. "start": iso_format(self.day_ago),
  1048. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1049. "interval": "1h",
  1050. # Should be a mep able query
  1051. "query": "",
  1052. "yAxis": ["epm()"],
  1053. "metricsEnhanced": "0",
  1054. },
  1055. )
  1056. assert response.status_code == 200, response.content
  1057. return not response.data["isMetricsData"]
  1058. class OrganizationEventsStatsTopNEvents(APITestCase, SnubaTestCase):
    def setUp(self):
        """Create two projects and a fixed set of events for top-events tests.

        ``self.event_data`` is ordered by descending ``count`` so that, once
        stored, ``self.events[i]`` is the i-th most frequent event — the order
        the "top events" queries in this class are expected to return.
        """
        super().setUp()
        self.login_as(user=self.user)
        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        self.project = self.create_project()
        self.project2 = self.create_project()
        self.user2 = self.create_user()
        # One transaction payload; every other fixture below is an error event.
        transaction_data = load_data("transaction")
        transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
        transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=4))
        transaction_data["tags"] = {"shared-tag": "yup"}
        self.event_data = [
            {
                "data": {
                    "message": "poof",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "user": {"email": self.user.email},
                    "tags": {"shared-tag": "yup"},
                    "fingerprint": ["group1"],
                },
                "project": self.project2,
                "count": 7,
            },
            {
                "data": {
                    "message": "voof",
                    # Second hour of the window, unlike the other fixtures.
                    "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=2)),
                    "fingerprint": ["group2"],
                    "user": {"email": self.user2.email},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project2,
                "count": 6,
            },
            {
                "data": {
                    "message": "very bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group3"],
                    "user": {"email": "foo@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 5,
            },
            {
                "data": {
                    "message": "oh no",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group4"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 4,
            },
            {"data": transaction_data, "project": self.project, "count": 3},
            # Not in the top 5
            {
                "data": {
                    "message": "sorta bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group5"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 2,
            },
            {
                "data": {
                    "message": "not so bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group6"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 1,
            },
        ]
        self.events = []
        # Store each payload ``count`` times under unique event ids; keep the
        # last stored event per payload for later lookups/assertions.
        for index, event_data in enumerate(self.event_data):
            data = event_data["data"].copy()
            event = {}
            for i in range(event_data["count"]):
                # f"{index}{i}" is 2 chars, repeated 16x -> a 32-char event id.
                data["event_id"] = f"{index}{i}" * 16
                event = self.store_event(data, project_id=event_data["project"].id)
            self.events.append(event)
        # The lone transaction fixture (index 4 in event_data above).
        self.transaction = self.events[4]
        self.enabled_features = {
            "organizations:discover-basic": True,
        }
        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_slug": self.project.organization.slug},
        )
  1156. def test_no_top_events_with_project_field(self):
  1157. project = self.create_project()
  1158. with self.feature(self.enabled_features):
  1159. response = self.client.get(
  1160. self.url,
  1161. data={
  1162. # make sure to query the project with 0 events
  1163. "project": project.id,
  1164. "start": iso_format(self.day_ago),
  1165. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1166. "interval": "1h",
  1167. "yAxis": "count()",
  1168. "orderby": ["-count()"],
  1169. "field": ["count()", "project"],
  1170. "topEvents": 5,
  1171. },
  1172. format="json",
  1173. )
  1174. assert response.status_code == 200, response.content
  1175. # When there are no top events, we do not return an empty dict.
  1176. # Instead, we return a single zero-filled series for an empty graph.
  1177. data = response.data["data"]
  1178. assert [attrs for time, attrs in data] == [[{"count": 0}], [{"count": 0}]]
  1179. def test_no_top_events(self):
  1180. project = self.create_project()
  1181. with self.feature(self.enabled_features):
  1182. response = self.client.get(
  1183. self.url,
  1184. data={
  1185. # make sure to query the project with 0 events
  1186. "project": project.id,
  1187. "start": iso_format(self.day_ago),
  1188. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1189. "interval": "1h",
  1190. "yAxis": "count()",
  1191. "orderby": ["-count()"],
  1192. "field": ["count()", "message", "user.email"],
  1193. "topEvents": 5,
  1194. },
  1195. format="json",
  1196. )
  1197. data = response.data["data"]
  1198. assert response.status_code == 200, response.content
  1199. # When there are no top events, we do not return an empty dict.
  1200. # Instead, we return a single zero-filled series for an empty graph.
  1201. assert [attrs for time, attrs in data] == [[{"count": 0}], [{"count": 0}]]
  1202. def test_simple_top_events(self):
  1203. with self.feature(self.enabled_features):
  1204. response = self.client.get(
  1205. self.url,
  1206. data={
  1207. "start": iso_format(self.day_ago),
  1208. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1209. "interval": "1h",
  1210. "yAxis": "count()",
  1211. "orderby": ["-count()"],
  1212. "field": ["count()", "message", "user.email"],
  1213. "topEvents": 5,
  1214. },
  1215. format="json",
  1216. )
  1217. data = response.data
  1218. assert response.status_code == 200, response.content
  1219. assert len(data) == 6
  1220. for index, event in enumerate(self.events[:5]):
  1221. message = event.message or event.transaction
  1222. results = data[
  1223. ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
  1224. ]
  1225. assert results["order"] == index
  1226. assert [{"count": self.event_data[index]["count"]}] in [
  1227. attrs for _, attrs in results["data"]
  1228. ]
  1229. other = data["Other"]
  1230. assert other["order"] == 5
  1231. assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
  1232. def test_top_events_limits(self):
  1233. data = {
  1234. "start": iso_format(self.day_ago),
  1235. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1236. "interval": "1h",
  1237. "yAxis": "count()",
  1238. "orderby": ["-count()"],
  1239. "field": ["count()", "message", "user.email"],
  1240. }
  1241. with self.feature(self.enabled_features):
  1242. data["topEvents"] = MAX_TOP_EVENTS + 1
  1243. response = self.client.get(self.url, data, format="json")
  1244. assert response.status_code == 400
  1245. data["topEvents"] = 0
  1246. response = self.client.get(self.url, data, format="json")
  1247. assert response.status_code == 400
  1248. data["topEvents"] = "a"
  1249. response = self.client.get(self.url, data, format="json")
  1250. assert response.status_code == 400
  1251. def test_top_events_with_projects(self):
  1252. with self.feature(self.enabled_features):
  1253. response = self.client.get(
  1254. self.url,
  1255. data={
  1256. "start": iso_format(self.day_ago),
  1257. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1258. "interval": "1h",
  1259. "yAxis": "count()",
  1260. "orderby": ["-count()"],
  1261. "field": ["count()", "message", "project"],
  1262. "topEvents": 5,
  1263. },
  1264. format="json",
  1265. )
  1266. data = response.data
  1267. assert response.status_code == 200, response.content
  1268. assert len(data) == 6
  1269. for index, event in enumerate(self.events[:5]):
  1270. message = event.message or event.transaction
  1271. results = data[",".join([message, event.project.slug])]
  1272. assert results["order"] == index
  1273. assert [{"count": self.event_data[index]["count"]}] in [
  1274. attrs for time, attrs in results["data"]
  1275. ]
  1276. other = data["Other"]
  1277. assert other["order"] == 5
  1278. assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
  1279. def test_top_events_with_issue(self):
  1280. # delete a group to make sure if this happens the value becomes unknown
  1281. event_group = self.events[0].group
  1282. event_group.delete()
  1283. with self.feature(self.enabled_features):
  1284. response = self.client.get(
  1285. self.url,
  1286. data={
  1287. "start": iso_format(self.day_ago),
  1288. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1289. "interval": "1h",
  1290. "yAxis": "count()",
  1291. "orderby": ["-count()"],
  1292. "field": ["count()", "message", "issue"],
  1293. "topEvents": 5,
  1294. "query": "!event.type:transaction",
  1295. },
  1296. format="json",
  1297. )
  1298. data = response.data
  1299. assert response.status_code == 200, response.content
  1300. assert len(data) == 6
  1301. for index, event in enumerate(self.events[:4]):
  1302. message = event.message
  1303. # Because we deleted the group for event 0
  1304. if index == 0 or event.group is None:
  1305. issue = "unknown"
  1306. else:
  1307. issue = event.group.qualified_short_id
  1308. results = data[",".join([issue, message])]
  1309. assert results["order"] == index
  1310. assert [{"count": self.event_data[index]["count"]}] in [
  1311. attrs for time, attrs in results["data"]
  1312. ]
  1313. other = data["Other"]
  1314. assert other["order"] == 5
  1315. assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
  1316. @mock.patch("sentry.models.GroupManager.get_issues_mapping")
  1317. def test_top_events_with_unknown_issue(self, mock_issues_mapping):
  1318. event = self.events[0]
  1319. event_data = self.event_data[0]
  1320. # ensure that the issue mapping returns None for the issue
  1321. mock_issues_mapping.return_value = {event.group.id: None}
  1322. with self.feature(self.enabled_features):
  1323. response = self.client.get(
  1324. self.url,
  1325. data={
  1326. "start": iso_format(self.day_ago),
  1327. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1328. "interval": "1h",
  1329. "yAxis": "count()",
  1330. "orderby": ["-count()"],
  1331. "field": ["count()", "issue"],
  1332. "topEvents": 5,
  1333. # narrow the search to just one issue
  1334. "query": f"issue.id:{event.group.id}",
  1335. },
  1336. format="json",
  1337. )
  1338. assert response.status_code == 200, response.content
  1339. data = response.data
  1340. assert len(data) == 1
  1341. results = data["unknown"]
  1342. assert results["order"] == 0
  1343. assert [{"count": event_data["count"]}] in [attrs for time, attrs in results["data"]]
  1344. @mock.patch(
  1345. "sentry.snuba.discover.raw_query",
  1346. side_effect=[{"data": [{"group_id": 1}], "meta": []}, {"data": [], "meta": []}],
  1347. )
  1348. def test_top_events_with_issue_check_query_conditions(self, mock_query):
  1349. """ "Intentionally separate from test_top_events_with_issue
  1350. This is to test against a bug where the condition for issues wasn't included and we'd be missing data for
  1351. the interval since we'd cap out the max rows. This was not caught by the previous test since the results
  1352. would still be correct given the smaller interval & lack of data
  1353. """
  1354. with self.feature(self.enabled_features):
  1355. self.client.get(
  1356. self.url,
  1357. data={
  1358. "start": iso_format(self.day_ago),
  1359. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1360. "interval": "1h",
  1361. "yAxis": "count()",
  1362. "orderby": ["-count()"],
  1363. "field": ["count()", "message", "issue"],
  1364. "topEvents": 5,
  1365. "query": "!event.type:transaction",
  1366. },
  1367. format="json",
  1368. )
  1369. assert ["group_id", "IN", [1]] in mock_query.mock_calls[1].kwargs["conditions"]
  1370. def test_top_events_with_functions(self):
  1371. with self.feature(self.enabled_features):
  1372. response = self.client.get(
  1373. self.url,
  1374. data={
  1375. "start": iso_format(self.day_ago),
  1376. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1377. "interval": "1h",
  1378. "yAxis": "count()",
  1379. "orderby": ["-p99()"],
  1380. "field": ["transaction", "avg(transaction.duration)", "p99()"],
  1381. "topEvents": 5,
  1382. },
  1383. format="json",
  1384. )
  1385. data = response.data
  1386. assert response.status_code == 200, response.content
  1387. assert len(data) == 1
  1388. results = data[self.transaction.transaction]
  1389. assert results["order"] == 0
  1390. assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
  1391. def test_top_events_with_functions_on_different_transactions(self):
  1392. """Transaction2 has less events, but takes longer so order should be self.transaction then transaction2"""
  1393. transaction_data = load_data("transaction")
  1394. transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
  1395. transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
  1396. transaction_data["transaction"] = "/foo_bar/"
  1397. transaction2 = self.store_event(transaction_data, project_id=self.project.id)
  1398. with self.feature(self.enabled_features):
  1399. response = self.client.get(
  1400. self.url,
  1401. data={
  1402. "start": iso_format(self.day_ago),
  1403. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1404. "interval": "1h",
  1405. "yAxis": "count()",
  1406. "orderby": ["-p99()"],
  1407. "field": ["transaction", "avg(transaction.duration)", "p99()"],
  1408. "topEvents": 5,
  1409. },
  1410. format="json",
  1411. )
  1412. data = response.data
  1413. assert response.status_code == 200, response.content
  1414. assert len(data) == 2
  1415. results = data[self.transaction.transaction]
  1416. assert results["order"] == 1
  1417. assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
  1418. results = data[transaction2.transaction]
  1419. assert results["order"] == 0
  1420. assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
  1421. def test_top_events_with_query(self):
  1422. transaction_data = load_data("transaction")
  1423. transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
  1424. transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
  1425. transaction_data["transaction"] = "/foo_bar/"
  1426. self.store_event(transaction_data, project_id=self.project.id)
  1427. with self.feature(self.enabled_features):
  1428. response = self.client.get(
  1429. self.url,
  1430. data={
  1431. "start": iso_format(self.day_ago),
  1432. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1433. "interval": "1h",
  1434. "yAxis": "count()",
  1435. "orderby": ["-p99()"],
  1436. "query": "transaction:/foo_bar/",
  1437. "field": ["transaction", "avg(transaction.duration)", "p99()"],
  1438. "topEvents": 5,
  1439. },
  1440. format="json",
  1441. )
  1442. data = response.data
  1443. assert response.status_code == 200, response.content
  1444. assert len(data) == 1
  1445. transaction2_data = data["/foo_bar/"]
  1446. assert transaction2_data["order"] == 0
  1447. assert [attrs for time, attrs in transaction2_data["data"]] == [
  1448. [{"count": 1}],
  1449. [{"count": 0}],
  1450. ]
    def test_top_events_with_negated_condition(self):
        """A negated condition (`!message:...`) excludes the first event, so the
        remaining events each move up one rank in the top-events ordering."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    # Exclude the highest-count event from the results.
                    "query": f"!message:{self.events[0].message}",
                    "field": ["message", "count()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        # events[0] was filtered out, so event_data is offset by one relative to
        # the loop index; only the first four remaining events are checked here.
        for index, event in enumerate(self.events[1:5]):
            message = event.message or event.transaction
            results = data[message]
            assert results["order"] == index
            assert [{"count": self.event_data[index + 1]["count"]}] in [
                attrs for _, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
  1480. def test_top_events_with_epm(self):
  1481. with self.feature(self.enabled_features):
  1482. response = self.client.get(
  1483. self.url,
  1484. data={
  1485. "start": iso_format(self.day_ago),
  1486. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1487. "interval": "1h",
  1488. "yAxis": "epm()",
  1489. "orderby": ["-count()"],
  1490. "field": ["message", "user.email", "count()"],
  1491. "topEvents": 5,
  1492. },
  1493. format="json",
  1494. )
  1495. data = response.data
  1496. assert response.status_code == 200, response.content
  1497. assert len(data) == 6
  1498. for index, event in enumerate(self.events[:5]):
  1499. message = event.message or event.transaction
  1500. results = data[
  1501. ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
  1502. ]
  1503. assert results["order"] == index
  1504. assert [{"count": self.event_data[index]["count"] / (3600.0 / 60.0)}] in [
  1505. attrs for time, attrs in results["data"]
  1506. ]
  1507. other = data["Other"]
  1508. assert other["order"] == 5
  1509. assert [{"count": 0.05}] in [attrs for _, attrs in other["data"]]
  1510. def test_top_events_with_multiple_yaxis(self):
  1511. with self.feature(self.enabled_features):
  1512. response = self.client.get(
  1513. self.url,
  1514. data={
  1515. "start": iso_format(self.day_ago),
  1516. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1517. "interval": "1h",
  1518. "yAxis": ["epm()", "count()"],
  1519. "orderby": ["-count()"],
  1520. "field": ["message", "user.email", "count()"],
  1521. "topEvents": 5,
  1522. },
  1523. format="json",
  1524. )
  1525. data = response.data
  1526. assert response.status_code == 200, response.content
  1527. assert len(data) == 6
  1528. for index, event in enumerate(self.events[:5]):
  1529. message = event.message or event.transaction
  1530. results = data[
  1531. ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
  1532. ]
  1533. assert results["order"] == index
  1534. assert results["epm()"]["order"] == 0
  1535. assert results["count()"]["order"] == 1
  1536. assert [{"count": self.event_data[index]["count"] / (3600.0 / 60.0)}] in [
  1537. attrs for time, attrs in results["epm()"]["data"]
  1538. ]
  1539. assert [{"count": self.event_data[index]["count"]}] in [
  1540. attrs for time, attrs in results["count()"]["data"]
  1541. ]
  1542. other = data["Other"]
  1543. assert other["order"] == 5
  1544. assert other["epm()"]["order"] == 0
  1545. assert other["count()"]["order"] == 1
  1546. assert [{"count": 0.05}] in [attrs for _, attrs in other["epm()"]["data"]]
  1547. assert [{"count": 3}] in [attrs for _, attrs in other["count()"]["data"]]
  1548. def test_top_events_with_boolean(self):
  1549. with self.feature(self.enabled_features):
  1550. response = self.client.get(
  1551. self.url,
  1552. data={
  1553. "start": iso_format(self.day_ago),
  1554. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1555. "interval": "1h",
  1556. "yAxis": "count()",
  1557. "orderby": ["-count()"],
  1558. "field": ["count()", "message", "device.charging"],
  1559. "topEvents": 5,
  1560. },
  1561. format="json",
  1562. )
  1563. data = response.data
  1564. assert response.status_code == 200, response.content
  1565. assert len(data) == 6
  1566. for index, event in enumerate(self.events[:5]):
  1567. message = event.message or event.transaction
  1568. results = data[",".join(["False", message])]
  1569. assert results["order"] == index
  1570. assert [{"count": self.event_data[index]["count"]}] in [
  1571. attrs for time, attrs in results["data"]
  1572. ]
  1573. other = data["Other"]
  1574. assert other["order"] == 5
  1575. assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
  1576. def test_top_events_with_error_unhandled(self):
  1577. self.login_as(user=self.user)
  1578. project = self.create_project()
  1579. prototype = load_data("android-ndk")
  1580. prototype["event_id"] = "f" * 32
  1581. prototype["message"] = "not handled"
  1582. prototype["exception"]["values"][0]["value"] = "not handled"
  1583. prototype["exception"]["values"][0]["mechanism"]["handled"] = False
  1584. prototype["timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
  1585. self.store_event(data=prototype, project_id=project.id)
  1586. with self.feature(self.enabled_features):
  1587. response = self.client.get(
  1588. self.url,
  1589. data={
  1590. "start": iso_format(self.day_ago),
  1591. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1592. "interval": "1h",
  1593. "yAxis": "count()",
  1594. "orderby": ["-count()"],
  1595. "field": ["count()", "error.unhandled"],
  1596. "topEvents": 5,
  1597. },
  1598. format="json",
  1599. )
  1600. data = response.data
  1601. assert response.status_code == 200, response.content
  1602. assert len(data) == 2
    def test_top_events_with_timestamp(self):
        """Grouping by the raw ``timestamp`` field: each event's timestamp
        becomes part of its top-event key."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "query": "event.type:default",
                    "field": ["count()", "message", "timestamp"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        # Transactions won't be in the results because of the query
        # NOTE: deleting index 4 from both lists keeps self.events and
        # self.event_data index-aligned for the loop below (the deletions
        # mutate the shared fixture lists for the remainder of this test).
        del self.events[4]
        del self.event_data[4]
        for index, event in enumerate(self.events[:5]):
            results = data[",".join([event.message, event.timestamp])]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
  1634. def test_top_events_with_int(self):
  1635. with self.feature(self.enabled_features):
  1636. response = self.client.get(
  1637. self.url,
  1638. data={
  1639. "start": iso_format(self.day_ago),
  1640. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1641. "interval": "1h",
  1642. "yAxis": "count()",
  1643. "orderby": ["-count()"],
  1644. "field": ["count()", "message", "transaction.duration"],
  1645. "topEvents": 5,
  1646. },
  1647. format="json",
  1648. )
  1649. data = response.data
  1650. assert response.status_code == 200, response.content
  1651. assert len(data) == 1
  1652. results = data[",".join([self.transaction.transaction, "120000"])]
  1653. assert results["order"] == 0
  1654. assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
  1655. def test_top_events_with_user(self):
  1656. with self.feature(self.enabled_features):
  1657. response = self.client.get(
  1658. self.url,
  1659. data={
  1660. "start": iso_format(self.day_ago),
  1661. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1662. "interval": "1h",
  1663. "yAxis": "count()",
  1664. "orderby": ["-count()", "user"],
  1665. "field": ["user", "count()"],
  1666. "topEvents": 5,
  1667. },
  1668. format="json",
  1669. )
  1670. data = response.data
  1671. assert response.status_code == 200, response.content
  1672. assert len(data) == 5
  1673. assert data["email:bar@example.com"]["order"] == 1
  1674. assert [attrs for time, attrs in data["email:bar@example.com"]["data"]] == [
  1675. [{"count": 7}],
  1676. [{"count": 0}],
  1677. ]
  1678. assert [attrs for time, attrs in data["ip:127.0.0.1"]["data"]] == [
  1679. [{"count": 3}],
  1680. [{"count": 0}],
  1681. ]
  1682. def test_top_events_with_user_and_email(self):
  1683. with self.feature(self.enabled_features):
  1684. response = self.client.get(
  1685. self.url,
  1686. data={
  1687. "start": iso_format(self.day_ago),
  1688. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1689. "interval": "1h",
  1690. "yAxis": "count()",
  1691. "orderby": ["-count()", "user"],
  1692. "field": ["user", "user.email", "count()"],
  1693. "topEvents": 5,
  1694. },
  1695. format="json",
  1696. )
  1697. data = response.data
  1698. assert response.status_code == 200, response.content
  1699. assert len(data) == 5
  1700. assert data["email:bar@example.com,bar@example.com"]["order"] == 1
  1701. assert [attrs for time, attrs in data["email:bar@example.com,bar@example.com"]["data"]] == [
  1702. [{"count": 7}],
  1703. [{"count": 0}],
  1704. ]
  1705. assert [attrs for time, attrs in data["ip:127.0.0.1,None"]["data"]] == [
  1706. [{"count": 3}],
  1707. [{"count": 0}],
  1708. ]
  1709. def test_top_events_with_user_display(self):
  1710. with self.feature(self.enabled_features):
  1711. response = self.client.get(
  1712. self.url,
  1713. data={
  1714. "start": iso_format(self.day_ago),
  1715. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1716. "interval": "1h",
  1717. "yAxis": "count()",
  1718. "orderby": ["-count()"],
  1719. "field": ["message", "user.display", "count()"],
  1720. "topEvents": 5,
  1721. },
  1722. format="json",
  1723. )
  1724. data = response.data
  1725. assert response.status_code == 200, response.content
  1726. assert len(data) == 6
  1727. for index, event in enumerate(self.events[:5]):
  1728. message = event.message or event.transaction
  1729. user = self.event_data[index]["data"]["user"]
  1730. results = data[
  1731. ",".join([message, user.get("email", None) or user.get("ip_address", "None")])
  1732. ]
  1733. assert results["order"] == index
  1734. assert [{"count": self.event_data[index]["count"]}] in [
  1735. attrs for _, attrs in results["data"]
  1736. ]
  1737. other = data["Other"]
  1738. assert other["order"] == 5
  1739. assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
    @pytest.mark.skip(reason="A query with group_id will not return transactions")
    def test_top_events_none_filter(self):
        """When a field is None in one of the top events, make sure we filter by it.

        In this case event[4] is a transaction and has no issue.
        """
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "issue"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 5
        for index, event in enumerate(self.events[:5]):
            # An event without a group (e.g. a transaction) is keyed "unknown".
            if event.group is None:
                issue = "unknown"
            else:
                issue = event.group.qualified_short_id
            results = data[issue]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]
    @pytest.mark.skip(reason="Invalid query - transaction events don't have group_id field")
    def test_top_events_one_field_with_none(self):
        """Grouping transactions by issue collapses everything into a single
        "unknown" bucket, since transaction events carry no issue."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "query": "event.type:transaction",
                    "field": ["count()", "issue"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 1
        results = data["unknown"]
        assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
        assert results["order"] == 0
    def test_top_events_with_error_handled(self):
        """error.handled produces three buckets: "", "1" (handled) and "0"
        (unhandled)."""
        # NOTE: mutates the shared self.event_data[0] payload in place, storing
        # first a handled and then an unhandled copy of the same ValidationError.
        data = self.event_data[0]
        data["data"]["level"] = "error"
        data["data"]["exception"] = {
            "values": [
                {
                    "type": "ValidationError",
                    "value": "Bad request",
                    "mechanism": {"handled": True, "type": "generic"},
                }
            ]
        }
        self.store_event(data["data"], project_id=data["project"].id)
        data["data"]["exception"] = {
            "values": [
                {
                    "type": "ValidationError",
                    "value": "Bad request",
                    "mechanism": {"handled": False, "type": "generic"},
                }
            ]
        }
        self.store_event(data["data"], project_id=data["project"].id)
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "error.handled"],
                    "topEvents": 5,
                    "query": "!event.type:transaction",
                },
                format="json",
            )
        assert response.status_code == 200, response.content
        data = response.data
        assert len(data) == 3
        # "" bucket — presumably events with no exception mechanism at all;
        # confirm against the error.handled implementation.
        results = data[""]
        assert [attrs for time, attrs in results["data"]] == [[{"count": 19}], [{"count": 6}]]
        assert results["order"] == 0
        results = data["1"]
        assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
        results = data["0"]
        assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
  1843. def test_top_events_with_aggregate_condition(self):
  1844. with self.feature(self.enabled_features):
  1845. response = self.client.get(
  1846. self.url,
  1847. data={
  1848. "start": iso_format(self.day_ago),
  1849. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1850. "interval": "1h",
  1851. "yAxis": "count()",
  1852. "orderby": ["-count()"],
  1853. "field": ["message", "count()"],
  1854. "query": "count():>4",
  1855. "topEvents": 5,
  1856. },
  1857. format="json",
  1858. )
  1859. assert response.status_code == 200, response.content
  1860. data = response.data
  1861. assert len(data) == 3
  1862. for index, event in enumerate(self.events[:3]):
  1863. message = event.message or event.transaction
  1864. results = data[message]
  1865. assert results["order"] == index
  1866. assert [{"count": self.event_data[index]["count"]}] in [
  1867. attrs for time, attrs in results["data"]
  1868. ]
    @pytest.mark.xfail(reason="There's only 2 rows total, which mean there shouldn't be other")
    def test_top_events_with_to_other(self):
        """to_other(release, ...) buckets events into "current" vs "others"."""
        version = "version -@'\" 1.2,3+(4)"
        version_escaped = "version -@'\\\" 1.2,3+(4)"
        # every symbol is replaced with a underscore to make the alias
        version_alias = "version_______1_2_3__4_"
        # add an event in the current release
        event = self.event_data[0]
        event_data = event["data"].copy()
        event_data["event_id"] = uuid4().hex
        event_data["release"] = version
        self.store_event(event_data, project_id=event["project"].id)
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    # the double underscores around the version alias is because of a comma and quote
                    "orderby": [f"-to_other_release__{version_alias}__others_current"],
                    "field": [
                        "count()",
                        f'to_other(release,"{version_escaped}",others,current)',
                    ],
                    "topEvents": 2,
                },
                format="json",
            )
        assert response.status_code == 200, response.content
        data = response.data
        assert len(data) == 2
        current = data["current"]
        assert current["order"] == 1
        # only the single event stored above belongs to the "current" release
        assert sum(attrs[0]["count"] for _, attrs in current["data"]) == 1
        others = data["others"]
        assert others["order"] == 0
        # everything else falls into "others"
        assert sum(attrs[0]["count"] for _, attrs in others["data"]) == sum(
            event_data["count"] for event_data in self.event_data
        )
  1910. def test_top_events_with_equations(self):
  1911. with self.feature(self.enabled_features):
  1912. response = self.client.get(
  1913. self.url,
  1914. data={
  1915. "start": iso_format(self.day_ago),
  1916. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1917. "interval": "1h",
  1918. "yAxis": "equation|count() / 100",
  1919. "orderby": ["-count()"],
  1920. "field": ["count()", "message", "user.email", "equation|count() / 100"],
  1921. "topEvents": 5,
  1922. },
  1923. format="json",
  1924. )
  1925. data = response.data
  1926. assert response.status_code == 200, response.content
  1927. assert len(data) == 6
  1928. for index, event in enumerate(self.events[:5]):
  1929. message = event.message or event.transaction
  1930. results = data[
  1931. ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
  1932. ]
  1933. assert results["order"] == index
  1934. assert [{"count": self.event_data[index]["count"] / 100}] in [
  1935. attrs for time, attrs in results["data"]
  1936. ]
  1937. other = data["Other"]
  1938. assert other["order"] == 5
  1939. assert [{"count": 0.03}] in [attrs for _, attrs in other["data"]]
    @mock.patch("sentry.snuba.discover.bulk_raw_query", return_value=[{"data": [], "meta": []}])
    @mock.patch("sentry.snuba.discover.raw_query", return_value={"data": [], "meta": []})
    def test_invalid_interval(self, mock_raw_query, mock_bulk_query):
        """Intervals that would produce too many data points are reset to a
        sane default rollup instead of erroring.

        Each top-events request below issues two raw queries (the mock call
        count advances by 2 per request); the second call of each request is
        the one whose ``rollup`` kwarg is asserted.
        """
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    # 7,200 points for each event
                    "start": iso_format(before_now(seconds=7200)),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                },
            )
        assert response.status_code == 200
        assert mock_bulk_query.call_count == 1
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    "start": iso_format(before_now(seconds=7200)),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                    # 7,200 points for each event * 2, should error
                    "topEvents": 2,
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 2
        # Should've reset to the default for between 1 and 24h
        assert mock_raw_query.mock_calls[1].kwargs["rollup"] == 300
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    # 1999 points * 5 events should just be enough to not error
                    "start": iso_format(before_now(seconds=1999)),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                    "topEvents": 5,
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 4
        # Should've left the interval alone since we're just below the limit
        assert mock_raw_query.mock_calls[3].kwargs["rollup"] == 1
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    "start": iso_format(before_now(hours=24)),
                    "field": ["count()", "issue"],
                    "query": "",
                    # A zero-length interval is invalid and must also fall back.
                    "interval": "0d",
                    "yAxis": "count()",
                    "topEvents": 5,
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 6
        # Should've default to 24h's default of 5m
        assert mock_raw_query.mock_calls[5].kwargs["rollup"] == 300
    def test_top_events_timestamp_fields(self):
        """timestamp, timestamp.to_hour and timestamp.to_day can all be used as
        top-event group-by fields at once."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "timestamp", "timestamp.to_hour", "timestamp.to_day"],
                    "topEvents": 5,
                },
            )
        assert response.status_code == 200
        data = response.data
        assert len(data) == 3
        # these are the timestamps corresponding to the events stored
        timestamps = [
            self.day_ago + timedelta(minutes=2),
            self.day_ago + timedelta(hours=1, minutes=2),
            self.day_ago + timedelta(minutes=4),
        ]
        timestamp_hours = [timestamp.replace(minute=0, second=0) for timestamp in timestamps]
        timestamp_days = [timestamp.replace(hour=0, minute=0, second=0) for timestamp in timestamps]
        for ts, ts_hr, ts_day in zip(timestamps, timestamp_hours, timestamp_days):
            # NOTE(review): the key lists the day column before the hour column,
            # not in request-field order — presumably the endpoint orders the
            # columns itself; confirm against the implementation if this breaks.
            key = f"{iso_format(ts)}+00:00,{iso_format(ts_day)}+00:00,{iso_format(ts_hr)}+00:00"
            count = sum(
                e["count"] for e in self.event_data if e["data"]["timestamp"] == iso_format(ts)
            )
            results = data[key]
            assert [{"count": count}] in [attrs for time, attrs in results["data"]]
  2048. def test_top_events_other_with_matching_columns(self):
  2049. with self.feature(self.enabled_features):
  2050. response = self.client.get(
  2051. self.url,
  2052. data={
  2053. "start": iso_format(self.day_ago),
  2054. "end": iso_format(self.day_ago + timedelta(hours=2)),
  2055. "interval": "1h",
  2056. "yAxis": "count()",
  2057. "orderby": ["-count()"],
  2058. "field": ["count()", "tags[shared-tag]", "message"],
  2059. "topEvents": 5,
  2060. },
  2061. format="json",
  2062. )
  2063. data = response.data
  2064. assert response.status_code == 200, response.content
  2065. assert len(data) == 6
  2066. for index, event in enumerate(self.events[:5]):
  2067. message = event.message or event.transaction
  2068. results = data[",".join([message, "yup"])]
  2069. assert results["order"] == index
  2070. assert [{"count": self.event_data[index]["count"]}] in [
  2071. attrs for _, attrs in results["data"]
  2072. ]
  2073. other = data["Other"]
  2074. assert other["order"] == 5
  2075. assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
  2076. def test_top_events_with_field_overlapping_other_key(self):
  2077. transaction_data = load_data("transaction")
  2078. transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
  2079. transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
  2080. transaction_data["transaction"] = OTHER_KEY
  2081. for i in range(5):
  2082. data = transaction_data.copy()
  2083. data["event_id"] = "ab" + f"{i}" * 30
  2084. data["contexts"]["trace"]["span_id"] = "ab" + f"{i}" * 14
  2085. self.store_event(data, project_id=self.project.id)
  2086. with self.feature(self.enabled_features):
  2087. response = self.client.get(
  2088. self.url,
  2089. data={
  2090. "start": iso_format(self.day_ago),
  2091. "end": iso_format(self.day_ago + timedelta(hours=2)),
  2092. "interval": "1h",
  2093. "yAxis": "count()",
  2094. "orderby": ["-count()"],
  2095. "field": ["count()", "message"],
  2096. "topEvents": 5,
  2097. },
  2098. format="json",
  2099. )
  2100. data = response.data
  2101. assert response.status_code == 200, response.content
  2102. assert len(data) == 6
  2103. assert f"{OTHER_KEY} (message)" in data
  2104. results = data[f"{OTHER_KEY} (message)"]
  2105. assert [{"count": 5}] in [attrs for _, attrs in results["data"]]
  2106. other = data["Other"]
  2107. assert other["order"] == 5
  2108. assert [{"count": 4}] in [attrs for _, attrs in other["data"]]
  2109. class OrganizationEventsStatsTopNEventsWithSnql(OrganizationEventsStatsTopNEvents):
  2110. def setUp(self):
  2111. super().setUp()
  2112. self.enabled_features["organizations:discover-use-snql"] = True
    # Separate test for now to keep the patching simpler
    @mock.patch("sentry.snuba.discover.bulk_snql_query", return_value=[{"data": [], "meta": []}])
    @mock.patch(
        "sentry.search.events.builder.raw_snql_query", return_value={"data": [], "meta": []}
    )
    def test_invalid_interval(self, mock_raw_query, mock_bulk_query):
        """Snql variant of the parent's test_invalid_interval: same scenarios,
        but the effective granularity is read off the snql request object
        instead of the legacy ``rollup`` kwarg. Each top-events request issues
        two snql queries (call count advances by 2 per request)."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    # 7,200 points for each event
                    "start": iso_format(before_now(seconds=7200)),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                },
            )
        assert response.status_code == 200
        assert mock_bulk_query.call_count == 1
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    "start": iso_format(before_now(seconds=7200)),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                    # 7,200 points for each event * 2, should error
                    "topEvents": 2,
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 2
        # Should've reset to the default for between 1 and 24h
        assert mock_raw_query.mock_calls[1].args[0].granularity.granularity == 300
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    # 1999 points * 5 events should just be enough to not error
                    "start": iso_format(before_now(seconds=1999)),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                    "topEvents": 5,
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 4
        # Should've left the interval alone since we're just below the limit
        assert mock_raw_query.mock_calls[3].args[0].granularity.granularity == 1
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    "start": iso_format(before_now(hours=24)),
                    "field": ["count()", "issue"],
                    "query": "",
                    # A zero-length interval is invalid and must also fall back.
                    "interval": "0d",
                    "yAxis": "count()",
                    "topEvents": 5,
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 6
        # Should've default to 24h's default of 5m
        assert mock_raw_query.mock_calls[5].args[0].granularity.granularity == 300
    @mock.patch(
        "sentry.search.events.builder.raw_snql_query",
        side_effect=[{"data": [{"issue.id": 1}], "meta": []}, {"data": [], "meta": []}],
    )
    def test_top_events_with_issue_check_query_conditions(self, mock_query):
        """Intentionally separate from test_top_events_with_issue.

        This is to test against a bug where the condition for issues wasn't included and we'd be missing data for
        the interval since we'd cap out the max rows. This was not caught by the previous test since the results
        would still be correct given the smaller interval & lack of data.
        """
        with self.feature(self.enabled_features):
            self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "issue"],
                    "topEvents": 5,
                    "query": "!event.type:transaction",
                },
                format="json",
            )
        # The second snql call (the timeseries query) must pin issue.id to the
        # top events returned by the first (mocked) call.
        assert (
            Condition(Function("coalesce", [Column("group_id"), 0], "issue.id"), Op.IN, [1])
            in mock_query.mock_calls[1].args[0].where
        )
  2220. def test_top_events_boolean_condition_and_project_field(self):
  2221. with self.feature(self.enabled_features):
  2222. response = self.client.get(
  2223. self.url,
  2224. data={
  2225. "start": iso_format(self.day_ago),
  2226. "end": iso_format(self.day_ago + timedelta(hours=2)),
  2227. "interval": "1h",
  2228. "yAxis": "count()",
  2229. "orderby": ["-count()"],
  2230. "field": ["project", "count()"],
  2231. "topEvents": 5,
  2232. "query": "event.type:transaction (transaction:*a OR transaction:b*)",
  2233. },
  2234. format="json",
  2235. )
  2236. assert response.status_code == 200
    def test_top_events_with_to_other(self):
        # Re-declared without a decorator — presumably so the parent's
        # @pytest.mark.xfail marker does not apply to the snql run; confirm
        # that intent before removing this override.
        super().test_top_events_with_to_other()