# test_organization_events_stats.py — Sentry organization events-stats endpoint tests (excerpt)
  1. import uuid
  2. from datetime import timedelta
  3. from unittest import mock
  4. from uuid import uuid4
  5. import pytest
  6. from dateutil.parser import parse as parse_date
  7. from django.urls import reverse
  8. from pytz import utc
  9. from snuba_sdk.column import Column
  10. from snuba_sdk.conditions import Condition, Op
  11. from snuba_sdk.function import Function
  12. from sentry.constants import MAX_TOP_EVENTS
  13. from sentry.models.transaction_threshold import ProjectTransactionThreshold, TransactionMetric
  14. from sentry.snuba.discover import OTHER_KEY
  15. from sentry.testutils import APITestCase, SnubaTestCase
  16. from sentry.testutils.helpers.datetime import before_now, iso_format
  17. from sentry.testutils.silo import region_silo_test
  18. from sentry.utils.samples import load_data
  19. pytestmark = pytest.mark.sentry_metrics
  20. @region_silo_test
  21. class OrganizationEventsStatsEndpointTest(APITestCase, SnubaTestCase):
  22. endpoint = "sentry-api-0-organization-events-stats"
  23. def setUp(self):
  24. super().setUp()
  25. self.login_as(user=self.user)
  26. self.authed_user = self.user
  27. self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
  28. self.project = self.create_project()
  29. self.project2 = self.create_project()
  30. self.user = self.create_user()
  31. self.user2 = self.create_user()
  32. self.store_event(
  33. data={
  34. "event_id": "a" * 32,
  35. "message": "very bad",
  36. "timestamp": iso_format(self.day_ago + timedelta(minutes=1)),
  37. "fingerprint": ["group1"],
  38. "tags": {"sentry:user": self.user.email},
  39. },
  40. project_id=self.project.id,
  41. )
  42. self.store_event(
  43. data={
  44. "event_id": "b" * 32,
  45. "message": "oh my",
  46. "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=1)),
  47. "fingerprint": ["group2"],
  48. "tags": {"sentry:user": self.user2.email},
  49. },
  50. project_id=self.project2.id,
  51. )
  52. self.store_event(
  53. data={
  54. "event_id": "c" * 32,
  55. "message": "very bad",
  56. "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=2)),
  57. "fingerprint": ["group2"],
  58. "tags": {"sentry:user": self.user2.email},
  59. },
  60. project_id=self.project2.id,
  61. )
  62. self.url = reverse(
  63. "sentry-api-0-organization-events-stats",
  64. kwargs={"organization_slug": self.project.organization.slug},
  65. )
  66. self.features = {}
  67. def do_request(self, data, url=None, features=None):
  68. if features is None:
  69. features = {"organizations:discover-basic": True}
  70. features.update(self.features)
  71. with self.feature(features):
  72. return self.client.get(self.url if url is None else url, data=data, format="json")
  73. def test_simple(self):
  74. response = self.do_request(
  75. {
  76. "start": iso_format(self.day_ago),
  77. "end": iso_format(self.day_ago + timedelta(hours=2)),
  78. "interval": "1h",
  79. },
  80. )
  81. assert response.status_code == 200, response.content
  82. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  83. def test_misaligned_last_bucket(self):
  84. response = self.do_request(
  85. data={
  86. "start": iso_format(self.day_ago - timedelta(minutes=30)),
  87. "end": iso_format(self.day_ago + timedelta(hours=1, minutes=30)),
  88. "interval": "1h",
  89. "partial": "1",
  90. },
  91. )
  92. assert response.status_code == 200, response.content
  93. assert [attrs for time, attrs in response.data["data"]] == [
  94. [{"count": 0}],
  95. [{"count": 1}],
  96. [{"count": 2}],
  97. ]
  98. def test_no_projects(self):
  99. org = self.create_organization(owner=self.user)
  100. self.login_as(user=self.user)
  101. url = reverse(
  102. "sentry-api-0-organization-events-stats", kwargs={"organization_slug": org.slug}
  103. )
  104. response = self.do_request({}, url)
  105. assert response.status_code == 200, response.content
  106. assert len(response.data["data"]) == 0
  107. def test_user_count(self):
  108. self.store_event(
  109. data={
  110. "event_id": "d" * 32,
  111. "message": "something",
  112. "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
  113. "tags": {"sentry:user": self.user2.email},
  114. "fingerprint": ["group2"],
  115. },
  116. project_id=self.project2.id,
  117. )
  118. response = self.do_request(
  119. data={
  120. "start": iso_format(self.day_ago),
  121. "end": iso_format(self.day_ago + timedelta(hours=2)),
  122. "interval": "1h",
  123. "yAxis": "user_count",
  124. },
  125. )
  126. assert response.status_code == 200, response.content
  127. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 2}], [{"count": 1}]]
  128. def test_discover2_backwards_compatibility(self):
  129. response = self.do_request(
  130. data={
  131. "project": self.project.id,
  132. "start": iso_format(self.day_ago),
  133. "end": iso_format(self.day_ago + timedelta(hours=2)),
  134. "interval": "1h",
  135. "yAxis": "user_count",
  136. },
  137. )
  138. assert response.status_code == 200, response.content
  139. assert len(response.data["data"]) > 0
  140. response = self.do_request(
  141. data={
  142. "project": self.project.id,
  143. "start": iso_format(self.day_ago),
  144. "end": iso_format(self.day_ago + timedelta(hours=2)),
  145. "interval": "1h",
  146. "yAxis": "event_count",
  147. },
  148. )
  149. assert response.status_code == 200, response.content
  150. assert len(response.data["data"]) > 0
  151. def test_with_event_count_flag(self):
  152. response = self.do_request(
  153. data={
  154. "start": iso_format(self.day_ago),
  155. "end": iso_format(self.day_ago + timedelta(hours=2)),
  156. "interval": "1h",
  157. "yAxis": "event_count",
  158. },
  159. )
  160. assert response.status_code == 200, response.content
  161. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  162. def test_performance_view_feature(self):
  163. response = self.do_request(
  164. data={
  165. "end": iso_format(before_now()),
  166. "start": iso_format(before_now(hours=2)),
  167. "query": "project_id:1",
  168. "interval": "30m",
  169. "yAxis": "count()",
  170. },
  171. features={
  172. "organizations:performance-view": True,
  173. "organizations:discover-basic": False,
  174. },
  175. )
  176. assert response.status_code == 200, response.content
  177. def test_apdex_divide_by_zero(self):
  178. ProjectTransactionThreshold.objects.create(
  179. project=self.project,
  180. organization=self.project.organization,
  181. threshold=600,
  182. metric=TransactionMetric.LCP.value,
  183. )
  184. # Shouldn't count towards apdex
  185. data = load_data(
  186. "transaction",
  187. start_timestamp=self.day_ago + timedelta(minutes=(1)),
  188. timestamp=self.day_ago + timedelta(minutes=(3)),
  189. )
  190. data["transaction"] = "/apdex/new/"
  191. data["user"] = {"email": "1@example.com"}
  192. data["measurements"] = {}
  193. self.store_event(data, project_id=self.project.id)
  194. response = self.do_request(
  195. data={
  196. "start": iso_format(self.day_ago),
  197. "end": iso_format(self.day_ago + timedelta(hours=2)),
  198. "interval": "1h",
  199. "yAxis": "apdex()",
  200. "project": [self.project.id],
  201. },
  202. )
  203. assert response.status_code == 200, response.content
  204. assert len(response.data["data"]) == 2
  205. data = response.data["data"]
  206. # 0 transactions with LCP 0/0
  207. assert [attrs for time, attrs in response.data["data"]] == [
  208. [{"count": 0}],
  209. [{"count": 0}],
  210. ]
  211. def test_aggregate_function_apdex(self):
  212. project1 = self.create_project()
  213. project2 = self.create_project()
  214. events = [
  215. ("one", 400, project1.id),
  216. ("one", 400, project1.id),
  217. ("two", 3000, project2.id),
  218. ("two", 1000, project2.id),
  219. ("three", 3000, project2.id),
  220. ]
  221. for idx, event in enumerate(events):
  222. data = load_data(
  223. "transaction",
  224. start_timestamp=self.day_ago + timedelta(minutes=(1 + idx)),
  225. timestamp=self.day_ago + timedelta(minutes=(1 + idx), milliseconds=event[1]),
  226. )
  227. data["event_id"] = f"{idx}" * 32
  228. data["transaction"] = f"/apdex/new/{event[0]}"
  229. data["user"] = {"email": f"{idx}@example.com"}
  230. self.store_event(data, project_id=event[2])
  231. response = self.do_request(
  232. data={
  233. "start": iso_format(self.day_ago),
  234. "end": iso_format(self.day_ago + timedelta(hours=2)),
  235. "interval": "1h",
  236. "yAxis": "apdex()",
  237. },
  238. )
  239. assert response.status_code == 200, response.content
  240. assert [attrs for time, attrs in response.data["data"]] == [
  241. [{"count": 0.3}],
  242. [{"count": 0}],
  243. ]
  244. ProjectTransactionThreshold.objects.create(
  245. project=project1,
  246. organization=project1.organization,
  247. threshold=100,
  248. metric=TransactionMetric.DURATION.value,
  249. )
  250. ProjectTransactionThreshold.objects.create(
  251. project=project2,
  252. organization=project1.organization,
  253. threshold=100,
  254. metric=TransactionMetric.DURATION.value,
  255. )
  256. response = self.do_request(
  257. data={
  258. "start": iso_format(self.day_ago),
  259. "end": iso_format(self.day_ago + timedelta(hours=2)),
  260. "interval": "1h",
  261. "yAxis": "apdex()",
  262. },
  263. )
  264. assert response.status_code == 200, response.content
  265. assert [attrs for time, attrs in response.data["data"]] == [
  266. [{"count": 0.2}],
  267. [{"count": 0}],
  268. ]
  269. response = self.do_request(
  270. data={
  271. "start": iso_format(self.day_ago),
  272. "end": iso_format(self.day_ago + timedelta(hours=2)),
  273. "interval": "1h",
  274. "yAxis": ["user_count", "apdex()"],
  275. },
  276. )
  277. assert response.status_code == 200, response.content
  278. assert response.data["user_count"]["order"] == 0
  279. assert [attrs for time, attrs in response.data["user_count"]["data"]] == [
  280. [{"count": 5}],
  281. [{"count": 0}],
  282. ]
  283. assert response.data["apdex()"]["order"] == 1
  284. assert [attrs for time, attrs in response.data["apdex()"]["data"]] == [
  285. [{"count": 0.2}],
  286. [{"count": 0}],
  287. ]
  288. def test_aggregate_function_count(self):
  289. response = self.do_request(
  290. data={
  291. "start": iso_format(self.day_ago),
  292. "end": iso_format(self.day_ago + timedelta(hours=2)),
  293. "interval": "1h",
  294. "yAxis": "count()",
  295. },
  296. )
  297. assert response.status_code == 200, response.content
  298. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  299. def test_invalid_aggregate(self):
  300. response = self.do_request(
  301. data={
  302. "start": iso_format(self.day_ago),
  303. "end": iso_format(self.day_ago + timedelta(hours=2)),
  304. "interval": "1h",
  305. "yAxis": "rubbish",
  306. },
  307. )
  308. assert response.status_code == 400, response.content
  309. def test_aggregate_function_user_count(self):
  310. response = self.do_request(
  311. data={
  312. "start": iso_format(self.day_ago),
  313. "end": iso_format(self.day_ago + timedelta(hours=2)),
  314. "interval": "1h",
  315. "yAxis": "count_unique(user)",
  316. },
  317. )
  318. assert response.status_code == 200, response.content
  319. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 1}]]
  320. def test_aggregate_invalid(self):
  321. response = self.do_request(
  322. data={
  323. "start": iso_format(self.day_ago),
  324. "end": iso_format(self.day_ago + timedelta(hours=2)),
  325. "interval": "1h",
  326. "yAxis": "nope(lol)",
  327. },
  328. )
  329. assert response.status_code == 400, response.content
  330. def test_throughput_epm_hour_rollup(self):
  331. project = self.create_project()
  332. # Each of these denotes how many events to create in each hour
  333. event_counts = [6, 0, 6, 3, 0, 3]
  334. for hour, count in enumerate(event_counts):
  335. for minute in range(count):
  336. self.store_event(
  337. data={
  338. "event_id": str(uuid.uuid1()),
  339. "message": "very bad",
  340. "timestamp": iso_format(
  341. self.day_ago + timedelta(hours=hour, minutes=minute)
  342. ),
  343. "fingerprint": ["group1"],
  344. "tags": {"sentry:user": self.user.email},
  345. },
  346. project_id=project.id,
  347. )
  348. for axis in ["epm()", "tpm()"]:
  349. response = self.do_request(
  350. data={
  351. "start": iso_format(self.day_ago),
  352. "end": iso_format(self.day_ago + timedelta(hours=6)),
  353. "interval": "1h",
  354. "yAxis": axis,
  355. "project": project.id,
  356. },
  357. )
  358. assert response.status_code == 200, response.content
  359. data = response.data["data"]
  360. assert len(data) == 6
  361. rows = data[0:6]
  362. for test in zip(event_counts, rows):
  363. assert test[1][1][0]["count"] == test[0] / (3600.0 / 60.0)
  364. def test_throughput_epm_day_rollup(self):
  365. project = self.create_project()
  366. # Each of these denotes how many events to create in each minute
  367. event_counts = [6, 0, 6, 3, 0, 3]
  368. for hour, count in enumerate(event_counts):
  369. for minute in range(count):
  370. self.store_event(
  371. data={
  372. "event_id": str(uuid.uuid1()),
  373. "message": "very bad",
  374. "timestamp": iso_format(
  375. self.day_ago + timedelta(hours=hour, minutes=minute)
  376. ),
  377. "fingerprint": ["group1"],
  378. "tags": {"sentry:user": self.user.email},
  379. },
  380. project_id=project.id,
  381. )
  382. for axis in ["epm()", "tpm()"]:
  383. response = self.do_request(
  384. data={
  385. "start": iso_format(self.day_ago),
  386. "end": iso_format(self.day_ago + timedelta(hours=24)),
  387. "interval": "24h",
  388. "yAxis": axis,
  389. "project": project.id,
  390. },
  391. )
  392. assert response.status_code == 200, response.content
  393. data = response.data["data"]
  394. assert len(data) == 2
  395. assert data[0][1][0]["count"] == sum(event_counts) / (86400.0 / 60.0)
  396. def test_throughput_eps_minute_rollup(self):
  397. project = self.create_project()
  398. # Each of these denotes how many events to create in each minute
  399. event_counts = [6, 0, 6, 3, 0, 3]
  400. for minute, count in enumerate(event_counts):
  401. for second in range(count):
  402. self.store_event(
  403. data={
  404. "event_id": str(uuid.uuid1()),
  405. "message": "very bad",
  406. "timestamp": iso_format(
  407. self.day_ago + timedelta(minutes=minute, seconds=second)
  408. ),
  409. "fingerprint": ["group1"],
  410. "tags": {"sentry:user": self.user.email},
  411. },
  412. project_id=project.id,
  413. )
  414. for axis in ["eps()", "tps()"]:
  415. response = self.do_request(
  416. data={
  417. "start": iso_format(self.day_ago),
  418. "end": iso_format(self.day_ago + timedelta(minutes=6)),
  419. "interval": "1m",
  420. "yAxis": axis,
  421. "project": project.id,
  422. },
  423. )
  424. assert response.status_code == 200, response.content
  425. data = response.data["data"]
  426. assert len(data) == 6
  427. rows = data[0:6]
  428. for test in zip(event_counts, rows):
  429. assert test[1][1][0]["count"] == test[0] / 60.0
  430. def test_throughput_eps_no_rollup(self):
  431. project = self.create_project()
  432. # Each of these denotes how many events to create in each minute
  433. event_counts = [6, 0, 6, 3, 0, 3]
  434. for minute, count in enumerate(event_counts):
  435. for second in range(count):
  436. self.store_event(
  437. data={
  438. "event_id": str(uuid.uuid1()),
  439. "message": "very bad",
  440. "timestamp": iso_format(
  441. self.day_ago + timedelta(minutes=minute, seconds=second)
  442. ),
  443. "fingerprint": ["group1"],
  444. "tags": {"sentry:user": self.user.email},
  445. },
  446. project_id=project.id,
  447. )
  448. response = self.do_request(
  449. data={
  450. "start": iso_format(self.day_ago),
  451. "end": iso_format(self.day_ago + timedelta(minutes=1)),
  452. "interval": "1s",
  453. "yAxis": "eps()",
  454. "project": project.id,
  455. },
  456. )
  457. assert response.status_code == 200, response.content
  458. data = response.data["data"]
  459. # expect 60 data points between time span of 0 and 60 seconds
  460. assert len(data) == 60
  461. rows = data[0:6]
  462. for row in rows:
  463. assert row[1][0]["count"] == 1
  464. def test_transaction_events(self):
  465. prototype = {
  466. "type": "transaction",
  467. "transaction": "api.issue.delete",
  468. "spans": [],
  469. "contexts": {"trace": {"op": "foobar", "trace_id": "a" * 32, "span_id": "a" * 16}},
  470. "tags": {"important": "yes"},
  471. }
  472. fixtures = (
  473. ("d" * 32, before_now(minutes=32)),
  474. ("e" * 32, before_now(hours=1, minutes=2)),
  475. ("f" * 32, before_now(hours=1, minutes=35)),
  476. )
  477. for fixture in fixtures:
  478. data = prototype.copy()
  479. data["event_id"] = fixture[0]
  480. data["timestamp"] = iso_format(fixture[1])
  481. data["start_timestamp"] = iso_format(fixture[1] - timedelta(seconds=1))
  482. self.store_event(data=data, project_id=self.project.id)
  483. response = self.do_request(
  484. data={
  485. "project": self.project.id,
  486. "end": iso_format(before_now()),
  487. "start": iso_format(before_now(hours=2)),
  488. "query": "event.type:transaction",
  489. "interval": "30m",
  490. "yAxis": "count()",
  491. },
  492. )
  493. assert response.status_code == 200, response.content
  494. items = [item for time, item in response.data["data"] if item]
  495. # We could get more results depending on where the 30 min
  496. # windows land.
  497. assert len(items) >= 3
  498. def test_project_id_query_filter(self):
  499. response = self.do_request(
  500. data={
  501. "end": iso_format(before_now()),
  502. "start": iso_format(before_now(hours=2)),
  503. "query": "project_id:1",
  504. "interval": "30m",
  505. "yAxis": "count()",
  506. },
  507. )
  508. assert response.status_code == 200
  509. def test_latest_release_query_filter(self):
  510. response = self.do_request(
  511. data={
  512. "project": self.project.id,
  513. "end": iso_format(before_now()),
  514. "start": iso_format(before_now(hours=2)),
  515. "query": "release:latest",
  516. "interval": "30m",
  517. "yAxis": "count()",
  518. },
  519. )
  520. assert response.status_code == 200
  521. def test_conditional_filter(self):
  522. response = self.do_request(
  523. data={
  524. "start": iso_format(self.day_ago),
  525. "end": iso_format(self.day_ago + timedelta(hours=2)),
  526. "query": "id:{} OR id:{}".format("a" * 32, "b" * 32),
  527. "interval": "30m",
  528. "yAxis": "count()",
  529. },
  530. )
  531. assert response.status_code == 200, response.content
  532. data = response.data["data"]
  533. assert len(data) == 4
  534. assert data[0][1][0]["count"] == 1
  535. assert data[2][1][0]["count"] == 1
  536. def test_simple_multiple_yaxis(self):
  537. response = self.do_request(
  538. data={
  539. "start": iso_format(self.day_ago),
  540. "end": iso_format(self.day_ago + timedelta(hours=2)),
  541. "interval": "1h",
  542. "yAxis": ["user_count", "event_count"],
  543. },
  544. )
  545. assert response.status_code == 200, response.content
  546. assert response.data["user_count"]["order"] == 0
  547. assert [attrs for time, attrs in response.data["user_count"]["data"]] == [
  548. [{"count": 1}],
  549. [{"count": 1}],
  550. ]
  551. assert response.data["event_count"]["order"] == 1
  552. assert [attrs for time, attrs in response.data["event_count"]["data"]] == [
  553. [{"count": 1}],
  554. [{"count": 2}],
  555. ]
  556. def test_equation_yaxis(self):
  557. response = self.do_request(
  558. data={
  559. "start": iso_format(self.day_ago),
  560. "end": iso_format(self.day_ago + timedelta(hours=2)),
  561. "interval": "1h",
  562. "yAxis": ["equation|count() / 100"],
  563. },
  564. )
  565. assert response.status_code == 200, response.content
  566. assert len(response.data["data"]) == 2
  567. assert [attrs for time, attrs in response.data["data"]] == [
  568. [{"count": 0.01}],
  569. [{"count": 0.02}],
  570. ]
  571. def test_equation_mixed_multi_yaxis(self):
  572. response = self.do_request(
  573. data={
  574. "start": iso_format(self.day_ago),
  575. "end": iso_format(self.day_ago + timedelta(hours=2)),
  576. "interval": "1h",
  577. "yAxis": ["count()", "equation|count() * 100"],
  578. },
  579. )
  580. assert response.status_code == 200, response.content
  581. assert response.data["count()"]["order"] == 0
  582. assert [attrs for time, attrs in response.data["count()"]["data"]] == [
  583. [{"count": 1}],
  584. [{"count": 2}],
  585. ]
  586. assert response.data["equation|count() * 100"]["order"] == 1
  587. assert [attrs for time, attrs in response.data["equation|count() * 100"]["data"]] == [
  588. [{"count": 100}],
  589. [{"count": 200}],
  590. ]
  591. def test_equation_multi_yaxis(self):
  592. response = self.do_request(
  593. data={
  594. "start": iso_format(self.day_ago),
  595. "end": iso_format(self.day_ago + timedelta(hours=2)),
  596. "interval": "1h",
  597. "yAxis": ["equation|count() / 100", "equation|count() * 100"],
  598. },
  599. )
  600. assert response.status_code == 200, response.content
  601. assert response.data["equation|count() / 100"]["order"] == 0
  602. assert [attrs for time, attrs in response.data["equation|count() / 100"]["data"]] == [
  603. [{"count": 0.01}],
  604. [{"count": 0.02}],
  605. ]
  606. assert response.data["equation|count() * 100"]["order"] == 1
  607. assert [attrs for time, attrs in response.data["equation|count() * 100"]["data"]] == [
  608. [{"count": 100}],
  609. [{"count": 200}],
  610. ]
  611. def test_large_interval_no_drop_values(self):
  612. self.store_event(
  613. data={
  614. "event_id": "d" * 32,
  615. "message": "not good",
  616. "timestamp": iso_format(self.day_ago - timedelta(minutes=10)),
  617. "fingerprint": ["group3"],
  618. },
  619. project_id=self.project.id,
  620. )
  621. response = self.do_request(
  622. data={
  623. "project": self.project.id,
  624. "end": iso_format(self.day_ago),
  625. "start": iso_format(self.day_ago - timedelta(hours=24)),
  626. "query": 'message:"not good"',
  627. "interval": "1d",
  628. "yAxis": "count()",
  629. },
  630. )
  631. assert response.status_code == 200
  632. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 0}], [{"count": 1}]]
  633. @mock.patch("sentry.snuba.discover.timeseries_query", return_value={})
  634. def test_multiple_yaxis_only_one_query(self, mock_query):
  635. self.do_request(
  636. data={
  637. "project": self.project.id,
  638. "start": iso_format(self.day_ago),
  639. "end": iso_format(self.day_ago + timedelta(hours=2)),
  640. "interval": "1h",
  641. "yAxis": ["user_count", "event_count", "epm()", "eps()"],
  642. },
  643. )
  644. assert mock_query.call_count == 1
  645. @mock.patch("sentry.snuba.discover.bulk_snql_query", return_value=[{"data": []}])
  646. def test_invalid_interval(self, mock_query):
  647. self.do_request(
  648. data={
  649. "end": iso_format(before_now()),
  650. "start": iso_format(before_now(hours=24)),
  651. "query": "",
  652. "interval": "1s",
  653. "yAxis": "count()",
  654. },
  655. )
  656. assert mock_query.call_count == 1
  657. # Should've reset to the default for 24h
  658. assert mock_query.mock_calls[0].args[0][0].query.granularity.granularity == 300
  659. self.do_request(
  660. data={
  661. "end": iso_format(before_now()),
  662. "start": iso_format(before_now(hours=24)),
  663. "query": "",
  664. "interval": "0d",
  665. "yAxis": "count()",
  666. },
  667. )
  668. assert mock_query.call_count == 2
  669. # Should've reset to the default for 24h
  670. assert mock_query.mock_calls[1].args[0][0].query.granularity.granularity == 300
  671. def test_out_of_retention(self):
  672. with self.options({"system.event-retention-days": 10}):
  673. response = self.do_request(
  674. data={
  675. "start": iso_format(before_now(days=20)),
  676. "end": iso_format(before_now(days=15)),
  677. "query": "",
  678. "interval": "30m",
  679. "yAxis": "count()",
  680. },
  681. )
  682. assert response.status_code == 400
    @mock.patch("sentry.utils.snuba.quantize_time")
    def test_quantize_dates(self, mock_quantize):
        """Snuba date quantization applies only to long relative stats periods,
        never to short periods or absolute start/end dates."""
        mock_quantize.return_value = before_now(days=1).replace(tzinfo=utc)
        # Don't quantize short time periods
        self.do_request(
            data={"statsPeriod": "1h", "query": "", "interval": "30m", "yAxis": "count()"},
        )
        # Don't quantize absolute date periods
        self.do_request(
            data={
                "start": iso_format(before_now(days=20)),
                "end": iso_format(before_now(days=15)),
                "query": "",
                "interval": "30m",
                "yAxis": "count()",
            },
        )
        assert len(mock_quantize.mock_calls) == 0
        # Quantize long date periods
        self.do_request(
            data={"statsPeriod": "90d", "query": "", "interval": "30m", "yAxis": "count()"},
        )
        # Two calls expected — presumably one each for start and end; verify against helper.
        assert len(mock_quantize.mock_calls) == 2
    def test_with_zerofill(self):
        """Buckets without events are zero-filled by default (30m interval over 2h
        yields four buckets, two of them empty)."""
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "30m",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 1}],
            [{"count": 0}],
            [{"count": 2}],
            [{"count": 0}],
        ]
    def test_without_zerofill(self):
        """With withoutZerofill=1 (and the chart-interpolation feature) empty
        buckets are omitted, and the response echoes the requested start/end
        as epoch timestamps."""
        start = iso_format(self.day_ago)
        end = iso_format(self.day_ago + timedelta(hours=2))
        response = self.do_request(
            data={
                "start": start,
                "end": end,
                "interval": "30m",
                "withoutZerofill": "1",
            },
            features={
                "organizations:performance-chart-interpolation": True,
                "organizations:discover-basic": True,
            },
        )
        assert response.status_code == 200, response.content
        # Only the two non-empty buckets remain.
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 1}],
            [{"count": 2}],
        ]
        assert response.data["start"] == parse_date(start).timestamp()
        assert response.data["end"] == parse_date(end).timestamp()
    def test_comparison(self):
        """comparisonDelta adds a comparisonCount per bucket taken from the same
        window shifted back by the delta (here: one day earlier)."""
        # Previous-day events: two in the first hour bucket, one in the second.
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=1)),
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=2)),
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, hours=1, minutes=1)),
            },
            project_id=self.project2.id,
        )
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "comparisonDelta": int(timedelta(days=1).total_seconds()),
            }
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 1, "comparisonCount": 2}],
            [{"count": 2, "comparisonCount": 1}],
        ]
    def test_comparison_invalid(self):
        """comparisonDelta must be an integer, and the shifted comparison window
        must still fall inside the retention period."""
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "comparisonDelta": "17h",
            },
        )
        assert response.status_code == 400, response.content
        assert response.data["detail"] == "comparisonDelta must be an integer"
        # 85 days back + a 7-day delta pushes the comparison window past 90-day retention.
        start = before_now(days=85)
        end = start + timedelta(days=7)
        with self.options({"system.event-retention-days": 90}):
            response = self.do_request(
                data={
                    "start": iso_format(start),
                    "end": iso_format(end),
                    "interval": "1h",
                    "comparisonDelta": int(timedelta(days=7).total_seconds()),
                }
            )
            assert response.status_code == 400, response.content
            assert response.data["detail"] == "Comparison period is outside retention window"
    def test_equations_divide_by_zero(self):
        """An equation that divides by zero yields null counts rather than an error."""
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                # force a 0 in the denominator by doing 1 - 1
                # since a 0 literal is illegal as the denominator
                "yAxis": ["equation|count() / (1-1)"],
            },
        )
        assert response.status_code == 200, response.content
        assert len(response.data["data"]) == 2
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": None}],
            [{"count": None}],
        ]
    @mock.patch("sentry.search.events.builder.raw_snql_query")
    def test_profiles_dataset_simple(self, mock_snql_query):
        """The profiles dataset accepts profiling aggregate yAxis functions when
        the profiling feature is enabled (snuba mocked to an empty result)."""
        mock_snql_query.side_effect = [{"meta": {}, "data": []}]
        query = {
            "yAxis": [
                "count()",
                "p75()",
                "p95()",
                "p99()",
                "p75(profile.duration)",
                "p95(profile.duration)",
                "p99(profile.duration)",
            ],
            "project": [self.project.id],
            "dataset": "profiles",
        }
        response = self.do_request(query, features={"organizations:profiling": True})
        assert response.status_code == 200, response.content
  834. @region_silo_test
  835. class OrganizationEventsStatsTopNEvents(APITestCase, SnubaTestCase):
    def setUp(self):
        """Build a fixed top-N fixture: six error groups with descending event
        counts (7..1) across two projects, plus one transaction (count 3), all
        sharing the `shared-tag` tag so they land in the same queries."""
        super().setUp()
        self.login_as(user=self.user)
        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        self.project = self.create_project()
        self.project2 = self.create_project()
        self.user2 = self.create_user()
        transaction_data = load_data("transaction")
        transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
        transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=4))
        transaction_data["tags"] = {"shared-tag": "yup"}
        # Ordered by count desc; tests index into this list in parallel with self.events.
        self.event_data = [
            {
                "data": {
                    "message": "poof",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "user": {"email": self.user.email},
                    "tags": {"shared-tag": "yup"},
                    "fingerprint": ["group1"],
                },
                "project": self.project2,
                "count": 7,
            },
            {
                "data": {
                    "message": "voof",
                    "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=2)),
                    "fingerprint": ["group2"],
                    "user": {"email": self.user2.email},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project2,
                "count": 6,
            },
            {
                "data": {
                    "message": "very bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group3"],
                    "user": {"email": "foo@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 5,
            },
            {
                "data": {
                    "message": "oh no",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group4"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 4,
            },
            {"data": transaction_data, "project": self.project, "count": 3},
            # Not in the top 5
            {
                "data": {
                    "message": "sorta bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group5"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 2,
            },
            {
                "data": {
                    "message": "not so bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group6"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 1,
            },
        ]
        # Store `count` copies of each payload; keep the last stored event per entry.
        self.events = []
        for index, event_data in enumerate(self.event_data):
            data = event_data["data"].copy()
            event = {}
            for i in range(event_data["count"]):
                # Unique, deterministic 32-char event id per copy.
                data["event_id"] = f"{index}{i}" * 16
                event = self.store_event(data, project_id=event_data["project"].id)
            self.events.append(event)
        # Index 4 is the single transaction fixture.
        self.transaction = self.events[4]
        self.enabled_features = {
            "organizations:discover-basic": True,
        }
        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_slug": self.project.organization.slug},
        )
  933. def test_no_top_events_with_project_field(self):
  934. project = self.create_project()
  935. with self.feature(self.enabled_features):
  936. response = self.client.get(
  937. self.url,
  938. data={
  939. # make sure to query the project with 0 events
  940. "project": project.id,
  941. "start": iso_format(self.day_ago),
  942. "end": iso_format(self.day_ago + timedelta(hours=2)),
  943. "interval": "1h",
  944. "yAxis": "count()",
  945. "orderby": ["-count()"],
  946. "field": ["count()", "project"],
  947. "topEvents": 5,
  948. },
  949. format="json",
  950. )
  951. assert response.status_code == 200, response.content
  952. # When there are no top events, we do not return an empty dict.
  953. # Instead, we return a single zero-filled series for an empty graph.
  954. data = response.data["data"]
  955. assert [attrs for time, attrs in data] == [[{"count": 0}], [{"count": 0}]]
  956. def test_no_top_events(self):
  957. project = self.create_project()
  958. with self.feature(self.enabled_features):
  959. response = self.client.get(
  960. self.url,
  961. data={
  962. # make sure to query the project with 0 events
  963. "project": project.id,
  964. "start": iso_format(self.day_ago),
  965. "end": iso_format(self.day_ago + timedelta(hours=2)),
  966. "interval": "1h",
  967. "yAxis": "count()",
  968. "orderby": ["-count()"],
  969. "field": ["count()", "message", "user.email"],
  970. "topEvents": 5,
  971. },
  972. format="json",
  973. )
  974. data = response.data["data"]
  975. assert response.status_code == 200, response.content
  976. # When there are no top events, we do not return an empty dict.
  977. # Instead, we return a single zero-filled series for an empty graph.
  978. assert [attrs for time, attrs in data] == [[{"count": 0}], [{"count": 0}]]
    def test_no_top_events_with_multi_axis(self):
        """With multiple yAxis values and no events, each axis gets its own
        zero-filled series, nested under the empty ("") group key."""
        project = self.create_project()
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    # make sure to query the project with 0 events
                    "project": project.id,
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": ["count()", "count_unique(user)"],
                    "orderby": ["-count()"],
                    "field": ["count()", "count_unique(user)", "message", "user.email"],
                    "topEvents": 5,
                },
                format="json",
            )
        assert response.status_code == 200
        data = response.data[""]
        assert [attrs for time, attrs in data["count()"]["data"]] == [
            [{"count": 0}],
            [{"count": 0}],
        ]
        assert [attrs for time, attrs in data["count_unique(user)"]["data"]] == [
            [{"count": 0}],
            [{"count": 0}],
        ]
    def test_simple_top_events(self):
        """Top-5 series are keyed by "message,email", carry an `order` rank, and
        the remainder is aggregated into an "Other" series."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "user.email"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        # 5 top events + "Other"
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            results = data[
                ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
            ]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for _, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        # Events 6 and 7 sum to 3 in the Other bucket.
        assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
  1037. def test_top_events_limits(self):
  1038. data = {
  1039. "start": iso_format(self.day_ago),
  1040. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1041. "interval": "1h",
  1042. "yAxis": "count()",
  1043. "orderby": ["-count()"],
  1044. "field": ["count()", "message", "user.email"],
  1045. }
  1046. with self.feature(self.enabled_features):
  1047. data["topEvents"] = MAX_TOP_EVENTS + 1
  1048. response = self.client.get(self.url, data, format="json")
  1049. assert response.status_code == 400
  1050. data["topEvents"] = 0
  1051. response = self.client.get(self.url, data, format="json")
  1052. assert response.status_code == 400
  1053. data["topEvents"] = "a"
  1054. response = self.client.get(self.url, data, format="json")
  1055. assert response.status_code == 400
    def test_top_events_with_projects(self):
        """Grouping by project keys the series on "message,project-slug"."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "project"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        # 5 top events + "Other"
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            results = data[",".join([message, event.project.slug])]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
    def test_top_events_with_issue(self):
        """Grouping by issue: a deleted group renders as "unknown", and the
        transaction is excluded via the event.type filter."""
        # delete a group to make sure if this happens the value becomes unknown
        event_group = self.events[0].group
        event_group.delete()
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "issue"],
                    "topEvents": 5,
                    "query": "!event.type:transaction",
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        # Only the first four error events (the transaction is filtered out).
        for index, event in enumerate(self.events[:4]):
            message = event.message
            # Because we deleted the group for event 0
            if index == 0 or event.group is None:
                issue = "unknown"
            else:
                issue = event.group.qualified_short_id
            results = data[",".join([issue, message])]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
  1121. def test_top_events_with_transaction_status(self):
  1122. with self.feature(self.enabled_features):
  1123. response = self.client.get(
  1124. self.url,
  1125. data={
  1126. "start": iso_format(self.day_ago),
  1127. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1128. "interval": "1h",
  1129. "yAxis": "count()",
  1130. "orderby": ["-count()"],
  1131. "field": ["count()", "transaction.status"],
  1132. "topEvents": 5,
  1133. },
  1134. format="json",
  1135. )
  1136. data = response.data
  1137. assert response.status_code == 200, response.content
  1138. assert len(data) == 1
  1139. assert "ok" in data
    @mock.patch("sentry.models.GroupManager.get_issues_mapping")
    def test_top_events_with_unknown_issue(self, mock_issues_mapping):
        """If the issue mapping resolves to None, the series is keyed "unknown"."""
        event = self.events[0]
        event_data = self.event_data[0]
        # ensure that the issue mapping returns None for the issue
        mock_issues_mapping.return_value = {event.group.id: None}
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "issue"],
                    "topEvents": 5,
                    # narrow the search to just one issue
                    "query": f"issue.id:{event.group.id}",
                },
                format="json",
            )
        assert response.status_code == 200, response.content
        data = response.data
        assert len(data) == 1
        results = data["unknown"]
        assert results["order"] == 0
        assert [{"count": event_data["count"]}] in [attrs for time, attrs in results["data"]]
    @mock.patch(
        "sentry.search.events.builder.raw_snql_query",
        side_effect=[{"data": [{"issue.id": 1}], "meta": []}, {"data": [], "meta": []}],
    )
    def test_top_events_with_issue_check_query_conditions(self, mock_query):
        """Intentionally separate from test_top_events_with_issue.
        This is to test against a bug where the condition for issues wasn't included and we'd be missing data for
        the interval since we'd cap out the max rows. This was not caught by the previous test since the results
        would still be correct given the smaller interval & lack of data
        """
        with self.feature(self.enabled_features):
            self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "issue"],
                    "topEvents": 5,
                    "query": "!event.type:transaction",
                },
                format="json",
            )
        # The second (timeseries) query must be constrained to the top issue ids.
        assert (
            Condition(Function("coalesce", [Column("group_id"), 0], "issue.id"), Op.IN, [1])
            in mock_query.mock_calls[1].args[0].query.where
        )
    def test_top_events_with_functions(self):
        """Ordering by an aggregate (p99) restricts results to transactions; only
        the single transaction fixture appears."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-p99()"],
                    "field": ["transaction", "avg(transaction.duration)", "p99()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 1
        results = data[self.transaction.transaction]
        assert results["order"] == 0
        assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
    def test_top_events_with_functions_on_different_transactions(self):
        """Transaction2 has less events, but takes longer so order should be self.transaction then transaction2"""
        transaction_data = load_data("transaction")
        transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
        # 4-minute duration vs the fixture's 2 minutes, so it wins the p99 ordering.
        transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
        transaction_data["transaction"] = "/foo_bar/"
        transaction2 = self.store_event(transaction_data, project_id=self.project.id)
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-p99()"],
                    "field": ["transaction", "avg(transaction.duration)", "p99()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 2
        results = data[self.transaction.transaction]
        assert results["order"] == 1
        assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
        results = data[transaction2.transaction]
        assert results["order"] == 0
        assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
    def test_top_events_with_query(self):
        """A user-supplied query filter narrows the top events to the matching
        transaction only."""
        transaction_data = load_data("transaction")
        transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
        transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
        transaction_data["transaction"] = "/foo_bar/"
        self.store_event(transaction_data, project_id=self.project.id)
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-p99()"],
                    "query": "transaction:/foo_bar/",
                    "field": ["transaction", "avg(transaction.duration)", "p99()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 1
        transaction2_data = data["/foo_bar/"]
        assert transaction2_data["order"] == 0
        assert [attrs for time, attrs in transaction2_data["data"]] == [
            [{"count": 1}],
            [{"count": 0}],
        ]
    def test_top_events_with_negated_condition(self):
        """Negating the top event's message shifts every remaining series up one
        rank and shrinks the "Other" bucket accordingly."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "query": f"!message:{self.events[0].message}",
                    "field": ["message", "count()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        # Events 1-4 now occupy ranks 0-3.
        for index, event in enumerate(self.events[1:5]):
            message = event.message or event.transaction
            results = data[message]
            assert results["order"] == index
            assert [{"count": self.event_data[index + 1]["count"]}] in [
                attrs for _, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
    def test_top_events_with_epm(self):
        """yAxis epm() reports events-per-minute: raw count divided by the bucket
        length in minutes (1h interval -> count / 60)."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "epm()",
                    "orderby": ["-count()"],
                    "field": ["message", "user.email", "count()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            results = data[
                ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
            ]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"] / (3600.0 / 60.0)}] in [
                attrs for time, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        # 3 events / 60 minutes
        assert [{"count": 0.05}] in [attrs for _, attrs in other["data"]]
    def test_top_events_with_multiple_yaxis(self):
        """With several yAxis values each top-event entry nests one sub-series per
        axis, each sub-series carrying its own order (axis position)."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": ["epm()", "count()"],
                    "orderby": ["-count()"],
                    "field": ["message", "user.email", "count()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            results = data[
                ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
            ]
            assert results["order"] == index
            # Sub-series order mirrors the yAxis list position.
            assert results["epm()"]["order"] == 0
            assert results["count()"]["order"] == 1
            assert [{"count": self.event_data[index]["count"] / (3600.0 / 60.0)}] in [
                attrs for time, attrs in results["epm()"]["data"]
            ]
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["count()"]["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert other["epm()"]["order"] == 0
        assert other["count()"]["order"] == 1
        assert [{"count": 0.05}] in [attrs for _, attrs in other["epm()"]["data"]]
        assert [{"count": 3}] in [attrs for _, attrs in other["count()"]["data"]]
    def test_top_events_with_boolean(self):
        """Boolean fields render as their string form in the series key — the
        fixtures have no device.charging tag, so every key starts with "False"."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "device.charging"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            results = data[",".join(["False", message])]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
    def test_top_events_with_error_unhandled(self):
        """Grouping by error.unhandled splits results into handled/unhandled
        buckets once an unhandled NDK event is stored."""
        self.login_as(user=self.user)
        project = self.create_project()
        prototype = load_data("android-ndk")
        prototype["event_id"] = "f" * 32
        prototype["message"] = "not handled"
        prototype["exception"]["values"][0]["value"] = "not handled"
        prototype["exception"]["values"][0]["mechanism"]["handled"] = False
        prototype["timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
        self.store_event(data=prototype, project_id=project.id)
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "error.unhandled"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 2
    def test_top_events_with_timestamp(self):
        """Grouping by timestamp keys each series on "message,timestamp"; the
        event.type:default query excludes the transaction fixture."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "query": "event.type:default",
                    "field": ["count()", "message", "timestamp"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        # Transactions won't be in the results because of the query
        # (drop the transaction fixture so indexes line up with the response).
        del self.events[4]
        del self.event_data[4]
        for index, event in enumerate(self.events[:5]):
            results = data[",".join([event.message, event.timestamp])]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
    def test_top_events_with_int(self):
        """Grouping by an integer field (transaction.duration) keys the series on
        its stringified value; only the transaction fixture has a duration."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "transaction.duration"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 1
        # 2-minute fixture duration = 120000 ms.
        results = data[",".join([self.transaction.transaction, "120000"])]
        assert results["order"] == 0
        assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
    def test_top_events_with_user(self):
        """Grouping by user keys series on the user tag ("email:..." / "ip:...");
        the two bar@example.com groups merge into a single count-7 series."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()", "user"],
                    "field": ["user", "count()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 5
        assert data["email:bar@example.com"]["order"] == 1
        assert [attrs for time, attrs in data["email:bar@example.com"]["data"]] == [
            [{"count": 7}],
            [{"count": 0}],
        ]
        # The transaction has no email, so it is keyed by IP.
        assert [attrs for time, attrs in data["ip:127.0.0.1"]["data"]] == [
            [{"count": 3}],
            [{"count": 0}],
        ]
    def test_top_events_with_user_and_email(self):
        """Grouping by both user and user.email joins the two values in the key;
        a missing email renders as the string "None"."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()", "user"],
                    "field": ["user", "user.email", "count()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 5
        assert data["email:bar@example.com,bar@example.com"]["order"] == 1
        assert [attrs for time, attrs in data["email:bar@example.com,bar@example.com"]["data"]] == [
            [{"count": 7}],
            [{"count": 0}],
        ]
        assert [attrs for time, attrs in data["ip:127.0.0.1,None"]["data"]] == [
            [{"count": 3}],
            [{"count": 0}],
        ]
    def test_top_events_with_user_display(self):
        """``user.display`` resolves to the user's email when present, otherwise the ip address."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["message", "user.display", "count()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        # 5 top-event series plus the aggregated "Other" series.
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            user = self.event_data[index]["data"]["user"]
            # Series key is "<message>,<user.display>": email preferred, falling
            # back to ip_address (the "None" default should not normally be hit).
            results = data[
                ",".join([message, user.get("email", None) or user.get("ip_address", "None")])
            ]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for _, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
    @pytest.mark.skip(reason="A query with group_id will not return transactions")
    def test_top_events_none_filter(self):
        """When a field is None in one of the top events, make sure we filter by it

        In this case event[4] is a transaction and has no issue
        """
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "issue"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 5
        for index, event in enumerate(self.events[:5]):
            # Transactions have no group; their issue column renders as "unknown".
            if event.group is None:
                issue = "unknown"
            else:
                issue = event.group.qualified_short_id
            results = data[issue]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]
    @pytest.mark.skip(reason="Invalid query - transaction events don't have group_id field")
    def test_top_events_one_field_with_none(self):
        """With only transaction events, the single "issue" grouping collapses to one "unknown" series."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    # restrict to transactions, which have no issue/group
                    "query": "event.type:transaction",
                    "field": ["count()", "issue"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 1
        results = data["unknown"]
        assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
        assert results["order"] == 0
    def test_top_events_with_error_handled(self):
        """Grouping by ``error.handled`` splits series on the mechanism's handled flag."""
        # Reuse the first fixture event's payload and store it twice: once with a
        # handled exception mechanism and once with an unhandled one, so both
        # error.handled values exist in the dataset.
        data = self.event_data[0]
        data["data"]["level"] = "error"
        data["data"]["exception"] = {
            "values": [
                {
                    "type": "ValidationError",
                    "value": "Bad request",
                    "mechanism": {"handled": True, "type": "generic"},
                }
            ]
        }
        self.store_event(data["data"], project_id=data["project"].id)
        data["data"]["exception"] = {
            "values": [
                {
                    "type": "ValidationError",
                    "value": "Bad request",
                    "mechanism": {"handled": False, "type": "generic"},
                }
            ]
        }
        self.store_event(data["data"], project_id=data["project"].id)
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "error.handled"],
                    "topEvents": 5,
                    # errors only; transactions have no error.handled
                    "query": "!event.type:transaction",
                },
                format="json",
            )
        assert response.status_code == 200, response.content
        data = response.data
        assert len(data) == 2
        # Series are keyed by the error.handled value — presumably "1" == handled
        # and "0" == unhandled; verify against error.handled column semantics.
        results = data["1"]
        assert [attrs for time, attrs in results["data"]] == [[{"count": 20}], [{"count": 6}]]
        results = data["0"]
        assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
  1667. def test_top_events_with_aggregate_condition(self):
  1668. with self.feature(self.enabled_features):
  1669. response = self.client.get(
  1670. self.url,
  1671. data={
  1672. "start": iso_format(self.day_ago),
  1673. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1674. "interval": "1h",
  1675. "yAxis": "count()",
  1676. "orderby": ["-count()"],
  1677. "field": ["message", "count()"],
  1678. "query": "count():>4",
  1679. "topEvents": 5,
  1680. },
  1681. format="json",
  1682. )
  1683. assert response.status_code == 200, response.content
  1684. data = response.data
  1685. assert len(data) == 3
  1686. for index, event in enumerate(self.events[:3]):
  1687. message = event.message or event.transaction
  1688. results = data[message]
  1689. assert results["order"] == index
  1690. assert [{"count": self.event_data[index]["count"]}] in [
  1691. attrs for time, attrs in results["data"]
  1692. ]
    @pytest.mark.xfail(reason="There's only 2 rows total, which mean there shouldn't be other")
    def test_top_events_with_to_other(self):
        """``to_other(release, ...)`` buckets events into "current" vs "others" release series."""
        version = "version -@'\" 1.2,3+(4)"
        # the quote must be escaped when embedded in the to_other() field string
        version_escaped = "version -@'\\\" 1.2,3+(4)"
        # every symbol is replaced with an underscore to make the alias
        version_alias = "version_______1_2_3__4_"
        # add an event in the current release
        event = self.event_data[0]
        event_data = event["data"].copy()
        event_data["event_id"] = uuid4().hex
        event_data["release"] = version
        self.store_event(event_data, project_id=event["project"].id)
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    # the double underscores around the version alias is because of a comma and quote
                    "orderby": [f"-to_other_release__{version_alias}__others_current"],
                    "field": [
                        "count()",
                        f'to_other(release,"{version_escaped}",others,current)',
                    ],
                    "topEvents": 2,
                },
                format="json",
            )
        assert response.status_code == 200, response.content
        data = response.data
        assert len(data) == 2
        # Exactly one event carries the "current" release (stored above).
        current = data["current"]
        assert current["order"] == 1
        assert sum(attrs[0]["count"] for _, attrs in current["data"]) == 1
        # Everything else falls into "others" and totals the fixture counts.
        others = data["others"]
        assert others["order"] == 0
        assert sum(attrs[0]["count"] for _, attrs in others["data"]) == sum(
            event_data["count"] for event_data in self.event_data
        )
  1734. def test_top_events_with_equations(self):
  1735. with self.feature(self.enabled_features):
  1736. response = self.client.get(
  1737. self.url,
  1738. data={
  1739. "start": iso_format(self.day_ago),
  1740. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1741. "interval": "1h",
  1742. "yAxis": "equation|count() / 100",
  1743. "orderby": ["-count()"],
  1744. "field": ["count()", "message", "user.email", "equation|count() / 100"],
  1745. "topEvents": 5,
  1746. },
  1747. format="json",
  1748. )
  1749. data = response.data
  1750. assert response.status_code == 200, response.content
  1751. assert len(data) == 6
  1752. for index, event in enumerate(self.events[:5]):
  1753. message = event.message or event.transaction
  1754. results = data[
  1755. ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
  1756. ]
  1757. assert results["order"] == index
  1758. assert [{"count": self.event_data[index]["count"] / 100}] in [
  1759. attrs for time, attrs in results["data"]
  1760. ]
  1761. other = data["Other"]
  1762. assert other["order"] == 5
  1763. assert [{"count": 0.03}] in [attrs for _, attrs in other["data"]]
    @mock.patch("sentry.snuba.discover.bulk_snql_query", return_value=[{"data": [], "meta": []}])
    @mock.patch(
        "sentry.search.events.builder.raw_snql_query", return_value={"data": [], "meta": []}
    )
    def test_invalid_interval(self, mock_raw_query, mock_bulk_query):
        """An interval producing too many points is reset to the window's default granularity.

        Snuba is mocked out, so assertions inspect the granularity on the SnQL
        queries that were issued rather than any response data.
        """
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    # 7,200 points for each event
                    "start": iso_format(before_now(seconds=7200)),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                },
            )
        assert response.status_code == 200
        # Without topEvents the request goes through the bulk query path.
        assert mock_bulk_query.call_count == 1
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    "start": iso_format(before_now(seconds=7200)),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                    # 7,200 points for each event * 2, should error
                    "topEvents": 2,
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 2
        # Should've reset to the default for between 1 and 24h
        assert mock_raw_query.mock_calls[1].args[0].query.granularity.granularity == 300
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    # 1999 points * 5 events should just be enough to not error
                    "start": iso_format(before_now(seconds=1999)),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                    "topEvents": 5,
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 4
        # Should've left the interval alone since we're just below the limit
        assert mock_raw_query.mock_calls[3].args[0].query.granularity.granularity == 1
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    "start": iso_format(before_now(hours=24)),
                    "field": ["count()", "issue"],
                    "query": "",
                    # a zero-length interval is invalid and must also fall back
                    "interval": "0d",
                    "yAxis": "count()",
                    "topEvents": 5,
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 6
        # Should've default to 24h's default of 5m
        assert mock_raw_query.mock_calls[5].args[0].query.granularity.granularity == 300
    def test_top_events_timestamp_fields(self):
        """Grouping by timestamp, timestamp.to_hour and timestamp.to_day yields one series per stored timestamp."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "timestamp", "timestamp.to_hour", "timestamp.to_day"],
                    "topEvents": 5,
                },
            )
        assert response.status_code == 200
        data = response.data
        assert len(data) == 3
        # these are the timestamps corresponding to the events stored
        timestamps = [
            self.day_ago + timedelta(minutes=2),
            self.day_ago + timedelta(hours=1, minutes=2),
            self.day_ago + timedelta(minutes=4),
        ]
        timestamp_hours = [timestamp.replace(minute=0, second=0) for timestamp in timestamps]
        timestamp_days = [timestamp.replace(hour=0, minute=0, second=0) for timestamp in timestamps]
        for ts, ts_hr, ts_day in zip(timestamps, timestamp_hours, timestamp_days):
            # NOTE(review): key components are ordered timestamp, to_day, to_hour —
            # presumably the endpoint's grouping order, not the requested field order.
            key = f"{iso_format(ts)}+00:00,{iso_format(ts_day)}+00:00,{iso_format(ts_hr)}+00:00"
            # total fixture count for events stored at exactly this timestamp
            count = sum(
                e["count"] for e in self.event_data if e["data"]["timestamp"] == iso_format(ts)
            )
            results = data[key]
            assert [{"count": count}] in [attrs for time, attrs in results["data"]]
  1874. def test_top_events_other_with_matching_columns(self):
  1875. with self.feature(self.enabled_features):
  1876. response = self.client.get(
  1877. self.url,
  1878. data={
  1879. "start": iso_format(self.day_ago),
  1880. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1881. "interval": "1h",
  1882. "yAxis": "count()",
  1883. "orderby": ["-count()"],
  1884. "field": ["count()", "tags[shared-tag]", "message"],
  1885. "topEvents": 5,
  1886. },
  1887. format="json",
  1888. )
  1889. data = response.data
  1890. assert response.status_code == 200, response.content
  1891. assert len(data) == 6
  1892. for index, event in enumerate(self.events[:5]):
  1893. message = event.message or event.transaction
  1894. results = data[",".join([message, "yup"])]
  1895. assert results["order"] == index
  1896. assert [{"count": self.event_data[index]["count"]}] in [
  1897. attrs for _, attrs in results["data"]
  1898. ]
  1899. other = data["Other"]
  1900. assert other["order"] == 5
  1901. assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
    def test_top_events_with_field_overlapping_other_key(self):
        """A real field value equal to OTHER_KEY must not collide with the synthetic "Other" series."""
        # Store transactions whose transaction name is literally OTHER_KEY.
        transaction_data = load_data("transaction")
        transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
        transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
        transaction_data["transaction"] = OTHER_KEY
        for i in range(5):
            data = transaction_data.copy()
            # unique 32-char event id and 16-char span id per copy
            data["event_id"] = "ab" + f"{i}" * 30
            data["contexts"]["trace"]["span_id"] = "ab" + f"{i}" * 14
            self.store_event(data, project_id=self.project.id)
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        # The colliding top event is disambiguated by appending its field name.
        assert f"{OTHER_KEY} (message)" in data
        results = data[f"{OTHER_KEY} (message)"]
        assert [{"count": 5}] in [attrs for _, attrs in results["data"]]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 4}] in [attrs for _, attrs in other["data"]]
  1935. def test_top_events_can_exclude_other_series(self):
  1936. with self.feature(self.enabled_features):
  1937. response = self.client.get(
  1938. self.url,
  1939. data={
  1940. "start": iso_format(self.day_ago),
  1941. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1942. "interval": "1h",
  1943. "yAxis": "count()",
  1944. "orderby": ["count()"],
  1945. "field": ["count()", "message"],
  1946. "topEvents": 5,
  1947. "excludeOther": "1",
  1948. },
  1949. format="json",
  1950. )
  1951. data = response.data
  1952. assert response.status_code == 200, response.content
  1953. assert len(data) == 5
  1954. assert "Other" not in response.data
  1955. def test_top_events_with_equation_including_unselected_fields_passes_field_validation(self):
  1956. with self.feature(self.enabled_features):
  1957. response = self.client.get(
  1958. self.url,
  1959. data={
  1960. "start": iso_format(self.day_ago),
  1961. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1962. "interval": "1h",
  1963. "yAxis": "count()",
  1964. "orderby": ["-equation[0]"],
  1965. "field": ["count()", "message", "equation|count_unique(user) * 2"],
  1966. "topEvents": 5,
  1967. },
  1968. format="json",
  1969. )
  1970. data = response.data
  1971. assert response.status_code == 200, response.content
  1972. assert len(data) == 6
  1973. other = data["Other"]
  1974. assert other["order"] == 5
  1975. assert [{"count": 4}] in [attrs for _, attrs in other["data"]]
  1976. def test_top_events_boolean_condition_and_project_field(self):
  1977. with self.feature(self.enabled_features):
  1978. response = self.client.get(
  1979. self.url,
  1980. data={
  1981. "start": iso_format(self.day_ago),
  1982. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1983. "interval": "1h",
  1984. "yAxis": "count()",
  1985. "orderby": ["-count()"],
  1986. "field": ["project", "count()"],
  1987. "topEvents": 5,
  1988. "query": "event.type:transaction (transaction:*a OR transaction:b*)",
  1989. },
  1990. format="json",
  1991. )
  1992. assert response.status_code == 200