# test_organization_events_stats.py

import uuid
from datetime import timedelta
from unittest import mock
from uuid import uuid4

import pytest
from dateutil.parser import parse as parse_date
from django.urls import reverse
from pytz import utc
from snuba_sdk.column import Column
from snuba_sdk.conditions import Condition, Op
from snuba_sdk.function import Function

from sentry.constants import MAX_TOP_EVENTS
from sentry.models.transaction_threshold import ProjectTransactionThreshold, TransactionMetric
from sentry.snuba.discover import OTHER_KEY
from sentry.testutils import APITestCase, SnubaTestCase
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.utils.samples import load_data

pytestmark = pytest.mark.sentry_metrics


class OrganizationEventsStatsEndpointTest(APITestCase, SnubaTestCase):
    endpoint = "sentry-api-0-organization-events-stats"

    def setUp(self):
        super().setUp()
        self.login_as(user=self.user)
        self.authed_user = self.user
        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        self.project = self.create_project()
        self.project2 = self.create_project()
        self.user = self.create_user()
        self.user2 = self.create_user()
        self.store_event(
            data={
                "event_id": "a" * 32,
                "message": "very bad",
                "timestamp": iso_format(self.day_ago + timedelta(minutes=1)),
                "fingerprint": ["group1"],
                "tags": {"sentry:user": self.user.email},
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "event_id": "b" * 32,
                "message": "oh my",
                "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=1)),
                "fingerprint": ["group2"],
                "tags": {"sentry:user": self.user2.email},
            },
            project_id=self.project2.id,
        )
        self.store_event(
            data={
                "event_id": "c" * 32,
                "message": "very bad",
                "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=2)),
                "fingerprint": ["group2"],
                "tags": {"sentry:user": self.user2.email},
            },
            project_id=self.project2.id,
        )
        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_slug": self.project.organization.slug},
        )
        self.features = {}

    def do_request(self, data, url=None, features=None):
        if features is None:
            features = {"organizations:discover-basic": True}
        features.update(self.features)
        with self.feature(features):
            return self.client.get(self.url if url is None else url, data=data, format="json")

    def test_simple(self):
        response = self.do_request(
            {
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]

    def test_misaligned_last_bucket(self):
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago - timedelta(minutes=30)),
                "end": iso_format(self.day_ago + timedelta(hours=1, minutes=30)),
                "interval": "1h",
                "partial": "1",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 0}],
            [{"count": 1}],
            [{"count": 2}],
        ]

    def test_no_projects(self):
        org = self.create_organization(owner=self.user)
        self.login_as(user=self.user)
        url = reverse(
            "sentry-api-0-organization-events-stats", kwargs={"organization_slug": org.slug}
        )
        response = self.do_request({}, url)
        assert response.status_code == 200, response.content
        assert len(response.data["data"]) == 0

    def test_user_count(self):
        self.store_event(
            data={
                "event_id": "d" * 32,
                "message": "something",
                "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                "tags": {"sentry:user": self.user2.email},
                "fingerprint": ["group2"],
            },
            project_id=self.project2.id,
        )
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "user_count",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 2}], [{"count": 1}]]

    def test_discover2_backwards_compatibility(self):
        response = self.do_request(
            data={
                "project": self.project.id,
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "user_count",
            },
        )
        assert response.status_code == 200, response.content
        assert len(response.data["data"]) > 0
        response = self.do_request(
            data={
                "project": self.project.id,
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "event_count",
            },
        )
        assert response.status_code == 200, response.content
        assert len(response.data["data"]) > 0

    def test_with_event_count_flag(self):
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "event_count",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]

    def test_performance_view_feature(self):
        response = self.do_request(
            data={
                "end": iso_format(before_now()),
                "start": iso_format(before_now(hours=2)),
                "query": "project_id:1",
                "interval": "30m",
                "yAxis": "count()",
            },
            features={
                "organizations:performance-view": True,
                "organizations:discover-basic": False,
            },
        )
        assert response.status_code == 200, response.content

    def test_aggregate_function_apdex(self):
        project1 = self.create_project()
        project2 = self.create_project()
        events = [
            ("one", 400, project1.id),
            ("one", 400, project1.id),
            ("two", 3000, project2.id),
            ("two", 1000, project2.id),
            ("three", 3000, project2.id),
        ]
        for idx, event in enumerate(events):
            data = load_data(
                "transaction",
                start_timestamp=self.day_ago + timedelta(minutes=(1 + idx)),
                timestamp=self.day_ago + timedelta(minutes=(1 + idx), milliseconds=event[1]),
            )
            data["event_id"] = f"{idx}" * 32
            data["transaction"] = f"/apdex/new/{event[0]}"
            data["user"] = {"email": f"{idx}@example.com"}
            self.store_event(data, project_id=event[2])

        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "apdex()",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 0.3}],
            [{"count": 0}],
        ]

        ProjectTransactionThreshold.objects.create(
            project=project1,
            organization=project1.organization,
            threshold=100,
            metric=TransactionMetric.DURATION.value,
        )
        ProjectTransactionThreshold.objects.create(
            project=project2,
            organization=project1.organization,
            threshold=100,
            metric=TransactionMetric.DURATION.value,
        )
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "apdex()",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 0.2}],
            [{"count": 0}],
        ]

        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": ["user_count", "apdex()"],
            },
        )
        assert response.status_code == 200, response.content
        assert response.data["user_count"]["order"] == 0
        assert [attrs for time, attrs in response.data["user_count"]["data"]] == [
            [{"count": 5}],
            [{"count": 0}],
        ]
        assert response.data["apdex()"]["order"] == 1
        assert [attrs for time, attrs in response.data["apdex()"]["data"]] == [
            [{"count": 0.2}],
            [{"count": 0}],
        ]
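        # A quick sketch of the apdex arithmetic asserted above (a reading aid, not
        # part of the original test; it assumes Sentry's default project threshold
        # of 300ms): apdex = (satisfied + tolerable / 2) / total, where satisfied
        # means duration <= threshold and tolerable means duration <= 4 * threshold.
        # With durations 400, 400, 3000, 1000, 3000 ms and the 300ms default that is
        # (0 + 3 / 2) / 5 == 0.3; once both thresholds drop to 100ms only the two
        # 400ms transactions remain tolerable, giving (0 + 2 / 2) / 5 == 0.2.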

    def test_aggregate_function_count(self):
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]

    def test_invalid_aggregate(self):
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "rubbish",
            },
        )
        assert response.status_code == 400, response.content

    def test_aggregate_function_user_count(self):
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count_unique(user)",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 1}]]

    def test_aggregate_invalid(self):
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "nope(lol)",
            },
        )
        assert response.status_code == 400, response.content

    def test_throughput_epm_hour_rollup(self):
        project = self.create_project()
        # Each of these denotes how many events to create in each hour
        event_counts = [6, 0, 6, 3, 0, 3]
        for hour, count in enumerate(event_counts):
            for minute in range(count):
                self.store_event(
                    data={
                        "event_id": str(uuid.uuid1()),
                        "message": "very bad",
                        "timestamp": iso_format(
                            self.day_ago + timedelta(hours=hour, minutes=minute)
                        ),
                        "fingerprint": ["group1"],
                        "tags": {"sentry:user": self.user.email},
                    },
                    project_id=project.id,
                )
        for axis in ["epm()", "tpm()"]:
            response = self.do_request(
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=6)),
                    "interval": "1h",
                    "yAxis": axis,
                    "project": project.id,
                },
            )
            assert response.status_code == 200, response.content
            data = response.data["data"]
            assert len(data) == 6
            rows = data[0:6]
            for test in zip(event_counts, rows):
                assert test[1][1][0]["count"] == test[0] / (3600.0 / 60.0)
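        # Rough arithmetic behind the epm() expectation above (a reading aid, not
        # part of the original test): each bucket spans one hour, so
        # epm = events_in_bucket / (3600s / 60) = events_in_bucket / 60;
        # e.g. a 6-event hour is expected to read 6 / 60 == 0.1 events per minute.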

    def test_throughput_epm_day_rollup(self):
        project = self.create_project()
        # Each of these denotes how many events to create in each hour
        event_counts = [6, 0, 6, 3, 0, 3]
        for hour, count in enumerate(event_counts):
            for minute in range(count):
                self.store_event(
                    data={
                        "event_id": str(uuid.uuid1()),
                        "message": "very bad",
                        "timestamp": iso_format(
                            self.day_ago + timedelta(hours=hour, minutes=minute)
                        ),
                        "fingerprint": ["group1"],
                        "tags": {"sentry:user": self.user.email},
                    },
                    project_id=project.id,
                )
        for axis in ["epm()", "tpm()"]:
            response = self.do_request(
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=24)),
                    "interval": "24h",
                    "yAxis": axis,
                    "project": project.id,
                },
            )
            assert response.status_code == 200, response.content
            data = response.data["data"]
            assert len(data) == 2
            assert data[0][1][0]["count"] == sum(event_counts) / (86400.0 / 60.0)

    def test_throughput_eps_minute_rollup(self):
        project = self.create_project()
        # Each of these denotes how many events to create in each minute
        event_counts = [6, 0, 6, 3, 0, 3]
        for minute, count in enumerate(event_counts):
            for second in range(count):
                self.store_event(
                    data={
                        "event_id": str(uuid.uuid1()),
                        "message": "very bad",
                        "timestamp": iso_format(
                            self.day_ago + timedelta(minutes=minute, seconds=second)
                        ),
                        "fingerprint": ["group1"],
                        "tags": {"sentry:user": self.user.email},
                    },
                    project_id=project.id,
                )
        for axis in ["eps()", "tps()"]:
            response = self.do_request(
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(minutes=6)),
                    "interval": "1m",
                    "yAxis": axis,
                    "project": project.id,
                },
            )
            assert response.status_code == 200, response.content
            data = response.data["data"]
            assert len(data) == 6
            rows = data[0:6]
            for test in zip(event_counts, rows):
                assert test[1][1][0]["count"] == test[0] / 60.0
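        # Same idea for eps(): with 1m buckets the rate is events_in_bucket / 60s,
        # so the 6-event minute above should read 6 / 60 == 0.1 events per second
        # (again, a reading aid rather than part of the original test).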

    def test_throughput_eps_no_rollup(self):
        project = self.create_project()
        # Each of these denotes how many events to create in each minute
        event_counts = [6, 0, 6, 3, 0, 3]
        for minute, count in enumerate(event_counts):
            for second in range(count):
                self.store_event(
                    data={
                        "event_id": str(uuid.uuid1()),
                        "message": "very bad",
                        "timestamp": iso_format(
                            self.day_ago + timedelta(minutes=minute, seconds=second)
                        ),
                        "fingerprint": ["group1"],
                        "tags": {"sentry:user": self.user.email},
                    },
                    project_id=project.id,
                )
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(minutes=1)),
                "interval": "1s",
                "yAxis": "eps()",
                "project": project.id,
            },
        )
        assert response.status_code == 200, response.content
        data = response.data["data"]
        # expect 60 data points between time span of 0 and 60 seconds
        assert len(data) == 60
        rows = data[0:6]
        for row in rows:
            assert row[1][0]["count"] == 1

    def test_transaction_events(self):
        prototype = {
            "type": "transaction",
            "transaction": "api.issue.delete",
            "spans": [],
            "contexts": {"trace": {"op": "foobar", "trace_id": "a" * 32, "span_id": "a" * 16}},
            "tags": {"important": "yes"},
        }
        fixtures = (
            ("d" * 32, before_now(minutes=32)),
            ("e" * 32, before_now(hours=1, minutes=2)),
            ("f" * 32, before_now(hours=1, minutes=35)),
        )
        for fixture in fixtures:
            data = prototype.copy()
            data["event_id"] = fixture[0]
            data["timestamp"] = iso_format(fixture[1])
            data["start_timestamp"] = iso_format(fixture[1] - timedelta(seconds=1))
            self.store_event(data=data, project_id=self.project.id)
        response = self.do_request(
            data={
                "project": self.project.id,
                "end": iso_format(before_now()),
                "start": iso_format(before_now(hours=2)),
                "query": "event.type:transaction",
                "interval": "30m",
                "yAxis": "count()",
            },
        )
        assert response.status_code == 200, response.content
        items = [item for time, item in response.data["data"] if item]
        # We could get more results depending on where the 30 min windows land.
        assert len(items) >= 3

    def test_project_id_query_filter(self):
        response = self.do_request(
            data={
                "end": iso_format(before_now()),
                "start": iso_format(before_now(hours=2)),
                "query": "project_id:1",
                "interval": "30m",
                "yAxis": "count()",
            },
        )
        assert response.status_code == 200

    def test_latest_release_query_filter(self):
        response = self.do_request(
            data={
                "project": self.project.id,
                "end": iso_format(before_now()),
                "start": iso_format(before_now(hours=2)),
                "query": "release:latest",
                "interval": "30m",
                "yAxis": "count()",
            },
        )
        assert response.status_code == 200

    def test_conditional_filter(self):
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "query": "id:{} OR id:{}".format("a" * 32, "b" * 32),
                "interval": "30m",
                "yAxis": "count()",
            },
        )
        assert response.status_code == 200, response.content
        data = response.data["data"]
        assert len(data) == 4
        assert data[0][1][0]["count"] == 1
        assert data[2][1][0]["count"] == 1

    def test_simple_multiple_yaxis(self):
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": ["user_count", "event_count"],
            },
        )
        assert response.status_code == 200, response.content
        assert response.data["user_count"]["order"] == 0
        assert [attrs for time, attrs in response.data["user_count"]["data"]] == [
            [{"count": 1}],
            [{"count": 1}],
        ]
        assert response.data["event_count"]["order"] == 1
        assert [attrs for time, attrs in response.data["event_count"]["data"]] == [
            [{"count": 1}],
            [{"count": 2}],
        ]

    def test_equation_yaxis(self):
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": ["equation|count() / 100"],
            },
        )
        assert response.status_code == 200, response.content
        assert len(response.data["data"]) == 2
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 0.01}],
            [{"count": 0.02}],
        ]

    def test_equation_mixed_multi_yaxis(self):
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": ["count()", "equation|count() * 100"],
            },
        )
        assert response.status_code == 200, response.content
        assert response.data["count()"]["order"] == 0
        assert [attrs for time, attrs in response.data["count()"]["data"]] == [
            [{"count": 1}],
            [{"count": 2}],
        ]
        assert response.data["equation|count() * 100"]["order"] == 1
        assert [attrs for time, attrs in response.data["equation|count() * 100"]["data"]] == [
            [{"count": 100}],
            [{"count": 200}],
        ]

    def test_equation_multi_yaxis(self):
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": ["equation|count() / 100", "equation|count() * 100"],
            },
        )
        assert response.status_code == 200, response.content
        assert response.data["equation|count() / 100"]["order"] == 0
        assert [attrs for time, attrs in response.data["equation|count() / 100"]["data"]] == [
            [{"count": 0.01}],
            [{"count": 0.02}],
        ]
        assert response.data["equation|count() * 100"]["order"] == 1
        assert [attrs for time, attrs in response.data["equation|count() * 100"]["data"]] == [
            [{"count": 100}],
            [{"count": 200}],
        ]

    def test_large_interval_no_drop_values(self):
        self.store_event(
            data={
                "event_id": "d" * 32,
                "message": "not good",
                "timestamp": iso_format(self.day_ago - timedelta(minutes=10)),
                "fingerprint": ["group3"],
            },
            project_id=self.project.id,
        )
        response = self.do_request(
            data={
                "project": self.project.id,
                "end": iso_format(self.day_ago),
                "start": iso_format(self.day_ago - timedelta(hours=24)),
                "query": 'message:"not good"',
                "interval": "1d",
                "yAxis": "count()",
            },
        )
        assert response.status_code == 200
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 0}], [{"count": 1}]]

    @mock.patch("sentry.snuba.discover.timeseries_query", return_value={})
    def test_multiple_yaxis_only_one_query(self, mock_query):
        self.do_request(
            data={
                "project": self.project.id,
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": ["user_count", "event_count", "epm()", "eps()"],
            },
        )
        assert mock_query.call_count == 1

    @mock.patch("sentry.snuba.discover.bulk_snql_query", return_value=[{"data": []}])
    def test_invalid_interval(self, mock_query):
        self.do_request(
            data={
                "end": iso_format(before_now()),
                "start": iso_format(before_now(hours=24)),
                "query": "",
                "interval": "1s",
                "yAxis": "count()",
            },
        )
        assert mock_query.call_count == 1
        # Should've reset to the default for 24h
        assert mock_query.mock_calls[0].args[0][0].query.granularity.granularity == 300
        self.do_request(
            data={
                "end": iso_format(before_now()),
                "start": iso_format(before_now(hours=24)),
                "query": "",
                "interval": "0d",
                "yAxis": "count()",
            },
        )
        assert mock_query.call_count == 2
        # Should've reset to the default for 24h
        assert mock_query.mock_calls[1].args[0][0].query.granularity.granularity == 300

    def test_out_of_retention(self):
        with self.options({"system.event-retention-days": 10}):
            response = self.do_request(
                data={
                    "start": iso_format(before_now(days=20)),
                    "end": iso_format(before_now(days=15)),
                    "query": "",
                    "interval": "30m",
                    "yAxis": "count()",
                },
            )
        assert response.status_code == 400

    @mock.patch("sentry.utils.snuba.quantize_time")
    def test_quantize_dates(self, mock_quantize):
        mock_quantize.return_value = before_now(days=1).replace(tzinfo=utc)
        # Don't quantize short time periods
        self.do_request(
            data={"statsPeriod": "1h", "query": "", "interval": "30m", "yAxis": "count()"},
        )
        # Don't quantize absolute date periods
        self.do_request(
            data={
                "start": iso_format(before_now(days=20)),
                "end": iso_format(before_now(days=15)),
                "query": "",
                "interval": "30m",
                "yAxis": "count()",
            },
        )
        assert len(mock_quantize.mock_calls) == 0
        # Quantize long date periods
        self.do_request(
            data={"statsPeriod": "90d", "query": "", "interval": "30m", "yAxis": "count()"},
        )
        assert len(mock_quantize.mock_calls) == 2

    def test_with_zerofill(self):
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "30m",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 1}],
            [{"count": 0}],
            [{"count": 2}],
            [{"count": 0}],
        ]

    def test_without_zerofill(self):
        start = iso_format(self.day_ago)
        end = iso_format(self.day_ago + timedelta(hours=2))
        response = self.do_request(
            data={
                "start": start,
                "end": end,
                "interval": "30m",
                "withoutZerofill": "1",
            },
            features={
                "organizations:performance-chart-interpolation": True,
                "organizations:discover-basic": True,
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 1}],
            [{"count": 2}],
        ]
        assert response.data["start"] == parse_date(start).timestamp()
        assert response.data["end"] == parse_date(end).timestamp()

    def test_comparison(self):
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=1)),
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=2)),
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, hours=1, minutes=1)),
            },
            project_id=self.project2.id,
        )
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "comparisonDelta": int(timedelta(days=1).total_seconds()),
            }
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 1, "comparisonCount": 2}],
            [{"count": 2, "comparisonCount": 1}],
        ]
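        # How the comparison series lines up (a reading aid, not original test
        # code): comparisonDelta shifts each bucket back by the given number of
        # seconds, so every bucket reports its own count plus comparisonCount from
        # the same bucket one day earlier -- hence (1, 2) and (2, 1) here.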

    def test_comparison_invalid(self):
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "comparisonDelta": "17h",
            },
        )
        assert response.status_code == 400, response.content
        assert response.data["detail"] == "comparisonDelta must be an integer"

        start = before_now(days=85)
        end = start + timedelta(days=7)
        with self.options({"system.event-retention-days": 90}):
            response = self.do_request(
                data={
                    "start": iso_format(start),
                    "end": iso_format(end),
                    "interval": "1h",
                    "comparisonDelta": int(timedelta(days=7).total_seconds()),
                }
            )
            assert response.status_code == 400, response.content
            assert response.data["detail"] == "Comparison period is outside retention window"

    def test_equations_divide_by_zero(self):
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                # force a 0 in the denominator by doing 1 - 1
                # since a 0 literal is illegal as the denominator
                "yAxis": ["equation|count() / (1-1)"],
            },
        )
        assert response.status_code == 200, response.content
        assert len(response.data["data"]) == 2
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": None}],
            [{"count": None}],
        ]


class OrganizationEventsStatsTopNEvents(APITestCase, SnubaTestCase):
    def setUp(self):
        super().setUp()
        self.login_as(user=self.user)
        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        self.project = self.create_project()
        self.project2 = self.create_project()
        self.user2 = self.create_user()
        transaction_data = load_data("transaction")
        transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
        transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=4))
        transaction_data["tags"] = {"shared-tag": "yup"}
        self.event_data = [
            {
                "data": {
                    "message": "poof",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "user": {"email": self.user.email},
                    "tags": {"shared-tag": "yup"},
                    "fingerprint": ["group1"],
                },
                "project": self.project2,
                "count": 7,
            },
            {
                "data": {
                    "message": "voof",
                    "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=2)),
                    "fingerprint": ["group2"],
                    "user": {"email": self.user2.email},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project2,
                "count": 6,
            },
            {
                "data": {
                    "message": "very bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group3"],
                    "user": {"email": "foo@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 5,
            },
            {
                "data": {
                    "message": "oh no",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group4"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 4,
            },
            {"data": transaction_data, "project": self.project, "count": 3},
            # Not in the top 5
            {
                "data": {
                    "message": "sorta bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group5"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 2,
            },
            {
                "data": {
                    "message": "not so bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group6"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 1,
            },
        ]
        self.events = []
        for index, event_data in enumerate(self.event_data):
            data = event_data["data"].copy()
            event = {}
            for i in range(event_data["count"]):
                data["event_id"] = f"{index}{i}" * 16
                event = self.store_event(data, project_id=event_data["project"].id)
            self.events.append(event)
        self.transaction = self.events[4]
        self.enabled_features = {
            "organizations:discover-basic": True,
        }
        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_slug": self.project.organization.slug},
        )
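        # Note on the fixture above (added commentary, not original test code): the
        # five highest-count series (7, 6, 5, 4 and the 3 transaction events) become
        # the top events, while the two remaining groups (counts 2 and 1) get rolled
        # into the "Other" series -- which is why tests such as test_simple_top_events
        # below expect [{"count": 3}] for "Other".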

    def test_no_top_events_with_project_field(self):
        project = self.create_project()
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    # make sure to query the project with 0 events
                    "project": project.id,
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "project"],
                    "topEvents": 5,
                },
                format="json",
            )
        assert response.status_code == 200, response.content
        # When there are no top events, we do not return an empty dict.
        # Instead, we return a single zero-filled series for an empty graph.
        data = response.data["data"]
        assert [attrs for time, attrs in data] == [[{"count": 0}], [{"count": 0}]]

    def test_no_top_events(self):
        project = self.create_project()
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    # make sure to query the project with 0 events
                    "project": project.id,
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "user.email"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data["data"]
        assert response.status_code == 200, response.content
        # When there are no top events, we do not return an empty dict.
        # Instead, we return a single zero-filled series for an empty graph.
        assert [attrs for time, attrs in data] == [[{"count": 0}], [{"count": 0}]]

    def test_no_top_events_with_multi_axis(self):
        project = self.create_project()
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    # make sure to query the project with 0 events
                    "project": project.id,
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": ["count()", "count_unique(user)"],
                    "orderby": ["-count()"],
                    "field": ["count()", "count_unique(user)", "message", "user.email"],
                    "topEvents": 5,
                },
                format="json",
            )
        assert response.status_code == 200
        data = response.data[""]
        assert [attrs for time, attrs in data["count()"]["data"]] == [
            [{"count": 0}],
            [{"count": 0}],
        ]
        assert [attrs for time, attrs in data["count_unique(user)"]["data"]] == [
            [{"count": 0}],
            [{"count": 0}],
        ]

    def test_simple_top_events(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "user.email"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            results = data[
                ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
            ]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for _, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 3}] in [attrs for _, attrs in other["data"]]

    def test_top_events_limits(self):
        data = {
            "start": iso_format(self.day_ago),
            "end": iso_format(self.day_ago + timedelta(hours=2)),
            "interval": "1h",
            "yAxis": "count()",
            "orderby": ["-count()"],
            "field": ["count()", "message", "user.email"],
        }
        with self.feature(self.enabled_features):
            data["topEvents"] = MAX_TOP_EVENTS + 1
            response = self.client.get(self.url, data, format="json")
            assert response.status_code == 400
            data["topEvents"] = 0
            response = self.client.get(self.url, data, format="json")
            assert response.status_code == 400
            data["topEvents"] = "a"
            response = self.client.get(self.url, data, format="json")
            assert response.status_code == 400

    def test_top_events_with_projects(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "project"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            results = data[",".join([message, event.project.slug])]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 3}] in [attrs for _, attrs in other["data"]]

    def test_top_events_with_issue(self):
        # delete a group to make sure if this happens the value becomes unknown
        event_group = self.events[0].group
        event_group.delete()
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "issue"],
                    "topEvents": 5,
                    "query": "!event.type:transaction",
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        for index, event in enumerate(self.events[:4]):
            message = event.message
            # Because we deleted the group for event 0
            if index == 0 or event.group is None:
                issue = "unknown"
            else:
                issue = event.group.qualified_short_id
            results = data[",".join([issue, message])]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 1}] in [attrs for _, attrs in other["data"]]

    @mock.patch("sentry.models.GroupManager.get_issues_mapping")
    def test_top_events_with_unknown_issue(self, mock_issues_mapping):
        event = self.events[0]
        event_data = self.event_data[0]
        # ensure that the issue mapping returns None for the issue
        mock_issues_mapping.return_value = {event.group.id: None}
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "issue"],
                    "topEvents": 5,
                    # narrow the search to just one issue
                    "query": f"issue.id:{event.group.id}",
                },
                format="json",
            )
        assert response.status_code == 200, response.content
        data = response.data
        assert len(data) == 1
        results = data["unknown"]
        assert results["order"] == 0
        assert [{"count": event_data["count"]}] in [attrs for time, attrs in results["data"]]

    @mock.patch(
        "sentry.search.events.builder.raw_snql_query",
        side_effect=[{"data": [{"issue.id": 1}], "meta": []}, {"data": [], "meta": []}],
    )
    def test_top_events_with_issue_check_query_conditions(self, mock_query):
        """Intentionally kept separate from test_top_events_with_issue.

        This guards against a bug where the issue condition wasn't included in the
        query, so we'd miss data for the interval once we capped out the max rows.
        The previous test didn't catch it because its results were still correct
        given the smaller interval and lack of data.
        """
        with self.feature(self.enabled_features):
            self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "issue"],
                    "topEvents": 5,
                    "query": "!event.type:transaction",
                },
                format="json",
            )
        assert (
            Condition(Function("coalesce", [Column("group_id"), 0], "issue.id"), Op.IN, [1])
            in mock_query.mock_calls[1].args[0].query.where
        )

    def test_top_events_with_functions(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-p99()"],
                    "field": ["transaction", "avg(transaction.duration)", "p99()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 1
        results = data[self.transaction.transaction]
        assert results["order"] == 0
        assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]

    def test_top_events_with_functions_on_different_transactions(self):
        """transaction2 has fewer events but a longer duration, so ordered by
        -p99() it should come first, followed by self.transaction."""
        transaction_data = load_data("transaction")
        transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
        transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
        transaction_data["transaction"] = "/foo_bar/"
        transaction2 = self.store_event(transaction_data, project_id=self.project.id)
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-p99()"],
                    "field": ["transaction", "avg(transaction.duration)", "p99()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 2
        results = data[self.transaction.transaction]
        assert results["order"] == 1
        assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
        results = data[transaction2.transaction]
        assert results["order"] == 0
        assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]

    def test_top_events_with_query(self):
        transaction_data = load_data("transaction")
        transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
        transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
        transaction_data["transaction"] = "/foo_bar/"
        self.store_event(transaction_data, project_id=self.project.id)
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-p99()"],
                    "query": "transaction:/foo_bar/",
                    "field": ["transaction", "avg(transaction.duration)", "p99()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 1
        transaction2_data = data["/foo_bar/"]
        assert transaction2_data["order"] == 0
        assert [attrs for time, attrs in transaction2_data["data"]] == [
            [{"count": 1}],
            [{"count": 0}],
        ]

    def test_top_events_with_negated_condition(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "query": f"!message:{self.events[0].message}",
                    "field": ["message", "count()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        for index, event in enumerate(self.events[1:5]):
            message = event.message or event.transaction
            results = data[message]
            assert results["order"] == index
            assert [{"count": self.event_data[index + 1]["count"]}] in [
                attrs for _, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 1}] in [attrs for _, attrs in other["data"]]

    def test_top_events_with_epm(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "epm()",
                    "orderby": ["-count()"],
                    "field": ["message", "user.email", "count()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            results = data[
                ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
            ]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"] / (3600.0 / 60.0)}] in [
                attrs for time, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 0.05}] in [attrs for _, attrs in other["data"]]

    def test_top_events_with_multiple_yaxis(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": ["epm()", "count()"],
                    "orderby": ["-count()"],
                    "field": ["message", "user.email", "count()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            results = data[
                ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
            ]
            assert results["order"] == index
            assert results["epm()"]["order"] == 0
            assert results["count()"]["order"] == 1
            assert [{"count": self.event_data[index]["count"] / (3600.0 / 60.0)}] in [
                attrs for time, attrs in results["epm()"]["data"]
            ]
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["count()"]["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert other["epm()"]["order"] == 0
        assert other["count()"]["order"] == 1
        assert [{"count": 0.05}] in [attrs for _, attrs in other["epm()"]["data"]]
        assert [{"count": 3}] in [attrs for _, attrs in other["count()"]["data"]]
  1301. def test_top_events_with_boolean(self):
  1302. with self.feature(self.enabled_features):
  1303. response = self.client.get(
  1304. self.url,
  1305. data={
  1306. "start": iso_format(self.day_ago),
  1307. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1308. "interval": "1h",
  1309. "yAxis": "count()",
  1310. "orderby": ["-count()"],
  1311. "field": ["count()", "message", "device.charging"],
  1312. "topEvents": 5,
  1313. },
  1314. format="json",
  1315. )
  1316. data = response.data
  1317. assert response.status_code == 200, response.content
  1318. assert len(data) == 6
  1319. for index, event in enumerate(self.events[:5]):
  1320. message = event.message or event.transaction
  1321. results = data[",".join(["False", message])]
  1322. assert results["order"] == index
  1323. assert [{"count": self.event_data[index]["count"]}] in [
  1324. attrs for time, attrs in results["data"]
  1325. ]
  1326. other = data["Other"]
  1327. assert other["order"] == 5
  1328. assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
    def test_top_events_with_error_unhandled(self):
        self.login_as(user=self.user)
        project = self.create_project()
        prototype = load_data("android-ndk")
        prototype["event_id"] = "f" * 32
        prototype["message"] = "not handled"
        prototype["exception"]["values"][0]["value"] = "not handled"
        prototype["exception"]["values"][0]["mechanism"]["handled"] = False
        prototype["timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
        self.store_event(data=prototype, project_id=project.id)
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "error.unhandled"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 2

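    # event.type:default excludes the transaction event, and each event's timestamp becomes
    # part of its top-event key.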
    def test_top_events_with_timestamp(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "query": "event.type:default",
                    "field": ["count()", "message", "timestamp"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        # Transactions won't be in the results because of the query
        del self.events[4]
        del self.event_data[4]
        for index, event in enumerate(self.events[:5]):
            results = data[",".join([event.message, event.timestamp])]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 1}] in [attrs for _, attrs in other["data"]]

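    # Integer fields like transaction.duration are stringified ("120000") in the top-event key.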
    def test_top_events_with_int(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "transaction.duration"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 1
        results = data[",".join([self.transaction.transaction, "120000"])]
        assert results["order"] == 0
        assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]

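    # The user field is keyed by whichever identifier is present, e.g. "email:..." or "ip:...".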
    def test_top_events_with_user(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()", "user"],
                    "field": ["user", "count()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 5
        assert data["email:bar@example.com"]["order"] == 1
        assert [attrs for time, attrs in data["email:bar@example.com"]["data"]] == [
            [{"count": 7}],
            [{"count": 0}],
        ]
        assert [attrs for time, attrs in data["ip:127.0.0.1"]["data"]] == [
            [{"count": 3}],
            [{"count": 0}],
        ]

    def test_top_events_with_user_and_email(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()", "user"],
                    "field": ["user", "user.email", "count()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 5
        assert data["email:bar@example.com,bar@example.com"]["order"] == 1
        assert [attrs for time, attrs in data["email:bar@example.com,bar@example.com"]["data"]] == [
            [{"count": 7}],
            [{"count": 0}],
        ]
        assert [attrs for time, attrs in data["ip:127.0.0.1,None"]["data"]] == [
            [{"count": 3}],
            [{"count": 0}],
        ]

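    # user.display falls back from email to ip_address, mirroring the expected key built below.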
    def test_top_events_with_user_display(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["message", "user.display", "count()"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            user = self.event_data[index]["data"]["user"]
            results = data[
                ",".join([message, user.get("email", None) or user.get("ip_address", "None")])
            ]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for _, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 3}] in [attrs for _, attrs in other["data"]]

    @pytest.mark.skip(reason="A query with group_id will not return transactions")
    def test_top_events_none_filter(self):
        """When a field is None in one of the top events, make sure we filter by it

        In this case event[4] is a transaction and has no issue
        """
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "issue"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 5
        for index, event in enumerate(self.events[:5]):
            if event.group is None:
                issue = "unknown"
            else:
                issue = event.group.qualified_short_id
            results = data[issue]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]

    @pytest.mark.skip(reason="Invalid query - transaction events don't have group_id field")
    def test_top_events_one_field_with_none(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "query": "event.type:transaction",
                    "field": ["count()", "issue"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 1
        results = data["unknown"]
        assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
        assert results["order"] == 0

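    # Stores one handled and one unhandled ValidationError, then groups by error.handled; the
    # empty-string key presumably collects events that carry no exception mechanism at all.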
    def test_top_events_with_error_handled(self):
        data = self.event_data[0]
        data["data"]["level"] = "error"
        data["data"]["exception"] = {
            "values": [
                {
                    "type": "ValidationError",
                    "value": "Bad request",
                    "mechanism": {"handled": True, "type": "generic"},
                }
            ]
        }
        self.store_event(data["data"], project_id=data["project"].id)
        data["data"]["exception"] = {
            "values": [
                {
                    "type": "ValidationError",
                    "value": "Bad request",
                    "mechanism": {"handled": False, "type": "generic"},
                }
            ]
        }
        self.store_event(data["data"], project_id=data["project"].id)
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "error.handled"],
                    "topEvents": 5,
                    "query": "!event.type:transaction",
                },
                format="json",
            )
        assert response.status_code == 200, response.content
        data = response.data
        assert len(data) == 3
        results = data[""]
        assert [attrs for time, attrs in results["data"]] == [[{"count": 19}], [{"count": 6}]]
        assert results["order"] == 0
        results = data["1"]
        assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
        results = data["0"]
        assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]

    def test_top_events_with_aggregate_condition(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["message", "count()"],
                    "query": "count():>4",
                    "topEvents": 5,
                },
                format="json",
            )
        assert response.status_code == 200, response.content
        data = response.data
        assert len(data) == 3
        for index, event in enumerate(self.events[:3]):
            message = event.message or event.transaction
            results = data[message]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]

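    # to_other(release, ...) buckets results into "current" (the stored release) and "others";
    # the orderby alias has every special character in the version replaced by an underscore.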
    @pytest.mark.xfail(reason="There's only 2 rows total, which means there shouldn't be other")
    def test_top_events_with_to_other(self):
        version = "version -@'\" 1.2,3+(4)"
        version_escaped = "version -@'\\\" 1.2,3+(4)"
        # every symbol is replaced with an underscore to make the alias
        version_alias = "version_______1_2_3__4_"

        # add an event in the current release
        event = self.event_data[0]
        event_data = event["data"].copy()
        event_data["event_id"] = uuid4().hex
        event_data["release"] = version
        self.store_event(event_data, project_id=event["project"].id)

        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    # the double underscores around the version alias are because of the comma and quote
                    "orderby": [f"-to_other_release__{version_alias}__others_current"],
                    "field": [
                        "count()",
                        f'to_other(release,"{version_escaped}",others,current)',
                    ],
                    "topEvents": 2,
                },
                format="json",
            )
        assert response.status_code == 200, response.content
        data = response.data
        assert len(data) == 2

        current = data["current"]
        assert current["order"] == 1
        assert sum(attrs[0]["count"] for _, attrs in current["data"]) == 1

        others = data["others"]
        assert others["order"] == 0
        assert sum(attrs[0]["count"] for _, attrs in others["data"]) == sum(
            event_data["count"] for event_data in self.event_data
        )

    def test_top_events_with_equations(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "equation|count() / 100",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "user.email", "equation|count() / 100"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            results = data[
                ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
            ]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"] / 100}] in [
                attrs for time, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 0.03}] in [attrs for _, attrs in other["data"]]

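    # When the requested interval would produce too many data points, the endpoint falls back to
    # a default granularity; the mocked snql queries let the test inspect the granularity used.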
    @mock.patch("sentry.snuba.discover.bulk_snql_query", return_value=[{"data": [], "meta": []}])
    @mock.patch(
        "sentry.search.events.builder.raw_snql_query", return_value={"data": [], "meta": []}
    )
    def test_invalid_interval(self, mock_raw_query, mock_bulk_query):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    # 7,200 points for each event
                    "start": iso_format(before_now(seconds=7200)),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                },
            )
        assert response.status_code == 200
        assert mock_bulk_query.call_count == 1

        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    "start": iso_format(before_now(seconds=7200)),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                    # 7,200 points for each event * 2, should error
                    "topEvents": 2,
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 2
        # Should've reset to the default granularity for a 1h-24h range
        assert mock_raw_query.mock_calls[1].args[0].query.granularity.granularity == 300

        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    # 1999 points * 5 events should just be enough to not error
                    "start": iso_format(before_now(seconds=1999)),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "1s",
                    "yAxis": "count()",
                    "topEvents": 5,
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 4
        # Should've left the interval alone since we're just below the limit
        assert mock_raw_query.mock_calls[3].args[0].query.granularity.granularity == 1

        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "end": iso_format(before_now()),
                    "start": iso_format(before_now(hours=24)),
                    "field": ["count()", "issue"],
                    "query": "",
                    "interval": "0d",
                    "yAxis": "count()",
                    "topEvents": 5,
                },
            )
        assert response.status_code == 200
        assert mock_raw_query.call_count == 6
        # Should've defaulted to the 24h default of 5m
        assert mock_raw_query.mock_calls[5].args[0].query.granularity.granularity == 300

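    # timestamp, timestamp.to_hour and timestamp.to_day are all part of the result key, so
    # events sharing a timestamp collapse into a single entry.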
    def test_top_events_timestamp_fields(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                format="json",
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "timestamp", "timestamp.to_hour", "timestamp.to_day"],
                    "topEvents": 5,
                },
            )
        assert response.status_code == 200
        data = response.data
        assert len(data) == 3

        # these are the timestamps corresponding to the events stored
        timestamps = [
            self.day_ago + timedelta(minutes=2),
            self.day_ago + timedelta(hours=1, minutes=2),
            self.day_ago + timedelta(minutes=4),
        ]
        timestamp_hours = [timestamp.replace(minute=0, second=0) for timestamp in timestamps]
        timestamp_days = [
            timestamp.replace(hour=0, minute=0, second=0) for timestamp in timestamps
        ]
        for ts, ts_hr, ts_day in zip(timestamps, timestamp_hours, timestamp_days):
            key = f"{iso_format(ts)}+00:00,{iso_format(ts_day)}+00:00,{iso_format(ts_hr)}+00:00"
            count = sum(
                e["count"] for e in self.event_data if e["data"]["timestamp"] == iso_format(ts)
            )
            results = data[key]
            assert [{"count": count}] in [attrs for time, attrs in results["data"]]

    def test_top_events_other_with_matching_columns(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "tags[shared-tag]", "message"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            results = data[",".join([message, "yup"])]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for _, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 3}] in [attrs for _, attrs in other["data"]]

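    # When a real group's name collides with the reserved "Other" key, the endpoint is expected
    # to disambiguate it as "<name> (message)".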
    def test_top_events_with_field_overlapping_other_key(self):
        transaction_data = load_data("transaction")
        transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
        transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
        transaction_data["transaction"] = OTHER_KEY
        for i in range(5):
            data = transaction_data.copy()
            data["event_id"] = "ab" + f"{i}" * 30
            data["contexts"]["trace"]["span_id"] = "ab" + f"{i}" * 14
            self.store_event(data, project_id=self.project.id)

        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        assert f"{OTHER_KEY} (message)" in data
        results = data[f"{OTHER_KEY} (message)"]
        assert [{"count": 5}] in [attrs for _, attrs in results["data"]]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 4}] in [attrs for _, attrs in other["data"]]

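    # excludeOther=1 should drop the aggregated "Other" series from the response.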
    def test_top_events_can_exclude_other_series(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["count()"],
                    "field": ["count()", "message"],
                    "topEvents": 5,
                    "excludeOther": "1",
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 5
        assert "Other" not in response.data

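    # The equation references user, which is not a selected field; ordering by the equation
    # alias should still pass field validation.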
    def test_top_events_with_equation_including_unselected_fields_passes_field_validation(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-equation[0]"],
                    "field": ["count()", "message", "equation|count_unique(user) * 2"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 4}] in [attrs for _, attrs in other["data"]]

    def test_top_events_boolean_condition_and_project_field(self):
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["project", "count()"],
                    "topEvents": 5,
                    "query": "event.type:transaction (transaction:*a OR transaction:b*)",
                },
                format="json",
            )
        assert response.status_code == 200