# test_organization_events_stats.py
# Tests for the organization events-stats endpoint.
  1. import uuid
  2. from datetime import timedelta, timezone
  3. from unittest import mock
  4. from uuid import uuid4
  5. import pytest
  6. from dateutil.parser import parse as parse_date
  7. from django.urls import reverse
  8. from snuba_sdk.column import Column
  9. from snuba_sdk.conditions import Condition, Op
  10. from snuba_sdk.function import Function
  11. from sentry.constants import MAX_TOP_EVENTS
  12. from sentry.issues.grouptype import ProfileFileIOGroupType
  13. from sentry.models.transaction_threshold import ProjectTransactionThreshold, TransactionMetric
  14. from sentry.snuba.discover import OTHER_KEY
  15. from sentry.testutils.cases import APITestCase, ProfilesSnubaTestCase, SnubaTestCase
  16. from sentry.testutils.helpers.datetime import before_now, iso_format
  17. from sentry.testutils.silo import region_silo_test
  18. from sentry.utils.samples import load_data
  19. from tests.sentry.issues.test_utils import SearchIssueTestMixin
# Apply the sentry_metrics pytest mark to every test in this module.
pytestmark = pytest.mark.sentry_metrics


@region_silo_test
  22. class OrganizationEventsStatsEndpointTest(APITestCase, SnubaTestCase, SearchIssueTestMixin):
  23. endpoint = "sentry-api-0-organization-events-stats"
    def setUp(self):
        """Create two projects with three error events spread across the first
        two hours of ``self.day_ago``, and resolve the stats endpoint URL
        shared by every test.
        """
        super().setUp()
        self.login_as(user=self.user)
        # Keep a handle on the API-authenticated user; self.user is rebound below.
        self.authed_user = self.user
        # Anchor all fixtures at 10:00 one day ago so hourly buckets are stable.
        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        self.project = self.create_project()
        self.project2 = self.create_project()
        self.user = self.create_user()
        self.user2 = self.create_user()
        # NOTE(review): presumably needed so group attributes are propagated for
        # the stored events — confirm against the option's consumers.
        with self.options({"issues.group_attributes.send_kafka": True}):
            # One event in the first hour...
            self.store_event(
                data={
                    "event_id": "a" * 32,
                    "message": "very bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=1)),
                    "fingerprint": ["group1"],
                    "tags": {"sentry:user": self.user.email},
                },
                project_id=self.project.id,
            )
            # ...and two in the second hour, on the second project.
            self.store_event(
                data={
                    "event_id": "b" * 32,
                    "message": "oh my",
                    "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=1)),
                    "fingerprint": ["group2"],
                    "tags": {"sentry:user": self.user2.email},
                },
                project_id=self.project2.id,
            )
            self.store_event(
                data={
                    "event_id": "c" * 32,
                    "message": "very bad",
                    "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=2)),
                    "fingerprint": ["group2"],
                    "tags": {"sentry:user": self.user2.email},
                },
                project_id=self.project2.id,
            )
        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_slug": self.project.organization.slug},
        )
        # Extra feature flags merged into every do_request() call.
        self.features = {}
  69. def do_request(self, data, url=None, features=None):
  70. if features is None:
  71. features = {"organizations:discover-basic": True}
  72. features.update(self.features)
  73. with self.feature(features):
  74. return self.client.get(self.url if url is None else url, data=data, format="json")
  75. def test_simple(self):
  76. response = self.do_request(
  77. {
  78. "start": iso_format(self.day_ago),
  79. "end": iso_format(self.day_ago + timedelta(hours=2)),
  80. "interval": "1h",
  81. },
  82. )
  83. assert response.status_code == 200, response.content
  84. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  85. def test_generic_issue(self):
  86. _, _, group_info = self.store_search_issue(
  87. self.project.id,
  88. self.user.id,
  89. [f"{ProfileFileIOGroupType.type_id}-group1"],
  90. "prod",
  91. self.day_ago.replace(tzinfo=timezone.utc),
  92. )
  93. assert group_info is not None
  94. self.store_search_issue(
  95. self.project.id,
  96. self.user.id,
  97. [f"{ProfileFileIOGroupType.type_id}-group1"],
  98. "prod",
  99. self.day_ago.replace(tzinfo=timezone.utc) + timedelta(hours=1, minutes=1),
  100. )
  101. self.store_search_issue(
  102. self.project.id,
  103. self.user.id,
  104. [f"{ProfileFileIOGroupType.type_id}-group1"],
  105. "prod",
  106. self.day_ago.replace(tzinfo=timezone.utc) + timedelta(hours=1, minutes=2),
  107. )
  108. with self.feature(
  109. [
  110. "organizations:profiling",
  111. ]
  112. ):
  113. response = self.do_request(
  114. {
  115. "start": iso_format(self.day_ago),
  116. "end": iso_format(self.day_ago + timedelta(hours=2)),
  117. "interval": "1h",
  118. "query": f"issue:{group_info.group.qualified_short_id}",
  119. "dataset": "issuePlatform",
  120. },
  121. )
  122. assert response.status_code == 200, response.content
  123. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  124. def test_generic_issue_calculated_interval(self):
  125. """Test that a 4h interval returns the correct generic event stats.
  126. This follows a different code path than 1h or 1d as the IssuePlatformTimeSeriesQueryBuilder
  127. does some calculation to create the time column."""
  128. _, _, group_info = self.store_search_issue(
  129. self.project.id,
  130. self.user.id,
  131. [f"{ProfileFileIOGroupType.type_id}-group1"],
  132. "prod",
  133. self.day_ago.replace(tzinfo=timezone.utc) + timedelta(minutes=1),
  134. )
  135. assert group_info is not None
  136. self.store_search_issue(
  137. self.project.id,
  138. self.user.id,
  139. [f"{ProfileFileIOGroupType.type_id}-group1"],
  140. "prod",
  141. self.day_ago.replace(tzinfo=timezone.utc) + timedelta(minutes=1),
  142. )
  143. self.store_search_issue(
  144. self.project.id,
  145. self.user.id,
  146. [f"{ProfileFileIOGroupType.type_id}-group1"],
  147. "prod",
  148. self.day_ago.replace(tzinfo=timezone.utc) + timedelta(minutes=2),
  149. )
  150. with self.feature(
  151. [
  152. "organizations:profiling",
  153. ]
  154. ):
  155. response = self.do_request(
  156. {
  157. "start": iso_format(self.day_ago),
  158. "end": iso_format(self.day_ago + timedelta(hours=4)),
  159. "interval": "4h",
  160. "query": f"issue:{group_info.group.qualified_short_id}",
  161. "dataset": "issuePlatform",
  162. },
  163. )
  164. assert response.status_code == 200, response.content
  165. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 3}], [{"count": 0}]]
  166. def test_errors_dataset(self):
  167. response = self.do_request(
  168. {
  169. "start": iso_format(self.day_ago),
  170. "end": iso_format(self.day_ago + timedelta(hours=2)),
  171. "interval": "1h",
  172. "dataset": "errors",
  173. "query": "is:unresolved",
  174. },
  175. )
  176. assert response.status_code == 200, response.content
  177. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  178. def test_errors_dataset_no_query(self):
  179. response = self.do_request(
  180. {
  181. "start": iso_format(self.day_ago),
  182. "end": iso_format(self.day_ago + timedelta(hours=2)),
  183. "interval": "1h",
  184. "dataset": "errors",
  185. },
  186. )
  187. assert response.status_code == 200, response.content
  188. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  189. def test_misaligned_last_bucket(self):
  190. response = self.do_request(
  191. data={
  192. "start": iso_format(self.day_ago - timedelta(minutes=30)),
  193. "end": iso_format(self.day_ago + timedelta(hours=1, minutes=30)),
  194. "interval": "1h",
  195. "partial": "1",
  196. },
  197. )
  198. assert response.status_code == 200, response.content
  199. assert [attrs for time, attrs in response.data["data"]] == [
  200. [{"count": 0}],
  201. [{"count": 1}],
  202. [{"count": 2}],
  203. ]
  204. def test_no_projects(self):
  205. org = self.create_organization(owner=self.user)
  206. self.login_as(user=self.user)
  207. url = reverse(
  208. "sentry-api-0-organization-events-stats", kwargs={"organization_slug": org.slug}
  209. )
  210. response = self.do_request({}, url)
  211. assert response.status_code == 200, response.content
  212. assert len(response.data["data"]) == 0
  213. def test_user_count(self):
  214. self.store_event(
  215. data={
  216. "event_id": "d" * 32,
  217. "message": "something",
  218. "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
  219. "tags": {"sentry:user": self.user2.email},
  220. "fingerprint": ["group2"],
  221. },
  222. project_id=self.project2.id,
  223. )
  224. response = self.do_request(
  225. data={
  226. "start": iso_format(self.day_ago),
  227. "end": iso_format(self.day_ago + timedelta(hours=2)),
  228. "interval": "1h",
  229. "yAxis": "user_count",
  230. },
  231. )
  232. assert response.status_code == 200, response.content
  233. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 2}], [{"count": 1}]]
  234. def test_discover2_backwards_compatibility(self):
  235. response = self.do_request(
  236. data={
  237. "project": self.project.id,
  238. "start": iso_format(self.day_ago),
  239. "end": iso_format(self.day_ago + timedelta(hours=2)),
  240. "interval": "1h",
  241. "yAxis": "user_count",
  242. },
  243. )
  244. assert response.status_code == 200, response.content
  245. assert len(response.data["data"]) > 0
  246. response = self.do_request(
  247. data={
  248. "project": self.project.id,
  249. "start": iso_format(self.day_ago),
  250. "end": iso_format(self.day_ago + timedelta(hours=2)),
  251. "interval": "1h",
  252. "yAxis": "event_count",
  253. },
  254. )
  255. assert response.status_code == 200, response.content
  256. assert len(response.data["data"]) > 0
  257. def test_with_event_count_flag(self):
  258. response = self.do_request(
  259. data={
  260. "start": iso_format(self.day_ago),
  261. "end": iso_format(self.day_ago + timedelta(hours=2)),
  262. "interval": "1h",
  263. "yAxis": "event_count",
  264. },
  265. )
  266. assert response.status_code == 200, response.content
  267. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  268. def test_performance_view_feature(self):
  269. response = self.do_request(
  270. data={
  271. "end": iso_format(before_now()),
  272. "start": iso_format(before_now(hours=2)),
  273. "query": "project_id:1",
  274. "interval": "30m",
  275. "yAxis": "count()",
  276. },
  277. features={
  278. "organizations:performance-view": True,
  279. "organizations:discover-basic": False,
  280. },
  281. )
  282. assert response.status_code == 200, response.content
  283. def test_apdex_divide_by_zero(self):
  284. ProjectTransactionThreshold.objects.create(
  285. project=self.project,
  286. organization=self.project.organization,
  287. threshold=600,
  288. metric=TransactionMetric.LCP.value,
  289. )
  290. # Shouldn't count towards apdex
  291. data = load_data(
  292. "transaction",
  293. start_timestamp=self.day_ago + timedelta(minutes=(1)),
  294. timestamp=self.day_ago + timedelta(minutes=(3)),
  295. )
  296. data["transaction"] = "/apdex/new/"
  297. data["user"] = {"email": "1@example.com"}
  298. data["measurements"] = {}
  299. self.store_event(data, project_id=self.project.id)
  300. response = self.do_request(
  301. data={
  302. "start": iso_format(self.day_ago),
  303. "end": iso_format(self.day_ago + timedelta(hours=2)),
  304. "interval": "1h",
  305. "yAxis": "apdex()",
  306. "project": [self.project.id],
  307. },
  308. )
  309. assert response.status_code == 200, response.content
  310. assert len(response.data["data"]) == 2
  311. data = response.data["data"]
  312. # 0 transactions with LCP 0/0
  313. assert [attrs for time, attrs in response.data["data"]] == [
  314. [{"count": 0}],
  315. [{"count": 0}],
  316. ]
    def test_aggregate_function_apdex(self):
        """apdex() honors per-project transaction thresholds and can be
        requested alongside other yAxis values."""
        project1 = self.create_project()
        project2 = self.create_project()
        # (transaction name, duration in ms, project id) for five transactions,
        # all landing in the first hour bucket.
        events = [
            ("one", 400, project1.id),
            ("one", 400, project1.id),
            ("two", 3000, project2.id),
            ("two", 1000, project2.id),
            ("three", 3000, project2.id),
        ]
        for idx, event in enumerate(events):
            data = load_data(
                "transaction",
                start_timestamp=self.day_ago + timedelta(minutes=(1 + idx)),
                timestamp=self.day_ago + timedelta(minutes=(1 + idx), milliseconds=event[1]),
            )
            data["event_id"] = f"{idx}" * 32
            data["transaction"] = f"/apdex/new/{event[0]}"
            data["user"] = {"email": f"{idx}@example.com"}
            self.store_event(data, project_id=event[2])
        # With no project thresholds configured, apdex comes out at 0.3.
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "apdex()",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 0.3}],
            [{"count": 0}],
        ]
        # Tightening both projects to a 100ms duration threshold lowers apdex to 0.2.
        ProjectTransactionThreshold.objects.create(
            project=project1,
            organization=project1.organization,
            threshold=100,
            metric=TransactionMetric.DURATION.value,
        )
        ProjectTransactionThreshold.objects.create(
            project=project2,
            organization=project1.organization,
            threshold=100,
            metric=TransactionMetric.DURATION.value,
        )
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "apdex()",
            },
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 0.2}],
            [{"count": 0}],
        ]
        # Multi-axis request: each axis keeps its request order and its own series.
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": ["user_count", "apdex()"],
            },
        )
        assert response.status_code == 200, response.content
        assert response.data["user_count"]["order"] == 0
        assert [attrs for time, attrs in response.data["user_count"]["data"]] == [
            [{"count": 5}],
            [{"count": 0}],
        ]
        assert response.data["apdex()"]["order"] == 1
        assert [attrs for time, attrs in response.data["apdex()"]["data"]] == [
            [{"count": 0.2}],
            [{"count": 0}],
        ]
  394. def test_aggregate_function_count(self):
  395. response = self.do_request(
  396. data={
  397. "start": iso_format(self.day_ago),
  398. "end": iso_format(self.day_ago + timedelta(hours=2)),
  399. "interval": "1h",
  400. "yAxis": "count()",
  401. },
  402. )
  403. assert response.status_code == 200, response.content
  404. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
  405. def test_invalid_aggregate(self):
  406. response = self.do_request(
  407. data={
  408. "start": iso_format(self.day_ago),
  409. "end": iso_format(self.day_ago + timedelta(hours=2)),
  410. "interval": "1h",
  411. "yAxis": "rubbish",
  412. },
  413. )
  414. assert response.status_code == 400, response.content
  415. def test_aggregate_function_user_count(self):
  416. response = self.do_request(
  417. data={
  418. "start": iso_format(self.day_ago),
  419. "end": iso_format(self.day_ago + timedelta(hours=2)),
  420. "interval": "1h",
  421. "yAxis": "count_unique(user)",
  422. },
  423. )
  424. assert response.status_code == 200, response.content
  425. assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 1}]]
  426. def test_aggregate_invalid(self):
  427. response = self.do_request(
  428. data={
  429. "start": iso_format(self.day_ago),
  430. "end": iso_format(self.day_ago + timedelta(hours=2)),
  431. "interval": "1h",
  432. "yAxis": "nope(lol)",
  433. },
  434. )
  435. assert response.status_code == 400, response.content
  436. def test_throughput_epm_hour_rollup(self):
  437. project = self.create_project()
  438. # Each of these denotes how many events to create in each hour
  439. event_counts = [6, 0, 6, 3, 0, 3]
  440. for hour, count in enumerate(event_counts):
  441. for minute in range(count):
  442. self.store_event(
  443. data={
  444. "event_id": str(uuid.uuid1()),
  445. "message": "very bad",
  446. "timestamp": iso_format(
  447. self.day_ago + timedelta(hours=hour, minutes=minute)
  448. ),
  449. "fingerprint": ["group1"],
  450. "tags": {"sentry:user": self.user.email},
  451. },
  452. project_id=project.id,
  453. )
  454. for axis in ["epm()", "tpm()"]:
  455. response = self.do_request(
  456. data={
  457. "start": iso_format(self.day_ago),
  458. "end": iso_format(self.day_ago + timedelta(hours=6)),
  459. "interval": "1h",
  460. "yAxis": axis,
  461. "project": project.id,
  462. },
  463. )
  464. assert response.status_code == 200, response.content
  465. data = response.data["data"]
  466. assert len(data) == 6
  467. rows = data[0:6]
  468. for test in zip(event_counts, rows):
  469. assert test[1][1][0]["count"] == test[0] / (3600.0 / 60.0)
    def test_throughput_epm_day_rollup(self):
        """epm()/tpm() with a 24h rollup averages all events over the whole day."""
        project = self.create_project()
        # Each of these denotes how many events to create in each hour
        # (the loop below iterates hours, not minutes).
        event_counts = [6, 0, 6, 3, 0, 3]
        for hour, count in enumerate(event_counts):
            for minute in range(count):
                self.store_event(
                    data={
                        "event_id": str(uuid.uuid1()),
                        "message": "very bad",
                        "timestamp": iso_format(
                            self.day_ago + timedelta(hours=hour, minutes=minute)
                        ),
                        "fingerprint": ["group1"],
                        "tags": {"sentry:user": self.user.email},
                    },
                    project_id=project.id,
                )
        for axis in ["epm()", "tpm()"]:
            response = self.do_request(
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=24)),
                    "interval": "24h",
                    "yAxis": axis,
                    "project": project.id,
                },
            )
            assert response.status_code == 200, response.content
            data = response.data["data"]
            assert len(data) == 2
            # All 18 events averaged over the day's 1440 minutes.
            assert data[0][1][0]["count"] == sum(event_counts) / (86400.0 / 60.0)
  502. def test_throughput_eps_minute_rollup(self):
  503. project = self.create_project()
  504. # Each of these denotes how many events to create in each minute
  505. event_counts = [6, 0, 6, 3, 0, 3]
  506. for minute, count in enumerate(event_counts):
  507. for second in range(count):
  508. self.store_event(
  509. data={
  510. "event_id": str(uuid.uuid1()),
  511. "message": "very bad",
  512. "timestamp": iso_format(
  513. self.day_ago + timedelta(minutes=minute, seconds=second)
  514. ),
  515. "fingerprint": ["group1"],
  516. "tags": {"sentry:user": self.user.email},
  517. },
  518. project_id=project.id,
  519. )
  520. for axis in ["eps()", "tps()"]:
  521. response = self.do_request(
  522. data={
  523. "start": iso_format(self.day_ago),
  524. "end": iso_format(self.day_ago + timedelta(minutes=6)),
  525. "interval": "1m",
  526. "yAxis": axis,
  527. "project": project.id,
  528. },
  529. )
  530. assert response.status_code == 200, response.content
  531. data = response.data["data"]
  532. assert len(data) == 6
  533. rows = data[0:6]
  534. for test in zip(event_counts, rows):
  535. assert test[1][1][0]["count"] == test[0] / 60.0
  536. def test_throughput_eps_no_rollup(self):
  537. project = self.create_project()
  538. # Each of these denotes how many events to create in each minute
  539. event_counts = [6, 0, 6, 3, 0, 3]
  540. for minute, count in enumerate(event_counts):
  541. for second in range(count):
  542. self.store_event(
  543. data={
  544. "event_id": str(uuid.uuid1()),
  545. "message": "very bad",
  546. "timestamp": iso_format(
  547. self.day_ago + timedelta(minutes=minute, seconds=second)
  548. ),
  549. "fingerprint": ["group1"],
  550. "tags": {"sentry:user": self.user.email},
  551. },
  552. project_id=project.id,
  553. )
  554. response = self.do_request(
  555. data={
  556. "start": iso_format(self.day_ago),
  557. "end": iso_format(self.day_ago + timedelta(minutes=1)),
  558. "interval": "1s",
  559. "yAxis": "eps()",
  560. "project": project.id,
  561. },
  562. )
  563. assert response.status_code == 200, response.content
  564. data = response.data["data"]
  565. # expect 60 data points between time span of 0 and 60 seconds
  566. assert len(data) == 60
  567. rows = data[0:6]
  568. for row in rows:
  569. assert row[1][0]["count"] == 1
  570. def test_transaction_events(self):
  571. prototype = {
  572. "type": "transaction",
  573. "transaction": "api.issue.delete",
  574. "spans": [],
  575. "contexts": {"trace": {"op": "foobar", "trace_id": "a" * 32, "span_id": "a" * 16}},
  576. "tags": {"important": "yes"},
  577. }
  578. fixtures = (
  579. ("d" * 32, before_now(minutes=32)),
  580. ("e" * 32, before_now(hours=1, minutes=2)),
  581. ("f" * 32, before_now(hours=1, minutes=35)),
  582. )
  583. for fixture in fixtures:
  584. data = prototype.copy()
  585. data["event_id"] = fixture[0]
  586. data["timestamp"] = iso_format(fixture[1])
  587. data["start_timestamp"] = iso_format(fixture[1] - timedelta(seconds=1))
  588. self.store_event(data=data, project_id=self.project.id)
  589. response = self.do_request(
  590. data={
  591. "project": self.project.id,
  592. "end": iso_format(before_now()),
  593. "start": iso_format(before_now(hours=2)),
  594. "query": "event.type:transaction",
  595. "interval": "30m",
  596. "yAxis": "count()",
  597. },
  598. )
  599. assert response.status_code == 200, response.content
  600. items = [item for time, item in response.data["data"] if item]
  601. # We could get more results depending on where the 30 min
  602. # windows land.
  603. assert len(items) >= 3
  604. def test_project_id_query_filter(self):
  605. response = self.do_request(
  606. data={
  607. "end": iso_format(before_now()),
  608. "start": iso_format(before_now(hours=2)),
  609. "query": "project_id:1",
  610. "interval": "30m",
  611. "yAxis": "count()",
  612. },
  613. )
  614. assert response.status_code == 200
  615. def test_latest_release_query_filter(self):
  616. response = self.do_request(
  617. data={
  618. "project": self.project.id,
  619. "end": iso_format(before_now()),
  620. "start": iso_format(before_now(hours=2)),
  621. "query": "release:latest",
  622. "interval": "30m",
  623. "yAxis": "count()",
  624. },
  625. )
  626. assert response.status_code == 200
  627. def test_conditional_filter(self):
  628. response = self.do_request(
  629. data={
  630. "start": iso_format(self.day_ago),
  631. "end": iso_format(self.day_ago + timedelta(hours=2)),
  632. "query": "id:{} OR id:{}".format("a" * 32, "b" * 32),
  633. "interval": "30m",
  634. "yAxis": "count()",
  635. },
  636. )
  637. assert response.status_code == 200, response.content
  638. data = response.data["data"]
  639. assert len(data) == 4
  640. assert data[0][1][0]["count"] == 1
  641. assert data[2][1][0]["count"] == 1
    def test_simple_multiple_yaxis(self):
        """Multiple yAxis values yield one keyed series per axis with a stable order."""
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": ["user_count", "event_count"],
            },
        )
        assert response.status_code == 200, response.content
        # "order" reflects the position of the axis in the requested yAxis list
        assert response.data["user_count"]["order"] == 0
        assert [attrs for time, attrs in response.data["user_count"]["data"]] == [
            [{"count": 1}],
            [{"count": 1}],
        ]
        assert response.data["event_count"]["order"] == 1
        assert [attrs for time, attrs in response.data["event_count"]["data"]] == [
            [{"count": 1}],
            [{"count": 2}],
        ]
  662. def test_equation_yaxis(self):
  663. response = self.do_request(
  664. data={
  665. "start": iso_format(self.day_ago),
  666. "end": iso_format(self.day_ago + timedelta(hours=2)),
  667. "interval": "1h",
  668. "yAxis": ["equation|count() / 100"],
  669. },
  670. )
  671. assert response.status_code == 200, response.content
  672. assert len(response.data["data"]) == 2
  673. assert [attrs for time, attrs in response.data["data"]] == [
  674. [{"count": 0.01}],
  675. [{"count": 0.02}],
  676. ]
    def test_equation_mixed_multi_yaxis(self):
        """A plain aggregate and an equation can be requested together as two series."""
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": ["count()", "equation|count() * 100"],
            },
        )
        assert response.status_code == 200, response.content
        # order follows the position of each axis in the yAxis list
        assert response.data["count()"]["order"] == 0
        assert [attrs for time, attrs in response.data["count()"]["data"]] == [
            [{"count": 1}],
            [{"count": 2}],
        ]
        assert response.data["equation|count() * 100"]["order"] == 1
        assert [attrs for time, attrs in response.data["equation|count() * 100"]["data"]] == [
            [{"count": 100}],
            [{"count": 200}],
        ]
    def test_equation_multi_yaxis(self):
        """Two equations can be requested together; each gets its own keyed series."""
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": ["equation|count() / 100", "equation|count() * 100"],
            },
        )
        assert response.status_code == 200, response.content
        # order follows the position of each equation in the yAxis list
        assert response.data["equation|count() / 100"]["order"] == 0
        assert [attrs for time, attrs in response.data["equation|count() / 100"]["data"]] == [
            [{"count": 0.01}],
            [{"count": 0.02}],
        ]
        assert response.data["equation|count() * 100"]["order"] == 1
        assert [attrs for time, attrs in response.data["equation|count() * 100"]["data"]] == [
            [{"count": 100}],
            [{"count": 200}],
        ]
    def test_large_interval_no_drop_values(self):
        """A 1d interval must not drop the bucket containing a matching event."""
        self.store_event(
            data={
                "event_id": "d" * 32,
                "message": "not good",
                "timestamp": iso_format(self.day_ago - timedelta(minutes=10)),
                "fingerprint": ["group3"],
            },
            project_id=self.project.id,
        )
        response = self.do_request(
            data={
                "project": self.project.id,
                "end": iso_format(self.day_ago),
                "start": iso_format(self.day_ago - timedelta(hours=24)),
                "query": 'message:"not good"',
                "interval": "1d",
                "yAxis": "count()",
            },
        )
        assert response.status_code == 200
        # two daily buckets: the empty one is zero-filled, the other holds the event
        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 0}], [{"count": 1}]]
    @mock.patch("sentry.snuba.discover.timeseries_query", return_value={})
    def test_multiple_yaxis_only_one_query(self, mock_query):
        """All requested axes must be fetched with a single timeseries query."""
        self.do_request(
            data={
                "project": self.project.id,
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": ["user_count", "event_count", "epm()", "eps()"],
            },
        )
        # four axes, but only one call to discover.timeseries_query
        assert mock_query.call_count == 1
    @mock.patch("sentry.snuba.discover.bulk_snql_query", return_value=[{"data": []}])
    def test_invalid_interval(self, mock_query):
        """Too-small or zero intervals fall back to the default granularity for the period."""
        self.do_request(
            data={
                "end": iso_format(before_now()),
                "start": iso_format(before_now(hours=24)),
                "query": "",
                "interval": "1s",
                "yAxis": "count()",
            },
        )
        assert mock_query.call_count == 1
        # Should've reset to the default for 24h
        # args[0] is the list of snql requests; inspect the first request's granularity
        assert mock_query.mock_calls[0].args[0][0].query.granularity.granularity == 300
        self.do_request(
            data={
                "end": iso_format(before_now()),
                "start": iso_format(before_now(hours=24)),
                "query": "",
                "interval": "0d",
                "yAxis": "count()",
            },
        )
        assert mock_query.call_count == 2
        # Should've reset to the default for 24h
        assert mock_query.mock_calls[1].args[0][0].query.granularity.granularity == 300
  777. def test_out_of_retention(self):
  778. with self.options({"system.event-retention-days": 10}):
  779. response = self.do_request(
  780. data={
  781. "start": iso_format(before_now(days=20)),
  782. "end": iso_format(before_now(days=15)),
  783. "query": "",
  784. "interval": "30m",
  785. "yAxis": "count()",
  786. },
  787. )
  788. assert response.status_code == 400
    @mock.patch("sentry.utils.snuba.quantize_time")
    def test_quantize_dates(self, mock_quantize):
        """Only long relative stats periods have their start/end quantized."""
        mock_quantize.return_value = before_now(days=1).replace(tzinfo=timezone.utc)
        # Don't quantize short time periods
        self.do_request(
            data={"statsPeriod": "1h", "query": "", "interval": "30m", "yAxis": "count()"},
        )
        # Don't quantize absolute date periods
        self.do_request(
            data={
                "start": iso_format(before_now(days=20)),
                "end": iso_format(before_now(days=15)),
                "query": "",
                "interval": "30m",
                "yAxis": "count()",
            },
        )
        assert len(mock_quantize.mock_calls) == 0
        # Quantize long date periods
        self.do_request(
            data={"statsPeriod": "90d", "query": "", "interval": "30m", "yAxis": "count()"},
        )
        # one call each for start and end
        assert len(mock_quantize.mock_calls) == 2
  812. def test_with_zerofill(self):
  813. response = self.do_request(
  814. data={
  815. "start": iso_format(self.day_ago),
  816. "end": iso_format(self.day_ago + timedelta(hours=2)),
  817. "interval": "30m",
  818. },
  819. )
  820. assert response.status_code == 200, response.content
  821. assert [attrs for time, attrs in response.data["data"]] == [
  822. [{"count": 1}],
  823. [{"count": 0}],
  824. [{"count": 2}],
  825. [{"count": 0}],
  826. ]
    def test_without_zerofill(self):
        """withoutZerofill (behind a feature flag) omits empty buckets and echoes the range."""
        start = iso_format(self.day_ago)
        end = iso_format(self.day_ago + timedelta(hours=2))
        response = self.do_request(
            data={
                "start": start,
                "end": end,
                "interval": "30m",
                "withoutZerofill": "1",
            },
            features={
                "organizations:performance-chart-interpolation": True,
                "organizations:discover-basic": True,
            },
        )
        assert response.status_code == 200, response.content
        # only the two non-empty buckets remain
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 1}],
            [{"count": 2}],
        ]
        # the response also carries the requested range as epoch timestamps
        assert response.data["start"] == parse_date(start).timestamp()
        assert response.data["end"] == parse_date(end).timestamp()
    def test_comparison(self):
        """comparisonDelta adds a comparisonCount from one period earlier to each bucket."""
        # events exactly one day before the queried window, mirroring the buckets
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=1)),
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=2)),
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "timestamp": iso_format(self.day_ago + timedelta(days=-1, hours=1, minutes=1)),
            },
            project_id=self.project2.id,
        )
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                # compare against the same window shifted back one day
                "comparisonDelta": int(timedelta(days=1).total_seconds()),
            }
        )
        assert response.status_code == 200, response.content
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": 1, "comparisonCount": 2}],
            [{"count": 2, "comparisonCount": 1}],
        ]
    def test_comparison_invalid(self):
        """Non-integer deltas and comparisons outside retention are rejected with 400s."""
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                # must be seconds as an integer, not a duration string
                "comparisonDelta": "17h",
            },
        )
        assert response.status_code == 400, response.content
        assert response.data["detail"] == "comparisonDelta must be an integer"

        # shifting this window back 7 days would cross the 90-day retention boundary
        start = before_now(days=85)
        end = start + timedelta(days=7)
        with self.options({"system.event-retention-days": 90}):
            response = self.do_request(
                data={
                    "start": iso_format(start),
                    "end": iso_format(end),
                    "interval": "1h",
                    "comparisonDelta": int(timedelta(days=7).total_seconds()),
                }
            )
            assert response.status_code == 400, response.content
            assert response.data["detail"] == "Comparison period is outside retention window"
    def test_equations_divide_by_zero(self):
        """Division by zero in an equation yields null counts instead of an error."""
        response = self.do_request(
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                # force a 0 in the denominator by doing 1 - 1
                # since a 0 literal is illegal as the denominator
                "yAxis": ["equation|count() / (1-1)"],
            },
        )
        assert response.status_code == 200, response.content
        assert len(response.data["data"]) == 2
        assert [attrs for time, attrs in response.data["data"]] == [
            [{"count": None}],
            [{"count": None}],
        ]
    @mock.patch("sentry.search.events.builder.discover.raw_snql_query")
    def test_profiles_dataset_simple(self, mock_snql_query):
        """The profiles dataset accepts profiling aggregates; snuba itself is mocked out."""
        # only request validation/serialization is exercised — the query returns no rows
        mock_snql_query.side_effect = [{"meta": {}, "data": []}]
        query = {
            "yAxis": [
                "count()",
                "p75()",
                "p95()",
                "p99()",
                "p75(profile.duration)",
                "p95(profile.duration)",
                "p99(profile.duration)",
            ],
            "project": [self.project.id],
            "dataset": "profiles",
        }
        response = self.do_request(query, features={"organizations:profiling": True})
        assert response.status_code == 200, response.content
  940. def test_tag_with_conflicting_function_alias_simple(self):
  941. for _ in range(7):
  942. self.store_event(
  943. data={
  944. "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
  945. "tags": {"count": "9001"},
  946. },
  947. project_id=self.project2.id,
  948. )
  949. # Query for count and count()
  950. data = {
  951. "start": iso_format(self.day_ago),
  952. "end": iso_format(self.day_ago + timedelta(minutes=3)),
  953. "interval": "1h",
  954. "yAxis": "count()",
  955. "orderby": ["-count()"],
  956. "field": ["count()", "count"],
  957. "partial": 1,
  958. }
  959. response = self.client.get(self.url, data, format="json")
  960. assert response.status_code == 200
  961. # Expect a count of 8 because one event from setUp
  962. assert response.data["data"][0][1] == [{"count": 8}]
  963. data["query"] = "count:9001"
  964. response = self.client.get(self.url, data, format="json")
  965. assert response.status_code == 200
  966. assert response.data["data"][0][1] == [{"count": 7}]
  967. data["query"] = "count:abc"
  968. response = self.client.get(self.url, data, format="json")
  969. assert response.status_code == 200
  970. assert all([interval[1][0]["count"] == 0 for interval in response.data["data"]])
  971. def test_group_id_tag_simple(self):
  972. event_data = {
  973. "data": {
  974. "message": "poof",
  975. "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
  976. "user": {"email": self.user.email},
  977. "tags": {"group_id": "testing"},
  978. "fingerprint": ["group1"],
  979. },
  980. "project": self.project2,
  981. "count": 7,
  982. }
  983. for i in range(event_data["count"]):
  984. event_data["data"]["event_id"] = f"a{i}" * 16
  985. self.store_event(event_data["data"], project_id=event_data["project"].id)
  986. data = {
  987. "start": iso_format(self.day_ago),
  988. "end": iso_format(self.day_ago + timedelta(hours=2)),
  989. "interval": "1h",
  990. "yAxis": "count()",
  991. "orderby": ["-count()"],
  992. "field": ["count()", "group_id"],
  993. "partial": 1,
  994. }
  995. response = self.client.get(self.url, data, format="json")
  996. assert response.status_code == 200
  997. assert response.data["data"][0][1] == [{"count": 8}]
  998. data["query"] = "group_id:testing"
  999. response = self.client.get(self.url, data, format="json")
  1000. assert response.status_code == 200
  1001. assert response.data["data"][0][1] == [{"count": 7}]
  1002. data["query"] = "group_id:abc"
  1003. response = self.client.get(self.url, data, format="json")
  1004. assert response.status_code == 200
  1005. assert all([interval[1][0]["count"] == 0 for interval in response.data["data"]])
  1006. @region_silo_test
  1007. class OrganizationEventsStatsTopNEvents(APITestCase, SnubaTestCase):
    def setUp(self):
        """Seed two projects with six error groups and one transaction, ordered by count.

        ``self.events`` holds one representative stored event per fixture (7 total),
        and ``self.event_data[i]["count"]`` is how many copies of fixture ``i`` exist.
        """
        super().setUp()
        self.login_as(user=self.user)
        # anchor all fixtures to a fixed hour yesterday for deterministic bucketing
        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        self.project = self.create_project()
        self.project2 = self.create_project()
        self.user2 = self.create_user()
        transaction_data = load_data("transaction")
        transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
        transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=4))
        transaction_data["tags"] = {"shared-tag": "yup"}
        # fixtures sorted by descending count so the top-5 cutoff is predictable
        self.event_data = [
            {
                "data": {
                    "message": "poof",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "user": {"email": self.user.email},
                    "tags": {"shared-tag": "yup"},
                    "fingerprint": ["group1"],
                },
                "project": self.project2,
                "count": 7,
            },
            {
                "data": {
                    "message": "voof",
                    "timestamp": iso_format(self.day_ago + timedelta(hours=1, minutes=2)),
                    "fingerprint": ["group2"],
                    "user": {"email": self.user2.email},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project2,
                "count": 6,
            },
            {
                "data": {
                    "message": "very bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group3"],
                    "user": {"email": "foo@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 5,
            },
            {
                "data": {
                    "message": "oh no",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group4"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 4,
            },
            {"data": transaction_data, "project": self.project, "count": 3},
            # Not in the top 5
            {
                "data": {
                    "message": "sorta bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group5"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 2,
            },
            {
                "data": {
                    "message": "not so bad",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "fingerprint": ["group6"],
                    "user": {"email": "bar@example.com"},
                    "tags": {"shared-tag": "yup"},
                },
                "project": self.project,
                "count": 1,
            },
        ]
        self.events = []
        for index, event_data in enumerate(self.event_data):
            data = event_data["data"].copy()
            for i in range(event_data["count"]):
                # unique 32-char event id per copy
                data["event_id"] = f"{index}{i}" * 16
                event = self.store_event(data, project_id=event_data["project"].id)
            # keep only the last stored copy per fixture
            self.events.append(event)
        self.transaction = self.events[4]
        self.enabled_features = {
            "organizations:discover-basic": True,
        }
        self.url = reverse(
            "sentry-api-0-organization-events-stats",
            kwargs={"organization_slug": self.project.organization.slug},
        )
  1104. def test_no_top_events_with_project_field(self):
  1105. project = self.create_project()
  1106. with self.feature(self.enabled_features):
  1107. response = self.client.get(
  1108. self.url,
  1109. data={
  1110. # make sure to query the project with 0 events
  1111. "project": project.id,
  1112. "start": iso_format(self.day_ago),
  1113. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1114. "interval": "1h",
  1115. "yAxis": "count()",
  1116. "orderby": ["-count()"],
  1117. "field": ["count()", "project"],
  1118. "topEvents": 5,
  1119. },
  1120. format="json",
  1121. )
  1122. assert response.status_code == 200, response.content
  1123. # When there are no top events, we do not return an empty dict.
  1124. # Instead, we return a single zero-filled series for an empty graph.
  1125. data = response.data["data"]
  1126. assert [attrs for time, attrs in data] == [[{"count": 0}], [{"count": 0}]]
  1127. def test_no_top_events(self):
  1128. project = self.create_project()
  1129. with self.feature(self.enabled_features):
  1130. response = self.client.get(
  1131. self.url,
  1132. data={
  1133. # make sure to query the project with 0 events
  1134. "project": project.id,
  1135. "start": iso_format(self.day_ago),
  1136. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1137. "interval": "1h",
  1138. "yAxis": "count()",
  1139. "orderby": ["-count()"],
  1140. "field": ["count()", "message", "user.email"],
  1141. "topEvents": 5,
  1142. },
  1143. format="json",
  1144. )
  1145. data = response.data["data"]
  1146. assert response.status_code == 200, response.content
  1147. # When there are no top events, we do not return an empty dict.
  1148. # Instead, we return a single zero-filled series for an empty graph.
  1149. assert [attrs for time, attrs in data] == [[{"count": 0}], [{"count": 0}]]
    def test_no_top_events_with_multi_axis(self):
        """With no top events and multiple axes, zero-filled series are keyed by the empty string."""
        project = self.create_project()
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    # make sure to query the project with 0 events
                    "project": project.id,
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": ["count()", "count_unique(user)"],
                    "orderby": ["-count()"],
                    "field": ["count()", "count_unique(user)", "message", "user.email"],
                    "topEvents": 5,
                },
                format="json",
            )
        assert response.status_code == 200
        # the placeholder series lives under the "" key
        data = response.data[""]
        assert [attrs for time, attrs in data["count()"]["data"]] == [
            [{"count": 0}],
            [{"count": 0}],
        ]
        assert [attrs for time, attrs in data["count_unique(user)"]["data"]] == [
            [{"count": 0}],
            [{"count": 0}],
        ]
    def test_simple_top_events(self):
        """The five highest-count groups each get a keyed series, plus an "Other" bucket."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "user.email"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        # 5 top events + "Other"
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            # series keys are the comma-joined grouped field values
            results = data[
                ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
            ]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for _, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        # the two fixtures not in the top 5 contribute 2 + 1 = 3 events
        assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
  1208. def test_tag_with_conflicting_function_alias_simple(self):
  1209. event_data = {
  1210. "data": {
  1211. "message": "poof",
  1212. "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
  1213. "user": {"email": self.user.email},
  1214. "tags": {"count": "9001"},
  1215. "fingerprint": ["group1"],
  1216. },
  1217. "project": self.project2,
  1218. "count": 7,
  1219. }
  1220. for i in range(event_data["count"]):
  1221. event_data["data"]["event_id"] = f"a{i}" * 16
  1222. self.store_event(event_data["data"], project_id=event_data["project"].id)
  1223. # Query for count and count()
  1224. data = {
  1225. "start": iso_format(self.day_ago),
  1226. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1227. "interval": "1h",
  1228. "yAxis": "count()",
  1229. "orderby": ["-count()"],
  1230. "field": ["count()", "count"],
  1231. "topEvents": 5,
  1232. "partial": 1,
  1233. }
  1234. with self.feature(self.enabled_features):
  1235. response = self.client.get(self.url, data, format="json")
  1236. assert response.status_code == 200
  1237. assert response.data["9001"]["data"][0][1] == [{"count": 7}]
  1238. data["query"] = "count:9001"
  1239. with self.feature(self.enabled_features):
  1240. response = self.client.get(self.url, data, format="json")
  1241. assert response.status_code == 200
  1242. assert response.data["9001"]["data"][0][1] == [{"count": 7}]
  1243. data["query"] = "count:abc"
  1244. with self.feature(self.enabled_features):
  1245. response = self.client.get(self.url, data, format="json")
  1246. assert response.status_code == 200
  1247. assert all([interval[1][0]["count"] == 0 for interval in response.data["data"]])
    @pytest.mark.xfail(
        reason="The response.data[Other] returns 15 locally and returns 16 or 15 remotely."
    )
    def test_tag_with_conflicting_function_alias_with_other_single_grouping(self):
        """Top-2 on the ``count`` tag with an "Other" bucket; known-flaky Other count."""
        event_data = [
            {
                "data": {
                    "message": "poof",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "user": {"email": self.user.email},
                    "tags": {"count": "9001"},
                    "fingerprint": ["group1"],
                },
                "project": self.project2,
                "count": 7,
            },
            {
                "data": {
                    "message": "poof2",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "user": {"email": self.user.email},
                    "tags": {"count": "abc"},
                    "fingerprint": ["group1"],
                },
                "project": self.project2,
                "count": 3,
            },
        ]
        for index, event in enumerate(event_data):
            for i in range(event["count"]):
                event["data"]["event_id"] = f"{index}{i}" * 16
                self.store_event(event["data"], project_id=event["project"].id)

        # Query for count and count()
        data = {
            "start": iso_format(self.day_ago),
            "end": iso_format(self.day_ago + timedelta(hours=1)),
            "interval": "1h",
            "yAxis": "count()",
            "orderby": ["-count"],
            "field": ["count()", "count"],
            "topEvents": 2,
            "partial": 1,
        }
        with self.feature(self.enabled_features):
            response = self.client.get(self.url, data, format="json")
        assert response.status_code == 200
        # series keyed by the tag values; setUp events fall into "Other"
        assert response.data["9001"]["data"][0][1] == [{"count": 7}]
        assert response.data["abc"]["data"][0][1] == [{"count": 3}]
        assert response.data["Other"]["data"][0][1] == [{"count": 16}]
    def test_tag_with_conflicting_function_alias_with_other_multiple_groupings(self):
        """Top-2 grouped on both message and the ``count`` tag, with an "Other" bucket."""
        event_data = [
            {
                "data": {
                    "message": "abc",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "user": {"email": self.user.email},
                    "tags": {"count": "2"},
                    "fingerprint": ["group1"],
                },
                "project": self.project2,
                "count": 3,
            },
            {
                "data": {
                    "message": "def",
                    "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
                    "user": {"email": self.user.email},
                    "tags": {"count": "9001"},
                    "fingerprint": ["group1"],
                },
                "project": self.project2,
                "count": 7,
            },
        ]
        for index, event in enumerate(event_data):
            for i in range(event["count"]):
                event["data"]["event_id"] = f"{index}{i}" * 16
                self.store_event(event["data"], project_id=event["project"].id)

        # Query for count and count()
        data = {
            "start": iso_format(self.day_ago),
            "end": iso_format(self.day_ago + timedelta(hours=2)),
            "interval": "2d",
            "yAxis": "count()",
            "orderby": ["-count"],
            "field": ["count()", "count", "message"],
            "topEvents": 2,
            "partial": 1,
        }
        with self.feature(self.enabled_features):
            response = self.client.get(self.url, data, format="json")
        assert response.status_code == 200
        # series keyed by comma-joined grouped values (message,count-tag)
        assert response.data["abc,2"]["data"][0][1] == [{"count": 3}]
        assert response.data["def,9001"]["data"][0][1] == [{"count": 7}]
        assert response.data["Other"]["data"][0][1] == [{"count": 25}]
  1343. def test_group_id_tag_simple(self):
  1344. event_data = {
  1345. "data": {
  1346. "message": "poof",
  1347. "timestamp": iso_format(self.day_ago + timedelta(minutes=2)),
  1348. "user": {"email": self.user.email},
  1349. "tags": {"group_id": "the tag"},
  1350. "fingerprint": ["group1"],
  1351. },
  1352. "project": self.project2,
  1353. "count": 7,
  1354. }
  1355. for i in range(event_data["count"]):
  1356. event_data["data"]["event_id"] = f"a{i}" * 16
  1357. self.store_event(event_data["data"], project_id=event_data["project"].id)
  1358. data = {
  1359. "start": iso_format(self.day_ago),
  1360. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1361. "interval": "1h",
  1362. "yAxis": "count()",
  1363. "orderby": ["-count()"],
  1364. "field": ["count()", "group_id"],
  1365. "topEvents": 5,
  1366. "partial": 1,
  1367. }
  1368. with self.feature(self.enabled_features):
  1369. response = self.client.get(self.url, data, format="json")
  1370. assert response.status_code == 200, response.content
  1371. assert response.data["the tag"]["data"][0][1] == [{"count": 7}]
  1372. data["query"] = 'group_id:"the tag"'
  1373. with self.feature(self.enabled_features):
  1374. response = self.client.get(self.url, data, format="json")
  1375. assert response.status_code == 200
  1376. assert response.data["the tag"]["data"][0][1] == [{"count": 7}]
  1377. data["query"] = "group_id:abc"
  1378. with self.feature(self.enabled_features):
  1379. response = self.client.get(self.url, data, format="json")
  1380. assert response.status_code == 200
  1381. assert all([interval[1][0]["count"] == 0 for interval in response.data["data"]])
  1382. def test_top_events_limits(self):
  1383. data = {
  1384. "start": iso_format(self.day_ago),
  1385. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1386. "interval": "1h",
  1387. "yAxis": "count()",
  1388. "orderby": ["-count()"],
  1389. "field": ["count()", "message", "user.email"],
  1390. }
  1391. with self.feature(self.enabled_features):
  1392. data["topEvents"] = MAX_TOP_EVENTS + 1
  1393. response = self.client.get(self.url, data, format="json")
  1394. assert response.status_code == 400
  1395. data["topEvents"] = 0
  1396. response = self.client.get(self.url, data, format="json")
  1397. assert response.status_code == 400
  1398. data["topEvents"] = "a"
  1399. response = self.client.get(self.url, data, format="json")
  1400. assert response.status_code == 400
    def test_top_events_with_projects(self):
        """Grouping on the project field keys each top series by message,project-slug."""
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "project"],
                    "topEvents": 5,
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        # 5 top events + "Other"
        assert len(data) == 6
        for index, event in enumerate(self.events[:5]):
            message = event.message or event.transaction
            results = data[",".join([message, event.project.slug])]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
    def test_top_events_with_issue(self):
        """Grouping by issue shows "unknown" for events whose group was deleted."""
        # delete a group to make sure if this happens the value becomes unknown
        event_group = self.events[0].group
        event_group.delete()
        with self.feature(self.enabled_features):
            response = self.client.get(
                self.url,
                data={
                    "start": iso_format(self.day_ago),
                    "end": iso_format(self.day_ago + timedelta(hours=2)),
                    "interval": "1h",
                    "yAxis": "count()",
                    "orderby": ["-count()"],
                    "field": ["count()", "message", "issue"],
                    "topEvents": 5,
                    # transactions have no issue; restrict to error events
                    "query": "!event.type:transaction",
                },
                format="json",
            )
        data = response.data
        assert response.status_code == 200, response.content
        assert len(data) == 6
        for index, event in enumerate(self.events[:4]):
            message = event.message
            # Because we deleted the group for event 0
            if index == 0 or event.group is None:
                issue = "unknown"
            else:
                issue = event.group.qualified_short_id
            results = data[",".join([issue, message])]
            assert results["order"] == index
            assert [{"count": self.event_data[index]["count"]}] in [
                attrs for time, attrs in results["data"]
            ]
        other = data["Other"]
        assert other["order"] == 5
        assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
  1466. def test_top_events_with_transaction_status(self):
  1467. with self.feature(self.enabled_features):
  1468. response = self.client.get(
  1469. self.url,
  1470. data={
  1471. "start": iso_format(self.day_ago),
  1472. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1473. "interval": "1h",
  1474. "yAxis": "count()",
  1475. "orderby": ["-count()"],
  1476. "field": ["count()", "transaction.status"],
  1477. "topEvents": 5,
  1478. },
  1479. format="json",
  1480. )
  1481. data = response.data
  1482. assert response.status_code == 200, response.content
  1483. assert len(data) == 1
  1484. assert "ok" in data
@mock.patch("sentry.models.GroupManager.get_issues_mapping")
def test_top_events_with_unknown_issue(self, mock_issues_mapping):
    """When the issue mapping yields None for a group, the series key is "unknown"."""
    event = self.events[0]
    event_data = self.event_data[0]

    # ensure that the issue mapping returns None for the issue
    mock_issues_mapping.return_value = {event.group.id: None}

    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()"],
                "field": ["count()", "issue"],
                "topEvents": 5,
                # narrow the search to just one issue
                "query": f"issue.id:{event.group.id}",
            },
            format="json",
        )

    assert response.status_code == 200, response.content
    data = response.data
    assert len(data) == 1
    # the single series falls back to the "unknown" key
    results = data["unknown"]
    assert results["order"] == 0
    assert [{"count": event_data["count"]}] in [attrs for time, attrs in results["data"]]
@mock.patch(
    "sentry.search.events.builder.discover.raw_snql_query",
    side_effect=[{"data": [{"issue.id": 1}], "meta": []}, {"data": [], "meta": []}],
)
def test_top_events_with_issue_check_query_conditions(self, mock_query):
    """Intentionally separate from test_top_events_with_issue.

    This is to test against a bug where the condition for issues wasn't included and we'd be missing data for
    the interval since we'd cap out the max rows. This was not caught by the previous test since the results
    would still be correct given the smaller interval & lack of data
    """
    with self.feature(self.enabled_features):
        self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()"],
                "field": ["count()", "message", "issue"],
                "topEvents": 5,
                "query": "!event.type:transaction",
            },
            format="json",
        )

    # the second snql call (the timeseries query) must carry a WHERE condition
    # restricting to the issue ids returned by the first (top-events) query
    assert (
        Condition(Function("coalesce", [Column("group_id"), 0], "issue.id"), Op.IN, [1])
        in mock_query.mock_calls[1].args[0].query.where
    )
def test_top_events_with_functions(self):
    """Top events may be ordered by an aggregate function (p99) on transactions."""
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-p99()"],
                "field": ["transaction", "avg(transaction.duration)", "p99()"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    # only one transaction exists in the fixtures
    assert len(data) == 1
    results = data[self.transaction.transaction]
    assert results["order"] == 0
    # 3 events in the first hour bucket, none in the second
    assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
def test_top_events_with_functions_on_different_transactions(self):
    """Transaction2 has less events, but takes longer so order should be self.transaction then transaction2"""
    # store a second, longer-running transaction (4 minutes) under another name
    transaction_data = load_data("transaction")
    transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
    transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
    transaction_data["transaction"] = "/foo_bar/"
    transaction2 = self.store_event(transaction_data, project_id=self.project.id)

    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-p90()"],
                "field": ["transaction", "avg(transaction.duration)", "p90()"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 2

    # ordering is by p90 duration, not event count
    results = data[self.transaction.transaction]
    assert results["order"] == 1
    assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]

    results = data[transaction2.transaction]
    assert results["order"] == 0
    assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
def test_top_events_with_query(self):
    """A user-supplied query filters the candidate top events before ranking."""
    transaction_data = load_data("transaction")
    transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
    transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
    transaction_data["transaction"] = "/foo_bar/"
    self.store_event(transaction_data, project_id=self.project.id)

    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-p99()"],
                # restrict to the transaction stored above
                "query": "transaction:/foo_bar/",
                "field": ["transaction", "avg(transaction.duration)", "p99()"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 1
    transaction2_data = data["/foo_bar/"]
    assert transaction2_data["order"] == 0
    assert [attrs for time, attrs in transaction2_data["data"]] == [
        [{"count": 1}],
        [{"count": 0}],
    ]
def test_top_events_with_negated_condition(self):
    """A negated message filter excludes the first event from the top events."""
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()"],
                # exclude events[0] by message
                "query": f"!message:{self.events[0].message}",
                "field": ["message", "count()"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 6

    # events[0] is filtered out, so ranking starts at events[1]
    for index, event in enumerate(self.events[1:5]):
        message = event.message or event.transaction
        results = data[message]
        assert results["order"] == index
        assert [{"count": self.event_data[index + 1]["count"]}] in [
            attrs for _, attrs in results["data"]
        ]

    other = data["Other"]
    assert other["order"] == 5
    assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
def test_top_events_with_epm(self):
    """yAxis epm() reports counts normalized to events-per-minute per bucket."""
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "epm()",
                "orderby": ["-count()"],
                "field": ["message", "user.email", "count()"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 6

    for index, event in enumerate(self.events[:5]):
        message = event.message or event.transaction
        # series keys join the group-by values: "<message>,<user.email>"
        results = data[
            ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
        ]
        assert results["order"] == index
        # epm over a 1h bucket = count / (3600s / 60s per minute) = count / 60
        assert [{"count": self.event_data[index]["count"] / (3600.0 / 60.0)}] in [
            attrs for time, attrs in results["data"]
        ]

    other = data["Other"]
    assert other["order"] == 5
    assert [{"count": 0.05}] in [attrs for _, attrs in other["data"]]
def test_top_events_with_multiple_yaxis(self):
    """With two yAxis values each series nests one sub-series per axis."""
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": ["epm()", "count()"],
                "orderby": ["-count()"],
                "field": ["message", "user.email", "count()"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 6

    for index, event in enumerate(self.events[:5]):
        message = event.message or event.transaction
        results = data[
            ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
        ]
        assert results["order"] == index
        # sub-series keep the order the yAxis values were requested in
        assert results["epm()"]["order"] == 0
        assert results["count()"]["order"] == 1
        # epm over a 1h bucket = count / 60
        assert [{"count": self.event_data[index]["count"] / (3600.0 / 60.0)}] in [
            attrs for time, attrs in results["epm()"]["data"]
        ]
        assert [{"count": self.event_data[index]["count"]}] in [
            attrs for time, attrs in results["count()"]["data"]
        ]

    other = data["Other"]
    assert other["order"] == 5
    assert other["epm()"]["order"] == 0
    assert other["count()"]["order"] == 1
    assert [{"count": 0.05}] in [attrs for _, attrs in other["epm()"]["data"]]
    assert [{"count": 3}] in [attrs for _, attrs in other["count()"]["data"]]
def test_top_events_with_boolean(self):
    """Boolean fields (device.charging) render as "True"/"False" in series keys."""
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()"],
                "field": ["count()", "message", "device.charging"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 6

    for index, event in enumerate(self.events[:5]):
        message = event.message or event.transaction
        # fixture events all have device.charging False; key is "False,<message>"
        results = data[",".join(["False", message])]
        assert results["order"] == index
        assert [{"count": self.event_data[index]["count"]}] in [
            attrs for time, attrs in results["data"]
        ]

    other = data["Other"]
    assert other["order"] == 5
    assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
def test_top_events_with_error_unhandled(self):
    """Grouping by error.unhandled yields one series per handled/unhandled value."""
    self.login_as(user=self.user)
    project = self.create_project()

    # store one unhandled NDK crash alongside the existing (handled) fixtures
    prototype = load_data("android-ndk")
    prototype["event_id"] = "f" * 32
    prototype["message"] = "not handled"
    prototype["exception"]["values"][0]["value"] = "not handled"
    prototype["exception"]["values"][0]["mechanism"]["handled"] = False
    prototype["timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
    self.store_event(data=prototype, project_id=project.id)

    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()"],
                "field": ["count()", "error.unhandled"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    # one series for unhandled, one for everything else
    assert len(data) == 2
def test_top_events_with_timestamp(self):
    """Timestamp fields participate in the group-by key for top events."""
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()"],
                "query": "event.type:default",
                "field": ["count()", "message", "timestamp"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 6

    # Transactions won't be in the results because of the query
    # NOTE: this mutates the fixture lists so indices line up with the response
    del self.events[4]
    del self.event_data[4]

    for index, event in enumerate(self.events[:5]):
        # key is "<message>,<timestamp>"
        results = data[",".join([event.message, event.timestamp])]
        assert results["order"] == index
        assert [{"count": self.event_data[index]["count"]}] in [
            attrs for time, attrs in results["data"]
        ]

    other = data["Other"]
    assert other["order"] == 5
    assert [{"count": 1}] in [attrs for _, attrs in other["data"]]
  1806. def test_top_events_with_int(self):
  1807. with self.feature(self.enabled_features):
  1808. response = self.client.get(
  1809. self.url,
  1810. data={
  1811. "start": iso_format(self.day_ago),
  1812. "end": iso_format(self.day_ago + timedelta(hours=2)),
  1813. "interval": "1h",
  1814. "yAxis": "count()",
  1815. "orderby": ["-count()"],
  1816. "field": ["count()", "message", "transaction.duration"],
  1817. "topEvents": 5,
  1818. },
  1819. format="json",
  1820. )
  1821. data = response.data
  1822. assert response.status_code == 200, response.content
  1823. assert len(data) == 1
  1824. results = data[",".join([self.transaction.transaction, "120000"])]
  1825. assert results["order"] == 0
  1826. assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
def test_top_events_with_user(self):
    """User field renders as "email:<addr>" or "ip:<addr>" series keys."""
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                # secondary sort on user for a deterministic order between ties
                "orderby": ["-count()", "user"],
                "field": ["user", "count()"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 5

    assert data["email:bar@example.com"]["order"] == 1
    assert [attrs for time, attrs in data["email:bar@example.com"]["data"]] == [
        [{"count": 7}],
        [{"count": 0}],
    ]
    assert [attrs for time, attrs in data["ip:127.0.0.1"]["data"]] == [
        [{"count": 3}],
        [{"count": 0}],
    ]
def test_top_events_with_user_and_email(self):
    """Combining user and user.email joins both values into the series key."""
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()", "user"],
                "field": ["user", "user.email", "count()"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 5

    # key is "<user>,<user.email>"; email-less users get the literal "None"
    assert data["email:bar@example.com,bar@example.com"]["order"] == 1
    assert [attrs for time, attrs in data["email:bar@example.com,bar@example.com"]["data"]] == [
        [{"count": 7}],
        [{"count": 0}],
    ]
    assert [attrs for time, attrs in data["ip:127.0.0.1,None"]["data"]] == [
        [{"count": 3}],
        [{"count": 0}],
    ]
def test_top_events_with_user_display(self):
    """user.display resolves to email when present, otherwise the IP address."""
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()"],
                "field": ["message", "user.display", "count()"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 6

    for index, event in enumerate(self.events[:5]):
        message = event.message or event.transaction
        user = self.event_data[index]["data"]["user"]
        # mirror user.display resolution: email first, then ip_address
        results = data[
            ",".join([message, user.get("email", None) or user.get("ip_address", "None")])
        ]
        assert results["order"] == index
        assert [{"count": self.event_data[index]["count"]}] in [
            attrs for _, attrs in results["data"]
        ]

    other = data["Other"]
    assert other["order"] == 5
    assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
@pytest.mark.skip(reason="A query with group_id will not return transactions")
def test_top_events_none_filter(self):
    """When a field is None in one of the top events, make sure we filter by it

    In this case event[4] is a transaction and has no issue
    """
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()"],
                "field": ["count()", "issue"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 5

    for index, event in enumerate(self.events[:5]):
        # events with no group (e.g. transactions) render as "unknown"
        if event.group is None:
            issue = "unknown"
        else:
            issue = event.group.qualified_short_id
        results = data[issue]
        assert results["order"] == index
        assert [{"count": self.event_data[index]["count"]}] in [
            attrs for time, attrs in results["data"]
        ]
@pytest.mark.skip(reason="Invalid query - transaction events don't have group_id field")
def test_top_events_one_field_with_none(self):
    """Transactions have no issue, so the single series is keyed "unknown"."""
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()"],
                "query": "event.type:transaction",
                "field": ["count()", "issue"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 1
    results = data["unknown"]
    assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
    assert results["order"] == 0
def test_top_events_with_error_handled(self):
    """Grouping by error.handled yields a "1" (handled) and a "0" (unhandled) series."""
    data = self.event_data[0]
    data["data"]["level"] = "error"

    # store one handled error ...
    data["data"]["exception"] = {
        "values": [
            {
                "type": "ValidationError",
                "value": "Bad request",
                "mechanism": {"handled": True, "type": "generic"},
            }
        ]
    }
    self.store_event(data["data"], project_id=data["project"].id)

    # ... and one unhandled error from the same prototype
    data["data"]["exception"] = {
        "values": [
            {
                "type": "ValidationError",
                "value": "Bad request",
                "mechanism": {"handled": False, "type": "generic"},
            }
        ]
    }
    self.store_event(data["data"], project_id=data["project"].id)

    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()"],
                "field": ["count()", "error.handled"],
                "topEvents": 5,
                "query": "!event.type:transaction",
            },
            format="json",
        )

    assert response.status_code == 200, response.content
    data = response.data
    assert len(data) == 2

    results = data["1"]
    assert [attrs for time, attrs in results["data"]] == [[{"count": 20}], [{"count": 6}]]

    results = data["0"]
    assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
def test_top_events_with_aggregate_condition(self):
    """An aggregate filter (count():>4) limits which series qualify as top events."""
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()"],
                "field": ["message", "count()"],
                "query": "count():>4",
                "topEvents": 5,
            },
            format="json",
        )

    assert response.status_code == 200, response.content
    data = response.data
    # only three fixture messages have more than 4 events
    assert len(data) == 3

    for index, event in enumerate(self.events[:3]):
        message = event.message or event.transaction
        results = data[message]
        assert results["order"] == index
        assert [{"count": self.event_data[index]["count"]}] in [
            attrs for time, attrs in results["data"]
        ]
@pytest.mark.xfail(reason="There's only 2 rows total, which mean there shouldn't be other")
def test_top_events_with_to_other(self):
    """to_other() buckets events into "current" (matching release) vs "others"."""
    version = "version -@'\" 1.2,3+(4)"
    # the version as it must appear escaped inside the to_other() field string
    version_escaped = "version -@'\\\" 1.2,3+(4)"
    # every symbol is replaced with a underscore to make the alias
    version_alias = "version_______1_2_3__4_"

    # add an event in the current release
    event = self.event_data[0]
    event_data = event["data"].copy()
    event_data["event_id"] = uuid4().hex
    event_data["release"] = version
    self.store_event(event_data, project_id=event["project"].id)

    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                # the double underscores around the version alias is because of a comma and quote
                "orderby": [f"-to_other_release__{version_alias}__others_current"],
                "field": [
                    "count()",
                    f'to_other(release,"{version_escaped}",others,current)',
                ],
                "topEvents": 2,
            },
            format="json",
        )

    assert response.status_code == 200, response.content
    data = response.data
    assert len(data) == 2

    # exactly the one event stored above lands in "current"
    current = data["current"]
    assert current["order"] == 1
    assert sum(attrs[0]["count"] for _, attrs in current["data"]) == 1

    # everything else from the fixtures lands in "others"
    others = data["others"]
    assert others["order"] == 0
    assert sum(attrs[0]["count"] for _, attrs in others["data"]) == sum(
        event_data["count"] for event_data in self.event_data
    )
def test_top_events_with_equations(self):
    """Equation yAxis (count() / 100) scales each bucket's count accordingly."""
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "equation|count() / 100",
                "orderby": ["-count()"],
                "field": ["count()", "message", "user.email", "equation|count() / 100"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 6

    for index, event in enumerate(self.events[:5]):
        message = event.message or event.transaction
        results = data[
            ",".join([message, self.event_data[index]["data"]["user"].get("email", "None")])
        ]
        assert results["order"] == index
        # bucket values are the raw count divided by 100 per the equation
        assert [{"count": self.event_data[index]["count"] / 100}] in [
            attrs for time, attrs in results["data"]
        ]

    other = data["Other"]
    assert other["order"] == 5
    assert [{"count": 0.03}] in [attrs for _, attrs in other["data"]]
@mock.patch("sentry.snuba.discover.bulk_snql_query", return_value=[{"data": [], "meta": []}])
@mock.patch(
    "sentry.search.events.builder.discover.raw_snql_query",
    return_value={"data": [], "meta": []},
)
def test_invalid_interval(self, mock_raw_query, mock_bulk_query):
    """Intervals producing too many points are reset to a sane default granularity."""
    # case 1: plain (non-top-events) request at 1s over 2h -> 7,200 points;
    # goes through bulk_snql_query
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            format="json",
            data={
                "end": iso_format(before_now()),
                # 7,200 points for each event
                "start": iso_format(before_now(seconds=7200)),
                "field": ["count()", "issue"],
                "query": "",
                "interval": "1s",
                "yAxis": "count()",
            },
        )
    assert response.status_code == 200
    assert mock_bulk_query.call_count == 1

    # case 2: same range but as a top-events request -> points doubled
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            format="json",
            data={
                "end": iso_format(before_now()),
                "start": iso_format(before_now(seconds=7200)),
                "field": ["count()", "issue"],
                "query": "",
                "interval": "1s",
                "yAxis": "count()",
                # 7,200 points for each event * 2, should error
                "topEvents": 2,
            },
        )
    assert response.status_code == 200
    assert mock_raw_query.call_count == 2
    # Should've reset to the default for between 1 and 24h
    assert mock_raw_query.mock_calls[1].args[0].query.granularity.granularity == 300

    # case 3: just under the point limit -> interval is honored
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            format="json",
            data={
                "end": iso_format(before_now()),
                # 1999 points * 5 events should just be enough to not error
                "start": iso_format(before_now(seconds=1999)),
                "field": ["count()", "issue"],
                "query": "",
                "interval": "1s",
                "yAxis": "count()",
                "topEvents": 5,
            },
        )
    assert response.status_code == 200
    assert mock_raw_query.call_count == 4
    # Should've left the interval alone since we're just below the limit
    assert mock_raw_query.mock_calls[3].args[0].query.granularity.granularity == 1

    # case 4: zero-length interval over 24h -> falls back to the 24h default
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            format="json",
            data={
                "end": iso_format(before_now()),
                "start": iso_format(before_now(hours=24)),
                "field": ["count()", "issue"],
                "query": "",
                "interval": "0d",
                "yAxis": "count()",
                "topEvents": 5,
            },
        )
    assert response.status_code == 200
    assert mock_raw_query.call_count == 6
    # Should've default to 24h's default of 5m
    assert mock_raw_query.mock_calls[5].args[0].query.granularity.granularity == 300
def test_top_events_timestamp_fields(self):
    """timestamp, timestamp.to_hour and timestamp.to_day all join into series keys."""
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            format="json",
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()"],
                "field": ["count()", "timestamp", "timestamp.to_hour", "timestamp.to_day"],
                "topEvents": 5,
            },
        )
    assert response.status_code == 200
    data = response.data
    assert len(data) == 3

    # these are the timestamps corresponding to the events stored
    timestamps = [
        self.day_ago + timedelta(minutes=2),
        self.day_ago + timedelta(hours=1, minutes=2),
        self.day_ago + timedelta(minutes=4),
    ]
    timestamp_hours = [timestamp.replace(minute=0, second=0) for timestamp in timestamps]
    timestamp_days = [timestamp.replace(hour=0, minute=0, second=0) for timestamp in timestamps]

    for ts, ts_hr, ts_day in zip(timestamps, timestamp_hours, timestamp_days):
        # NOTE(review): key order here is timestamp, to_day, to_hour — it does not
        # match the requested field order; presumably the endpoint sorts key parts
        key = f"{iso_format(ts)}+00:00,{iso_format(ts_day)}+00:00,{iso_format(ts_hr)}+00:00"
        # sum counts of all stored events sharing this exact timestamp
        count = sum(
            e["count"] for e in self.event_data if e["data"]["timestamp"] == iso_format(ts)
        )
        results = data[key]
        assert [{"count": count}] in [attrs for time, attrs in results["data"]]
def test_top_events_other_with_matching_columns(self):
    """A tag value shared by every event still produces distinct per-message series."""
    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()"],
                "field": ["count()", "tags[shared-tag]", "message"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 6

    for index, event in enumerate(self.events[:5]):
        message = event.message or event.transaction
        # every fixture event carries shared-tag=yup; key is "<message>,yup"
        results = data[",".join([message, "yup"])]
        assert results["order"] == index
        assert [{"count": self.event_data[index]["count"]}] in [
            attrs for _, attrs in results["data"]
        ]

    other = data["Other"]
    assert other["order"] == 5
    assert [{"count": 3}] in [attrs for _, attrs in other["data"]]
def test_top_events_with_field_overlapping_other_key(self):
    """A real field value colliding with the "Other" key is disambiguated."""
    transaction_data = load_data("transaction")
    transaction_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=2))
    transaction_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=6))
    # deliberately name the transaction the same as the reserved "Other" key
    transaction_data["transaction"] = OTHER_KEY

    # store 5 copies so it ranks inside the top events
    for i in range(5):
        data = transaction_data.copy()
        data["event_id"] = "ab" + f"{i}" * 30
        data["contexts"]["trace"]["span_id"] = "ab" + f"{i}" * 14
        self.store_event(data, project_id=self.project.id)

    with self.feature(self.enabled_features):
        response = self.client.get(
            self.url,
            data={
                "start": iso_format(self.day_ago),
                "end": iso_format(self.day_ago + timedelta(hours=2)),
                "interval": "1h",
                "yAxis": "count()",
                "orderby": ["-count()"],
                "field": ["count()", "message"],
                "topEvents": 5,
            },
            format="json",
        )

    data = response.data
    assert response.status_code == 200, response.content
    assert len(data) == 6

    # the colliding series is suffixed with its field name
    assert f"{OTHER_KEY} (message)" in data
    results = data[f"{OTHER_KEY} (message)"]
    assert [{"count": 5}] in [attrs for _, attrs in results["data"]]

    other = data["Other"]
    assert other["order"] == 5
    assert [{"count": 4}] in [attrs for _, attrs in other["data"]]
  2281. def test_top_events_can_exclude_other_series(self):
  2282. with self.feature(self.enabled_features):
  2283. response = self.client.get(
  2284. self.url,
  2285. data={
  2286. "start": iso_format(self.day_ago),
  2287. "end": iso_format(self.day_ago + timedelta(hours=2)),
  2288. "interval": "1h",
  2289. "yAxis": "count()",
  2290. "orderby": ["count()"],
  2291. "field": ["count()", "message"],
  2292. "topEvents": 5,
  2293. "excludeOther": "1",
  2294. },
  2295. format="json",
  2296. )
  2297. data = response.data
  2298. assert response.status_code == 200, response.content
  2299. assert len(data) == 5
  2300. assert "Other" not in response.data
  2301. @pytest.mark.xfail(reason="Started failing on ClickHouse 21.8")
  2302. def test_top_events_with_equation_including_unselected_fields_passes_field_validation(self):
  2303. with self.feature(self.enabled_features):
  2304. response = self.client.get(
  2305. self.url,
  2306. data={
  2307. "start": iso_format(self.day_ago),
  2308. "end": iso_format(self.day_ago + timedelta(hours=2)),
  2309. "interval": "1h",
  2310. "yAxis": "count()",
  2311. "orderby": ["-equation[0]"],
  2312. "field": ["count()", "message", "equation|count_unique(user) * 2"],
  2313. "topEvents": 5,
  2314. },
  2315. format="json",
  2316. )
  2317. data = response.data
  2318. assert response.status_code == 200, response.content
  2319. assert len(data) == 6
  2320. other = data["Other"]
  2321. assert other["order"] == 5
  2322. assert [{"count": 4}] in [attrs for _, attrs in other["data"]]
  2323. def test_top_events_boolean_condition_and_project_field(self):
  2324. with self.feature(self.enabled_features):
  2325. response = self.client.get(
  2326. self.url,
  2327. data={
  2328. "start": iso_format(self.day_ago),
  2329. "end": iso_format(self.day_ago + timedelta(hours=2)),
  2330. "interval": "1h",
  2331. "yAxis": "count()",
  2332. "orderby": ["-count()"],
  2333. "field": ["project", "count()"],
  2334. "topEvents": 5,
  2335. "query": "event.type:transaction (transaction:*a OR transaction:b*)",
  2336. },
  2337. format="json",
  2338. )
  2339. assert response.status_code == 200
  2340. @region_silo_test
  2341. class OrganizationEventsStatsProfileFunctionDatasetEndpointTest(
  2342. APITestCase, ProfilesSnubaTestCase, SearchIssueTestMixin
  2343. ):
  2344. endpoint = "sentry-api-0-organization-events-stats"
  2345. def setUp(self):
  2346. super().setUp()
  2347. self.login_as(user=self.user)
  2348. self.one_day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
  2349. self.two_days_ago = before_now(days=2).replace(hour=10, minute=0, second=0, microsecond=0)
  2350. self.three_days_ago = before_now(days=3).replace(hour=10, minute=0, second=0, microsecond=0)
  2351. self.project = self.create_project()
  2352. self.url = reverse(
  2353. "sentry-api-0-organization-events-stats",
  2354. kwargs={"organization_slug": self.project.organization.slug},
  2355. )
  2356. def test_functions_dataset_simple(self):
  2357. self.store_functions(
  2358. [
  2359. {
  2360. "self_times_ns": [100 for _ in range(100)],
  2361. "package": "foo",
  2362. "function": "bar",
  2363. "in_app": True,
  2364. },
  2365. ],
  2366. project=self.project,
  2367. timestamp=self.two_days_ago,
  2368. )
  2369. data = {
  2370. "dataset": "profileFunctions",
  2371. "start": iso_format(self.three_days_ago),
  2372. "end": iso_format(self.one_day_ago),
  2373. "interval": "1d",
  2374. "yAxis": ["cpm()", "p95(function.duration)"],
  2375. }
  2376. response = self.client.get(self.url, data=data, format="json")
  2377. assert response.status_code == 200, response.content
  2378. assert sum(row[1][0]["count"] for row in response.data["cpm()"]["data"]) == pytest.approx(
  2379. 100 / ((self.one_day_ago - self.three_days_ago).total_seconds() / 60), rel=1e-3
  2380. )
  2381. assert any(
  2382. row[1][0]["count"] > 0 for row in response.data["p95(function.duration)"]["data"]
  2383. )
  2384. for y_axis in ["cpm()", "p95(function.duration)"]:
  2385. assert response.data[y_axis]["meta"]["fields"] == {
  2386. "time": "date",
  2387. "cpm": "number",
  2388. "p95_function_duration": "duration",
  2389. }
  2390. assert response.data[y_axis]["meta"]["units"] == {
  2391. "time": None,
  2392. "cpm": None,
  2393. "p95_function_duration": "nanosecond",
  2394. }
  2395. @region_silo_test
  2396. class OrganizationEventsStatsTopNEventsProfileFunctionDatasetEndpointTest(
  2397. APITestCase, ProfilesSnubaTestCase, SearchIssueTestMixin
  2398. ):
  2399. endpoint = "sentry-api-0-organization-events-stats"
  2400. def setUp(self):
  2401. super().setUp()
  2402. self.login_as(user=self.user)
  2403. self.one_day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
  2404. self.two_days_ago = before_now(days=2).replace(hour=10, minute=0, second=0, microsecond=0)
  2405. self.three_days_ago = before_now(days=3).replace(hour=10, minute=0, second=0, microsecond=0)
  2406. self.project = self.create_project()
  2407. self.url = reverse(
  2408. "sentry-api-0-organization-events-stats",
  2409. kwargs={"organization_slug": self.project.organization.slug},
  2410. )
    def test_functions_dataset_simple(self):
        """Top-N events over the profileFunctions dataset: per-function cpm()
        reflects each function's sample count, and p95 has data for both."""
        # Two functions in the same package with 100 and 10 samples
        # respectively, stored inside the queried window.
        self.store_functions(
            [
                {
                    "self_times_ns": [100 for _ in range(100)],
                    "package": "pkg",
                    "function": "foo",
                    "in_app": True,
                },
                {
                    "self_times_ns": [100 for _ in range(10)],
                    "package": "pkg",
                    "function": "bar",
                    "in_app": True,
                },
            ],
            project=self.project,
            timestamp=self.two_days_ago,
        )
        data = {
            "dataset": "profileFunctions",
            "field": ["function", "count()"],
            "start": iso_format(self.three_days_ago),
            "end": iso_format(self.one_day_ago),
            "yAxis": ["cpm()", "p95(function.duration)"],
            "interval": "1d",
            "topEvents": 2,
            "excludeOther": 1,
        }
        response = self.client.get(self.url, data=data, format="json")
        assert response.status_code == 200, response.content
        # cpm() = calls per minute over the queried window: 100 samples for
        # "foo", 10 for "bar".
        assert sum(
            row[1][0]["count"] for row in response.data["foo"]["cpm()"]["data"]
        ) == pytest.approx(
            100 / ((self.one_day_ago - self.three_days_ago).total_seconds() / 60), rel=1e-3
        )
        assert sum(
            row[1][0]["count"] for row in response.data["bar"]["cpm()"]["data"]
        ) == pytest.approx(
            10 / ((self.one_day_ago - self.three_days_ago).total_seconds() / 60), rel=1e-3
        )
        # Both top series should have at least one non-empty p95 bucket.
        assert any(
            row[1][0]["count"] > 0 for row in response.data["foo"]["p95(function.duration)"]["data"]
        )
        assert any(
            row[1][0]["count"] > 0 for row in response.data["bar"]["p95(function.duration)"]["data"]
        )