# test_organization_sessions.py
  1. import datetime
  2. from unittest.mock import patch
  3. from uuid import uuid4
  4. import pytest
  5. from django.urls import reverse
  6. from django.utils import timezone
  7. from sentry import release_health
  8. from sentry.models.releaseprojectenvironment import ReleaseProjectEnvironment
  9. from sentry.release_health.metrics import MetricsReleaseHealthBackend
  10. from sentry.snuba.metrics import to_intervals
  11. from sentry.testutils.cases import APITestCase, BaseMetricsTestCase, SnubaTestCase
  12. from sentry.testutils.helpers.datetime import freeze_time
  13. from sentry.testutils.helpers.link_header import parse_link_header
  14. from sentry.testutils.silo import region_silo_test
  15. from sentry.utils.cursors import Cursor
  16. pytestmark = pytest.mark.sentry_metrics
  17. def result_sorted(result):
  18. """sort the groups of the results array by the `by` object, ensuring a stable order"""
  19. def stable_dict(d):
  20. return tuple(sorted(d.items(), key=lambda t: t[0]))
  21. result["groups"].sort(key=lambda group: stable_dict(group["by"]))
  22. return result
# Reference times: "now" is taken at import; the mocked test clock is pinned
# one day in the past so all stored sessions fall inside a default statsPeriod.
ONE_DAY_AGO = datetime.datetime.now(tz=datetime.UTC) - datetime.timedelta(days=1)
TWO_DAYS_AGO = datetime.datetime.now(tz=datetime.UTC) - datetime.timedelta(days=2)
# Fixed wall-clock moment used with @freeze_time throughout the tests.
MOCK_DATETIME = ONE_DAY_AGO.replace(hour=12, minute=27, second=28, microsecond=303000)
MOCK_DATETIME_PLUS_TEN_MINUTES = MOCK_DATETIME + datetime.timedelta(minutes=10)
MOCK_DATETIME_PLUS_ONE_HOUR = MOCK_DATETIME + datetime.timedelta(hours=1)
# Timestamp format produced/consumed by the sessions endpoint.
SNUBA_TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
# Midnight of the mock day (microsecond is not reset here).
MOCK_DATETIME_START_OF_DAY = MOCK_DATETIME.replace(hour=0, minute=0, second=0)
TIMESTAMP = MOCK_DATETIME.timestamp()
RECEIVED = TIMESTAMP
SESSION_STARTED = TIMESTAMP // 3600 * 3600  # round to the hour
# Base payload for a single session; make_session() copies and overrides it.
TEMPLATE = {
    "distinct_id": "00000000-0000-0000-0000-000000000000",
    "status": "exited",
    "seq": 0,
    "release": "foo@1.0.0",
    "environment": "production",
    "retention_days": 90,
    "duration": 123.4,
    "errors": 0,
    "started": SESSION_STARTED,
    "received": RECEIVED,
}
  45. def make_duration(kwargs):
  46. """Randomish but deterministic duration"""
  47. return float(len(str(kwargs)))
  48. def make_session(project, **kwargs):
  49. return dict(
  50. dict(
  51. TEMPLATE,
  52. session_id=uuid4().hex,
  53. org_id=project.organization_id,
  54. project_id=project.id,
  55. duration=make_duration(kwargs),
  56. ),
  57. **kwargs,
  58. )
  59. def adjust_start(start: datetime.datetime, interval: int) -> datetime.datetime:
  60. # align start and end to the beginning of the intervals
  61. start, _end, _num_intervals = to_intervals(
  62. start, start + datetime.timedelta(minutes=1), interval
  63. )
  64. return start
  65. def adjust_end(end: datetime.datetime, interval: int) -> datetime.datetime:
  66. # align start and end to the beginning of the intervals
  67. _start, end, _num_intervals = to_intervals(end - datetime.timedelta(minutes=1), end, interval)
  68. return end
  69. class OrganizationSessionsEndpointTest(APITestCase, SnubaTestCase):
    def setUp(self):
        """Run the base test setup, then build the shared org/project/session fixture."""
        super().setUp()
        self.setup_fixture()
    def setup_fixture(self):
        """Create the organizations, projects, users and sessions shared by all tests.

        Layout: organization1 is the default test org; organization2 owns
        project4 (inaccessible to the default user); organization3 has no
        projects. user2 is a member of organization1 with no team memberships.
        """
        self.organization1 = self.organization
        self.organization2 = self.create_organization()
        self.organization3 = self.create_organization()
        self.project1 = self.project
        self.project2 = self.create_project()
        self.project3 = self.create_project()
        self.project4 = self.create_project(organization=self.organization2)
        self.user2 = self.create_user(is_superuser=False)
        self.create_member(
            user=self.user2, organization=self.organization1, role="member", teams=[]
        )
        self.create_member(user=self.user, organization=self.organization3, role="admin", teams=[])
        self.create_environment(self.project2, name="development")
        self.bulk_store_sessions(
            [
                # project1: four sessions spread across the day, one on release foo@1.1.0
                make_session(self.project1, started=SESSION_STARTED + 12 * 60),
                make_session(self.project1, started=SESSION_STARTED + 24 * 60, release="foo@1.1.0"),
                make_session(self.project1, started=SESSION_STARTED - 60 * 60),
                make_session(self.project1, started=SESSION_STARTED - 12 * 60 * 60),
                # project2: one crashed session, one in the "development" environment
                make_session(self.project2, status="crashed"),
                make_session(self.project2, environment="development"),
                # project3: errored sessions; two share the same distinct user id
                make_session(self.project3, errors=1, release="foo@1.2.0"),
                make_session(
                    self.project3,
                    distinct_id="39887d89-13b2-4c84-8c23-5d13d2102664",
                    started=SESSION_STARTED - 60 * 60,
                ),
                make_session(
                    self.project3, distinct_id="39887d89-13b2-4c84-8c23-5d13d2102664", errors=1
                ),
                # project4 belongs to organization2; must stay invisible to self.user
                make_session(self.project4),
            ]
        )
  107. def do_request(self, query, user=None, org=None):
  108. self.login_as(user=user or self.user)
  109. url = reverse(
  110. "sentry-api-0-organization-sessions",
  111. kwargs={"organization_slug": (org or self.organization).slug},
  112. )
  113. return self.client.get(url, query, format="json")
  114. def test_empty_request(self):
  115. response = self.do_request({})
  116. assert response.status_code == 400, response.content
  117. assert response.data == {"detail": 'Request is missing a "field"'}
  118. def test_inaccessible_project(self):
  119. response = self.do_request({"project": [self.project4.id]})
  120. assert response.status_code == 403, response.content
  121. assert response.data == {"detail": "You do not have permission to perform this action."}
  122. def test_unknown_field(self):
  123. response = self.do_request({"field": ["summ(session)"]})
  124. assert response.status_code == 400, response.content
  125. assert response.data == {"detail": 'Invalid field: "summ(session)"'}
  126. def test_unknown_groupby(self):
  127. response = self.do_request({"field": ["sum(session)"], "groupBy": ["environment_"]})
  128. assert response.status_code == 400, response.content
  129. assert response.data == {"detail": 'Invalid groupBy: "environment_"'}
  130. def test_illegal_groupby(self):
  131. response = self.do_request({"field": ["sum(session)"], "groupBy": ["issue.id"]})
  132. assert response.status_code == 400, response.content
  133. assert response.data == {"detail": 'Invalid groupBy: "issue.id"'}
  134. def test_invalid_query(self):
  135. response = self.do_request(
  136. {"statsPeriod": "1d", "field": ["sum(session)"], "query": ["foo:bar"]}
  137. )
  138. assert response.status_code == 400, response.content
  139. assert response.data["detail"] == "Invalid search filter: foo"
  140. response = self.do_request(
  141. {
  142. "statsPeriod": "1d",
  143. "field": ["sum(session)"],
  144. "query": ["release:foo-bar@1.2.3 (123)"],
  145. }
  146. )
  147. assert response.status_code == 400, response.content
  148. # TODO: it would be good to provide a better error here,
  149. # since its not obvious where `message` comes from.
  150. assert response.data["detail"] == "Invalid search filter: message"
  151. def test_illegal_query(self):
  152. response = self.do_request(
  153. {"statsPeriod": "1d", "field": ["sum(session)"], "query": ["issue.id:123"]}
  154. )
  155. assert response.status_code == 400, response.content
  156. assert response.data["detail"] == "Invalid search filter: issue.id"
  157. def test_too_many_points(self):
  158. # default statsPeriod is 90d
  159. response = self.do_request({"field": ["sum(session)"], "interval": "1h"})
  160. assert response.status_code == 400, response.content
  161. assert response.data == {
  162. "detail": "Your interval and date range would create too many results. "
  163. "Use a larger interval, or a smaller date range."
  164. }
  165. @freeze_time(MOCK_DATETIME)
  166. def test_future_request(self):
  167. start = MOCK_DATETIME + datetime.timedelta(days=1)
  168. end = MOCK_DATETIME + datetime.timedelta(days=2)
  169. response = self.do_request(
  170. {
  171. "project": [-1],
  172. "interval": "1h",
  173. "field": ["sum(session)"],
  174. "start": start.strftime(SNUBA_TIME_FORMAT),
  175. "end": end.strftime(SNUBA_TIME_FORMAT),
  176. }
  177. )
  178. assert response.status_code == 200, response.content
    @freeze_time(MOCK_DATETIME)
    def test_timeseries_interval(self):
        """Timeseries buckets follow the requested interval (1d vs 6h)."""
        # 1d interval over a 1d period: two daily buckets (previous day + today).
        response = self.do_request(
            {"project": [-1], "statsPeriod": "1d", "interval": "1d", "field": ["sum(session)"]}
        )
        start_of_day_snuba_format = MOCK_DATETIME_START_OF_DAY.strftime(SNUBA_TIME_FORMAT)
        previous_start_of_day_snuba_format = (
            MOCK_DATETIME_START_OF_DAY - datetime.timedelta(days=1)
        ).strftime(SNUBA_TIME_FORMAT)
        expected_start = MOCK_DATETIME_START_OF_DAY - datetime.timedelta(days=1)
        expected_end = MOCK_DATETIME_START_OF_DAY + datetime.timedelta(days=1)
        assert response.status_code == 200, response.content
        assert result_sorted(response.data) == {
            "start": expected_start.strftime(SNUBA_TIME_FORMAT),
            "end": expected_end.strftime(SNUBA_TIME_FORMAT),
            "query": "",
            "intervals": [previous_start_of_day_snuba_format, start_of_day_snuba_format],
            "groups": [
                {"by": {}, "series": {"sum(session)": [0, 9]}, "totals": {"sum(session)": 9}}
            ],
        }
        # 6h interval over the same period: five 6-hour buckets, aligned via
        # adjust_start/adjust_end to the interval boundaries.
        response = self.do_request(
            {"project": [-1], "statsPeriod": "1d", "interval": "6h", "field": ["sum(session)"]}
        )
        six_hours = 6 * 60 * 60
        expected_start = adjust_start(TWO_DAYS_AGO.replace(hour=12, minute=0, second=0), six_hours)
        expected_end = adjust_end(MOCK_DATETIME.replace(minute=28, second=0), six_hours)
        assert response.status_code == 200, response.content
        assert result_sorted(response.data) == {
            "start": expected_start.strftime(SNUBA_TIME_FORMAT),
            "end": expected_end.strftime(SNUBA_TIME_FORMAT),
            "query": "",
            "intervals": [
                TWO_DAYS_AGO.replace(hour=12, minute=0, second=0).strftime(SNUBA_TIME_FORMAT),
                TWO_DAYS_AGO.replace(hour=18, minute=0, second=0).strftime(SNUBA_TIME_FORMAT),
                MOCK_DATETIME.replace(hour=0, minute=0, second=0).strftime(SNUBA_TIME_FORMAT),
                MOCK_DATETIME.replace(hour=6, minute=0, second=0).strftime(SNUBA_TIME_FORMAT),
                MOCK_DATETIME.replace(hour=12, minute=0, second=0).strftime(SNUBA_TIME_FORMAT),
            ],
            "groups": [
                {
                    "by": {},
                    "series": {"sum(session)": [0, 0, 1, 2, 6]},
                    "totals": {"sum(session)": 9},
                }
            ],
        }
    @freeze_time(MOCK_DATETIME)
    def test_user_all_accessible(self):
        """user2 (org member with teams=[], see setup_fixture) still sees org-wide totals."""
        response = self.do_request(
            {"project": [-1], "statsPeriod": "1d", "interval": "1d", "field": ["sum(session)"]},
            user=self.user2,
        )
        start_of_previous_day = MOCK_DATETIME_START_OF_DAY - datetime.timedelta(days=1)
        start_of_day_snuba_format = MOCK_DATETIME_START_OF_DAY.strftime(SNUBA_TIME_FORMAT)
        start_of_previous_day_snuba_format = start_of_previous_day.strftime(SNUBA_TIME_FORMAT)
        expected_start = MOCK_DATETIME_START_OF_DAY - datetime.timedelta(days=1)
        expected_end = MOCK_DATETIME_START_OF_DAY + datetime.timedelta(days=1)
        assert response.status_code == 200, response.content
        # Same 9 visible sessions as for the default user.
        assert result_sorted(response.data) == {
            "start": expected_start.strftime(SNUBA_TIME_FORMAT),
            "end": expected_end.strftime(SNUBA_TIME_FORMAT),
            "query": "",
            "intervals": [start_of_previous_day_snuba_format, start_of_day_snuba_format],
            "groups": [
                {"by": {}, "series": {"sum(session)": [0, 9]}, "totals": {"sum(session)": 9}}
            ],
        }
  247. def test_no_projects(self):
  248. response = self.do_request(
  249. {"project": [-1], "statsPeriod": "1d", "interval": "1d", "field": ["sum(session)"]},
  250. org=self.organization3,
  251. )
  252. assert response.status_code == 400, response.content
  253. assert response.data == {"detail": "No projects available"}
  254. @freeze_time(MOCK_DATETIME_PLUS_TEN_MINUTES)
  255. def test_minute_resolution(self):
  256. with self.feature("organizations:minute-resolution-sessions"):
  257. response = self.do_request(
  258. {
  259. "project": [self.project1.id, self.project2.id],
  260. "statsPeriod": "30m",
  261. "interval": "10m",
  262. "field": ["sum(session)"],
  263. }
  264. )
  265. assert response.status_code == 200, response.content
  266. ten_min = 10 * 60
  267. expected_start = adjust_start(
  268. MOCK_DATETIME.replace(hour=12, minute=0, second=0), ten_min
  269. )
  270. expected_end = adjust_end(MOCK_DATETIME.replace(hour=12, minute=38, second=0), ten_min)
  271. assert result_sorted(response.data) == {
  272. "start": expected_start.strftime(SNUBA_TIME_FORMAT),
  273. "end": expected_end.strftime(SNUBA_TIME_FORMAT),
  274. "query": "",
  275. "intervals": [
  276. *[
  277. MOCK_DATETIME.replace(hour=12, minute=min, second=0).strftime(
  278. SNUBA_TIME_FORMAT
  279. )
  280. for min in [0, 10, 20, 30]
  281. ],
  282. ],
  283. "groups": [
  284. {
  285. "by": {},
  286. "series": {"sum(session)": [2, 1, 1, 0]},
  287. "totals": {"sum(session)": 4},
  288. }
  289. ],
  290. }
    @freeze_time(MOCK_DATETIME_PLUS_TEN_MINUTES)
    def test_10s_resolution(self):
        """A 10s interval is honored only by the metrics backend; the sessions
        backend falls back to 1m resolution."""
        with self.feature("organizations:minute-resolution-sessions"):
            response = self.do_request(
                {
                    "project": [self.project1.id],
                    "statsPeriod": "1m",
                    "interval": "10s",
                    "field": ["sum(session)"],
                }
            )
            assert response.status_code == 200, response.content
            if release_health.backend.is_metrics_based():
                # With the metrics backend, we should get exactly what we asked for,
                # 6 intervals with 10 second length. However, since we add both the
                # starting and ending interval we get 7 intervals.
                assert len(response.data["intervals"]) == 7
            else:
                # With the sessions backend, the resolution will be 1m and will include starting and ending
                # minute so 2 intervals
                assert len(response.data["intervals"]) == 2
  312. @freeze_time(MOCK_DATETIME)
  313. def test_filter_projects(self):
  314. response = self.do_request(
  315. {
  316. "statsPeriod": "1d",
  317. "interval": "1d",
  318. "field": ["sum(session)"],
  319. "project": [self.project2.id, self.project3.id],
  320. }
  321. )
  322. assert response.status_code == 200, response.content
  323. assert result_sorted(response.data)["groups"] == [
  324. {"by": {}, "series": {"sum(session)": [0, 5]}, "totals": {"sum(session)": 5}}
  325. ]
    @freeze_time(MOCK_DATETIME)
    def test_anr_invalid_aggregates(self):
        """anr_rate()/crash_free_rate(user) produce data only on the metrics backend."""
        default_request = {
            "project": [-1],
            "statsPeriod": "1d",
            "interval": "1d",
            "field": ["anr_rate()", "crash_free_rate(user)"],
        }

        def req(**kwargs):
            # Issue the default request with any per-call overrides applied.
            return self.do_request(dict(default_request, **kwargs))

        response = req()
        assert response.status_code == 200
        if release_health.backend.is_metrics_based():
            # Both these fields are supported by the metrics backend
            assert response.data["groups"] == [
                {
                    "by": {},
                    "totals": {"anr_rate()": 0.0, "crash_free_rate(user)": 1.0},
                    "series": {"anr_rate()": [None, 0.0], "crash_free_rate(user)": [None, 1.0]},
                }
            ]
        else:
            # Both these fields are not supported by the sessions backend
            assert response.data["groups"] == []
        response = req(field=["anr_rate()", "sum(session)"])
        assert response.status_code == 200
        if release_health.backend.is_metrics_based():
            # Both these fields are supported by the metrics backend
            assert response.data["groups"] == [
                {
                    "by": {},
                    "totals": {"anr_rate()": 0.0, "sum(session)": 9},
                    "series": {"anr_rate()": [None, 0.0], "sum(session)": [0, 9]},
                }
            ]
        else:
            # Only sum(session) is supported by the sessions backend
            assert response.data["groups"] == [
                {"by": {}, "totals": {"sum(session)": 9}, "series": {"sum(session)": [0, 9]}}
            ]
    @freeze_time(MOCK_DATETIME)
    def test_filter_environment(self):
        """Environment filtering works via the search query and the request param."""
        # Filter through the search query string.
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "query": "environment:development",
            }
        )
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {"by": {}, "series": {"sum(session)": [0, 1]}, "totals": {"sum(session)": 1}}
        ]
        # Filter through the dedicated `environment` parameter; same result.
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "environment": ["development"],
            }
        )
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {"by": {}, "series": {"sum(session)": [0, 1]}, "totals": {"sum(session)": 1}}
        ]
    @freeze_time(MOCK_DATETIME)
    def test_filter_release(self):
        """Releases filter by exact match, OR-ed terms, and `in`-style lists."""
        # Exact release match.
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "query": "release:foo@1.1.0",
            }
        )
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {"by": {}, "series": {"sum(session)": [0, 1]}, "totals": {"sum(session)": 1}}
        ]
        # Two quoted releases combined with `or`.
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "query": 'release:"foo@1.1.0" or release:"foo@1.2.0"',
            }
        )
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {"by": {}, "series": {"sum(session)": [0, 2]}, "totals": {"sum(session)": 2}}
        ]
        # `release:[...]` list syntax, including a release with no sessions;
        # only releases that actually have data show up in the groups.
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "query": 'release:"foo@1.1.0" or release:["foo@1.2.0", release:"foo@1.3.0"]',
                "groupBy": ["release"],
            }
        )
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {
                "by": {"release": "foo@1.1.0"},
                "series": {"sum(session)": [0, 1]},
                "totals": {"sum(session)": 1},
            },
            {
                "by": {"release": "foo@1.2.0"},
                "series": {"sum(session)": [0, 1]},
                "totals": {"sum(session)": 1},
            },
        ]
  445. @freeze_time(MOCK_DATETIME)
  446. def test_filter_unknown_release(self):
  447. response = self.do_request(
  448. {
  449. "project": [-1],
  450. "statsPeriod": "1d",
  451. "interval": "1h",
  452. "field": ["sum(session)"],
  453. "query": "release:foo@6.6.6",
  454. "groupBy": "session.status",
  455. }
  456. )
  457. assert response.status_code == 200, response.content
    @freeze_time(MOCK_DATETIME)
    def test_filter_unknown_release_in(self):
        """`release:[...]` with an unknown release yields zeroed groups per status."""
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "query": "release:[foo@6.6.6]",
                "groupBy": "session.status",
            }
        )
        assert response.status_code == 200, response.content
        # All four session statuses are present, each with zero counts.
        assert result_sorted(response.data)["groups"] == [
            {
                "by": {"session.status": status},
                "series": {"sum(session)": [0, 0]},
                "totals": {"sum(session)": 0},
            }
            for status in ("abnormal", "crashed", "errored", "healthy")
        ]
    @freeze_time(MOCK_DATETIME)
    def test_groupby_project(self):
        """Grouping by project splits the 9 visible sessions 4/2/3 (see setup_fixture)."""
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "groupBy": ["project"],
            }
        )
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {
                "by": {"project": self.project1.id},
                "series": {"sum(session)": [0, 4]},
                "totals": {"sum(session)": 4},
            },
            {
                "by": {"project": self.project2.id},
                "series": {"sum(session)": [0, 2]},
                "totals": {"sum(session)": 2},
            },
            {
                "by": {"project": self.project3.id},
                "series": {"sum(session)": [0, 3]},
                "totals": {"sum(session)": 3},
            },
        ]
    @freeze_time(MOCK_DATETIME)
    def test_groupby_environment(self):
        """Grouping by environment: 1 development session vs 8 production ones."""
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "groupBy": ["environment"],
            }
        )
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {
                "by": {"environment": "development"},
                "series": {"sum(session)": [0, 1]},
                "totals": {"sum(session)": 1},
            },
            {
                "by": {"environment": "production"},
                "series": {"sum(session)": [0, 8]},
                "totals": {"sum(session)": 8},
            },
        ]
    @freeze_time(MOCK_DATETIME)
    def test_groupby_release(self):
        """Grouping by release: 7 sessions on the TEMPLATE default, 1 each on 1.1.0/1.2.0."""
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "groupBy": ["release"],
            }
        )
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {
                "by": {"release": "foo@1.0.0"},
                "series": {"sum(session)": [0, 7]},
                "totals": {"sum(session)": 7},
            },
            {
                "by": {"release": "foo@1.1.0"},
                "series": {"sum(session)": [0, 1]},
                "totals": {"sum(session)": 1},
            },
            {
                "by": {"release": "foo@1.2.0"},
                "series": {"sum(session)": [0, 1]},
                "totals": {"sum(session)": 1},
            },
        ]
    @freeze_time(MOCK_DATETIME)
    def test_groupby_status(self):
        """Grouping by session.status returns all four statuses, including empty ones."""
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "groupBy": ["session.status"],
            }
        )
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {
                # No abnormal sessions exist in the fixture.
                "by": {"session.status": "abnormal"},
                "series": {"sum(session)": [0, 0]},
                "totals": {"sum(session)": 0},
            },
            {
                "by": {"session.status": "crashed"},
                "series": {"sum(session)": [0, 1]},
                "totals": {"sum(session)": 1},
            },
            {
                "by": {"session.status": "errored"},
                "series": {"sum(session)": [0, 2]},
                "totals": {"sum(session)": 2},
            },
            {
                "by": {"session.status": "healthy"},
                "series": {"sum(session)": [0, 6]},
                "totals": {"sum(session)": 6},
            },
        ]
    @freeze_time(MOCK_DATETIME)
    def test_groupby_cross(self):
        """Grouping by release and environment together yields the cross product
        of combinations that actually have sessions."""
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "groupBy": ["release", "environment"],
            }
        )
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {
                "by": {"environment": "development", "release": "foo@1.0.0"},
                "series": {"sum(session)": [0, 1]},
                "totals": {"sum(session)": 1},
            },
            {
                "by": {"environment": "production", "release": "foo@1.0.0"},
                "series": {"sum(session)": [0, 6]},
                "totals": {"sum(session)": 6},
            },
            {
                "by": {"environment": "production", "release": "foo@1.1.0"},
                "series": {"sum(session)": [0, 1]},
                "totals": {"sum(session)": 1},
            },
            {
                "by": {"environment": "production", "release": "foo@1.2.0"},
                "series": {"sum(session)": [0, 1]},
                "totals": {"sum(session)": 1},
            },
        ]
@freeze_time(MOCK_DATETIME)
def test_users_groupby(self):
    """count_unique(user): ungrouped totals, then grouped by session.status.

    A user is attributed to the "errored" bucket only; healthy/crashed/
    abnormal buckets stay at 0 for the base fixture data.
    """
    response = self.do_request(
        {
            "project": [-1],
            "statsPeriod": "1d",
            "interval": "1d",
            "field": ["count_unique(user)"],
        }
    )
    assert response.status_code == 200, response.content
    assert result_sorted(response.data)["groups"] == [
        {
            "by": {},
            "series": {"count_unique(user)": [0, 1]},
            "totals": {"count_unique(user)": 1},
        }
    ]

    # Same field, now grouped by session.status:
    response = self.do_request(
        {
            "project": [-1],
            "statsPeriod": "1d",
            "interval": "1d",
            "field": ["count_unique(user)"],
            "groupBy": ["session.status"],
        }
    )
    assert response.status_code == 200, response.content
    assert result_sorted(response.data)["groups"] == [
        {
            "by": {"session.status": "abnormal"},
            "series": {"count_unique(user)": [0, 0]},
            "totals": {"count_unique(user)": 0},
        },
        {
            "by": {"session.status": "crashed"},
            "series": {"count_unique(user)": [0, 0]},
            "totals": {"count_unique(user)": 0},
        },
        {
            "by": {"session.status": "errored"},
            "series": {"count_unique(user)": [0, 1]},
            "totals": {"count_unique(user)": 1},
        },
        {
            "by": {"session.status": "healthy"},
            "series": {"count_unique(user)": [0, 0]},
            "totals": {"count_unique(user)": 0},
        },
    ]
# Shared expectations for the duration-percentile tests below.
# NOTE(review): these values presumably derive from the session durations
# created by the class fixture (not visible here) — confirm against setUp.
expected_duration_values = {
    "avg(session.duration)": 42375.0,
    "max(session.duration)": 80000.0,
    "p50(session.duration)": 33500.0,
    "p75(session.duration)": 53750.0,
    "p90(session.duration)": 71600.0,
    "p95(session.duration)": 75800.0,
    "p99(session.duration)": 79159.99999999999,
}
@freeze_time(MOCK_DATETIME)
def test_users_groupby_status_advanced(self):
    """A user is counted once per status their sessions reached.

    Fixture: user1's session ends crashed (via errored), user2 has a
    healthy session plus one that ends abnormal, user3's session stays
    errored, and 3 extra users are purely healthy.
    """
    project = self.create_project()

    user1 = uuid4().hex
    session1 = uuid4().hex

    user2 = uuid4().hex
    session2a = uuid4().hex
    session2b = uuid4().hex

    user3 = uuid4().hex
    session3 = uuid4().hex

    # user1: ok -> errored -> crashed (sequence of updates to one session)
    self.store_session(
        make_session(project, session_id=session1, distinct_id=user1, status="ok")
    )
    self.store_session(
        make_session(
            project, session_id=session1, distinct_id=user1, seq=1, errors=1, status="errored"
        )
    )
    self.store_session(
        make_session(project, session_id=session1, distinct_id=user1, seq=2, status="crashed")
    )
    # user2: one ok session, and one session that goes ok -> abnormal
    self.store_session(
        make_session(project, session_id=session2a, distinct_id=user2, status="ok")
    )
    self.store_session(
        make_session(project, session_id=session2b, distinct_id=user2, status="ok")
    )
    self.store_session(
        make_session(project, session_id=session2b, distinct_id=user2, status="abnormal")
    )
    # user3: a single session reported directly as errored
    self.store_session(
        make_session(
            project, session_id=session3, distinct_id=user3, errors=123, status="errored"
        )
    )

    # Add some extra healthy users:
    for _ in range(3):
        user = uuid4().hex
        self.store_session(make_session(project, distinct_id=user))

    # First, check if totals make sense:
    response = self.do_request(
        {
            "project": [project.id],
            "statsPeriod": "1d",
            "interval": "1d",
            "field": ["count_unique(user)"],
        }
    )
    assert response.status_code == 200, response.content
    assert result_sorted(response.data)["groups"] == [
        {
            "by": {},
            "series": {"count_unique(user)": [0, 6]},
            "totals": {"count_unique(user)": 6},
        },
    ]

    # Then check if grouping makes sense:
    response = self.do_request(
        {
            "project": [project.id],
            "statsPeriod": "1d",
            "interval": "1d",
            "field": ["count_unique(user)"],
            "groupBy": ["session.status"],
        }
    )
    assert response.status_code == 200, response.content
    assert result_sorted(response.data)["groups"] == [
        {
            "by": {"session.status": "abnormal"},
            "series": {"count_unique(user)": [0, 1]},
            "totals": {"count_unique(user)": 1},
        },
        {
            "by": {"session.status": "crashed"},
            "series": {"count_unique(user)": [0, 1]},
            "totals": {"count_unique(user)": 1},
        },
        {
            "by": {"session.status": "errored"},
            "series": {"count_unique(user)": [0, 1]},
            "totals": {"count_unique(user)": 1},
        },
        {
            # Only the 3 extra users are strictly healthy.
            "by": {"session.status": "healthy"},
            "series": {"count_unique(user)": [0, 3]},
            "totals": {"count_unique(user)": 3},
        },
    ]
@freeze_time(MOCK_DATETIME)
def test_duration_percentiles(self):
    """All duration aggregates match expected_duration_values (ungrouped)."""
    response = self.do_request(
        {
            "project": [-1],
            "statsPeriod": "1d",
            "interval": "1d",
            "field": [
                "avg(session.duration)",
                "p50(session.duration)",
                "p75(session.duration)",
                "p90(session.duration)",
                "p95(session.duration)",
                "p99(session.duration)",
                "max(session.duration)",
            ],
        }
    )
    assert response.status_code == 200, response.content
    expected = self.expected_duration_values
    groups = result_sorted(response.data)["groups"]
    assert len(groups) == 1, groups
    group = groups[0]
    assert group["by"] == {}
    # Floats are compared approximately; the first series bucket is empty (None).
    assert group["totals"] == pytest.approx(expected)
    for key, series in group["series"].items():
        assert series == pytest.approx([None, expected[key]])
@freeze_time(MOCK_DATETIME)
def test_duration_percentiles_groupby(self):
    """Duration aggregates land entirely in the "healthy" status group.

    All other status groups are returned but contain only None values.
    """
    response = self.do_request(
        {
            "project": [-1],
            "statsPeriod": "1d",
            "interval": "1d",
            "field": [
                "avg(session.duration)",
                "p50(session.duration)",
                "p75(session.duration)",
                "p90(session.duration)",
                "p95(session.duration)",
                "p99(session.duration)",
                "max(session.duration)",
            ],
            "groupBy": "session.status",
        }
    )
    assert response.status_code == 200, response.content
    expected = self.expected_duration_values

    seen = set()  # Make sure all session statuses are listed
    for group in result_sorted(response.data)["groups"]:
        seen.add(group["by"].get("session.status"))

        if group["by"] == {"session.status": "healthy"}:
            assert group["totals"] == pytest.approx(expected)
            for key, series in group["series"].items():
                assert series == pytest.approx([None, expected[key]])
        else:
            # Everything's none:
            assert group["totals"] == {key: None for key in expected}, group["by"]
            assert group["series"] == {key: [None, None] for key in expected}

    assert seen == {"abnormal", "crashed", "errored", "healthy"}
@freeze_time(MOCK_DATETIME)
def test_snuba_limit_exceeded(self):
    """With a lowered row limit, only the top groups are returned."""
    # 2 * 4 => only show two groups
    with patch("sentry.snuba.sessions_v2.SNUBA_LIMIT", 8), patch(
        "sentry.snuba.metrics.query.MAX_POINTS", 8
    ):
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "3d",
                "interval": "1d",
                # "user" is the first field, but "session" always wins:
                "field": ["count_unique(user)", "sum(session)"],
                "groupBy": ["project", "release", "environment"],
            }
        )
        assert response.status_code == 200, response.content
        # Only the two groups with the highest sum(session) survive the limit.
        assert result_sorted(response.data)["groups"] == [
            {
                "by": {
                    "release": "foo@1.0.0",
                    "environment": "production",
                    "project": self.project1.id,
                },
                "totals": {"sum(session)": 3, "count_unique(user)": 0},
                "series": {"sum(session)": [0, 0, 0, 3], "count_unique(user)": [0, 0, 0, 0]},
            },
            {
                "by": {
                    "release": "foo@1.0.0",
                    "environment": "production",
                    "project": self.project3.id,
                },
                "totals": {"sum(session)": 2, "count_unique(user)": 1},
                "series": {"sum(session)": [0, 0, 0, 2], "count_unique(user)": [0, 0, 0, 1]},
            },
        ]
@freeze_time(MOCK_DATETIME)
def test_snuba_limit_exceeded_groupby_status(self):
    """Get consistent result when grouping by status"""
    # 2 * 4 => only show two groups
    with patch("sentry.snuba.sessions_v2.SNUBA_LIMIT", 8), patch(
        "sentry.snuba.metrics.query.MAX_POINTS", 8
    ):
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "3d",
                "interval": "1d",
                "field": ["sum(session)", "count_unique(user)"],
                "groupBy": ["project", "release", "environment", "session.status"],
            }
        )
        assert response.status_code == 200, response.content
        # Each surviving (project, release, environment) combination is
        # expanded into all four status groups, zero-filled where empty.
        assert result_sorted(response.data)["groups"] == [
            {
                "by": {
                    "project": self.project1.id,
                    "release": "foo@1.0.0",
                    "session.status": "abnormal",
                    "environment": "production",
                },
                "totals": {"sum(session)": 0, "count_unique(user)": 0},
                "series": {"sum(session)": [0, 0, 0, 0], "count_unique(user)": [0, 0, 0, 0]},
            },
            {
                "by": {
                    "project": self.project1.id,
                    "release": "foo@1.0.0",
                    "session.status": "crashed",
                    "environment": "production",
                },
                "totals": {"sum(session)": 0, "count_unique(user)": 0},
                "series": {"sum(session)": [0, 0, 0, 0], "count_unique(user)": [0, 0, 0, 0]},
            },
            {
                "by": {
                    "project": self.project1.id,
                    "release": "foo@1.0.0",
                    "environment": "production",
                    "session.status": "errored",
                },
                "totals": {"sum(session)": 0, "count_unique(user)": 0},
                "series": {"sum(session)": [0, 0, 0, 0], "count_unique(user)": [0, 0, 0, 0]},
            },
            {
                "by": {
                    "project": self.project1.id,
                    "session.status": "healthy",
                    "release": "foo@1.0.0",
                    "environment": "production",
                },
                "totals": {"sum(session)": 3, "count_unique(user)": 0},
                "series": {"sum(session)": [0, 0, 0, 3], "count_unique(user)": [0, 0, 0, 0]},
            },
            {
                "by": {
                    "session.status": "abnormal",
                    "release": "foo@1.0.0",
                    "project": self.project3.id,
                    "environment": "production",
                },
                "totals": {"sum(session)": 0, "count_unique(user)": 0},
                "series": {"sum(session)": [0, 0, 0, 0], "count_unique(user)": [0, 0, 0, 0]},
            },
            {
                "by": {
                    "release": "foo@1.0.0",
                    "project": self.project3.id,
                    "session.status": "crashed",
                    "environment": "production",
                },
                "totals": {"sum(session)": 0, "count_unique(user)": 0},
                "series": {"sum(session)": [0, 0, 0, 0], "count_unique(user)": [0, 0, 0, 0]},
            },
            {
                "by": {
                    "release": "foo@1.0.0",
                    "project": self.project3.id,
                    "environment": "production",
                    "session.status": "errored",
                },
                "totals": {"sum(session)": 1, "count_unique(user)": 1},
                "series": {"sum(session)": [0, 0, 0, 1], "count_unique(user)": [0, 0, 0, 1]},
            },
            {
                "by": {
                    "session.status": "healthy",
                    "release": "foo@1.0.0",
                    "project": self.project3.id,
                    "environment": "production",
                },
                "totals": {"sum(session)": 1, "count_unique(user)": 0},
                "series": {"sum(session)": [0, 0, 0, 1], "count_unique(user)": [0, 0, 0, 0]},
            },
        ]
  974. @freeze_time(MOCK_DATETIME)
  975. def test_environment_filter_not_present_in_query(self):
  976. self.create_environment(name="abc")
  977. response = self.do_request(
  978. {
  979. "project": [-1],
  980. "statsPeriod": "1d",
  981. "interval": "1d",
  982. "field": ["sum(session)"],
  983. "environment": ["development", "abc"],
  984. }
  985. )
  986. assert response.status_code == 200, response.content
  987. assert result_sorted(response.data)["groups"] == [
  988. {"by": {}, "series": {"sum(session)": [0, 1]}, "totals": {"sum(session)": 1}}
  989. ]
@freeze_time(MOCK_DATETIME)
def test_sessions_without_users(self):
    # The first field defines by which groups additional queries are filtered
    # But if the first field is the user count, the series should still
    # contain the session counts even if the project does not track users
    response = self.do_request(
        {
            "project": self.project.id,  # project without users
            "statsPeriod": "1d",
            "interval": "1d",
            "field": ["count_unique(user)", "sum(session)"],
            "groupBy": "release",
        }
    )
    assert response.status_code == 200, response.content
    # User counts are all zero, yet the session sums are still reported.
    assert result_sorted(response.data)["groups"] == [
        {
            "by": {"release": "foo@1.0.0"},
            "series": {"count_unique(user)": [0, 0], "sum(session)": [0, 3]},
            "totals": {"count_unique(user)": 0, "sum(session)": 3},
        },
        {
            "by": {"release": "foo@1.1.0"},
            "series": {"count_unique(user)": [0, 0], "sum(session)": [0, 1]},
            "totals": {"count_unique(user)": 0, "sum(session)": 1},
        },
    ]
  1017. @freeze_time(MOCK_DATETIME + datetime.timedelta(days=2))
  1018. def test_groupby_no_data(self):
  1019. # Empty results for everything
  1020. response = self.do_request(
  1021. {
  1022. "project": self.project.id, # project without users
  1023. "statsPeriod": "1d",
  1024. "interval": "1d",
  1025. "field": ["count_unique(user)", "sum(session)"],
  1026. "groupBy": "release",
  1027. }
  1028. )
  1029. assert response.status_code == 200, response.content
  1030. assert result_sorted(response.data)["groups"] == []
  1031. @freeze_time(MOCK_DATETIME)
  1032. def test_mix_known_and_unknown_strings(self):
  1033. response = self.do_request(
  1034. {
  1035. "project": self.project.id, # project without users
  1036. "statsPeriod": "1d",
  1037. "interval": "1d",
  1038. "field": ["count_unique(user)", "sum(session)"],
  1039. "query": "environment:[production,foo]",
  1040. }
  1041. )
  1042. assert response.status_code == 200, response.data
  1043. @freeze_time(MOCK_DATETIME)
  1044. def test_release_semver_filter(self):
  1045. r1 = self.create_release(version="ahmed@1.0.0")
  1046. r2 = self.create_release(version="ahmed@1.1.0")
  1047. r3 = self.create_release(version="ahmed@2.0.0")
  1048. for r in (r1, r2, r3):
  1049. self.store_session(make_session(self.project, release=r.version))
  1050. response = self.do_request(
  1051. {
  1052. "project": self.project.id,
  1053. "statsPeriod": "1d",
  1054. "interval": "1d",
  1055. "field": ["sum(session)"],
  1056. "groupBy": ["release"],
  1057. "query": "release.version:1.*",
  1058. }
  1059. )
  1060. assert response.status_code == 200
  1061. assert sorted(response.data["groups"], key=lambda x: x["by"]["release"]) == [
  1062. {
  1063. "by": {"release": "ahmed@1.0.0"},
  1064. "totals": {"sum(session)": 1},
  1065. "series": {"sum(session)": [0, 1]},
  1066. },
  1067. {
  1068. "by": {"release": "ahmed@1.1.0"},
  1069. "totals": {"sum(session)": 1},
  1070. "series": {"sum(session)": [0, 1]},
  1071. },
  1072. ]
@freeze_time(MOCK_DATETIME)
def test_release_package_filter(self):
    """`release.package:ahmed2` matches only releases with that package name."""
    r1 = self.create_release(version="ahmed@1.2.4+124")
    r2 = self.create_release(version="ahmed2@1.2.5+125")
    r3 = self.create_release(version="ahmed2@1.2.6+126")
    for r in (r1, r2, r3):
        self.store_session(make_session(self.project, release=r.version))

    response = self.do_request(
        {
            "project": self.project.id,
            "statsPeriod": "1d",
            "interval": "1d",
            "field": ["sum(session)"],
            "groupBy": ["release"],
            "query": "release.package:ahmed2",
        }
    )
    assert response.status_code == 200
    # r1 ("ahmed") is excluded; both "ahmed2" releases are returned.
    assert sorted(response.data["groups"], key=lambda x: x["by"]["release"]) == [
        {
            "by": {"release": "ahmed2@1.2.5+125"},
            "totals": {"sum(session)": 1},
            "series": {"sum(session)": [0, 1]},
        },
        {
            "by": {"release": "ahmed2@1.2.6+126"},
            "totals": {"sum(session)": 1},
            "series": {"sum(session)": [0, 1]},
        },
    ]
  1103. @freeze_time(MOCK_DATETIME)
  1104. def test_release_build_filter(self):
  1105. r1 = self.create_release(version="ahmed@1.2.4+124")
  1106. r2 = self.create_release(version="ahmed@1.2.3+123")
  1107. r3 = self.create_release(version="ahmed2@1.2.5+125")
  1108. for r in (r1, r2, r3):
  1109. self.store_session(make_session(self.project, release=r.version))
  1110. response = self.do_request(
  1111. {
  1112. "project": self.project.id,
  1113. "statsPeriod": "1d",
  1114. "interval": "1d",
  1115. "field": ["sum(session)"],
  1116. "groupBy": ["release"],
  1117. "query": "release.build:<125",
  1118. }
  1119. )
  1120. assert response.status_code == 200
  1121. assert sorted(response.data["groups"], key=lambda x: x["by"]["release"]) == [
  1122. {
  1123. "by": {"release": "ahmed@1.2.3+123"},
  1124. "totals": {"sum(session)": 1},
  1125. "series": {"sum(session)": [0, 1]},
  1126. },
  1127. {
  1128. "by": {"release": "ahmed@1.2.4+124"},
  1129. "totals": {"sum(session)": 1},
  1130. "series": {"sum(session)": [0, 1]},
  1131. },
  1132. ]
@freeze_time(MOCK_DATETIME)
def test_release_stage_filter(self):
    """`release.stage:adopted` only returns releases marked adopted.

    Adoption state lives in ReleaseProjectEnvironment: only the release
    whose row has an `adopted` timestamp qualifies.
    """
    new_env = self.create_environment(name="new_env")
    adopted_release = self.create_release(version="adopted_release")
    not_adopted_release = self.create_release(version="not_adopted_release")
    ReleaseProjectEnvironment.objects.create(
        project_id=self.project.id,
        release_id=adopted_release.id,
        environment_id=new_env.id,
        adopted=timezone.now(),
    )
    # Deliberately created WITHOUT an `adopted` timestamp:
    ReleaseProjectEnvironment.objects.create(
        project_id=self.project.id,
        release_id=not_adopted_release.id,
        environment_id=new_env.id,
    )

    for r in (adopted_release, not_adopted_release):
        self.store_session(
            make_session(self.project, release=r.version, environment=new_env.name)
        )

    response = self.do_request(
        {
            "project": self.project.id,
            "statsPeriod": "1d",
            "interval": "1d",
            "field": ["sum(session)"],
            "groupBy": ["release"],
            "query": "release.stage:adopted",
            "environment": new_env.name,
        }
    )
    assert response.status_code == 200
    assert response.data["groups"] == [
        {
            "by": {"release": "adopted_release"},
            "totals": {"sum(session)": 1},
            "series": {"sum(session)": [0, 1]},
        },
    ]
  1172. @region_silo_test
  1173. @patch("sentry.release_health.backend", MetricsReleaseHealthBackend())
  1174. class OrganizationSessionsEndpointMetricsTest(
  1175. BaseMetricsTestCase, OrganizationSessionsEndpointTest
  1176. ):
  1177. """Repeat all tests with metrics backend"""
@freeze_time(MOCK_DATETIME)
def test_orderby(self):
    """Validation and behavior of the `orderBy` parameter.

    Covers: unknown field, field not in `field`, multiple orderBy values,
    orderBy without groupBy, orderBy combined with session.status grouping
    (rejected), and ascending/descending ordering of real results.
    """
    # Unknown field name is rejected.
    response = self.do_request(
        {
            "project": [-1],
            "statsPeriod": "2d",
            "interval": "1d",
            "field": ["sum(session)"],
            "orderBy": "foobar",
        }
    )
    assert response.status_code == 400
    assert response.data == {"detail": "'orderBy' must be one of the provided 'fields'"}

    # A valid field that is not in the requested `field` list is rejected.
    response = self.do_request(
        {
            "project": [-1],
            "statsPeriod": "2d",
            "interval": "1d",
            "field": ["sum(session)"],
            "orderBy": "count_unique(user)",  # wrong field
        }
    )
    assert response.status_code == 400
    assert response.data == {"detail": "'orderBy' must be one of the provided 'fields'"}

    # Cannot sort by more than one field
    response = self.do_request(
        {
            "project": [-1],
            "statsPeriod": "2d",
            "interval": "1d",
            "field": ["sum(session)", "count_unique(user)"],
            "orderBy": ["sum(session)", "count_unique(user)"],
        }
    )
    assert response.status_code == 400
    assert response.data == {"detail": "Cannot order by multiple fields"}

    response = self.do_request(
        {
            "project": [-1],
            "statsPeriod": "2d",
            "interval": "1d",
            "field": ["sum(session)"],
            "orderBy": "sum(session)",  # misses group by, but why not
        }
    )
    assert response.status_code == 200

    # orderBy cannot be combined with grouping by session.status.
    response = self.do_request(
        {
            "project": [-1],
            "statsPeriod": "2d",
            "interval": "1d",
            "field": ["sum(session)"],
            "orderBy": "sum(session)",
            "groupBy": ["session.status"],
        }
    )
    assert response.status_code == 400
    assert response.data == {"detail": "Cannot use 'orderBy' when grouping by sessions.status"}

    # Ascending order by p95 over a real group-by:
    response = self.do_request(
        {
            "project": [self.project.id, self.project3.id],
            "statsPeriod": "2d",
            "interval": "1d",
            "field": ["sum(session)", "p95(session.duration)"],
            "orderBy": "p95(session.duration)",
            "groupBy": ["project", "release", "environment"],
        }
    )

    expected_groups = [
        {
            "by": {
                "project": self.project.id,
                "release": "foo@1.0.0",
                "environment": "production",
            },
            "totals": {"sum(session)": 3, "p95(session.duration)": 25000.0},
            "series": {
                "sum(session)": [0, 0, 3],
                "p95(session.duration)": [None, None, 25000.0],
            },
        },
        {
            "by": {
                "project": self.project3.id,
                "release": "foo@1.2.0",
                "environment": "production",
            },
            "totals": {"sum(session)": 1, "p95(session.duration)": 37000.0},
            "series": {
                "sum(session)": [0, 0, 1],
                "p95(session.duration)": [None, None, 37000.0],
            },
        },
        {
            "by": {
                "project": self.project.id,
                "release": "foo@1.1.0",
                "environment": "production",
            },
            "totals": {"sum(session)": 1, "p95(session.duration)": 49000.0},
            "series": {
                "sum(session)": [0, 0, 1],
                "p95(session.duration)": [None, None, 49000.0],
            },
        },
        {
            "by": {
                "project": self.project3.id,
                "release": "foo@1.0.0",
                "environment": "production",
            },
            "totals": {"sum(session)": 2, "p95(session.duration)": 79400.0},
            "series": {
                "sum(session)": [0, 0, 2],
                "p95(session.duration)": [None, None, 79400.0],
            },
        },
    ]

    # Not using `result_sorted` here, because we want to verify the order
    assert response.status_code == 200, response.data
    assert response.data["groups"] == expected_groups

    # Sort descending
    response = self.do_request(
        {
            "project": [self.project.id, self.project3.id],
            "statsPeriod": "2d",
            "interval": "1d",
            "field": ["sum(session)", "p95(session.duration)"],
            "orderBy": "-p95(session.duration)",
            "groupBy": ["project", "release", "environment"],
        }
    )
    assert response.status_code == 200
    assert response.data["groups"] == list(reversed(expected_groups))

    # Add some more code coverage
    all_fields = [
        "sum(session)",
        "count_unique(user)",
        "avg(session.duration)",
    ]
    for field in all_fields:
        assert (
            self.do_request(
                {
                    "project": [self.project.id, self.project3.id],
                    "statsPeriod": "2d",
                    "interval": "1d",
                    "field": all_fields,
                    "orderBy": field,
                    "groupBy": ["project", "release", "environment"],
                }
            ).status_code
            == 200
        )
  1332. @freeze_time(MOCK_DATETIME)
  1333. def test_wildcard_search(self):
  1334. default_request = {
  1335. "project": [-1],
  1336. "statsPeriod": "2d",
  1337. "interval": "1d",
  1338. }
  1339. def req(**kwargs):
  1340. return self.do_request(dict(default_request, **kwargs))
  1341. response = req(field=["sum(session)"], query="release:foo@*")
  1342. assert response.status_code == 400
  1343. assert response.data == {"detail": "Invalid condition: wildcard search is not supported"}
  1344. response = req(field=["sum(session)"], query="release:foo@* AND release:bar@*")
  1345. assert response.status_code == 400
  1346. assert response.data == {"detail": "Invalid condition: wildcard search is not supported"}
  1347. response = req(field=["sum(session)"], query="release:foo@* OR release:bar@*")
  1348. assert response.status_code == 400
  1349. assert response.data == {"detail": "Invalid condition: wildcard search is not supported"}
  1350. response = req(field=["sum(session)"], query="(release:foo@* OR release:bar) OR project:1")
  1351. assert response.status_code == 400
  1352. assert response.data == {"detail": "Invalid condition: wildcard search is not supported"}
@freeze_time(MOCK_DATETIME)
def test_filter_by_session_status(self):
    """Filtering by `session.status` in the query string (metrics backend).

    Covers unknown statuses, negation, IN lists, combination with other
    filters, and the combinations that are rejected with a 400.
    """
    default_request = {
        "project": [-1],
        "statsPeriod": "1d",
        "interval": "1d",
    }

    def req(**kwargs):
        # Merge per-case parameters over the shared defaults.
        return self.do_request(dict(default_request, **kwargs))

    # An unknown status matches nothing.
    response = req(field=["sum(session)"], query="session.status:bogus")
    assert response.status_code == 200, response.content
    assert result_sorted(response.data)["groups"] == []

    # Negation: everything that is not healthy.
    response = req(field=["sum(session)"], query="!session.status:healthy")
    assert response.status_code == 200, response.content
    assert result_sorted(response.data)["groups"] == [
        {"by": {}, "series": {"sum(session)": [0, 3]}, "totals": {"sum(session)": 3}}
    ]

    # sum(session) filtered by multiple statuses adds them
    response = req(field=["sum(session)"], query="session.status:[healthy, errored]")
    assert response.status_code == 200, response.content
    assert result_sorted(response.data)["groups"] == [
        {"by": {}, "series": {"sum(session)": [0, 8]}, "totals": {"sum(session)": 8}}
    ]

    # The same filter combined with grouping splits the statuses out again.
    response = req(
        field=["sum(session)"],
        query="session.status:[healthy, errored]",
        groupBy="session.status",
    )
    assert response.status_code == 200, response.content
    assert result_sorted(response.data)["groups"] == [
        {
            "by": {"session.status": "errored"},
            "totals": {"sum(session)": 2},
            "series": {"sum(session)": [0, 2]},
        },
        {
            "by": {"session.status": "healthy"},
            "totals": {"sum(session)": 6},
            "series": {"sum(session)": [0, 6]},
        },
    ]

    # AND-combination with a release filter works.
    response = req(field=["sum(session)"], query="session.status:healthy release:foo@1.1.0")
    assert response.status_code == 200, response.content
    assert result_sorted(response.data)["groups"] == [
        {"by": {}, "series": {"sum(session)": [0, 1]}, "totals": {"sum(session)": 1}}
    ]

    # OR-combination with session.status is rejected.
    response = req(field=["sum(session)"], query="session.status:healthy OR release:foo@1.1.0")
    assert response.status_code == 400, response.data
    assert response.data == {"detail": "Unable to parse condition with session.status"}

    # count_unique(user) does not work with multiple session statuses selected
    response = req(field=["count_unique(user)"], query="session.status:[healthy, errored]")
    assert response.status_code == 400, response.data
    assert response.data == {
        "detail": "Cannot filter count_unique by multiple session.status unless it is in groupBy"
    }

    response = req(field=["p95(session.duration)"], query="session.status:abnormal")
    assert response.status_code == 200, response.content
    assert result_sorted(response.data)["groups"] == []
@freeze_time(MOCK_DATETIME)
def test_filter_by_session_status_with_groupby(self):
    """A session.status filter combined with a release grouping.

    Releases whose sessions are all filtered out still appear, with zeros.
    """
    default_request = {
        "project": [-1],
        "statsPeriod": "1d",
        "interval": "1d",
        "groupBy": "release",
    }

    def req(**kwargs):
        # Merge per-case parameters over the shared defaults.
        return self.do_request(dict(default_request, **kwargs))

    response = req(field=["sum(session)"], query="session.status:healthy")
    assert response.status_code == 200, response.content
    assert result_sorted(response.data)["groups"] == [
        {
            "by": {"release": "foo@1.0.0"},
            "series": {"sum(session)": [0, 5]},
            "totals": {"sum(session)": 5},
        },
        {
            "by": {"release": "foo@1.1.0"},
            "series": {"sum(session)": [0, 1]},
            "totals": {"sum(session)": 1},
        },
        {
            # foo@1.2.0's only session is not healthy, so the group is zeroed.
            "by": {"release": "foo@1.2.0"},
            "series": {"sum(session)": [0, 0]},
            "totals": {"sum(session)": 0},
        },
    ]
  1440. @freeze_time(MOCK_DATETIME)
  1441. def test_filter_by_session_status_with_orderby(self):
  1442. default_request = {
  1443. "project": [-1],
  1444. "statsPeriod": "1d",
  1445. "interval": "1d",
  1446. }
  1447. def req(**kwargs):
  1448. return self.do_request(dict(default_request, **kwargs))
  1449. response = req(
  1450. field=["sum(session)"],
  1451. query="session.status:[abnormal,crashed]",
  1452. groupBy="release",
  1453. orderBy="sum(session)",
  1454. )
  1455. assert response.status_code == 400, response.content
  1456. assert response.data == {"detail": "Cannot order by sum(session) with the current filters"}
  1457. response = req(
  1458. field=["sum(session)"],
  1459. query="session.status:healthy",
  1460. groupBy="release",
  1461. orderBy="sum(session)",
  1462. )
  1463. assert response.status_code == 400, response.content
  1464. assert response.data == {"detail": "Cannot order by sum(session) with the current filters"}
@freeze_time(MOCK_DATETIME)
def test_anr_rate(self):
    """anr_rate()/foreground_anr_rate() derived from abnormal-mechanism sessions.

    Fixture: 3 users with a prior session each; 2 of them later report an
    abnormal session (one foreground ANR, one background ANR), so
    anr_rate is 2/4 = 0.5 and foreground_anr_rate is 1/4 = 0.25.
    """

    def store_anr_session(user_id, mechanism):
        # Record an abnormal session carrying the given ANR mechanism.
        self.store_session(
            make_session(
                self.project2,
                distinct_id=user_id,
                errors=1,
                status="abnormal",
                abnormal_mechanism=mechanism,
            )
        )

    self.store_session(
        make_session(
            self.project2,
            distinct_id="610c480b-3c47-4871-8c03-05ea04595eb0",
            started=SESSION_STARTED - 60 * 60,
        )
    )
    store_anr_session("610c480b-3c47-4871-8c03-05ea04595eb0", "anr_foreground")

    self.store_session(
        make_session(
            self.project2,
            distinct_id="ac0b74a2-8ace-415a-82d2-0fdb0d81dec4",
            started=SESSION_STARTED - 60 * 60,
        )
    )
    store_anr_session("ac0b74a2-8ace-415a-82d2-0fdb0d81dec4", "anr_background")

    # Third user never reports an ANR.
    self.store_session(
        make_session(
            self.project2,
            distinct_id="5344c005-653b-48b7-bbaf-d362c2f268dd",
            started=SESSION_STARTED - 60 * 60,
        )
    )

    default_request = {
        "project": [-1],
        "statsPeriod": "1d",
        "interval": "1d",
        "field": ["anr_rate()"],
    }

    def req(**kwargs):
        # Merge per-case parameters over the shared defaults.
        return self.do_request(dict(default_request, **kwargs))

    # basic test case
    response = req()
    assert response.status_code == 200
    assert response.data["groups"] == [
        {"by": {}, "totals": {"anr_rate()": 0.5}, "series": {"anr_rate()": [None, 0.5]}}
    ]

    # group by session.status
    response = req(
        groupBy="session.status",
    )
    assert response.status_code == 400, response.content
    assert response.data == {"detail": "Cannot group field anr_rate() by session.status"}

    # valid group by
    response = req(
        field=["anr_rate()", "foreground_anr_rate()"],
        groupBy=["release", "environment"],
        orderBy=["anr_rate()"],
        query="release:foo@1.0.0",
    )
    assert response.status_code == 200, response.content
    assert response.data["groups"] == [
        {
            "by": {"environment": "production", "release": "foo@1.0.0"},
            "series": {
                "anr_rate()": [None, 0.5],
                "foreground_anr_rate()": [None, 0.25],
            },
            "totals": {
                "anr_rate()": 0.5,
                "foreground_anr_rate()": 0.25,
            },
        },
    ]
  1541. @freeze_time(MOCK_DATETIME)
  1542. def test_crash_rate(self):
  1543. default_request = {
  1544. "project": [-1],
  1545. "statsPeriod": "1d",
  1546. "interval": "1d",
  1547. "field": ["crash_rate(session)"],
  1548. }
  1549. def req(**kwargs):
  1550. return self.do_request(dict(default_request, **kwargs))
  1551. # 1 - filter session.status
  1552. response = req(
  1553. query="session.status:[abnormal,crashed]",
  1554. )
  1555. assert response.status_code == 400, response.content
  1556. assert response.data == {
  1557. "detail": "Cannot filter field crash_rate(session) by session.status"
  1558. }
  1559. # 2 - group by session.status
  1560. response = req(
  1561. groupBy="session.status",
  1562. )
  1563. assert response.status_code == 400, response.content
  1564. assert response.data == {
  1565. "detail": "Cannot group field crash_rate(session) by session.status"
  1566. }
  1567. # 4 - fetch all
  1568. response = req(
  1569. field=[
  1570. "crash_rate(session)",
  1571. "crash_rate(user)",
  1572. "crash_free_rate(session)",
  1573. "crash_free_rate(user)",
  1574. ],
  1575. groupBy=["release", "environment"],
  1576. orderBy=["crash_free_rate(session)"],
  1577. query="release:foo@1.0.0",
  1578. )
  1579. assert response.status_code == 200, response.content
  1580. assert response.data["groups"] == [
  1581. {
  1582. "by": {"environment": "production", "release": "foo@1.0.0"},
  1583. "series": {
  1584. "crash_free_rate(session)": [None, 0.8333333333333334],
  1585. "crash_free_rate(user)": [None, 1.0],
  1586. "crash_rate(session)": [None, 0.16666666666666666],
  1587. "crash_rate(user)": [None, 0.0],
  1588. },
  1589. "totals": {
  1590. "crash_free_rate(session)": 0.8333333333333334,
  1591. "crash_free_rate(user)": 1.0,
  1592. "crash_rate(session)": 0.16666666666666666,
  1593. "crash_rate(user)": 0.0,
  1594. },
  1595. },
  1596. {
  1597. "by": {"environment": "development", "release": "foo@1.0.0"},
  1598. "series": {
  1599. "crash_free_rate(session)": [None, 1.0],
  1600. "crash_free_rate(user)": [None, None],
  1601. "crash_rate(session)": [None, 0.0],
  1602. "crash_rate(user)": [None, None],
  1603. },
  1604. "totals": {
  1605. "crash_free_rate(session)": 1.0,
  1606. "crash_free_rate(user)": None,
  1607. "crash_rate(session)": 0.0,
  1608. "crash_rate(user)": None,
  1609. },
  1610. },
  1611. ]
  1612. @freeze_time(MOCK_DATETIME)
  1613. def test_pagination(self):
  1614. def do_request(cursor):
  1615. return self.do_request(
  1616. {
  1617. "project": self.project.id, # project without users
  1618. "statsPeriod": "1d",
  1619. "interval": "1d",
  1620. "field": ["count_unique(user)", "sum(session)"],
  1621. "query": "",
  1622. "groupBy": "release",
  1623. "orderBy": "sum(session)",
  1624. "per_page": 1,
  1625. **({"cursor": cursor} if cursor else {}),
  1626. }
  1627. )
  1628. response = do_request(None)
  1629. assert response.status_code == 200, response.data
  1630. assert len(response.data["groups"]) == 1
  1631. assert response.data["groups"] == [
  1632. {
  1633. "by": {"release": "foo@1.1.0"},
  1634. "series": {"count_unique(user)": [0, 0], "sum(session)": [0, 1]},
  1635. "totals": {"count_unique(user)": 0, "sum(session)": 1},
  1636. }
  1637. ]
  1638. links = {link["rel"]: link for url, link in parse_link_header(response["Link"]).items()}
  1639. assert links["previous"]["results"] == "false"
  1640. assert links["next"]["results"] == "true"
  1641. response = do_request(links["next"]["cursor"])
  1642. assert response.status_code == 200, response.data
  1643. assert len(response.data["groups"]) == 1
  1644. assert response.data["groups"] == [
  1645. {
  1646. "by": {"release": "foo@1.0.0"},
  1647. "series": {"count_unique(user)": [0, 0], "sum(session)": [0, 3]},
  1648. "totals": {"count_unique(user)": 0, "sum(session)": 3},
  1649. }
  1650. ]
  1651. links = {link["rel"]: link for url, link in parse_link_header(response["Link"]).items()}
  1652. assert links["previous"]["results"] == "true"
  1653. assert links["next"]["results"] == "false"
  1654. def test_unrestricted_date_range(self):
  1655. response = self.do_request(
  1656. {
  1657. "project": [-1],
  1658. "statsPeriod": "7h",
  1659. "interval": "5m",
  1660. "field": ["sum(session)"],
  1661. }
  1662. )
  1663. assert response.status_code == 200
  1664. @freeze_time(MOCK_DATETIME)
  1665. def test_release_is_empty(self):
  1666. self.store_session(
  1667. make_session(
  1668. self.project1, started=SESSION_STARTED + 12 * 60, release="", environment=""
  1669. )
  1670. )
  1671. for query in ('release:"" environment:""', 'release:"" OR environment:""'):
  1672. # Empty strings are invalid values for releases and environments, but we should still handle those cases
  1673. # correctly at the query layer
  1674. response = self.do_request(
  1675. {
  1676. "project": self.project.id, # project without users
  1677. "statsPeriod": "1d",
  1678. "interval": "1d",
  1679. "field": ["sum(session)"],
  1680. "query": query,
  1681. "groupBy": ["release", "environment"],
  1682. }
  1683. )
  1684. assert response.status_code == 200, response.content
  1685. assert result_sorted(response.data)["groups"] == [
  1686. {
  1687. "by": {"environment": "", "release": ""},
  1688. "series": {"sum(session)": [0, 1]},
  1689. "totals": {"sum(session)": 1},
  1690. }
  1691. ]
  1692. @region_silo_test
  1693. @patch("sentry.release_health.backend", MetricsReleaseHealthBackend())
  1694. class SessionsMetricsSortReleaseTimestampTest(BaseMetricsTestCase, APITestCase):
  1695. def do_request(self, query, user=None, org=None):
  1696. self.login_as(user=user or self.user)
  1697. url = reverse(
  1698. "sentry-api-0-organization-sessions",
  1699. kwargs={"organization_slug": (org or self.organization).slug},
  1700. )
  1701. return self.client.get(url, query, format="json")
  1702. @freeze_time(MOCK_DATETIME)
  1703. def test_order_by_with_no_releases(self):
  1704. """
  1705. Test that ensures if we have no releases in the preflight query when trying to order by
  1706. `release.timestamp`, we get no groups.
  1707. Essentially testing the empty preflight query filters branch.
  1708. """
  1709. project_random = self.create_project()
  1710. for _ in range(0, 2):
  1711. self.store_session(make_session(project_random))
  1712. self.store_session(make_session(project_random, status="crashed"))
  1713. response = self.do_request(
  1714. {
  1715. "project": project_random.id,
  1716. "statsPeriod": "1d",
  1717. "interval": "1d",
  1718. "field": ["crash_free_rate(session)"],
  1719. "groupBy": ["release"],
  1720. "orderBy": "-release.timestamp",
  1721. "per_page": 3,
  1722. }
  1723. )
  1724. assert response.data["groups"] == []
  1725. def test_order_by_max_limit(self):
  1726. response = self.do_request(
  1727. {
  1728. "project": self.project.id,
  1729. "statsPeriod": "1d",
  1730. "interval": "1d",
  1731. "field": ["crash_free_rate(session)"],
  1732. "groupBy": ["release"],
  1733. "orderBy": "-release.timestamp",
  1734. "per_page": 103,
  1735. }
  1736. )
  1737. assert response.data["detail"] == (
  1738. "This limit is too high for queries that requests a preflight query. "
  1739. "Please choose a limit below 100"
  1740. )
  1741. @freeze_time(MOCK_DATETIME)
  1742. def test_order_by(self):
  1743. """
  1744. Test that ensures that we are able to get the crash_free_rate for the most 2 recent
  1745. releases when grouping by release
  1746. """
  1747. # Step 1: Create 3 releases
  1748. release1b = self.create_release(version="1B")
  1749. release1c = self.create_release(version="1C")
  1750. release1d = self.create_release(version="1D")
  1751. # Step 2: Create crash free rate for each of those releases
  1752. # Release 1c -> 66.7% Crash free rate
  1753. for _ in range(0, 2):
  1754. self.store_session(make_session(self.project, release=release1c.version))
  1755. self.store_session(make_session(self.project, release=release1c.version, status="crashed"))
  1756. # Release 1b -> 33.3% Crash free rate
  1757. for _ in range(0, 2):
  1758. self.store_session(
  1759. make_session(self.project, release=release1b.version, status="crashed")
  1760. )
  1761. self.store_session(make_session(self.project, release=release1b.version))
  1762. # Create Sessions in each of these releases
  1763. # Release 1d -> 80% Crash free rate
  1764. for _ in range(0, 4):
  1765. self.store_session(make_session(self.project, release=release1d.version))
  1766. self.store_session(make_session(self.project, release=release1d.version, status="crashed"))
  1767. # Step 3: Make request
  1768. response = self.do_request(
  1769. {
  1770. "project": self.project.id, # project without users
  1771. "statsPeriod": "1d",
  1772. "interval": "1d",
  1773. "field": ["crash_free_rate(session)"],
  1774. "groupBy": ["release"],
  1775. "orderBy": "-release.timestamp",
  1776. "per_page": 3,
  1777. }
  1778. )
  1779. # Step 4: Validate Results
  1780. assert response.data["groups"] == [
  1781. {
  1782. "by": {"release": "1D"},
  1783. "totals": {"crash_free_rate(session)": 0.8},
  1784. "series": {"crash_free_rate(session)": [None, 0.8]},
  1785. },
  1786. {
  1787. "by": {"release": "1C"},
  1788. "totals": {"crash_free_rate(session)": 0.6666666666666667},
  1789. "series": {"crash_free_rate(session)": [None, 0.6666666666666667]},
  1790. },
  1791. {
  1792. "by": {"release": "1B"},
  1793. "totals": {"crash_free_rate(session)": 0.33333333333333337},
  1794. "series": {"crash_free_rate(session)": [None, 0.33333333333333337]},
  1795. },
  1796. ]
  1797. @freeze_time(MOCK_DATETIME)
  1798. def test_order_by_with_session_status_groupby(self):
  1799. """
  1800. Test that ensures we are able to group by session.status and order by `release.timestamp`
  1801. since `release.timestamp` is generated from a preflight query
  1802. """
  1803. rando_project = self.create_project()
  1804. release_1a = self.create_release(project=rando_project, version="1A")
  1805. release_1b = self.create_release(project=rando_project, version="1B")
  1806. # Release 1B sessions
  1807. for _ in range(4):
  1808. self.store_session(
  1809. make_session(rando_project, release=release_1b.version, status="crashed")
  1810. )
  1811. for _ in range(10):
  1812. self.store_session(make_session(rando_project, release=release_1b.version))
  1813. for _ in range(3):
  1814. self.store_session(make_session(rando_project, errors=1, release=release_1b.version))
  1815. # Release 1A sessions
  1816. for _ in range(0, 2):
  1817. self.store_session(
  1818. make_session(rando_project, release=release_1a.version, status="crashed")
  1819. )
  1820. self.store_session(make_session(rando_project, release=release_1a.version))
  1821. for _ in range(3):
  1822. self.store_session(make_session(rando_project, errors=1, release=release_1a.version))
  1823. response = self.do_request(
  1824. {
  1825. "project": rando_project.id,
  1826. "statsPeriod": "1d",
  1827. "interval": "1d",
  1828. "field": ["sum(session)"],
  1829. "groupBy": ["release", "session.status"],
  1830. "orderBy": "-release.timestamp",
  1831. }
  1832. )
  1833. assert response.data["groups"] == [
  1834. {
  1835. "by": {"release": "1B", "session.status": "abnormal"},
  1836. "totals": {"sum(session)": 0},
  1837. "series": {"sum(session)": [0, 0]},
  1838. },
  1839. {
  1840. "by": {"release": "1B", "session.status": "crashed"},
  1841. "totals": {"sum(session)": 4},
  1842. "series": {"sum(session)": [0, 4]},
  1843. },
  1844. {
  1845. "by": {"release": "1B", "session.status": "errored"},
  1846. "totals": {"sum(session)": 3},
  1847. "series": {"sum(session)": [0, 3]},
  1848. },
  1849. {
  1850. "by": {"release": "1B", "session.status": "healthy"},
  1851. "totals": {"sum(session)": 10},
  1852. "series": {"sum(session)": [0, 10]},
  1853. },
  1854. {
  1855. "by": {"release": "1A", "session.status": "abnormal"},
  1856. "totals": {"sum(session)": 0},
  1857. "series": {"sum(session)": [0, 0]},
  1858. },
  1859. {
  1860. "by": {"release": "1A", "session.status": "crashed"},
  1861. "totals": {"sum(session)": 2},
  1862. "series": {"sum(session)": [0, 2]},
  1863. },
  1864. {
  1865. "by": {"release": "1A", "session.status": "errored"},
  1866. "totals": {"sum(session)": 3},
  1867. "series": {"sum(session)": [0, 3]},
  1868. },
  1869. {
  1870. "by": {"release": "1A", "session.status": "healthy"},
  1871. "totals": {"sum(session)": 1},
  1872. "series": {"sum(session)": [0, 1]},
  1873. },
  1874. ]
  1875. @freeze_time(MOCK_DATETIME)
  1876. def test_order_by_with_limit(self):
  1877. rando_project = self.create_project()
  1878. # Create two releases with no metrics data and then two releases with metric data
  1879. release_1a = self.create_release(project=rando_project, version="1A")
  1880. release_1b = self.create_release(project=rando_project, version="1B")
  1881. self.create_release(project=rando_project, version="1C")
  1882. self.create_release(project=rando_project, version="1D")
  1883. self.store_session(make_session(rando_project, release=release_1a.version))
  1884. self.store_session(make_session(rando_project, release=release_1b.version))
  1885. self.store_session(
  1886. make_session(rando_project, release=release_1b.version, status="crashed")
  1887. )
  1888. response = self.do_request(
  1889. {
  1890. "project": rando_project.id,
  1891. "statsPeriod": "1d",
  1892. "interval": "1d",
  1893. "field": ["sum(session)"],
  1894. "groupBy": ["release"],
  1895. "orderBy": "-release.timestamp",
  1896. "per_page": 3,
  1897. }
  1898. )
  1899. assert response.data["groups"] == [
  1900. {
  1901. "by": {"release": "1D"},
  1902. "totals": {"sum(session)": 0},
  1903. "series": {"sum(session)": [0, 0]},
  1904. },
  1905. {
  1906. "by": {"release": "1C"},
  1907. "totals": {"sum(session)": 0},
  1908. "series": {"sum(session)": [0, 0]},
  1909. },
  1910. {
  1911. "by": {"release": "1B"},
  1912. "totals": {"sum(session)": 2},
  1913. "series": {"sum(session)": [0, 2]},
  1914. },
  1915. ]
  1916. response = self.do_request(
  1917. {
  1918. "project": rando_project.id,
  1919. "statsPeriod": "1d",
  1920. "interval": "1d",
  1921. "field": ["sum(session)"],
  1922. "groupBy": ["release", "session.status"],
  1923. "orderBy": "-release.timestamp",
  1924. "per_page": 4,
  1925. }
  1926. )
  1927. assert response.data["groups"] == [
  1928. {
  1929. "by": {"release": "1D", "session.status": None},
  1930. "totals": {"sum(session)": 0},
  1931. "series": {"sum(session)": [0, 0]},
  1932. },
  1933. {
  1934. "by": {"release": "1C", "session.status": None},
  1935. "totals": {"sum(session)": 0},
  1936. "series": {"sum(session)": [0, 0]},
  1937. },
  1938. {
  1939. "by": {"release": "1B", "session.status": "abnormal"},
  1940. "totals": {"sum(session)": 0},
  1941. "series": {"sum(session)": [0, 0]},
  1942. },
  1943. {
  1944. "by": {"release": "1B", "session.status": "crashed"},
  1945. "totals": {"sum(session)": 1},
  1946. "series": {"sum(session)": [0, 1]},
  1947. },
  1948. ]
  1949. response = self.do_request(
  1950. {
  1951. "project": rando_project.id,
  1952. "statsPeriod": "1d",
  1953. "interval": "1d",
  1954. "field": ["sum(session)"],
  1955. "groupBy": ["release", "session.status", "project"],
  1956. "orderBy": "-release.timestamp",
  1957. "per_page": 2,
  1958. }
  1959. )
  1960. assert response.data["groups"] == [
  1961. {
  1962. "by": {"release": "1D", "session.status": None, "project": None},
  1963. "totals": {"sum(session)": 0},
  1964. "series": {"sum(session)": [0, 0]},
  1965. },
  1966. {
  1967. "by": {"release": "1C", "session.status": None, "project": None},
  1968. "totals": {"sum(session)": 0},
  1969. "series": {"sum(session)": [0, 0]},
  1970. },
  1971. ]
  1972. @freeze_time(MOCK_DATETIME)
  1973. def test_order_by_with_limit_and_offset(self):
  1974. rando_project = self.create_project()
  1975. # Create two releases with no metrics data and then two releases with metric data
  1976. release_1a = self.create_release(project=rando_project, version="1A")
  1977. release_1b = self.create_release(project=rando_project, version="1B")
  1978. self.create_release(project=rando_project, version="1C")
  1979. self.create_release(project=rando_project, version="1D")
  1980. self.store_session(make_session(rando_project, release=release_1a.version))
  1981. self.store_session(make_session(rando_project, release=release_1b.version))
  1982. response = self.do_request(
  1983. {
  1984. "project": rando_project.id,
  1985. "statsPeriod": "1d",
  1986. "interval": "1d",
  1987. "field": ["sum(session)"],
  1988. "groupBy": ["release"],
  1989. "orderBy": "-release.timestamp",
  1990. "per_page": 3,
  1991. "cursor": Cursor(0, 1),
  1992. }
  1993. )
  1994. assert response.data["detail"] == (
  1995. "Passing an offset value greater than 0 when ordering by release.timestamp "
  1996. "is not permitted"
  1997. )
  1998. @freeze_time(MOCK_DATETIME)
  1999. def test_order_by_with_environment_filter_on_preflight(self):
  2000. rando_project = self.create_project()
  2001. rando_env = self.create_environment(name="rando_env", project=self.project)
  2002. # Create two releases with no metrics data and then two releases with metric data
  2003. release_1a = self.create_release(
  2004. project=rando_project, version="1A", environments=[rando_env]
  2005. )
  2006. release_1b = self.create_release(
  2007. project=rando_project, version="1B", environments=[rando_env]
  2008. )
  2009. release_1c = self.create_release(project=rando_project, version="1C")
  2010. release_1d = self.create_release(project=rando_project, version="1D")
  2011. self.store_session(
  2012. make_session(rando_project, release=release_1a.version, environment="rando_env")
  2013. )
  2014. self.store_session(
  2015. make_session(rando_project, release=release_1b.version, environment="rando_env")
  2016. )
  2017. self.store_session(make_session(rando_project, release=release_1c.version))
  2018. self.store_session(make_session(rando_project, release=release_1d.version))
  2019. # Test env condition with IN
  2020. response = self.do_request(
  2021. {
  2022. "project": rando_project.id,
  2023. "statsPeriod": "1d",
  2024. "interval": "1d",
  2025. "field": ["sum(session)"],
  2026. "query": "environment:[rando_env,rando_enc2]",
  2027. "groupBy": ["release", "environment"],
  2028. "orderBy": "-release.timestamp",
  2029. "per_page": 4,
  2030. }
  2031. )
  2032. assert response.data["groups"] == [
  2033. {
  2034. "by": {"release": "1B", "environment": "rando_env"},
  2035. "totals": {"sum(session)": 1},
  2036. "series": {"sum(session)": [0, 1]},
  2037. },
  2038. {
  2039. "by": {"release": "1A", "environment": "rando_env"},
  2040. "totals": {"sum(session)": 1},
  2041. "series": {"sum(session)": [0, 1]},
  2042. },
  2043. ]
  2044. # Test env condition with NOT IN
  2045. response = self.do_request(
  2046. {
  2047. "project": rando_project.id,
  2048. "statsPeriod": "1d",
  2049. "interval": "1d",
  2050. "field": ["sum(session)"],
  2051. "query": "!environment:[rando_env,rando_enc2]",
  2052. "groupBy": ["release", "environment"],
  2053. "orderBy": "-release.timestamp",
  2054. "per_page": 4,
  2055. }
  2056. )
  2057. assert response.data["groups"] == [
  2058. {
  2059. "by": {"release": "1D", "environment": "production"},
  2060. "totals": {"sum(session)": 1},
  2061. "series": {"sum(session)": [0, 1]},
  2062. },
  2063. {
  2064. "by": {"release": "1C", "environment": "production"},
  2065. "totals": {"sum(session)": 1},
  2066. "series": {"sum(session)": [0, 1]},
  2067. },
  2068. ]
  2069. # Test env condition with invalid OR operation
  2070. response = self.do_request(
  2071. {
  2072. "project": rando_project.id,
  2073. "statsPeriod": "1d",
  2074. "interval": "1d",
  2075. "field": ["sum(session)"],
  2076. "query": "environment:rando_env OR environment:rando_enc2",
  2077. "groupBy": ["release", "environment"],
  2078. "orderBy": "-release.timestamp",
  2079. "per_page": 4,
  2080. }
  2081. )
  2082. assert response.json()["detail"] == "Unable to parse condition with environment"
  2083. @freeze_time(MOCK_DATETIME)
  2084. def test_order_by_without_release_groupby(self):
  2085. rando_project = self.create_project()
  2086. response = self.do_request(
  2087. {
  2088. "project": rando_project.id,
  2089. "statsPeriod": "1d",
  2090. "interval": "1d",
  2091. "query": "session.status:[crashed,errored]",
  2092. "field": ["sum(session)"],
  2093. "orderBy": "-release.timestamp",
  2094. "per_page": 2,
  2095. }
  2096. )
  2097. assert response.data["detail"] == (
  2098. "To sort by release.timestamp, tag release must be in the groupBy"
  2099. )
  2100. @freeze_time(MOCK_DATETIME)
  2101. def test_order_by_release_with_session_status_current_filter(self):
  2102. rando_project = self.create_project()
  2103. release_1a = self.create_release(project=rando_project, version="1A")
  2104. release_1b = self.create_release(project=rando_project, version="1B")
  2105. # Release 1B sessions
  2106. for _ in range(4):
  2107. self.store_session(
  2108. make_session(rando_project, release=release_1b.version, status="crashed")
  2109. )
  2110. for _ in range(10):
  2111. self.store_session(make_session(rando_project, release=release_1b.version))
  2112. for _ in range(3):
  2113. self.store_session(make_session(rando_project, errors=1, release=release_1b.version))
  2114. # Release 1A sessions
  2115. for _ in range(0, 2):
  2116. self.store_session(
  2117. make_session(rando_project, release=release_1a.version, status="crashed")
  2118. )
  2119. self.store_session(make_session(rando_project, release=release_1a.version))
  2120. for _ in range(3):
  2121. self.store_session(make_session(rando_project, errors=1, release=release_1a.version))
  2122. response = self.do_request(
  2123. {
  2124. "project": rando_project.id,
  2125. "statsPeriod": "1d",
  2126. "interval": "1d",
  2127. "query": "session.status:[crashed,errored]",
  2128. "field": ["sum(session)"],
  2129. "groupBy": ["release"],
  2130. "orderBy": "-release.timestamp",
  2131. }
  2132. )
  2133. assert response.data["groups"] == [
  2134. {
  2135. "by": {"release": "1B"},
  2136. "totals": {"sum(session)": 7},
  2137. "series": {"sum(session)": [0, 7]},
  2138. },
  2139. {
  2140. "by": {"release": "1A"},
  2141. "totals": {"sum(session)": 5},
  2142. "series": {"sum(session)": [0, 5]},
  2143. },
  2144. ]