# test_organization_sessions.py

import datetime
from unittest.mock import patch
from uuid import uuid4

import pytest
from django.urls import reverse
from django.utils import timezone
from freezegun import freeze_time

from sentry.models import ReleaseProjectEnvironment
from sentry.release_health.duplex import DuplexReleaseHealthBackend
from sentry.release_health.metrics import MetricsReleaseHealthBackend
from sentry.testutils import APITestCase, SnubaTestCase
from sentry.testutils.cases import BaseMetricsTestCase
from sentry.testutils.helpers.features import Feature
from sentry.testutils.helpers.link_header import parse_link_header
from sentry.testutils.silo import region_silo_test
from sentry.utils.cursors import Cursor
from sentry.utils.dates import to_timestamp

pytestmark = pytest.mark.sentry_metrics


def result_sorted(result):
    """sort the groups of the results array by the `by` object, ensuring a stable order"""

    def stable_dict(d):
        return tuple(sorted(d.items(), key=lambda t: t[0]))

    result["groups"].sort(key=lambda group: stable_dict(group["by"]))
    return result


ONE_DAY_AGO = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=1)
TWO_DAYS_AGO = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=2)
MOCK_DATETIME = ONE_DAY_AGO.replace(hour=12, minute=27, second=28, microsecond=303000)
MOCK_DATETIME_PLUS_TEN_MINUTES = MOCK_DATETIME + datetime.timedelta(minutes=10)
SNUBA_TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
MOCK_DATETIME_START_OF_DAY = MOCK_DATETIME.replace(hour=0, minute=0, second=0)

TIMESTAMP = to_timestamp(MOCK_DATETIME)
RECEIVED = TIMESTAMP
SESSION_STARTED = TIMESTAMP // 3600 * 3600  # round to the hour
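# For illustration (assumed value, not taken from the fixture): if TIMESTAMP
# were 1652345678.0, then 1652345678.0 // 3600 * 3600 == 1652342400.0, i.e.
# the 3278 seconds elapsed since the last full hour are truncated.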

TEMPLATE = {
    "distinct_id": "00000000-0000-0000-0000-000000000000",
    "status": "exited",
    "seq": 0,
    "release": "foo@1.0.0",
    "environment": "production",
    "retention_days": 90,
    "duration": 123.4,
    "errors": 0,
    "started": SESSION_STARTED,
    "received": RECEIVED,
}


def make_duration(kwargs):
    """Randomish but deterministic duration"""
    return float(len(str(kwargs)))
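# For illustration (assumed call): make_duration({"errors": 1}) returns
# float(len("{'errors': 1}")) == 13.0 -- identical kwargs always map to the
# same duration, while different kwargs usually do not.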


def make_session(project, **kwargs):
    return dict(
        dict(
            TEMPLATE,
            session_id=uuid4().hex,
            org_id=project.organization_id,
            project_id=project.id,
            duration=make_duration(kwargs),
        ),
        **kwargs,
    )
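# Note: the inner dict() layers the per-session defaults (fresh session_id,
# org, project, generated duration) on top of TEMPLATE, and the outer dict()
# lets any keyword argument (e.g. status="crashed") override those defaults.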


@region_silo_test
class OrganizationSessionsEndpointTest(APITestCase, SnubaTestCase):
    def setUp(self):
        super().setUp()
        self.setup_fixture()

    def setup_fixture(self):
        self.organization1 = self.organization
        self.organization2 = self.create_organization()
        self.organization3 = self.create_organization()
        self.project1 = self.project
        self.project2 = self.create_project()
        self.project3 = self.create_project()
        self.project4 = self.create_project(organization=self.organization2)

        self.user2 = self.create_user(is_superuser=False)
        self.create_member(
            user=self.user2, organization=self.organization1, role="member", teams=[]
        )
        self.create_member(user=self.user, organization=self.organization3, role="admin", teams=[])

        self.create_environment(self.project2, name="development")

        self.store_session(make_session(self.project1, started=SESSION_STARTED + 12 * 60))
        self.store_session(
            make_session(self.project1, started=SESSION_STARTED + 24 * 60, release="foo@1.1.0")
        )
        self.store_session(make_session(self.project1, started=SESSION_STARTED - 60 * 60))
        self.store_session(make_session(self.project1, started=SESSION_STARTED - 12 * 60 * 60))
        self.store_session(make_session(self.project2, status="crashed"))
        self.store_session(make_session(self.project2, environment="development"))
        self.store_session(make_session(self.project3, errors=1, release="foo@1.2.0"))
        self.store_session(
            make_session(
                self.project3,
                distinct_id="39887d89-13b2-4c84-8c23-5d13d2102664",
                started=SESSION_STARTED - 60 * 60,
            )
        )
        self.store_session(
            make_session(
                self.project3, distinct_id="39887d89-13b2-4c84-8c23-5d13d2102664", errors=1
            )
        )
        self.store_session(make_session(self.project4))
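
        # Fixture summary: project1 has 4 sessions (one on release foo@1.1.0),
        # project2 has 2 (one crashed, one in the "development" environment),
        # project3 has 3 (two errored, two sharing a distinct_id), and
        # project4 belongs to organization2 -- so organization1 totals 9.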

    def do_request(self, query, user=None, org=None):
        self.login_as(user=user or self.user)
        url = reverse(
            "sentry-api-0-organization-sessions",
            kwargs={"organization_slug": (org or self.organization).slug},
        )
        return self.client.get(url, query, format="json")

    def test_empty_request(self):
        response = self.do_request({})
        assert response.status_code == 400, response.content
        assert response.data == {"detail": 'Request is missing a "field"'}

    def test_inaccessible_project(self):
        response = self.do_request({"project": [self.project4.id]})
        assert response.status_code == 403, response.content
        assert response.data == {"detail": "You do not have permission to perform this action."}

    def test_unknown_field(self):
        response = self.do_request({"field": ["summ(session)"]})
        assert response.status_code == 400, response.content
        assert response.data == {"detail": 'Invalid field: "summ(session)"'}

    def test_unknown_groupby(self):
        response = self.do_request({"field": ["sum(session)"], "groupBy": ["environment_"]})
        assert response.status_code == 400, response.content
        assert response.data == {"detail": 'Invalid groupBy: "environment_"'}

    def test_illegal_groupby(self):
        response = self.do_request({"field": ["sum(session)"], "groupBy": ["issue.id"]})
        assert response.status_code == 400, response.content
        assert response.data == {"detail": 'Invalid groupBy: "issue.id"'}

    def test_invalid_query(self):
        response = self.do_request(
            {"statsPeriod": "1d", "field": ["sum(session)"], "query": ["foo:bar"]}
        )
        assert response.status_code == 400, response.content
        assert response.data["detail"] == "Invalid search filter: foo"

        response = self.do_request(
            {
                "statsPeriod": "1d",
                "field": ["sum(session)"],
                "query": ["release:foo-bar@1.2.3 (123)"],
            }
        )
        assert response.status_code == 400, response.content
        # TODO: it would be good to provide a better error here,
        # since it's not obvious where `message` comes from.
        assert response.data["detail"] == "Invalid search filter: message"

    def test_illegal_query(self):
        response = self.do_request(
            {"statsPeriod": "1d", "field": ["sum(session)"], "query": ["issue.id:123"]}
        )
        assert response.status_code == 400, response.content
        assert response.data["detail"] == "Invalid search filter: issue.id"

    def test_too_many_points(self):
        # default statsPeriod is 90d
        response = self.do_request({"field": ["sum(session)"], "interval": "1h"})
        assert response.status_code == 400, response.content
        assert response.data == {
            "detail": "Your interval and date range would create too many results. "
            "Use a larger interval, or a smaller date range."
        }

    @freeze_time(MOCK_DATETIME)
    def test_future_request(self):
        start = MOCK_DATETIME + datetime.timedelta(days=1)
        end = MOCK_DATETIME + datetime.timedelta(days=2)
        response = self.do_request(
            {
                "project": [-1],
                "interval": "1h",
                "field": ["sum(session)"],
                "start": start.strftime(SNUBA_TIME_FORMAT),
                "end": end.strftime(SNUBA_TIME_FORMAT),
            }
        )
        assert response.status_code == 200, response.content

    @freeze_time(MOCK_DATETIME)
    def test_timeseries_interval(self):
        response = self.do_request(
            {"project": [-1], "statsPeriod": "1d", "interval": "1d", "field": ["sum(session)"]}
        )

        start_of_day_snuba_format = MOCK_DATETIME_START_OF_DAY.strftime(SNUBA_TIME_FORMAT)

        assert response.status_code == 200, response.content
        assert result_sorted(response.data) == {
            "start": start_of_day_snuba_format,
            "end": MOCK_DATETIME.replace(minute=28, second=0).strftime(SNUBA_TIME_FORMAT),
            "query": "",
            "intervals": [start_of_day_snuba_format],
            "groups": [{"by": {}, "series": {"sum(session)": [9]}, "totals": {"sum(session)": 9}}],
        }

        response = self.do_request(
            {"project": [-1], "statsPeriod": "1d", "interval": "6h", "field": ["sum(session)"]}
        )

        assert response.status_code == 200, response.content
        assert result_sorted(response.data) == {
            "start": TWO_DAYS_AGO.replace(hour=18, minute=0, second=0).strftime(SNUBA_TIME_FORMAT),
            "end": MOCK_DATETIME.replace(minute=28, second=0).strftime(SNUBA_TIME_FORMAT),
            "query": "",
            "intervals": [
                TWO_DAYS_AGO.replace(hour=18, minute=0, second=0).strftime(SNUBA_TIME_FORMAT),
                MOCK_DATETIME.replace(hour=0, minute=0, second=0).strftime(SNUBA_TIME_FORMAT),
                MOCK_DATETIME.replace(hour=6, minute=0, second=0).strftime(SNUBA_TIME_FORMAT),
                MOCK_DATETIME.replace(hour=12, minute=0, second=0).strftime(SNUBA_TIME_FORMAT),
            ],
            "groups": [
                {"by": {}, "series": {"sum(session)": [0, 1, 2, 6]}, "totals": {"sum(session)": 9}}
            ],
        }

    @freeze_time(MOCK_DATETIME)
    def test_user_all_accessible(self):
        response = self.do_request(
            {"project": [-1], "statsPeriod": "1d", "interval": "1d", "field": ["sum(session)"]},
            user=self.user2,
        )

        start_of_day_snuba_format = MOCK_DATETIME_START_OF_DAY.strftime(SNUBA_TIME_FORMAT)

        assert response.status_code == 200, response.content
        assert result_sorted(response.data) == {
            "start": start_of_day_snuba_format,
            "end": MOCK_DATETIME.replace(hour=12, minute=28, second=0).strftime(SNUBA_TIME_FORMAT),
            "query": "",
            "intervals": [start_of_day_snuba_format],
            "groups": [{"by": {}, "series": {"sum(session)": [9]}, "totals": {"sum(session)": 9}}],
        }

    def test_no_projects(self):
        response = self.do_request(
            {"project": [-1], "statsPeriod": "1d", "interval": "1d", "field": ["sum(session)"]},
            org=self.organization3,
        )

        assert response.status_code == 400, response.content
        assert response.data == {"detail": "No projects available"}

    @freeze_time(MOCK_DATETIME_PLUS_TEN_MINUTES)
    def test_minute_resolution(self):
        with self.feature("organizations:minute-resolution-sessions"):
            response = self.do_request(
                {
                    "project": [self.project1.id, self.project2.id],
                    "statsPeriod": "30m",
                    "interval": "10m",
                    "field": ["sum(session)"],
                }
            )
            assert response.status_code == 200, response.content
            assert result_sorted(response.data) == {
                "start": MOCK_DATETIME.replace(hour=12, minute=0, second=0).strftime(
                    SNUBA_TIME_FORMAT
                ),
                "end": MOCK_DATETIME.replace(hour=12, minute=38, second=0).strftime(
                    SNUBA_TIME_FORMAT
                ),
                "query": "",
                "intervals": [
                    *[
                        MOCK_DATETIME.replace(hour=12, minute=minute, second=0).strftime(
                            SNUBA_TIME_FORMAT
                        )
                        for minute in [0, 10, 20, 30]
                    ],
                ],
                "groups": [
                    {
                        "by": {},
                        "series": {"sum(session)": [2, 1, 1, 0]},
                        "totals": {"sum(session)": 4},
                    }
                ],
            }

    @freeze_time(MOCK_DATETIME_PLUS_TEN_MINUTES)
    def test_10s_resolution(self):
        with self.feature("organizations:minute-resolution-sessions"):
            response = self.do_request(
                {
                    "project": [self.project1.id],
                    "statsPeriod": "1m",
                    "interval": "10s",
                    "field": ["sum(session)"],
                }
            )
            assert response.status_code == 200, response.content

            from sentry.api.endpoints.organization_sessions import release_health

            if release_health.is_metrics_based():
                # With the metrics backend, we should get exactly what we asked for,
                # 6 intervals with 10 second length. However, because of rounding,
                # we get it rounded to the next minute (see https://github.com/getsentry/sentry/blob/d6c59c32307eee7162301c76b74af419055b9b39/src/sentry/snuba/sessions_v2.py#L388-L392)
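                # (Assumed arithmetic: with "now" frozen at 12:37:28, a 1m
                # period rounded out to minute boundaries spans roughly
                # 12:36:00-12:37:30, i.e. nine 10-second buckets, not six.)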
                assert len(response.data["intervals"]) == 9
            else:
                # With the sessions backend, the entire period will be aligned
                # to one hour, and the resolution will still be one minute:
                assert len(response.data["intervals"]) == 38

    @freeze_time(MOCK_DATETIME)
    def test_filter_projects(self):
        response = self.do_request(
            {
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "project": [self.project2.id, self.project3.id],
            }
        )

        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {"by": {}, "series": {"sum(session)": [5]}, "totals": {"sum(session)": 5}}
        ]

    @freeze_time(MOCK_DATETIME)
    def test_filter_environment(self):
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "query": "environment:development",
            }
        )

        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {"by": {}, "series": {"sum(session)": [1]}, "totals": {"sum(session)": 1}}
        ]

        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "environment": ["development"],
            }
        )

        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {"by": {}, "series": {"sum(session)": [1]}, "totals": {"sum(session)": 1}}
        ]

    @freeze_time(MOCK_DATETIME)
    def test_filter_release(self):
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "query": "release:foo@1.1.0",
            }
        )

        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {"by": {}, "series": {"sum(session)": [1]}, "totals": {"sum(session)": 1}}
        ]

        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "query": 'release:"foo@1.1.0" or release:"foo@1.2.0"',
            }
        )

        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {"by": {}, "series": {"sum(session)": [2]}, "totals": {"sum(session)": 2}}
        ]

        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
  361. "query": 'release:"foo@1.1.0" or release:["foo@1.2.0", release:"foo@1.3.0"]',
  362. "groupBy": ["release"],
  363. }
  364. )
  365. assert response.status_code == 200, response.content
  366. assert result_sorted(response.data)["groups"] == [
  367. {
  368. "by": {"release": "foo@1.1.0"},
  369. "series": {"sum(session)": [1]},
  370. "totals": {"sum(session)": 1},
  371. },
  372. {
  373. "by": {"release": "foo@1.2.0"},
  374. "series": {"sum(session)": [1]},
  375. "totals": {"sum(session)": 1},
  376. },
  377. ]
  378. @freeze_time(MOCK_DATETIME)
  379. def test_filter_unknown_release(self):
  380. response = self.do_request(
  381. {
  382. "project": [-1],
  383. "statsPeriod": "1d",
  384. "interval": "1h",
  385. "field": ["sum(session)"],
  386. "query": "release:foo@6.6.6",
  387. "groupBy": "session.status",
  388. }
  389. )
  390. assert response.status_code == 200, response.content
  391. @freeze_time(MOCK_DATETIME)
  392. def test_filter_unknown_release_in(self):
  393. response = self.do_request(
  394. {
  395. "project": [-1],
  396. "statsPeriod": "1d",
  397. "interval": "1d",
  398. "field": ["sum(session)"],
  399. "query": "release:[foo@6.6.6]",
  400. "groupBy": "session.status",
  401. }
  402. )
  403. assert response.status_code == 200, response.content
  404. assert result_sorted(response.data)["groups"] == [
  405. {
  406. "by": {"session.status": status},
  407. "series": {"sum(session)": [0]},
  408. "totals": {"sum(session)": 0},
  409. }
  410. for status in ("abnormal", "crashed", "errored", "healthy")
  411. ]
  412. @freeze_time(MOCK_DATETIME)
  413. def test_groupby_project(self):
  414. response = self.do_request(
  415. {
  416. "project": [-1],
  417. "statsPeriod": "1d",
  418. "interval": "1d",
  419. "field": ["sum(session)"],
  420. "groupBy": ["project"],
  421. }
  422. )
  423. assert response.status_code == 200, response.content
  424. assert result_sorted(response.data)["groups"] == [
  425. {
  426. "by": {"project": self.project1.id},
  427. "series": {"sum(session)": [4]},
  428. "totals": {"sum(session)": 4},
  429. },
  430. {
  431. "by": {"project": self.project2.id},
  432. "series": {"sum(session)": [2]},
  433. "totals": {"sum(session)": 2},
  434. },
  435. {
  436. "by": {"project": self.project3.id},
  437. "series": {"sum(session)": [3]},
  438. "totals": {"sum(session)": 3},
  439. },
  440. ]
  441. @freeze_time(MOCK_DATETIME)
  442. def test_groupby_environment(self):
  443. response = self.do_request(
  444. {
  445. "project": [-1],
  446. "statsPeriod": "1d",
  447. "interval": "1d",
  448. "field": ["sum(session)"],
  449. "groupBy": ["environment"],
  450. }
  451. )
  452. assert response.status_code == 200, response.content
  453. assert result_sorted(response.data)["groups"] == [
  454. {
  455. "by": {"environment": "development"},
  456. "series": {"sum(session)": [1]},
  457. "totals": {"sum(session)": 1},
  458. },
  459. {
  460. "by": {"environment": "production"},
  461. "series": {"sum(session)": [8]},
  462. "totals": {"sum(session)": 8},
  463. },
  464. ]
  465. @freeze_time(MOCK_DATETIME)
  466. def test_groupby_release(self):
  467. response = self.do_request(
  468. {
  469. "project": [-1],
  470. "statsPeriod": "1d",
  471. "interval": "1d",
  472. "field": ["sum(session)"],
  473. "groupBy": ["release"],
  474. }
  475. )
  476. assert response.status_code == 200, response.content
  477. assert result_sorted(response.data)["groups"] == [
  478. {
  479. "by": {"release": "foo@1.0.0"},
  480. "series": {"sum(session)": [7]},
  481. "totals": {"sum(session)": 7},
  482. },
  483. {
  484. "by": {"release": "foo@1.1.0"},
  485. "series": {"sum(session)": [1]},
  486. "totals": {"sum(session)": 1},
  487. },
  488. {
  489. "by": {"release": "foo@1.2.0"},
  490. "series": {"sum(session)": [1]},
  491. "totals": {"sum(session)": 1},
  492. },
  493. ]
  494. @freeze_time(MOCK_DATETIME)
  495. def test_groupby_status(self):
  496. response = self.do_request(
  497. {
  498. "project": [-1],
  499. "statsPeriod": "1d",
  500. "interval": "1d",
  501. "field": ["sum(session)"],
  502. "groupBy": ["session.status"],
  503. }
  504. )
  505. assert response.status_code == 200, response.content
  506. assert result_sorted(response.data)["groups"] == [
  507. {
  508. "by": {"session.status": "abnormal"},
  509. "series": {"sum(session)": [0]},
  510. "totals": {"sum(session)": 0},
  511. },
  512. {
  513. "by": {"session.status": "crashed"},
  514. "series": {"sum(session)": [1]},
  515. "totals": {"sum(session)": 1},
  516. },
  517. {
  518. "by": {"session.status": "errored"},
  519. "series": {"sum(session)": [2]},
  520. "totals": {"sum(session)": 2},
  521. },
  522. {
  523. "by": {"session.status": "healthy"},
  524. "series": {"sum(session)": [6]},
  525. "totals": {"sum(session)": 6},
  526. },
  527. ]
  528. @freeze_time(MOCK_DATETIME)
  529. def test_groupby_cross(self):
  530. response = self.do_request(
  531. {
  532. "project": [-1],
  533. "statsPeriod": "1d",
  534. "interval": "1d",
  535. "field": ["sum(session)"],
  536. "groupBy": ["release", "environment"],
  537. }
  538. )
  539. assert response.status_code == 200, response.content
  540. assert result_sorted(response.data)["groups"] == [
  541. {
  542. "by": {"environment": "development", "release": "foo@1.0.0"},
  543. "series": {"sum(session)": [1]},
  544. "totals": {"sum(session)": 1},
  545. },
  546. {
  547. "by": {"environment": "production", "release": "foo@1.0.0"},
  548. "series": {"sum(session)": [6]},
  549. "totals": {"sum(session)": 6},
  550. },
  551. {
  552. "by": {"environment": "production", "release": "foo@1.1.0"},
  553. "series": {"sum(session)": [1]},
  554. "totals": {"sum(session)": 1},
  555. },
  556. {
  557. "by": {"environment": "production", "release": "foo@1.2.0"},
  558. "series": {"sum(session)": [1]},
  559. "totals": {"sum(session)": 1},
  560. },
  561. ]
  562. @freeze_time(MOCK_DATETIME)
  563. def test_users_groupby(self):
  564. response = self.do_request(
  565. {
  566. "project": [-1],
  567. "statsPeriod": "1d",
  568. "interval": "1d",
  569. "field": ["count_unique(user)"],
  570. }
  571. )
  572. assert response.status_code == 200, response.content
  573. assert result_sorted(response.data)["groups"] == [
  574. {"by": {}, "series": {"count_unique(user)": [1]}, "totals": {"count_unique(user)": 1}}
  575. ]
  576. response = self.do_request(
  577. {
  578. "project": [-1],
  579. "statsPeriod": "1d",
  580. "interval": "1d",
  581. "field": ["count_unique(user)"],
  582. "groupBy": ["session.status"],
  583. }
  584. )
  585. assert response.status_code == 200, response.content
  586. assert result_sorted(response.data)["groups"] == [
  587. {
  588. "by": {"session.status": "abnormal"},
  589. "series": {"count_unique(user)": [0]},
  590. "totals": {"count_unique(user)": 0},
  591. },
  592. {
  593. "by": {"session.status": "crashed"},
  594. "series": {"count_unique(user)": [0]},
  595. "totals": {"count_unique(user)": 0},
  596. },
  597. {
  598. "by": {"session.status": "errored"},
  599. "series": {"count_unique(user)": [1]},
  600. "totals": {"count_unique(user)": 1},
  601. },
  602. {
  603. "by": {"session.status": "healthy"},
  604. "series": {"count_unique(user)": [0]},
  605. "totals": {"count_unique(user)": 0},
  606. },
  607. ]
  608. expected_duration_values = {
  609. "avg(session.duration)": 42375.0,
  610. "max(session.duration)": 80000.0,
  611. "p50(session.duration)": 33500.0,
  612. "p75(session.duration)": 53750.0,
  613. "p90(session.duration)": 71600.0,
  614. "p95(session.duration)": 75800.0,
  615. "p99(session.duration)": 79159.99999999999,
  616. }
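    # (Assumption from the fixture: the values above follow from the durations
    # that make_duration() generated for the stored sessions, as reported by
    # the API in milliseconds.)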

    @freeze_time(MOCK_DATETIME)
    def test_users_groupby_status_advanced(self):
        project = self.create_project()

        user1 = uuid4().hex
        session1 = uuid4().hex

        user2 = uuid4().hex
        session2a = uuid4().hex
        session2b = uuid4().hex

        user3 = uuid4().hex
        session3 = uuid4().hex

        self.store_session(
            make_session(project, session_id=session1, distinct_id=user1, status="ok")
        )
        self.store_session(
            make_session(
                project, session_id=session1, distinct_id=user1, seq=1, errors=1, status="errored"
            )
        )
        self.store_session(
            make_session(project, session_id=session1, distinct_id=user1, seq=2, status="crashed")
        )
        self.store_session(
            make_session(project, session_id=session2a, distinct_id=user2, status="ok")
        )
        self.store_session(
            make_session(project, session_id=session2b, distinct_id=user2, status="ok")
        )
        self.store_session(
            make_session(project, session_id=session2b, distinct_id=user2, status="abnormal")
        )
        self.store_session(
            make_session(
                project, session_id=session3, distinct_id=user3, errors=123, status="errored"
            )
        )

        # Add some extra healthy users:
        for _ in range(3):
            user = uuid4().hex
            self.store_session(make_session(project, distinct_id=user))

        # First, check if totals make sense:
        response = self.do_request(
            {
                "project": [project.id],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["count_unique(user)"],
            }
        )
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {
                "by": {},
                "series": {"count_unique(user)": [6]},
                "totals": {"count_unique(user)": 6},
            },
        ]

        # Then check if grouping makes sense:
        response = self.do_request(
            {
                "project": [project.id],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["count_unique(user)"],
                "groupBy": ["session.status"],
            }
        )
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {
                "by": {"session.status": "abnormal"},
                "series": {"count_unique(user)": [1]},
                "totals": {"count_unique(user)": 1},
            },
            {
                "by": {"session.status": "crashed"},
                "series": {"count_unique(user)": [1]},
                "totals": {"count_unique(user)": 1},
            },
            {
                "by": {"session.status": "errored"},
                "series": {"count_unique(user)": [1]},
                "totals": {"count_unique(user)": 1},
            },
            {
                # Only the three extra users are purely healthy; user1, user2
                # and user3 are each counted once under their worst status:
  702. "by": {"session.status": "healthy"},
  703. "series": {"count_unique(user)": [3]},
  704. "totals": {"count_unique(user)": 3},
  705. },
  706. ]
  707. @freeze_time(MOCK_DATETIME)
  708. def test_duration_percentiles(self):
  709. response = self.do_request(
  710. {
  711. "project": [-1],
  712. "statsPeriod": "1d",
  713. "interval": "1d",
  714. "field": [
  715. "avg(session.duration)",
  716. "p50(session.duration)",
  717. "p75(session.duration)",
  718. "p90(session.duration)",
  719. "p95(session.duration)",
  720. "p99(session.duration)",
  721. "max(session.duration)",
  722. ],
  723. }
  724. )
  725. assert response.status_code == 200, response.content
  726. expected = self.expected_duration_values
  727. groups = result_sorted(response.data)["groups"]
  728. assert len(groups) == 1, groups
  729. group = groups[0]
  730. assert group["by"] == {}
  731. assert group["totals"] == pytest.approx(expected)
  732. for key, series in group["series"].items():
  733. assert series == pytest.approx([expected[key]])
  734. @freeze_time(MOCK_DATETIME)
  735. def test_duration_percentiles_groupby(self):
  736. response = self.do_request(
  737. {
  738. "project": [-1],
  739. "statsPeriod": "1d",
  740. "interval": "1d",
  741. "field": [
  742. "avg(session.duration)",
  743. "p50(session.duration)",
  744. "p75(session.duration)",
  745. "p90(session.duration)",
  746. "p95(session.duration)",
  747. "p99(session.duration)",
  748. "max(session.duration)",
  749. ],
  750. "groupBy": "session.status",
  751. }
  752. )
  753. assert response.status_code == 200, response.content
  754. expected = self.expected_duration_values
  755. seen = set() # Make sure all session statuses are listed
  756. for group in result_sorted(response.data)["groups"]:
  757. seen.add(group["by"].get("session.status"))
  758. if group["by"] == {"session.status": "healthy"}:
  759. assert group["totals"] == pytest.approx(expected)
  760. for key, series in group["series"].items():
  761. assert series == pytest.approx([expected[key]])
  762. else:
  763. # Everything's none:
  764. assert group["totals"] == {key: None for key in expected}, group["by"]
  765. assert group["series"] == {key: [None] for key in expected}
  766. assert seen == {"abnormal", "crashed", "errored", "healthy"}
  767. @freeze_time(MOCK_DATETIME)
  768. def test_snuba_limit_exceeded(self):
  769. # 2 * 3 => only show two groups
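        # (The patched limit below allows 6 data points; a 3d period at a 1d
        # interval is 3 points per group, so 6 // 3 == 2 groups survive.)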
        with patch("sentry.snuba.sessions_v2.SNUBA_LIMIT", 6), patch(
            "sentry.snuba.metrics.query.MAX_POINTS", 6
        ):
            response = self.do_request(
                {
                    "project": [-1],
                    "statsPeriod": "3d",
                    "interval": "1d",
                    # "user" is the first field, but "session" always wins:
                    "field": ["count_unique(user)", "sum(session)"],
                    "groupBy": ["project", "release", "environment"],
                }
            )

            assert response.status_code == 200, response.content
            assert result_sorted(response.data)["groups"] == [
                {
                    "by": {
                        "release": "foo@1.0.0",
                        "environment": "production",
                        "project": self.project1.id,
                    },
                    "totals": {"sum(session)": 3, "count_unique(user)": 0},
                    "series": {"sum(session)": [0, 0, 3], "count_unique(user)": [0, 0, 0]},
                },
                {
                    "by": {
                        "release": "foo@1.0.0",
                        "environment": "production",
                        "project": self.project3.id,
                    },
                    "totals": {"sum(session)": 2, "count_unique(user)": 1},
                    "series": {"sum(session)": [0, 0, 2], "count_unique(user)": [0, 0, 1]},
                },
            ]

    @freeze_time(MOCK_DATETIME)
    def test_snuba_limit_exceeded_groupby_status(self):
        """Get consistent result when grouping by status"""
        # 2 * 3 => only show two groups
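        # (Assumption: the 6-point budget applies before expanding by
        # session.status, so the same two (project, release, environment)
        # combinations come back, each split into four status groups.)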
        with patch("sentry.snuba.sessions_v2.SNUBA_LIMIT", 6), patch(
            "sentry.snuba.metrics.query.MAX_POINTS", 6
        ):
            response = self.do_request(
                {
                    "project": [-1],
                    "statsPeriod": "3d",
                    "interval": "1d",
                    "field": ["sum(session)", "count_unique(user)"],
                    "groupBy": ["project", "release", "environment", "session.status"],
                }
            )

            assert response.status_code == 200, response.content
            assert result_sorted(response.data)["groups"] == [
                {
                    "by": {
                        "project": self.project1.id,
                        "release": "foo@1.0.0",
                        "session.status": "abnormal",
                        "environment": "production",
                    },
                    "totals": {"sum(session)": 0, "count_unique(user)": 0},
                    "series": {"sum(session)": [0, 0, 0], "count_unique(user)": [0, 0, 0]},
                },
                {
                    "by": {
                        "project": self.project1.id,
                        "release": "foo@1.0.0",
                        "session.status": "crashed",
                        "environment": "production",
                    },
                    "totals": {"sum(session)": 0, "count_unique(user)": 0},
                    "series": {"sum(session)": [0, 0, 0], "count_unique(user)": [0, 0, 0]},
                },
                {
                    "by": {
                        "project": self.project1.id,
                        "release": "foo@1.0.0",
                        "environment": "production",
                        "session.status": "errored",
                    },
                    "totals": {"sum(session)": 0, "count_unique(user)": 0},
                    "series": {"sum(session)": [0, 0, 0], "count_unique(user)": [0, 0, 0]},
                },
                {
                    "by": {
                        "project": self.project1.id,
                        "session.status": "healthy",
                        "release": "foo@1.0.0",
                        "environment": "production",
                    },
                    "totals": {"sum(session)": 3, "count_unique(user)": 0},
                    "series": {"sum(session)": [0, 0, 3], "count_unique(user)": [0, 0, 0]},
                },
                {
                    "by": {
                        "session.status": "abnormal",
                        "release": "foo@1.0.0",
                        "project": self.project3.id,
                        "environment": "production",
                    },
                    "totals": {"sum(session)": 0, "count_unique(user)": 0},
                    "series": {"sum(session)": [0, 0, 0], "count_unique(user)": [0, 0, 0]},
                },
                {
                    "by": {
                        "release": "foo@1.0.0",
                        "project": self.project3.id,
                        "session.status": "crashed",
                        "environment": "production",
                    },
                    "totals": {"sum(session)": 0, "count_unique(user)": 0},
                    "series": {"sum(session)": [0, 0, 0], "count_unique(user)": [0, 0, 0]},
                },
                {
                    "by": {
                        "release": "foo@1.0.0",
                        "project": self.project3.id,
                        "environment": "production",
                        "session.status": "errored",
                    },
                    "totals": {"sum(session)": 1, "count_unique(user)": 1},
                    "series": {"sum(session)": [0, 0, 1], "count_unique(user)": [0, 0, 1]},
                },
                {
                    "by": {
                        "session.status": "healthy",
                        "release": "foo@1.0.0",
                        "project": self.project3.id,
                        "environment": "production",
                    },
                    "totals": {"sum(session)": 1, "count_unique(user)": 0},
                    "series": {"sum(session)": [0, 0, 1], "count_unique(user)": [0, 0, 0]},
                },
            ]

    @freeze_time(MOCK_DATETIME)
    def test_environment_filter_not_present_in_query(self):
        self.create_environment(name="abc")

        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "environment": ["development", "abc"],
            }
        )

        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {"by": {}, "series": {"sum(session)": [1]}, "totals": {"sum(session)": 1}}
        ]

    @freeze_time(MOCK_DATETIME)
    def test_sessions_without_users(self):
        # The first field defines by which groups additional queries are filtered
        # But if the first field is the user count, the series should still
        # contain the session counts even if the project does not track users
        response = self.do_request(
            {
                "project": self.project.id,  # project without users
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["count_unique(user)", "sum(session)"],
                "groupBy": "release",
            }
        )

        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {
                "by": {"release": "foo@1.0.0"},
                "series": {"count_unique(user)": [0], "sum(session)": [3]},
                "totals": {"count_unique(user)": 0, "sum(session)": 3},
            },
            {
                "by": {"release": "foo@1.1.0"},
                "series": {"count_unique(user)": [0], "sum(session)": [1]},
                "totals": {"count_unique(user)": 0, "sum(session)": 1},
            },
        ]

    @freeze_time(MOCK_DATETIME + datetime.timedelta(days=2))
    def test_groupby_no_data(self):
        # Empty results for everything
        response = self.do_request(
            {
                "project": self.project.id,  # project without users
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["count_unique(user)", "sum(session)"],
                "groupBy": "release",
            }
        )

        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == []

    @freeze_time(MOCK_DATETIME)
    def test_mix_known_and_unknown_strings(self):
        response = self.do_request(
            {
                "project": self.project.id,  # project without users
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["count_unique(user)", "sum(session)"],
                "query": "environment:[production,foo]",
            }
        )
        assert response.status_code == 200, response.data

    @freeze_time(MOCK_DATETIME)
    def test_release_semver_filter(self):
        r1 = self.create_release(version="ahmed@1.0.0")
        r2 = self.create_release(version="ahmed@1.1.0")
        r3 = self.create_release(version="ahmed@2.0.0")
        for r in (r1, r2, r3):
            self.store_session(make_session(self.project, release=r.version))

        response = self.do_request(
            {
                "project": self.project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "groupBy": ["release"],
                "query": "release.version:1.*",
            }
        )
        assert response.status_code == 200
        assert sorted(response.data["groups"], key=lambda x: x["by"]["release"]) == [
            {
                "by": {"release": "ahmed@1.0.0"},
                "totals": {"sum(session)": 1},
                "series": {"sum(session)": [1]},
            },
            {
                "by": {"release": "ahmed@1.1.0"},
                "totals": {"sum(session)": 1},
                "series": {"sum(session)": [1]},
            },
        ]

    @freeze_time(MOCK_DATETIME)
    def test_release_package_filter(self):
        r1 = self.create_release(version="ahmed@1.2.4+124")
        r2 = self.create_release(version="ahmed2@1.2.5+125")
        r3 = self.create_release(version="ahmed2@1.2.6+126")
        for r in (r1, r2, r3):
            self.store_session(make_session(self.project, release=r.version))

        response = self.do_request(
            {
                "project": self.project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "groupBy": ["release"],
                "query": "release.package:ahmed2",
            }
        )
        assert response.status_code == 200
        assert sorted(response.data["groups"], key=lambda x: x["by"]["release"]) == [
            {
                "by": {"release": "ahmed2@1.2.5+125"},
                "totals": {"sum(session)": 1},
                "series": {"sum(session)": [1]},
            },
            {
                "by": {"release": "ahmed2@1.2.6+126"},
                "totals": {"sum(session)": 1},
                "series": {"sum(session)": [1]},
            },
        ]

    @freeze_time(MOCK_DATETIME)
    def test_release_build_filter(self):
        r1 = self.create_release(version="ahmed@1.2.4+124")
        r2 = self.create_release(version="ahmed@1.2.3+123")
        r3 = self.create_release(version="ahmed2@1.2.5+125")
        for r in (r1, r2, r3):
            self.store_session(make_session(self.project, release=r.version))

        response = self.do_request(
            {
                "project": self.project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "groupBy": ["release"],
                "query": "release.build:<125",
            }
        )
        assert response.status_code == 200
        assert sorted(response.data["groups"], key=lambda x: x["by"]["release"]) == [
            {
                "by": {"release": "ahmed@1.2.3+123"},
                "totals": {"sum(session)": 1},
                "series": {"sum(session)": [1]},
            },
            {
                "by": {"release": "ahmed@1.2.4+124"},
                "totals": {"sum(session)": 1},
                "series": {"sum(session)": [1]},
            },
        ]

    @freeze_time(MOCK_DATETIME)
    def test_release_stage_filter(self):
        new_env = self.create_environment(name="new_env")
        adopted_release = self.create_release(version="adopted_release")
        not_adopted_release = self.create_release(version="not_adopted_release")
        ReleaseProjectEnvironment.objects.create(
            project_id=self.project.id,
            release_id=adopted_release.id,
            environment_id=new_env.id,
            adopted=timezone.now(),
        )
        ReleaseProjectEnvironment.objects.create(
            project_id=self.project.id,
            release_id=not_adopted_release.id,
            environment_id=new_env.id,
        )

        for r in (adopted_release, not_adopted_release):
            self.store_session(
                make_session(self.project, release=r.version, environment=new_env.name)
            )

        response = self.do_request(
            {
                "project": self.project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "groupBy": ["release"],
                "query": "release.stage:adopted",
                "environment": new_env.name,
            }
        )
        assert response.status_code == 200
        assert response.data["groups"] == [
            {
                "by": {"release": "adopted_release"},
                "totals": {"sum(session)": 1},
                "series": {"sum(session)": [1]},
            },
        ]


@patch("sentry.api.endpoints.organization_sessions.release_health", MetricsReleaseHealthBackend())
@region_silo_test
class OrganizationSessionsEndpointMetricsTest(
    BaseMetricsTestCase, OrganizationSessionsEndpointTest
):
    """Repeat all tests with metrics backend"""

    @freeze_time(MOCK_DATETIME)
    def test_orderby(self):
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "2d",
                "interval": "1d",
                "field": ["sum(session)"],
                "orderBy": "foobar",
            }
        )
        assert response.status_code == 400
        assert response.data == {"detail": "'orderBy' must be one of the provided 'fields'"}

        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "2d",
                "interval": "1d",
                "field": ["sum(session)"],
                "orderBy": "count_unique(user)",  # wrong field
            }
        )
        assert response.status_code == 400
        assert response.data == {"detail": "'orderBy' must be one of the provided 'fields'"}

        # Cannot sort by more than one field
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "2d",
                "interval": "1d",
                "field": ["sum(session)", "count_unique(user)"],
                "orderBy": ["sum(session)", "count_unique(user)"],
            }
        )
        assert response.status_code == 400
        assert response.data == {"detail": "Cannot order by multiple fields"}

        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "2d",
                "interval": "1d",
                "field": ["sum(session)"],
  1149. "orderBy": "sum(session)", # misses group by, but why not
            }
        )
        assert response.status_code == 200

        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "2d",
                "interval": "1d",
                "field": ["sum(session)"],
                "orderBy": "sum(session)",
                "groupBy": ["session.status"],
            }
        )
        assert response.status_code == 400
        assert response.data == {"detail": "Cannot use 'orderBy' when grouping by sessions.status"}

        response = self.do_request(
            {
                "project": [self.project.id, self.project3.id],
                "statsPeriod": "2d",
                "interval": "1d",
                "field": ["sum(session)", "p95(session.duration)"],
                "orderBy": "p95(session.duration)",
                "groupBy": ["project", "release", "environment"],
            }
        )

        expected_groups = [
            {
                "by": {
                    "project": self.project.id,
                    "release": "foo@1.0.0",
                    "environment": "production",
                },
                "totals": {"sum(session)": 3, "p95(session.duration)": 25000.0},
                "series": {"sum(session)": [0, 3], "p95(session.duration)": [None, 25000.0]},
            },
            {
                "by": {
                    "project": self.project3.id,
                    "release": "foo@1.2.0",
                    "environment": "production",
                },
                "totals": {"sum(session)": 1, "p95(session.duration)": 37000.0},
                "series": {"sum(session)": [0, 1], "p95(session.duration)": [None, 37000.0]},
            },
            {
                "by": {
                    "project": self.project.id,
                    "release": "foo@1.1.0",
                    "environment": "production",
                },
                "totals": {"sum(session)": 1, "p95(session.duration)": 49000.0},
                "series": {"sum(session)": [0, 1], "p95(session.duration)": [None, 49000.0]},
            },
            {
                "by": {
                    "project": self.project3.id,
                    "release": "foo@1.0.0",
                    "environment": "production",
                },
                "totals": {"sum(session)": 2, "p95(session.duration)": 79400.0},
                "series": {"sum(session)": [0, 2], "p95(session.duration)": [None, 79400.0]},
            },
        ]

        # Not using `result_sorted` here, because we want to verify the order
        assert response.status_code == 200, response.data
        assert response.data["groups"] == expected_groups

        # Sort descending
        response = self.do_request(
            {
                "project": [self.project.id, self.project3.id],
                "statsPeriod": "2d",
                "interval": "1d",
                "field": ["sum(session)", "p95(session.duration)"],
                "orderBy": "-p95(session.duration)",
                "groupBy": ["project", "release", "environment"],
            }
        )
        assert response.status_code == 200
        assert response.data["groups"] == list(reversed(expected_groups))

        # Add some more code coverage
        all_fields = [
            "sum(session)",
            "count_unique(user)",
            "avg(session.duration)",
        ]
        for field in all_fields:
            assert (
                self.do_request(
                    {
                        "project": [self.project.id, self.project3.id],
                        "statsPeriod": "2d",
                        "interval": "1d",
                        "field": all_fields,
                        "orderBy": field,
                        "groupBy": ["project", "release", "environment"],
                    }
                ).status_code
                == 200
            )

    @freeze_time(MOCK_DATETIME)
    def test_filter_by_session_status(self):
        default_request = {
            "project": [-1],
            "statsPeriod": "1d",
            "interval": "1d",
        }

        def req(**kwargs):
            return self.do_request(dict(default_request, **kwargs))

        response = req(field=["sum(session)"], query="session.status:bogus")
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == []

        response = req(field=["sum(session)"], query="!session.status:healthy")
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {"by": {}, "series": {"sum(session)": [3]}, "totals": {"sum(session)": 3}}
        ]

        # sum(session) filtered by multiple statuses adds them
        response = req(field=["sum(session)"], query="session.status:[healthy, errored]")
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {"by": {}, "series": {"sum(session)": [8]}, "totals": {"sum(session)": 8}}
        ]

        response = req(
            field=["sum(session)"],
            query="session.status:[healthy, errored]",
            groupBy="session.status",
        )
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {
                "by": {"session.status": "errored"},
                "totals": {"sum(session)": 2},
                "series": {"sum(session)": [2]},
            },
            {
                "by": {"session.status": "healthy"},
                "totals": {"sum(session)": 6},
                "series": {"sum(session)": [6]},
            },
        ]

        response = req(field=["sum(session)"], query="session.status:healthy release:foo@1.1.0")
        assert response.status_code == 200, response.content
        assert result_sorted(response.data)["groups"] == [
            {"by": {}, "series": {"sum(session)": [1]}, "totals": {"sum(session)": 1}}
        ]

        response = req(field=["sum(session)"], query="session.status:healthy OR release:foo@1.1.0")
        assert response.status_code == 400, response.data
        assert response.data == {"detail": "Unable to parse condition with session.status"}

        # count_unique(user) does not work with multiple session statuses selected
        response = req(field=["count_unique(user)"], query="session.status:[healthy, errored]")
        assert response.status_code == 400, response.data
        assert response.data == {
            "detail": "Cannot filter count_unique by multiple session.status unless it is in groupBy"
        }
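        # (Presumably because a user can appear under several statuses, so
        # summing per-status unique counts would double-count users; grouping
        # by session.status keeps the buckets separate instead.)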
  1304. response = req(field=["p95(session.duration)"], query="session.status:abnormal")
  1305. assert response.status_code == 200, response.content
  1306. assert result_sorted(response.data)["groups"] == []
  1307. @freeze_time(MOCK_DATETIME)
  1308. def test_filter_by_session_status_with_groupby(self):
  1309. default_request = {
  1310. "project": [-1],
  1311. "statsPeriod": "1d",
  1312. "interval": "1d",
  1313. "groupBy": "release",
  1314. }
  1315. def req(**kwargs):
  1316. return self.do_request(dict(default_request, **kwargs))
  1317. response = req(field=["sum(session)"], query="session.status:healthy")
  1318. assert response.status_code == 200, response.content
  1319. assert result_sorted(response.data)["groups"] == [
  1320. {
  1321. "by": {"release": "foo@1.0.0"},
  1322. "series": {"sum(session)": [5]},
  1323. "totals": {"sum(session)": 5},
  1324. },
  1325. {
  1326. "by": {"release": "foo@1.1.0"},
  1327. "series": {"sum(session)": [1]},
  1328. "totals": {"sum(session)": 1},
  1329. },
  1330. {
  1331. "by": {"release": "foo@1.2.0"},
  1332. "series": {"sum(session)": [0]},
  1333. "totals": {"sum(session)": 0},
  1334. },
  1335. ]

    @freeze_time(MOCK_DATETIME)
    def test_filter_by_session_status_with_orderby(self):
        default_request = {
            "project": [-1],
            "statsPeriod": "1d",
            "interval": "1d",
        }

        def req(**kwargs):
            return self.do_request(dict(default_request, **kwargs))

        response = req(
            field=["sum(session)"],
            query="session.status:[abnormal,crashed]",
            groupBy="release",
            orderBy="sum(session)",
        )
        assert response.status_code == 400, response.content
        assert response.data == {"detail": "Cannot order by sum(session) with the current filters"}

        response = req(
            field=["sum(session)"],
            query="session.status:healthy",
            groupBy="release",
            orderBy="sum(session)",
        )
        assert response.status_code == 400, response.content
        assert response.data == {"detail": "Cannot order by sum(session) with the current filters"}
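        # A plausible reason for the rejection: a session.status filter is resolved by
        # recombining several per-status aggregates, so there is no single column the
        # database could sort on, and the orderBy cannot be pushed down.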

    @freeze_time(MOCK_DATETIME)
    def test_crash_rate(self):
        default_request = {
            "project": [-1],
            "statsPeriod": "1d",
            "interval": "1d",
            "field": ["crash_rate(session)"],
        }

        def req(**kwargs):
            return self.do_request(dict(default_request, **kwargs))

        # 1 - filter session.status
        response = req(
            query="session.status:[abnormal,crashed]",
        )
        assert response.status_code == 400, response.content
        assert response.data == {
            "detail": "Cannot filter field crash_rate(session) by session.status"
        }

        # 2 - group by session.status
        response = req(
            groupBy="session.status",
        )
        assert response.status_code == 400, response.content
        assert response.data == {
            "detail": "Cannot group field crash_rate(session) by session.status"
        }
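        # crash_rate(session) is itself derived from session.status (crashed sessions
        # divided by all sessions), so filtering or grouping it by session.status
        # would be circular; both are rejected up front.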

        # 3 - fetch all
        response = req(
            field=[
                "crash_rate(session)",
                "crash_rate(user)",
                "crash_free_rate(session)",
                "crash_free_rate(user)",
            ],
            groupBy=["release", "environment"],
            orderBy=["crash_free_rate(session)"],
            query="release:foo@1.0.0",
        )
        assert response.status_code == 200, response.content
        assert response.data["groups"] == [
            {
                "by": {"environment": "production", "release": "foo@1.0.0"},
                "series": {
                    "crash_free_rate(session)": [0.8333333333333334],
                    "crash_free_rate(user)": [1.0],
                    "crash_rate(session)": [0.16666666666666666],
                    "crash_rate(user)": [0.0],
                },
                "totals": {
                    "crash_free_rate(session)": 0.8333333333333334,
                    "crash_free_rate(user)": 1.0,
                    "crash_rate(session)": 0.16666666666666666,
                    "crash_rate(user)": 0.0,
                },
            },
            {
                "by": {"environment": "development", "release": "foo@1.0.0"},
                "series": {
                    "crash_free_rate(session)": [1.0],
                    "crash_free_rate(user)": [None],
                    "crash_rate(session)": [0.0],
                    "crash_rate(user)": [None],
                },
                "totals": {
                    "crash_free_rate(session)": 1.0,
                    "crash_free_rate(user)": None,
                    "crash_rate(session)": 0.0,
                    "crash_rate(user)": None,
                },
            },
        ]

    @freeze_time(MOCK_DATETIME)
    def test_pagination(self):
        def do_request(cursor):
            return self.do_request(
                {
                    "project": self.project.id,  # project without users
                    "statsPeriod": "1d",
                    "interval": "1d",
                    "field": ["count_unique(user)", "sum(session)"],
                    "query": "",
                    "groupBy": "release",
                    "orderBy": "sum(session)",
                    "per_page": 1,
                    **({"cursor": cursor} if cursor else {}),
                }
            )

        response = do_request(None)
        assert response.status_code == 200, response.data
        assert len(response.data["groups"]) == 1
        assert response.data["groups"] == [
            {
                "by": {"release": "foo@1.1.0"},
                "series": {"count_unique(user)": [0], "sum(session)": [1]},
                "totals": {"count_unique(user)": 0, "sum(session)": 1},
            }
        ]
        links = {link["rel"]: link for url, link in parse_link_header(response["Link"]).items()}
        assert links["previous"]["results"] == "false"
        assert links["next"]["results"] == "true"
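        # Pagination state travels in the standard `Link` response header: each entry
        # carries a `cursor` for the adjacent page and a `results` flag indicating
        # whether that page is non-empty.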

        response = do_request(links["next"]["cursor"])
        assert response.status_code == 200, response.data
        assert len(response.data["groups"]) == 1
        assert response.data["groups"] == [
            {
                "by": {"release": "foo@1.0.0"},
                "series": {"count_unique(user)": [0], "sum(session)": [3]},
                "totals": {"count_unique(user)": 0, "sum(session)": 3},
            }
        ]
        links = {link["rel"]: link for url, link in parse_link_header(response["Link"]).items()}
        assert links["previous"]["results"] == "true"
        assert links["next"]["results"] == "false"

    def test_unrestricted_date_range(self):
        response = self.do_request(
            {
                "project": [-1],
                "statsPeriod": "7h",
                "interval": "5m",
                "field": ["sum(session)"],
            }
        )
        assert response.status_code == 200

    @freeze_time(MOCK_DATETIME)
    def test_release_is_empty(self):
        self.store_session(
            make_session(
                self.project1, started=SESSION_STARTED + 12 * 60, release="", environment=""
            )
        )
        for query in ('release:"" environment:""', 'release:"" OR environment:""'):
            # Empty strings are invalid values for releases and environments, but we
            # should still handle those cases correctly at the query layer.
            response = self.do_request(
                {
                    "project": self.project.id,  # project without users
                    "statsPeriod": "1d",
                    "interval": "1d",
                    "field": ["sum(session)"],
                    "query": query,
                    "groupBy": ["release", "environment"],
                }
            )
            assert response.status_code == 200, response.content
            assert result_sorted(response.data)["groups"] == [
                {
                    "by": {"environment": "", "release": ""},
                    "series": {"sum(session)": [1]},
                    "totals": {"sum(session)": 1},
                }
            ]
  1511. @patch("sentry.api.endpoints.organization_sessions.release_health", MetricsReleaseHealthBackend())
  1512. class SessionsMetricsSortReleaseTimestampTest(BaseMetricsTestCase, APITestCase):
  1513. def do_request(self, query, user=None, org=None):
  1514. self.login_as(user=user or self.user)
  1515. url = reverse(
  1516. "sentry-api-0-organization-sessions",
  1517. kwargs={"organization_slug": (org or self.organization).slug},
  1518. )
  1519. return self.client.get(url, query, format="json")

    @freeze_time(MOCK_DATETIME)
    def test_order_by_with_no_releases(self):
        """
        Test that ensures that if the preflight query returns no releases when
        ordering by `release.timestamp`, we get no groups.

        Essentially testing the empty preflight query filters branch.
        """
        project_random = self.create_project()
        for _ in range(0, 2):
            self.store_session(make_session(project_random))
        self.store_session(make_session(project_random, status="crashed"))

        response = self.do_request(
            {
                "project": project_random.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["crash_free_rate(session)"],
                "groupBy": ["release"],
                "orderBy": "-release.timestamp",
                "per_page": 3,
            }
        )
        assert response.data["groups"] == []

    def test_order_by_max_limit(self):
        response = self.do_request(
            {
                "project": self.project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["crash_free_rate(session)"],
                "groupBy": ["release"],
                "orderBy": "-release.timestamp",
                "per_page": 103,
            }
        )
        assert response.data["detail"] == (
            "This limit is too high for queries that requests a preflight query. "
            "Please choose a limit below 100"
        )

    @freeze_time(MOCK_DATETIME)
    def test_order_by(self):
        """
        Test that ensures that we are able to get the crash_free_rate for the 3 most
        recent releases when grouping by release
        """
        # Step 1: Create 3 releases
        release1b = self.create_release(version="1B")
        release1c = self.create_release(version="1C")
        release1d = self.create_release(version="1D")
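        # The ordering below is by release creation time, so 1D (created last) is the
        # most recent, regardless of the crash rates produced in the next step.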

        # Step 2: Create sessions in each of those releases to produce a crash free rate
        # Release 1c -> 66.7% Crash free rate
        for _ in range(0, 2):
            self.store_session(make_session(self.project, release=release1c.version))
        self.store_session(make_session(self.project, release=release1c.version, status="crashed"))

        # Release 1b -> 33.3% Crash free rate
        for _ in range(0, 2):
            self.store_session(
                make_session(self.project, release=release1b.version, status="crashed")
            )
        self.store_session(make_session(self.project, release=release1b.version))

        # Release 1d -> 80% Crash free rate
        for _ in range(0, 4):
            self.store_session(make_session(self.project, release=release1d.version))
        self.store_session(make_session(self.project, release=release1d.version, status="crashed"))

        # Step 3: Make request
        response = self.do_request(
            {
                "project": self.project.id,  # project without users
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["crash_free_rate(session)"],
                "groupBy": ["release"],
                "orderBy": "-release.timestamp",
                "per_page": 3,
            }
        )

        # Step 4: Validate results
        assert response.data["groups"] == [
            {
                "by": {"release": "1D"},
                "totals": {"crash_free_rate(session)": 0.8},
                "series": {"crash_free_rate(session)": [0.8]},
            },
            {
                "by": {"release": "1C"},
                "totals": {"crash_free_rate(session)": 0.6666666666666667},
                "series": {"crash_free_rate(session)": [0.6666666666666667]},
            },
            {
                "by": {"release": "1B"},
                "totals": {"crash_free_rate(session)": 0.33333333333333337},
                "series": {"crash_free_rate(session)": [0.33333333333333337]},
            },
        ]

    @freeze_time(MOCK_DATETIME)
    def test_order_by_with_session_status_groupby(self):
        """
        Test that ensures we are able to group by session.status and order by
        `release.timestamp`, since `release.timestamp` is generated from a preflight
        query.
        """
        rando_project = self.create_project()
        release_1a = self.create_release(project=rando_project, version="1A")
        release_1b = self.create_release(project=rando_project, version="1B")

        # Release 1B sessions
        for _ in range(4):
            self.store_session(
                make_session(rando_project, release=release_1b.version, status="crashed")
            )
        for _ in range(10):
            self.store_session(make_session(rando_project, release=release_1b.version))
        for _ in range(3):
            self.store_session(make_session(rando_project, errors=1, release=release_1b.version))

        # Release 1A sessions
        for _ in range(0, 2):
            self.store_session(
                make_session(rando_project, release=release_1a.version, status="crashed")
            )
        self.store_session(make_session(rando_project, release=release_1a.version))
        for _ in range(3):
            self.store_session(make_session(rando_project, errors=1, release=release_1a.version))

        response = self.do_request(
            {
                "project": rando_project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "groupBy": ["release", "session.status"],
                "orderBy": "-release.timestamp",
            }
        )
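        # Releases keep the preflight order (1B is newer than 1A), and within each
        # release every status is materialized, including the zero-count "abnormal".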
        assert response.data["groups"] == [
            {
                "by": {"release": "1B", "session.status": "abnormal"},
                "totals": {"sum(session)": 0},
                "series": {"sum(session)": [0]},
            },
            {
                "by": {"release": "1B", "session.status": "crashed"},
                "totals": {"sum(session)": 4},
                "series": {"sum(session)": [4]},
            },
            {
                "by": {"release": "1B", "session.status": "errored"},
                "totals": {"sum(session)": 3},
                "series": {"sum(session)": [3]},
            },
            {
                "by": {"release": "1B", "session.status": "healthy"},
                "totals": {"sum(session)": 10},
                "series": {"sum(session)": [10]},
            },
            {
                "by": {"release": "1A", "session.status": "abnormal"},
                "totals": {"sum(session)": 0},
                "series": {"sum(session)": [0]},
            },
            {
                "by": {"release": "1A", "session.status": "crashed"},
                "totals": {"sum(session)": 2},
                "series": {"sum(session)": [2]},
            },
            {
                "by": {"release": "1A", "session.status": "errored"},
                "totals": {"sum(session)": 3},
                "series": {"sum(session)": [3]},
            },
            {
                "by": {"release": "1A", "session.status": "healthy"},
                "totals": {"sum(session)": 1},
                "series": {"sum(session)": [1]},
            },
        ]

    @freeze_time(MOCK_DATETIME)
    def test_order_by_with_limit(self):
        rando_project = self.create_project()

        # Create two releases with metric data (1A, 1B) and two without (1C, 1D)
        release_1a = self.create_release(project=rando_project, version="1A")
        release_1b = self.create_release(project=rando_project, version="1B")
        self.create_release(project=rando_project, version="1C")
        self.create_release(project=rando_project, version="1D")

        self.store_session(make_session(rando_project, release=release_1a.version))
        self.store_session(make_session(rando_project, release=release_1b.version))
        self.store_session(
            make_session(rando_project, release=release_1b.version, status="crashed")
        )

        response = self.do_request(
            {
                "project": rando_project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "groupBy": ["release"],
                "orderBy": "-release.timestamp",
                "per_page": 3,
            }
        )
        assert response.data["groups"] == [
            {
                "by": {"release": "1D"},
                "totals": {"sum(session)": 0},
                "series": {"sum(session)": [0]},
            },
            {
                "by": {"release": "1C"},
                "totals": {"sum(session)": 0},
                "series": {"sum(session)": [0]},
            },
            {
                "by": {"release": "1B"},
                "totals": {"sum(session)": 2},
                "series": {"sum(session)": [2]},
            },
        ]

        response = self.do_request(
            {
                "project": rando_project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "groupBy": ["release", "session.status"],
                "orderBy": "-release.timestamp",
                "per_page": 4,
            }
        )
        assert response.data["groups"] == [
            {
                "by": {"release": "1D", "session.status": None},
                "totals": {"sum(session)": 0},
                "series": {"sum(session)": [0]},
            },
            {
                "by": {"release": "1C", "session.status": None},
                "totals": {"sum(session)": 0},
                "series": {"sum(session)": [0]},
            },
            {
                "by": {"release": "1B", "session.status": "abnormal"},
                "totals": {"sum(session)": 0},
                "series": {"sum(session)": [0]},
            },
            {
                "by": {"release": "1B", "session.status": "crashed"},
                "totals": {"sum(session)": 1},
                "series": {"sum(session)": [1]},
            },
        ]
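        # 1D and 1C are known only from the preflight query and have no metric rows,
        # so their non-release group keys come back as None with zeroed totals.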

        response = self.do_request(
            {
                "project": rando_project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "groupBy": ["release", "session.status", "project"],
                "orderBy": "-release.timestamp",
                "per_page": 2,
            }
        )
        assert response.data["groups"] == [
            {
                "by": {"release": "1D", "session.status": None, "project": None},
                "totals": {"sum(session)": 0},
                "series": {"sum(session)": [0]},
            },
            {
                "by": {"release": "1C", "session.status": None, "project": None},
                "totals": {"sum(session)": 0},
                "series": {"sum(session)": [0]},
            },
        ]

    @freeze_time(MOCK_DATETIME)
    def test_order_by_with_limit_and_offset(self):
        rando_project = self.create_project()

        # Create two releases with metric data (1A, 1B) and two without (1C, 1D)
        release_1a = self.create_release(project=rando_project, version="1A")
        release_1b = self.create_release(project=rando_project, version="1B")
        self.create_release(project=rando_project, version="1C")
        self.create_release(project=rando_project, version="1D")

        self.store_session(make_session(rando_project, release=release_1a.version))
        self.store_session(make_session(rando_project, release=release_1b.version))

        response = self.do_request(
            {
                "project": rando_project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "groupBy": ["release"],
                "orderBy": "-release.timestamp",
                "per_page": 3,
                "cursor": Cursor(0, 1),
            }
        )
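        # Presumably the page of releases is chosen by the preflight query rather than
        # by the metrics store, so a non-zero offset has nothing well-defined to skip.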
        assert response.data["detail"] == (
            "Passing an offset value greater than 0 when ordering by release.timestamp "
            "is not permitted"
        )

    @freeze_time(MOCK_DATETIME)
    def test_order_by_with_environment_filter_on_preflight(self):
        rando_project = self.create_project()
        rando_env = self.create_environment(name="rando_env", project=self.project)

        # Create two releases tied to rando_env and two releases not tied to it
        release_1a = self.create_release(
            project=rando_project, version="1A", environments=[rando_env]
        )
        release_1b = self.create_release(
            project=rando_project, version="1B", environments=[rando_env]
        )
        release_1c = self.create_release(project=rando_project, version="1C")
        release_1d = self.create_release(project=rando_project, version="1D")

        self.store_session(
            make_session(rando_project, release=release_1a.version, environment="rando_env")
        )
        self.store_session(
            make_session(rando_project, release=release_1b.version, environment="rando_env")
        )
        self.store_session(make_session(rando_project, release=release_1c.version))
        self.store_session(make_session(rando_project, release=release_1d.version))

        # Test env condition with IN
        response = self.do_request(
            {
                "project": rando_project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "query": "environment:[rando_env,rando_enc2]",
                "groupBy": ["release", "environment"],
                "orderBy": "-release.timestamp",
                "per_page": 4,
            }
        )
        assert response.data["groups"] == [
            {
                "by": {"release": "1B", "environment": "rando_env"},
                "totals": {"sum(session)": 1},
                "series": {"sum(session)": [1]},
            },
            {
                "by": {"release": "1A", "environment": "rando_env"},
                "totals": {"sum(session)": 1},
                "series": {"sum(session)": [1]},
            },
        ]
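        # The environment filter is evidently applied in the preflight query as well:
        # only the two releases tied to rando_env survive the IN condition.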

        # Test env condition with NOT IN
        response = self.do_request(
            {
                "project": rando_project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "query": "!environment:[rando_env,rando_enc2]",
                "groupBy": ["release", "environment"],
                "orderBy": "-release.timestamp",
                "per_page": 4,
            }
        )
        assert response.data["groups"] == [
            {
                "by": {"release": "1D", "environment": "production"},
                "totals": {"sum(session)": 1},
                "series": {"sum(session)": [1]},
            },
            {
                "by": {"release": "1C", "environment": "production"},
                "totals": {"sum(session)": 1},
                "series": {"sum(session)": [1]},
            },
        ]

        # Test env condition with invalid OR operation
        response = self.do_request(
            {
                "project": rando_project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "field": ["sum(session)"],
                "query": "environment:rando_env OR environment:rando_enc2",
                "groupBy": ["release", "environment"],
                "orderBy": "-release.timestamp",
                "per_page": 4,
            }
        )
        assert response.json()["detail"] == "Unable to parse condition with environment"

    @freeze_time(MOCK_DATETIME)
    def test_order_by_without_release_groupby(self):
        rando_project = self.create_project()
        response = self.do_request(
            {
                "project": rando_project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "query": "session.status:[crashed,errored]",
                "field": ["sum(session)"],
                "orderBy": "-release.timestamp",
                "per_page": 2,
            }
        )
        assert response.data["detail"] == (
            "To sort by release.timestamp, tag release must be in the groupBy"
        )

    @freeze_time(MOCK_DATETIME)
    def test_order_by_release_with_session_status_current_filter(self):
        rando_project = self.create_project()
        release_1a = self.create_release(project=rando_project, version="1A")
        release_1b = self.create_release(project=rando_project, version="1B")

        # Release 1B sessions
        for _ in range(4):
            self.store_session(
                make_session(rando_project, release=release_1b.version, status="crashed")
            )
        for _ in range(10):
            self.store_session(make_session(rando_project, release=release_1b.version))
        for _ in range(3):
            self.store_session(make_session(rando_project, errors=1, release=release_1b.version))

        # Release 1A sessions
        for _ in range(0, 2):
            self.store_session(
                make_session(rando_project, release=release_1a.version, status="crashed")
            )
        self.store_session(make_session(rando_project, release=release_1a.version))
        for _ in range(3):
            self.store_session(make_session(rando_project, errors=1, release=release_1a.version))

        response = self.do_request(
            {
                "project": rando_project.id,
                "statsPeriod": "1d",
                "interval": "1d",
                "query": "session.status:[crashed,errored]",
                "field": ["sum(session)"],
                "groupBy": ["release"],
                "orderBy": "-release.timestamp",
            }
        )
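        # Only crashed + errored sessions survive the filter:
        # 1B -> 4 + 3 = 7, 1A -> 2 + 3 = 5.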
        assert response.data["groups"] == [
            {
                "by": {"release": "1B"},
                "totals": {"sum(session)": 7},
                "series": {"sum(session)": [7]},
            },
            {
                "by": {"release": "1A"},
                "totals": {"sum(session)": 5},
                "series": {"sum(session)": [5]},
            },
        ]
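

# The duplex backend runs both the sessions-based and the metrics-based
# implementations and compares their results; the datetime passed to it below
# appears to be the cutoff before which metrics data is assumed not to exist.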
@patch(
    "sentry.api.endpoints.organization_sessions.release_health",
    DuplexReleaseHealthBackend(
        datetime.datetime(2022, 4, 28, 16, 0, tzinfo=datetime.timezone.utc)
    ),
)
class DuplexTestCase(BaseMetricsTestCase, APITestCase):
    """Tests specific to the duplex backend"""

    def do_request(self, query, user=None, org=None):
        self.login_as(user=user or self.user)
        url = reverse(
            "sentry-api-0-organization-sessions",
            kwargs={"organization_slug": (org or self.organization).slug},
        )
        return self.client.get(url, query, format="json")

    @freeze_time(MOCK_DATETIME)
    def test_invalid_params(self):
        """An InvalidParams error in the metrics backend leads to a 400 response when return-metrics is enabled"""
        self.create_project()
        with Feature("organizations:release-health-return-metrics"):
            response = self.do_request(
                {
                    "project": [-1],
                    "statsPeriod": ["24h"],
                    "interval": ["1h"],
                    "field": ["crash_rate(session)"],
                    "groupBy": ["session.status"],  # Cannot group crash rate by session status
                }
            )
            assert response.status_code == 400
            assert response.data == {
                "detail": "Cannot group field crash_rate(session) by session.status"
            }