test_sessions_v2.py 27 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797
  1. import math
  2. from datetime import datetime, timezone
  3. import pytest
  4. from django.http import QueryDict
  5. from sentry.release_health.base import AllowedResolution, SessionsQueryConfig
  6. from sentry.snuba.sessions_v2 import (
  7. InvalidParams,
  8. QueryDefinition,
  9. get_constrained_date_range,
  10. get_timestamps,
  11. massage_sessions_result,
  12. )
  13. from sentry.testutils.helpers.datetime import freeze_time
  14. from sentry.testutils.pytest.fixtures import django_db_all
  15. def _make_query(qs, allow_minute_resolution=True, params=None):
  16. query_config = SessionsQueryConfig(
  17. (AllowedResolution.one_minute if allow_minute_resolution else AllowedResolution.one_hour),
  18. allow_session_status_query=False,
  19. restrict_date_range=True,
  20. )
  21. return QueryDefinition(QueryDict(qs), params or {}, query_config)
  22. def result_sorted(result):
  23. """sort the groups of the results array by the `by` object, ensuring a stable order"""
  24. def stable_dict(d):
  25. return tuple(sorted(d.items(), key=lambda t: t[0]))
  26. result["groups"].sort(key=lambda group: stable_dict(group["by"]))
  27. return result
  28. @freeze_time("2018-12-11 03:21:00")
  29. def test_round_range():
  30. start, end, interval = get_constrained_date_range({"statsPeriod": "2d"})
  31. assert start == datetime(2018, 12, 9, 3, tzinfo=timezone.utc)
  32. assert end == datetime(2018, 12, 11, 4, 00, tzinfo=timezone.utc)
  33. start, end, interval = get_constrained_date_range({"statsPeriod": "2d", "interval": "1d"})
  34. assert start == datetime(2018, 12, 9, tzinfo=timezone.utc)
  35. assert end == datetime(2018, 12, 12, 0, 0, tzinfo=timezone.utc)
  36. def test_invalid_interval():
  37. with pytest.raises(InvalidParams):
  38. start, end, interval = get_constrained_date_range({"interval": "0d"})
  39. def test_round_exact():
  40. start, end, interval = get_constrained_date_range(
  41. {"start": "2021-01-12T04:06:16", "end": "2021-01-17T08:26:13", "interval": "1d"},
  42. )
  43. assert start == datetime(2021, 1, 12, tzinfo=timezone.utc)
  44. assert end == datetime(2021, 1, 18, tzinfo=timezone.utc)
  45. def test_inclusive_end():
  46. start, end, interval = get_constrained_date_range(
  47. {"start": "2021-02-24T00:00:00", "end": "2021-02-25T00:00:00", "interval": "1h"},
  48. )
  49. assert start == datetime(2021, 2, 24, tzinfo=timezone.utc)
  50. assert end == datetime(2021, 2, 25, 0, tzinfo=timezone.utc)
  51. @freeze_time("2021-03-05T11:00:00.000Z")
  52. def test_future_request():
  53. start, end, interval = get_constrained_date_range(
  54. {"start": "2021-03-05T12:00:00", "end": "2021-03-05T13:00:00", "interval": "1h"},
  55. )
  56. assert start == datetime(2021, 3, 5, 11, tzinfo=timezone.utc)
  57. assert end == datetime(2021, 3, 5, 13, 0, tzinfo=timezone.utc)
  58. @freeze_time("2021-03-05T11:14:17.105Z")
  59. def test_interval_restrictions():
  60. # making sure intervals are cleanly divisible
  61. with pytest.raises(InvalidParams, match="The interval has to be less than one day."):
  62. _make_query("statsPeriod=4d&interval=2d&field=sum(session)")
  63. with pytest.raises(
  64. InvalidParams, match="The interval should divide one day without a remainder."
  65. ):
  66. _make_query("statsPeriod=6h&interval=59m&field=sum(session)")
  67. with pytest.raises(
  68. InvalidParams, match="The interval should divide one day without a remainder."
  69. ):
  70. _make_query("statsPeriod=4d&interval=5h&field=sum(session)")
  71. _make_query("statsPeriod=6h&interval=90m&field=sum(session)")
  72. with pytest.raises(
  73. InvalidParams,
  74. match="The interval has to be a multiple of the minimum interval of one hour.",
  75. ):
  76. _make_query("statsPeriod=6h&interval=90m&field=sum(session)", False)
  77. with pytest.raises(
  78. InvalidParams,
  79. match="The interval has to be a multiple of the minimum interval of one minute.",
  80. ):
  81. _make_query("statsPeriod=1h&interval=90s&field=sum(session)")
  82. with pytest.raises(
  83. InvalidParams, match="Your interval and date range would create too many results."
  84. ):
  85. _make_query("statsPeriod=90d&interval=1h&field=sum(session)")
  86. @freeze_time("2020-12-18T11:14:17.105Z")
  87. def test_timestamps():
  88. query = _make_query("statsPeriod=1d&interval=12h&field=sum(session)")
  89. expected_timestamps = ["2020-12-17T00:00:00Z", "2020-12-17T12:00:00Z", "2020-12-18T00:00:00Z"]
  90. actual_timestamps = get_timestamps(query)
  91. assert actual_timestamps == expected_timestamps
  92. @freeze_time("2021-03-08T09:34:00.000Z")
  93. def test_hourly_rounded_start():
  94. query = _make_query("statsPeriod=30m&interval=1m&field=sum(session)")
  95. actual_timestamps = get_timestamps(query)
  96. assert actual_timestamps[0] == "2021-03-08T09:04:00Z"
  97. assert actual_timestamps[-1] == "2021-03-08T09:33:00Z"
  98. assert len(actual_timestamps) == 30
  99. # in this case "45m" means from 08:49:00-09:34:00, but since we round start/end
  100. # to hours, we extend the start time to 08:00:00.
  101. query = _make_query("statsPeriod=45m&interval=1m&field=sum(session)")
  102. actual_timestamps = get_timestamps(query)
  103. assert actual_timestamps[0] == "2021-03-08T08:49:00Z"
  104. assert actual_timestamps[-1] == "2021-03-08T09:33:00Z"
  105. assert len(actual_timestamps) == 45
  106. def test_rounded_end():
  107. query = _make_query(
  108. "field=sum(session)&interval=1h&start=2021-02-24T00:00:00Z&end=2021-02-25T00:00:00Z"
  109. )
  110. expected_timestamps = [
  111. "2021-02-24T00:00:00Z",
  112. "2021-02-24T01:00:00Z",
  113. "2021-02-24T02:00:00Z",
  114. "2021-02-24T03:00:00Z",
  115. "2021-02-24T04:00:00Z",
  116. "2021-02-24T05:00:00Z",
  117. "2021-02-24T06:00:00Z",
  118. "2021-02-24T07:00:00Z",
  119. "2021-02-24T08:00:00Z",
  120. "2021-02-24T09:00:00Z",
  121. "2021-02-24T10:00:00Z",
  122. "2021-02-24T11:00:00Z",
  123. "2021-02-24T12:00:00Z",
  124. "2021-02-24T13:00:00Z",
  125. "2021-02-24T14:00:00Z",
  126. "2021-02-24T15:00:00Z",
  127. "2021-02-24T16:00:00Z",
  128. "2021-02-24T17:00:00Z",
  129. "2021-02-24T18:00:00Z",
  130. "2021-02-24T19:00:00Z",
  131. "2021-02-24T20:00:00Z",
  132. "2021-02-24T21:00:00Z",
  133. "2021-02-24T22:00:00Z",
  134. "2021-02-24T23:00:00Z",
  135. ]
  136. actual_timestamps = get_timestamps(query)
  137. assert len(actual_timestamps) == 24
  138. assert actual_timestamps == expected_timestamps
  139. def test_simple_query():
  140. query = _make_query("statsPeriod=1d&interval=12h&field=sum(session)")
  141. assert query.query_columns == ["sessions"]
  142. def test_groupby_query():
  143. query = _make_query("statsPeriod=1d&interval=12h&field=sum(session)&groupBy=release")
  144. assert sorted(query.query_columns) == ["release", "sessions"]
  145. assert query.query_groupby == ["release"]
  146. def test_virtual_groupby_query():
  147. query = _make_query("statsPeriod=1d&interval=12h&field=sum(session)&groupBy=session.status")
  148. assert sorted(query.query_columns) == [
  149. "sessions",
  150. "sessions_abnormal",
  151. "sessions_crashed",
  152. "sessions_errored",
  153. ]
  154. assert query.query_groupby == []
  155. query = _make_query(
  156. "statsPeriod=1d&interval=12h&field=count_unique(user)&groupBy=session.status"
  157. )
  158. assert sorted(query.query_columns) == [
  159. "users",
  160. "users_abnormal",
  161. "users_crashed",
  162. "users_errored",
  163. ]
  164. assert query.query_groupby == []
@freeze_time("2022-05-04T09:00:00.000Z")
def _get_query_maker_params(project):
    # These parameters are computed in the API endpoint, before the
    # QueryDefinition is built. Since we're only testing the query
    # definition here, we can safely mock these.
    # NOTE(review): datetime.now() is naive (no tzinfo); the freeze_time
    # decorator above pins it to 2022-05-04T09:00. Confirm QueryDefinition
    # tolerates naive start/end values here.
    return {
        "start": datetime.now(),
        "end": datetime.now(),
        "organization_id": project.organization_id,
    }
  175. @django_db_all
  176. def test_filter_proj_slug_in_query(default_project):
  177. params = _get_query_maker_params(default_project)
  178. params["project_id"] = [default_project.id]
  179. query_def = _make_query(
  180. f"field=sum(session)&interval=2h&statsPeriod=2h&query=project%3A{default_project.slug}",
  181. params=params,
  182. )
  183. assert query_def.query == f"project:{default_project.slug}"
  184. assert query_def.params["project_id"] == [default_project.id]
  185. @django_db_all
  186. def test_filter_proj_slug_in_top_filter(default_project):
  187. params = _get_query_maker_params(default_project)
  188. params["project_id"] = [default_project.id]
  189. query_def = _make_query(
  190. f"field=sum(session)&interval=2h&statsPeriod=2h&project={default_project.id}",
  191. params=params,
  192. )
  193. assert query_def.query == ""
  194. assert query_def.params["project_id"] == [default_project.id]
  195. @django_db_all
  196. def test_filter_proj_slug_in_top_filter_and_query(default_project):
  197. params = _get_query_maker_params(default_project)
  198. params["project_id"] = [default_project.id]
  199. query_def = _make_query(
  200. f"field=sum(session)&interval=2h&statsPeriod=2h&project={default_project.id}&query=project%3A"
  201. f"{default_project.slug}",
  202. params=params,
  203. )
  204. assert query_def.query == f"project:{default_project.slug}"
  205. assert query_def.params["project_id"] == [default_project.id]
  206. @django_db_all
  207. def test_proj_neither_in_top_filter_nor_query(default_project):
  208. params = _get_query_maker_params(default_project)
  209. query_def = _make_query(
  210. "field=sum(session)&interval=2h&statsPeriod=2h",
  211. params=params,
  212. )
  213. assert query_def.query == ""
  214. assert "project_id" not in query_def.params
  215. @django_db_all
  216. def test_filter_env_in_query(default_project):
  217. env = "prod"
  218. params = _get_query_maker_params(default_project)
  219. query_def = _make_query(
  220. f"field=sum(session)&interval=2h&statsPeriod=2h&query=environment%3A{env}",
  221. params=params,
  222. )
  223. assert query_def.query == f"environment:{env}"
  224. @django_db_all
  225. def test_filter_env_in_top_filter(default_project):
  226. env = "prod"
  227. params = _get_query_maker_params(default_project)
  228. params["environment"] = "prod"
  229. query_def = _make_query(
  230. f"field=sum(session)&interval=2h&statsPeriod=2h&environment={env}",
  231. params=params,
  232. )
  233. assert query_def.query == ""
  234. @django_db_all
  235. def test_filter_env_in_top_filter_and_query(default_project):
  236. env = "prod"
  237. params = _get_query_maker_params(default_project)
  238. params["environment"] = "prod"
  239. query_def = _make_query(
  240. f"field=sum(session)&interval=2h&statsPeriod=2h&environment={env}&query=environment%3A{env}",
  241. params=params,
  242. )
  243. assert query_def.query == f"environment:{env}"
  244. @django_db_all
  245. def test_env_neither_in_top_filter_nor_query(default_project):
  246. params = _get_query_maker_params(default_project)
  247. query_def = _make_query(
  248. "field=sum(session)&interval=2h&statsPeriod=2h",
  249. params=params,
  250. )
  251. assert query_def.query == ""
@freeze_time("2020-12-18T11:14:17.105Z")
def test_massage_empty():
    """With no snuba rows at all, massaging yields an empty `groups` list
    but still reports the rounded start/end and the interval boundaries."""
    query = _make_query("statsPeriod=1d&interval=1d&field=sum(session)")
    expected_result = {
        "end": "2020-12-19T00:00:00Z",
        "groups": [],
        "intervals": ["2020-12-17T00:00:00Z", "2020-12-18T00:00:00Z"],
        "query": "",
        "start": "2020-12-17T00:00:00Z",
    }
    actual_result = result_sorted(massage_sessions_result(query, [], []))
    assert actual_result == expected_result
@freeze_time("2020-12-18T11:14:17.105Z")
def test_massage_unbalanced_results():
    """Groups appearing only in totals (or only in the timeseries) are still
    emitted, with the missing side zero-filled."""
    query = _make_query("statsPeriod=1d&interval=1d&field=sum(session)&groupBy=release")
    # Case 1: a group present in totals but absent from the timeseries gets
    # an all-zero series.
    result_totals = [
        {"release": "test-example-release", "sessions": 1},
    ]
    expected_result = {
        "start": "2020-12-17T00:00:00Z",
        "end": "2020-12-19T00:00:00Z",
        "query": "",
        "intervals": ["2020-12-17T00:00:00Z", "2020-12-18T00:00:00Z"],
        "groups": [
            {
                "by": {"release": "test-example-release"},
                "series": {"sum(session)": [0, 0]},
                "totals": {"sum(session)": 1},
            }
        ],
    }
    actual_result = result_sorted(massage_sessions_result(query, result_totals, []))
    assert actual_result == expected_result
    # Case 2: a group present only in the timeseries gets zeroed totals.
    result_totals = []
    result_timeseries = [
        {
            "release": "test-example-release",
            "sessions": 1,
            "bucketed_started": "2020-12-18T00:00:00+00:00",
        },
    ]
    expected_result = {
        "start": "2020-12-17T00:00:00Z",
        "end": "2020-12-19T00:00:00Z",
        "query": "",
        "intervals": ["2020-12-17T00:00:00Z", "2020-12-18T00:00:00Z"],
        "groups": [
            {
                "by": {"release": "test-example-release"},
                "series": {"sum(session)": [0, 1]},
                "totals": {"sum(session)": 0},
            }
        ],
    }
    actual_result = result_sorted(massage_sessions_result(query, result_totals, result_timeseries))
    assert actual_result == expected_result
@freeze_time("2020-12-18T11:14:17.105Z")
def test_massage_simple_timeseries():
    """A timeseries is filled up when it only receives partial data"""
    query = _make_query("statsPeriod=1d&interval=6h&field=sum(session)")
    result_totals = [{"sessions": 4}]
    # snuba returns the datetimes as strings for now
    result_timeseries = [
        {"sessions": 2, "bucketed_started": "2020-12-18T06:00:00+00:00"},
        {"sessions": 2, "bucketed_started": "2020-12-17T12:00:00+00:00"},
    ]
    # Two of the four 6h buckets have data; the middle two are zero-filled.
    expected_result = {
        "start": "2020-12-17T12:00:00Z",
        "end": "2020-12-18T11:15:00Z",
        "query": "",
        "intervals": [
            "2020-12-17T12:00:00Z",
            "2020-12-17T18:00:00Z",
            "2020-12-18T00:00:00Z",
            "2020-12-18T06:00:00Z",
        ],
        "groups": [
            {"by": {}, "series": {"sum(session)": [2, 0, 0, 2]}, "totals": {"sum(session)": 4}}
        ],
    }
    actual_result = result_sorted(
        massage_sessions_result(query, result_totals, result_timeseries)
    )
    assert actual_result == expected_result
@freeze_time("2020-12-18T11:14:17.105Z")
def test_massage_unordered_timeseries():
    """Timeseries rows arriving out of chronological order are re-bucketed
    into the correct interval slots."""
    query = _make_query("statsPeriod=1d&interval=6h&field=sum(session)")
    result_totals = [{"sessions": 10}]
    # snuba returns the datetimes as strings for now
    result_timeseries = [
        {"sessions": 3, "bucketed_started": "2020-12-18T00:00:00+00:00"},
        {"sessions": 2, "bucketed_started": "2020-12-17T18:00:00+00:00"},
        {"sessions": 4, "bucketed_started": "2020-12-18T06:00:00+00:00"},
        {"sessions": 1, "bucketed_started": "2020-12-17T12:00:00+00:00"},
    ]
    # The series is expected in interval order regardless of input order.
    expected_result = {
        "start": "2020-12-17T12:00:00Z",
        "end": "2020-12-18T11:15:00Z",
        "query": "",
        "intervals": [
            "2020-12-17T12:00:00Z",
            "2020-12-17T18:00:00Z",
            "2020-12-18T00:00:00Z",
            "2020-12-18T06:00:00Z",
        ],
        "groups": [
            {"by": {}, "series": {"sum(session)": [1, 2, 3, 4]}, "totals": {"sum(session)": 10}}
        ],
    }
    actual_result = result_sorted(
        massage_sessions_result(query, result_totals, result_timeseries)
    )
    assert actual_result == expected_result
@freeze_time("2020-12-18T11:14:17.105Z")
def test_massage_no_timeseries():
    """When no timeseries is provided (None), groups contain only totals —
    no `series` key is emitted."""
    # NOTE(review): the parameter is `groupby=projects` (lowercase b), not
    # the `groupBy` key used elsewhere in this file — confirm this is
    # intentional (as written, the grouping is presumably ignored).
    query = _make_query("statsPeriod=1d&interval=6h&field=sum(session)&groupby=projects")
    result_totals = [{"sessions": 4}]
    # snuba returns the datetimes as strings for now
    result_timeseries = None
    expected_result = {
        "start": "2020-12-17T12:00:00Z",
        "end": "2020-12-18T11:15:00Z",
        "query": "",
        "intervals": [
            "2020-12-17T12:00:00Z",
            "2020-12-17T18:00:00Z",
            "2020-12-18T00:00:00Z",
            "2020-12-18T06:00:00Z",
        ],
        "groups": [{"by": {}, "totals": {"sum(session)": 4}}],
    }
    actual_result = result_sorted(
        massage_sessions_result(query, result_totals, result_timeseries)
    )
    assert actual_result == expected_result
def test_massage_exact_timeseries():
    """Explicit start/end are rounded outward to interval boundaries in the
    massaged result (no freeze_time needed — the range is fully specified)."""
    query = _make_query(
        "start=2020-12-17T15:12:34Z&end=2020-12-18T11:14:17Z&interval=6h&field=sum(session)"
    )
    result_totals = [{"sessions": 4}]
    result_timeseries = [
        {"sessions": 2, "bucketed_started": "2020-12-18T06:00:00+00:00"},
        {"sessions": 2, "bucketed_started": "2020-12-17T12:00:00+00:00"},
    ]
    expected_result = {
        "start": "2020-12-17T12:00:00Z",
        "end": "2020-12-18T12:00:00Z",
        "query": "",
        "intervals": [
            "2020-12-17T12:00:00Z",
            "2020-12-17T18:00:00Z",
            "2020-12-18T00:00:00Z",
            "2020-12-18T06:00:00Z",
        ],
        "groups": [
            {"by": {}, "series": {"sum(session)": [2, 0, 0, 2]}, "totals": {"sum(session)": 4}}
        ],
    }
    actual_result = result_sorted(
        massage_sessions_result(query, result_totals, result_timeseries)
    )
    assert actual_result == expected_result
@freeze_time("2020-12-18T11:14:17.105Z")
def test_massage_groupby_timeseries():
    """Each release group gets its own zero-filled series, matched to its
    totals row by the `by` key."""
    query = _make_query("statsPeriod=1d&interval=6h&field=sum(session)&groupBy=release")
    result_totals = [
        {"release": "test-example-release", "sessions": 4},
        {"release": "test-example-release-2", "sessions": 1},
    ]
    # snuba returns the datetimes as strings for now
    result_timeseries = [
        {
            "release": "test-example-release",
            "sessions": 2,
            "bucketed_started": "2020-12-18T06:00:00+00:00",
        },
        {
            "release": "test-example-release-2",
            "sessions": 1,
            "bucketed_started": "2020-12-18T06:00:00+00:00",
        },
        {
            "release": "test-example-release",
            "sessions": 2,
            "bucketed_started": "2020-12-17T12:00:00+00:00",
        },
    ]
    expected_result = {
        "start": "2020-12-17T12:00:00Z",
        "end": "2020-12-18T11:15:00Z",
        "query": "",
        "intervals": [
            "2020-12-17T12:00:00Z",
            "2020-12-17T18:00:00Z",
            "2020-12-18T00:00:00Z",
            "2020-12-18T06:00:00Z",
        ],
        "groups": [
            {
                "by": {"release": "test-example-release"},
                "series": {"sum(session)": [2, 0, 0, 2]},
                "totals": {"sum(session)": 4},
            },
            {
                "by": {"release": "test-example-release-2"},
                "series": {"sum(session)": [0, 0, 0, 1]},
                "totals": {"sum(session)": 1},
            },
        ],
    }
    actual_result = result_sorted(
        massage_sessions_result(query, result_totals, result_timeseries)
    )
    assert actual_result == expected_result
@freeze_time("2020-12-18T13:25:15.769Z")
def test_massage_virtual_groupby_timeseries():
    """The virtual session.status groupby splits the flat per-status columns
    (sessions_crashed, users_errored, ...) into separate groups, deriving the
    `healthy` group by subtraction from the overall counts."""
    query = _make_query(
        "statsPeriod=1d&interval=6h&field=sum(session)&field=count_unique(user)&groupBy=session.status"
    )
    result_totals = [
        {
            "users": 1,
            "users_crashed": 1,
            "sessions": 31,
            "sessions_errored": 15,
            "users_errored": 1,
            "sessions_abnormal": 6,
            "sessions_crashed": 8,
            "users_abnormal": 0,
        }
    ]
    # snuba returns the datetimes as strings for now
    result_timeseries = [
        {
            "sessions_errored": 1,
            "users": 1,
            "users_crashed": 1,
            "sessions_abnormal": 0,
            "sessions": 3,
            "users_errored": 1,
            "users_abnormal": 0,
            "sessions_crashed": 1,
            "bucketed_started": "2020-12-18T12:00:00+00:00",
        },
        {
            "sessions_errored": 0,
            "users": 1,
            "users_crashed": 0,
            "sessions_abnormal": 0,
            "sessions": 3,
            "users_errored": 0,
            "users_abnormal": 0,
            "sessions_crashed": 0,
            "bucketed_started": "2020-12-18T06:00:00+00:00",
        },
        {
            "sessions_errored": 10,
            "users": 1,
            "users_crashed": 0,
            "sessions_abnormal": 2,
            "sessions": 15,
            "users_errored": 0,
            "users_abnormal": 0,
            "sessions_crashed": 4,
            "bucketed_started": "2020-12-18T00:00:00+00:00",
        },
        {
            "sessions_errored": 4,
            "users": 1,
            "users_crashed": 0,
            "sessions_abnormal": 4,
            "sessions": 10,
            "users_errored": 0,
            "users_abnormal": 0,
            "sessions_crashed": 3,
            "bucketed_started": "2020-12-17T18:00:00+00:00",
        },
    ]
    expected_result = {
        "start": "2020-12-17T18:00:00Z",
        "end": "2020-12-18T13:26:00Z",
        "query": "",
        "intervals": [
            "2020-12-17T18:00:00Z",
            "2020-12-18T00:00:00Z",
            "2020-12-18T06:00:00Z",
            "2020-12-18T12:00:00Z",
        ],
        "groups": [
            {
                "by": {"session.status": "abnormal"},
                "series": {"count_unique(user)": [0, 0, 0, 0], "sum(session)": [4, 2, 0, 0]},
                "totals": {"count_unique(user)": 0, "sum(session)": 6},
            },
            {
                "by": {"session.status": "crashed"},
                "series": {"count_unique(user)": [0, 0, 0, 1], "sum(session)": [3, 4, 0, 1]},
                "totals": {"count_unique(user)": 1, "sum(session)": 8},
            },
            {
                "by": {"session.status": "errored"},
                "series": {"count_unique(user)": [0, 0, 0, 0], "sum(session)": [0, 4, 0, 0]},
                "totals": {"count_unique(user)": 0, "sum(session)": 1},
            },
            {
                "by": {"session.status": "healthy"},
                "series": {"count_unique(user)": [1, 1, 1, 0], "sum(session)": [6, 5, 3, 2]},
                # while in one of the time slots, we have a healthy user, it is
                # the *same* user as the one experiencing a crash later on,
                # so in the *whole* time window, that one user is not counted as healthy,
                # so the `0` here is expected, as that's an example of the `count_unique` behavior.
                "totals": {"count_unique(user)": 0, "sum(session)": 16},
            },
        ],
    }
    actual_result = result_sorted(
        massage_sessions_result(query, result_totals, result_timeseries)
    )
    assert actual_result == expected_result
@freeze_time("2020-12-18T13:25:15.769Z")
def test_clamping_in_massage_sessions_results_with_groupby_timeseries():
    """Derived per-status series are clamped at zero: when the errored count
    exceeds the overall count (second bucket: 10 errored vs 5 total), the
    computed `healthy` value must not go negative. No totals rows are
    supplied, so all totals come back as zero."""
    query = _make_query(
        "statsPeriod=12h&interval=6h&field=sum(session)&field=count_unique(user)&groupBy=session.status"
    )
    # snuba returns the datetimes as strings for now
    result_timeseries = [
        {
            "sessions": 7,
            "sessions_errored": 3,
            "sessions_crashed": 2,
            "sessions_abnormal": 2,
            "users": 7,
            "users_errored": 3,
            "users_crashed": 2,
            "users_abnormal": 2,
            "bucketed_started": "2020-12-18T12:00:00+00:00",
        },
        {
            "sessions": 5,
            "sessions_errored": 10,
            "sessions_crashed": 0,
            "sessions_abnormal": 0,
            "users": 5,
            "users_errored": 10,
            "users_crashed": 0,
            "users_abnormal": 0,
            "bucketed_started": "2020-12-18T06:00:00+00:00",
        },
    ]
    expected_result = {
        "start": "2020-12-18T06:00:00Z",
        "end": "2020-12-18T13:26:00Z",
        "query": "",
        "intervals": [
            "2020-12-18T06:00:00Z",
            "2020-12-18T12:00:00Z",
        ],
        "groups": [
            {
                "by": {"session.status": "abnormal"},
                "series": {"count_unique(user)": [0, 2], "sum(session)": [0, 2]},
                "totals": {"count_unique(user)": 0, "sum(session)": 0},
            },
            {
                "by": {"session.status": "crashed"},
                "series": {"count_unique(user)": [0, 2], "sum(session)": [0, 2]},
                "totals": {"count_unique(user)": 0, "sum(session)": 0},
            },
            {
                "by": {"session.status": "errored"},
                "series": {"count_unique(user)": [10, 0], "sum(session)": [10, 0]},
                "totals": {"count_unique(user)": 0, "sum(session)": 0},
            },
            {
                "by": {"session.status": "healthy"},
                # healthy = total - errored, clamped at 0 for the first bucket.
                "series": {"count_unique(user)": [0, 4], "sum(session)": [0, 4]},
                "totals": {"count_unique(user)": 0, "sum(session)": 0},
            },
        ],
    }
    actual_result = result_sorted(massage_sessions_result(query, [], result_timeseries))
    assert actual_result == expected_result
@freeze_time("2020-12-18T11:14:17.105Z")
def test_nan_duration():
    """Non-finite duration aggregates (NaN / infinity) coming back from snuba
    are rendered as None in both series and totals."""
    query = _make_query(
        "statsPeriod=1d&interval=6h&field=avg(session.duration)&field=p50(session.duration)"
    )
    result_totals = [
        {
            "duration_avg": math.nan,
            "duration_quantiles": [math.inf, math.inf, math.inf, math.inf, math.inf, math.inf],
        },
    ]
    result_timeseries = [
        {
            "duration_avg": math.inf,
            "duration_quantiles": [math.inf, math.inf, math.inf, math.inf, math.inf, math.inf],
            "bucketed_started": "2020-12-18T06:00:00+00:00",
        },
        {
            "duration_avg": math.nan,
            "duration_quantiles": [math.nan, math.nan, math.nan, math.nan, math.nan, math.nan],
            "bucketed_started": "2020-12-17T12:00:00+00:00",
        },
    ]
    expected_result = {
        "start": "2020-12-17T12:00:00Z",
        "end": "2020-12-18T11:15:00Z",
        "query": "",
        "intervals": [
            "2020-12-17T12:00:00Z",
            "2020-12-17T18:00:00Z",
            "2020-12-18T00:00:00Z",
            "2020-12-18T06:00:00Z",
        ],
        "groups": [
            {
                "by": {},
                "series": {
                    "avg(session.duration)": [None, None, None, None],
                    "p50(session.duration)": [None, None, None, None],
                },
                "totals": {"avg(session.duration)": None, "p50(session.duration)": None},
            },
        ],
    }
    actual_result = result_sorted(
        massage_sessions_result(query, result_totals, result_timeseries)
    )
    assert actual_result == expected_result