# test_group_stream.py

import time
from datetime import timedelta, timezone
from unittest import mock

from django.utils import timezone as django_timezone

from sentry.api.event_search import SearchFilter, SearchKey, SearchValue
from sentry.api.serializers import serialize
from sentry.api.serializers.models.group import snuba_tsdb
from sentry.api.serializers.models.group_stream import StreamGroupSerializerSnuba
from sentry.models.environment import Environment
from sentry.testutils.cases import APITestCase, SnubaTestCase
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.testutils.silo import region_silo_test
from sentry.utils.cache import cache
from sentry.utils.hashlib import hash_values


@region_silo_test
class StreamGroupSerializerTestCase(APITestCase, SnubaTestCase):
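    # The serializer should forward the requested environment_ids to
    # snuba_tsdb.get_range, and pass None when no environments are given.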
    def test_environment(self):
        group = self.group
        organization_id = group.project.organization_id
        environment = Environment.get_or_create(group.project, "production")

        with mock.patch(
            "sentry.api.serializers.models.group_stream.snuba_tsdb.get_range",
            side_effect=snuba_tsdb.get_range,
        ) as get_range:
            serialize(
                [group],
                serializer=StreamGroupSerializerSnuba(
                    environment_ids=[environment.id],
                    stats_period="14d",
                    organization_id=organization_id,
                ),
                request=self.make_request(),
            )
            assert get_range.call_count == 1
            for args, kwargs in get_range.call_args_list:
                assert kwargs["environment_ids"] == [environment.id]

        with mock.patch(
            "sentry.api.serializers.models.group.snuba_tsdb.get_range",
            side_effect=snuba_tsdb.get_range,
        ) as get_range:
            serialize(
                [group],
                serializer=StreamGroupSerializerSnuba(
                    environment_ids=None, stats_period="14d", organization_id=organization_id
                ),
                request=self.make_request(),
            )
            assert get_range.call_count == 1
            for args, kwargs in get_range.call_args_list:
                assert kwargs["environment_ids"] is None
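
    # Session counts are only computed when the "sessions" expand is requested,
    # and they respect the environment filter and the stats time window.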
    def test_session_count(self):
        group = self.group
        organization_id = group.project.organization_id
        environment = Environment.get_or_create(group.project, "prod")
        dev_environment = Environment.get_or_create(group.project, "dev")
        no_sessions_environment = Environment.get_or_create(group.project, "no_sessions")

        self.received = time.time()
        self.session_started = time.time() // 60 * 60
        self.session_release = "foo@1.0.0"
        self.session_crashed_release = "foo@2.0.0"
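
        # An open ("ok") session in the "dev" environment.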
        self.store_session(
            {
                "session_id": "5d52fd05-fcc9-4bf3-9dc9-267783670341",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102667",
                "status": "ok",
                "seq": 0,
                "release": self.session_release,
                "environment": "dev",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 1,
                "errors": 0,
                "started": self.session_started - 120,
                "received": self.received - 120,
            }
        )
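
        # One session in "prod", reported twice: first as "ok" (seq 0), then as "exited" (seq 1).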
        self.store_session(
            {
                "session_id": "5e910c1a-6941-460e-9843-24103fb6a63c",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102668",
                "status": "ok",
                "seq": 0,
                "release": self.session_release,
                "environment": "prod",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 60.0,
                "errors": 0,
                "started": self.session_started - 240,
                "received": self.received - 240,
            }
        )
        self.store_session(
            {
                "session_id": "5e910c1a-6941-460e-9843-24103fb6a63c",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102669",
                "status": "exited",
                "seq": 1,
                "release": self.session_release,
                "environment": "prod",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 30.0,
                "errors": 0,
                "started": self.session_started,
                "received": self.received,
            }
        )
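
        # A crashed session in "prod" on the crashed release.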
        self.store_session(
            {
                "session_id": "a148c0c5-06a2-423b-8901-6b43b812cf82",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102660",
                "status": "crashed",
                "seq": 0,
                "release": self.session_crashed_release,
                "environment": "prod",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 60.0,
                "errors": 0,
                "started": self.session_started,
                "received": self.received,
            }
        )
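
        # Without expand=["sessions"], sessionCount is not included at all.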
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                stats_period="14d", organization_id=organization_id
            ),
            request=self.make_request(),
        )
        assert "sessionCount" not in result[0]
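
        # With expand=["sessions"] and no environment filter, all three distinct sessions count.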
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                stats_period="14d", expand=["sessions"], organization_id=organization_id
            ),
            request=self.make_request(),
        )
        assert result[0]["sessionCount"] == 3
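
        # Filtering to "prod" counts only the exited and crashed sessions.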
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[environment.id],
                stats_period="14d",
                expand=["sessions"],
                organization_id=organization_id,
            ),
            request=self.make_request(),
        )
        assert result[0]["sessionCount"] == 2
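
        # An environment with no sessions yields None.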
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[no_sessions_environment.id],
                stats_period="14d",
                expand=["sessions"],
                organization_id=organization_id,
            ),
            request=self.make_request(),
        )
        assert result[0]["sessionCount"] is None
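
        # Only the single existing "dev" session is counted.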
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id],
                stats_period="14d",
                expand=["sessions"],
                organization_id=organization_id,
            ),
            request=self.make_request(),
        )
        assert result[0]["sessionCount"] == 1
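
        # An older "dev" session, started roughly 18 days ago.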
        self.store_session(
            {
                "session_id": "a148c0c5-06a2-423b-8901-6b43b812cf83",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102627",
                "status": "ok",
                "seq": 0,
                "release": self.session_release,
                "environment": "dev",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 60.0,
                "errors": 0,
                "started": self.session_started - 1590061,  # approximately 18 days
                "received": self.received - 1590061,  # approximately 18 days
            }
        )
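
        # An explicit start/end window (30 to 15 days ago) picks up only that older session.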
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id],
                stats_period="14d",
                expand=["sessions"],
                start=django_timezone.now() - timedelta(days=30),
                end=django_timezone.now() - timedelta(days=15),
                organization_id=organization_id,
            ),
            request=self.make_request(),
        )
        assert result[0]["sessionCount"] == 1
        # Delete the cache from the query we did above, else this result comes back as 1 instead of 0.5
        key_hash = hash_values([group.project.id, "", "", f"{dev_environment.id}"])
        cache.delete(f"w-s:{key_hash}")
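
        # A second project whose group has error events but no sessions.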
        project2 = self.create_project(
            organization=self.organization, teams=[self.team], name="Another project"
        )
        data = {
            "fingerprint": ["meow"],
            "timestamp": iso_format(django_timezone.now()),
            "type": "error",
            "exception": [{"type": "Foo"}],
        }
        event = self.store_event(data=data, project_id=project2.id)
        self.store_event(data=data, project_id=project2.id)
        self.store_event(data=data, project_id=project2.id)
        result = serialize(
            [group, event.group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id],
                stats_period="14d",
                expand=["sessions"],
                organization_id=organization_id,
            ),
            request=self.make_request(),
        )
        assert result[0]["sessionCount"] == 2
        # No sessions in project2
        assert result[1]["sessionCount"] is None
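
    # "timestamp" and "date" search filters should not be turned into Snuba conditions.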
    def test_skipped_date_timestamp_filters(self):
        group = self.create_group()
        serializer = StreamGroupSerializerSnuba(
            search_filters=[
                SearchFilter(
                    SearchKey("timestamp"),
                    ">",
                    SearchValue(before_now(hours=1).replace(tzinfo=timezone.utc)),
                ),
                SearchFilter(
                    SearchKey("timestamp"),
                    "<",
                    SearchValue(before_now(seconds=1).replace(tzinfo=timezone.utc)),
                ),
                SearchFilter(
                    SearchKey("date"),
                    ">",
                    SearchValue(before_now(hours=1).replace(tzinfo=timezone.utc)),
                ),
                SearchFilter(
                    SearchKey("date"),
                    "<",
                    SearchValue(before_now(seconds=1).replace(tzinfo=timezone.utc)),
                ),
            ]
        )
        assert not serializer.conditions
        result = serialize(
            [group],
            self.user,
            serializer=serializer,
            request=self.make_request(),
        )
        assert result[0]["id"] == str(group.id)