# test_group_stream.py
  1. import time
  2. from datetime import timedelta
  3. from unittest import mock
  4. from django.utils import timezone
  5. from sentry.api.event_search import SearchFilter, SearchKey, SearchValue
  6. from sentry.api.serializers import serialize
  7. from sentry.api.serializers.models.group import snuba_tsdb
  8. from sentry.api.serializers.models.group_stream import StreamGroupSerializerSnuba
  9. from sentry.models.environment import Environment
  10. from sentry.testutils.cases import APITestCase, SnubaTestCase
  11. from sentry.testutils.helpers.datetime import before_now, iso_format
  12. from sentry.utils.cache import cache
  13. from sentry.utils.hashlib import hash_values
class StreamGroupSerializerTestCase(APITestCase, SnubaTestCase):
    """Tests for ``StreamGroupSerializerSnuba``.

    Covers three behaviors: forwarding of environment ids to the tsdb
    stats query, the ``sessions`` expand (``sessionCount``), and the
    skipping of ``timestamp``/``date`` search filters.
    """

    def test_environment(self):
        """Environment ids given to the serializer are forwarded verbatim
        (including ``None``) to ``snuba_tsdb.get_range``."""
        group = self.group
        organization_id = group.project.organization_id
        environment = Environment.get_or_create(group.project, "production")
        # NOTE(review): the first patch targets the name as imported into
        # ``group_stream``, the second the one in ``group`` — presumably
        # both resolve to the module the serializer calls through; confirm
        # the asymmetry is intentional.
        with mock.patch(
            "sentry.api.serializers.models.group_stream.snuba_tsdb.get_range",
            side_effect=snuba_tsdb.get_range,
        ) as get_range:
            serialize(
                [group],
                serializer=StreamGroupSerializerSnuba(
                    environment_ids=[environment.id],
                    stats_period="14d",
                    organization_id=organization_id,
                ),
                request=self.make_request(),
            )
            # Exactly one stats query, carrying the requested environment.
            assert get_range.call_count == 1
            for args, kwargs in get_range.call_args_list:
                assert kwargs["environment_ids"] == [environment.id]
        with mock.patch(
            "sentry.api.serializers.models.group.snuba_tsdb.get_range",
            side_effect=snuba_tsdb.get_range,
        ) as get_range:
            serialize(
                [group],
                serializer=StreamGroupSerializerSnuba(
                    environment_ids=None, stats_period="14d", organization_id=organization_id
                ),
                request=self.make_request(),
            )
            # No environment filter requested -> None is passed through.
            assert get_range.call_count == 1
            for args, kwargs in get_range.call_args_list:
                assert kwargs["environment_ids"] is None

    def test_session_count(self):
        """``sessionCount`` appears only with ``expand=["sessions"]`` and is
        filtered by environment, time range, and project."""
        group = self.group
        organization_id = group.project.organization_id
        environment = Environment.get_or_create(group.project, "prod")
        dev_environment = Environment.get_or_create(group.project, "dev")
        no_sessions_environment = Environment.get_or_create(group.project, "no_sessions")
        self.received = time.time()
        # Session start aligned to the minute boundary.
        self.session_started = time.time() // 60 * 60
        self.session_release = "foo@1.0.0"
        self.session_crashed_release = "foo@2.0.0"
        # One "ok" session in the dev environment.
        self.store_session(
            {
                "session_id": "5d52fd05-fcc9-4bf3-9dc9-267783670341",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102667",
                "status": "ok",
                "seq": 0,
                "release": self.session_release,
                "environment": "dev",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 1,
                "errors": 0,
                "started": self.session_started - 120,
                "received": self.received - 120,
            }
        )
        # A prod session, reported twice (seq 0 "ok" then seq 1 "exited")
        # under the same session_id, so it counts as a single session.
        self.store_session(
            {
                "session_id": "5e910c1a-6941-460e-9843-24103fb6a63c",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102668",
                "status": "ok",
                "seq": 0,
                "release": self.session_release,
                "environment": "prod",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 60.0,
                "errors": 0,
                "started": self.session_started - 240,
                "received": self.received - 240,
            }
        )
        self.store_session(
            {
                "session_id": "5e910c1a-6941-460e-9843-24103fb6a63c",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102669",
                "status": "exited",
                "seq": 1,
                "release": self.session_release,
                "environment": "prod",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 30.0,
                "errors": 0,
                "started": self.session_started,
                "received": self.received,
            }
        )
        # A crashed prod session on a different release.
        self.store_session(
            {
                "session_id": "a148c0c5-06a2-423b-8901-6b43b812cf82",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102660",
                "status": "crashed",
                "seq": 0,
                "release": self.session_crashed_release,
                "environment": "prod",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 60.0,
                "errors": 0,
                "started": self.session_started,
                "received": self.received,
            }
        )
        # Without the "sessions" expand no session count is computed at all.
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                stats_period="14d", organization_id=organization_id
            ),
            request=self.make_request(),
        )
        assert "sessionCount" not in result[0]
        # All environments: 3 distinct sessions (the two prod stores above
        # share a session_id).
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                stats_period="14d", expand=["sessions"], organization_id=organization_id
            ),
            request=self.make_request(),
        )
        assert result[0]["sessionCount"] == 3
        # Restricted to prod: the ok/exited session plus the crashed one.
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[environment.id],
                stats_period="14d",
                expand=["sessions"],
                organization_id=organization_id,
            ),
            request=self.make_request(),
        )
        assert result[0]["sessionCount"] == 2
        # An environment with no sessions yields None rather than 0.
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[no_sessions_environment.id],
                stats_period="14d",
                expand=["sessions"],
                organization_id=organization_id,
            ),
            request=self.make_request(),
        )
        assert result[0]["sessionCount"] is None
        # Restricted to dev: only the single dev session.
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id],
                stats_period="14d",
                expand=["sessions"],
                organization_id=organization_id,
            ),
            request=self.make_request(),
        )
        assert result[0]["sessionCount"] == 1
        # Store an old dev session, then query an explicit [30d ago, 15d ago]
        # window that contains only that old session.
        self.store_session(
            {
                "session_id": "a148c0c5-06a2-423b-8901-6b43b812cf83",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102627",
                "status": "ok",
                "seq": 0,
                "release": self.session_release,
                "environment": "dev",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 60.0,
                "errors": 0,
                "started": self.session_started - 1590061,  # approximately 18 days
                "received": self.received - 1590061,  # approximately 18 days
            }
        )
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id],
                stats_period="14d",
                expand=["sessions"],
                start=timezone.now() - timedelta(days=30),
                end=timezone.now() - timedelta(days=15),
                organization_id=organization_id,
            ),
            request=self.make_request(),
        )
        assert result[0]["sessionCount"] == 1
        # Delete the cache from the query we did above, else this result comes back as 1 instead of 0.5
        key_hash = hash_values([group.project.id, "", "", f"{dev_environment.id}"])
        cache.delete(f"w-s:{key_hash}")
        # A second project whose group has events but no sessions.
        project2 = self.create_project(
            organization=self.organization, teams=[self.team], name="Another project"
        )
        data = {
            "fingerprint": ["meow"],
            "timestamp": iso_format(timezone.now()),
            "type": "error",
            "exception": [{"type": "Foo"}],
        }
        event = self.store_event(data=data, project_id=project2.id)
        self.store_event(data=data, project_id=project2.id)
        self.store_event(data=data, project_id=project2.id)
        result = serialize(
            [group, event.group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id],
                stats_period="14d",
                expand=["sessions"],
                organization_id=organization_id,
            ),
            request=self.make_request(),
        )
        # NOTE(review): both dev sessions are counted here even though one is
        # ~18 days old — this depends on the cache cleared above and the
        # serializer's window computation; confirm intended.
        assert result[0]["sessionCount"] == 2
        # No sessions in project2
        assert result[1]["sessionCount"] is None

    def test_skipped_date_timestamp_filters(self):
        """``timestamp`` and ``date`` search filters are dropped entirely:
        they produce no snuba conditions and serialization still succeeds."""
        group = self.create_group()
        serializer = StreamGroupSerializerSnuba(
            search_filters=[
                SearchFilter(
                    SearchKey("timestamp"),
                    ">",
                    SearchValue(before_now(hours=1)),
                ),
                SearchFilter(
                    SearchKey("timestamp"),
                    "<",
                    SearchValue(before_now(seconds=1)),
                ),
                SearchFilter(
                    SearchKey("date"),
                    ">",
                    SearchValue(before_now(hours=1)),
                ),
                SearchFilter(
                    SearchKey("date"),
                    "<",
                    SearchValue(before_now(seconds=1)),
                ),
            ]
        )
        # All four filters were skipped, so no conditions were built.
        assert not serializer.conditions
        result = serialize(
            [group],
            self.user,
            serializer=serializer,
            request=self.make_request(),
        )
        assert result[0]["id"] == str(group.id)