# test_group_stream.py

import time
from datetime import timedelta
from unittest import mock

import pytz
from django.utils import timezone

from sentry.api.event_search import SearchFilter, SearchKey, SearchValue
from sentry.api.serializers import serialize
from sentry.api.serializers.models.group import snuba_tsdb
from sentry.api.serializers.models.group_stream import StreamGroupSerializerSnuba
from sentry.models import Environment
from sentry.testutils import APITestCase, SnubaTestCase
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.testutils.silo import region_silo_test
from sentry.utils.cache import cache
from sentry.utils.hashlib import hash_values


@region_silo_test
class StreamGroupSerializerTestCase(APITestCase, SnubaTestCase):
    def test_environment(self):
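        """The serializer should forward the requested environment IDs to
        snuba_tsdb.get_range, and pass None when no environments are given."""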
        group = self.group
        organization_id = group.project.organization_id
        environment = Environment.get_or_create(group.project, "production")

        with mock.patch(
            "sentry.api.serializers.models.group_stream.snuba_tsdb.get_range",
            side_effect=snuba_tsdb.get_range,
        ) as get_range:
            serialize(
                [group],
                serializer=StreamGroupSerializerSnuba(
                    environment_ids=[environment.id],
                    stats_period="14d",
                    organization_id=organization_id,
                ),
            )
            assert get_range.call_count == 1
            for args, kwargs in get_range.call_args_list:
                assert kwargs["environment_ids"] == [environment.id]

        with mock.patch(
            "sentry.api.serializers.models.group.snuba_tsdb.get_range",
            side_effect=snuba_tsdb.get_range,
        ) as get_range:
            serialize(
                [group],
                serializer=StreamGroupSerializerSnuba(
                    environment_ids=None, stats_period="14d", organization_id=organization_id
                ),
            )
            assert get_range.call_count == 1
            for args, kwargs in get_range.call_args_list:
                assert kwargs["environment_ids"] is None

    def test_session_count(self):
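        """sessionCount should reflect the sessions stored in Snuba, scoped to the
        selected environments and time range."""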
        group = self.group
        organization_id = group.project.organization_id
        environment = Environment.get_or_create(group.project, "prod")
        dev_environment = Environment.get_or_create(group.project, "dev")
        no_sessions_environment = Environment.get_or_create(group.project, "no_sessions")

        self.received = time.time()
        self.session_started = time.time() // 60 * 60
        self.session_release = "foo@1.0.0"
        self.session_crashed_release = "foo@2.0.0"
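
        # Three distinct sessions: a healthy session in "dev", a "prod" session that is
        # later updated to "exited" (same session_id, seq=1), and a crashed "prod"
        # session on a second release. The "no_sessions" environment gets none.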
        self.store_session(
            {
                "session_id": "5d52fd05-fcc9-4bf3-9dc9-267783670341",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102667",
                "status": "ok",
                "seq": 0,
                "release": self.session_release,
                "environment": "dev",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 1,
                "errors": 0,
                "started": self.session_started - 120,
                "received": self.received - 120,
            }
        )
        self.store_session(
            {
                "session_id": "5e910c1a-6941-460e-9843-24103fb6a63c",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102668",
                "status": "ok",
                "seq": 0,
                "release": self.session_release,
                "environment": "prod",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 60.0,
                "errors": 0,
                "started": self.session_started - 240,
                "received": self.received - 240,
            }
        )
        self.store_session(
            {
                "session_id": "5e910c1a-6941-460e-9843-24103fb6a63c",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102669",
                "status": "exited",
                "seq": 1,
                "release": self.session_release,
                "environment": "prod",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 30.0,
                "errors": 0,
                "started": self.session_started,
                "received": self.received,
            }
        )
        self.store_session(
            {
                "session_id": "a148c0c5-06a2-423b-8901-6b43b812cf82",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102660",
                "status": "crashed",
                "seq": 0,
                "release": self.session_crashed_release,
                "environment": "prod",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 60.0,
                "errors": 0,
                "started": self.session_started,
                "received": self.received,
            }
        )
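
        # Without the "sessions" expand, no session count is attached.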
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                stats_period="14d", organization_id=organization_id
            ),
        )
        assert "sessionCount" not in result[0]
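
        # With the expand, all three distinct sessions are counted across environments.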
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                stats_period="14d", expand=["sessions"], organization_id=organization_id
            ),
        )
        assert result[0]["sessionCount"] == 3
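
        # Filtering by environment restricts the count to that environment's sessions.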
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[environment.id],
                stats_period="14d",
                expand=["sessions"],
                organization_id=organization_id,
            ),
        )
        assert result[0]["sessionCount"] == 2

        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[no_sessions_environment.id],
                stats_period="14d",
                expand=["sessions"],
                organization_id=organization_id,
            ),
        )
        assert result[0]["sessionCount"] is None

        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id],
                stats_period="14d",
                expand=["sessions"],
                organization_id=organization_id,
            ),
        )
        assert result[0]["sessionCount"] == 1
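
        # Store an older dev session (~18 days back) and query it with an explicit
        # start/end window instead of the relative stats period.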
        self.store_session(
            {
                "session_id": "a148c0c5-06a2-423b-8901-6b43b812cf83",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102627",
                "status": "ok",
                "seq": 0,
                "release": self.session_release,
                "environment": "dev",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 60.0,
                "errors": 0,
                "started": self.session_started - 1590061,  # approximately 18 days
                "received": self.received - 1590061,  # approximately 18 days
            }
        )

        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id],
                stats_period="14d",
                expand=["sessions"],
                start=timezone.now() - timedelta(days=30),
                end=timezone.now() - timedelta(days=15),
                organization_id=organization_id,
            ),
        )
        assert result[0]["sessionCount"] == 1

        # Delete the cache from the query we did above, else this result comes back
        # as 1 instead of 0.5
        key_hash = hash_values([group.project.id, "", "", f"{dev_environment.id}"])
        cache.delete(f"w-s:{key_hash}")
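
        # A second project with error events but no stored sessions.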
        project2 = self.create_project(
            organization=self.organization, teams=[self.team], name="Another project"
        )
        data = {
            "fingerprint": ["meow"],
            "timestamp": iso_format(timezone.now()),
            "type": "error",
            "exception": [{"type": "Foo"}],
        }
        event = self.store_event(data=data, project_id=project2.id)
        self.store_event(data=data, project_id=project2.id)
        self.store_event(data=data, project_id=project2.id)

        result = serialize(
            [group, event.group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id],
                stats_period="14d",
                expand=["sessions"],
                organization_id=organization_id,
            ),
        )
        assert result[0]["sessionCount"] == 2
        # No sessions in project2
        assert result[1]["sessionCount"] is None

    def test_skipped_date_timestamp_filters(self):
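        """Timestamp and date search filters should be skipped when building the
        serializer's Snuba conditions; serialization should still succeed."""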
        group = self.create_group()
        serializer = StreamGroupSerializerSnuba(
            search_filters=[
                SearchFilter(
                    SearchKey("timestamp"),
                    ">",
                    SearchValue(before_now(hours=1).replace(tzinfo=pytz.UTC)),
                ),
                SearchFilter(
                    SearchKey("timestamp"),
                    "<",
                    SearchValue(before_now(seconds=1).replace(tzinfo=pytz.UTC)),
                ),
                SearchFilter(
                    SearchKey("date"),
                    ">",
                    SearchValue(before_now(hours=1).replace(tzinfo=pytz.UTC)),
                ),
                SearchFilter(
                    SearchKey("date"),
                    "<",
                    SearchValue(before_now(seconds=1).replace(tzinfo=pytz.UTC)),
                ),
            ]
        )
        assert not serializer.conditions
        result = serialize([group], self.user, serializer=serializer)
        assert result[0]["id"] == str(group.id)