test_group_stream.py

import time
from datetime import timedelta
from unittest import mock

from django.utils import timezone

from sentry.api.serializers import serialize
from sentry.api.serializers.models.group_stream import StreamGroupSerializerSnuba, snuba_tsdb
from sentry.models import Environment
from sentry.testutils import APITestCase, SnubaTestCase
from sentry.testutils.helpers.datetime import iso_format
from sentry.utils.cache import cache
from sentry.utils.hashlib import hash_values
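

# Exercises StreamGroupSerializerSnuba: environment scoping of the stats query
# and the optional "sessions" expand on the stream serializer.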
class StreamGroupSerializerTestCase(APITestCase, SnubaTestCase):
    def test_environment(self):
        group = self.group
        environment = Environment.get_or_create(group.project, "production")

        with mock.patch(
            "sentry.api.serializers.models.group_stream.snuba_tsdb.get_range",
            side_effect=snuba_tsdb.get_range,
        ) as get_range:
            serialize(
                [group],
                serializer=StreamGroupSerializerSnuba(
                    environment_ids=[environment.id], stats_period="14d"
                ),
            )
            assert get_range.call_count == 1
            for args, kwargs in get_range.call_args_list:
                assert kwargs["environment_ids"] == [environment.id]

        with mock.patch(
            "sentry.api.serializers.models.group.snuba_tsdb.get_range",
            side_effect=snuba_tsdb.get_range,
        ) as get_range:
            serialize(
                [group],
                serializer=StreamGroupSerializerSnuba(environment_ids=None, stats_period="14d"),
            )
            assert get_range.call_count == 1
            for args, kwargs in get_range.call_args_list:
                assert kwargs["environment_ids"] is None
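
    # The "sessions" expand: session counts should be scoped to the requested
    # environments and project, and be None when no sessions were recorded.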
    def test_session_count(self):
        group = self.group
        environment = Environment.get_or_create(group.project, "prod")
        dev_environment = Environment.get_or_create(group.project, "dev")
        no_sessions_environment = Environment.get_or_create(group.project, "no_sessions")

        self.received = time.time()
        self.session_started = time.time() // 60 * 60
        self.session_release = "foo@1.0.0"
        self.session_crashed_release = "foo@2.0.0"
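
        # One healthy session in "dev" on release foo@1.0.0.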
        self.store_session(
            {
                "session_id": "5d52fd05-fcc9-4bf3-9dc9-267783670341",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102667",
                "status": "ok",
                "seq": 0,
                "release": self.session_release,
                "environment": "dev",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 1,
                "errors": 0,
                "started": self.session_started - 120,
                "received": self.received - 120,
            }
        )
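
        # Two updates ("ok" then "exited") for the same session in "prod"; they
        # should be counted as a single session.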
        self.store_session(
            {
                "session_id": "5e910c1a-6941-460e-9843-24103fb6a63c",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102668",
                "status": "ok",
                "seq": 0,
                "release": self.session_release,
                "environment": "prod",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 60.0,
                "errors": 0,
                "started": self.session_started - 240,
                "received": self.received - 240,
            }
        )
        self.store_session(
            {
                "session_id": "5e910c1a-6941-460e-9843-24103fb6a63c",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102669",
                "status": "exited",
                "seq": 1,
                "release": self.session_release,
                "environment": "prod",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 30.0,
                "errors": 0,
                "started": self.session_started,
                "received": self.received,
            }
        )
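
        # A crashed session in "prod" on the crashed release.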
        self.store_session(
            {
                "session_id": "a148c0c5-06a2-423b-8901-6b43b812cf82",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102660",
                "status": "crashed",
                "seq": 0,
                "release": self.session_crashed_release,
                "environment": "prod",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 60.0,
                "errors": 0,
                "started": self.session_started,
                "received": self.received,
            }
        )
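
        # Without the "sessions" expand, no session count is attached.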
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(stats_period="14d"),
        )
        assert "sessionCount" not in result[0]
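
        # With the expand and no environment filter, all three distinct sessions count.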
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                stats_period="14d",
                expand=["sessions"],
            ),
        )
        assert result[0]["sessionCount"] == 3
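
        # Scoped to "prod": the exited session and the crashed session.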
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[environment.id], stats_period="14d", expand=["sessions"]
            ),
        )
        assert result[0]["sessionCount"] == 2
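
        # An environment with no sessions yields None rather than 0.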
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[no_sessions_environment.id],
                stats_period="14d",
                expand=["sessions"],
            ),
        )
        assert result[0]["sessionCount"] is None
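
        # Scoped to "dev": only the single healthy session.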
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id], stats_period="14d", expand=["sessions"]
            ),
        )
        assert result[0]["sessionCount"] == 1
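
        # Store a "dev" session roughly 18 days old, outside the default 14d stats period.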
        self.store_session(
            {
                "session_id": "a148c0c5-06a2-423b-8901-6b43b812cf83",
                "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102627",
                "status": "ok",
                "seq": 0,
                "release": self.session_release,
                "environment": "dev",
                "retention_days": 90,
                "org_id": self.project.organization_id,
                "project_id": self.project.id,
                "duration": 60.0,
                "errors": 0,
                "started": self.session_started - 1590061,  # approximately 18 days
                "received": self.received - 1590061,  # approximately 18 days
            }
        )
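
        # An explicit start/end window (30 to 15 days ago) should pick up only the old session.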
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id],
                stats_period="14d",
                expand=["sessions"],
                start=timezone.now() - timedelta(days=30),
                end=timezone.now() - timedelta(days=15),
            ),
        )
        assert result[0]["sessionCount"] == 1

        # Delete the cache from the query we did above, else this result comes back as 1 instead of 0.5
        key_hash = hash_values([group.project.id, "", "", f"{dev_environment.id}"])
        cache.delete(f"w-s:{key_hash}")
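
        # A second project whose group has error events but no sessions.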
        project2 = self.create_project(
            organization=self.organization, teams=[self.team], name="Another project"
        )
        data = {
            "fingerprint": ["meow"],
            "timestamp": iso_format(timezone.now()),
            "type": "error",
            "exception": [{"type": "Foo"}],
        }
        event = self.store_event(data=data, project_id=project2.id)
        self.store_event(data=data, project_id=project2.id)
        self.store_event(data=data, project_id=project2.id)
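
        # Session counts are computed per project: the original group still sees
        # its "dev" sessions, while project2's group has none.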
        result = serialize(
            [group, event.group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id],
                stats_period="14d",
                expand=["sessions"],
            ),
        )
        assert result[0]["sessionCount"] == 2
        # No sessions in project2
        assert result[1]["sessionCount"] is None