# test_snuba.py

from __future__ import absolute_import

from datetime import datetime, timedelta
import pytest
import time
import uuid

from sentry import options
from sentry.models import GroupHash, GroupHashTombstone
from sentry.testutils import SnubaTestCase
from sentry.utils import snuba

class SnubaTest(SnubaTestCase):
    def test(self):
        "This is just a simple 'hello, world' example test."
        now = datetime.now()

        events = [{
            'event_id': 'x' * 32,
            'primary_hash': '1' * 32,
            'group_id': 1,
            'project_id': self.project.id,
            'message': 'message',
            'platform': 'python',
            'datetime': now.strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
            'data': {
                'received': time.mktime(now.timetuple()),
            }
        }]

        self.snuba_insert(events)

        assert snuba.query(
            start=now - timedelta(days=1),
            end=now + timedelta(days=1),
            groupby=['project_id'],
            filter_keys={'project_id': [self.project.id]},
        ) == {self.project.id: 1}

    def test_fail(self):
        # An invalid groupby expression should be rejected and surface
        # as a SnubaError.
        now = datetime.now()
        with pytest.raises(snuba.SnubaError):
            snuba.query(
                start=now - timedelta(days=1),
                end=now + timedelta(days=1),
                filter_keys={'project_id': [self.project.id]},
                groupby=[")("],
            )

    def test_project_issues_with_legacy_hash(self):
        a_hash = 'a' * 32

        for h in [a_hash, 'A' * 8]:
            GroupHash.objects.create(
                project=self.project,
                group=self.group,
                hash=h,
            )

        assert snuba.get_project_issues([self.project], [self.group.id]) == \
            [(self.group.id, self.group.project_id, [('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', None)])]

        # A GroupHash without a group_id should not be included in get_project_issues
        GroupHash.objects.create(
            project=self.project,
            hash='0' * 32,
        )
        group_ids = [i[0] for i in snuba.get_project_issues([self.project])]
        assert self.group.id in group_ids
        assert None not in group_ids

    def _insert_event_for_time(self, ts, hash='a' * 32, group_id=None):
        # Helper: insert a single event at the given timestamp; if no
        # group_id is given, derive one from the hash.
        self.snuba_insert({
            'event_id': uuid.uuid4().hex,
            'primary_hash': hash,
            'group_id': group_id if group_id else int(hash[:16], 16),
            'project_id': self.project.id,
            'message': 'message',
            'platform': 'python',
            'datetime': ts.strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
            'data': {
                'received': time.mktime(ts.timetuple()),
            }
        })

    def test_project_issues_with_tombstones(self):
        # Nothing to be done if we're using `group_id`. When that option
        # is the default we can remove this test.
        if options.get('snuba.use_group_id_column'):
            return

        base_time = datetime.utcnow()
        hash = 'a' * 32

        def _query_for_issue(group_id):
            return snuba.query(
                start=base_time - timedelta(days=1),
                end=base_time + timedelta(days=1),
                groupby=['issue'],
                filter_keys={
                    'project_id': [self.project.id],
                    'issue': [group_id]
                },
            )

        group1 = self.create_group()
        group2 = self.create_group()

        GroupHash.objects.create(
            project=self.project,
            group=group1,
            hash=hash,
        )
        assert snuba.get_project_issues([self.project], [group1.id]) == \
            [(group1.id, group1.project_id, [('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', None)])]

        # 1 event in the group, no deletes have happened
        self._insert_event_for_time(base_time, hash)
        assert _query_for_issue(group1.id) == {group1.id: 1}

        # group is deleted and then returns (as a new group with the same hash)
        GroupHashTombstone.tombstone_groups(self.project.id, [group1.id])
        ght = GroupHashTombstone.objects.get(project_id=self.project.id)
        assert ght

        GroupHash.objects.create(
            project=self.project,
            group=group2,
            hash=hash,
        )

        # tombstone time is returned as expected
        assert snuba.get_project_issues([self.project], [group2.id]) == \
            [(group2.id, group2.project_id, [('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
                                              ght.deleted_at.strftime("%Y-%m-%d %H:%M:%S"))])]

        # events at or before the tombstone date aren't returned
        self._insert_event_for_time(ght.deleted_at, hash)
        assert _query_for_issue(group2.id) == {}

        # only the event after the tombstone date is returned
        self._insert_event_for_time(ght.deleted_at + timedelta(seconds=1), hash)
        assert _query_for_issue(group2.id) == {group2.id: 1}

    def test_organization_retention_respected(self):
        base_time = datetime.utcnow()

        self._insert_event_for_time(base_time - timedelta(minutes=1))
        self._insert_event_for_time(base_time - timedelta(days=2))

        def _get_event_count():
            # attempt to query back 90 days
            return snuba.query(
                start=base_time - timedelta(days=90),
                end=base_time + timedelta(days=1),
                groupby=['project_id'],
                filter_keys={
                    'project_id': [self.project.id],
                },
            )

        assert _get_event_count() == {self.project.id: 2}
        with self.options({'system.event-retention-days': 1}):
            assert _get_event_count() == {self.project.id: 1}

    def test_organization_retention_larger_than_end_date(self):
        # With 1-day retention, the entire 90-to-60-days-ago window falls
        # outside retention, so the query should return nothing.
        base_time = datetime.utcnow()

        with self.options({'system.event-retention-days': 1}):
            assert snuba.query(
                start=base_time - timedelta(days=90),
                end=base_time - timedelta(days=60),
                groupby=['project_id'],
                filter_keys={
                    'project_id': [self.project.id],
                },
            ) == {}

    def test_use_group_id(self):
        base_time = datetime.utcnow()
        group = self.create_group()
        self._insert_event_for_time(base_time, group_id=group.id)

        with self.options({'snuba.use_group_id_column': True}):
            # verify filter_keys and aggregation
            assert snuba.query(
                start=base_time - timedelta(days=1),
                end=base_time + timedelta(days=1),
                groupby=['issue'],
                filter_keys={
                    'project_id': [self.project.id],
                    'issue': [group.id]
                },
            ) == {group.id: 1}

            # verify raw_query selecting issue row
            assert snuba.raw_query(
                start=base_time - timedelta(days=1),
                end=base_time + timedelta(days=1),
                selected_columns=['issue', 'timestamp'],
                filter_keys={
                    'project_id': [self.project.id],
                    'issue': [group.id]
                },
            )['data'] == [{
                'issue': group.id,
                'timestamp': base_time.strftime('%Y-%m-%dT%H:%M:%S+00:00'),
            }]