# test_eventstream.py
import logging
import time
from datetime import datetime, timedelta
from unittest.mock import Mock, patch

from django.conf import settings

from sentry.event_manager import EventManager
from sentry.eventstream.kafka import KafkaEventStream
from sentry.eventstream.snuba import SnubaEventStream
from sentry.testutils import SnubaTestCase, TestCase
from sentry.utils import json, snuba
from sentry.utils.samples import load_data
  12. class SnubaEventStreamTest(TestCase, SnubaTestCase):
  13. def setUp(self):
  14. super().setUp()
  15. self.kafka_eventstream = KafkaEventStream()
  16. self.producer_mock = Mock()
  17. self.kafka_eventstream.get_producer = Mock(return_value=self.producer_mock)
  18. def __build_event(self, timestamp):
  19. raw_event = {
  20. "event_id": "a" * 32,
  21. "message": "foo",
  22. "timestamp": time.mktime(timestamp.timetuple()),
  23. "level": logging.ERROR,
  24. "logger": "default",
  25. "tags": [],
  26. }
  27. manager = EventManager(raw_event)
  28. manager.normalize()
  29. return manager.save(self.project.id)
  30. def __build_transaction_event(self):
  31. manager = EventManager(load_data("transaction"))
  32. manager.normalize()
  33. return manager.save(self.project.id)
  34. def __produce_event(self, *insert_args, **insert_kwargs):
  35. is_transaction_event = insert_kwargs["event"].get_event_type() == "transaction"
  36. # pass arguments on to Kafka EventManager
  37. self.kafka_eventstream.insert(*insert_args, **insert_kwargs)
  38. producer = self.producer_mock
  39. produce_args, produce_kwargs = list(producer.produce.call_args)
  40. assert not produce_args
  41. assert produce_kwargs["topic"] == settings.KAFKA_EVENTS
  42. assert produce_kwargs["key"] == str(self.project.id).encode("utf-8")
  43. version, type_, payload1, payload2 = json.loads(produce_kwargs["value"])
  44. assert version == 2
  45. assert type_ == "insert"
  46. # insert what would have been the Kafka payload directly
  47. # into Snuba, expect an HTTP 200 and for the event to now exist
  48. snuba_eventstream = SnubaEventStream()
  49. snuba_eventstream._send(
  50. self.project.id,
  51. "insert",
  52. (payload1, payload2),
  53. is_transaction_event=is_transaction_event,
  54. )
  55. @patch("sentry.eventstream.insert")
  56. def test(self, mock_eventstream_insert):
  57. now = datetime.utcnow()
  58. event = self.__build_event(now)
  59. # verify eventstream was called by EventManager
  60. insert_args, insert_kwargs = list(mock_eventstream_insert.call_args)
  61. assert not insert_args
  62. assert insert_kwargs == {
  63. "event": event,
  64. "is_new_group_environment": True,
  65. "is_new": True,
  66. "is_regression": False,
  67. "primary_hash": "acbd18db4cc2f85cedef654fccc4a4d8",
  68. "skip_consume": False,
  69. "received_timestamp": event.data["received"],
  70. }
  71. self.__produce_event(*insert_args, **insert_kwargs)
  72. assert (
  73. snuba.query(
  74. start=now - timedelta(days=1),
  75. end=now + timedelta(days=1),
  76. groupby=["project_id"],
  77. filter_keys={"project_id": [self.project.id]},
  78. ).get(self.project.id, 0)
  79. == 1
  80. )
  81. @patch("sentry.eventstream.insert")
  82. def test_issueless(self, mock_eventstream_insert):
  83. now = datetime.utcnow()
  84. event = self.__build_transaction_event()
  85. event.group_id = None
  86. insert_args = ()
  87. insert_kwargs = {
  88. "event": event,
  89. "is_new_group_environment": True,
  90. "is_new": True,
  91. "is_regression": False,
  92. "primary_hash": "acbd18db4cc2f85cedef654fccc4a4d8",
  93. "skip_consume": False,
  94. "received_timestamp": event.data["received"],
  95. }
  96. self.__produce_event(*insert_args, **insert_kwargs)
  97. result = snuba.raw_query(
  98. dataset=snuba.Dataset.Transactions,
  99. start=now - timedelta(days=1),
  100. end=now + timedelta(days=1),
  101. selected_columns=["event_id"],
  102. groupby=None,
  103. filter_keys={"project_id": [self.project.id], "event_id": [event.event_id]},
  104. )
  105. assert len(result["data"]) == 1
  106. @patch("sentry.eventstream.insert")
  107. def test_multiple_groups(self, mock_eventstream_insert):
  108. now = datetime.utcnow()
  109. event = self.__build_transaction_event()
  110. event.group_id = None
  111. event.groups = [self.group]
  112. insert_args = ()
  113. insert_kwargs = {
  114. "event": event,
  115. "is_new_group_environment": True,
  116. "is_new": True,
  117. "is_regression": False,
  118. "primary_hash": "acbd18db4cc2f85cedef654fccc4a4d8",
  119. "skip_consume": False,
  120. "received_timestamp": event.data["received"],
  121. }
  122. self.__produce_event(*insert_args, **insert_kwargs)
  123. result = snuba.raw_query(
  124. dataset=snuba.Dataset.Transactions,
  125. start=now - timedelta(days=1),
  126. end=now + timedelta(days=1),
  127. selected_columns=["event_id", "group_ids"],
  128. groupby=None,
  129. filter_keys={"project_id": [self.project.id], "event_id": [event.event_id]},
  130. )
  131. assert len(result["data"]) == 1
  132. assert result["data"][0]["group_ids"] == [self.group.id]