# test_metrics_extraction.py

import time
import uuid

import confluent_kafka as kafka
import pytest

from sentry.sentry_metrics.indexer.strings import SHARED_STRINGS
from sentry.testutils.cases import TransactionTestCase
from sentry.testutils.helpers.datetime import before_now
from sentry.testutils.helpers.features import Feature
from sentry.testutils.helpers.options import override_options
from sentry.testutils.relay import RelayStoreHelper
from sentry.testutils.skips import requires_kafka
from sentry.utils import json

pytestmark = [requires_kafka]


class MetricsExtractionTest(RelayStoreHelper, TransactionTestCase):
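    """Relay integration tests for transaction metrics extraction.

    Each test posts a transaction event through Relay and then consumes the
    resulting metric buckets straight from Kafka, so a running Kafka broker is
    required (hence the ``requires_kafka`` mark above).
    """
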
    @pytest.mark.skip("breaks in Relay for unknown reasons")
    @override_options({"relay.transaction-names-client-based": 1.0})
    def test_all_transaction_metrics_emitted(self):
        with Feature(
            {
                "organizations:transaction-metrics-extraction": True,
            }
        ):
            event_data = {
                "type": "transaction",
                "transaction": "foo",
                "transaction_info": {"source": "url"},  # 'transaction' tag not extracted
                "timestamp": before_now(seconds=1),
                "start_timestamp": before_now(seconds=2),
                "contexts": {
                    "trace": {
                        "trace_id": 32 * "b",
                        "span_id": 16 * "c",
                        "type": "trace",
                    }
                },
                "user": {"id": 123},
                "measurements": {
                    "fp": {"value": 2258.060000000114},
                    "fcp": {"value": 2258.060000000114},
                    "lcp": {"value": 2807.335},
                    "inp": {"value": 51.318},
                    "fid": {"value": 3.4900000027846545},
                    "cls": {"value": 0.0382},
                    "frames_total": {"value": 100},
                    "frames_slow": {"value": 10},
                    "frames_frozen": {"value": 5},
                    "stall_count": {"value": 2},
                    "stall_total_time": {"value": 12},
                    "stall_longest_time": {"value": 7},
                    "app_start_warm": {"value": 0.001},
                    "app_start_cold": {"value": 0.001},
                    "ttfb": {"value": 5},
                    "ttfb.requesttime": {"value": 6},
                },
                "spans": [
                    {
                        "op": op,
                        "trace_id": 32 * "b",
                        "span_id": 16 * "1",
                        "start_timestamp": before_now(seconds=2),
                        "timestamp": before_now(seconds=1),
                    }
                    for op in ("db", "http", "resource", "browser", "ui")
                ],
            }
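
            # Consume directly from the "ingest-metrics" Kafka topic so we can
            # inspect every bucket that metrics extraction produced for this event.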
            settings = {
                "bootstrap.servers": "127.0.0.1:9092",  # TODO: read from django settings here
                "group.id": "test-consumer-%s" % uuid.uuid4().hex,
                "enable.auto.commit": True,
                "auto.offset.reset": "earliest",
            }
            consumer = kafka.Consumer(settings)
            consumer.assign([kafka.TopicPartition("ingest-metrics", 0)])

            self.post_and_retrieve_event(event_data)
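
            # Collect every metric name and tag key/value emitted for this project,
            # stopping once the topic is drained (poll returns None).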
            strings_emitted = set()
            for _ in range(1000):
                message = consumer.poll(timeout=1.0)
                if message is None:
                    break
                message = json.loads(message.value())
                if message["project_id"] == self.project.id:
                    strings_emitted.add(message["name"])
                    for key, value in message["tags"].items():
                        strings_emitted.add(key)
                        strings_emitted.add(value)

            consumer.close()

            #: These strings should be common strings, but we cannot add them
            #: to the indexer because they already exist in the release health
            #: indexer db.
            known_non_common_strings = {
                "other",
                "platform",
                "d:transactions/measurements.inp@millisecond",
            }

            # Make sure that all the standard strings are part of the list of common strings:
            non_common_strings = strings_emitted - SHARED_STRINGS.keys()
            assert non_common_strings == known_non_common_strings

    def test_histogram_outliers(self):
        with Feature(
            {
                "organizations:transaction-metrics-extraction": True,
            }
        ):
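            # fcp gets an extreme value and lcp a value of zero; the assertion at the
            # end expects fcp to be tagged as a histogram "outlier" and lcp (as well
            # as the transaction duration) as an "inlier".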
            event_data = {
                "type": "transaction",
                "transaction": "foo",
                "transaction_info": {"source": "url"},  # 'transaction' tag not extracted
                "timestamp": before_now(seconds=1).isoformat(),
                "start_timestamp": before_now(seconds=2).isoformat(),
                "platform": "javascript",
                "contexts": {
                    "trace": {
                        "op": "pageload",
                        "trace_id": 32 * "b",
                        "span_id": 16 * "c",
                        "type": "trace",
                    }
                },
                "user": {"id": 123},
                "measurements": {
                    "fcp": {"value": 999999999.0},
                    "lcp": {"value": 0.0},
                },
            }

            settings = {
                "bootstrap.servers": "127.0.0.1:9092",  # TODO: read from django settings here
                "group.id": "test-consumer-%s" % uuid.uuid4().hex,
                "enable.auto.commit": True,
                "auto.offset.reset": "earliest",
            }
            consumer = kafka.Consumer(settings)
            consumer.assign([kafka.TopicPartition("ingest-performance-metrics", 0)])

            self.post_and_retrieve_event(event_data)
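
            # Drain the metric buckets produced for this event and record the
            # "histogram_outlier" tag attached to each metric name (if present).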
            histogram_outlier_tags = {}
            buckets = []
            t0 = time.monotonic()
            for attempt in range(1000):
                message = consumer.poll(timeout=1.0)
                if message is None:
                    break
                bucket = json.loads(message.value())
                buckets.append(bucket)
                try:
                    histogram_outlier_tags[bucket["name"]] = bucket["tags"]["histogram_outlier"]
                except KeyError:
                    pass

            consumer.close()
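
            # The dict after the comma is the assertion message: on failure it reports
            # how many polls ran, how long they took, and how many buckets were seen,
            # which helps debug runs where the expected tags never arrive.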
            assert histogram_outlier_tags == {
                "d:transactions/duration@millisecond": "inlier",
                "d:transactions/measurements.fcp@millisecond": "outlier",
                "d:transactions/measurements.lcp@millisecond": "inlier",
            }, {
                "attempts": attempt,
                "time_elapsed": time.monotonic() - t0,
                "bucket_count": len(buckets),
            }