test_metrics_extraction.py

import uuid

import confluent_kafka as kafka
import pytest

from sentry.sentry_metrics.indexer.strings import SHARED_STRINGS
from sentry.tasks.relay import compute_projectkey_config
from sentry.testutils import RelayStoreHelper, TransactionTestCase
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.testutils.helpers.features import Feature
from sentry.utils import json


class MetricsExtractionTest(RelayStoreHelper, TransactionTestCase):
    @pytest.mark.skip(
        "TET-627: We need to release the new metric in Relay first and then adjust the test"
    )
    def test_all_transaction_metrics_emitted(self):
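        """Ingest a transaction with a broad set of measurements and spans,
        then verify that the metrics emitted to the ingest-metrics topic match
        the project's extraction config, and that all emitted strings are
        shared (common) strings, modulo the known exceptions listed below.
        """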
        with Feature(
            {
                "organizations:transaction-metrics-extraction": True,
            }
        ):
            event_data = {
                "type": "transaction",
                "transaction": "foo",
                "timestamp": iso_format(before_now(seconds=1)),
                "start_timestamp": iso_format(before_now(seconds=2)),
                "contexts": {
                    "trace": {
                        "trace_id": 32 * "b",
                        "span_id": 16 * "c",
                        "type": "trace",
                    }
                },
                "user": {"id": 123},
                "measurements": {
                    "fp": {"value": 2258.060000000114},
                    "fcp": {"value": 2258.060000000114},
                    "lcp": {"value": 2807.335},
                    "inp": {"value": 51.318},
                    "fid": {"value": 3.4900000027846545},
                    "cls": {"value": 0.0382},
                    "frames_total": {"value": 100},
                    "frames_slow": {"value": 10},
                    "frames_frozen": {"value": 5},
                    "stall_count": {"value": 2},
                    "stall_total_time": {"value": 12},
                    "stall_longest_time": {"value": 7},
                    "app_start_warm": {"value": 0.001},
                    "app_start_cold": {"value": 0.001},
                    "ttfb": {"value": 5},
                    "ttfb.requesttime": {"value": 6},
                },
                "spans": [
                    {
                        "op": op,
                        "trace_id": 32 * "b",
                        "span_id": 16 * "1",
                        "start_timestamp": iso_format(before_now(seconds=2)),
                        "timestamp": iso_format(before_now(seconds=1)),
                    }
                    for op in ("db", "http", "resource", "browser", "ui")
                ],
            }
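
            # Consume the raw ingest-metrics topic directly to observe the
            # metrics messages produced for this event. A fresh group.id per
            # run, combined with auto.offset.reset=earliest, reads the topic
            # from the beginning rather than resuming a previous run's offsets.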
            settings = {
                "bootstrap.servers": "127.0.0.1:9092",  # TODO: read from django settings here
                "group.id": "test-consumer-%s" % uuid.uuid4().hex,
                "enable.auto.commit": True,
                "auto.offset.reset": "earliest",
            }
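            # A possible shape for the TODO above (a sketch, not verified
            # against this repo's settings module): Sentry's Django settings
            # define Kafka broker options under KAFKA_CLUSTERS, so the address
            # could be looked up instead of hard-coded. The exact nesting
            # varies by Sentry version; the "common" key is an assumption:
            #
            #     from django.conf import settings as django_settings
            #     cluster = django_settings.KAFKA_CLUSTERS["default"]
            #     settings["bootstrap.servers"] = cluster["common"]["bootstrap.servers"]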
            consumer = kafka.Consumer(settings)
            consumer.assign([kafka.TopicPartition("ingest-metrics", 0)])
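
            # Ingest one transaction; the metrics extracted from it should be
            # produced to the ingest-metrics partition we just assigned.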
            self.post_and_retrieve_event(event_data)
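
            # Drain the topic: collect metric names and tag strings for this
            # project until a poll comes back empty (or we hit the 1000-message
            # safety cap).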
            metrics_emitted = set()
            strings_emitted = set()
            for _ in range(1000):
                message = consumer.poll(timeout=1.0)
                if message is None:
                    break
                message = json.loads(message.value())
                if message["project_id"] == self.project.id:
                    metrics_emitted.add(message["name"])
                    strings_emitted.add(message["name"])
                    for key, value in message["tags"].items():
                        strings_emitted.add(key)
                        strings_emitted.add(value)

            consumer.close()

            # Make sure that all expected metrics were extracted:
            project_config = compute_projectkey_config(self.projectkey)
            extraction_config = project_config["config"]["transactionMetrics"]
            metrics_expected = set(extraction_config["extractMetrics"])
            assert sorted(metrics_emitted) == sorted(metrics_expected)

            #: These strings should be common strings, but we cannot add them
            #: to the indexer because they already exist in the release health
            #: indexer db.
            known_non_common_strings = {
                "other",
                "platform",
                "d:transactions/measurements.inp@millisecond",
            }

            # Make sure that all the standard strings are part of the list of common strings:
            non_common_strings = strings_emitted - SHARED_STRINGS.keys()
            assert non_common_strings == known_non_common_strings