test_organization_spans_aggregation.py

import hashlib
from datetime import timedelta
from unittest import mock
from uuid import uuid4

from django.urls import reverse
from snuba_sdk import Column, Condition, Function, Op

from sentry.api.endpoints.organization_spans_aggregation import NULL_GROUP
from sentry.testutils.cases import APITestCase, SnubaTestCase
from sentry.testutils.helpers.datetime import before_now
from sentry.utils.samples import load_data
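
# Canned Snuba result consumed by the mocked raw_snql_query (indexedSpans backend).
# Two transactions share the root span group "e238e6c2e2466b07"; each span row is
# assumed to be (span id, is-root flag, parent id, group, description, op, start
# timestamp, offset, duration, exclusive time). The second transaction repeats the
# "D" group and both include a NULL_GROUP span to exercise the op fallback.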
MOCK_SNUBA_RESPONSE = {
    "data": [
        {
            "transaction_id": "80fe542aea4945ffbe612646987ee449",
            "count": 71,
            "spans": [
                [
                    "root_1",
                    1,
                    "parent_1",
                    "e238e6c2e2466b07",
                    "api/0/foo",
                    "other",
                    "2023-09-13 17:12:19",
                    100,
                    1000,
                    1000,
                ],
                [
                    "B1",
                    0,
                    "root_1",
                    "B",
                    "connect",
                    "db",
                    "2023-09-13 17:12:19",
                    150,
                    50,
                    50.0,
                ],
                [
                    "C1",
                    0,
                    "root_1",
                    "C",
                    "resolve_conditions",
                    "discover.endpoint",
                    "2023-09-13 17:12:19",
                    155,
                    0,
                    10.0,
                ],
                [
                    "D1",
                    0,
                    "C1",
                    "D",
                    "resolve_orderby",
                    "discover.snql",
                    "2023-09-13 17:12:19",
                    157,
                    0,
                    20.0,
                ],
                [
                    "E1",
                    0,
                    "C1",
                    NULL_GROUP,
                    "resolve_columns",
                    "discover.snql",
                    "2023-09-13 17:12:19",
                    157,
                    0,
                    20.0,
                ],
            ],
        },
        {
            "transaction_id": "86b21833d1854d9b811000b91e7fccfa",
            "count": 71,
            "spans": [
                [
                    "root_2",
                    1,
                    "parent_2",
                    "e238e6c2e2466b07",
                    "bind_organization_context",
                    "other",
                    "2023-09-13 17:12:39",
                    100,
                    700,
                    0.0,
                ],
                [
                    "B2",
                    0,
                    "root_2",
                    "B",
                    "connect",
                    "db",
                    "2023-09-13 17:12:39",
                    110,
                    10,
                    30.0,
                ],
                [
                    "C2",
                    0,
                    "root_2",
                    "C",
                    "resolve_conditions",
                    "discover.endpoint",
                    "2023-09-13 17:12:39",
                    115,
                    0,
                    40.0,
                ],
                [
                    "D2",
                    0,
                    "C2",
                    "D",
                    "resolve_orderby",
                    "discover.snql",
                    "2023-09-13 17:12:39",
                    150,
                    0,
                    10.0,
                ],
                [
                    "D2-duplicate",
                    0,
                    "C2",
                    "D",
                    "resolve_orderby",
                    "discover.snql",
                    "2023-09-13 17:12:40",
                    155,
                    0,
                    20.0,
                ],
                [
                    "E2",
                    0,
                    "C2",
                    NULL_GROUP,
                    "resolve_columns",
                    "discover.snql",
                    "2023-09-13 17:12:39",
                    157,
                    0,
                    20.0,
                ],
            ],
        },
    ]
}
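
# Exercises the aggregate-span-waterfall endpoint against both backends: the
# indexedSpans path (Snuba, mocked above) and the nodestore path (real stored events).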
class OrganizationSpansAggregationTest(APITestCase, SnubaTestCase):
    url_name = "sentry-api-0-organization-spans-aggregation"
    FEATURES = [
        "organizations:starfish-aggregate-span-waterfall",
        "organizations:performance-view",
    ]

    def get_start_end(self, duration):
        return self.day_ago, self.day_ago + timedelta(milliseconds=duration)
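
    # Helper: builds a transaction payload via load_data, overrides the trace
    # context and optional fields, and stores it under the test feature flags.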
    def create_event(
        self,
        trace,
        transaction,
        spans,
        parent_span_id,
        project_id,
        tags=None,
        duration=4000,
        span_id=None,
        measurements=None,
        trace_context=None,
        environment=None,
        **kwargs,
    ):
        start, end = self.get_start_end(duration)
        data = load_data(
            "transaction",
            trace=trace,
            spans=spans,
            timestamp=end,
            start_timestamp=start,
            trace_context=trace_context,
        )
        data["transaction"] = transaction
        data["contexts"]["trace"]["parent_span_id"] = parent_span_id
        if span_id:
            data["contexts"]["trace"]["span_id"] = span_id
        if measurements:
            for key, value in measurements.items():
                data["measurements"][key]["value"] = value
        if tags is not None:
            data["tags"] = tags
        if environment is not None:
            data["environment"] = environment

        with self.feature(self.FEATURES):
            return self.store_event(data, project_id=project_id, **kwargs)
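
    # Stores two real "api/0/foo" transactions (production and development) whose
    # span trees mirror the mocked Snuba data, so the nodestore backend returns
    # comparable aggregates.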
    def setUp(self):
        super().setUp()
        self.login_as(user=self.user)
        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        self.span_ids_event_1 = dict(
            zip(["A", "B", "C", "D", "E"], [uuid4().hex[:16] for _ in range(5)])
        )
        self.trace_id_1 = uuid4().hex

        self.root_event_1 = self.create_event(
            trace=self.trace_id_1,
            trace_context={
                "trace_id": self.trace_id_1,
                "span_id": self.span_ids_event_1["A"],
                "exclusive_time": 100,
            },
            transaction="api/0/foo",
            spans=[
                {
                    "same_process_as_parent": True,
                    "op": "db",
                    "description": "connect",
                    "span_id": self.span_ids_event_1["B"],
                    "trace_id": self.trace_id_1,
                    "parent_span_id": self.span_ids_event_1["A"],
                    "exclusive_time": 50.0,
                    "data": {
                        "duration": 0.050,
                        "offset": 0.050,
                        "span.group": "B",
                        "span.description": "connect",
                    },
                    "sentry_tags": {
                        "description": "connect",
                    },
                },
                {
                    "same_process_as_parent": True,
                    "op": "discover.endpoint",
                    "description": "resolve_conditions",
                    "span_id": self.span_ids_event_1["C"],
                    "trace_id": self.trace_id_1,
                    "parent_span_id": self.span_ids_event_1["A"],
                    "exclusive_time": 10,
                    "data": {
                        "duration": 0.00,
                        "offset": 0.055,
                        "span.group": "C",
                        "span.description": "connect",
                    },
                    "sentry_tags": {
                        "description": "connect",
                    },
                },
                {
                    "same_process_as_parent": True,
                    "op": "discover.snql",
                    "description": "resolve_orderby",
                    "span_id": self.span_ids_event_1["D"],
                    "trace_id": self.trace_id_1,
                    "parent_span_id": self.span_ids_event_1["C"],
                    "exclusive_time": 20,
                    "data": {
                        "duration": 0.00,
                        "offset": 0.057,
                        "span.group": "D",
                        "span.description": "resolve_orderby",
                    },
                    "sentry_tags": {
                        "description": "resolve_orderby",
                    },
                },
                {
                    "same_process_as_parent": True,
                    "op": "discover.snql",
                    "description": "resolve_columns",
                    "span_id": self.span_ids_event_1["E"],
                    "trace_id": self.trace_id_1,
                    "parent_span_id": self.span_ids_event_1["C"],
                    "exclusive_time": 20,
                    "data": {
                        "duration": 0.00,
                        "offset": 0.057,
                        "span.description": "resolve_columns",
                    },
                },
            ],
            parent_span_id=None,
            project_id=self.project.id,
            duration=1000,
            environment="production",
        )

        self.span_ids_event_2 = dict(
            zip(["A", "B", "C", "D", "D2", "E"], [uuid4().hex[:16] for _ in range(6)])
        )
        self.trace_id_2 = uuid4().hex

        self.root_event_2 = self.create_event(
            trace=self.trace_id_2,
            trace_context={
                "trace_id": self.trace_id_2,
                "span_id": self.span_ids_event_2["A"],
                "exclusive_time": 100,
            },
            transaction="api/0/foo",
            spans=[
                {
                    "same_process_as_parent": True,
                    "op": "db",
                    "description": "connect",
                    "span_id": self.span_ids_event_2["B"],
                    "trace_id": self.trace_id_2,
                    "parent_span_id": self.span_ids_event_2["A"],
                    "exclusive_time": 50.0,
                    "data": {
                        "duration": 0.010,
                        "offset": 0.010,
                        "span.group": "B",
                        "span.description": "connect",
                    },
                    "sentry_tags": {
                        "description": "connect",
                    },
                },
                {
                    "same_process_as_parent": True,
                    "op": "discover.endpoint",
                    "description": "resolve_conditions",
                    "span_id": self.span_ids_event_2["C"],
                    "trace_id": self.trace_id_2,
                    "parent_span_id": self.span_ids_event_2["A"],
                    "exclusive_time": 10,
                    "data": {
                        "duration": 0.00,
                        "offset": 0.015,
                        "span.group": "C",
                        "span.description": "connect",
                    },
                    "sentry_tags": {
                        "description": "connect",
                    },
                },
                {
                    "same_process_as_parent": True,
                    "op": "discover.snql",
                    "description": "resolve_orderby",
                    "span_id": self.span_ids_event_2["D"],
                    "trace_id": self.trace_id_2,
                    "parent_span_id": self.span_ids_event_2["C"],
                    "exclusive_time": 10,
                    "data": {
                        "duration": 0.00,
                        "offset": 0.050,
                        "span.group": "D",
                        "span.description": "resolve_orderby",
                    },
                    "sentry_tags": {
                        "description": "resolve_orderby",
                    },
                },
                {
                    "same_process_as_parent": True,
                    "op": "discover.snql",
                    "description": "resolve_orderby",
                    "span_id": self.span_ids_event_2["D2"],
                    "trace_id": self.trace_id_2,
                    "parent_span_id": self.span_ids_event_2["C"],
                    "exclusive_time": 20,
                    "data": {
                        "duration": 0.00,
                        "offset": 1.055,
                        "span.group": "D",
                        "span.description": "resolve_orderby",
                    },
                    "sentry_tags": {
                        "description": "resolve_orderby",
                    },
                },
                {
                    "same_process_as_parent": True,
                    "op": "discover.snql",
                    "description": "resolve_columns",
                    "span_id": self.span_ids_event_2["E"],
                    "trace_id": self.trace_id_2,
                    "parent_span_id": self.span_ids_event_2["C"],
                    "exclusive_time": 20,
                    "data": {
                        "duration": 0.00,
                        "offset": 0.057,
                        "span.description": "resolve_columns",
                    },
                },
            ],
            parent_span_id=None,
            project_id=self.project.id,
            duration=700,
            environment="development",
        )

        self.url = reverse(
            self.url_name,
            kwargs={"organization_slug": self.project.organization.slug},
        )
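
    # Aggregation by span-group fingerprint: counts, descriptions, average
    # durations/exclusive times, and per-backend sample identifiers.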
    @mock.patch("sentry.api.endpoints.organization_spans_aggregation.raw_snql_query")
    def test_simple(self, mock_query):
        mock_query.side_effect = [MOCK_SNUBA_RESPONSE]
        for backend in ["indexedSpans", "nodestore"]:
            with self.feature(self.FEATURES):
                response = self.client.get(
                    self.url,
                    data={"transaction": "api/0/foo", "backend": backend},
                    format="json",
                )

            assert response.data
            data = response.data
            root_fingerprint = hashlib.md5(b"e238e6c2e2466b07").hexdigest()[:16]
            assert root_fingerprint in data
            assert data[root_fingerprint]["count()"] == 2
            assert data[root_fingerprint]["description"] == "api/0/foo"
            assert round(data[root_fingerprint]["avg(duration)"]) == 850

            if backend == "indexedSpans":
                assert data[root_fingerprint]["samples"] == {
                    ("80fe542aea4945ffbe612646987ee449", "root_1"),
                    ("86b21833d1854d9b811000b91e7fccfa", "root_2"),
                }
            else:
                assert data[root_fingerprint]["samples"] == {
                    (self.root_event_1.event_id, self.span_ids_event_1["A"]),
                    (self.root_event_2.event_id, self.span_ids_event_2["A"]),
                }

            fingerprint = hashlib.md5(b"e238e6c2e2466b07-B").hexdigest()[:16]
            assert data[fingerprint]["description"] == "connect"
            assert round(data[fingerprint]["avg(duration)"]) == 30

            fingerprint = hashlib.md5(b"e238e6c2e2466b07-C-D").hexdigest()[:16]
            assert data[fingerprint]["description"] == "resolve_orderby"
            assert data[fingerprint]["avg(exclusive_time)"] == 15.0
            assert data[fingerprint]["count()"] == 2

            fingerprint = hashlib.md5(b"e238e6c2e2466b07-C-D2").hexdigest()[:16]
            assert data[fingerprint]["description"] == "resolve_orderby"
            assert data[fingerprint]["avg(exclusive_time)"] == 20.0
            assert data[fingerprint]["count()"] == 1
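
    # Average absolute offset (in ms from the root span's start) per fingerprint.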
    @mock.patch("sentry.api.endpoints.organization_spans_aggregation.raw_snql_query")
    def test_offset_logic(self, mock_query):
        mock_query.side_effect = [MOCK_SNUBA_RESPONSE]
        for backend in ["indexedSpans", "nodestore"]:
            with self.feature(self.FEATURES):
                response = self.client.get(
                    self.url,
                    data={"transaction": "api/0/foo", "backend": backend},
                    format="json",
                )

            assert response.data
            data = response.data
            root_fingerprint = hashlib.md5(b"e238e6c2e2466b07").hexdigest()[:16]
            assert root_fingerprint in data
            assert data[root_fingerprint]["avg(absolute_offset)"] == 0.0

            fingerprint = hashlib.md5(b"e238e6c2e2466b07-B").hexdigest()[:16]
            assert data[fingerprint]["avg(absolute_offset)"] == 30.0

            fingerprint = hashlib.md5(b"e238e6c2e2466b07-C").hexdigest()[:16]
            assert data[fingerprint]["avg(absolute_offset)"] == 35.0

            fingerprint = hashlib.md5(b"e238e6c2e2466b07-C-D").hexdigest()[:16]
            assert data[fingerprint]["avg(absolute_offset)"] == 53.5

            fingerprint = hashlib.md5(b"e238e6c2e2466b07-C-D2").hexdigest()[:16]
            assert data[fingerprint]["avg(absolute_offset)"] == 1075.0
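
    # Spans without a group (NULL_GROUP) fall back to the span op in the
    # fingerprint path and report an empty description.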
    @mock.patch("sentry.api.endpoints.organization_spans_aggregation.raw_snql_query")
    def test_null_group_fallback(self, mock_query):
        mock_query.side_effect = [MOCK_SNUBA_RESPONSE]
        for backend in ["indexedSpans", "nodestore"]:
            with self.feature(self.FEATURES):
                response = self.client.get(
                    self.url,
                    data={"transaction": "api/0/foo", "backend": backend},
                    format="json",
                )

            assert response.data
            data = response.data
            root_fingerprint = hashlib.md5(b"e238e6c2e2466b07-C-discover.snql").hexdigest()[:16]
            assert root_fingerprint in data
            assert data[root_fingerprint]["description"] == ""
            assert data[root_fingerprint]["count()"] == 2
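
    # http.method filter: restricts nodestore results and, for indexedSpans,
    # adds an ifNull(sentry_tags[transaction.method], '') = 'GET' condition
    # to the Snuba query.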
    @mock.patch("sentry.api.endpoints.organization_spans_aggregation.raw_snql_query")
    def test_http_method_filter(self, mock_query):
        with self.feature(self.FEATURES):
            response = self.client.get(
                self.url,
                data={"transaction": "api/0/foo", "backend": "nodestore", "http.method": "GET"},
                format="json",
            )

        assert response.data
        data = response.data
        root_fingerprint = hashlib.md5(b"e238e6c2e2466b07").hexdigest()[:16]
        assert root_fingerprint in data
        assert data[root_fingerprint]["count()"] == 2

        with self.feature(self.FEATURES):
            response = self.client.get(
                self.url,
                data={"transaction": "api/0/foo", "backend": "nodestore", "http.method": "POST"},
                format="json",
            )

        assert response.data == {}

        with self.feature(self.FEATURES):
            self.client.get(
                self.url,
                data={"transaction": "api/0/foo", "backend": "indexedSpans", "http.method": "GET"},
                format="json",
            )

        assert (
            Condition(
                lhs=Function(
                    function="ifNull",
                    parameters=[
                        Column(
                            name="sentry_tags[transaction.method]",
                        ),
                        "",
                    ],
                    alias=None,
                ),
                op=Op.EQ,
                rhs="GET",
            )
            in mock_query.mock_calls[0].args[0].query.where
        )
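
    # environment filter on the nodestore backend: a single environment matches
    # one stored transaction; a list of environments matches both.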
    def test_environment_filter(self):
        with self.feature(self.FEATURES):
            response = self.client.get(
                self.url,
                data={
                    "transaction": "api/0/foo",
                    "backend": "nodestore",
                    "environment": "production",
                },
                format="json",
            )

        assert response.data
        data = response.data
        root_fingerprint = hashlib.md5(b"e238e6c2e2466b07").hexdigest()[:16]
        assert root_fingerprint in data
        assert data[root_fingerprint]["count()"] == 1

        with self.feature(self.FEATURES):
            response = self.client.get(
                self.url,
                data={
                    "transaction": "api/0/foo",
                    "backend": "nodestore",
                    "environment": ["production", "development"],
                },
                format="json",
            )

        assert response.data
        data = response.data
        root_fingerprint = hashlib.md5(b"e238e6c2e2466b07").hexdigest()[:16]
        assert root_fingerprint in data
        assert data[root_fingerprint]["count()"] == 2