# test_unmerge.py

from __future__ import annotations

import functools
import hashlib
import itertools
import logging
import uuid
from datetime import datetime, timedelta, timezone
from unittest import mock
from unittest.mock import patch

from django.utils import timezone as django_timezone

from sentry import eventstream, tagstore, tsdb
from sentry.eventstore.models import Event
from sentry.models.environment import Environment
from sentry.models.group import Group
from sentry.models.grouphash import GroupHash
from sentry.models.grouprelease import GroupRelease
from sentry.models.release import Release
from sentry.models.userreport import UserReport
from sentry.similarity import _make_index_backend, features
from sentry.tasks.merge import merge_groups
from sentry.tasks.unmerge import (
    get_caches,
    get_event_user_from_interface,
    get_fingerprint,
    get_group_backfill_attributes,
    get_group_creation_attributes,
    unmerge,
)
from sentry.testutils.cases import SnubaTestCase, TestCase
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.testutils.helpers.features import with_feature
from sentry.tsdb.base import TSDBModel
from sentry.utils import redis
from sentry.utils.dates import to_timestamp

# Use the default redis client as a cluster client in the similarity index
index = _make_index_backend(redis.clusters.get("default").get_local_client(0))


@patch.object(features, "index", new=index)
class UnmergeTestCase(TestCase, SnubaTestCase):
    def test_get_fingerprint(self):
        assert (
            get_fingerprint(
                self.store_event(data={"message": "Hello world"}, project_id=self.project.id)
            )
            == hashlib.md5(b"Hello world").hexdigest()
        )

        assert (
            get_fingerprint(
                self.store_event(
                    data={"message": "Hello world", "fingerprint": ["Not hello world"]},
                    project_id=self.project.id,
                )
            )
            == hashlib.md5(b"Not hello world").hexdigest()
        )

    def test_get_group_creation_attributes(self):
        now = datetime.utcnow().replace(microsecond=0, tzinfo=timezone.utc)
        e1 = self.store_event(
            data={
                "fingerprint": ["group1"],
                "platform": "javascript",
                "message": "Hello from JavaScript",
                "type": "default",
                "level": "info",
                "tags": {"logger": "javascript"},
                "timestamp": iso_format(now),
            },
            project_id=self.project.id,
        )
        e2 = self.store_event(
            data={
                "fingerprint": ["group1"],
                "platform": "python",
                "message": "Hello from Python",
                "type": "default",
                "level": "error",
                "tags": {"logger": "python"},
                "timestamp": iso_format(now),
            },
            project_id=self.project.id,
        )
        e3 = self.store_event(
            data={
                "fingerprint": ["group1"],
                "platform": "java",
                "message": "Hello from Java",
                "type": "default",
                "level": "debug",
                "tags": {"logger": "java"},
                "timestamp": iso_format(now),
            },
            project_id=self.project.id,
        )
        events = [e1, e2, e3]

        assert get_group_creation_attributes(get_caches(), events) == {
            "active_at": now,
            "first_seen": now,
            "last_seen": now,
            "platform": "java",
            "message": "Hello from JavaScript",
            "level": logging.INFO,
            "score": Group.calculate_score(3, now),
            "logger": "java",
            "times_seen": 3,
            "first_release": None,
            "culprit": "",
            "data": {
                "type": "default",
                "last_received": e1.data["received"],
                "metadata": {"title": "Hello from JavaScript"},
            },
        }

    def test_get_group_backfill_attributes(self):
        now = datetime.utcnow().replace(microsecond=0, tzinfo=timezone.utc)

        assert get_group_backfill_attributes(
            get_caches(),
            Group(
                active_at=now,
                first_seen=now,
                last_seen=now,
                platform="javascript",
                message="Hello from JavaScript",
                level=logging.INFO,
                score=Group.calculate_score(3, now),
                logger="javascript",
                times_seen=1,
                first_release=None,
                culprit="",
                data={"type": "default", "last_received": to_timestamp(now), "metadata": {}},
            ),
            [
                self.store_event(
                    data={
                        "platform": "python",
                        "message": "Hello from Python",
                        "timestamp": iso_format(now - timedelta(hours=1)),
                        "type": "default",
                        "level": "debug",
                        "tags": {"logger": "java"},
                    },
                    project_id=self.project.id,
                ),
                self.store_event(
                    data={
                        "platform": "java",
                        "message": "Hello from Java",
                        "timestamp": iso_format(now - timedelta(hours=2)),
                        "type": "default",
                        "level": "debug",
                        "tags": {"logger": "java"},
                    },
                    project_id=self.project.id,
                ),
            ],
        ) == {
            "active_at": now - timedelta(hours=2),
            "first_seen": now - timedelta(hours=2),
            "platform": "java",
            "score": Group.calculate_score(3, now),
            "logger": "java",
            "times_seen": 3,
            "first_release": None,
        }

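    # End-to-end scenario: create three groups, merge the first into the
    # second, then unmerge the first fingerprint's events into the third.
    # Counts, tags, releases, time series, and similarity features are
    # verified after each step.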
    @with_feature("projects:similarity-indexing")
    @mock.patch("sentry.analytics.record")
    def test_unmerge(self, mock_record):
        now = before_now(minutes=5).replace(microsecond=0, tzinfo=timezone.utc)

        def time_from_now(offset=0):
            return now + timedelta(seconds=offset)

        project = self.create_project()
        project.date_added = django_timezone.now() - timedelta(minutes=10)
        project.save()

        sequence = itertools.count(0)
        tag_values = itertools.cycle(["red", "green", "blue"])
        user_values = itertools.cycle([{"id": 1}, {"id": 2}])

        def create_message_event(
            template, parameters, environment, release, fingerprint="group1"
        ) -> Event:
            i = next(sequence)
            event_id = uuid.UUID(fields=(i, 0x0, 0x1000, 0x80, 0x80, 0x808080808080)).hex

            tags = [["color", next(tag_values)]]
            if release:
                tags.append(["sentry:release", release])

            event = self.store_event(
                data={
                    "event_id": event_id,
                    "message": template % parameters,
                    "type": "default",
                    "user": next(user_values),
                    "tags": tags,
                    "fingerprint": [fingerprint],
                    "timestamp": iso_format(now + timedelta(seconds=i)),
                    "environment": environment,
                    "release": release,
                },
                project_id=project.id,
            )

            UserReport.objects.create(
                project_id=project.id,
                group_id=event.group.id,
                event_id=event_id,
                name="Log Hat",
                email="ceo@corptron.com",
                comments="Quack",
            )

            features.record([event])

            return event

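        # Events 0-9 hash to "group1" (production, release "version"), events
        # 10-15 to "group2" (production, "version2"), and event 16 to "group3"
        # (staging, "version3"); `events` maps each primary hash to its events.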
        events: dict[str | None, list[Event]] = {}

        for event in (
            create_message_event(
                "This is message #%s.", i, environment="production", release="version"
            )
            for i in range(10)
        ):
            events.setdefault(get_fingerprint(event), []).append(event)

        for event in (
            create_message_event(
                "This is message #%s!",
                i,
                environment="production",
                release="version2",
                fingerprint="group2",
            )
            for i in range(10, 16)
        ):
            events.setdefault(get_fingerprint(event), []).append(event)

        event = create_message_event(
            "This is message #%s!",
            17,
            environment="staging",
            release="version3",
            fingerprint="group3",
        )
        events.setdefault(get_fingerprint(event), []).append(event)

        merge_source, source, destination = list(Group.objects.all())

        assert len(events) == 3
        assert sum(len(x) for x in events.values()) == 17

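        # Merge the first group into the second so that `source` temporarily
        # owns both sets of events.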
        production_environment = Environment.objects.get(
            organization_id=project.organization_id, name="production"
        )

        with self.tasks():
            eventstream_state = eventstream.backend.start_merge(
                project.id, [merge_source.id], source.id
            )
            merge_groups.delay([merge_source.id], source.id)
            eventstream.backend.end_merge(eventstream_state)

        assert {
            (gtv.value, gtv.times_seen)
            for gtv in tagstore.backend.get_group_tag_values(
                source,
                production_environment.id,
                "color",
                tenant_ids={"referrer": "get_tag_values", "organization_id": 1},
            )
        } == {("red", 6), ("green", 5), ("blue", 5)}

        similar_items = features.compare(source)
        assert len(similar_items) == 2
        assert similar_items[0][0] == source.id
        assert similar_items[0][1]["message:message:character-shingles"] == 1.0
        assert similar_items[1][0] == destination.id
        assert similar_items[1][1]["message:message:character-shingles"] < 1.0

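        # Unmerge the first fingerprint's events out of `source` and into the
        # existing `destination` group.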
        with self.tasks():
            unmerge.delay(
                project.id, source.id, destination.id, [list(events.keys())[0]], None, batch_size=5
            )

        assert (
            list(
                Group.objects.filter(id=merge_source.id).values_list(
                    "times_seen", "first_seen", "last_seen"
                )
            )
            == []
        )

        assert list(
            Group.objects.filter(id=source.id).values_list("times_seen", "first_seen", "last_seen")
        ) == [(6, time_from_now(10), time_from_now(15))]

        assert list(
            Group.objects.filter(id=destination.id).values_list(
                "times_seen", "first_seen", "last_seen"
            )
        ) == [(11, time_from_now(0), time_from_now(16))]

        assert source.id != destination.id
        assert source.project == destination.project

        # These event ids stayed on `source` after the unmerge.
        source_event_ids = set(map(lambda event: event.event_id, list(events.values())[1]))

        assert source_event_ids == set(
            UserReport.objects.filter(group_id=source.id).values_list("event_id", flat=True)
        )

        assert list(
            GroupHash.objects.filter(group_id=source.id).values_list("hash", flat=True)
        ) == [list(events.keys())[1]]

        assert set(
            GroupRelease.objects.filter(group_id=source.id).values_list(
                "environment", "first_seen", "last_seen"
            )
        ) == {("production", time_from_now(10), time_from_now(15))}

        assert {
            (gtv.value, gtv.times_seen)
            for gtv in tagstore.backend.get_group_tag_values(
                source,
                production_environment.id,
                "color",
                tenant_ids={"referrer": "get_tag_values", "organization_id": 1},
            )
        } == {("red", 2), ("green", 2), ("blue", 2)}

        destination_event_ids = set(
            map(lambda event: event.event_id, list(events.values())[0] + list(events.values())[2])
        )

        assert destination_event_ids == set(
            UserReport.objects.filter(group_id=destination.id).values_list("event_id", flat=True)
        )

        assert set(
            GroupHash.objects.filter(group_id=destination.id).values_list("hash", flat=True)
        ) == {list(events.keys())[0], list(events.keys())[2]}

        assert set(
            GroupRelease.objects.filter(group_id=destination.id).values_list(
                "environment", "first_seen", "last_seen"
            )
        ) == {
            ("production", time_from_now(0), time_from_now(9)),
            ("staging", time_from_now(16), time_from_now(16)),
        }

        assert {
            (gtv.value, gtv.times_seen)
            for gtv in tagstore.backend.get_group_tag_values(
                destination,
                production_environment.id,
                "color",
                tenant_ids={"referrer": "get_tag_values", "organization_id": 1},
            )
        } == {("red", 4), ("blue", 3), ("green", 3)}

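        # The hourly event counts should have been redistributed between the
        # two groups, both globally and per environment.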
        rollup_duration = 3600

        time_series = tsdb.backend.get_range(
            TSDBModel.group,
            [source.id, destination.id],
            now - timedelta(seconds=rollup_duration),
            time_from_now(17),
            rollup_duration,
            tenant_ids={"referrer": "get_range", "organization_id": 1},
        )

        environment_time_series = tsdb.backend.get_range(
            TSDBModel.group,
            [source.id, destination.id],
            now - timedelta(seconds=rollup_duration),
            time_from_now(17),
            rollup_duration,
            environment_ids=[production_environment.id],
            tenant_ids={"referrer": "get_range", "organization_id": 1},
        )

        def get_expected_series_values(rollup, events, function=None):
            if function is None:

                def function(aggregate, event):
                    return (aggregate if aggregate is not None else 0) + 1

            expected: dict[float, float] = {}
            for event in events:
                # Bucket each event into its rollup window.
                k = float((to_timestamp(event.datetime) // rollup) * rollup)
                expected[k] = function(expected.get(k), event)

            return expected

        def assert_series_contains(expected, actual, default=0):
            actual = dict(actual)

            for key, value in expected.items():
                assert actual.get(key, 0) == value

            for key in set(actual.keys()) - set(expected.keys()):
                assert actual.get(key, 0) == default

        assert_series_contains(
            get_expected_series_values(rollup_duration, list(events.values())[1]),
            time_series[source.id],
            0,
        )

        assert_series_contains(
            get_expected_series_values(
                rollup_duration, list(events.values())[0] + list(events.values())[2]
            ),
            time_series[destination.id],
            0,
        )

        assert_series_contains(
            get_expected_series_values(rollup_duration, list(events.values())[1]),
            environment_time_series[source.id],
            0,
        )

        assert_series_contains(
            get_expected_series_values(
                rollup_duration, list(events.values())[0][:-1] + list(events.values())[2]
            ),
            environment_time_series[destination.id],
            0,
        )

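        # Distinct users-affected counts should likewise follow the events.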
        time_series = tsdb.backend.get_distinct_counts_series(
            TSDBModel.users_affected_by_group,
            [source.id, destination.id],
            now - timedelta(seconds=rollup_duration),
            time_from_now(17),
            rollup_duration,
            tenant_ids={"referrer": "r", "organization_id": 1234},
        )

        environment_time_series = tsdb.backend.get_distinct_counts_series(
            TSDBModel.users_affected_by_group,
            [source.id, destination.id],
            now - timedelta(seconds=rollup_duration),
            time_from_now(17),
            rollup_duration,
            environment_id=production_environment.id,
            tenant_ids={"referrer": "r", "organization_id": 1234},
        )

        def collect_by_user_tag(aggregate, event):
            aggregate = aggregate if aggregate is not None else set()
            aggregate.add(
                get_event_user_from_interface(event.data["user"], event.group.project).tag_value
            )
            mock_record.assert_called_with(
                "eventuser_endpoint.request",
                project_id=event.group.project.id,
                endpoint="sentry.tasks.unmerge.get_event_user_from_interface",
            )
            return aggregate

        for series in [time_series, environment_time_series]:
            assert_series_contains(
                {
                    timestamp: len(values)
                    for timestamp, values in get_expected_series_values(
                        rollup_duration, list(events.values())[1], collect_by_user_tag
                    ).items()
                },
                series[source.id],
            )

            assert_series_contains(
                {
                    timestamp: len(values)
                    for timestamp, values in get_expected_series_values(
                        rollup_duration,
                        list(events.values())[0] + list(events.values())[2],
                        collect_by_user_tag,
                    ).items()
                },
                time_series[destination.id],
            )

        def strip_zeroes(data):
            for group_id, series in data.items():
                for _, values in series:
                    for key, val in list(values.items()):
                        if val == 0:
                            values.pop(key)

            return data

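        # Per-release frequency counters: map each event to its GroupRelease
        # row and confirm the counts moved with the events.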
        def collect_by_release(group, aggregate, event):
            aggregate = aggregate if aggregate is not None else {}
            release = event.get_tag("sentry:release")
            if not release:
                return aggregate
            release = GroupRelease.objects.get(
                group_id=group.id,
                environment=event.data["environment"],
                release_id=Release.objects.get(
                    organization_id=project.organization_id, version=release
                ).id,
            ).id
            aggregate[release] = aggregate.get(release, 0) + 1
            return aggregate

        items = {}
        for i in [source.id, destination.id]:
            items[i] = list(GroupRelease.objects.filter(group_id=i).values_list("id", flat=True))

        time_series = strip_zeroes(
            tsdb.backend.get_frequency_series(
                TSDBModel.frequent_releases_by_group,
                items,
                now - timedelta(seconds=rollup_duration),
                time_from_now(17),
                rollup_duration,
                tenant_ids={"referrer": "r", "organization_id": 1234},
            )
        )

        assert_series_contains(
            get_expected_series_values(
                rollup_duration,
                list(events.values())[1],
                functools.partial(collect_by_release, source),
            ),
            time_series[source.id],
            {},
        )

        assert_series_contains(
            get_expected_series_values(
                rollup_duration,
                list(events.values())[0] + list(events.values())[2],
                functools.partial(collect_by_release, destination),
            ),
            time_series[destination.id],
            {},
        )

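        # Per-environment frequency counters should show the same split.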
        items = {}
        for i in [source.id, destination.id]:
            items[i] = list(Environment.objects.all().values_list("id", flat=True))

        time_series = strip_zeroes(
            tsdb.backend.get_frequency_series(
                TSDBModel.frequent_environments_by_group,
                items,
                now - timedelta(seconds=rollup_duration),
                time_from_now(17),
                rollup_duration,
                tenant_ids={"referrer": "r", "organization_id": 1234},
            )
        )

        def collect_by_environment(aggregate, event):
            aggregate = aggregate if aggregate is not None else {}
            environment = Environment.objects.get(
                organization_id=project.organization_id, name=event.data["environment"]
            ).id
            aggregate[environment] = aggregate.get(environment, 0) + 1
            return aggregate

        assert_series_contains(
            get_expected_series_values(
                rollup_duration, list(events.values())[1], collect_by_environment
            ),
            time_series[source.id],
            {},
        )

        assert_series_contains(
            get_expected_series_values(
                rollup_duration,
                list(events.values())[0] + list(events.values())[2],
                collect_by_environment,
            ),
            time_series[destination.id],
            {},
        )

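        # After the unmerge, each group is most similar to itself and only
        # partially similar to the other.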
        source_similar_items = features.compare(source)
        assert source_similar_items[0] == (
            source.id,
            {
                "exception:message:character-shingles": None,
                "exception:stacktrace:application-chunks": None,
                "exception:stacktrace:pairs": None,
                "message:message:character-shingles": 1.0,
            },
        )
        assert source_similar_items[1][0] == destination.id
        assert source_similar_items[1][1]["message:message:character-shingles"] < 1.0

        destination_similar_items = features.compare(destination)
        assert destination_similar_items[0] == (
            destination.id,
            {
                "exception:message:character-shingles": None,
                "exception:stacktrace:application-chunks": None,
                "exception:stacktrace:pairs": None,
                "message:message:character-shingles": 1.0,
            },
        )
        assert destination_similar_items[1][0] == source.id
        assert destination_similar_items[1][1]["message:message:character-shingles"] < 1.0