# tests/sentry/tasks/test_post_process.py
  1. from __future__ import annotations
  2. import abc
  3. import time
  4. import uuid
  5. from datetime import datetime, timedelta, timezone
  6. from hashlib import md5
  7. from typing import Any
  8. from unittest import mock
  9. from unittest.mock import Mock, patch
  10. import pytest
  11. from django.db import router
  12. from django.test import override_settings
  13. from django.utils import timezone as django_timezone
  14. from sentry import buffer
  15. from sentry.buffer.redis import RedisBuffer
  16. from sentry.eventstore.models import Event
  17. from sentry.eventstore.processing import event_processing_store
  18. from sentry.ingest.transaction_clusterer import ClustererNamespace
  19. from sentry.issues.escalating import manage_issue_states
  20. from sentry.issues.grouptype import PerformanceNPlusOneGroupType, ProfileFileIOGroupType
  21. from sentry.issues.ingest import save_issue_occurrence
  22. from sentry.models import (
  23. Activity,
  24. Group,
  25. GroupAssignee,
  26. GroupInbox,
  27. GroupInboxReason,
  28. GroupOwner,
  29. GroupOwnerType,
  30. GroupSnooze,
  31. GroupStatus,
  32. Integration,
  33. )
  34. from sentry.models.activity import ActivityIntegration
  35. from sentry.models.groupowner import (
  36. ASSIGNEE_EXISTS_DURATION,
  37. ASSIGNEE_EXISTS_KEY,
  38. ISSUE_OWNERS_DEBOUNCE_DURATION,
  39. ISSUE_OWNERS_DEBOUNCE_KEY,
  40. )
  41. from sentry.models.projectownership import ProjectOwnership
  42. from sentry.models.projectteam import ProjectTeam
  43. from sentry.ownership.grammar import Matcher, Owner, Rule, dump_schema
  44. from sentry.replays.lib import kafka as replays_kafka
  45. from sentry.rules import init_registry
  46. from sentry.services.hybrid_cloud.user.service import user_service
  47. from sentry.silo import unguarded_write
  48. from sentry.tasks.derive_code_mappings import SUPPORTED_LANGUAGES
  49. from sentry.tasks.merge import merge_groups
  50. from sentry.tasks.post_process import (
  51. ISSUE_OWNERS_PER_PROJECT_PER_MIN_RATELIMIT,
  52. post_process_group,
  53. process_event,
  54. )
  55. from sentry.testutils.cases import BaseTestCase, PerformanceIssueTestCase, SnubaTestCase, TestCase
  56. from sentry.testutils.helpers import with_feature
  57. from sentry.testutils.helpers.datetime import before_now, iso_format
  58. from sentry.testutils.helpers.eventprocessing import write_event_to_cache
  59. from sentry.testutils.performance_issues.store_transaction import PerfIssueTransactionTestMixin
  60. from sentry.testutils.silo import region_silo_test
  61. from sentry.types.activity import ActivityType
  62. from sentry.types.group import GroupSubStatus
  63. from sentry.utils import json
  64. from sentry.utils.cache import cache
  65. from tests.sentry.issues.test_utils import OccurrenceTestMixin
  66. class EventMatcher:
  67. def __init__(self, expected, group=None):
  68. self.expected = expected
  69. self.expected_group = group
  70. def __eq__(self, other):
  71. matching_id = other.event_id == self.expected.event_id
  72. if self.expected_group:
  73. return (
  74. matching_id
  75. and self.expected_group == other.group
  76. and self.expected_group.id == other.group_id
  77. )
  78. return matching_id
class BasePostProgressGroupMixin(BaseTestCase, metaclass=abc.ABCMeta):
    """Abstract base for the shared post-process test mixins in this module.

    Concrete test cases define how an event is created and how the
    post-process task is invoked for it; the sibling mixins then provide
    the actual test bodies on top of these two hooks.
    """

    # NOTE(review): "Progress" in the class name looks like a typo for
    # "Process", but renaming would touch every subclass, so it stays.

    @abc.abstractmethod
    def create_event(self, data, project_id, assert_no_errors=True):
        """Store and return an event built from ``data`` for ``project_id``."""
        pass

    @abc.abstractmethod
    def call_post_process_group(
        self, is_new, is_regression, is_new_group_environment, event, cache_key=None
    ):
        """Invoke post-processing for ``event`` with the given state flags."""
        pass
class CorePostProcessGroupTestMixin(BasePostProgressGroupMixin):
    # Core post_process_group behavior: issueless (transaction) events skip
    # issue-driven side effects, a missing processing-store entry aborts the
    # task, and the processing cache is cleared once the task completes.

    @patch("sentry.rules.processor.RuleProcessor")
    @patch("sentry.tasks.servicehooks.process_service_hook")
    @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
    @patch("sentry.signals.event_processed.send_robust")
    def test_issueless(
        self,
        mock_signal,
        mock_process_resource_change_bound,
        mock_process_service_hook,
        mock_processor,
    ):
        # A transaction event has no issue/group, so rules, service hooks and
        # resource-change hooks must all stay untouched.
        min_ago = iso_format(before_now(minutes=1))
        event = self.store_event(
            data={
                "type": "transaction",
                "timestamp": min_ago,
                "start_timestamp": min_ago,
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
        )
        cache_key = write_event_to_cache(event)
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
            cache_key=cache_key,
        )
        assert mock_processor.call_count == 0
        assert mock_process_service_hook.call_count == 0
        assert mock_process_resource_change_bound.call_count == 0
        # transaction events do not call event.processed
        assert mock_signal.call_count == 0

    @patch("sentry.rules.processor.RuleProcessor")
    def test_no_cache_abort(self, mock_processor):
        # A cache key that resolves to nothing means the event payload is
        # gone; the task should bail before any rule processing runs.
        event = self.create_event(data={}, project_id=self.project.id)
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
            cache_key="total-rubbish",
        )
        assert mock_processor.call_count == 0

    def test_processing_cache_cleared(self):
        # After post-processing, the event must be evicted from the
        # event processing store.
        event = self.create_event(data={}, project_id=self.project.id)
        cache_key = self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        assert event_processing_store.get(cache_key) is None

    def test_processing_cache_cleared_with_commits(self):
        # Regression test to guard against suspect commit calculations breaking the
        # cache
        event = self.create_event(data={}, project_id=self.project.id)
        self.create_commit(repo=self.create_repo())
        cache_key = self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        assert event_processing_store.get(cache_key) is None
class DeriveCodeMappingsProcessGroupTestMixin(BasePostProgressGroupMixin):
    # Tests for the derive_code_mappings side effect of post-processing,
    # including its per-project (hourly) and per-group (daily) debouncing.

    def _create_event(
        self,
        data: dict[str, Any],
        project_id: int | None = None,
    ) -> Event:
        # Default to a platform that code-mapping derivation supports.
        data.setdefault("platform", "javascript")
        return self.store_event(data=data, project_id=project_id or self.project.id)

    def _call_post_process_group(self, event: Event) -> None:
        # Thin wrapper with the flag combination these tests always use.
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )

    @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
    def test_derive_invalid_platform(self, mock_derive_code_mappings):
        # An unsupported platform must not schedule the derivation task.
        event = self._create_event({"platform": "elixir"})
        self._call_post_process_group(event)
        assert mock_derive_code_mappings.delay.call_count == 0

    @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
    def test_derive_supported_languages(self, mock_derive_code_mappings):
        # NOTE(review): the count stays at 1 across iterations — presumably
        # the per-project debounce (see the hourly test below) swallows the
        # later events; confirm if adding platforms here.
        for platform in SUPPORTED_LANGUAGES:
            event = self._create_event({"platform": platform})
            self._call_post_process_group(event)
            assert mock_derive_code_mappings.delay.call_count == 1

    @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
    def test_only_maps_a_given_project_once_per_hour(self, mock_derive_code_mappings):
        # Distinct fingerprints put each event in its own group, so only the
        # project-level debounce is exercised here.
        dogs_project = self.create_project()
        maisey_event = self._create_event(
            {
                "fingerprint": ["themaiseymasieydog"],
            },
            dogs_project.id,
        )
        charlie_event = self._create_event(
            {
                "fingerprint": ["charliebear"],
            },
            dogs_project.id,
        )
        cory_event = self._create_event(
            {
                "fingerprint": ["thenudge"],
            },
            dogs_project.id,
        )
        bodhi_event = self._create_event(
            {
                "fingerprint": ["theescapeartist"],
            },
            dogs_project.id,
        )
        self._call_post_process_group(maisey_event)
        assert mock_derive_code_mappings.delay.call_count == 1
        # second event from project should bail (no increase in call count)
        self._call_post_process_group(charlie_event)
        assert mock_derive_code_mappings.delay.call_count == 1
        # advance the clock 59 minutes, and it should still bail
        with patch("time.time", return_value=time.time() + 60 * 59):
            self._call_post_process_group(cory_event)
            assert mock_derive_code_mappings.delay.call_count == 1
        # now advance the clock 61 minutes, and this time it should go through
        with patch("time.time", return_value=time.time() + 60 * 61):
            self._call_post_process_group(bodhi_event)
            assert mock_derive_code_mappings.delay.call_count == 2

    @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
    def test_only_maps_a_given_issue_once_per_day(self, mock_derive_code_mappings):
        # Identical fingerprints force all four events into one group, so the
        # group-level (24h) debounce is what is exercised here.
        dogs_project = self.create_project()
        maisey_event1 = self._create_event(
            {
                "fingerprint": ["themaiseymaiseydog"],
            },
            dogs_project.id,
        )
        maisey_event2 = self._create_event(
            {
                "fingerprint": ["themaiseymaiseydog"],
            },
            dogs_project.id,
        )
        maisey_event3 = self._create_event(
            {
                "fingerprint": ["themaiseymaiseydog"],
            },
            dogs_project.id,
        )
        maisey_event4 = self._create_event(
            {
                "fingerprint": ["themaiseymaiseydog"],
            },
            dogs_project.id,
        )
        # because of the fingerprint, the events should always end up in the same group,
        # but the rest of the test is bogus if they aren't, so let's be sure
        assert maisey_event1.group_id == maisey_event2.group_id
        assert maisey_event2.group_id == maisey_event3.group_id
        assert maisey_event3.group_id == maisey_event4.group_id
        self._call_post_process_group(maisey_event1)
        assert mock_derive_code_mappings.delay.call_count == 1
        # second event from group should bail (no increase in call count)
        self._call_post_process_group(maisey_event2)
        assert mock_derive_code_mappings.delay.call_count == 1
        # advance the clock 23 hours and 59 minutes, and it should still bail
        with patch("time.time", return_value=time.time() + (60 * 60 * 23) + (60 * 59)):
            self._call_post_process_group(maisey_event3)
            assert mock_derive_code_mappings.delay.call_count == 1
        # now advance the clock 24 hours and 1 minute, and this time it should go through
        with patch("time.time", return_value=time.time() + (60 * 60 * 24) + (60 * 1)):
            self._call_post_process_group(maisey_event4)
            assert mock_derive_code_mappings.delay.call_count == 2

    @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
    def test_skipping_an_issue_doesnt_mark_it_processed(self, mock_derive_code_mappings):
        # An event skipped only because of the project-level debounce must not
        # be recorded as handled at the group level.
        dogs_project = self.create_project()
        maisey_event = self._create_event(
            {
                "fingerprint": ["themaiseymasieydog"],
            },
            dogs_project.id,
        )
        charlie_event1 = self._create_event(
            {
                "fingerprint": ["charliebear"],
            },
            dogs_project.id,
        )
        charlie_event2 = self._create_event(
            {
                "fingerprint": ["charliebear"],
            },
            dogs_project.id,
        )
        # because of the fingerprint, the two Charlie events should always end up in the same group,
        # but the rest of the test is bogus if they aren't, so let's be sure
        assert charlie_event1.group_id == charlie_event2.group_id
        self._call_post_process_group(maisey_event)
        assert mock_derive_code_mappings.delay.call_count == 1
        # second event from project should bail (no increase in call count)
        self._call_post_process_group(charlie_event1)
        assert mock_derive_code_mappings.delay.call_count == 1
        # now advance the clock 61 minutes (so the project should clear the cache), and another
        # event from the Charlie group should go through
        with patch("time.time", return_value=time.time() + 60 * 61):
            self._call_post_process_group(charlie_event2)
            assert mock_derive_code_mappings.delay.call_count == 2
class RuleProcessorTestMixin(BasePostProgressGroupMixin):
    # Interaction of post-processing with RuleProcessor: argument passing,
    # callback invocation, buffered counter application, and group refresh
    # after merges / buffered last_seen updates.

    @patch("sentry.rules.processor.RuleProcessor")
    def test_rule_processor_backwards_compat(self, mock_processor):
        event = self.create_event(data={}, project_id=self.project.id)
        mock_callback = Mock()
        mock_futures = [Mock()]
        # RuleProcessor.apply() yields (callback, futures) pairs; each
        # callback must be invoked with the event and its futures.
        mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        # Positional args: event, is_new, is_regression, is_new_group_environment, has_reappeared.
        mock_processor.assert_called_once_with(EventMatcher(event), True, False, True, False)
        mock_processor.return_value.apply.assert_called_once_with()
        mock_callback.assert_called_once_with(EventMatcher(event), mock_futures)

    @patch("sentry.rules.processor.RuleProcessor")
    def test_rule_processor(self, mock_processor):
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        mock_callback = Mock()
        mock_futures = [Mock()]
        mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        mock_processor.assert_called_once_with(EventMatcher(event), True, False, True, False)
        mock_processor.return_value.apply.assert_called_once_with()
        mock_callback.assert_called_once_with(EventMatcher(event), mock_futures)

    def test_rule_processor_buffer_values(self):
        # Test that pending buffer values for `times_seen` are applied to the group and that alerts
        # fire as expected
        from sentry.models import Rule

        MOCK_RULES = ("sentry.rules.filters.issue_occurrences.IssueOccurrencesFilter",)
        redis_buffer = RedisBuffer()
        # Route buffer reads/writes through a real RedisBuffer and register a
        # mock action in a freshly initialized rule registry.
        with mock.patch("sentry.buffer.backend.get", redis_buffer.get), mock.patch(
            "sentry.buffer.backend.incr", redis_buffer.incr
        ), patch("sentry.constants._SENTRY_RULES", MOCK_RULES), patch(
            "sentry.rules.processor.rules", init_registry()
        ) as rules:
            MockAction = mock.Mock()
            MockAction.rule_type = "action/event"
            MockAction.id = "tests.sentry.tasks.post_process.tests.MockAction"
            MockAction.return_value.after.return_value = []
            rules.add(MockAction)
            conditions = [
                {
                    "id": "sentry.rules.filters.issue_occurrences.IssueOccurrencesFilter",
                    "value": 10,
                },
            ]
            actions = [{"id": "tests.sentry.tasks.post_process.tests.MockAction"}]
            Rule.objects.filter(project=self.project).delete()
            Rule.objects.create(
                project=self.project, data={"conditions": conditions, "actions": actions}
            )
            event = self.create_event(
                data={"message": "testing", "fingerprint": ["group-1"]}, project_id=self.project.id
            )
            event_2 = self.create_event(
                data={"message": "testing", "fingerprint": ["group-1"]}, project_id=self.project.id
            )
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
            )
            # times_seen=2 alone is below the filter's threshold of 10, so the
            # action must not fire yet.
            event.group.update(times_seen=2)
            assert MockAction.return_value.after.call_count == 0
            # A pending buffered increment of 15 should push the effective
            # count past the threshold on the next post-process run.
            buffer.backend.incr(Group, {"times_seen": 15}, filters={"pk": event.group.id})
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event_2,
            )
            assert MockAction.return_value.after.call_count == 1

    @patch("sentry.rules.processor.RuleProcessor")
    def test_group_refresh(self, mock_processor):
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        group1 = event.group
        group2 = self.create_group(project=self.project)
        assert event.group_id == group1.id
        assert event.group == group1
        # Merge group1 into group2 before post-processing the event.
        with self.tasks():
            merge_groups([group1.id], group2.id)
        mock_callback = Mock()
        mock_futures = [Mock()]
        mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        # Ensure that rule processing sees the merged group.
        mock_processor.assert_called_with(
            EventMatcher(event, group=group2), True, False, True, False
        )

    @patch("sentry.rules.processor.RuleProcessor")
    def test_group_last_seen_buffer(self, mock_processor):
        first_event_date = datetime.now(timezone.utc) - timedelta(days=90)
        event1 = self.create_event(
            data={"message": "testing"},
            project_id=self.project.id,
        )
        group1 = event1.group
        group1.update(last_seen=first_event_date)
        event2 = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        # Mock set the last_seen value to the first event date
        # To simulate the update to last_seen being buffered
        event2.group.last_seen = first_event_date
        event2.group.update(last_seen=first_event_date)
        assert event2.group_id == group1.id
        mock_callback = Mock()
        mock_futures = [Mock()]
        mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
        self.call_post_process_group(
            is_new=False,
            is_regression=True,
            is_new_group_environment=False,
            event=event2,
        )
        mock_processor.assert_called_with(
            EventMatcher(event2, group=group1), False, True, False, False
        )
        sent_group_date = mock_processor.call_args[0][0].group.last_seen
        # Check that last_seen was updated to be at least the new event's date
        self.assertAlmostEqual(sent_group_date, event2.datetime, delta=timedelta(seconds=10))
class ServiceHooksTestMixin(BasePostProgressGroupMixin):
    # Service hooks: delivery is gated by the "projects:servicehooks" feature
    # flag and by which event types the hook subscribes to.

    @patch("sentry.tasks.servicehooks.process_service_hook")
    def test_service_hook_fires_on_new_event(self, mock_process_service_hook):
        event = self.create_event(data={}, project_id=self.project.id)
        hook = self.create_service_hook(
            project=self.project,
            organization=self.project.organization,
            actor=self.user,
            events=["event.created"],
        )
        with self.feature("projects:servicehooks"):
            self.call_post_process_group(
                is_new=False,
                is_regression=False,
                is_new_group_environment=False,
                event=event,
            )
        mock_process_service_hook.delay.assert_called_once_with(
            servicehook_id=hook.id, event=EventMatcher(event)
        )

    @patch("sentry.tasks.servicehooks.process_service_hook")
    @patch("sentry.rules.processor.RuleProcessor")
    def test_service_hook_fires_on_alert(self, mock_processor, mock_process_service_hook):
        event = self.create_event(data={}, project_id=self.project.id)
        mock_callback = Mock()
        mock_futures = [Mock()]
        # A non-empty apply() result means a rule fired, which is what
        # triggers the "event.alert" hook.
        mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
        hook = self.create_service_hook(
            project=self.project,
            organization=self.project.organization,
            actor=self.user,
            events=["event.alert"],
        )
        with self.feature("projects:servicehooks"):
            self.call_post_process_group(
                is_new=False,
                is_regression=False,
                is_new_group_environment=False,
                event=event,
            )
        mock_process_service_hook.delay.assert_called_once_with(
            servicehook_id=hook.id, event=EventMatcher(event)
        )

    @patch("sentry.tasks.servicehooks.process_service_hook")
    @patch("sentry.rules.processor.RuleProcessor")
    def test_service_hook_does_not_fire_without_alert(
        self, mock_processor, mock_process_service_hook
    ):
        event = self.create_event(data={}, project_id=self.project.id)
        # No rules fired, so an "event.alert"-only hook must stay silent.
        mock_processor.return_value.apply.return_value = []
        self.create_service_hook(
            project=self.project,
            organization=self.project.organization,
            actor=self.user,
            events=["event.alert"],
        )
        with self.feature("projects:servicehooks"):
            self.call_post_process_group(
                is_new=False,
                is_regression=False,
                is_new_group_environment=False,
                event=event,
            )
        assert not mock_process_service_hook.delay.mock_calls

    @patch("sentry.tasks.servicehooks.process_service_hook")
    def test_service_hook_does_not_fire_without_event(self, mock_process_service_hook):
        event = self.create_event(data={}, project_id=self.project.id)
        # Hook with an empty event subscription list never fires.
        self.create_service_hook(
            project=self.project, organization=self.project.organization, actor=self.user, events=[]
        )
        with self.feature("projects:servicehooks"):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=False,
                event=event,
            )
        assert not mock_process_service_hook.delay.mock_calls
class ResourceChangeBoundsTestMixin(BasePostProgressGroupMixin):
    # process_resource_change_bound scheduling: fires for new groups
    # unconditionally, and for error events only behind the
    # "organizations:integrations-event-hooks" feature with a matching
    # "error.created" service-hook subscription.

    @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
    def test_processes_resource_change_task_on_new_group(self, delay):
        event = self.create_event(data={}, project_id=self.project.id)
        group = event.group
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        delay.assert_called_once_with(action="created", sender="Group", instance_id=group.id)

    @with_feature("organizations:integrations-event-hooks")
    @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
    def test_processes_resource_change_task_on_error_events(self, delay):
        event = self.create_event(
            data={
                "message": "Foo bar",
                "exception": {"type": "Foo", "value": "oh no"},
                "level": "error",
                "timestamp": iso_format(django_timezone.now()),
            },
            project_id=self.project.id,
            assert_no_errors=False,
        )
        self.create_service_hook(
            project=self.project,
            organization=self.project.organization,
            actor=self.user,
            events=["error.created"],
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        delay.assert_called_once_with(
            action="created",
            sender="Error",
            instance_id=event.event_id,
            instance=EventMatcher(event),
        )

    @with_feature("organizations:integrations-event-hooks")
    @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
    def test_processes_resource_change_task_not_called_for_non_errors(self, delay):
        # level "info" is not an error, so no task even with the feature on.
        event = self.create_event(
            data={
                "message": "Foo bar",
                "level": "info",
                "timestamp": iso_format(django_timezone.now()),
            },
            project_id=self.project.id,
            assert_no_errors=False,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assert not delay.called

    @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
    def test_processes_resource_change_task_not_called_without_feature_flag(self, delay):
        # Same event as above but without the feature flag: still no task.
        event = self.create_event(
            data={
                "message": "Foo bar",
                "level": "info",
                "timestamp": iso_format(django_timezone.now()),
            },
            project_id=self.project.id,
            assert_no_errors=False,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assert not delay.called

    @with_feature("organizations:integrations-event-hooks")
    @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
    def test_processes_resource_change_task_not_called_without_error_created(self, delay):
        # Error event + feature flag, but the hook subscribes to no events.
        event = self.create_event(
            data={
                "message": "Foo bar",
                "level": "error",
                "exception": {"type": "Foo", "value": "oh no"},
                "timestamp": iso_format(django_timezone.now()),
            },
            project_id=self.project.id,
            assert_no_errors=False,
        )
        self.create_service_hook(
            project=self.project, organization=self.project.organization, actor=self.user, events=[]
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assert not delay.called
class InboxTestMixin(BasePostProgressGroupMixin):
    """Tests that post processing writes the expected GroupInbox entries."""

    @patch("sentry.rules.processor.RuleProcessor")
    def test_group_inbox_regression(self, mock_processor):
        """A new group gets a NEW inbox entry; after being resolved, a fresh
        event flips it to REGRESSED and records a REGRESSION inbox entry."""
        new_event = self.create_event(data={"message": "testing"}, project_id=self.project.id)

        group = new_event.group
        assert group.status == GroupStatus.UNRESOLVED
        assert group.substatus == GroupSubStatus.ONGOING

        # First pass: brand-new group.
        self.call_post_process_group(
            is_new=True,
            is_regression=True,
            is_new_group_environment=False,
            event=new_event,
        )
        assert GroupInbox.objects.filter(group=group, reason=GroupInboxReason.NEW.value).exists()
        GroupInbox.objects.filter(
            group=group
        ).delete()  # Delete so it creates the .REGRESSION entry.
        group.refresh_from_db()
        assert group.status == GroupStatus.UNRESOLVED
        assert group.substatus == GroupSubStatus.NEW

        # RuleProcessor called with (event, is_new, is_regression,
        # is_new_group_environment, has_reappeared).
        mock_processor.assert_called_with(EventMatcher(new_event), True, True, False, False)

        # resolve the new issue so regression actually happens
        group.status = GroupStatus.RESOLVED
        group.substatus = None
        group.active_at = group.active_at - timedelta(minutes=1)
        group.save(update_fields=["status", "substatus", "active_at"])

        # trigger a transition from resolved to regressed by firing an event that groups to that issue
        regressed_event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        assert regressed_event.group == new_event.group

        group = regressed_event.group
        group.refresh_from_db()
        assert group.status == GroupStatus.UNRESOLVED
        assert group.substatus == GroupSubStatus.REGRESSED
        self.call_post_process_group(
            is_new=False,
            is_regression=True,
            is_new_group_environment=False,
            event=regressed_event,
        )

        mock_processor.assert_called_with(EventMatcher(regressed_event), False, True, False, False)

        group.refresh_from_db()
        assert group.status == GroupStatus.UNRESOLVED
        assert group.substatus == GroupSubStatus.REGRESSED
        assert GroupInbox.objects.filter(
            group=group, reason=GroupInboxReason.REGRESSION.value
        ).exists()
class AssignmentTestMixin(BasePostProgressGroupMixin):
    """Tests for issue-owner calculation and auto-assignment during post
    processing: ownership rules, codeowners, debounce caching and rate limits."""

    def make_ownership(self, extra_rules=None):
        """Create a ProjectOwnership for self.project with three path rules
        (team, self.user, a second user); ``extra_rules`` are appended last.

        Side effects: creates ``self.user_2`` (team member) and stores the
        ownership on ``self.prj_ownership``.
        """
        self.user_2 = self.create_user()
        self.create_team_membership(team=self.team, user=self.user_2)
        rules = [
            Rule(Matcher("path", "src/app/*"), [Owner("team", self.team.name)]),
            Rule(Matcher("path", "src/*"), [Owner("user", self.user.email)]),
            Rule(Matcher("path", "tests/*"), [Owner("user", self.user_2.email)]),
        ]

        if extra_rules:
            rules.extend(extra_rules)

        self.prj_ownership = ProjectOwnership.objects.create(
            project_id=self.project.id,
            schema=dump_schema(rules),
            fallthrough=True,
            auto_assignment=True,
        )

    def test_owner_assignment_order_precedence(self):
        """When multiple rules match, the user rule wins the assignment while
        all matching owners (user and team) are recorded as GroupOwners."""
        self.make_ownership()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                # Matches both "src/app/*" (team) and "src/*" (self.user).
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id == self.user.id
        assert assignee.team is None

        owners = list(GroupOwner.objects.filter(group=event.group))
        assert len(owners) == 2
        assert {(self.user.id, None), (None, self.team.id)} == {
            (o.user_id, o.team_id) for o in owners
        }
        activity = Activity.objects.filter(group=event.group).first()
        assert activity.data == {
            "assignee": str(self.user.id),
            "assigneeEmail": self.user.email,
            "assigneeType": "user",
            "integration": ActivityIntegration.PROJECT_OWNERSHIP.value,
            "rule": str(Rule(Matcher("path", "src/*"), [Owner("user", self.user.email)])),
        }

    def test_owner_assignment_extra_groups(self):
        """An extra, more specific rule's user wins assignment; both matching
        user owners are recorded."""
        extra_user = self.create_user()
        self.create_team_membership(self.team, user=extra_user)
        self.make_ownership(
            [Rule(Matcher("path", "src/app/things/in/*"), [Owner("user", extra_user.email)])],
        )
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/things/in/a/path/example2.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id == extra_user.id
        assert assignee.team is None

        owners = list(GroupOwner.objects.filter(group=event.group))
        assert len(owners) == 2
        assert {(extra_user.id, None), (self.user.id, None)} == {
            (o.user_id, o.team_id) for o in owners
        }

    def test_owner_assignment_existing_owners(self):
        """A newly matching team rule takes over assignment even when a user
        GroupOwner already exists for the group."""
        extra_team = self.create_team()
        ProjectTeam.objects.create(team=extra_team, project=self.project)

        self.make_ownership(
            [Rule(Matcher("path", "src/app/things/in/*"), [Owner("team", extra_team.slug)])],
        )
        # Pre-existing ownership-rule owner for self.user.
        GroupOwner.objects.create(
            group=self.group,
            project=self.project,
            organization=self.organization,
            user_id=self.user.id,
            type=GroupOwnerType.OWNERSHIP_RULE.value,
        )
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/things/in/a/path/example2.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id is None
        assert assignee.team == extra_team

        owners = list(GroupOwner.objects.filter(group=event.group))
        assert {(None, extra_team.id), (self.user.id, None)} == {
            (o.user_id, o.team_id) for o in owners
        }

    def test_owner_assignment_assign_user(self):
        """A single matching user rule auto-assigns that user."""
        self.make_ownership()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id == self.user.id
        assert assignee.team is None

    def test_owner_assignment_ownership_no_matching_owners(self):
        """With no ownership rules configured, nothing is auto-assigned."""
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assert not event.group.assignee_set.exists()

    def test_owner_assignment_existing_assignment(self):
        """An existing manual assignment is preserved; ownership rules do not
        override it."""
        self.make_ownership()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        event.group.assignee_set.create(team=self.team, project=self.project)
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id is None
        assert assignee.team == self.team

    def test_only_first_assignment_works(self):
        """Once auto-assigned, later events matching a different rule (same
        fingerprint/group) must not change the assignee."""
        self.make_ownership()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
                "fingerprint": ["group1"],
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id == self.user.id
        assert assignee.team is None

        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                # Matches the "tests/*" rule (user_2), but same group as above.
                "stacktrace": {"frames": [{"filename": "tests/src/app/test_example.py"}]},
                "fingerprint": ["group1"],
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        # Assignment shouldn't change.
        assert assignee.user_id == self.user.id
        assert assignee.team is None

    def test_owner_assignment_owner_is_gone(self):
        """If the matched owner no longer exists, no assignment is made."""
        self.make_ownership()
        # Remove the team so the rule match will fail to resolve
        self.team.delete()

        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee is None

    def test_suspect_committer_affect_cache_debouncing_issue_owners_calculations(self):
        """A pre-existing SUSPECT_COMMIT owner must not prevent the debounce
        caches from letting an existing manual assignment stand."""
        self.make_ownership()
        # Pre-existing suspect-commit owner on the fixture event's group.
        committer = GroupOwner(
            group=self.created_event.group,
            project=self.created_event.project,
            organization=self.created_event.project.organization,
            type=GroupOwnerType.SUSPECT_COMMIT.value,
        )
        committer.save()

        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        event.group.assignee_set.create(team=self.team, project=self.project)
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id is None
        assert assignee.team == self.team

    def test_owner_assignment_when_owners_have_been_unassigned(self):
        """
        Test that ensures that if certain assignees get unassigned, and project rules are changed
        then the new group assignees should be re-calculated and re-assigned
        """
        # Create rules and check assignees
        self.make_ownership()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        event_2 = self.create_event(
            data={
                "message": "Exception",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/integration.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event_2,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id == self.user.id

        user_3 = self.create_user()
        self.create_team_membership(self.team, user=user_3)

        # De-assign group assignees
        GroupAssignee.objects.deassign(event.group, self.user)
        assert event.group.assignee_set.first() is None

        # Change ProjectOwnership rules
        rules = [
            Rule(Matcher("path", "src/*"), [Owner("user", user_3.email)]),
        ]
        self.prj_ownership.schema = dump_schema(rules)
        self.prj_ownership.save()

        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event_2,
        )

        # Group should be re-assigned to the new group owner
        assignee = event.group.assignee_set.first()
        assert assignee.user_id == user_3.id

        # De-assign group assignees
        GroupAssignee.objects.deassign(event.group, user_service.get_user(user_id=assignee.user_id))
        assert event.group.assignee_set.first() is None

        user_4 = self.create_user()
        self.create_team_membership(self.team, user=user_4)
        self.prj_ownership.schema = dump_schema([])
        self.prj_ownership.save()

        # Switch to a codeowners rule; it should drive the next re-assignment.
        code_owners_rule = Rule(
            Matcher("codeowners", "*.py"),
            [Owner("user", user_4.email)],
        )

        self.code_mapping = self.create_code_mapping(project=self.project)
        self.code_owners = self.create_codeowners(
            self.project,
            self.code_mapping,
            schema=dump_schema([code_owners_rule]),
        )

        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event_2,
        )

        # Group should be re-assigned to the new group owner
        assignee = event.group.assignee_set.first()
        assert assignee.user_id == user_4.id

    def test_auto_assignment_when_owners_have_been_unassigned(self):
        """
        Test that ensures that if assignee gets unassigned and project rules are changed,
        then the new group assignees should be re-calculated and re-assigned
        """
        # Create rules and check assignees
        self.make_ownership()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )

        assignee = (
            GroupOwner.objects.filter()
            .exclude(user_id__isnull=True, team_id__isnull=True)
            .order_by("type")
            .first()
        )
        assert assignee.user_id == self.user.id

        user_3 = self.create_user()
        self.create_team_membership(self.team, user=user_3)

        # Set assignee_exists cache to self.user
        cache.set(ASSIGNEE_EXISTS_KEY(event.group_id), self.user, ASSIGNEE_EXISTS_DURATION)
        # De-assign group assignees
        GroupAssignee.objects.deassign(event.group, self.user)
        assert event.group.assignee_set.first() is None

        # Change ProjectOwnership rules
        rules = [
            Rule(Matcher("path", "src/*"), [Owner("user", user_3.email)]),
        ]
        self.prj_ownership.schema = dump_schema(rules)
        self.prj_ownership.save()

        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )

        # Mimic filter used in get_autoassigned_owner_cached to get the issue owner to be
        # auto-assigned
        assignee = (
            GroupOwner.objects.filter()
            .exclude(user_id__isnull=True, team_id__isnull=True)
            .order_by("type")
            .first()
        )
        # Group should be re-assigned to the new group owner
        assert assignee.user_id == user_3.id

    def test_ensure_when_assignees_and_owners_are_cached_does_not_cause_unbound_errors(self):
        """Post processing must not raise when both debounce caches are
        already populated for the group."""
        self.make_ownership()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app.py"}]},
            },
            project_id=self.project.id,
        )

        # NOTE(review): cache key format is hard-coded here — confirm it stays
        # in sync with the keys built in sentry.tasks.post_process.
        assignee_cache_key = "assignee_exists:1:%s" % event.group.id
        owner_cache_key = "owner_exists:1:%s" % event.group.id

        for key in [assignee_cache_key, owner_cache_key]:
            cache.set(key, True)

        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )

    def test_auto_assignment_when_owners_are_invalid(self):
        """
        Test that invalid group owners (that exist due to bugs) are deleted and not assigned
        when no valid issue owner exists
        """
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        # Hard code an invalid group owner
        invalid_codeowner = GroupOwner(
            group=event.group,
            project=event.project,
            organization=event.project.organization,
            type=GroupOwnerType.CODEOWNERS.value,
            context={"rule": "codeowners:/**/*.css " + self.user.email},
            user_id=self.user.id,
        )
        invalid_codeowner.save()
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee is None
        assert len(GroupOwner.objects.filter(group_id=event.group)) == 0

    @patch("sentry.tasks.post_process.logger")
    def test_debounces_handle_owner_assignments(self, logger):
        """When the issue-owners debounce key is set, owner assignment is
        skipped and the skip is logged."""
        self.make_ownership()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app.py"}]},
            },
            project_id=self.project.id,
        )
        cache.set(ISSUE_OWNERS_DEBOUNCE_KEY(event.group_id), True, ISSUE_OWNERS_DEBOUNCE_DURATION)
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        logger.info.assert_any_call(
            "handle_owner_assignment.issue_owners_exist",
            extra={
                "event": event.event_id,
                "group": event.group_id,
                "project": event.project_id,
                "organization": event.project.organization_id,
                "reason": "issue_owners_exist",
            },
        )

    @patch("sentry.tasks.post_process.logger")
    def test_issue_owners_should_ratelimit(self, logger):
        """When the per-project ratelimiter window is saturated, owner
        assignment is skipped and the ratelimit is logged."""
        # Pre-fill the ratelimiter window to exactly the per-minute limit.
        cache.set(
            f"issue_owner_assignment_ratelimiter:{self.project.id}",
            (set(range(0, ISSUE_OWNERS_PER_PROJECT_PER_MIN_RATELIMIT * 10, 10)), datetime.now()),
        )
        cache.set(f"commit-context-scm-integration:{self.project.organization_id}", True, 60)
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        logger.info.assert_any_call(
            "handle_owner_assignment.ratelimited",
            extra={
                "event": event.event_id,
                "group": event.group_id,
                "project": event.project_id,
                "organization": event.project.organization_id,
                "reason": "ratelimited",
            },
        )
class ProcessCommitsTestMixin(BasePostProgressGroupMixin):
    """Tests for suspect-commit (commit context) processing during post
    processing, with the GitHub blame API mocked."""

    # Canned blame payload returned by the mocked
    # GitHubIntegration.get_commit_context; key "asdfwreqr" matches the
    # commit created in setUp.
    github_blame_return_value = {
        "commitId": "asdfwreqr",
        "committedDate": (datetime.now(timezone.utc) - timedelta(days=2)),
        "commitMessage": "placeholder commit message",
        "commitAuthorName": "",
        "commitAuthorEmail": "admin@localhost",
    }

    def setUp(self):
        """Create an event with an in-app frame under src/, plus the repo,
        code mapping, commit author and commit needed for blame matching.

        NOTE(review): does not call super().setUp() — confirm the mixin
        chain does not rely on it.
        """
        self.created_event = self.create_event(
            data={
                "message": "Kaboom!",
                "platform": "python",
                "timestamp": iso_format(before_now(seconds=10)),
                "stacktrace": {
                    "frames": [
                        {
                            "function": "handle_set_commits",
                            "abs_path": "/usr/src/sentry/src/sentry/tasks.py",
                            "module": "sentry.tasks",
                            "in_app": False,
                            "lineno": 30,
                            "filename": "sentry/tasks.py",
                        },
                        {
                            "function": "set_commits",
                            "abs_path": "/usr/src/sentry/src/sentry/models/release.py",
                            "module": "sentry.models.release",
                            "in_app": True,
                            "lineno": 39,
                            "filename": "sentry/models/release.py",
                        },
                    ]
                },
                "fingerprint": ["put-me-in-the-control-group"],
            },
            project_id=self.project.id,
        )
        self.cache_key = write_event_to_cache(self.created_event)
        self.repo = self.create_repo(
            name="example",
            integration_id=self.integration.id,
        )
        # stack_root "src/" maps the in-app frame's abs_path into the repo.
        self.code_mapping = self.create_code_mapping(
            repo=self.repo, project=self.project, stack_root="src/"
        )
        self.commit_author = self.create_commit_author(project=self.project, user=self.user)
        self.commit = self.create_commit(
            project=self.project,
            repo=self.repo,
            author=self.commit_author,
            key="asdfwreqr",
            message="placeholder commit message",
        )

    @with_feature("organizations:commit-context")
    @patch(
        "sentry.integrations.github.GitHubIntegration.get_commit_context",
        return_value=github_blame_return_value,
    )
    def test_debounce_cache_is_set(self, mock_get_commit_context):
        """A successful commit-context run creates a SUSPECT_COMMIT owner and
        sets the per-group debounce cache key."""
        with self.tasks():
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=self.created_event,
            )
        assert GroupOwner.objects.get(
            group=self.created_event.group,
            project=self.created_event.project,
            organization=self.created_event.project.organization,
            type=GroupOwnerType.SUSPECT_COMMIT.value,
        )
        assert cache.has_key(f"process-commit-context-{self.created_event.group_id}")

    @with_feature("organizations:commit-context")
    @patch(
        "sentry.integrations.github.GitHubIntegration.get_commit_context",
        return_value=github_blame_return_value,
    )
    def test_logic_fallback_no_scm(self, mock_get_commit_context):
        """With no supported SCM integration (only bitbucket), commit-context
        processing is skipped and the debounce key is never set."""
        with unguarded_write(using=router.db_for_write(Integration)):
            Integration.objects.all().delete()
        integration = Integration.objects.create(provider="bitbucket")
        integration.add_organization(self.organization)
        with self.tasks():
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=self.created_event,
            )
        assert not cache.has_key(f"process-commit-context-{self.created_event.group_id}")
class SnoozeTestMixin(BasePostProgressGroupMixin):
    """Tests for GroupSnooze handling during post processing: expiring
    snoozes, buffered counts, and escalation transitions."""

    @with_feature("organizations:escalating-issues")
    @patch("sentry.signals.issue_escalating.send_robust")
    @patch("sentry.signals.issue_unignored.send_robust")
    @patch("sentry.rules.processor.RuleProcessor")
    def test_invalidates_snooze(
        self, mock_processor, mock_send_unignored_robust, mock_send_escalating_robust
    ):
        """An expired time-based snooze is removed and the group transitions to
        UNRESOLVED/ESCALATING, firing the escalating and unignored signals."""
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)

        group = event.group

        # Check for has_reappeared=False if is_new=True
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        assert GroupInbox.objects.filter(group=group, reason=GroupInboxReason.NEW.value).exists()
        GroupInbox.objects.filter(group=group).delete()  # Delete so it creates the UNIGNORED entry.
        Activity.objects.filter(group=group).delete()

        mock_processor.assert_called_with(EventMatcher(event), True, False, True, False)

        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        group.status = GroupStatus.IGNORED
        group.substatus = GroupSubStatus.UNTIL_CONDITION_MET
        group.save(update_fields=["status", "substatus"])
        # Snooze expired an hour ago, so it must be invalidated by the event.
        snooze = GroupSnooze.objects.create(
            group=group, until=django_timezone.now() - timedelta(hours=1)
        )

        # Check for has_reappeared=True if is_new=False
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )

        mock_processor.assert_called_with(EventMatcher(event), False, False, True, True)
        mock_send_escalating_robust.assert_called_once_with(
            project=group.project,
            group=group,
            event=EventMatcher(event),
            sender=manage_issue_states,
            was_until_escalating=False,
        )
        assert not GroupSnooze.objects.filter(id=snooze.id).exists()

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.UNRESOLVED
        assert group.substatus == GroupSubStatus.ESCALATING
        assert GroupInbox.objects.filter(
            group=group, reason=GroupInboxReason.ESCALATING.value
        ).exists()
        assert Activity.objects.filter(
            group=group, project=group.project, type=ActivityType.SET_ESCALATING.value
        ).exists()
        assert mock_send_unignored_robust.called

    @override_settings(SENTRY_BUFFER="sentry.buffer.redis.RedisBuffer")
    @patch("sentry.signals.issue_unignored.send_robust")
    @patch("sentry.rules.processor.RuleProcessor")
    def test_invalidates_snooze_with_buffers(self, mock_processor, send_robust):
        """A count-based snooze survives until buffered times_seen pushes the
        group past the snooze threshold, then it is invalidated."""
        redis_buffer = RedisBuffer()
        with mock.patch("sentry.buffer.backend.get", redis_buffer.get), mock.patch(
            "sentry.buffer.backend.incr", redis_buffer.incr
        ):
            event = self.create_event(
                data={"message": "testing", "fingerprint": ["group-1"]}, project_id=self.project.id
            )
            event_2 = self.create_event(
                data={"message": "testing", "fingerprint": ["group-1"]}, project_id=self.project.id
            )
            group = event.group
            group.times_seen = 50
            group.status = GroupStatus.IGNORED
            group.substatus = GroupSubStatus.UNTIL_CONDITION_MET
            group.save(update_fields=["times_seen", "status", "substatus"])
            # Snooze until the group has been seen 100 times.
            snooze = GroupSnooze.objects.create(group=group, count=100, state={"times_seen": 0})

            self.call_post_process_group(
                is_new=False,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
            )
            assert GroupSnooze.objects.filter(id=snooze.id).exists()

            # Buffered increment pushes times_seen past the snooze count.
            buffer.backend.incr(Group, {"times_seen": 60}, filters={"pk": event.group.id})
            self.call_post_process_group(
                is_new=False,
                is_regression=False,
                is_new_group_environment=True,
                event=event_2,
            )
            assert not GroupSnooze.objects.filter(id=snooze.id).exists()

    @patch("sentry.rules.processor.RuleProcessor")
    def test_maintains_valid_snooze(self, mock_processor):
        """A snooze that has not yet expired is left in place."""
        event = self.create_event(data={}, project_id=self.project.id)
        group = event.group
        assert group.status == GroupStatus.UNRESOLVED
        assert group.substatus == GroupSubStatus.ONGOING
        snooze = GroupSnooze.objects.create(
            group=group, until=django_timezone.now() + timedelta(hours=1)
        )

        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )

        mock_processor.assert_called_with(EventMatcher(event), True, False, True, False)
        assert GroupSnooze.objects.filter(id=snooze.id).exists()
        group.refresh_from_db()
        assert group.status == GroupStatus.UNRESOLVED
        assert group.substatus == GroupSubStatus.NEW

    @with_feature("organizations:escalating-issues")
    @patch("sentry.issues.escalating.is_escalating", return_value=(True, 0))
    def test_forecast_in_activity(self, mock_is_escalating):
        """
        Test that the forecast is added to the activity for escalating issues that were
        previously ignored until_escalating.
        """
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        group = event.group
        group.status = GroupStatus.IGNORED
        group.substatus = GroupSubStatus.UNTIL_ESCALATING
        group.save()
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )

        assert Activity.objects.filter(
            group=group,
            project=group.project,
            type=ActivityType.SET_ESCALATING.value,
            data={"event_id": event.event_id, "forecast": 0},
        ).exists()
  1399. @patch("sentry.utils.sdk_crashes.sdk_crash_detection.sdk_crash_detection")
  1400. class SDKCrashMonitoringTestMixin(BasePostProgressGroupMixin):
  1401. @with_feature("organizations:sdk-crash-detection")
  1402. @override_settings(SDK_CRASH_DETECTION_PROJECT_ID=1234)
  1403. @override_settings(SDK_CRASH_DETECTION_SAMPLE_RATE=0.1234)
  1404. def test_sdk_crash_monitoring_is_called(self, mock_sdk_crash_detection):
  1405. event = self.create_event(
  1406. data={"message": "testing"},
  1407. project_id=self.project.id,
  1408. )
  1409. self.call_post_process_group(
  1410. is_new=True,
  1411. is_regression=False,
  1412. is_new_group_environment=True,
  1413. event=event,
  1414. )
  1415. mock_sdk_crash_detection.detect_sdk_crash.assert_called_once()
  1416. args = mock_sdk_crash_detection.detect_sdk_crash.call_args[-1]
  1417. assert args["event"].project.id == event.project.id
  1418. assert args["event_project_id"] == 1234
  1419. assert args["sample_rate"] == 0.1234
  1420. def test_sdk_crash_monitoring_is_not_called_with_disabled_feature(
  1421. self, mock_sdk_crash_detection
  1422. ):
  1423. event = self.create_event(
  1424. data={"message": "testing"},
  1425. project_id=self.project.id,
  1426. )
  1427. self.call_post_process_group(
  1428. is_new=True,
  1429. is_regression=False,
  1430. is_new_group_environment=True,
  1431. event=event,
  1432. )
  1433. mock_sdk_crash_detection.detect_sdk_crash.assert_not_called()
  1434. @with_feature("organizations:sdk-crash-detection")
  1435. def test_sdk_crash_monitoring_is_not_called_without_project_id(self, mock_sdk_crash_detection):
  1436. event = self.create_event(
  1437. data={"message": "testing"},
  1438. project_id=self.project.id,
  1439. )
  1440. self.call_post_process_group(
  1441. is_new=True,
  1442. is_regression=False,
  1443. is_new_group_environment=True,
  1444. event=event,
  1445. )
  1446. mock_sdk_crash_detection.detect_sdk_crash.assert_not_called()
  1447. @mock.patch.object(replays_kafka, "get_kafka_producer_cluster_options")
  1448. @mock.patch.object(replays_kafka, "KafkaPublisher")
  1449. @mock.patch("sentry.utils.metrics.incr")
  1450. class ReplayLinkageTestMixin(BasePostProgressGroupMixin):
  1451. def test_replay_linkage(self, incr, kafka_producer, kafka_publisher):
  1452. replay_id = uuid.uuid4().hex
  1453. event = self.create_event(
  1454. data={"message": "testing", "contexts": {"replay": {"replay_id": replay_id}}},
  1455. project_id=self.project.id,
  1456. )
  1457. with self.feature({"organizations:session-replay-event-linking": True}):
  1458. self.call_post_process_group(
  1459. is_new=True,
  1460. is_regression=False,
  1461. is_new_group_environment=True,
  1462. event=event,
  1463. )
  1464. assert kafka_producer.return_value.publish.call_count == 1
  1465. assert kafka_producer.return_value.publish.call_args[0][0] == "ingest-replay-events"
  1466. ret_value = json.loads(kafka_producer.return_value.publish.call_args[0][1])
  1467. assert ret_value["type"] == "replay_event"
  1468. assert ret_value["start_time"] == int(event.datetime.timestamp())
  1469. assert ret_value["replay_id"] == replay_id
  1470. assert ret_value["project_id"] == self.project.id
  1471. assert ret_value["segment_id"] is None
  1472. assert ret_value["retention_days"] == 90
  1473. # convert ret_value_payload which is a list of bytes to a string
  1474. ret_value_payload = json.loads(bytes(ret_value["payload"]).decode("utf-8"))
  1475. assert ret_value_payload == {
  1476. "type": "event_link",
  1477. "replay_id": replay_id,
  1478. "error_id": event.event_id,
  1479. "timestamp": int(event.datetime.timestamp()),
  1480. "event_hash": str(uuid.UUID(md5((event.event_id).encode("utf-8")).hexdigest())),
  1481. }
  1482. incr.assert_any_call("post_process.process_replay_link.id_sampled")
  1483. incr.assert_any_call("post_process.process_replay_link.id_exists")
  1484. def test_no_replay(self, incr, kafka_producer, kafka_publisher):
  1485. event = self.create_event(
  1486. data={"message": "testing"},
  1487. project_id=self.project.id,
  1488. )
  1489. with self.feature({"organizations:session-replay-event-linking": True}):
  1490. self.call_post_process_group(
  1491. is_new=True,
  1492. is_regression=False,
  1493. is_new_group_environment=True,
  1494. event=event,
  1495. )
  1496. assert kafka_producer.return_value.publish.call_count == 0
  1497. incr.assert_called_with("post_process.process_replay_link.id_sampled")
  1498. def test_0_sample_rate_replays(self, incr, kafka_producer, kafka_publisher):
  1499. event = self.create_event(
  1500. data={"message": "testing"},
  1501. project_id=self.project.id,
  1502. )
  1503. with self.feature({"organizations:session-replay-event-linking": False}):
  1504. self.call_post_process_group(
  1505. is_new=True,
  1506. is_regression=False,
  1507. is_new_group_environment=True,
  1508. event=event,
  1509. )
  1510. assert kafka_producer.return_value.publish.call_count == 0
  1511. for args, _ in incr.call_args_list:
  1512. self.assertNotEqual(args, ("post_process.process_replay_link.id_sampled"))
  1513. @region_silo_test
  1514. class PostProcessGroupErrorTest(
  1515. TestCase,
  1516. AssignmentTestMixin,
  1517. ProcessCommitsTestMixin,
  1518. CorePostProcessGroupTestMixin,
  1519. DeriveCodeMappingsProcessGroupTestMixin,
  1520. InboxTestMixin,
  1521. ResourceChangeBoundsTestMixin,
  1522. RuleProcessorTestMixin,
  1523. ServiceHooksTestMixin,
  1524. SnoozeTestMixin,
  1525. SDKCrashMonitoringTestMixin,
  1526. ReplayLinkageTestMixin,
  1527. ):
  1528. def create_event(self, data, project_id, assert_no_errors=True):
  1529. return self.store_event(data=data, project_id=project_id, assert_no_errors=assert_no_errors)
  1530. def call_post_process_group(
  1531. self, is_new, is_regression, is_new_group_environment, event, cache_key=None
  1532. ):
  1533. if cache_key is None:
  1534. cache_key = write_event_to_cache(event)
  1535. post_process_group(
  1536. is_new=is_new,
  1537. is_regression=is_regression,
  1538. is_new_group_environment=is_new_group_environment,
  1539. cache_key=cache_key,
  1540. group_id=event.group_id,
  1541. project_id=event.project_id,
  1542. )
  1543. return cache_key
  1544. @with_feature("organizations:escalating-metrics-backend")
  1545. @patch("sentry.sentry_metrics.client.generic_metrics_backend.counter")
  1546. def test_generic_metrics_backend_counter(self, generic_metrics_backend_mock):
  1547. min_ago = iso_format(before_now(minutes=1))
  1548. event = self.create_event(
  1549. data={
  1550. "exception": {
  1551. "values": [
  1552. {
  1553. "type": "ZeroDivisionError",
  1554. "stacktrace": {"frames": [{"function": f} for f in ["a", "b"]]},
  1555. }
  1556. ]
  1557. },
  1558. "timestamp": min_ago,
  1559. "start_timestamp": min_ago,
  1560. "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
  1561. },
  1562. project_id=self.project.id,
  1563. )
  1564. self.call_post_process_group(
  1565. is_new=True, is_regression=False, is_new_group_environment=True, event=event
  1566. )
  1567. assert generic_metrics_backend_mock.call_count == 1
@region_silo_test
class PostProcessGroupPerformanceTest(
    TestCase,
    SnubaTestCase,
    PerfIssueTransactionTestMixin,
    CorePostProcessGroupTestMixin,
    InboxTestMixin,
    RuleProcessorTestMixin,
    SnoozeTestMixin,
    PerformanceIssueTestCase,
):
    """Runs the shared post-process mixins against performance (N+1 query) issues."""

    def create_event(self, data, project_id, assert_no_errors=True):
        # Performance issues are identified by a type-prefixed fingerprint;
        # derive one from the caller-provided fingerprint (or a fixed fallback).
        fingerprint = data["fingerprint"][0] if data.get("fingerprint") else "some_group"
        fingerprint = f"{PerformanceNPlusOneGroupType.type_id}-{fingerprint}"
        return self.create_performance_issue(fingerprint=fingerprint)

    def call_post_process_group(
        self, is_new, is_regression, is_new_group_environment, event, cache_key=None
    ):
        # Performance events carry their state via the group_states list in
        # addition to the top-level is_new/is_regression flags; only build it
        # when the event actually has a group.
        group_states = (
            [
                {
                    "id": event.group_id,
                    "is_new": is_new,
                    "is_regression": is_regression,
                    "is_new_group_environment": is_new_group_environment,
                }
            ]
            if event.group_id
            else None
        )
        if cache_key is None:
            cache_key = write_event_to_cache(event)
        with self.feature(PerformanceNPlusOneGroupType.build_post_process_group_feature_name()):
            post_process_group(
                is_new=is_new,
                is_regression=is_regression,
                is_new_group_environment=is_new_group_environment,
                cache_key=cache_key,
                group_states=group_states,
                project_id=event.project_id,
            )
        return cache_key

    @patch("sentry.sentry_metrics.client.generic_metrics_backend.counter")
    @patch("sentry.tasks.post_process.run_post_process_job")
    @patch("sentry.rules.processor.RuleProcessor")
    @patch("sentry.signals.transaction_processed.send_robust")
    @patch("sentry.signals.event_processed.send_robust")
    def test_process_transaction_event_with_no_group(
        self,
        event_processed_signal_mock,
        transaction_processed_signal_mock,
        mock_processor,
        run_post_process_job_mock,
        generic_metrics_backend_mock,
    ):
        # A plain transaction with no fingerprint produces no group, so only
        # the transaction_processed signal may fire — no group-level work.
        min_ago = before_now(minutes=1).replace(tzinfo=timezone.utc)
        event = self.store_transaction(
            project_id=self.project.id,
            user_id=self.create_user(name="user1").name,
            fingerprint=[],
            environment=None,
            timestamp=min_ago,
        )
        assert len(event.groups) == 0
        cache_key = write_event_to_cache(event)
        post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            cache_key=cache_key,
            group_id=None,
            group_states=None,
        )
        assert transaction_processed_signal_mock.call_count == 1
        assert event_processed_signal_mock.call_count == 0
        assert mock_processor.call_count == 0
        assert run_post_process_job_mock.call_count == 0
        assert generic_metrics_backend_mock.call_count == 0

    @patch("sentry.tasks.post_process.handle_owner_assignment")
    @patch("sentry.tasks.post_process.handle_auto_assignment")
    @patch("sentry.tasks.post_process.process_rules")
    @patch("sentry.tasks.post_process.run_post_process_job")
    @patch("sentry.rules.processor.RuleProcessor")
    @patch("sentry.signals.transaction_processed.send_robust")
    @patch("sentry.signals.event_processed.send_robust")
    def test_full_pipeline_with_group_states(
        self,
        event_processed_signal_mock,
        transaction_processed_signal_mock,
        mock_processor,
        run_post_process_job_mock,
        mock_process_rules,
        mock_handle_auto_assignment,
        mock_handle_owner_assignment,
    ):
        # A performance issue with explicit group_states runs the full
        # post-process pipeline exactly once.
        event = self.create_performance_issue()
        assert event.group
        # cache_key = write_event_to_cache(event)
        group_state = dict(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
        )
        # TODO(jangjodi): Fix this ordering test; side_effects should be a function (lambda),
        # but because post-processing is async, this causes the assert to fail because it doesn't
        # wait for the side effects to happen
        call_order = [mock_handle_owner_assignment, mock_handle_auto_assignment, mock_process_rules]
        mock_handle_owner_assignment.side_effect = None
        mock_handle_auto_assignment.side_effect = None
        mock_process_rules.side_effect = None
        post_process_group(
            **group_state,
            cache_key="dummykey",
            group_id=event.group_id,
            group_states=[{"id": event.group.id, **group_state}],
            occurrence_id=event.occurrence_id,
            project_id=self.project.id,
        )
        assert transaction_processed_signal_mock.call_count == 1
        assert event_processed_signal_mock.call_count == 0
        assert mock_processor.call_count == 0
        assert run_post_process_job_mock.call_count == 1
        # NOTE(review): with the side_effects nulled above, call_order is never
        # mutated, so this compares the pre-built list to itself — per the TODO,
        # the invocation order is not actually verified here.
        assert call_order == [
            mock_handle_owner_assignment,
            mock_handle_auto_assignment,
            mock_process_rules,
        ]
  1695. class TransactionClustererTestCase(TestCase, SnubaTestCase):
  1696. @patch("sentry.ingest.transaction_clusterer.datasource.redis._record_sample")
  1697. def test_process_transaction_event_clusterer(
  1698. self,
  1699. mock_store_transaction_name,
  1700. ):
  1701. min_ago = before_now(minutes=1).replace(tzinfo=timezone.utc)
  1702. event = process_event(
  1703. data={
  1704. "project": self.project.id,
  1705. "event_id": "b" * 32,
  1706. "transaction": "foo",
  1707. "start_timestamp": str(min_ago),
  1708. "timestamp": str(min_ago),
  1709. "type": "transaction",
  1710. "transaction_info": {
  1711. "source": "url",
  1712. },
  1713. "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
  1714. },
  1715. group_id=0,
  1716. )
  1717. cache_key = write_event_to_cache(event)
  1718. post_process_group(
  1719. is_new=False,
  1720. is_regression=False,
  1721. is_new_group_environment=False,
  1722. cache_key=cache_key,
  1723. group_id=None,
  1724. group_states=None,
  1725. )
  1726. assert mock_store_transaction_name.mock_calls == [
  1727. mock.call(ClustererNamespace.TRANSACTIONS, self.project, "foo")
  1728. ]
@region_silo_test
class PostProcessGroupGenericTest(
    TestCase,
    SnubaTestCase,
    OccurrenceTestMixin,
    CorePostProcessGroupTestMixin,
    InboxTestMixin,
    RuleProcessorTestMixin,
    SnoozeTestMixin,
):
    """Runs the shared post-process mixins against generic (issue-platform) events."""

    def create_event(self, data, project_id, assert_no_errors=True):
        # Generic events go through the issue platform: store the raw event,
        # attach an occurrence to it, and return the resulting group event.
        data["type"] = "generic"
        event = self.store_event(
            data=data, project_id=project_id, assert_no_errors=assert_no_errors
        )
        occurrence_data = self.build_occurrence_data(event_id=event.event_id, project_id=project_id)
        occurrence, group_info = save_issue_occurrence(occurrence_data, event)
        assert group_info is not None
        group_event = event.for_group(group_info.group)
        group_event.occurrence = occurrence
        return group_event

    def call_post_process_group(
        self, is_new, is_regression, is_new_group_environment, event, cache_key=None
    ):
        # Issue-platform events are looked up by occurrence id, not by event
        # cache key, so cache_key is deliberately passed as None to the task;
        # the caller-supplied cache_key is only echoed back.
        with self.feature(ProfileFileIOGroupType.build_post_process_group_feature_name()):
            post_process_group(
                is_new=is_new,
                is_regression=is_regression,
                is_new_group_environment=is_new_group_environment,
                cache_key=None,
                group_id=event.group_id,
                occurrence_id=event.occurrence.id,
                project_id=event.group.project_id,
            )
        return cache_key

    def test_issueless(self):
        # Skip this test since there's no way to have issueless events in the issue platform
        pass

    def test_no_cache_abort(self):
        # We don't use the cache for generic issues, so skip this test
        pass

    @patch("sentry.rules.processor.RuleProcessor")
    def test_occurrence_deduping(self, mock_processor):
        # The same occurrence must only be processed once, even if the task
        # is invoked a second time for it.
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        self.call_post_process_group(
            is_new=True,
            is_regression=True,
            is_new_group_environment=False,
            event=event,
        )
        assert mock_processor.call_count == 1
        mock_processor.assert_called_with(EventMatcher(event), True, True, False, False)
        # Calling this again should do nothing, since we've already processed this occurrence.
        self.call_post_process_group(
            is_new=False,
            is_regression=True,
            is_new_group_environment=False,
            event=event,
        )
        # Make sure we haven't called this again, since we should exit early.
        assert mock_processor.call_count == 1

    @pytest.mark.skip(reason="those tests do not work with the given call_post_process_group impl")
    def test_processing_cache_cleared(self):
        pass

    @pytest.mark.skip(reason="those tests do not work with the given call_post_process_group impl")
    def test_processing_cache_cleared_with_commits(self):
        pass