test_post_process.py

from __future__ import annotations

import abc
import time
import uuid
from datetime import datetime, timedelta
from hashlib import md5
from typing import Any
from unittest import mock
from unittest.mock import Mock, patch

import pytest
from django.db import router
from django.test import override_settings
from django.utils import timezone

from sentry import buffer
from sentry.buffer.redis import RedisBuffer
from sentry.eventstore.models import Event
from sentry.eventstore.processing import event_processing_store
from sentry.feedback.usecases.create_feedback import FeedbackCreationSource
from sentry.ingest.transaction_clusterer import ClustererNamespace
from sentry.integrations.mixins.commit_context import CommitInfo, FileBlameInfo
from sentry.issues.grouptype import (
    FeedbackGroup,
    GroupCategory,
    PerformanceNPlusOneGroupType,
    PerformanceP95EndpointRegressionGroupType,
    ProfileFileIOGroupType,
)
from sentry.issues.ingest import save_issue_occurrence
from sentry.models.activity import Activity, ActivityIntegration
from sentry.models.group import GROUP_SUBSTATUS_TO_STATUS_MAP, Group, GroupStatus
from sentry.models.groupassignee import GroupAssignee
from sentry.models.groupinbox import GroupInbox, GroupInboxReason
from sentry.models.groupowner import (
    ASSIGNEE_EXISTS_DURATION,
    ASSIGNEE_EXISTS_KEY,
    ISSUE_OWNERS_DEBOUNCE_DURATION,
    ISSUE_OWNERS_DEBOUNCE_KEY,
    GroupOwner,
    GroupOwnerType,
)
from sentry.models.groupsnooze import GroupSnooze
from sentry.models.integrations.integration import Integration
from sentry.models.projectownership import ProjectOwnership
from sentry.models.projectteam import ProjectTeam
from sentry.ownership.grammar import Matcher, Owner, Rule, dump_schema
from sentry.replays.lib import kafka as replays_kafka
from sentry.replays.lib.kafka import clear_replay_publisher
from sentry.rules import init_registry
from sentry.rules.actions.base import EventAction
from sentry.services.hybrid_cloud.user.service import user_service
from sentry.silo.base import SiloMode
from sentry.silo.safety import unguarded_write
from sentry.tasks.derive_code_mappings import SUPPORTED_LANGUAGES
from sentry.tasks.merge import merge_groups
from sentry.tasks.post_process import (
    HIGHER_ISSUE_OWNERS_PER_PROJECT_PER_MIN_RATELIMIT,
    ISSUE_OWNERS_PER_PROJECT_PER_MIN_RATELIMIT,
    feedback_filter_decorator,
    locks,
    post_process_group,
    process_event,
    run_post_process_job,
)
from sentry.testutils.cases import BaseTestCase, PerformanceIssueTestCase, SnubaTestCase, TestCase
from sentry.testutils.helpers import with_feature
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.testutils.helpers.eventprocessing import write_event_to_cache
from sentry.testutils.helpers.options import override_options
from sentry.testutils.performance_issues.store_transaction import store_transaction
from sentry.testutils.silo import assume_test_silo_mode
from sentry.testutils.skips import requires_snuba
from sentry.types.activity import ActivityType
from sentry.types.group import GroupSubStatus, PriorityLevel
from sentry.utils import json
from sentry.utils.cache import cache
from sentry.utils.sdk_crashes.sdk_crash_detection_config import SdkName
from tests.sentry.issues.test_utils import OccurrenceTestMixin

pytestmark = [requires_snuba]
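# Module-wide pytest mark: every test here depends on a running Snuba instance
# to serve event and group queries, and is skipped when one is unavailable.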


class EventMatcher:
    def __init__(self, expected, group=None):
        self.expected = expected
        self.expected_group = group

    def __eq__(self, other):
        matching_id = other.event_id == self.expected.event_id
        if self.expected_group:
            return (
                matching_id
                and self.expected_group == other.group
                and self.expected_group.id == other.group_id
            )
        return matching_id
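

# EventMatcher above stands in for the real event in mock assertions such as
# `assert_called_with`: equality is keyed on event_id, and additionally on
# group/group_id when an expected group is supplied.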


class BasePostProgressGroupMixin(BaseTestCase, metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def create_event(self, data, project_id, assert_no_errors=True):
        pass

    @abc.abstractmethod
    def call_post_process_group(
        self, is_new, is_regression, is_new_group_environment, event, cache_key=None
    ):
        pass
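

# Concrete test classes mix this in and supply the two hooks above for a
# specific event type, so the same assertions run against errors, performance
# issues, and so on. A minimal sketch of such a subclass (illustrative only;
# it assumes the error-event path, where post_process_group reads the event
# back from the processing store by cache key):
#
#     class PostProcessGroupErrorTest(TestCase, CorePostProcessGroupTestMixin):
#         def create_event(self, data, project_id, assert_no_errors=True):
#             return self.store_event(
#                 data=data, project_id=project_id, assert_no_errors=assert_no_errors
#             )
#
#         def call_post_process_group(
#             self, is_new, is_regression, is_new_group_environment, event, cache_key=None
#         ):
#             if cache_key is None:
#                 cache_key = write_event_to_cache(event)
#             post_process_group(
#                 is_new=is_new,
#                 is_regression=is_regression,
#                 is_new_group_environment=is_new_group_environment,
#                 cache_key=cache_key,
#                 group_id=event.group_id,
#                 project_id=event.project_id,
#             )
#             return cache_key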


class CorePostProcessGroupTestMixin(BasePostProgressGroupMixin):
    @patch("sentry.rules.processing.processor.RuleProcessor")
    @patch("sentry.tasks.servicehooks.process_service_hook")
    @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
    @patch("sentry.signals.event_processed.send_robust")
    def test_issueless(
        self,
        mock_signal,
        mock_process_resource_change_bound,
        mock_process_service_hook,
        mock_processor,
    ):
        min_ago = iso_format(before_now(minutes=1))
        event = self.store_event(
            data={
                "type": "transaction",
                "timestamp": min_ago,
                "start_timestamp": min_ago,
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
        )
        cache_key = write_event_to_cache(event)
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
            cache_key=cache_key,
        )
        assert mock_processor.call_count == 0
        assert mock_process_service_hook.call_count == 0
        assert mock_process_resource_change_bound.call_count == 0
        # transaction events do not call event.processed
        assert mock_signal.call_count == 0

    @patch("sentry.rules.processing.processor.RuleProcessor")
    def test_no_cache_abort(self, mock_processor):
        event = self.create_event(data={}, project_id=self.project.id)
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
            cache_key="total-rubbish",
        )
        assert mock_processor.call_count == 0

    def test_processing_cache_cleared(self):
        event = self.create_event(data={}, project_id=self.project.id)
        cache_key = self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        assert event_processing_store.get(cache_key) is None

    def test_processing_cache_cleared_with_commits(self):
        # Regression test to guard against suspect commit calculations breaking
        # the cache
        event = self.create_event(data={}, project_id=self.project.id)
        self.create_commit(repo=self.create_repo())
        cache_key = self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        assert event_processing_store.get(cache_key) is None

    @patch("sentry.utils.metrics.timing")
    @patch("sentry.tasks.post_process.logger")
    def test_time_to_process_metric(self, logger_mock, metric_timing_mock):
        event = self.create_event(data={}, project_id=self.project.id)
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        metric_timing_mock.assert_any_call(
            "events.time-to-post-process",
            mock.ANY,
            instance=mock.ANY,
            tags={"occurrence_type": mock.ANY},
        )
        logger_mock.warning.assert_not_called()
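

# The mixin below exercises the derive-code-mappings hand-off: the task should
# only be queued for supported platforms, at most once per project per hour,
# and at most once per issue per day.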


class DeriveCodeMappingsProcessGroupTestMixin(BasePostProgressGroupMixin):
    def _create_event(
        self,
        data: dict[str, Any],
        project_id: int | None = None,
    ) -> Event:
        data.setdefault("platform", "javascript")
        return self.store_event(data=data, project_id=project_id or self.project.id)

    def _call_post_process_group(self, event: Event) -> None:
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )

    @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
    def test_derive_invalid_platform(self, mock_derive_code_mappings):
        event = self._create_event({"platform": "elixir"})
        self._call_post_process_group(event)
        assert mock_derive_code_mappings.delay.call_count == 0

    @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
    def test_derive_supported_languages(self, mock_derive_code_mappings):
        for platform in SUPPORTED_LANGUAGES:
            event = self._create_event({"platform": platform})
            self._call_post_process_group(event)
            assert mock_derive_code_mappings.delay.call_count == 1

    @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
    def test_only_maps_a_given_project_once_per_hour(self, mock_derive_code_mappings):
        dogs_project = self.create_project()
        maisey_event = self._create_event(
            {
                "fingerprint": ["themaiseymasieydog"],
            },
            dogs_project.id,
        )
        charlie_event = self._create_event(
            {
                "fingerprint": ["charliebear"],
            },
            dogs_project.id,
        )
        cory_event = self._create_event(
            {
                "fingerprint": ["thenudge"],
            },
            dogs_project.id,
        )
        bodhi_event = self._create_event(
            {
                "fingerprint": ["theescapeartist"],
            },
            dogs_project.id,
        )
        self._call_post_process_group(maisey_event)
        assert mock_derive_code_mappings.delay.call_count == 1
        # second event from project should bail (no increase in call count)
        self._call_post_process_group(charlie_event)
        assert mock_derive_code_mappings.delay.call_count == 1
        # advance the clock 59 minutes, and it should still bail
        with patch("time.time", return_value=time.time() + 60 * 59):
            self._call_post_process_group(cory_event)
            assert mock_derive_code_mappings.delay.call_count == 1
        # now advance the clock 61 minutes, and this time it should go through
        with patch("time.time", return_value=time.time() + 60 * 61):
            self._call_post_process_group(bodhi_event)
            assert mock_derive_code_mappings.delay.call_count == 2
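
    # Patching time.time is how these tests advance the debounce window without
    # sleeping: the "already processed" markers are cache entries with a TTL,
    # and (at least with the local-memory cache backend used in tests) expiry
    # is checked against time.time, so returning a value past the window makes
    # the markers look expired.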

    @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
    def test_only_maps_a_given_issue_once_per_day(self, mock_derive_code_mappings):
        dogs_project = self.create_project()
        maisey_event1 = self._create_event(
            {
                "fingerprint": ["themaiseymaiseydog"],
            },
            dogs_project.id,
        )
        maisey_event2 = self._create_event(
            {
                "fingerprint": ["themaiseymaiseydog"],
            },
            dogs_project.id,
        )
        maisey_event3 = self._create_event(
            {
                "fingerprint": ["themaiseymaiseydog"],
            },
            dogs_project.id,
        )
        maisey_event4 = self._create_event(
            {
                "fingerprint": ["themaiseymaiseydog"],
            },
            dogs_project.id,
        )
        # because of the fingerprint, the events should always end up in the same group,
        # but the rest of the test is bogus if they aren't, so let's be sure
        assert maisey_event1.group_id == maisey_event2.group_id
        assert maisey_event2.group_id == maisey_event3.group_id
        assert maisey_event3.group_id == maisey_event4.group_id
        self._call_post_process_group(maisey_event1)
        assert mock_derive_code_mappings.delay.call_count == 1
        # second event from group should bail (no increase in call count)
        self._call_post_process_group(maisey_event2)
        assert mock_derive_code_mappings.delay.call_count == 1
        # advance the clock 23 hours and 59 minutes, and it should still bail
        with patch("time.time", return_value=time.time() + (60 * 60 * 23) + (60 * 59)):
            self._call_post_process_group(maisey_event3)
            assert mock_derive_code_mappings.delay.call_count == 1
        # now advance the clock 24 hours and 1 minute, and this time it should go through
        with patch("time.time", return_value=time.time() + (60 * 60 * 24) + (60 * 1)):
            self._call_post_process_group(maisey_event4)
            assert mock_derive_code_mappings.delay.call_count == 2

    @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
    def test_skipping_an_issue_doesnt_mark_it_processed(self, mock_derive_code_mappings):
        dogs_project = self.create_project()
        maisey_event = self._create_event(
            {
                "fingerprint": ["themaiseymasieydog"],
            },
            dogs_project.id,
        )
        charlie_event1 = self._create_event(
            {
                "fingerprint": ["charliebear"],
            },
            dogs_project.id,
        )
        charlie_event2 = self._create_event(
            {
                "fingerprint": ["charliebear"],
            },
            dogs_project.id,
        )
        # because of the fingerprint, the two Charlie events should always end up in the
        # same group, but the rest of the test is bogus if they aren't, so let's be sure
        assert charlie_event1.group_id == charlie_event2.group_id
        self._call_post_process_group(maisey_event)
        assert mock_derive_code_mappings.delay.call_count == 1
        # second event from project should bail (no increase in call count)
        self._call_post_process_group(charlie_event1)
        assert mock_derive_code_mappings.delay.call_count == 1
        # now advance the clock 61 minutes (so the project should clear the cache), and
        # another event from the Charlie group should go through
        with patch("time.time", return_value=time.time() + 60 * 61):
            self._call_post_process_group(charlie_event2)
            assert mock_derive_code_mappings.delay.call_count == 2


class RuleProcessorTestMixin(BasePostProgressGroupMixin):
    @patch("sentry.rules.processing.processor.RuleProcessor")
    def test_rule_processor_backwards_compat(self, mock_processor):
        event = self.create_event(data={}, project_id=self.project.id)
        mock_callback = Mock()
        mock_futures = [Mock()]
        mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        mock_processor.assert_called_once_with(EventMatcher(event), True, False, True, False, False)
        mock_processor.return_value.apply.assert_called_once_with()
        mock_callback.assert_called_once_with(EventMatcher(event), mock_futures)

    @patch("sentry.rules.processing.processor.RuleProcessor")
    def test_rule_processor(self, mock_processor):
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        mock_callback = Mock()
        mock_futures = [Mock()]
        mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        mock_processor.return_value.apply.assert_called_once_with()
        mock_callback.assert_called_once_with(EventMatcher(event), mock_futures)

    def test_rule_processor_buffer_values(self):
        # Test that pending buffer values for `times_seen` are applied to the group
        # and that alerts fire as expected
        from sentry.models.rule import Rule

        MOCK_RULES = ("sentry.rules.filters.issue_occurrences.IssueOccurrencesFilter",)
        redis_buffer = RedisBuffer()
        with (
            mock.patch("sentry.buffer.backend.get", redis_buffer.get),
            mock.patch("sentry.buffer.backend.incr", redis_buffer.incr),
            patch("sentry.constants._SENTRY_RULES", MOCK_RULES),
            patch("sentry.rules.rules", init_registry()) as rules,
        ):
            MockAction = mock.Mock()
            MockAction.id = "tests.sentry.tasks.post_process.tests.MockAction"
            MockAction.return_value = mock.Mock(spec=EventAction)
            MockAction.return_value.after.return_value = []
            rules.add(MockAction)
            conditions = [
                {
                    "id": "sentry.rules.filters.issue_occurrences.IssueOccurrencesFilter",
                    "value": 10,
                },
            ]
            actions = [{"id": "tests.sentry.tasks.post_process.tests.MockAction"}]
            Rule.objects.filter(project=self.project).delete()
            Rule.objects.create(
                project=self.project, data={"conditions": conditions, "actions": actions}
            )
            event = self.create_event(
                data={"message": "testing", "fingerprint": ["group-1"]}, project_id=self.project.id
            )
            event_2 = self.create_event(
                data={"message": "testing", "fingerprint": ["group-1"]}, project_id=self.project.id
            )
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
            )
            event.group.update(times_seen=2)
            assert MockAction.return_value.after.call_count == 0
            buffer.backend.incr(Group, {"times_seen": 15}, filters={"id": event.group.id})
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event_2,
            )
            assert MockAction.return_value.after.call_count == 1
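
    # Why the second post-process call fires the action: the group's persisted
    # times_seen is 2 and the Redis buffer holds a pending increment of 15, so
    # the IssueOccurrencesFilter evaluates against 17 occurrences, clearing its
    # configured threshold of 10.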

    @patch("sentry.rules.processing.processor.RuleProcessor")
    def test_group_refresh(self, mock_processor):
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        group1 = event.group
        group2 = self.create_group(project=self.project)
        assert event.group_id == group1.id
        assert event.group == group1
        with self.tasks():
            merge_groups([group1.id], group2.id)
        mock_callback = Mock()
        mock_futures = [Mock()]
        mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        # Ensure that rule processing sees the merged group.
        mock_processor.assert_called_with(
            EventMatcher(event, group=group2), True, False, True, False, False
        )

    @patch("sentry.rules.processing.processor.RuleProcessor")
    def test_group_last_seen_buffer(self, mock_processor):
        first_event_date = timezone.now() - timedelta(days=90)
        event1 = self.create_event(
            data={"message": "testing"},
            project_id=self.project.id,
        )
        group1 = event1.group
        group1.update(last_seen=first_event_date)
        event2 = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        # Set last_seen back to the first event's date to simulate the
        # last_seen update still sitting in the buffer.
        event2.group.last_seen = first_event_date
        event2.group.update(last_seen=first_event_date)
        assert event2.group_id == group1.id
        mock_callback = Mock()
        mock_futures = [Mock()]
        mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
        self.call_post_process_group(
            is_new=False,
            is_regression=True,
            is_new_group_environment=False,
            event=event2,
        )
        mock_processor.assert_called_with(
            EventMatcher(event2, group=group1), False, True, False, False, False
        )
        sent_group_date = mock_processor.call_args[0][0].group.last_seen
        # Check that last_seen was updated to be at least the new event's date
        self.assertAlmostEqual(sent_group_date, event2.datetime, delta=timedelta(seconds=10))


class ServiceHooksTestMixin(BasePostProgressGroupMixin):
    @patch("sentry.tasks.servicehooks.process_service_hook")
    def test_service_hook_fires_on_new_event(self, mock_process_service_hook):
        event = self.create_event(data={}, project_id=self.project.id)
        hook = self.create_service_hook(
            project=self.project,
            organization=self.project.organization,
            actor=self.user,
            events=["event.created"],
        )
        with self.feature("projects:servicehooks"):
            self.call_post_process_group(
                is_new=False,
                is_regression=False,
                is_new_group_environment=False,
                event=event,
            )
        mock_process_service_hook.delay.assert_called_once_with(
            servicehook_id=hook.id, event=EventMatcher(event)
        )

    @patch("sentry.tasks.servicehooks.process_service_hook")
    @patch("sentry.rules.processing.processor.RuleProcessor")
    def test_service_hook_fires_on_alert(self, mock_processor, mock_process_service_hook):
        event = self.create_event(data={}, project_id=self.project.id)
        mock_callback = Mock()
        mock_futures = [Mock()]
        mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
        hook = self.create_service_hook(
            project=self.project,
            organization=self.project.organization,
            actor=self.user,
            events=["event.alert"],
        )
        with self.feature("projects:servicehooks"):
            self.call_post_process_group(
                is_new=False,
                is_regression=False,
                is_new_group_environment=False,
                event=event,
            )
        mock_process_service_hook.delay.assert_called_once_with(
            servicehook_id=hook.id, event=EventMatcher(event)
        )

    @patch("sentry.tasks.servicehooks.process_service_hook")
    @patch("sentry.rules.processing.processor.RuleProcessor")
    def test_service_hook_does_not_fire_without_alert(
        self, mock_processor, mock_process_service_hook
    ):
        event = self.create_event(data={}, project_id=self.project.id)
        mock_processor.return_value.apply.return_value = []
        self.create_service_hook(
            project=self.project,
            organization=self.project.organization,
            actor=self.user,
            events=["event.alert"],
        )
        with self.feature("projects:servicehooks"):
            self.call_post_process_group(
                is_new=False,
                is_regression=False,
                is_new_group_environment=False,
                event=event,
            )
        assert not mock_process_service_hook.delay.mock_calls

    @patch("sentry.tasks.servicehooks.process_service_hook")
    def test_service_hook_does_not_fire_without_event(self, mock_process_service_hook):
        event = self.create_event(data={}, project_id=self.project.id)
        self.create_service_hook(
            project=self.project, organization=self.project.organization, actor=self.user, events=[]
        )
        with self.feature("projects:servicehooks"):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=False,
                event=event,
            )
        assert not mock_process_service_hook.delay.mock_calls
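

# Resource-change webhooks for Sentry apps: a "created" task is enqueued for
# every new group, while error events additionally require the
# "organizations:integrations-event-hooks" feature and a service hook
# subscribed to "error.created".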


class ResourceChangeBoundsTestMixin(BasePostProgressGroupMixin):
    @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
    def test_processes_resource_change_task_on_new_group(self, delay):
        event = self.create_event(data={}, project_id=self.project.id)
        group = event.group
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        delay.assert_called_once_with(action="created", sender="Group", instance_id=group.id)

    @with_feature("organizations:integrations-event-hooks")
    @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
    def test_processes_resource_change_task_on_error_events(self, delay):
        event = self.create_event(
            data={
                "message": "Foo bar",
                "exception": {"type": "Foo", "value": "oh no"},
                "level": "error",
                "timestamp": iso_format(timezone.now()),
            },
            project_id=self.project.id,
            assert_no_errors=False,
        )
        self.create_service_hook(
            project=self.project,
            organization=self.project.organization,
            actor=self.user,
            events=["error.created"],
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        delay.assert_called_once_with(
            action="created",
            sender="Error",
            instance_id=event.event_id,
            instance=EventMatcher(event),
        )

    @with_feature("organizations:integrations-event-hooks")
    @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
    def test_processes_resource_change_task_not_called_for_non_errors(self, delay):
        event = self.create_event(
            data={
                "message": "Foo bar",
                "level": "info",
                "timestamp": iso_format(timezone.now()),
            },
            project_id=self.project.id,
            assert_no_errors=False,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assert not delay.called

    @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
    def test_processes_resource_change_task_not_called_without_feature_flag(self, delay):
        event = self.create_event(
            data={
                "message": "Foo bar",
                "level": "info",
                "timestamp": iso_format(timezone.now()),
            },
            project_id=self.project.id,
            assert_no_errors=False,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assert not delay.called

    @with_feature("organizations:integrations-event-hooks")
    @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
    def test_processes_resource_change_task_not_called_without_error_created(self, delay):
        event = self.create_event(
            data={
                "message": "Foo bar",
                "level": "error",
                "exception": {"type": "Foo", "value": "oh no"},
                "timestamp": iso_format(timezone.now()),
            },
            project_id=self.project.id,
            assert_no_errors=False,
        )
        self.create_service_hook(
            project=self.project, organization=self.project.organization, actor=self.user, events=[]
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assert not delay.called


class InboxTestMixin(BasePostProgressGroupMixin):
    @patch("sentry.rules.processing.processor.RuleProcessor")
    def test_group_inbox_regression(self, mock_processor):
        new_event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        group = new_event.group
        assert group.status == GroupStatus.UNRESOLVED
        assert group.substatus == GroupSubStatus.ONGOING
        self.call_post_process_group(
            is_new=True,
            is_regression=True,
            is_new_group_environment=False,
            event=new_event,
        )
        assert GroupInbox.objects.filter(group=group, reason=GroupInboxReason.NEW.value).exists()
        GroupInbox.objects.filter(
            group=group
        ).delete()  # Delete so it creates the .REGRESSION entry.
        group.refresh_from_db()
        assert group.status == GroupStatus.UNRESOLVED
        assert group.substatus == GroupSubStatus.NEW
        mock_processor.assert_called_with(EventMatcher(new_event), True, True, False, False, False)
        # resolve the new issue so regression actually happens
        group.status = GroupStatus.RESOLVED
        group.substatus = None
        group.active_at = group.active_at - timedelta(minutes=1)
        group.save(update_fields=["status", "substatus", "active_at"])
        # trigger a transition from resolved to regressed by firing an event that groups
        # to that issue
        regressed_event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        assert regressed_event.group == new_event.group
        group = regressed_event.group
        group.refresh_from_db()
        assert group.status == GroupStatus.UNRESOLVED
        assert group.substatus == GroupSubStatus.REGRESSED
        self.call_post_process_group(
            is_new=False,
            is_regression=True,
            is_new_group_environment=False,
            event=regressed_event,
        )
        mock_processor.assert_called_with(
            EventMatcher(regressed_event), False, True, False, False, False
        )
        group.refresh_from_db()
        assert group.status == GroupStatus.UNRESOLVED
        assert group.substatus == GroupSubStatus.REGRESSED
        assert GroupInbox.objects.filter(
            group=group, reason=GroupInboxReason.REGRESSION.value
        ).exists()
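

# Ownership auto-assignment. make_ownership() installs three path rules
# ("src/app/*" -> the team, "src/*" -> self.user, "tests/*" -> user_2) with
# auto_assignment and fallthrough enabled, so most tests below only vary the
# event's stacktrace filename.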


class AssignmentTestMixin(BasePostProgressGroupMixin):
    def make_ownership(self, extra_rules=None):
        self.user_2 = self.create_user()
        self.create_team_membership(team=self.team, user=self.user_2)
        rules = [
            Rule(Matcher("path", "src/app/*"), [Owner("team", self.team.name)]),
            Rule(Matcher("path", "src/*"), [Owner("user", self.user.email)]),
            Rule(Matcher("path", "tests/*"), [Owner("user", self.user_2.email)]),
        ]
        if extra_rules:
            rules.extend(extra_rules)
        self.prj_ownership = ProjectOwnership.objects.create(
            project_id=self.project.id,
            schema=dump_schema(rules),
            fallthrough=True,
            auto_assignment=True,
        )

    def test_owner_assignment_order_precedence(self):
        self.make_ownership()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id == self.user.id
        assert assignee.team is None
        owners = list(GroupOwner.objects.filter(group=event.group))
        assert len(owners) == 2
        assert {(self.user.id, None), (None, self.team.id)} == {
            (o.user_id, o.team_id) for o in owners
        }
        activity = Activity.objects.filter(group=event.group).first()
        assert activity.data == {
            "assignee": str(self.user.id),
            "assigneeEmail": self.user.email,
            "assigneeType": "user",
            "integration": ActivityIntegration.PROJECT_OWNERSHIP.value,
            "rule": str(Rule(Matcher("path", "src/*"), [Owner("user", self.user.email)])),
        }
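
    # The precedence asserted above: both the "src/app/*" (team) and "src/*"
    # (self.user) rules match src/app/example.py, and both matches are recorded
    # as GroupOwners, but the assignment itself goes to the owner from the later
    # matching rule in the schema, hence self.user rather than the team.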

    def test_owner_assignment_extra_groups(self):
        extra_user = self.create_user()
        self.create_team_membership(self.team, user=extra_user)
        self.make_ownership(
            [Rule(Matcher("path", "src/app/things/in/*"), [Owner("user", extra_user.email)])],
        )
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/things/in/a/path/example2.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id == extra_user.id
        assert assignee.team is None
        owners = list(GroupOwner.objects.filter(group=event.group))
        assert len(owners) == 2
        assert {(extra_user.id, None), (self.user.id, None)} == {
            (o.user_id, o.team_id) for o in owners
        }

    def test_owner_assignment_existing_owners(self):
        extra_team = self.create_team()
        ProjectTeam.objects.create(team=extra_team, project=self.project)
        self.make_ownership(
            [Rule(Matcher("path", "src/app/things/in/*"), [Owner("team", extra_team.slug)])],
        )
        GroupOwner.objects.create(
            group=self.group,
            project=self.project,
            organization=self.organization,
            user_id=self.user.id,
            type=GroupOwnerType.OWNERSHIP_RULE.value,
        )
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/things/in/a/path/example2.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id is None
        assert assignee.team == extra_team
        owners = list(GroupOwner.objects.filter(group=event.group))
        assert {(None, extra_team.id), (self.user.id, None)} == {
            (o.user_id, o.team_id) for o in owners
        }

    def test_owner_assignment_assign_user(self):
        self.make_ownership()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id == self.user.id
        assert assignee.team is None

    def test_owner_assignment_ownership_no_matching_owners(self):
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assert not event.group.assignee_set.exists()

    def test_owner_assignment_existing_assignment(self):
        self.make_ownership()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        event.group.assignee_set.create(team=self.team, project=self.project)
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id is None
        assert assignee.team == self.team

    def test_only_first_assignment_works(self):
        self.make_ownership()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
                "fingerprint": ["group1"],
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id == self.user.id
        assert assignee.team is None
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "tests/src/app/test_example.py"}]},
                "fingerprint": ["group1"],
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        # Assignment shouldn't change.
        assert assignee.user_id == self.user.id
        assert assignee.team is None

    def test_owner_assignment_owner_is_gone(self):
        self.make_ownership()
        # Remove the team so the rule match will fail to resolve
        self.team.delete()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee is None

    def test_suspect_committer_affect_cache_debouncing_issue_owners_calculations(self):
        self.make_ownership()
        committer = GroupOwner(
            group=self.created_event.group,
            project=self.created_event.project,
            organization=self.created_event.project.organization,
            type=GroupOwnerType.SUSPECT_COMMIT.value,
        )
        committer.save()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        event.group.assignee_set.create(team=self.team, project=self.project)
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id is None
        assert assignee.team == self.team

    def test_owner_assignment_when_owners_have_been_unassigned(self):
        """
        Test that if assignees are unassigned and the project's ownership rules
        change, group owners are re-calculated and the group is re-assigned.
        """
        # Create rules and check assignees
        self.make_ownership()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        event_2 = self.create_event(
            data={
                "message": "Exception",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/integration.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event_2,
        )
        assignee = event.group.assignee_set.first()
        assert assignee.user_id == self.user.id
        user_3 = self.create_user()
        self.create_team_membership(self.team, user=user_3)
        # De-assign group assignees
        GroupAssignee.objects.deassign(event.group, self.user)
        assert event.group.assignee_set.first() is None
        # Change ProjectOwnership rules
        rules = [
            Rule(Matcher("path", "src/*"), [Owner("user", user_3.email)]),
        ]
        self.prj_ownership.schema = dump_schema(rules)
        self.prj_ownership.save()
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event_2,
        )
        # Group should be re-assigned to the new group owner
        assignee = event.group.assignee_set.first()
        assert assignee.user_id == user_3.id
        # De-assign group assignees
        GroupAssignee.objects.deassign(event.group, user_service.get_user(user_id=assignee.user_id))
        assert event.group.assignee_set.first() is None
        user_4 = self.create_user()
        self.create_team_membership(self.team, user=user_4)
        self.prj_ownership.schema = dump_schema([])
        self.prj_ownership.save()
        code_owners_rule = Rule(
            Matcher("codeowners", "*.py"),
            [Owner("user", user_4.email)],
        )
        self.code_mapping = self.create_code_mapping(project=self.project)
        self.code_owners = self.create_codeowners(
            self.project,
            self.code_mapping,
            schema=dump_schema([code_owners_rule]),
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event_2,
        )
        # Group should be re-assigned to the new group owner
        assignee = event.group.assignee_set.first()
        assert assignee.user_id == user_4.id

    def test_auto_assignment_when_owners_have_been_unassigned(self):
        """
        Test that if the assignee is unassigned and the project's ownership rules
        change, group owners are re-calculated and the group is re-assigned.
        """
        # Create rules and check assignees
        self.make_ownership()
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        assignee = (
            GroupOwner.objects.filter()
            .exclude(user_id__isnull=True, team_id__isnull=True)
            .order_by("type")
            .first()
        )
        assert assignee.user_id == self.user.id
        user_3 = self.create_user()
        self.create_team_membership(self.team, user=user_3)
        # Set assignee_exists cache to self.user
        cache.set(ASSIGNEE_EXISTS_KEY(event.group_id), self.user, ASSIGNEE_EXISTS_DURATION)
        # De-assign group assignees
        GroupAssignee.objects.deassign(event.group, self.user)
        assert event.group.assignee_set.first() is None
        # Change ProjectOwnership rules
        rules = [
            Rule(Matcher("path", "src/*"), [Owner("user", user_3.email)]),
        ]
        self.prj_ownership.schema = dump_schema(rules)
        self.prj_ownership.save()
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        # Mimic the filter used in get_autoassigned_owner_cached to get the issue
        # owner to be auto-assigned
        assignee = (
            GroupOwner.objects.filter()
            .exclude(user_id__isnull=True, team_id__isnull=True)
            .order_by("type")
            .first()
        )
        # Group should be re-assigned to the new group owner
        assert assignee.user_id == user_3.id
  1095. def test_ensure_when_assignees_and_owners_are_cached_does_not_cause_unbound_errors(self):
  1096. self.make_ownership()
  1097. event = self.create_event(
  1098. data={
  1099. "message": "oh no",
  1100. "platform": "python",
  1101. "stacktrace": {"frames": [{"filename": "src/app.py"}]},
  1102. },
  1103. project_id=self.project.id,
  1104. )
  1105. assignee_cache_key = "assignee_exists:1:%s" % event.group.id
  1106. owner_cache_key = "owner_exists:1:%s" % event.group.id
  1107. for key in [assignee_cache_key, owner_cache_key]:
  1108. cache.set(key, True)
  1109. self.call_post_process_group(
  1110. is_new=False,
  1111. is_regression=False,
  1112. is_new_group_environment=False,
  1113. event=event,
  1114. )
  1115. def test_auto_assignment_when_owners_are_invalid(self):
  1116. """
  1117. Test that invalid group owners (that exist due to bugs) are deleted and not assigned
  1118. when no valid issue owner exists
  1119. """
  1120. event = self.create_event(
  1121. data={
  1122. "message": "oh no",
  1123. "platform": "python",
  1124. "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
  1125. },
  1126. project_id=self.project.id,
  1127. )
  1128. # Hard code an invalid group owner
  1129. invalid_codeowner = GroupOwner(
  1130. group=event.group,
  1131. project=event.project,
  1132. organization=event.project.organization,
  1133. type=GroupOwnerType.CODEOWNERS.value,
  1134. context={"rule": "codeowners:/**/*.css " + self.user.email},
  1135. user_id=self.user.id,
  1136. )
  1137. invalid_codeowner.save()
  1138. self.call_post_process_group(
  1139. is_new=False,
  1140. is_regression=False,
  1141. is_new_group_environment=False,
  1142. event=event,
  1143. )
  1144. assignee = event.group.assignee_set.first()
  1145. assert assignee is None
  1146. assert len(GroupOwner.objects.filter(group_id=event.group)) == 0
  1147. @patch("sentry.tasks.post_process.logger")
  1148. def test_debounces_handle_owner_assignments(self, logger):
  1149. self.make_ownership()
  1150. event = self.create_event(
  1151. data={
  1152. "message": "oh no",
  1153. "platform": "python",
  1154. "stacktrace": {"frames": [{"filename": "src/app.py"}]},
  1155. },
  1156. project_id=self.project.id,
  1157. )
  1158. cache.set(ISSUE_OWNERS_DEBOUNCE_KEY(event.group_id), True, ISSUE_OWNERS_DEBOUNCE_DURATION)
  1159. self.call_post_process_group(
  1160. is_new=False,
  1161. is_regression=False,
  1162. is_new_group_environment=False,
  1163. event=event,
  1164. )
  1165. logger.info.assert_any_call(
  1166. "handle_owner_assignment.issue_owners_exist",
  1167. extra={
  1168. "event": event.event_id,
  1169. "group": event.group_id,
  1170. "project": event.project_id,
  1171. "organization": event.project.organization_id,
  1172. "reason": "issue_owners_exist",
  1173. },
  1174. )

    @patch("sentry.tasks.post_process.logger")
    def test_issue_owners_should_ratelimit(self, mock_logger):
        cache.set(
            f"issue_owner_assignment_ratelimiter:{self.project.id}",
            (set(range(0, ISSUE_OWNERS_PER_PROJECT_PER_MIN_RATELIMIT * 10, 10)), datetime.now()),
        )
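        # The seeded cache value mimics what the ratelimiter presumably stores internally:
        # a set of already-counted entries (one per allowed assignment in the current
        # window) plus the window start time, so the next assignment attempt is rejected.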
        event = self.create_event(
            data={
                "message": "oh no",
                "platform": "python",
                "stacktrace": {"frames": [{"filename": "src/app.py"}]},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            event=event,
        )
        expected_extra = {
            "event": event.event_id,
            "group": event.group_id,
            "project": event.project_id,
            "organization": event.project.organization_id,
            "reason": "ratelimited",
        }
        mock_logger.info.assert_any_call(
            "handle_owner_assignment.ratelimited", extra=expected_extra
        )
        mock_logger.reset_mock()
        # Raise this organization's ratelimit
        with self.feature("organizations:increased-issue-owners-rate-limit"):
            self.call_post_process_group(
                is_new=False,
                is_regression=False,
                is_new_group_environment=False,
                event=event,
            )
            with pytest.raises(AssertionError):
                mock_logger.info.assert_any_call(
                    "handle_owner_assignment.ratelimited", extra=expected_extra
                )
        # Still enforce the raised limit
        mock_logger.reset_mock()
        cache.set(
            f"issue_owner_assignment_ratelimiter:{self.project.id}",
            (
                set(range(0, HIGHER_ISSUE_OWNERS_PER_PROJECT_PER_MIN_RATELIMIT * 10, 10)),
                datetime.now(),
            ),
        )
        with self.feature("organizations:increased-issue-owners-rate-limit"):
            self.call_post_process_group(
                is_new=False,
                is_regression=False,
                is_new_group_environment=False,
                event=event,
            )
            mock_logger.info.assert_any_call(
                "handle_owner_assignment.ratelimited", extra=expected_extra
            )


class ProcessCommitsTestMixin(BasePostProgressGroupMixin):
    github_blame_return_value = {
        "commitId": "asdfwreqr",
        "committedDate": (timezone.now() - timedelta(days=2)),
        "commitMessage": "placeholder commit message",
        "commitAuthorName": "",
        "commitAuthorEmail": "admin@localhost",
    }

    def setUp(self):
        self.created_event = self.create_event(
            data={
                "message": "Kaboom!",
                "platform": "python",
                "timestamp": iso_format(before_now(seconds=10)),
                "stacktrace": {
                    "frames": [
                        {
                            "function": "handle_set_commits",
                            "abs_path": "/usr/src/sentry/src/sentry/tasks.py",
                            "module": "sentry.tasks",
                            "in_app": False,
                            "lineno": 30,
                            "filename": "sentry/tasks.py",
                        },
                        {
                            "function": "set_commits",
                            "abs_path": "/usr/src/sentry/src/sentry/models/release.py",
                            "module": "sentry.models.release",
                            "in_app": True,
                            "lineno": 39,
                            "filename": "sentry/models/release.py",
                        },
                    ]
                },
                "fingerprint": ["put-me-in-the-control-group"],
            },
            project_id=self.project.id,
        )
        self.cache_key = write_event_to_cache(self.created_event)
        self.repo = self.create_repo(
            name="org/example", integration_id=self.integration.id, provider="integrations:github"
        )
        self.code_mapping = self.create_code_mapping(
            repo=self.repo, project=self.project, stack_root="sentry/", source_root="sentry/"
        )
        self.commit_author = self.create_commit_author(project=self.project, user=self.user)
        self.commit = self.create_commit(
            project=self.project,
            repo=self.repo,
            author=self.commit_author,
            key="asdfwreqr",
            message="placeholder commit message",
        )
        self.github_blame_all_files_return_value = [
            FileBlameInfo(
                code_mapping=self.code_mapping,
                lineno=39,
                path="sentry/models/release.py",
                ref="master",
                repo=self.repo,
                commit=CommitInfo(
                    commitId="asdfwreqr",
                    committedDate=(timezone.now() - timedelta(days=2)),
                    commitMessage="placeholder commit message",
                    commitAuthorName="",
                    commitAuthorEmail="admin@localhost",
                ),
            )
        ]
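        # The blame fixture deliberately mirrors the in_app frame above
        # (sentry/models/release.py:39) and the commit created via create_commit, so a
        # successful blame lookup should resolve self.commit_author as the suspect.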

    @patch(
        "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames",
        return_value=github_blame_return_value,
    )
    def test_logic_fallback_no_scm(self, mock_get_commit_context):
        with assume_test_silo_mode(SiloMode.CONTROL):
            with unguarded_write(using=router.db_for_write(Integration)):
                Integration.objects.all().delete()
            integration = self.create_provider_integration(provider="bitbucket")
            integration.add_organization(self.organization)
        with self.tasks():
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=self.created_event,
            )
        assert not mock_get_commit_context.called

    @patch(
        "sentry.integrations.github_enterprise.GitHubEnterpriseIntegration.get_commit_context_all_frames",
    )
    def test_github_enterprise(self, mock_get_commit_context):
        mock_get_commit_context.return_value = self.github_blame_all_files_return_value
        with assume_test_silo_mode(SiloMode.CONTROL):
            with unguarded_write(using=router.db_for_write(Integration)):
                Integration.objects.all().delete()
            integration = self.create_provider_integration(
                external_id="35.232.149.196:12345",
                provider="github_enterprise",
                metadata={
                    "domain_name": "35.232.149.196/baxterthehacker",
                    "installation_id": "12345",
                    "installation": {"id": "2", "private_key": "private_key", "verify_ssl": True},
                },
            )
            organization_integration = integration.add_organization(self.organization)
        self.repo.update(integration_id=integration.id, provider="integrations:github_enterprise")
        self.code_mapping.update(organization_integration_id=organization_integration.id)
        with self.tasks():
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=self.created_event,
            )
        assert GroupOwner.objects.get(
            group=self.created_event.group,
            project=self.created_event.project,
            organization=self.created_event.project.organization,
            type=GroupOwnerType.SUSPECT_COMMIT.value,
        )

    @patch("sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames")
    def test_skip_when_not_is_new(self, mock_get_commit_context):
        """
        Tests that we do not process commit context if the group isn't new.
        """
        with self.tasks():
            self.call_post_process_group(
                is_new=False,
                is_regression=False,
                is_new_group_environment=True,
                event=self.created_event,
            )
        assert not mock_get_commit_context.called
        assert not GroupOwner.objects.filter(
            group=self.created_event.group,
            project=self.created_event.project,
            organization=self.created_event.project.organization,
            type=GroupOwnerType.SUSPECT_COMMIT.value,
        ).exists()

    @patch(
        "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames",
    )
    def test_does_not_skip_when_is_new(self, mock_get_commit_context):
        """
        Tests that the commit context is processed when the group is new.
        """
        mock_get_commit_context.return_value = self.github_blame_all_files_return_value
        with self.tasks():
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=self.created_event,
            )
        assert mock_get_commit_context.called
        assert GroupOwner.objects.get(
            group=self.created_event.group,
            project=self.created_event.project,
            organization=self.created_event.project.organization,
            type=GroupOwnerType.SUSPECT_COMMIT.value,
        )


class SnoozeTestSkipSnoozeMixin(BasePostProgressGroupMixin):
    @patch("sentry.signals.issue_unignored.send_robust")
    @patch("sentry.rules.processing.processor.RuleProcessor")
    def test_invalidates_snooze_issue_platform(self, mock_processor, mock_send_unignored_robust):
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        group = event.group
        should_detect_escalation = group.issue_type.should_detect_escalation(
            self.project.organization
        )
        # Check for has_reappeared=False if is_new=True
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        assert GroupInbox.objects.filter(group=group, reason=GroupInboxReason.NEW.value).exists()
        GroupInbox.objects.filter(group=group).delete()  # Delete so it creates the UNIGNORED entry.
        Activity.objects.filter(group=group).delete()
        mock_processor.assert_called_with(EventMatcher(event), True, False, True, False, False)
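        # The positional booleans passed to RuleProcessor appear to be, in order:
        # is_new, is_regression, is_new_group_environment, has_reappeared, has_escalated.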
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        group.status = GroupStatus.IGNORED
        group.substatus = GroupSubStatus.UNTIL_CONDITION_MET
        group.save(update_fields=["status", "substatus"])
        snooze = GroupSnooze.objects.create(group=group, until=timezone.now() - timedelta(hours=1))
        # Check for has_reappeared=True if is_new=False
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        mock_processor.assert_called_with(EventMatcher(event), False, False, True, True, False)
        if should_detect_escalation:
            assert not GroupSnooze.objects.filter(id=snooze.id).exists()
        else:
            assert GroupSnooze.objects.filter(id=snooze.id).exists()
        group.refresh_from_db()
        if should_detect_escalation:
            assert group.status == GroupStatus.UNRESOLVED
            assert group.substatus == GroupSubStatus.ONGOING
            assert GroupInbox.objects.filter(
                group=group, reason=GroupInboxReason.ONGOING.value
            ).exists()
            assert Activity.objects.filter(
                group=group, project=group.project, type=ActivityType.SET_UNRESOLVED.value
            ).exists()
            assert mock_send_unignored_robust.called
        else:
            assert group.status == GroupStatus.IGNORED
            assert group.substatus == GroupSubStatus.UNTIL_CONDITION_MET
            assert not GroupInbox.objects.filter(
                group=group, reason=GroupInboxReason.ESCALATING.value
            ).exists()
            assert not Activity.objects.filter(
                group=group, project=group.project, type=ActivityType.SET_ESCALATING.value
            ).exists()
            assert not mock_send_unignored_robust.called


class SnoozeTestMixin(BasePostProgressGroupMixin):
    @patch("sentry.signals.issue_unignored.send_robust")
    @patch("sentry.rules.processing.processor.RuleProcessor")
    def test_invalidates_snooze(self, mock_processor, mock_send_unignored_robust):
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        group = event.group
        # Check for has_reappeared=False if is_new=True
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        assert GroupInbox.objects.filter(group=group, reason=GroupInboxReason.NEW.value).exists()
        GroupInbox.objects.filter(group=group).delete()  # Delete so it creates the UNIGNORED entry.
        Activity.objects.filter(group=group).delete()
        mock_processor.assert_called_with(EventMatcher(event), True, False, True, False, False)
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        group.status = GroupStatus.IGNORED
        group.substatus = GroupSubStatus.UNTIL_CONDITION_MET
        group.save(update_fields=["status", "substatus"])
        snooze = GroupSnooze.objects.create(group=group, until=timezone.now() - timedelta(hours=1))
        # Check for has_reappeared=True if is_new=False
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        mock_processor.assert_called_with(EventMatcher(event), False, False, True, True, False)
        assert not GroupSnooze.objects.filter(id=snooze.id).exists()
        group.refresh_from_db()
        assert group.status == GroupStatus.UNRESOLVED
        assert group.substatus == GroupSubStatus.ONGOING
        assert GroupInbox.objects.filter(
            group=group, reason=GroupInboxReason.ONGOING.value
        ).exists()
        assert Activity.objects.filter(
            group=group, project=group.project, type=ActivityType.SET_UNRESOLVED.value
        ).exists()
        assert mock_send_unignored_robust.called

    @override_settings(SENTRY_BUFFER="sentry.buffer.redis.RedisBuffer")
    @patch("sentry.signals.issue_unignored.send_robust")
    @patch("sentry.rules.processing.processor.RuleProcessor")
    def test_invalidates_snooze_with_buffers(self, mock_processor, send_robust):
        redis_buffer = RedisBuffer()
        with (
            mock.patch("sentry.buffer.backend.get", redis_buffer.get),
            mock.patch("sentry.buffer.backend.incr", redis_buffer.incr),
        ):
            event = self.create_event(
                data={"message": "testing", "fingerprint": ["group-1"]}, project_id=self.project.id
            )
            event_2 = self.create_event(
                data={"message": "testing", "fingerprint": ["group-1"]}, project_id=self.project.id
            )
            group = event.group
            group.times_seen = 50
            group.status = GroupStatus.IGNORED
            group.substatus = GroupSubStatus.UNTIL_CONDITION_MET
            group.save(update_fields=["times_seen", "status", "substatus"])
            snooze = GroupSnooze.objects.create(group=group, count=100, state={"times_seen": 0})
            self.call_post_process_group(
                is_new=False,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
            )
            assert GroupSnooze.objects.filter(id=snooze.id).exists()
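            # The snooze requires 100 more events on top of state={"times_seen": 0}; the
            # buffered increment below should push the group's effective count (50 + 60)
            # past that threshold, so the second post_process call clears the snooze.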
            buffer.backend.incr(Group, {"times_seen": 60}, filters={"id": event.group.id})
            self.call_post_process_group(
                is_new=False,
                is_regression=False,
                is_new_group_environment=True,
                event=event_2,
            )
            assert not GroupSnooze.objects.filter(id=snooze.id).exists()

    @patch("sentry.rules.processing.processor.RuleProcessor")
    def test_maintains_valid_snooze(self, mock_processor):
        event = self.create_event(data={}, project_id=self.project.id)
        group = event.group
        assert group.status == GroupStatus.UNRESOLVED
        assert group.substatus == GroupSubStatus.ONGOING
        snooze = GroupSnooze.objects.create(group=group, until=timezone.now() + timedelta(hours=1))
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        mock_processor.assert_called_with(EventMatcher(event), True, False, True, False, False)
        assert GroupSnooze.objects.filter(id=snooze.id).exists()
        group.refresh_from_db()
        assert group.status == GroupStatus.UNRESOLVED
        assert group.substatus == GroupSubStatus.NEW

    @patch("sentry.issues.escalating.is_escalating", return_value=(True, 0))
    def test_forecast_in_activity(self, mock_is_escalating):
        """
        Test that the forecast is added to the activity for escalating issues that were
        previously ignored until_escalating.
        """
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        group = event.group
        group.status = GroupStatus.IGNORED
        group.substatus = GroupSubStatus.UNTIL_ESCALATING
        group.save()
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        assert Activity.objects.filter(
            group=group,
            project=group.project,
            type=ActivityType.SET_ESCALATING.value,
            data={"event_id": event.event_id, "forecast": 0},
        ).exists()

    @with_feature("projects:first-event-severity-new-escalation")
    @patch("sentry.issues.escalating.is_escalating")
    def test_skip_escalation_logic_for_new_groups(self, mock_is_escalating):
        """
        Test that we skip checking escalation in the process_snoozes job if the group is
        less than a day old.
        """
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        group = event.group
        group.status = GroupStatus.IGNORED
        group.substatus = GroupSubStatus.UNTIL_ESCALATING
        group.update(first_seen=timezone.now() - timedelta(hours=1))
        group.save()
        self.call_post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        mock_is_escalating.assert_not_called()


@patch("sentry.utils.sdk_crashes.sdk_crash_detection.sdk_crash_detection")
class SDKCrashMonitoringTestMixin(BasePostProgressGroupMixin):
    @with_feature("organizations:sdk-crash-detection")
    @override_options(
        {
            "issues.sdk_crash_detection.cocoa.project_id": 1234,
            "issues.sdk_crash_detection.cocoa.sample_rate": 1.0,
            "issues.sdk_crash_detection.react-native.project_id": 12345,
            "issues.sdk_crash_detection.react-native.sample_rate": 1.0,
            "issues.sdk_crash_detection.react-native.organization_allowlist": [1],
        }
    )
    def test_sdk_crash_monitoring_is_called(self, mock_sdk_crash_detection):
        event = self.create_event(
            data={"message": "testing"},
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        mock_sdk_crash_detection.detect_sdk_crash.assert_called_once()
        args = mock_sdk_crash_detection.detect_sdk_crash.call_args[-1]
        assert args["event"].project.id == event.project.id
        assert len(args["configs"]) == 2
        cocoa_config = args["configs"][0]
        assert cocoa_config.sdk_name == SdkName.Cocoa
        assert cocoa_config.project_id == 1234
        assert cocoa_config.sample_rate == 1.0
        assert cocoa_config.organization_allowlist == []
        react_native_config = args["configs"][1]
        assert react_native_config.sdk_name == SdkName.ReactNative
        assert react_native_config.project_id == 12345
        assert react_native_config.sample_rate == 1.0
        assert react_native_config.organization_allowlist == [1]

    @with_feature("organizations:sdk-crash-detection")
    @override_options(
        {
            "issues.sdk_crash_detection.cocoa.project_id": 1234,
            "issues.sdk_crash_detection.cocoa.sample_rate": 0.0,
        }
    )
    def test_sdk_crash_monitoring_not_called_without_sample_rate(self, mock_sdk_crash_detection):
        event = self.create_event(
            data={"message": "testing"},
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        mock_sdk_crash_detection.detect_sdk_crash.assert_not_called()

    def test_sdk_crash_monitoring_is_not_called_with_disabled_feature(
        self, mock_sdk_crash_detection
    ):
        event = self.create_event(
            data={"message": "testing"},
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        mock_sdk_crash_detection.detect_sdk_crash.assert_not_called()

    @override_options(
        {
            "issues.sdk_crash_detection.cocoa.project_id": None,
        }
    )
    @with_feature("organizations:sdk-crash-detection")
    def test_sdk_crash_monitoring_is_not_called_without_project_id(self, mock_sdk_crash_detection):
        event = self.create_event(
            data={"message": "testing"},
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        mock_sdk_crash_detection.detect_sdk_crash.assert_not_called()


@mock.patch.object(replays_kafka, "get_kafka_producer_cluster_options")
@mock.patch.object(replays_kafka, "KafkaPublisher")
@mock.patch("sentry.utils.metrics.incr")
class ReplayLinkageTestMixin(BasePostProgressGroupMixin):
    def test_replay_linkage(self, incr, kafka_producer, kafka_publisher):
        replay_id = uuid.uuid4().hex
        event = self.create_event(
            data={"message": "testing", "contexts": {"replay": {"replay_id": replay_id}}},
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        assert kafka_producer.return_value.publish.call_count == 1
        assert kafka_producer.return_value.publish.call_args[0][0] == "ingest-replay-events"
        ret_value = json.loads(kafka_producer.return_value.publish.call_args[0][1])
        assert ret_value["type"] == "replay_event"
        assert ret_value["start_time"]
        assert ret_value["replay_id"] == replay_id
        assert ret_value["project_id"] == self.project.id
        assert ret_value["segment_id"] is None
        assert ret_value["retention_days"] == 90
        assert ret_value["payload"] == {
            "type": "event_link",
            "replay_id": replay_id,
            "error_id": event.event_id,
            "timestamp": int(event.datetime.timestamp()),
            "event_hash": str(uuid.UUID(md5((event.event_id).encode("utf-8")).hexdigest())),
        }
        incr.assert_any_call("post_process.process_replay_link.id_sampled")
        incr.assert_any_call("post_process.process_replay_link.id_exists")
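        # event_hash deterministically maps the error event id into UUID space (an md5
        # digest reinterpreted as a UUID), which presumably lets the replay consumer
        # dedupe repeated event_link payloads for the same error.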

    def test_replay_linkage_with_tag(self, incr, kafka_producer, kafka_publisher):
        replay_id = uuid.uuid4().hex
        event = self.create_event(
            data={"message": "testing", "tags": {"replayId": replay_id}},
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        assert kafka_producer.return_value.publish.call_count == 1
        assert kafka_producer.return_value.publish.call_args[0][0] == "ingest-replay-events"
        ret_value = json.loads(kafka_producer.return_value.publish.call_args[0][1])
        assert ret_value["type"] == "replay_event"
        assert ret_value["start_time"]
        assert ret_value["replay_id"] == replay_id
        assert ret_value["project_id"] == self.project.id
        assert ret_value["segment_id"] is None
        assert ret_value["retention_days"] == 90
        assert ret_value["payload"] == {
            "type": "event_link",
            "replay_id": replay_id,
            "error_id": event.event_id,
            "timestamp": int(event.datetime.timestamp()),
            "event_hash": str(uuid.UUID(md5((event.event_id).encode("utf-8")).hexdigest())),
        }
        incr.assert_any_call("post_process.process_replay_link.id_sampled")
        incr.assert_any_call("post_process.process_replay_link.id_exists")

    def test_replay_linkage_with_tag_pii_scrubbed(self, incr, kafka_producer, kafka_publisher):
        event = self.create_event(
            data={"message": "testing", "tags": {"replayId": "***"}},
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        assert kafka_producer.return_value.publish.call_count == 0

    def test_no_replay(self, incr, kafka_producer, kafka_publisher):
        event = self.create_event(
            data={"message": "testing"},
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        assert kafka_producer.return_value.publish.call_count == 0
        incr.assert_any_call("post_process.process_replay_link.id_sampled")

    def test_0_sample_rate_replays(self, incr, kafka_producer, kafka_publisher):
        event = self.create_event(
            data={"message": "testing"},
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        assert kafka_producer.return_value.publish.call_count == 0
        for args, _ in incr.call_args_list:
            # A one-tuple is required here: comparing the args tuple against a bare
            # string can never be equal, which would make this assertion vacuous.
            self.assertNotEqual(args, ("post_process.process_replay_link.id_sampled",))


class DetectNewEscalationTestMixin(BasePostProgressGroupMixin):
    @patch("sentry.tasks.post_process.run_post_process_job", side_effect=run_post_process_job)
    @with_feature("projects:issue-priority")
    def test_has_escalated(self, mock_run_post_process_job):
        event = self.create_event(data={}, project_id=self.project.id)
        group = event.group
        group.update(
            first_seen=timezone.now() - timedelta(hours=1),
            times_seen=10,
            priority=PriorityLevel.LOW,
        )
        event.group = Group.objects.get(id=group.id)
        with self.feature("projects:first-event-severity-new-escalation"):
            with patch("sentry.issues.issue_velocity.calculate_threshold", return_value=9):
                self.call_post_process_group(
                    is_new=True,
                    is_regression=False,
                    is_new_group_environment=True,
                    event=event,
                )
        job = mock_run_post_process_job.call_args[0][0]
        assert job["has_escalated"]
        group.refresh_from_db()
        assert group.substatus == GroupSubStatus.ESCALATING
        assert group.priority == PriorityLevel.MEDIUM
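        # With 10 events in the group's first hour and a velocity threshold of 9, the
        # hourly rate clears the threshold, so the group is marked ESCALATING and its
        # priority appears to be bumped one level (LOW -> MEDIUM) by the issue-priority flow.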

    @patch("sentry.issues.issue_velocity.get_latest_threshold", return_value=1)
    @patch("sentry.tasks.post_process.run_post_process_job", side_effect=run_post_process_job)
    @with_feature("projects:issue-priority")
    def test_has_escalated_no_flag(self, mock_run_post_process_job, mock_threshold):
        event = self.create_event(data={}, project_id=self.project.id)
        group = event.group
        group.update(first_seen=timezone.now() - timedelta(hours=1), times_seen=10000)
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        job = mock_run_post_process_job.call_args[0][0]
        assert not job["has_escalated"]
        group.refresh_from_db()
        assert group.substatus == GroupSubStatus.NEW
        assert group.priority == PriorityLevel.HIGH

    @patch("sentry.issues.issue_velocity.get_latest_threshold")
    @patch("sentry.tasks.post_process.run_post_process_job", side_effect=run_post_process_job)
    @with_feature("projects:issue-priority")
    def test_has_escalated_old(self, mock_run_post_process_job, mock_threshold):
        event = self.create_event(data={}, project_id=self.project.id)
        group = event.group
        group.update(first_seen=timezone.now() - timedelta(days=2), times_seen=10000)
        with self.feature("projects:first-event-severity-new-escalation"):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
            )
        mock_threshold.assert_not_called()
        job = mock_run_post_process_job.call_args[0][0]
        assert not job["has_escalated"]
        group.refresh_from_db()
        assert group.substatus == GroupSubStatus.NEW
        assert group.priority == PriorityLevel.HIGH

    @patch("sentry.issues.issue_velocity.get_latest_threshold", return_value=11)
    @patch("sentry.tasks.post_process.run_post_process_job", side_effect=run_post_process_job)
    @with_feature("projects:issue-priority")
    def test_has_not_escalated(self, mock_run_post_process_job, mock_threshold):
        event = self.create_event(data={}, project_id=self.project.id)
        group = event.group
        group.update(
            first_seen=timezone.now() - timedelta(hours=1),
            times_seen=10,
            priority=PriorityLevel.LOW,
        )
        with self.feature("projects:first-event-severity-new-escalation"):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
            )
        mock_threshold.assert_called()
        job = mock_run_post_process_job.call_args[0][0]
        assert not job["has_escalated"]
        group.refresh_from_db()
        assert group.substatus == GroupSubStatus.NEW
        assert group.priority == PriorityLevel.LOW

    @patch("sentry.issues.issue_velocity.get_latest_threshold")
    @patch("sentry.tasks.post_process.run_post_process_job", side_effect=run_post_process_job)
    def test_has_escalated_locked(self, mock_run_post_process_job, mock_threshold):
        event = self.create_event(data={}, project_id=self.project.id)
        group = event.group
        group.update(first_seen=timezone.now() - timedelta(hours=1), times_seen=10000)
        lock = locks.get(f"detect_escalation:{group.id}", duration=10, name="detect_escalation")
        with self.feature("projects:first-event-severity-new-escalation"), lock.acquire():
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
            )
        mock_threshold.assert_not_called()
        job = mock_run_post_process_job.call_args[0][0]
        assert not job["has_escalated"]
        group.refresh_from_db()
        assert group.substatus == GroupSubStatus.NEW

    @patch("sentry.issues.issue_velocity.get_latest_threshold")
    @patch("sentry.tasks.post_process.run_post_process_job", side_effect=run_post_process_job)
    def test_has_escalated_already_escalated(self, mock_run_post_process_job, mock_threshold):
        event = self.create_event(data={}, project_id=self.project.id)
        group = event.group
        self.call_post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            event=event,
        )
        group.update(
            first_seen=timezone.now() - timedelta(hours=1),
            times_seen=10000,
            substatus=GroupSubStatus.ESCALATING,
            priority=PriorityLevel.MEDIUM,
        )
        with self.feature("projects:first-event-severity-new-escalation"):
            self.call_post_process_group(
                is_new=False,
                is_regression=False,
                is_new_group_environment=False,
                event=event,
            )
        mock_threshold.assert_not_called()
        job = mock_run_post_process_job.call_args[0][0]
        assert not job["has_escalated"]
        group.refresh_from_db()
        assert group.substatus == GroupSubStatus.ESCALATING
        assert group.priority == PriorityLevel.MEDIUM

    @patch("sentry.issues.issue_velocity.get_latest_threshold")
    @patch("sentry.tasks.post_process.run_post_process_job", side_effect=run_post_process_job)
    def test_does_not_escalate_non_new_substatus(self, mock_run_post_process_job, mock_threshold):
        for substatus, status in GROUP_SUBSTATUS_TO_STATUS_MAP.items():
            if substatus == GroupSubStatus.NEW:
                continue
            event = self.create_event(data={}, project_id=self.project.id)
            group = event.group
            group.update(
                first_seen=timezone.now() - timedelta(hours=1),
                times_seen=10000,
                status=status,
                substatus=substatus,
            )
            group.save()
            with self.feature("projects:first-event-severity-new-escalation"):
                self.call_post_process_group(
                    is_new=False,  # when true, post_process sets the substatus to NEW
                    is_regression=False,
                    is_new_group_environment=True,
                    event=event,
                )
            mock_threshold.assert_not_called()
            job = mock_run_post_process_job.call_args[0][0]
            assert not job["has_escalated"]
            group.refresh_from_db()
            assert group.substatus == substatus

    @patch("sentry.issues.issue_velocity.get_latest_threshold", return_value=8)
    @patch("sentry.tasks.post_process.run_post_process_job", side_effect=run_post_process_job)
    def test_no_escalation_less_than_floor(self, mock_run_post_process_job, mock_threshold):
        event = self.create_event(data={}, project_id=self.project.id)
        group = event.group
        group.update(first_seen=timezone.now() - timedelta(hours=1), times_seen=9)
        event.group = Group.objects.get(id=group.id)
        with self.feature("projects:first-event-severity-new-escalation"):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
            )
        mock_threshold.assert_not_called()
        job = mock_run_post_process_job.call_args[0][0]
        assert not job["has_escalated"]
        group.refresh_from_db()
        assert group.substatus == GroupSubStatus.NEW

    @patch("sentry.issues.issue_velocity.get_latest_threshold", return_value=11)
    @patch("sentry.tasks.post_process.run_post_process_job", side_effect=run_post_process_job)
    def test_has_not_escalated_less_than_an_hour(self, mock_run_post_process_job, mock_threshold):
        event = self.create_event(data={}, project_id=self.project.id)
        group = event.group
        # the group is less than an hour old, but we use 1 hr for the hourly event rate anyway
        group.update(first_seen=timezone.now() - timedelta(minutes=1), times_seen=10)
        event.group = Group.objects.get(id=group.id)
        with self.feature("projects:first-event-severity-new-escalation"):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
            )
        job = mock_run_post_process_job.call_args[0][0]
        assert not job["has_escalated"]
        group.refresh_from_db()
        assert group.substatus == GroupSubStatus.NEW

    @patch("sentry.issues.issue_velocity.get_latest_threshold", return_value=0)
    @patch("sentry.tasks.post_process.run_post_process_job", side_effect=run_post_process_job)
    def test_zero_escalation_rate(self, mock_run_post_process_job, mock_threshold):
        event = self.create_event(data={}, project_id=self.project.id)
        group = event.group
        group.update(first_seen=timezone.now() - timedelta(hours=1), times_seen=10000)
        with self.feature("projects:first-event-severity-new-escalation"):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
            )
        mock_threshold.assert_called()
        job = mock_run_post_process_job.call_args[0][0]
        assert not job["has_escalated"]
        group.refresh_from_db()
        assert group.substatus == GroupSubStatus.NEW


class PostProcessGroupErrorTest(
    TestCase,
    AssignmentTestMixin,
    ProcessCommitsTestMixin,
    CorePostProcessGroupTestMixin,
    DeriveCodeMappingsProcessGroupTestMixin,
    InboxTestMixin,
    ResourceChangeBoundsTestMixin,
    RuleProcessorTestMixin,
    ServiceHooksTestMixin,
    SnoozeTestMixin,
    SnoozeTestSkipSnoozeMixin,
    SDKCrashMonitoringTestMixin,
    ReplayLinkageTestMixin,
    DetectNewEscalationTestMixin,
):
    def setUp(self):
        super().setUp()
        clear_replay_publisher()

    def create_event(self, data, project_id, assert_no_errors=True):
        return self.store_event(data=data, project_id=project_id, assert_no_errors=assert_no_errors)

    def call_post_process_group(
        self, is_new, is_regression, is_new_group_environment, event, cache_key=None
    ):
        if cache_key is None:
            cache_key = write_event_to_cache(event)
        post_process_group(
            is_new=is_new,
            is_regression=is_regression,
            is_new_group_environment=is_new_group_environment,
            cache_key=cache_key,
            group_id=event.group_id,
            project_id=event.project_id,
        )
        return cache_key
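
    # Concrete test classes like this one supply create_event and call_post_process_group;
    # the mixins listed in the class bases provide the actual test methods, so the same
    # behavioral suite runs against error, performance, generic, and feedback issue streams.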

    @with_feature("organizations:escalating-metrics-backend")
    @patch("sentry.sentry_metrics.client.generic_metrics_backend.counter")
    @patch("sentry.utils.metrics.incr")
    @patch("sentry.utils.metrics.timer")
    def test_generic_metrics_backend_counter(
        self, metric_timer_mock, metric_incr_mock, generic_metrics_backend_mock
    ):
        min_ago = iso_format(before_now(minutes=1))
        event = self.create_event(
            data={
                "exception": {
                    "values": [
                        {
                            "type": "ZeroDivisionError",
                            "stacktrace": {"frames": [{"function": f} for f in ["a", "b"]]},
                        }
                    ]
                },
                "timestamp": min_ago,
                "start_timestamp": min_ago,
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=True, is_regression=False, is_new_group_environment=True, event=event
        )
        assert generic_metrics_backend_mock.call_count == 1
        metric_incr_mock.assert_any_call(
            "sentry.tasks.post_process.post_process_group.completed",
            tags={"issue_category": "error", "pipeline": "process_rules"},
        )
        metric_timer_mock.assert_any_call(
            "tasks.post_process.run_post_process_job.pipeline.duration",
            tags={
                "pipeline": "process_rules",
                "issue_category": "error",
                "is_reprocessed": False,
            },
        )


class PostProcessGroupPerformanceTest(
    TestCase,
    SnubaTestCase,
    CorePostProcessGroupTestMixin,
    InboxTestMixin,
    RuleProcessorTestMixin,
    SnoozeTestMixin,
    SnoozeTestSkipSnoozeMixin,
    PerformanceIssueTestCase,
):
    def create_event(self, data, project_id, assert_no_errors=True):
        fingerprint = data["fingerprint"][0] if data.get("fingerprint") else "some_group"
        fingerprint = f"{PerformanceNPlusOneGroupType.type_id}-{fingerprint}"
        return self.create_performance_issue(fingerprint=fingerprint)

    def call_post_process_group(
        self, is_new, is_regression, is_new_group_environment, event, cache_key=None
    ):
        if cache_key is None:
            cache_key = write_event_to_cache(event)
        with self.feature(PerformanceNPlusOneGroupType.build_post_process_group_feature_name()):
            post_process_group(
                is_new=is_new,
                is_regression=is_regression,
                is_new_group_environment=is_new_group_environment,
                cache_key=cache_key,
                group_id=event.group_id,
                project_id=event.project_id,
            )
        return cache_key

    @patch("sentry.sentry_metrics.client.generic_metrics_backend.counter")
    @patch("sentry.tasks.post_process.run_post_process_job")
    @patch("sentry.rules.processing.processor.RuleProcessor")
    @patch("sentry.signals.transaction_processed.send_robust")
    @patch("sentry.signals.event_processed.send_robust")
    def test_process_transaction_event_with_no_group(
        self,
        event_processed_signal_mock,
        transaction_processed_signal_mock,
        mock_processor,
        run_post_process_job_mock,
        generic_metrics_backend_mock,
    ):
        min_ago = before_now(minutes=1)
        event = store_transaction(
            test_case=self,
            project_id=self.project.id,
            user_id=self.create_user(name="user1").name,
            fingerprint=[],
            environment=None,
            timestamp=min_ago,
        )
        assert len(event.groups) == 0
        cache_key = write_event_to_cache(event)
        post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            cache_key=cache_key,
            group_id=None,
            group_states=None,
        )
        assert transaction_processed_signal_mock.call_count == 1
        assert event_processed_signal_mock.call_count == 0
        assert mock_processor.call_count == 0
        assert run_post_process_job_mock.call_count == 0
        assert generic_metrics_backend_mock.call_count == 0

    @patch("sentry.tasks.post_process.handle_owner_assignment")
    @patch("sentry.tasks.post_process.handle_auto_assignment")
    @patch("sentry.tasks.post_process.process_rules")
    @patch("sentry.tasks.post_process.run_post_process_job")
    @patch("sentry.rules.processing.processor.RuleProcessor")
    @patch("sentry.signals.transaction_processed.send_robust")
    @patch("sentry.signals.event_processed.send_robust")
    def test_full_pipeline_with_group_states(
        self,
        event_processed_signal_mock,
        transaction_processed_signal_mock,
        mock_processor,
        run_post_process_job_mock,
        mock_process_rules,
        mock_handle_auto_assignment,
        mock_handle_owner_assignment,
    ):
        event = self.create_performance_issue()
        assert event.group
        # TODO(jangjodi): Fix this ordering test. The side_effects should be functions
        # (lambdas) that record each call, but post-processing is async, so the assert
        # below would fail because it does not wait for the side effects to run.
        call_order = [mock_handle_owner_assignment, mock_handle_auto_assignment, mock_process_rules]
        mock_handle_owner_assignment.side_effect = None
        mock_handle_auto_assignment.side_effect = None
        mock_process_rules.side_effect = None
        post_process_group(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
            cache_key="dummykey",
            group_id=event.group_id,
            occurrence_id=event.occurrence_id,
            project_id=self.project.id,
        )
        assert transaction_processed_signal_mock.call_count == 1
        assert event_processed_signal_mock.call_count == 0
        assert mock_processor.call_count == 0
        assert run_post_process_job_mock.call_count == 1
        assert call_order == [
            mock_handle_owner_assignment,
            mock_handle_auto_assignment,
            mock_process_rules,
        ]


class PostProcessGroupAggregateEventTest(
    TestCase,
    SnubaTestCase,
    CorePostProcessGroupTestMixin,
    SnoozeTestSkipSnoozeMixin,
    PerformanceIssueTestCase,
):
    def create_event(self, data, project_id):
        group = self.create_group(
            type=PerformanceP95EndpointRegressionGroupType.type_id,
        )
        event = self.store_event(data=data, project_id=project_id)
        event.group = group
        event = event.for_group(group)
        return event

    def call_post_process_group(
        self, is_new, is_regression, is_new_group_environment, event, cache_key=None
    ):
        if cache_key is None:
            cache_key = write_event_to_cache(event)
        with self.feature(
            PerformanceP95EndpointRegressionGroupType.build_post_process_group_feature_name()
        ):
            post_process_group(
                is_new=is_new,
                is_regression=is_regression,
                is_new_group_environment=is_new_group_environment,
                cache_key=cache_key,
                group_id=event.group_id,
                project_id=event.project_id,
            )
        return cache_key


class TransactionClustererTestCase(TestCase, SnubaTestCase):
    @patch("sentry.ingest.transaction_clusterer.datasource.redis._record_sample")
    def test_process_transaction_event_clusterer(
        self,
        mock_store_transaction_name,
    ):
        min_ago = before_now(minutes=1)
        event = process_event(
            data={
                "project": self.project.id,
                "event_id": "b" * 32,
                "transaction": "foo",
                "start_timestamp": str(min_ago),
                "timestamp": str(min_ago),
                "type": "transaction",
                "transaction_info": {
                    "source": "url",
                },
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            group_id=0,
        )
        cache_key = write_event_to_cache(event)
        post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            cache_key=cache_key,
            group_id=None,
        )
        assert mock_store_transaction_name.mock_calls == [
            mock.call(ClustererNamespace.TRANSACTIONS, self.project, "foo")
        ]


class PostProcessGroupGenericTest(
    TestCase,
    SnubaTestCase,
    OccurrenceTestMixin,
    CorePostProcessGroupTestMixin,
    InboxTestMixin,
    RuleProcessorTestMixin,
    SnoozeTestMixin,
):
    def create_event(self, data, project_id, assert_no_errors=True):
        data["type"] = "generic"
        event = self.store_event(
            data=data, project_id=project_id, assert_no_errors=assert_no_errors
        )
        occurrence_data = self.build_occurrence_data(event_id=event.event_id, project_id=project_id)
        occurrence, group_info = save_issue_occurrence(occurrence_data, event)
        assert group_info is not None
        group_event = event.for_group(group_info.group)
        group_event.occurrence = occurrence
        return group_event

    def call_post_process_group(
        self, is_new, is_regression, is_new_group_environment, event, cache_key=None
    ):
        with self.feature(ProfileFileIOGroupType.build_post_process_group_feature_name()):
            post_process_group(
                is_new=is_new,
                is_regression=is_regression,
                is_new_group_environment=is_new_group_environment,
                cache_key=None,
                group_id=event.group_id,
                occurrence_id=event.occurrence.id,
                project_id=event.group.project_id,
            )
        return cache_key

    def test_issueless(self):
        # Skip this test since there's no way to have issueless events in the issue platform
        pass

    def test_no_cache_abort(self):
        # We don't use the cache for generic issues, so skip this test
        pass

    @patch("sentry.rules.processing.processor.RuleProcessor")
    def test_occurrence_deduping(self, mock_processor):
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        self.call_post_process_group(
            is_new=True,
            is_regression=True,
            is_new_group_environment=False,
            event=event,
        )
        assert mock_processor.call_count == 1
        mock_processor.assert_called_with(EventMatcher(event), True, True, False, False, False)
        # Calling this again should do nothing, since we've already processed this occurrence.
        self.call_post_process_group(
            is_new=False,
            is_regression=True,
            is_new_group_environment=False,
            event=event,
        )
        # Make sure we haven't called this again, since we should exit early.
        assert mock_processor.call_count == 1
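        # Deduping is presumably keyed on the occurrence id rather than the event id, so
        # replaying the same occurrence through post_process_group exits before the rule
        # processor is constructed a second time.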

    @patch("sentry.tasks.post_process.handle_owner_assignment")
    @patch("sentry.tasks.post_process.handle_auto_assignment")
    @patch("sentry.tasks.post_process.process_rules")
    @patch("sentry.tasks.post_process.run_post_process_job")
    @patch("sentry.rules.processing.processor.RuleProcessor")
    @patch("sentry.signals.event_processed.send_robust")
    @patch("sentry.utils.snuba.raw_query")
    def test_full_pipeline_with_group_states(
        self,
        snuba_raw_query_mock,
        event_processed_signal_mock,
        mock_processor,
        run_post_process_job_mock,
        mock_process_rules,
        mock_handle_auto_assignment,
        mock_handle_owner_assignment,
    ):
        event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
        call_order = [mock_handle_owner_assignment, mock_handle_auto_assignment, mock_process_rules]
        mock_handle_owner_assignment.side_effect = None
        mock_handle_auto_assignment.side_effect = None
        mock_process_rules.side_effect = None
        self.call_post_process_group(
            is_new=False,
            is_regression=True,
            is_new_group_environment=False,
            event=event,
        )
        assert event_processed_signal_mock.call_count == 0
        assert mock_processor.call_count == 0
        assert run_post_process_job_mock.call_count == 1
        assert call_order == [
            mock_handle_owner_assignment,
            mock_handle_auto_assignment,
            mock_process_rules,
        ]
        assert snuba_raw_query_mock.call_count == 0

    @pytest.mark.skip(reason="those tests do not work with the given call_post_process_group impl")
    def test_processing_cache_cleared(self):
        pass

    @pytest.mark.skip(reason="those tests do not work with the given call_post_process_group impl")
    def test_processing_cache_cleared_with_commits(self):
        pass


class PostProcessGroupFeedbackTest(
    TestCase,
    SnubaTestCase,
    OccurrenceTestMixin,
    CorePostProcessGroupTestMixin,
    InboxTestMixin,
    RuleProcessorTestMixin,
    SnoozeTestMixin,
):
    def create_event(
        self,
        data,
        project_id,
        assert_no_errors=True,
        feedback_type=FeedbackCreationSource.NEW_FEEDBACK_ENVELOPE,
        is_spam=False,
    ):
        data["type"] = "generic"
        event = self.store_event(
            data=data, project_id=project_id, assert_no_errors=assert_no_errors
        )
        evidence_data = {
            "Test": 123,
            "source": feedback_type.value,
        }
        evidence_display = [
            {"name": "hi", "value": "bye", "important": True},
            {"name": "what", "value": "where", "important": False},
        ]
        if is_spam:
            evidence_data["is_spam"] = True
        occurrence_data = self.build_occurrence_data(
            event_id=event.event_id,
            project_id=project_id,
            **{
                "id": uuid.uuid4().hex,
                "fingerprint": ["c" * 32],
                "issue_title": "User Feedback",
                "subtitle": "it was bad",
                "culprit": "api/123",
                "resource_id": "1234",
                "evidence_data": evidence_data,
                "evidence_display": evidence_display,
                "type": FeedbackGroup.type_id,
                "detection_time": datetime.now().timestamp(),
                "level": "info",
            },
        )
        occurrence, group_info = save_issue_occurrence(occurrence_data, event)
        assert group_info is not None
        group_event = event.for_group(group_info.group)
        group_event.occurrence = occurrence
        return group_event

    @override_options({"feedback.spam-detection-actions": True})
    def call_post_process_group(
        self, is_new, is_regression, is_new_group_environment, event, cache_key=None
    ):
        with self.feature(FeedbackGroup.build_post_process_group_feature_name()):
            post_process_group(
                is_new=is_new,
                is_regression=is_regression,
                is_new_group_environment=is_new_group_environment,
                cache_key=None,
                group_id=event.group_id,
                occurrence_id=event.occurrence.id,
                project_id=event.group.project_id,
            )
        return cache_key
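
    # The tests below patch GROUP_CATEGORY_POST_PROCESS_PIPELINE so that the FEEDBACK
    # category runs only a feedback_filter_decorator-wrapped mock; whether the mock is
    # invoked therefore tells us whether the filter let the event through.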

    @override_options({"feedback.spam-detection-actions": True})
    def test_not_ran_if_crash_report_option_disabled(self):
        self.project.update_option("sentry:feedback_user_report_notifications", False)
        event = self.create_event(
            data={},
            project_id=self.project.id,
            feedback_type=FeedbackCreationSource.CRASH_REPORT_EMBED_FORM,
        )
        mock_process_func = Mock()
        with patch(
            "sentry.tasks.post_process.GROUP_CATEGORY_POST_PROCESS_PIPELINE",
            {
                GroupCategory.FEEDBACK: [
                    feedback_filter_decorator(mock_process_func),
                ]
            },
        ):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
                cache_key="total_rubbish",
            )
        assert mock_process_func.call_count == 0

    @override_options({"feedback.spam-detection-actions": True})
    def test_not_ran_if_spam(self):
        event = self.create_event(
            data={},
            project_id=self.project.id,
            feedback_type=FeedbackCreationSource.CRASH_REPORT_EMBED_FORM,
            is_spam=True,
        )
        mock_process_func = Mock()
        with patch(
            "sentry.tasks.post_process.GROUP_CATEGORY_POST_PROCESS_PIPELINE",
            {
                GroupCategory.FEEDBACK: [
                    feedback_filter_decorator(mock_process_func),
                ]
            },
        ):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
                cache_key="total_rubbish",
            )
        assert mock_process_func.call_count == 0

    @override_options({"feedback.spam-detection-actions": True})
    def test_ran_if_crash_report_project_option_enabled(self):
        self.project.update_option("sentry:feedback_user_report_notifications", True)
        event = self.create_event(
            data={},
            project_id=self.project.id,
            feedback_type=FeedbackCreationSource.CRASH_REPORT_EMBED_FORM,
        )
        mock_process_func = Mock()
        with patch(
            "sentry.tasks.post_process.GROUP_CATEGORY_POST_PROCESS_PIPELINE",
            {
                GroupCategory.FEEDBACK: [
                    feedback_filter_decorator(mock_process_func),
                ]
            },
        ):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
                cache_key="total_rubbish",
            )
        assert mock_process_func.call_count == 1

    @override_options({"feedback.spam-detection-actions": True})
    def test_not_ran_if_crash_report_setting_option_epoch_0(self):
        self.project.update_option("sentry:option-epoch", 1)
        event = self.create_event(
            data={},
            project_id=self.project.id,
            feedback_type=FeedbackCreationSource.CRASH_REPORT_EMBED_FORM,
        )
        mock_process_func = Mock()
        with patch(
            "sentry.tasks.post_process.GROUP_CATEGORY_POST_PROCESS_PIPELINE",
            {
                GroupCategory.FEEDBACK: [
                    feedback_filter_decorator(mock_process_func),
                ]
            },
        ):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
                cache_key="total_rubbish",
            )
        assert mock_process_func.call_count == 0

    @override_options({"feedback.spam-detection-actions": True})
    def test_ran_if_default_on_new_projects(self):
        event = self.create_event(
            data={},
            project_id=self.project.id,
            feedback_type=FeedbackCreationSource.CRASH_REPORT_EMBED_FORM,
        )
        mock_process_func = Mock()
        with patch(
            "sentry.tasks.post_process.GROUP_CATEGORY_POST_PROCESS_PIPELINE",
            {
                GroupCategory.FEEDBACK: [
                    feedback_filter_decorator(mock_process_func),
                ]
            },
        ):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
                cache_key="total_rubbish",
            )
        assert mock_process_func.call_count == 1

    @override_options({"feedback.spam-detection-actions": True})
    def test_ran_if_crash_feedback_envelope(self):
        event = self.create_event(
            data={},
            project_id=self.project.id,
            feedback_type=FeedbackCreationSource.NEW_FEEDBACK_ENVELOPE,
        )
        mock_process_func = Mock()
        with patch(
            "sentry.tasks.post_process.GROUP_CATEGORY_POST_PROCESS_PIPELINE",
            {
                GroupCategory.FEEDBACK: [
                    feedback_filter_decorator(mock_process_func),
                ]
            },
        ):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
                cache_key="total_rubbish",
            )
        assert mock_process_func.call_count == 1

    @pytest.mark.skip(
        reason="Skip this test since there's no way to have issueless events in the issue platform"
    )
    def test_issueless(self):
        ...

    def test_no_cache_abort(self):
        # We don't use the cache for generic issues, so skip this test
        pass

    @pytest.mark.skip(reason="those tests do not work with the given call_post_process_group impl")
    def test_processing_cache_cleared(self):
        pass

    @pytest.mark.skip(reason="those tests do not work with the given call_post_process_group impl")
    def test_processing_cache_cleared_with_commits(self):
        pass