- from __future__ import annotations
- import abc
- import time
- import uuid
- from datetime import datetime, timedelta, timezone
- from hashlib import md5
- from typing import Any
- from unittest import mock
- from unittest.mock import Mock, patch
- import pytest
- from django.db import router
- from django.test import override_settings
- from django.utils import timezone as django_timezone
- from sentry import buffer
- from sentry.buffer.redis import RedisBuffer
- from sentry.eventstore.models import Event
- from sentry.eventstore.processing import event_processing_store
- from sentry.ingest.transaction_clusterer import ClustererNamespace
- from sentry.issues.escalating import manage_issue_states
- from sentry.issues.grouptype import PerformanceNPlusOneGroupType, ProfileFileIOGroupType
- from sentry.issues.ingest import save_issue_occurrence
- from sentry.models import (
- Activity,
- Group,
- GroupAssignee,
- GroupInbox,
- GroupInboxReason,
- GroupOwner,
- GroupOwnerType,
- GroupSnooze,
- GroupStatus,
- Integration,
- )
- from sentry.models.activity import ActivityIntegration
- from sentry.models.groupowner import (
- ASSIGNEE_EXISTS_DURATION,
- ASSIGNEE_EXISTS_KEY,
- ISSUE_OWNERS_DEBOUNCE_DURATION,
- ISSUE_OWNERS_DEBOUNCE_KEY,
- )
- from sentry.models.projectownership import ProjectOwnership
- from sentry.models.projectteam import ProjectTeam
- from sentry.ownership.grammar import Matcher, Owner, Rule, dump_schema
- from sentry.replays.lib import kafka as replays_kafka
- from sentry.rules import init_registry
- from sentry.services.hybrid_cloud.user.service import user_service
- from sentry.silo import unguarded_write
- from sentry.tasks.derive_code_mappings import SUPPORTED_LANGUAGES
- from sentry.tasks.merge import merge_groups
- from sentry.tasks.post_process import (
- ISSUE_OWNERS_PER_PROJECT_PER_MIN_RATELIMIT,
- post_process_group,
- process_event,
- )
- from sentry.testutils.cases import BaseTestCase, PerformanceIssueTestCase, SnubaTestCase, TestCase
- from sentry.testutils.helpers import with_feature
- from sentry.testutils.helpers.datetime import before_now, iso_format
- from sentry.testutils.helpers.eventprocessing import write_event_to_cache
- from sentry.testutils.performance_issues.store_transaction import PerfIssueTransactionTestMixin
- from sentry.testutils.silo import region_silo_test
- from sentry.types.activity import ActivityType
- from sentry.types.group import GroupSubStatus
- from sentry.utils import json
- from sentry.utils.cache import cache
- from tests.sentry.issues.test_utils import OccurrenceTestMixin
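- # Helper for mock assertions: Event objects retrieved through different code
- # paths don't compare equal, so EventMatcher compares by event_id (and,
- # optionally, by group). Used below as, for example:
- #   mock_processor.assert_called_once_with(EventMatcher(event), True, False, True, False)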
- class EventMatcher:
- def __init__(self, expected, group=None):
- self.expected = expected
- self.expected_group = group
- def __eq__(self, other):
- matching_id = other.event_id == self.expected.event_id
- if self.expected_group:
- return (
- matching_id
- and self.expected_group == other.group
- and self.expected_group.id == other.group_id
- )
- return matching_id
- class BasePostProgressGroupMixin(BaseTestCase, metaclass=abc.ABCMeta):
- @abc.abstractmethod
- def create_event(self, data, project_id, assert_no_errors=True):
- pass
- @abc.abstractmethod
- def call_post_process_group(
- self, is_new, is_regression, is_new_group_environment, event, cache_key=None
- ):
- pass
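- # Each mixin below defines behavioral tests against these two hooks; concrete
- # test classes (e.g. PostProcessGroupErrorTest near the bottom of this file)
- # supply create_event and call_post_process_group for their issue category.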
- class CorePostProcessGroupTestMixin(BasePostProgressGroupMixin):
- @patch("sentry.rules.processor.RuleProcessor")
- @patch("sentry.tasks.servicehooks.process_service_hook")
- @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
- @patch("sentry.signals.event_processed.send_robust")
- def test_issueless(
- self,
- mock_signal,
- mock_process_resource_change_bound,
- mock_process_service_hook,
- mock_processor,
- ):
- min_ago = iso_format(before_now(minutes=1))
- event = self.store_event(
- data={
- "type": "transaction",
- "timestamp": min_ago,
- "start_timestamp": min_ago,
- "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
- },
- project_id=self.project.id,
- )
- cache_key = write_event_to_cache(event)
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- cache_key=cache_key,
- )
- assert mock_processor.call_count == 0
- assert mock_process_service_hook.call_count == 0
- assert mock_process_resource_change_bound.call_count == 0
- # transaction events do not fire the event_processed signal
- assert mock_signal.call_count == 0
- @patch("sentry.rules.processor.RuleProcessor")
- def test_no_cache_abort(self, mock_processor):
- event = self.create_event(data={}, project_id=self.project.id)
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- cache_key="total-rubbish",
- )
- assert mock_processor.call_count == 0
- def test_processing_cache_cleared(self):
- event = self.create_event(data={}, project_id=self.project.id)
- cache_key = self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- assert event_processing_store.get(cache_key) is None
- def test_processing_cache_cleared_with_commits(self):
- # Regression test to guard against suspect commit calculations breaking the
- # cache
- event = self.create_event(data={}, project_id=self.project.id)
- self.create_commit(repo=self.create_repo())
- cache_key = self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- assert event_processing_store.get(cache_key) is None
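- # The derive-code-mappings task is debounced per project (hourly) and per
- # group (daily); the tests below step a patched time.time() past each window
- # to show when the task is queued again.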
- class DeriveCodeMappingsProcessGroupTestMixin(BasePostProgressGroupMixin):
- def _create_event(
- self,
- data: dict[str, Any],
- project_id: int | None = None,
- ) -> Event:
- data.setdefault("platform", "javascript")
- return self.store_event(data=data, project_id=project_id or self.project.id)
- def _call_post_process_group(self, event: Event) -> None:
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
- def test_derive_invalid_platform(self, mock_derive_code_mappings):
- event = self._create_event({"platform": "elixir"})
- self._call_post_process_group(event)
- assert mock_derive_code_mappings.delay.call_count == 0
- @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
- def test_derive_supported_languages(self, mock_derive_code_mappings):
- for platform in SUPPORTED_LANGUAGES:
- event = self._create_event({"platform": platform})
- self._call_post_process_group(event)
- assert mock_derive_code_mappings.delay.call_count == 1
- @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
- def test_only_maps_a_given_project_once_per_hour(self, mock_derive_code_mappings):
- dogs_project = self.create_project()
- maisey_event = self._create_event(
- {
- "fingerprint": ["themaiseymasieydog"],
- },
- dogs_project.id,
- )
- charlie_event = self._create_event(
- {
- "fingerprint": ["charliebear"],
- },
- dogs_project.id,
- )
- cory_event = self._create_event(
- {
- "fingerprint": ["thenudge"],
- },
- dogs_project.id,
- )
- bodhi_event = self._create_event(
- {
- "fingerprint": ["theescapeartist"],
- },
- dogs_project.id,
- )
- self._call_post_process_group(maisey_event)
- assert mock_derive_code_mappings.delay.call_count == 1
- # second event from project should bail (no increase in call count)
- self._call_post_process_group(charlie_event)
- assert mock_derive_code_mappings.delay.call_count == 1
- # advance the clock 59 minutes, and it should still bail
- with patch("time.time", return_value=time.time() + 60 * 59):
- self._call_post_process_group(cory_event)
- assert mock_derive_code_mappings.delay.call_count == 1
- # now advance the clock 61 minutes, and this time it should go through
- with patch("time.time", return_value=time.time() + 60 * 61):
- self._call_post_process_group(bodhi_event)
- assert mock_derive_code_mappings.delay.call_count == 2
- @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
- def test_only_maps_a_given_issue_once_per_day(self, mock_derive_code_mappings):
- dogs_project = self.create_project()
- maisey_event1 = self._create_event(
- {
- "fingerprint": ["themaiseymaiseydog"],
- },
- dogs_project.id,
- )
- maisey_event2 = self._create_event(
- {
- "fingerprint": ["themaiseymaiseydog"],
- },
- dogs_project.id,
- )
- maisey_event3 = self._create_event(
- {
- "fingerprint": ["themaiseymaiseydog"],
- },
- dogs_project.id,
- )
- maisey_event4 = self._create_event(
- {
- "fingerprint": ["themaiseymaiseydog"],
- },
- dogs_project.id,
- )
- # because of the fingerprint, the events should always end up in the same group,
- # but the rest of the test is bogus if they aren't, so let's be sure
- assert maisey_event1.group_id == maisey_event2.group_id
- assert maisey_event2.group_id == maisey_event3.group_id
- assert maisey_event3.group_id == maisey_event4.group_id
- self._call_post_process_group(maisey_event1)
- assert mock_derive_code_mappings.delay.call_count == 1
- # second event from group should bail (no increase in call count)
- self._call_post_process_group(maisey_event2)
- assert mock_derive_code_mappings.delay.call_count == 1
- # advance the clock 23 hours and 59 minutes, and it should still bail
- with patch("time.time", return_value=time.time() + (60 * 60 * 23) + (60 * 59)):
- self._call_post_process_group(maisey_event3)
- assert mock_derive_code_mappings.delay.call_count == 1
- # now advance the clock 24 hours and 1 minute, and this time it should go through
- with patch("time.time", return_value=time.time() + (60 * 60 * 24) + (60 * 1)):
- self._call_post_process_group(maisey_event4)
- assert mock_derive_code_mappings.delay.call_count == 2
- @patch("sentry.tasks.derive_code_mappings.derive_code_mappings")
- def test_skipping_an_issue_doesnt_mark_it_processed(self, mock_derive_code_mappings):
- dogs_project = self.create_project()
- maisey_event = self._create_event(
- {
- "fingerprint": ["themaiseymasieydog"],
- },
- dogs_project.id,
- )
- charlie_event1 = self._create_event(
- {
- "fingerprint": ["charliebear"],
- },
- dogs_project.id,
- )
- charlie_event2 = self._create_event(
- {
- "fingerprint": ["charliebear"],
- },
- dogs_project.id,
- )
- # because of the fingerprint, the two Charlie events should always end up in the same group,
- # but the rest of the test is bogus if they aren't, so let's be sure
- assert charlie_event1.group_id == charlie_event2.group_id
- self._call_post_process_group(maisey_event)
- assert mock_derive_code_mappings.delay.call_count == 1
- # second event from project should bail (no increase in call count)
- self._call_post_process_group(charlie_event1)
- assert mock_derive_code_mappings.delay.call_count == 1
- # now advance the clock 61 minutes (so the project should clear the cache), and another
- # event from the Charlie group should go through
- with patch("time.time", return_value=time.time() + 60 * 61):
- self._call_post_process_group(charlie_event2)
- assert mock_derive_code_mappings.delay.call_count == 2
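- # As asserted throughout the mixin below, RuleProcessor's positional
- # signature is (event, is_new, is_regression, is_new_group_environment,
- # has_reappeared).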
- class RuleProcessorTestMixin(BasePostProgressGroupMixin):
- @patch("sentry.rules.processor.RuleProcessor")
- def test_rule_processor_backwards_compat(self, mock_processor):
- event = self.create_event(data={}, project_id=self.project.id)
- mock_callback = Mock()
- mock_futures = [Mock()]
- mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- mock_processor.assert_called_once_with(EventMatcher(event), True, False, True, False)
- mock_processor.return_value.apply.assert_called_once_with()
- mock_callback.assert_called_once_with(EventMatcher(event), mock_futures)
- @patch("sentry.rules.processor.RuleProcessor")
- def test_rule_processor(self, mock_processor):
- event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
- mock_callback = Mock()
- mock_futures = [Mock()]
- mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- mock_processor.assert_called_once_with(EventMatcher(event), True, False, True, False)
- mock_processor.return_value.apply.assert_called_once_with()
- mock_callback.assert_called_once_with(EventMatcher(event), mock_futures)
- def test_rule_processor_buffer_values(self):
- # Test that pending buffer values for `times_seen` are applied to the group and that alerts
- # fire as expected
- from sentry.models import Rule
- MOCK_RULES = ("sentry.rules.filters.issue_occurrences.IssueOccurrencesFilter",)
- redis_buffer = RedisBuffer()
- with mock.patch("sentry.buffer.backend.get", redis_buffer.get), mock.patch(
- "sentry.buffer.backend.incr", redis_buffer.incr
- ), patch("sentry.constants._SENTRY_RULES", MOCK_RULES), patch(
- "sentry.rules.processor.rules", init_registry()
- ) as rules:
- MockAction = mock.Mock()
- MockAction.rule_type = "action/event"
- MockAction.id = "tests.sentry.tasks.post_process.tests.MockAction"
- MockAction.return_value.after.return_value = []
- rules.add(MockAction)
- conditions = [
- {
- "id": "sentry.rules.filters.issue_occurrences.IssueOccurrencesFilter",
- "value": 10,
- },
- ]
- actions = [{"id": "tests.sentry.tasks.post_process.tests.MockAction"}]
- Rule.objects.filter(project=self.project).delete()
- Rule.objects.create(
- project=self.project, data={"conditions": conditions, "actions": actions}
- )
- event = self.create_event(
- data={"message": "testing", "fingerprint": ["group-1"]}, project_id=self.project.id
- )
- event_2 = self.create_event(
- data={"message": "testing", "fingerprint": ["group-1"]}, project_id=self.project.id
- )
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- event.group.update(times_seen=2)
- assert MockAction.return_value.after.call_count == 0
- buffer.backend.incr(Group, {"times_seen": 15}, filters={"pk": event.group.id})
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event_2,
- )
- assert MockAction.return_value.after.call_count == 1
- @patch("sentry.rules.processor.RuleProcessor")
- def test_group_refresh(self, mock_processor):
- event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
- group1 = event.group
- group2 = self.create_group(project=self.project)
- assert event.group_id == group1.id
- assert event.group == group1
- with self.tasks():
- merge_groups([group1.id], group2.id)
- mock_callback = Mock()
- mock_futures = [Mock()]
- mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- # Ensure that rule processing sees the merged group.
- mock_processor.assert_called_with(
- EventMatcher(event, group=group2), True, False, True, False
- )
- @patch("sentry.rules.processor.RuleProcessor")
- def test_group_last_seen_buffer(self, mock_processor):
- first_event_date = datetime.now(timezone.utc) - timedelta(days=90)
- event1 = self.create_event(
- data={"message": "testing"},
- project_id=self.project.id,
- )
- group1 = event1.group
- group1.update(last_seen=first_event_date)
- event2 = self.create_event(data={"message": "testing"}, project_id=self.project.id)
- # Manually set last_seen back to the first event's date to simulate the
- # last_seen update still being held in the buffer (not yet flushed)
- event2.group.last_seen = first_event_date
- event2.group.update(last_seen=first_event_date)
- assert event2.group_id == group1.id
- mock_callback = Mock()
- mock_futures = [Mock()]
- mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
- self.call_post_process_group(
- is_new=False,
- is_regression=True,
- is_new_group_environment=False,
- event=event2,
- )
- mock_processor.assert_called_with(
- EventMatcher(event2, group=group1), False, True, False, False
- )
- sent_group_date = mock_processor.call_args[0][0].group.last_seen
- # Check that last_seen was updated to be at least the new event's date
- self.assertAlmostEqual(sent_group_date, event2.datetime, delta=timedelta(seconds=10))
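- # Service hooks fire only behind the "projects:servicehooks" feature flag and
- # only for event types the hook subscribed to ("event.created" for every
- # processed event, "event.alert" when a rule fires).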
- class ServiceHooksTestMixin(BasePostProgressGroupMixin):
- @patch("sentry.tasks.servicehooks.process_service_hook")
- def test_service_hook_fires_on_new_event(self, mock_process_service_hook):
- event = self.create_event(data={}, project_id=self.project.id)
- hook = self.create_service_hook(
- project=self.project,
- organization=self.project.organization,
- actor=self.user,
- events=["event.created"],
- )
- with self.feature("projects:servicehooks"):
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- mock_process_service_hook.delay.assert_called_once_with(
- servicehook_id=hook.id, event=EventMatcher(event)
- )
- @patch("sentry.tasks.servicehooks.process_service_hook")
- @patch("sentry.rules.processor.RuleProcessor")
- def test_service_hook_fires_on_alert(self, mock_processor, mock_process_service_hook):
- event = self.create_event(data={}, project_id=self.project.id)
- mock_callback = Mock()
- mock_futures = [Mock()]
- mock_processor.return_value.apply.return_value = [(mock_callback, mock_futures)]
- hook = self.create_service_hook(
- project=self.project,
- organization=self.project.organization,
- actor=self.user,
- events=["event.alert"],
- )
- with self.feature("projects:servicehooks"):
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- mock_process_service_hook.delay.assert_called_once_with(
- servicehook_id=hook.id, event=EventMatcher(event)
- )
- @patch("sentry.tasks.servicehooks.process_service_hook")
- @patch("sentry.rules.processor.RuleProcessor")
- def test_service_hook_does_not_fire_without_alert(
- self, mock_processor, mock_process_service_hook
- ):
- event = self.create_event(data={}, project_id=self.project.id)
- mock_processor.return_value.apply.return_value = []
- self.create_service_hook(
- project=self.project,
- organization=self.project.organization,
- actor=self.user,
- events=["event.alert"],
- )
- with self.feature("projects:servicehooks"):
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assert not mock_process_service_hook.delay.mock_calls
- @patch("sentry.tasks.servicehooks.process_service_hook")
- def test_service_hook_does_not_fire_without_event(self, mock_process_service_hook):
- event = self.create_event(data={}, project_id=self.project.id)
- self.create_service_hook(
- project=self.project, organization=self.project.organization, actor=self.user, events=[]
- )
- with self.feature("projects:servicehooks"):
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assert not mock_process_service_hook.delay.mock_calls
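- # Sentry-app resource hooks: the "created" task fires for new groups
- # unconditionally, while error-level events additionally require the
- # "organizations:integrations-event-hooks" feature and an "error.created"
- # service hook subscription.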
- class ResourceChangeBoundsTestMixin(BasePostProgressGroupMixin):
- @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
- def test_processes_resource_change_task_on_new_group(self, delay):
- event = self.create_event(data={}, project_id=self.project.id)
- group = event.group
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- delay.assert_called_once_with(action="created", sender="Group", instance_id=group.id)
- @with_feature("organizations:integrations-event-hooks")
- @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
- def test_processes_resource_change_task_on_error_events(self, delay):
- event = self.create_event(
- data={
- "message": "Foo bar",
- "exception": {"type": "Foo", "value": "oh no"},
- "level": "error",
- "timestamp": iso_format(django_timezone.now()),
- },
- project_id=self.project.id,
- assert_no_errors=False,
- )
- self.create_service_hook(
- project=self.project,
- organization=self.project.organization,
- actor=self.user,
- events=["error.created"],
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- delay.assert_called_once_with(
- action="created",
- sender="Error",
- instance_id=event.event_id,
- instance=EventMatcher(event),
- )
- @with_feature("organizations:integrations-event-hooks")
- @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
- def test_processes_resource_change_task_not_called_for_non_errors(self, delay):
- event = self.create_event(
- data={
- "message": "Foo bar",
- "level": "info",
- "timestamp": iso_format(django_timezone.now()),
- },
- project_id=self.project.id,
- assert_no_errors=False,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assert not delay.called
- @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
- def test_processes_resource_change_task_not_called_without_feature_flag(self, delay):
- event = self.create_event(
- data={
- "message": "Foo bar",
- "level": "info",
- "timestamp": iso_format(django_timezone.now()),
- },
- project_id=self.project.id,
- assert_no_errors=False,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assert not delay.called
- @with_feature("organizations:integrations-event-hooks")
- @patch("sentry.tasks.sentry_apps.process_resource_change_bound.delay")
- def test_processes_resource_change_task_not_called_without_error_created(self, delay):
- event = self.create_event(
- data={
- "message": "Foo bar",
- "level": "error",
- "exception": {"type": "Foo", "value": "oh no"},
- "timestamp": iso_format(django_timezone.now()),
- },
- project_id=self.project.id,
- assert_no_errors=False,
- )
- self.create_service_hook(
- project=self.project, organization=self.project.organization, actor=self.user, events=[]
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assert not delay.called
- class InboxTestMixin(BasePostProgressGroupMixin):
- @patch("sentry.rules.processor.RuleProcessor")
- def test_group_inbox_regression(self, mock_processor):
- new_event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
- group = new_event.group
- assert group.status == GroupStatus.UNRESOLVED
- assert group.substatus == GroupSubStatus.ONGOING
- self.call_post_process_group(
- is_new=True,
- is_regression=True,
- is_new_group_environment=False,
- event=new_event,
- )
- assert GroupInbox.objects.filter(group=group, reason=GroupInboxReason.NEW.value).exists()
- GroupInbox.objects.filter(
- group=group
- ).delete() # Delete so it creates the .REGRESSION entry.
- group.refresh_from_db()
- assert group.status == GroupStatus.UNRESOLVED
- assert group.substatus == GroupSubStatus.NEW
- mock_processor.assert_called_with(EventMatcher(new_event), True, True, False, False)
- # resolve the new issue so regression actually happens
- group.status = GroupStatus.RESOLVED
- group.substatus = None
- group.active_at = group.active_at - timedelta(minutes=1)
- group.save(update_fields=["status", "substatus", "active_at"])
- # trigger a transition from resolved to regressed by firing an event that groups to that issue
- regressed_event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
- assert regressed_event.group == new_event.group
- group = regressed_event.group
- group.refresh_from_db()
- assert group.status == GroupStatus.UNRESOLVED
- assert group.substatus == GroupSubStatus.REGRESSED
- self.call_post_process_group(
- is_new=False,
- is_regression=True,
- is_new_group_environment=False,
- event=regressed_event,
- )
- mock_processor.assert_called_with(EventMatcher(regressed_event), False, True, False, False)
- group.refresh_from_db()
- assert group.status == GroupStatus.UNRESOLVED
- assert group.substatus == GroupSubStatus.REGRESSED
- assert GroupInbox.objects.filter(
- group=group, reason=GroupInboxReason.REGRESSION.value
- ).exists()
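- # make_ownership installs path-based ownership rules (src/app/* -> team,
- # src/* -> user, tests/* -> user_2); the tests assert both which GroupOwner
- # rows get created and which owner wins auto-assignment.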
- class AssignmentTestMixin(BasePostProgressGroupMixin):
- def make_ownership(self, extra_rules=None):
- self.user_2 = self.create_user()
- self.create_team_membership(team=self.team, user=self.user_2)
- rules = [
- Rule(Matcher("path", "src/app/*"), [Owner("team", self.team.name)]),
- Rule(Matcher("path", "src/*"), [Owner("user", self.user.email)]),
- Rule(Matcher("path", "tests/*"), [Owner("user", self.user_2.email)]),
- ]
- if extra_rules:
- rules.extend(extra_rules)
- self.prj_ownership = ProjectOwnership.objects.create(
- project_id=self.project.id,
- schema=dump_schema(rules),
- fallthrough=True,
- auto_assignment=True,
- )
- def test_owner_assignment_order_precedence(self):
- self.make_ownership()
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
- },
- project_id=self.project.id,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assignee = event.group.assignee_set.first()
- assert assignee.user_id == self.user.id
- assert assignee.team is None
- owners = list(GroupOwner.objects.filter(group=event.group))
- assert len(owners) == 2
- assert {(self.user.id, None), (None, self.team.id)} == {
- (o.user_id, o.team_id) for o in owners
- }
- activity = Activity.objects.filter(group=event.group).first()
- assert activity.data == {
- "assignee": str(self.user.id),
- "assigneeEmail": self.user.email,
- "assigneeType": "user",
- "integration": ActivityIntegration.PROJECT_OWNERSHIP.value,
- "rule": str(Rule(Matcher("path", "src/*"), [Owner("user", self.user.email)])),
- }
- def test_owner_assignment_extra_groups(self):
- extra_user = self.create_user()
- self.create_team_membership(self.team, user=extra_user)
- self.make_ownership(
- [Rule(Matcher("path", "src/app/things/in/*"), [Owner("user", extra_user.email)])],
- )
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app/things/in/a/path/example2.py"}]},
- },
- project_id=self.project.id,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assignee = event.group.assignee_set.first()
- assert assignee.user_id == extra_user.id
- assert assignee.team is None
- owners = list(GroupOwner.objects.filter(group=event.group))
- assert len(owners) == 2
- assert {(extra_user.id, None), (self.user.id, None)} == {
- (o.user_id, o.team_id) for o in owners
- }
- def test_owner_assignment_existing_owners(self):
- extra_team = self.create_team()
- ProjectTeam.objects.create(team=extra_team, project=self.project)
- self.make_ownership(
- [Rule(Matcher("path", "src/app/things/in/*"), [Owner("team", extra_team.slug)])],
- )
- GroupOwner.objects.create(
- group=self.group,
- project=self.project,
- organization=self.organization,
- user_id=self.user.id,
- type=GroupOwnerType.OWNERSHIP_RULE.value,
- )
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app/things/in/a/path/example2.py"}]},
- },
- project_id=self.project.id,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assignee = event.group.assignee_set.first()
- assert assignee.user_id is None
- assert assignee.team == extra_team
- owners = list(GroupOwner.objects.filter(group=event.group))
- assert {(None, extra_team.id), (self.user.id, None)} == {
- (o.user_id, o.team_id) for o in owners
- }
- def test_owner_assignment_assign_user(self):
- self.make_ownership()
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app.py"}]},
- },
- project_id=self.project.id,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assignee = event.group.assignee_set.first()
- assert assignee.user_id == self.user.id
- assert assignee.team is None
- def test_owner_assignment_ownership_no_matching_owners(self):
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
- },
- project_id=self.project.id,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assert not event.group.assignee_set.exists()
- def test_owner_assignment_existing_assignment(self):
- self.make_ownership()
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
- },
- project_id=self.project.id,
- )
- event.group.assignee_set.create(team=self.team, project=self.project)
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assignee = event.group.assignee_set.first()
- assert assignee.user_id is None
- assert assignee.team == self.team
- def test_only_first_assignment_works(self):
- self.make_ownership()
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
- "fingerprint": ["group1"],
- },
- project_id=self.project.id,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assignee = event.group.assignee_set.first()
- assert assignee.user_id == self.user.id
- assert assignee.team is None
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "tests/src/app/test_example.py"}]},
- "fingerprint": ["group1"],
- },
- project_id=self.project.id,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assignee = event.group.assignee_set.first()
- # Assignment shouldn't change.
- assert assignee.user_id == self.user.id
- assert assignee.team is None
- def test_owner_assignment_owner_is_gone(self):
- self.make_ownership()
- # Remove the team so the rule match will fail to resolve
- self.team.delete()
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
- },
- project_id=self.project.id,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assignee = event.group.assignee_set.first()
- assert assignee is None
- def test_suspect_committer_affects_cache_debouncing_issue_owners_calculations(self):
- self.make_ownership()
- committer = GroupOwner(
- group=self.created_event.group,
- project=self.created_event.project,
- organization=self.created_event.project.organization,
- type=GroupOwnerType.SUSPECT_COMMIT.value,
- )
- committer.save()
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
- },
- project_id=self.project.id,
- )
- event.group.assignee_set.create(team=self.team, project=self.project)
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assignee = event.group.assignee_set.first()
- assert assignee.user_id is None
- assert assignee.team == self.team
- def test_owner_assignment_when_owners_have_been_unassigned(self):
- """
- Test that ensures that if certain assignees get unassigned, and project rules are changed
- then the new group assignees should be re-calculated and re-assigned
- """
- # Create rules and check assignees
- self.make_ownership()
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
- },
- project_id=self.project.id,
- )
- event_2 = self.create_event(
- data={
- "message": "Exception",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app/integration.py"}]},
- },
- project_id=self.project.id,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event_2,
- )
- assignee = event.group.assignee_set.first()
- assert assignee.user_id == self.user.id
- user_3 = self.create_user()
- self.create_team_membership(self.team, user=user_3)
- # De-assign group assignees
- GroupAssignee.objects.deassign(event.group, self.user)
- assert event.group.assignee_set.first() is None
- # Change ProjectOwnership rules
- rules = [
- Rule(Matcher("path", "src/*"), [Owner("user", user_3.email)]),
- ]
- self.prj_ownership.schema = dump_schema(rules)
- self.prj_ownership.save()
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event_2,
- )
- # Group should be re-assigned to the new group owner
- assignee = event.group.assignee_set.first()
- assert assignee.user_id == user_3.id
- # De-assign group assignees
- GroupAssignee.objects.deassign(event.group, user_service.get_user(user_id=assignee.user_id))
- assert event.group.assignee_set.first() is None
- user_4 = self.create_user()
- self.create_team_membership(self.team, user=user_4)
- self.prj_ownership.schema = dump_schema([])
- self.prj_ownership.save()
- code_owners_rule = Rule(
- Matcher("codeowners", "*.py"),
- [Owner("user", user_4.email)],
- )
- self.code_mapping = self.create_code_mapping(project=self.project)
- self.code_owners = self.create_codeowners(
- self.project,
- self.code_mapping,
- schema=dump_schema([code_owners_rule]),
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event_2,
- )
- # Group should be re-assigned to the new group owner
- assignee = event.group.assignee_set.first()
- assert assignee.user_id == user_4.id
- def test_auto_assignment_when_owners_have_been_unassigned(self):
- """
- Test that ensures that if assignee gets unassigned and project rules are changed,
- then the new group assignees should be re-calculated and re-assigned
- """
- # Create rules and check assignees
- self.make_ownership()
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
- },
- project_id=self.project.id,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assignee = (
- GroupOwner.objects.filter()
- .exclude(user_id__isnull=True, team_id__isnull=True)
- .order_by("type")
- .first()
- )
- assert assignee.user_id == self.user.id
- user_3 = self.create_user()
- self.create_team_membership(self.team, user=user_3)
- # Set assignee_exists cache to self.user
- cache.set(ASSIGNEE_EXISTS_KEY(event.group_id), self.user, ASSIGNEE_EXISTS_DURATION)
- # De-assign group assignees
- GroupAssignee.objects.deassign(event.group, self.user)
- assert event.group.assignee_set.first() is None
- # Change ProjectOwnership rules
- rules = [
- Rule(Matcher("path", "src/*"), [Owner("user", user_3.email)]),
- ]
- self.prj_ownership.schema = dump_schema(rules)
- self.prj_ownership.save()
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- # Mimic filter used in get_autoassigned_owner_cached to get the issue owner to be
- # auto-assigned
- assignee = (
- GroupOwner.objects.filter()
- .exclude(user_id__isnull=True, team_id__isnull=True)
- .order_by("type")
- .first()
- )
- # Group should be re-assigned to the new group owner
- assert assignee.user_id == user_3.id
- def test_ensure_when_assignees_and_owners_are_cached_does_not_cause_unbound_errors(self):
- self.make_ownership()
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app.py"}]},
- },
- project_id=self.project.id,
- )
- assignee_cache_key = "assignee_exists:1:%s" % event.group.id
- owner_cache_key = "owner_exists:1:%s" % event.group.id
- for key in [assignee_cache_key, owner_cache_key]:
- cache.set(key, True)
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- def test_auto_assignment_when_owners_are_invalid(self):
- """
- Test that invalid group owners (that exist due to bugs) are deleted and not assigned
- when no valid issue owner exists
- """
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app/example.py"}]},
- },
- project_id=self.project.id,
- )
- # Hard code an invalid group owner
- invalid_codeowner = GroupOwner(
- group=event.group,
- project=event.project,
- organization=event.project.organization,
- type=GroupOwnerType.CODEOWNERS.value,
- context={"rule": "codeowners:/**/*.css " + self.user.email},
- user_id=self.user.id,
- )
- invalid_codeowner.save()
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- assignee = event.group.assignee_set.first()
- assert assignee is None
- assert len(GroupOwner.objects.filter(group=event.group)) == 0
- @patch("sentry.tasks.post_process.logger")
- def test_debounces_handle_owner_assignments(self, logger):
- self.make_ownership()
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app.py"}]},
- },
- project_id=self.project.id,
- )
- cache.set(ISSUE_OWNERS_DEBOUNCE_KEY(event.group_id), True, ISSUE_OWNERS_DEBOUNCE_DURATION)
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- logger.info.assert_any_call(
- "handle_owner_assignment.issue_owners_exist",
- extra={
- "event": event.event_id,
- "group": event.group_id,
- "project": event.project_id,
- "organization": event.project.organization_id,
- "reason": "issue_owners_exist",
- },
- )
- @patch("sentry.tasks.post_process.logger")
- def test_issue_owners_should_ratelimit(self, logger):
- cache.set(
- f"issue_owner_assignment_ratelimiter:{self.project.id}",
- (set(range(0, ISSUE_OWNERS_PER_PROJECT_PER_MIN_RATELIMIT * 10, 10)), datetime.now()),
- )
- cache.set(f"commit-context-scm-integration:{self.project.organization_id}", True, 60)
- event = self.create_event(
- data={
- "message": "oh no",
- "platform": "python",
- "stacktrace": {"frames": [{"filename": "src/app.py"}]},
- },
- project_id=self.project.id,
- )
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- event=event,
- )
- logger.info.assert_any_call(
- "handle_owner_assignment.ratelimited",
- extra={
- "event": event.event_id,
- "group": event.group_id,
- "project": event.project_id,
- "organization": event.project.organization_id,
- "reason": "ratelimited",
- },
- )
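- # Owner assignment is debounced through cache keys (ISSUE_OWNERS_DEBOUNCE_KEY,
- # ASSIGNEE_EXISTS_KEY) and rate limited per project per minute via
- # ISSUE_OWNERS_PER_PROJECT_PER_MIN_RATELIMIT, as the two logging tests above
- # demonstrate.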
- class ProcessCommitsTestMixin(BasePostProgressGroupMixin):
- github_blame_return_value = {
- "commitId": "asdfwreqr",
- "committedDate": (datetime.now(timezone.utc) - timedelta(days=2)),
- "commitMessage": "placeholder commit message",
- "commitAuthorName": "",
- "commitAuthorEmail": "admin@localhost",
- }
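- # Minimal stand-in for a GitHub commit-blame response; commitId matches the
- # commit created in setUp (key "asdfwreqr") so the suspect-commit lookup can
- # resolve it to a GroupOwner.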
- def setUp(self):
- self.created_event = self.create_event(
- data={
- "message": "Kaboom!",
- "platform": "python",
- "timestamp": iso_format(before_now(seconds=10)),
- "stacktrace": {
- "frames": [
- {
- "function": "handle_set_commits",
- "abs_path": "/usr/src/sentry/src/sentry/tasks.py",
- "module": "sentry.tasks",
- "in_app": False,
- "lineno": 30,
- "filename": "sentry/tasks.py",
- },
- {
- "function": "set_commits",
- "abs_path": "/usr/src/sentry/src/sentry/models/release.py",
- "module": "sentry.models.release",
- "in_app": True,
- "lineno": 39,
- "filename": "sentry/models/release.py",
- },
- ]
- },
- "fingerprint": ["put-me-in-the-control-group"],
- },
- project_id=self.project.id,
- )
- self.cache_key = write_event_to_cache(self.created_event)
- self.repo = self.create_repo(
- name="example",
- integration_id=self.integration.id,
- )
- self.code_mapping = self.create_code_mapping(
- repo=self.repo, project=self.project, stack_root="src/"
- )
- self.commit_author = self.create_commit_author(project=self.project, user=self.user)
- self.commit = self.create_commit(
- project=self.project,
- repo=self.repo,
- author=self.commit_author,
- key="asdfwreqr",
- message="placeholder commit message",
- )
- @with_feature("organizations:commit-context")
- @patch(
- "sentry.integrations.github.GitHubIntegration.get_commit_context",
- return_value=github_blame_return_value,
- )
- def test_debounce_cache_is_set(self, mock_get_commit_context):
- with self.tasks():
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=self.created_event,
- )
- assert GroupOwner.objects.get(
- group=self.created_event.group,
- project=self.created_event.project,
- organization=self.created_event.project.organization,
- type=GroupOwnerType.SUSPECT_COMMIT.value,
- )
- assert cache.has_key(f"process-commit-context-{self.created_event.group_id}")
- @with_feature("organizations:commit-context")
- @patch(
- "sentry.integrations.github.GitHubIntegration.get_commit_context",
- return_value=github_blame_return_value,
- )
- def test_logic_fallback_no_scm(self, mock_get_commit_context):
- with unguarded_write(using=router.db_for_write(Integration)):
- Integration.objects.all().delete()
- integration = Integration.objects.create(provider="bitbucket")
- integration.add_organization(self.organization)
- with self.tasks():
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=self.created_event,
- )
- assert not cache.has_key(f"process-commit-context-{self.created_event.group_id}")
- class SnoozeTestMixin(BasePostProgressGroupMixin):
- @with_feature("organizations:escalating-issues")
- @patch("sentry.signals.issue_escalating.send_robust")
- @patch("sentry.signals.issue_unignored.send_robust")
- @patch("sentry.rules.processor.RuleProcessor")
- def test_invalidates_snooze(
- self, mock_processor, mock_send_unignored_robust, mock_send_escalating_robust
- ):
- event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
- group = event.group
- # Check for has_reappeared=False if is_new=True
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- assert GroupInbox.objects.filter(group=group, reason=GroupInboxReason.NEW.value).exists()
- GroupInbox.objects.filter(group=group).delete() # Delete so it creates the UNIGNORED entry.
- Activity.objects.filter(group=group).delete()
- mock_processor.assert_called_with(EventMatcher(event), True, False, True, False)
- event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
- group.status = GroupStatus.IGNORED
- group.substatus = GroupSubStatus.UNTIL_CONDITION_MET
- group.save(update_fields=["status", "substatus"])
- snooze = GroupSnooze.objects.create(
- group=group, until=django_timezone.now() - timedelta(hours=1)
- )
- # Check for has_reappeared=True if is_new=False
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- mock_processor.assert_called_with(EventMatcher(event), False, False, True, True)
- mock_send_escalating_robust.assert_called_once_with(
- project=group.project,
- group=group,
- event=EventMatcher(event),
- sender=manage_issue_states,
- was_until_escalating=False,
- )
- assert not GroupSnooze.objects.filter(id=snooze.id).exists()
- group = Group.objects.get(id=group.id)
- assert group.status == GroupStatus.UNRESOLVED
- assert group.substatus == GroupSubStatus.ESCALATING
- assert GroupInbox.objects.filter(
- group=group, reason=GroupInboxReason.ESCALATING.value
- ).exists()
- assert Activity.objects.filter(
- group=group, project=group.project, type=ActivityType.SET_ESCALATING.value
- ).exists()
- assert mock_send_unignored_robust.called
- @override_settings(SENTRY_BUFFER="sentry.buffer.redis.RedisBuffer")
- @patch("sentry.signals.issue_unignored.send_robust")
- @patch("sentry.rules.processor.RuleProcessor")
- def test_invalidates_snooze_with_buffers(self, mock_processor, send_robust):
- redis_buffer = RedisBuffer()
- with mock.patch("sentry.buffer.backend.get", redis_buffer.get), mock.patch(
- "sentry.buffer.backend.incr", redis_buffer.incr
- ):
- event = self.create_event(
- data={"message": "testing", "fingerprint": ["group-1"]}, project_id=self.project.id
- )
- event_2 = self.create_event(
- data={"message": "testing", "fingerprint": ["group-1"]}, project_id=self.project.id
- )
- group = event.group
- group.times_seen = 50
- group.status = GroupStatus.IGNORED
- group.substatus = GroupSubStatus.UNTIL_CONDITION_MET
- group.save(update_fields=["times_seen", "status", "substatus"])
- snooze = GroupSnooze.objects.create(group=group, count=100, state={"times_seen": 0})
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- assert GroupSnooze.objects.filter(id=snooze.id).exists()
- buffer.backend.incr(Group, {"times_seen": 60}, filters={"pk": event.group.id})
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=True,
- event=event_2,
- )
- assert not GroupSnooze.objects.filter(id=snooze.id).exists()
- @patch("sentry.rules.processor.RuleProcessor")
- def test_maintains_valid_snooze(self, mock_processor):
- event = self.create_event(data={}, project_id=self.project.id)
- group = event.group
- assert group.status == GroupStatus.UNRESOLVED
- assert group.substatus == GroupSubStatus.ONGOING
- snooze = GroupSnooze.objects.create(
- group=group, until=django_timezone.now() + timedelta(hours=1)
- )
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- mock_processor.assert_called_with(EventMatcher(event), True, False, True, False)
- assert GroupSnooze.objects.filter(id=snooze.id).exists()
- group.refresh_from_db()
- assert group.status == GroupStatus.UNRESOLVED
- assert group.substatus == GroupSubStatus.NEW
- @with_feature("organizations:escalating-issues")
- @patch("sentry.issues.escalating.is_escalating", return_value=(True, 0))
- def test_forecast_in_activity(self, mock_is_escalating):
- """
- Test that the forecast is added to the activity for escalating issues that were
- previously ignored until_escalating.
- """
- event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
- group = event.group
- group.status = GroupStatus.IGNORED
- group.substatus = GroupSubStatus.UNTIL_ESCALATING
- group.save()
- self.call_post_process_group(
- is_new=False,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- assert Activity.objects.filter(
- group=group,
- project=group.project,
- type=ActivityType.SET_ESCALATING.value,
- data={"event_id": event.event_id, "forecast": 0},
- ).exists()
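- # Summary of the snooze behavior exercised above: an expired or condition-met
- # GroupSnooze is deleted, the group returns to UNRESOLVED, and (with the
- # escalating-issues feature) an ESCALATING inbox reason and SET_ESCALATING
- # activity are recorded.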
- @patch("sentry.utils.sdk_crashes.sdk_crash_detection.sdk_crash_detection")
- class SDKCrashMonitoringTestMixin(BasePostProgressGroupMixin):
- @with_feature("organizations:sdk-crash-detection")
- @override_settings(SDK_CRASH_DETECTION_PROJECT_ID=1234)
- @override_settings(SDK_CRASH_DETECTION_SAMPLE_RATE=0.1234)
- def test_sdk_crash_monitoring_is_called(self, mock_sdk_crash_detection):
- event = self.create_event(
- data={"message": "testing"},
- project_id=self.project.id,
- )
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- mock_sdk_crash_detection.detect_sdk_crash.assert_called_once()
- args = mock_sdk_crash_detection.detect_sdk_crash.call_args[-1]
- assert args["event"].project.id == event.project.id
- assert args["event_project_id"] == 1234
- assert args["sample_rate"] == 0.1234
- def test_sdk_crash_monitoring_is_not_called_with_disabled_feature(
- self, mock_sdk_crash_detection
- ):
- event = self.create_event(
- data={"message": "testing"},
- project_id=self.project.id,
- )
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- mock_sdk_crash_detection.detect_sdk_crash.assert_not_called()
- @with_feature("organizations:sdk-crash-detection")
- def test_sdk_crash_monitoring_is_not_called_without_project_id(self, mock_sdk_crash_detection):
- event = self.create_event(
- data={"message": "testing"},
- project_id=self.project.id,
- )
- self.call_post_process_group(
- is_new=True,
- is_regression=False,
- is_new_group_environment=True,
- event=event,
- )
- mock_sdk_crash_detection.detect_sdk_crash.assert_not_called()
- @mock.patch.object(replays_kafka, "get_kafka_producer_cluster_options")
- @mock.patch.object(replays_kafka, "KafkaPublisher")
- @mock.patch("sentry.utils.metrics.incr")
- class ReplayLinkageTestMixin(BasePostProgressGroupMixin):
- def test_replay_linkage(self, incr, kafka_producer, kafka_publisher):
        replay_id = uuid.uuid4().hex
        event = self.create_event(
            data={"message": "testing", "contexts": {"replay": {"replay_id": replay_id}}},
            project_id=self.project.id,
        )

        with self.feature({"organizations:session-replay-event-linking": True}):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
            )
            assert kafka_producer.return_value.publish.call_count == 1
            assert kafka_producer.return_value.publish.call_args[0][0] == "ingest-replay-events"

            ret_value = json.loads(kafka_producer.return_value.publish.call_args[0][1])

            assert ret_value["type"] == "replay_event"
            assert ret_value["start_time"] == int(event.datetime.timestamp())
            assert ret_value["replay_id"] == replay_id
            assert ret_value["project_id"] == self.project.id
            assert ret_value["segment_id"] is None
            assert ret_value["retention_days"] == 90

            # ret_value["payload"] arrives as a list of byte values; convert it
            # back to a string and parse the inner JSON payload.
            ret_value_payload = json.loads(bytes(ret_value["payload"]).decode("utf-8"))

            assert ret_value_payload == {
                "type": "event_link",
                "replay_id": replay_id,
                "error_id": event.event_id,
                "timestamp": int(event.datetime.timestamp()),
                "event_hash": str(uuid.UUID(md5((event.event_id).encode("utf-8")).hexdigest())),
            }

            incr.assert_any_call("post_process.process_replay_link.id_sampled")
            incr.assert_any_call("post_process.process_replay_link.id_exists")

    def test_no_replay(self, incr, kafka_producer, kafka_publisher):
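        """Events without a replay context should not publish anything to Kafka."""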
        event = self.create_event(
            data={"message": "testing"},
            project_id=self.project.id,
        )

        with self.feature({"organizations:session-replay-event-linking": True}):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
            )
            assert kafka_producer.return_value.publish.call_count == 0
            incr.assert_called_with("post_process.process_replay_link.id_sampled")

    def test_0_sample_rate_replays(self, incr, kafka_producer, kafka_publisher):
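        """With the linking feature disabled, no Kafka message is produced and no sampled-id metric is emitted."""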
        event = self.create_event(
            data={"message": "testing"},
            project_id=self.project.id,
        )

        with self.feature({"organizations:session-replay-event-linking": False}):
            self.call_post_process_group(
                is_new=True,
                is_regression=False,
                is_new_group_environment=True,
                event=event,
            )
            assert kafka_producer.return_value.publish.call_count == 0
            for args, _ in incr.call_args_list:
                # Note the trailing comma: without it, `args` (a tuple) would be
                # compared against a bare string and this check could never fail.
                self.assertNotEqual(args, ("post_process.process_replay_link.id_sampled",))


@region_silo_test
class PostProcessGroupErrorTest(
    TestCase,
    AssignmentTestMixin,
    ProcessCommitsTestMixin,
    CorePostProcessGroupTestMixin,
    DeriveCodeMappingsProcessGroupTestMixin,
    InboxTestMixin,
    ResourceChangeBoundsTestMixin,
    RuleProcessorTestMixin,
    ServiceHooksTestMixin,
    SnoozeTestMixin,
    SDKCrashMonitoringTestMixin,
    ReplayLinkageTestMixin,
):
    def create_event(self, data, project_id, assert_no_errors=True):
        return self.store_event(data=data, project_id=project_id, assert_no_errors=assert_no_errors)

    def call_post_process_group(
        self, is_new, is_regression, is_new_group_environment, event, cache_key=None
    ):
        if cache_key is None:
            cache_key = write_event_to_cache(event)
        post_process_group(
            is_new=is_new,
            is_regression=is_regression,
            is_new_group_environment=is_new_group_environment,
            cache_key=cache_key,
            group_id=event.group_id,
            project_id=event.project_id,
        )
        return cache_key
- @with_feature("organizations:escalating-metrics-backend")
- @patch("sentry.sentry_metrics.client.generic_metrics_backend.counter")
- def test_generic_metrics_backend_counter(self, generic_metrics_backend_mock):
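        """With the escalating-metrics backend enabled, an error event should increment the backend counter exactly once."""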
        min_ago = iso_format(before_now(minutes=1))
        event = self.create_event(
            data={
                "exception": {
                    "values": [
                        {
                            "type": "ZeroDivisionError",
                            "stacktrace": {"frames": [{"function": f} for f in ["a", "b"]]},
                        }
                    ]
                },
                "timestamp": min_ago,
                "start_timestamp": min_ago,
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            project_id=self.project.id,
        )
        self.call_post_process_group(
            is_new=True, is_regression=False, is_new_group_environment=True, event=event
        )

        assert generic_metrics_backend_mock.call_count == 1


@region_silo_test
class PostProcessGroupPerformanceTest(
    TestCase,
    SnubaTestCase,
    PerfIssueTransactionTestMixin,
    CorePostProcessGroupTestMixin,
    InboxTestMixin,
    RuleProcessorTestMixin,
    SnoozeTestMixin,
    PerformanceIssueTestCase,
):
    def create_event(self, data, project_id, assert_no_errors=True):
        fingerprint = data["fingerprint"][0] if data.get("fingerprint") else "some_group"
        fingerprint = f"{PerformanceNPlusOneGroupType.type_id}-{fingerprint}"
        return self.create_performance_issue(fingerprint=fingerprint)

    def call_post_process_group(
        self, is_new, is_regression, is_new_group_environment, event, cache_key=None
    ):
        group_states = (
            [
                {
                    "id": event.group_id,
                    "is_new": is_new,
                    "is_regression": is_regression,
                    "is_new_group_environment": is_new_group_environment,
                }
            ]
            if event.group_id
            else None
        )
        if cache_key is None:
            cache_key = write_event_to_cache(event)
        with self.feature(PerformanceNPlusOneGroupType.build_post_process_group_feature_name()):
            post_process_group(
                is_new=is_new,
                is_regression=is_regression,
                is_new_group_environment=is_new_group_environment,
                cache_key=cache_key,
                group_states=group_states,
                project_id=event.project_id,
            )
        return cache_key
- @patch("sentry.sentry_metrics.client.generic_metrics_backend.counter")
- @patch("sentry.tasks.post_process.run_post_process_job")
- @patch("sentry.rules.processor.RuleProcessor")
- @patch("sentry.signals.transaction_processed.send_robust")
- @patch("sentry.signals.event_processed.send_robust")
- def test_process_transaction_event_with_no_group(
- self,
- event_processed_signal_mock,
- transaction_processed_signal_mock,
- mock_processor,
- run_post_process_job_mock,
- generic_metrics_backend_mock,
- ):
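        """A transaction with no associated group should fire the transaction signal but skip all group-based processing."""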
        min_ago = before_now(minutes=1).replace(tzinfo=timezone.utc)
        event = self.store_transaction(
            project_id=self.project.id,
            user_id=self.create_user(name="user1").name,
            fingerprint=[],
            environment=None,
            timestamp=min_ago,
        )
        assert len(event.groups) == 0

        cache_key = write_event_to_cache(event)
        post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            cache_key=cache_key,
            group_id=None,
            group_states=None,
        )

        assert transaction_processed_signal_mock.call_count == 1
        assert event_processed_signal_mock.call_count == 0
        assert mock_processor.call_count == 0
        assert run_post_process_job_mock.call_count == 0
        assert generic_metrics_backend_mock.call_count == 0
- @patch("sentry.tasks.post_process.handle_owner_assignment")
- @patch("sentry.tasks.post_process.handle_auto_assignment")
- @patch("sentry.tasks.post_process.process_rules")
- @patch("sentry.tasks.post_process.run_post_process_job")
- @patch("sentry.rules.processor.RuleProcessor")
- @patch("sentry.signals.transaction_processed.send_robust")
- @patch("sentry.signals.event_processed.send_robust")
- def test_full_pipeline_with_group_states(
- self,
- event_processed_signal_mock,
- transaction_processed_signal_mock,
- mock_processor,
- run_post_process_job_mock,
- mock_process_rules,
- mock_handle_auto_assignment,
- mock_handle_owner_assignment,
- ):
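        """Exercise the full post-processing pipeline for a performance issue passed via group_states."""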
        event = self.create_performance_issue()
        assert event.group
        # cache_key = write_event_to_cache(event)
        group_state = dict(
            is_new=True,
            is_regression=False,
            is_new_group_environment=True,
        )

        # TODO(jangjodi): Fix this ordering test; the side_effects should be functions
        # (lambdas) that record calls, but because post-processing is async, the assert
        # fails: it does not wait for the side effects to run.
        call_order = [mock_handle_owner_assignment, mock_handle_auto_assignment, mock_process_rules]
        mock_handle_owner_assignment.side_effect = None
        mock_handle_auto_assignment.side_effect = None
        mock_process_rules.side_effect = None

        post_process_group(
            **group_state,
            cache_key="dummykey",
            group_id=event.group_id,
            group_states=[{"id": event.group.id, **group_state}],
            occurrence_id=event.occurrence_id,
            project_id=self.project.id,
        )

        assert transaction_processed_signal_mock.call_count == 1
        assert event_processed_signal_mock.call_count == 0
        assert mock_processor.call_count == 0
        assert run_post_process_job_mock.call_count == 1
        assert call_order == [
            mock_handle_owner_assignment,
            mock_handle_auto_assignment,
            mock_process_rules,
        ]


class TransactionClustererTestCase(TestCase, SnubaTestCase):
    @patch("sentry.ingest.transaction_clusterer.datasource.redis._record_sample")
    def test_process_transaction_event_clusterer(
        self,
        mock_store_transaction_name,
    ):
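        """Transactions with a url-based name should be recorded as samples for the transaction clusterer."""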
        min_ago = before_now(minutes=1).replace(tzinfo=timezone.utc)
        event = process_event(
            data={
                "project": self.project.id,
                "event_id": "b" * 32,
                "transaction": "foo",
                "start_timestamp": str(min_ago),
                "timestamp": str(min_ago),
                "type": "transaction",
                "transaction_info": {
                    "source": "url",
                },
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            },
            group_id=0,
        )

        cache_key = write_event_to_cache(event)
        post_process_group(
            is_new=False,
            is_regression=False,
            is_new_group_environment=False,
            cache_key=cache_key,
            group_id=None,
            group_states=None,
        )

        assert mock_store_transaction_name.mock_calls == [
            mock.call(ClustererNamespace.TRANSACTIONS, self.project, "foo")
        ]


@region_silo_test
class PostProcessGroupGenericTest(
    TestCase,
    SnubaTestCase,
    OccurrenceTestMixin,
    CorePostProcessGroupTestMixin,
    InboxTestMixin,
    RuleProcessorTestMixin,
    SnoozeTestMixin,
):
    def create_event(self, data, project_id, assert_no_errors=True):
        data["type"] = "generic"
        event = self.store_event(
            data=data, project_id=project_id, assert_no_errors=assert_no_errors
        )

        occurrence_data = self.build_occurrence_data(event_id=event.event_id, project_id=project_id)
        occurrence, group_info = save_issue_occurrence(occurrence_data, event)
        assert group_info is not None

        group_event = event.for_group(group_info.group)
        group_event.occurrence = occurrence
        return group_event

    def call_post_process_group(
        self, is_new, is_regression, is_new_group_environment, event, cache_key=None
    ):
        with self.feature(ProfileFileIOGroupType.build_post_process_group_feature_name()):
            post_process_group(
                is_new=is_new,
                is_regression=is_regression,
                is_new_group_environment=is_new_group_environment,
                cache_key=None,
                group_id=event.group_id,
                occurrence_id=event.occurrence.id,
                project_id=event.group.project_id,
            )
        return cache_key

    def test_issueless(self):
        # Skip this test since there's no way to have issueless events in the issue platform
        pass

    def test_no_cache_abort(self):
        # We don't use the cache for generic issues, so skip this test
        pass
- @patch("sentry.rules.processor.RuleProcessor")
- def test_occurrence_deduping(self, mock_processor):
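        """A duplicate occurrence should be deduplicated so the rule processor runs only once."""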
- event = self.create_event(data={"message": "testing"}, project_id=self.project.id)
- self.call_post_process_group(
- is_new=True,
- is_regression=True,
- is_new_group_environment=False,
- event=event,
- )
- assert mock_processor.call_count == 1
- mock_processor.assert_called_with(EventMatcher(event), True, True, False, False)
- # Calling this again should do nothing, since we've already processed this occurrence.
- self.call_post_process_group(
- is_new=False,
- is_regression=True,
- is_new_group_environment=False,
- event=event,
- )
- # Make sure we haven't called this again, since we should exit early.
- assert mock_processor.call_count == 1
- @pytest.mark.skip(reason="those tests do not work with the given call_post_process_group impl")
- def test_processing_cache_cleared(self):
- pass
- @pytest.mark.skip(reason="those tests do not work with the given call_post_process_group impl")
- def test_processing_cache_cleared_with_commits(self):
- pass