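# Tests for EventManager: event normalization and saving, grouping, release
# and regression handling, and derived event metadata.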
from __future__ import annotations

import logging
import uuid
from collections.abc import Mapping
from datetime import UTC, datetime, timedelta
from time import time
from typing import Any
from unittest import mock
from unittest.mock import MagicMock, patch

import pytest
import responses
from arroyo.backends.kafka.consumer import KafkaPayload
from arroyo.backends.local.backend import LocalBroker
from arroyo.backends.local.storages.memory import MemoryMessageStorage
from arroyo.types import Partition, Topic
from django.conf import settings
from django.core.cache import cache
from django.utils import timezone

from fixtures.github import (
    COMPARE_COMMITS_EXAMPLE_WITH_INTERMEDIATE,
    EARLIER_COMMIT_SHA,
    GET_COMMIT_EXAMPLE,
    GET_LAST_2_COMMITS_EXAMPLE,
    GET_PRIOR_COMMIT_EXAMPLE,
    LATER_COMMIT_SHA,
)
from sentry import eventstore, nodestore, tsdb
from sentry.attachments import CachedAttachment, attachment_cache
from sentry.constants import MAX_VERSION_LENGTH, DataCategory
from sentry.dynamic_sampling import (
    ExtendedBoostedRelease,
    Platform,
    ProjectBoostedReleases,
    get_redis_client_for_ds,
)
from sentry.event_manager import (
    EventManager,
    _get_event_instance,
    get_event_type,
    has_pending_commit_resolution,
    materialize_metadata,
    save_grouphash_and_group,
)
from sentry.eventstore.models import Event
from sentry.exceptions import HashDiscarded
from sentry.grouping.api import GroupingConfig, load_grouping_config
from sentry.grouping.utils import hash_from_values
from sentry.ingest.inbound_filters import FilterStatKeys
from sentry.issues.grouptype import (
    ErrorGroupType,
    GroupCategory,
    PerformanceNPlusOneGroupType,
    PerformanceSlowDBQueryGroupType,
)
from sentry.issues.issue_occurrence import IssueEvidence
from sentry.models.activity import Activity
from sentry.models.commit import Commit
from sentry.models.environment import Environment
from sentry.models.group import Group, GroupStatus
from sentry.models.groupenvironment import GroupEnvironment
from sentry.models.grouphash import GroupHash
from sentry.models.grouplink import GroupLink
from sentry.models.grouprelease import GroupRelease
from sentry.models.groupresolution import GroupResolution
from sentry.models.grouptombstone import GroupTombstone
from sentry.models.integrations import Integration
from sentry.models.integrations.external_issue import ExternalIssue
from sentry.models.pullrequest import PullRequest, PullRequestCommit
from sentry.models.release import Release
from sentry.models.releasecommit import ReleaseCommit
from sentry.models.releaseheadcommit import ReleaseHeadCommit
from sentry.models.releaseprojectenvironment import ReleaseProjectEnvironment
from sentry.options import set
from sentry.spans.grouping.utils import hash_values
from sentry.testutils.asserts import assert_mock_called_once_with_partial
from sentry.testutils.cases import (
    PerformanceIssueTestCase,
    SnubaTestCase,
    TestCase,
    TransactionTestCase,
)
from sentry.testutils.helpers import apply_feature_flag_on_cls, override_options
from sentry.testutils.helpers.datetime import before_now, freeze_time, iso_format
from sentry.testutils.performance_issues.event_generators import get_event
from sentry.testutils.pytest.fixtures import django_db_all
from sentry.testutils.silo import assume_test_silo_mode_of
from sentry.testutils.skips import requires_snuba
from sentry.tsdb.base import TSDBModel
from sentry.types.activity import ActivityType
from sentry.types.group import PriorityLevel
from sentry.usage_accountant import accountant
from sentry.utils import json
from sentry.utils.cache import cache_key_for_event
from sentry.utils.eventuser import EventUser
from sentry.utils.outcomes import Outcome
from sentry.utils.samples import load_data

pytestmark = [requires_snuba]
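

# Test helper: builds a minimal event payload; any field can be overridden
# via kwargs.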
def make_event(**kwargs: Any) -> dict[str, Any]:
    result = {
        "event_id": uuid.uuid1().hex,
        "level": logging.ERROR,
        "logger": "default",
        "tags": [],
    }
    result.update(kwargs)
    return result
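

# Shared helper mixin: saves an event tagged with the given release version.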
class EventManagerTestMixin:
    def make_release_event(self, release_name: str, project_id: int) -> Event:
        manager = EventManager(make_event(release=release_name))
        manager.normalize()
        event = manager.save(project_id)
        return event


class EventManagerTest(TestCase, SnubaTestCase, EventManagerTestMixin, PerformanceIssueTestCase):
    def test_ephemeral_interfaces_removed_on_save(self) -> None:
        manager = EventManager(make_event(platform="python"))
        manager.normalize()
        event = manager.save(self.project.id)

        group = event.group
        assert group is not None
        assert group.platform == "python"
        assert event.platform == "python"
- @mock.patch("sentry.event_manager.eventstream.backend.insert")
- def test_dupe_message_id(self, eventstream_insert: mock.MagicMock) -> None:
- # Saves the latest event to nodestore and eventstream
- project_id = self.project.id
- event_id = "a" * 32
- node_id = Event.generate_node_id(project_id, event_id)
- manager = EventManager(make_event(event_id=event_id, message="first"))
- manager.normalize()
- manager.save(project_id)
- assert nodestore.backend.get(node_id)["logentry"]["formatted"] == "first"
- manager = EventManager(make_event(event_id=event_id, message="second"))
- manager.normalize()
- manager.save(project_id)
- assert nodestore.backend.get(node_id)["logentry"]["formatted"] == "second"
- assert eventstream_insert.call_count == 2
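
    # materialize_metadata() derives the persisted `type`, `title`, `culprit`,
    # and `metadata` fields from normalized event data.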
    def test_materialize_metadata_simple(self) -> None:
        manager = EventManager(make_event(transaction="/dogs/are/great/"))
        event = manager.save(self.project.id)

        event_type = get_event_type(event.data)
        event_metadata = event_type.get_metadata(event.data)

        assert materialize_metadata(event.data, event_type, event_metadata) == {
            "type": "default",
            "culprit": "/dogs/are/great/",
            "metadata": {"title": "<unlabeled event>"},
            "title": "<unlabeled event>",
            "location": None,
        }

    def test_materialize_metadata_preserves_existing_metadata(self) -> None:
        manager = EventManager(make_event())
        event = manager.save(self.project.id)

        event.data.setdefault("metadata", {})
        event.data["metadata"]["dogs"] = "are great"  # should not get clobbered

        event_type = get_event_type(event.data)
        event_metadata_from_type = event_type.get_metadata(event.data)
        materialized = materialize_metadata(event.data, event_type, event_metadata_from_type)

        assert materialized["metadata"] == {"title": "<unlabeled event>", "dogs": "are great"}

    def test_react_error_picks_cause_error_title_subtitle(self) -> None:
        cause_error_value = "Load failed"
        # React 19 hydration errors include both the hydration error and a cause.
        # Deriving the title from the cause error makes the two easier for
        # developers to distinguish.
        manager = EventManager(
            make_event(
                exception={
                    "values": [
                        {
                            "type": "TypeError",
                            "value": cause_error_value,
                            "mechanism": {
                                "type": "onerror",
                                "handled": False,
                                "source": "cause",
                                "exception_id": 1,
                                "parent_id": 0,
                            },
                        },
                        {
                            "type": "Error",
                            "value": "There was an error during concurrent rendering but React was able to recover by instead synchronously rendering the entire root.",
                            "mechanism": {
                                "type": "generic",
                                "handled": True,
                                "exception_id": 0,
                            },
                        },
                    ]
                },
            )
        )
        event = manager.save(self.project.id)
        assert event.data["metadata"]["value"] == cause_error_value
        assert event.data["metadata"]["type"] == "TypeError"
        assert event.group is not None
        assert event.group.title == f"TypeError: {cause_error_value}"

    def test_react_hydration_error_picks_cause_error_title_subtitle(self) -> None:
        cause_error_value = "Cannot read properties of undefined (reading 'nodeName')"
        # React 19 hydration errors include both the hydration error and a cause.
        # Deriving the title from the cause error makes the two easier for
        # developers to distinguish.
        manager = EventManager(
            make_event(
                exception={
                    "values": [
                        {
                            "type": "TypeError",
                            "value": cause_error_value,
                            "mechanism": {
                                "type": "chained",
                                "source": "cause",
                                "exception_id": 1,
                                "parent_id": 0,
                            },
                        },
                        {
                            "type": "Error",
                            "value": "There was an error while hydrating but React was able to recover by instead client rendering from the nearest Suspense boundary.",
                            "mechanism": {
                                "type": "generic",
                                "exception_id": 0,
                            },
                        },
                    ]
                },
            )
        )
        event = manager.save(self.project.id)
        assert event.data["metadata"]["value"] == cause_error_value
        assert event.data["metadata"]["type"] == "TypeError"
        assert event.group is not None
        assert event.group.title == f"TypeError: {cause_error_value}"
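
    # Regression handling: a resolved group should be unresolved again when a
    # newer event arrives, subject to plugin and release/commit constraints.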
- @mock.patch("sentry.signals.issue_unresolved.send_robust")
- def test_unresolves_group(self, send_robust: mock.MagicMock) -> None:
- ts = time() - 300
- # N.B. EventManager won't unresolve the group unless the event2 has a
- # later timestamp than event1.
- manager = EventManager(make_event(event_id="a" * 32, checksum="a" * 32, timestamp=ts))
- with self.tasks():
- event = manager.save(self.project.id)
- group = Group.objects.get(id=event.group_id)
- group.status = GroupStatus.RESOLVED
- group.substatus = None
- group.save()
- assert group.is_resolved()
- manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=ts + 50))
- event2 = manager.save(self.project.id)
- assert event.group_id == event2.group_id
- group = Group.objects.get(id=group.id)
- assert not group.is_resolved()
- assert send_robust.called
- @mock.patch("sentry.event_manager.plugin_is_regression")
- def test_does_not_unresolve_group(self, plugin_is_regression: mock.MagicMock) -> None:
- # N.B. EventManager won't unresolve the group unless the event2 has a
- # later timestamp than event1.
- plugin_is_regression.return_value = False
- manager = EventManager(
- make_event(event_id="a" * 32, checksum="a" * 32, timestamp=1403007314)
- )
- with self.tasks():
- manager.normalize()
- event = manager.save(self.project.id)
- group = Group.objects.get(id=event.group_id)
- group.status = GroupStatus.RESOLVED
- group.substatus = None
- group.save()
- assert group.is_resolved()
- manager = EventManager(
- make_event(event_id="b" * 32, checksum="a" * 32, timestamp=1403007315)
- )
- manager.normalize()
- event2 = manager.save(self.project.id)
- assert event.group_id == event2.group_id
- group = Group.objects.get(id=group.id)
- assert group.is_resolved()
- @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
- @mock.patch("sentry.event_manager.plugin_is_regression")
- def test_marks_as_unresolved_with_new_release(
- self,
- plugin_is_regression: mock.MagicMock,
- mock_send_activity_notifications_delay: mock.MagicMock,
- ) -> None:
- plugin_is_regression.return_value = True
- old_release = Release.objects.create(
- version="a",
- organization_id=self.project.organization_id,
- date_added=timezone.now() - timedelta(minutes=30),
- )
- old_release.add_project(self.project)
- manager = EventManager(
- make_event(
- event_id="a" * 32,
- checksum="a" * 32,
- timestamp=time() - 50000, # need to work around active_at
- release=old_release.version,
- )
- )
- event = manager.save(self.project.id)
- assert event.group is not None
- group = event.group
- group.update(status=GroupStatus.RESOLVED, substatus=None)
- resolution = GroupResolution.objects.create(release=old_release, group=group)
- activity = Activity.objects.create(
- group=group,
- project=group.project,
- type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
- ident=resolution.id,
- data={"version": ""},
- )
- manager = EventManager(
- make_event(
- event_id="b" * 32, checksum="a" * 32, timestamp=time(), release=old_release.version
- )
- )
- event = manager.save(self.project.id)
- assert event.group_id == group.id
- group = Group.objects.get(id=group.id)
- assert group.status == GroupStatus.RESOLVED
- activity = Activity.objects.get(id=activity.id)
- assert activity.data is not None
- assert activity.data["version"] == ""
- assert GroupResolution.objects.filter(group=group).exists()
- manager = EventManager(
- make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
- )
- event = manager.save(self.project.id)
- assert event.group_id == group.id
- group = Group.objects.get(id=group.id)
- assert group.status == GroupStatus.UNRESOLVED
- activity = Activity.objects.get(id=activity.id)
- assert activity.data is not None
- assert activity.data["version"] == "b"
- assert not GroupResolution.objects.filter(group=group).exists()
- activity = Activity.objects.get(group=group, type=ActivityType.SET_REGRESSION.value)
- mock_send_activity_notifications_delay.assert_called_once_with(activity.id)
- @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
- @mock.patch("sentry.event_manager.plugin_is_regression")
- def test_that_release_in_latest_activity_prior_to_regression_is_not_overridden(
- self,
- plugin_is_regression: mock.MagicMock,
- mock_send_activity_notifications_delay: mock.MagicMock,
- ) -> None:
- """
- Test that ensures in the case where a regression occurs, the release prior to the latest
- activity to that regression is not overridden.
- It should only be overridden if the activity was awaiting the upcoming release
- """
- plugin_is_regression.return_value = True
- # Create a release and a group associated with it
- old_release = self.create_release(
- version="foobar", date_added=timezone.now() - timedelta(minutes=30)
- )
- manager = EventManager(
- make_event(
- event_id="a" * 32,
- checksum="a" * 32,
- timestamp=time() - 50000, # need to work around active_at
- release=old_release.version,
- )
- )
- event = manager.save(self.project.id)
- assert event.group is not None
- group = event.group
- group.update(status=GroupStatus.RESOLVED, substatus=None)
- # Resolve the group in old_release
- resolution = GroupResolution.objects.create(release=old_release, group=group)
- activity = Activity.objects.create(
- group=group,
- project=group.project,
- type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
- ident=resolution.id,
- data={"version": "foobar"},
- )
- # Create a regression
- manager = EventManager(
- make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
- )
- event = manager.save(self.project.id)
- assert event.group_id == group.id
- group = Group.objects.get(id=group.id)
- assert group.status == GroupStatus.UNRESOLVED
- activity = Activity.objects.get(id=activity.id)
- assert activity.data is not None
- assert activity.data["version"] == "foobar"
- regressed_activity = Activity.objects.get(
- group=group, type=ActivityType.SET_REGRESSION.value
- )
- assert regressed_activity.data is not None
- assert regressed_activity.data["version"] == "b"
- assert regressed_activity.data["follows_semver"] is False
- mock_send_activity_notifications_delay.assert_called_once_with(regressed_activity.id)
- @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
- @mock.patch("sentry.event_manager.plugin_is_regression")
- def test_current_release_version_in_latest_activity_prior_to_regression_is_not_overridden(
- self,
- plugin_is_regression: mock.MagicMock,
- mock_send_activity_notifications_delay: mock.MagicMock,
- ) -> None:
- """
- Test that ensures in the case where a regression occurs, the release prior to the latest
- activity to that regression is overridden with the release regression occurred in but the
- value of `current_release_version` used for semver is not lost in the update.
- """
- plugin_is_regression.return_value = True
- # Create a release and a group associated with it
- old_release = self.create_release(
- version="a", date_added=timezone.now() - timedelta(minutes=30)
- )
- manager = EventManager(
- make_event(
- event_id="a" * 32,
- checksum="a" * 32,
- timestamp=time() - 50000, # need to work around active_at
- release=old_release.version,
- )
- )
- event = manager.save(self.project.id)
- assert event.group is not None
- group = event.group
- group.update(status=GroupStatus.RESOLVED, substatus=None)
- # Resolve the group in old_release
- resolution = GroupResolution.objects.create(release=old_release, group=group)
- activity = Activity.objects.create(
- group=group,
- project=group.project,
- type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
- ident=resolution.id,
- data={"version": "", "current_release_version": "pre foobar"},
- )
- # Create a regression
- manager = EventManager(
- make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
- )
- event = manager.save(self.project.id)
- assert event.group_id == group.id
- group = Group.objects.get(id=group.id)
- assert group.status == GroupStatus.UNRESOLVED
- activity = Activity.objects.get(id=activity.id)
- assert activity.data is not None
- assert activity.data["version"] == "b"
- assert activity.data["current_release_version"] == "pre foobar"
- regressed_activity = Activity.objects.get(
- group=group, type=ActivityType.SET_REGRESSION.value
- )
- assert regressed_activity.data is not None
- assert regressed_activity.data["version"] == "b"
- mock_send_activity_notifications_delay.assert_called_once_with(regressed_activity.id)
- @mock.patch("sentry.event_manager.plugin_is_regression")
- def test_resolved_in_release_regression_activity_follows_semver(
- self, plugin_is_regression: mock.MagicMock
- ) -> None:
- """
- Issue was marked resolved in 1.0.0, regression occurred in 2.0.0.
- If the project follows semver then the regression activity should have `follows_semver` set.
- We should also record which version the issue was resolved in as `resolved_in_version`.
- This allows the UI to say the issue was resolved in 1.0.0, regressed in 2.0.0 and
- the versions were compared using semver.
- """
- plugin_is_regression.return_value = True
- # Create a release and a group associated with it
- old_release = self.create_release(
- version="foo@1.0.0", date_added=timezone.now() - timedelta(minutes=30)
- )
- manager = EventManager(
- make_event(
- event_id="a" * 32,
- checksum="a" * 32,
- timestamp=time() - 50000, # need to work around active_at
- release=old_release.version,
- )
- )
- event = manager.save(self.project.id)
- assert event.group is not None
- group = event.group
- group.update(status=GroupStatus.RESOLVED, substatus=None)
- # Resolve the group in old_release
- resolution = GroupResolution.objects.create(release=old_release, group=group)
- activity = Activity.objects.create(
- group=group,
- project=group.project,
- type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
- ident=resolution.id,
- data={"version": "foo@1.0.0"},
- )
- # Create a regression
- manager = EventManager(
- make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="foo@2.0.0")
- )
- event = manager.save(self.project.id)
- assert event.group_id == group.id
- group = Group.objects.get(id=group.id)
- assert group.status == GroupStatus.UNRESOLVED
- activity = Activity.objects.get(id=activity.id)
- assert activity.data is not None
- assert activity.data["version"] == "foo@1.0.0"
- regressed_activity = Activity.objects.get(
- group=group, type=ActivityType.SET_REGRESSION.value
- )
- assert regressed_activity.data is not None
- assert regressed_activity.data["version"] == "foo@2.0.0"
- assert regressed_activity.data["follows_semver"] is True
- assert regressed_activity.data["resolved_in_version"] == "foo@1.0.0"
    def test_has_pending_commit_resolution(self) -> None:
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "1.0"
        assert not has_pending_commit_resolution(group)

        # Add a commit with no associated release
        repo = self.create_repo(project=group.project)
        commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="a" * 40
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=commit.id,
            relationship=GroupLink.Relationship.resolves,
        )
        assert has_pending_commit_resolution(group)

    def test_multiple_pending_commit_resolution(self) -> None:
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None

        # Add a few commits with no associated release
        repo = self.create_repo(project=group.project)
        for key in ["a", "b", "c"]:
            commit = Commit.objects.create(
                organization_id=group.project.organization_id,
                repository_id=repo.id,
                key=key * 40,
            )
            GroupLink.objects.create(
                group_id=group.id,
                project_id=group.project_id,
                linked_type=GroupLink.LinkedType.commit,
                linked_id=commit.id,
                relationship=GroupLink.Relationship.resolves,
            )

        pending = has_pending_commit_resolution(group)
        assert pending

        # Most recent commit has been associated with a release
        latest_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="d" * 40
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=latest_commit.id,
            relationship=GroupLink.Relationship.resolves,
        )
        ReleaseCommit.objects.create(
            organization_id=group.project.organization_id,
            release=group.first_release,
            commit=latest_commit,
            order=0,
        )

        pending = has_pending_commit_resolution(group)
        assert pending is False

    def test_has_pending_commit_resolution_issue_regression(self) -> None:
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None
        repo = self.create_repo(project=group.project)

        # The commit that resolved the issue is part of a PR, but all commits
        # within the PR are unreleased
        commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="a" * 40
        )
        second_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="b" * 40
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=commit.id,
            relationship=GroupLink.Relationship.resolves,
        )

        pr = PullRequest.objects.create(
            organization_id=group.project.organization_id,
            repository_id=repo.id,
            key="1",
        )

        PullRequestCommit.objects.create(pull_request_id=pr.id, commit_id=commit.id)
        PullRequestCommit.objects.create(pull_request_id=pr.id, commit_id=second_commit.id)

        assert PullRequestCommit.objects.filter(pull_request_id=pr.id, commit_id=commit.id).exists()
        assert PullRequestCommit.objects.filter(
            pull_request_id=pr.id, commit_id=second_commit.id
        ).exists()
        assert not ReleaseCommit.objects.filter(commit__pullrequestcommit__id=commit.id).exists()
        assert not ReleaseCommit.objects.filter(
            commit__pullrequestcommit__id=second_commit.id
        ).exists()

        pending = has_pending_commit_resolution(group)
        assert pending

    def test_has_pending_commit_resolution_issue_regression_released_commits(self) -> None:
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None
        release = self.create_release(project=self.project, version="1.1")

        repo = self.create_repo(project=group.project)

        # commit 1 is part of the PR, it resolves the issue in the commit message, and is unreleased
        commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="a" * 38
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=commit.id,
            relationship=GroupLink.Relationship.resolves,
        )

        # commit 2 is part of the PR, but does not resolve the issue, and is released
        released_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="b" * 38
        )

        # commit 3 is part of the PR, but does not resolve the issue, and is unreleased
        unreleased_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="c" * 38
        )

        pr = PullRequest.objects.create(
            organization_id=group.project.organization_id,
            repository_id=repo.id,
            key="19",
        )

        PullRequestCommit.objects.create(pull_request_id=pr.id, commit_id=commit.id)

        released_pr_commit = PullRequestCommit.objects.create(
            pull_request_id=pr.id, commit_id=released_commit.id
        )

        unreleased_pr_commit = PullRequestCommit.objects.create(
            pull_request_id=pr.id, commit_id=unreleased_commit.id
        )

        ReleaseCommit.objects.create(
            organization_id=group.project.organization_id,
            release=release,
            commit=released_commit,
            order=1,
        )

        assert Commit.objects.all().count() == 3
        assert PullRequestCommit.objects.filter(pull_request_id=pr.id, commit_id=commit.id).exists()
        assert PullRequestCommit.objects.filter(
            pull_request_id=pr.id, commit_id=released_commit.id
        ).exists()
        assert PullRequestCommit.objects.filter(commit__id=unreleased_pr_commit.commit.id).exists()
        assert ReleaseCommit.objects.filter(
            commit__pullrequestcommit__id=released_pr_commit.id
        ).exists()

        pending = has_pending_commit_resolution(group)
        assert pending is False
- @mock.patch("sentry.integrations.example.integration.ExampleIntegration.sync_status_outbound")
- @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
- @mock.patch("sentry.event_manager.plugin_is_regression")
- def test_marks_as_unresolved_with_new_release_with_integration(
- self,
- plugin_is_regression: mock.MagicMock,
- mock_send_activity_notifications_delay: mock.MagicMock,
- mock_sync_status_outbound: mock.MagicMock,
- ) -> None:
- plugin_is_regression.return_value = True
- old_release = Release.objects.create(
- version="a",
- organization_id=self.project.organization_id,
- date_added=timezone.now() - timedelta(minutes=30),
- )
- old_release.add_project(self.project)
- manager = EventManager(
- make_event(
- event_id="a" * 32,
- checksum="a" * 32,
- timestamp=time() - 50000, # need to work around active_at
- release=old_release.version,
- )
- )
- event = manager.save(self.project.id)
- assert event.group is not None
- group = event.group
- org = group.organization
- integration = self.create_integration(
- organization=org,
- external_id="example",
- oi_params={
- "config": {
- "sync_comments": True,
- "sync_status_outbound": True,
- "sync_status_inbound": True,
- "sync_assignee_outbound": True,
- "sync_assignee_inbound": True,
- }
- },
- provider="example",
- name="Example",
- )
- external_issue = ExternalIssue.objects.get_or_create(
- organization_id=org.id, integration_id=integration.id, key="APP-%s" % group.id
- )[0]
- GroupLink.objects.get_or_create(
- group_id=group.id,
- project_id=group.project_id,
- linked_type=GroupLink.LinkedType.issue,
- linked_id=external_issue.id,
- relationship=GroupLink.Relationship.references,
- )[0]
- group.update(status=GroupStatus.RESOLVED, substatus=None)
- resolution = GroupResolution.objects.create(release=old_release, group=group)
- activity = Activity.objects.create(
- group=group,
- project=group.project,
- type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
- ident=resolution.id,
- data={"version": ""},
- )
- manager = EventManager(
- make_event(
- event_id="b" * 32, checksum="a" * 32, timestamp=time(), release=old_release.version
- )
- )
- with self.tasks():
- with self.feature({"organizations:integrations-issue-sync": True}):
- event = manager.save(self.project.id)
- assert event.group_id == group.id
- group = Group.objects.get(id=group.id)
- assert group.status == GroupStatus.RESOLVED
- activity = Activity.objects.get(id=activity.id)
- assert activity.data is not None
- assert activity.data["version"] == ""
- assert GroupResolution.objects.filter(group=group).exists()
- manager = EventManager(
- make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
- )
- event = manager.save(self.project.id)
- assert event.group is not None
- mock_sync_status_outbound.assert_called_once_with(
- external_issue, False, event.group.project_id
- )
- assert event.group_id == group.id
- group = Group.objects.get(id=group.id)
- assert group.status == GroupStatus.UNRESOLVED
- activity = Activity.objects.get(id=activity.id)
- assert activity.data is not None
- assert activity.data["version"] == "b"
- assert not GroupResolution.objects.filter(group=group).exists()
- activity = Activity.objects.get(group=group, type=ActivityType.SET_REGRESSION.value)
- mock_send_activity_notifications_delay.assert_called_once_with(activity.id)
- @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
- @mock.patch("sentry.event_manager.plugin_is_regression")
- def test_does_not_mark_as_unresolved_with_pending_commit(
- self,
- plugin_is_regression: mock.MagicMock,
- mock_send_activity_notifications_delay: mock.MagicMock,
- ) -> None:
- plugin_is_regression.return_value = True
- repo = self.create_repo(project=self.project)
- commit = self.create_commit(repo=repo)
- manager = EventManager(
- make_event(
- event_id="a" * 32,
- checksum="a" * 32,
- timestamp=time() - 50000, # need to work around active_at
- )
- )
- event = manager.save(self.project.id)
- group = event.group
- assert group is not None
- group.update(status=GroupStatus.RESOLVED, substatus=None)
- GroupLink.objects.create(
- group_id=group.id,
- project_id=group.project_id,
- linked_id=commit.id,
- linked_type=GroupLink.LinkedType.commit,
- relationship=GroupLink.Relationship.resolves,
- )
- manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=time()))
- event = manager.save(self.project.id)
- assert event.group is not None
- assert event.group_id == group.id
- assert Group.objects.get(id=group.id).status == GroupStatus.RESOLVED
- @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
- @mock.patch("sentry.event_manager.plugin_is_regression")
- def test_mark_as_unresolved_with_released_commit(
- self,
- plugin_is_regression: mock.MagicMock,
- mock_send_activity_notifications_delay: mock.MagicMock,
- ) -> None:
- plugin_is_regression.return_value = True
- release = self.create_release(project=self.project)
- repo = self.create_repo(project=self.project)
- commit = self.create_commit(repo=repo, release=release, project=self.project)
- manager = EventManager(
- make_event(
- event_id="a" * 32,
- checksum="a" * 32,
- timestamp=time() - 50000, # need to work around active_at
- )
- )
- event = manager.save(self.project.id)
- group = event.group
- assert group is not None
- group.update(status=GroupStatus.RESOLVED, substatus=None)
- GroupLink.objects.create(
- group_id=group.id,
- project_id=group.project_id,
- linked_id=commit.id,
- linked_type=GroupLink.LinkedType.commit,
- relationship=GroupLink.Relationship.resolves,
- )
- manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=time()))
- event = manager.save(self.project.id)
- assert event.group is not None
- assert event.group_id == group.id
- assert Group.objects.get(id=group.id).status == GroupStatus.UNRESOLVED
- @mock.patch("sentry.models.Group.is_resolved")
- def test_unresolves_group_with_auto_resolve(self, mock_is_resolved: mock.MagicMock) -> None:
- ts = time() - 100
- mock_is_resolved.return_value = False
- manager = EventManager(make_event(event_id="a" * 32, checksum="a" * 32, timestamp=ts))
- with self.tasks():
- event = manager.save(self.project.id)
- assert event.group is not None
- mock_is_resolved.return_value = True
- manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=ts + 100))
- with self.tasks():
- event2 = manager.save(self.project.id)
- assert event2.group is not None
- assert event.group_id == event2.group_id
- group = Group.objects.get(id=event.group.id)
- assert group.active_at
- assert group.active_at.replace(second=0) == event2.datetime.replace(second=0)
- assert group.active_at.replace(second=0) != event.datetime.replace(second=0)
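
    # Culprit/transaction derivation: the saved event's `transaction` and
    # `culprit` come from the normalized payload and stacktrace processing.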
    def test_invalid_transaction(self) -> None:
        dict_input = {"messages": "foo"}
        manager = EventManager(make_event(transaction=dict_input))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.transaction is None

    def test_transaction_as_culprit(self) -> None:
        manager = EventManager(make_event(transaction="foobar"))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.transaction == "foobar"
        assert event.culprit == "foobar"

    def test_culprit_is_not_transaction(self) -> None:
        manager = EventManager(make_event(culprit="foobar"))
        manager.normalize()
        event1 = manager.save(self.project.id)
        assert event1.transaction is None
        assert event1.culprit == "foobar"

    def test_culprit_after_stacktrace_processing(self) -> None:
        from sentry.grouping.enhancer import Enhancements

        enhancement = Enhancements.from_config_string(
            """
            function:in_app_function +app
            function:not_in_app_function -app
            """,
        )

        manager = EventManager(
            make_event(
                platform="native",
                exception={
                    "values": [
                        {
                            "type": "Hello",
                            "stacktrace": {
                                "frames": [
                                    {
                                        "function": "not_in_app_function",
                                    },
                                    {
                                        "function": "in_app_function",
                                    },
                                ]
                            },
                        }
                    ]
                },
            )
        )
        manager.normalize()
        manager.get_data()["grouping_config"] = {
            "enhancements": enhancement.dumps(),
            "id": "legacy:2019-03-12",
        }
        event1 = manager.save(self.project.id)
        assert event1.transaction is None
        assert event1.culprit == "in_app_function"

    def test_inferred_culprit_from_empty_stacktrace(self) -> None:
        manager = EventManager(make_event(stacktrace={"frames": []}))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.culprit == ""

    def test_transaction_and_culprit(self) -> None:
        manager = EventManager(make_event(transaction="foobar", culprit="baz"))
        manager.normalize()
        event1 = manager.save(self.project.id)
        assert event1.transaction == "foobar"
        assert event1.culprit == "baz"
    def test_release_with_empty_version(self) -> None:
        cases = ["", " ", "\t", "\n"]
        for case in cases:
            event = self.make_release_event(case, self.project.id)
            assert event.group is not None
            assert not event.group.first_release
            assert Release.objects.filter(projects__in=[self.project.id]).count() == 0
            assert Release.objects.filter(organization_id=self.project.organization_id).count() == 0

    def test_first_release(self) -> None:
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "1.0"

        event = self.make_release_event("2.0", project_id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "1.0"

    def test_release_project_slug(self) -> None:
        project = self.create_project(name="foo")
        release = Release.objects.create(version="foo-1.0", organization=project.organization)
        release.add_project(project)

        event = self.make_release_event("1.0", project.id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "foo-1.0"
        release_tag = [v for k, v in event.tags if k == "sentry:release"][0]
        assert release_tag == "foo-1.0"

        event = self.make_release_event("2.0", project.id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "foo-1.0"

    def test_release_project_slug_long(self) -> None:
        project = self.create_project(name="foo")
        partial_version_len = MAX_VERSION_LENGTH - 4
        release = Release.objects.create(
            version="foo-{}".format("a" * partial_version_len), organization=project.organization
        )
        release.add_project(project)

        event = self.make_release_event("a" * partial_version_len, project.id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "foo-{}".format("a" * partial_version_len)
        release_tag = [v for k, v in event.tags if k == "sentry:release"][0]
        assert release_tag == "foo-{}".format("a" * partial_version_len)

    def test_group_release_no_env(self) -> None:
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        assert event.group_id is not None

        release = Release.objects.get(version="1.0", projects=event.project_id)
        assert GroupRelease.objects.filter(
            release_id=release.id, group_id=event.group_id, environment=""
        ).exists()

        # ensure we're not erroring on second creation
        event = self.make_release_event("1.0", project_id)

    def test_group_release_with_env(self) -> None:
        manager = EventManager(make_event(release="1.0", environment="prod", event_id="a" * 32))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.group_id is not None

        release = Release.objects.get(version="1.0", projects=event.project_id)
        assert GroupRelease.objects.filter(
            release_id=release.id, group_id=event.group_id, environment="prod"
        ).exists()

        manager = EventManager(make_event(release="1.0", environment="staging", event_id="b" * 32))
        event = manager.save(self.project.id)

        release = Release.objects.get(version="1.0", projects=event.project_id)
        assert event.group_id is not None
        assert GroupRelease.objects.filter(
            release_id=release.id, group_id=event.group_id, environment="staging"
        ).exists()

    def test_tsdb(self) -> None:
        project = self.project
        manager = EventManager(
            make_event(
                fingerprint=["totally unique super duper fingerprint"],
                environment="totally unique super duper environment",
            )
        )
        event = manager.save(project.id)
        assert event.group is not None

        def query(model: TSDBModel, key: int, **kwargs: Any) -> int:
            return tsdb.backend.get_sums(
                model,
                [key],
                event.datetime,
                event.datetime,
                tenant_ids={"organization_id": 123, "referrer": "r"},
                **kwargs,
            )[key]

        assert query(TSDBModel.project, project.id) == 1
        assert query(TSDBModel.group, event.group.id) == 1

        environment_id = Environment.get_for_organization_id(
            event.project.organization_id, "totally unique super duper environment"
        ).id
        assert query(TSDBModel.project, project.id, environment_id=environment_id) == 1
        assert query(TSDBModel.group, event.group.id, environment_id=environment_id) == 1

    @pytest.mark.xfail
    def test_record_frequencies(self) -> None:
        project = self.project
        manager = EventManager(make_event())
        event = manager.save(project.id)

        assert tsdb.backend.get_most_frequent(
            TSDBModel.frequent_issues_by_project, (event.project.id,), event.datetime
        ) == {event.project.id: [(event.group_id, 1.0)]}
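
    # EventUser handling: user context on an event is persisted via EventUser
    # and surfaced through the `sentry:user` tag and per-user TSDB counters.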
    def test_event_user(self) -> None:
        event_id = uuid.uuid4().hex
        manager = EventManager(
            make_event(
                event_id=event_id, environment="totally unique environment", **{"user": {"id": "1"}}
            )
        )
        manager.normalize()
        with self.tasks():
            event = manager.save(self.project.id)
        assert event.group is not None

        environment_id = Environment.get_for_organization_id(
            event.project.organization_id, "totally unique environment"
        ).id

        assert tsdb.backend.get_distinct_counts_totals(
            TSDBModel.users_affected_by_group,
            (event.group.id,),
            event.datetime,
            event.datetime,
            tenant_ids={"referrer": "r", "organization_id": 123},
        ) == {event.group.id: 1}
        assert tsdb.backend.get_distinct_counts_totals(
            TSDBModel.users_affected_by_project,
            (event.project.id,),
            event.datetime,
            event.datetime,
            tenant_ids={"organization_id": 123, "referrer": "r"},
        ) == {event.project.id: 1}
        assert tsdb.backend.get_distinct_counts_totals(
            TSDBModel.users_affected_by_group,
            (event.group.id,),
            event.datetime,
            event.datetime,
            environment_id=environment_id,
            tenant_ids={"organization_id": 123, "referrer": "r"},
        ) == {event.group.id: 1}
        assert tsdb.backend.get_distinct_counts_totals(
            TSDBModel.users_affected_by_project,
            (event.project.id,),
            event.datetime,
            event.datetime,
            environment_id=environment_id,
            tenant_ids={"organization_id": 123, "referrer": "r"},
        ) == {event.project.id: 1}

        saved_event = eventstore.backend.get_event_by_id(self.project.id, event_id)
        euser = EventUser.from_event(saved_event)
        assert event.get_tag("sentry:user") == euser.tag_value

        # Clear the cache; otherwise the cached EventUser from the previous
        # manager.save() would be used instead of jane.
        cache.clear()

        # Ensure the event user is mapped to tags on the second attempt as well.
        event_id_2 = uuid.uuid4().hex
        manager = EventManager(
            make_event(event_id=event_id_2, **{"user": {"id": "1", "name": "jane"}})
        )
        manager.normalize()
        with self.tasks():
            manager.save(self.project.id)

        saved_event = eventstore.backend.get_event_by_id(self.project.id, event_id_2)
        euser = EventUser.from_event(saved_event)
        assert event.get_tag("sentry:user") == euser.tag_value
        assert euser.name == "jane"
        assert euser.user_ident == "1"

    def test_event_user_invalid_ip(self) -> None:
        event_id = uuid.uuid4().hex
        manager = EventManager(
            make_event(
                event_id=event_id, environment="totally unique environment", **{"user": {"id": "1"}}
            )
        )
        manager.normalize()

        # This can happen as part of PII stripping, which happens after normalization
        manager._data["user"]["ip_address"] = "[ip]"

        with self.tasks():
            manager.save(self.project.id)

        saved_event = eventstore.backend.get_event_by_id(self.project.id, event_id)
        euser = EventUser.from_event(saved_event)
        assert euser.ip_address is None

    def test_event_user_unicode_identifier(self) -> None:
        event_id = uuid.uuid4().hex
        manager = EventManager(make_event(event_id=event_id, **{"user": {"username": "foô"}}))
        manager.normalize()
        with self.tasks():
            manager.save(self.project.id)

        saved_event = eventstore.backend.get_event_by_id(self.project.id, event_id)
        euser = EventUser.from_event(saved_event)
        assert euser.username == "foô"

    def test_environment(self) -> None:
        manager = EventManager(make_event(**{"environment": "beta"}))
        manager.normalize()
        event = manager.save(self.project.id)
        assert dict(event.tags).get("environment") == "beta"

    def test_invalid_environment(self) -> None:
        manager = EventManager(make_event(**{"environment": "bad/name"}))
        manager.normalize()
        event = manager.save(self.project.id)
        assert dict(event.tags).get("environment") is None

    def test_invalid_tags(self) -> None:
        manager = EventManager(make_event(**{"tags": [42]}))
        manager.normalize()
        assert None in manager.get_data().get("tags", [])
        assert 42 not in manager.get_data().get("tags", [])
        event = manager.save(self.project.id)
        assert 42 not in event.tags
        assert None not in event.tags
- @mock.patch("sentry.event_manager.eventstream.backend.insert")
- def test_group_environment(self, eventstream_insert: mock.MagicMock) -> None:
- release_version = "1.0"
- def save_event() -> Event:
- manager = EventManager(
- make_event(
- **{
- "message": "foo",
- "event_id": uuid.uuid1().hex,
- "environment": "beta",
- "release": release_version,
- }
- )
- )
- manager.normalize()
- return manager.save(self.project.id)
- event = save_event()
- assert event.group_id is not None
- # Ensure the `GroupEnvironment` record was created.
- instance = GroupEnvironment.objects.get(
- group_id=event.group_id,
- environment_id=Environment.objects.get(
- organization_id=self.project.organization_id, name=event.get_tag("environment")
- ).id,
- )
- assert Release.objects.get(id=instance.first_release_id).version == release_version
- group_states1 = {
- "is_new": True,
- "is_regression": False,
- "is_new_group_environment": True,
- }
- # Ensure that the first event in the (group, environment) pair is
- # marked as being part of a new environment.
- assert event.group is not None
- eventstream_insert.assert_called_with(
- event=event,
- **group_states1,
- primary_hash="acbd18db4cc2f85cedef654fccc4a4d8",
- skip_consume=False,
- received_timestamp=event.data["received"],
- group_states=[{"id": event.group.id, **group_states1}],
- )
- event = save_event()
- group_states2 = {
- "is_new": False,
- "is_regression": False,
- "is_new_group_environment": False,
- }
- # Ensure that the next event in the (group, environment) pair is *not*
- # marked as being part of a new environment.
- assert event.group is not None
- eventstream_insert.assert_called_with(
- event=event,
- **group_states2,
- primary_hash="acbd18db4cc2f85cedef654fccc4a4d8",
- skip_consume=False,
- received_timestamp=event.data["received"],
- group_states=[{"id": event.group.id, **group_states2}],
- )
- def test_default_event_type(self) -> None:
- manager = EventManager(make_event(message="foo bar"))
- manager.normalize()
- data = manager.get_data()
- assert data["type"] == "default"
- event = manager.save(self.project.id)
- group = event.group
- assert group is not None
- assert group.data["type"] == "default"
- assert group.data["metadata"]["title"] == "foo bar"
- def test_message_event_type(self) -> None:
- manager = EventManager(
- make_event(
- **{
- "message": "",
- "logentry": {"formatted": "foo bar", "message": "foo %s", "params": ["bar"]},
- }
- )
- )
- manager.normalize()
- data = manager.get_data()
- assert data["type"] == "default"
- event = manager.save(self.project.id)
- group = event.group
- assert group is not None
- assert group.data["type"] == "default"
- assert group.data["metadata"]["title"] == "foo bar"
- def test_error_event_type(self) -> None:
- manager = EventManager(
- make_event(**{"exception": {"values": [{"type": "Foo", "value": "bar"}]}})
- )
- manager.normalize()
- data = manager.get_data()
- assert data["type"] == "error"
- event = manager.save(self.project.id)
- group = event.group
- assert group is not None
- assert group.data.get("type") == "error"
- assert group.data.get("metadata") == {
- "type": "Foo",
- "value": "bar",
- "initial_priority": PriorityLevel.HIGH,
- "display_title_with_tree_label": False,
- }
- def test_csp_event_type(self) -> None:
- manager = EventManager(
- make_event(
- **{
- "csp": {
- "effective_directive": "script-src",
- "blocked_uri": "http://example.com",
- },
                    # this normally is normalized in relay as part of ingest
- "logentry": {"message": "Blocked 'script' from 'example.com'"},
- }
- )
- )
- manager.normalize()
- data = manager.get_data()
- assert data["type"] == "csp"
- event = manager.save(self.project.id)
- group = event.group
- assert group is not None
- assert group.data.get("type") == "csp"
- assert group.data.get("metadata") == {
- "directive": "script-src",
- "initial_priority": PriorityLevel.HIGH,
- "uri": "example.com",
- "message": "Blocked 'script' from 'example.com'",
- }
- assert group.title == "Blocked 'script' from 'example.com'"
- def test_transaction_event_type(self) -> None:
- manager = EventManager(
- make_event(
- **{
- "transaction": "wait",
- "contexts": {
- "trace": {
- "parent_span_id": "bce14471e0e9654d",
- "op": "foobar",
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "span_id": "bf5be759039ede9a",
- }
- },
- "spans": [],
- "timestamp": "2019-06-14T14:01:40Z",
- "start_timestamp": "2019-06-14T14:01:40Z",
- "type": "transaction",
- }
- )
- )
- manager.normalize()
- data = manager.get_data()
- assert data["type"] == "transaction"
- def test_transaction_event_span_grouping(self) -> None:
- manager = EventManager(
- make_event(
- **{
- "transaction": "wait",
- "contexts": {
- "trace": {
- "parent_span_id": "bce14471e0e9654d",
- "op": "foobar",
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "span_id": "bf5be759039ede9a",
- }
- },
- "spans": [
- {
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "parent_span_id": "bf5be759039ede9a",
- "span_id": "a" * 16,
- "start_timestamp": 0,
- "timestamp": 1,
- "same_process_as_parent": True,
- "op": "default",
- "description": "span a",
- },
- {
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "parent_span_id": "bf5be759039ede9a",
- "span_id": "b" * 16,
- "start_timestamp": 0,
- "timestamp": 1,
- "same_process_as_parent": True,
- "op": "default",
- "description": "span a",
- },
- {
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "parent_span_id": "bf5be759039ede9a",
- "span_id": "c" * 16,
- "start_timestamp": 0,
- "timestamp": 1,
- "same_process_as_parent": True,
- "op": "default",
- "description": "span b",
- },
- ],
- "timestamp": "2019-06-14T14:01:40Z",
- "start_timestamp": "2019-06-14T14:01:40Z",
- "type": "transaction",
- }
- )
- )
- manager.normalize()
- event = manager.save(self.project.id)
- data = event.data
- assert data["type"] == "transaction"
- assert data["span_grouping_config"]["id"] == "default:2022-10-27"
- spans = [{"hash": span["hash"]} for span in data["spans"]]
- # the basic strategy is to simply use the description
- assert spans == [{"hash": hash_values([span["description"]])} for span in data["spans"]]
- def test_sdk(self) -> None:
- manager = EventManager(make_event(**{"sdk": {"name": "sentry-unity", "version": "1.0"}}))
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.data["sdk"] == {
- "name": "sentry-unity",
- "version": "1.0",
- "integrations": None,
- "packages": None,
- }
- def test_sdk_group_tagging(self) -> None:
- manager = EventManager(
- make_event(**{"sdk": {"name": "sentry-native-unity", "version": "1.0"}})
- )
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.group is not None
- sdk_metadata = event.group.data["metadata"]["sdk"]
- assert sdk_metadata["name"] == "sentry-native-unity"
- assert sdk_metadata["name_normalized"] == "sentry.native.unity"
- def test_no_message(self) -> None:
- # test that the message is handled gracefully
- manager = EventManager(
- make_event(**{"message": None, "logentry": {"message": "hello world"}})
- )
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.message == "hello world"
- def test_search_message_simple(self) -> None:
- manager = EventManager(
- make_event(
- **{
- "message": "test",
- "transaction": "sentry.tasks.process",
- }
- )
- )
- manager.normalize()
- event = manager.save(self.project.id)
- search_message = event.search_message
- assert "test" in search_message
- assert "sentry.tasks.process" in search_message
- def test_search_message_prefers_log_entry_message(self) -> None:
- manager = EventManager(
- make_event(
- **{
- "message": "test",
- "logentry": {"message": "hello world"},
- "transaction": "sentry.tasks.process",
- }
- )
- )
- manager.normalize()
- event = manager.save(self.project.id)
- search_message = event.search_message
- assert "test" not in search_message
- assert "hello world" in search_message
- assert "sentry.tasks.process" in search_message
- def test_search_message_skips_requested_keys(self) -> None:
- from sentry.eventstore import models
- with patch.object(models, "SEARCH_MESSAGE_SKIPPED_KEYS", ("dogs",)):
- manager = EventManager(
- make_event(
- **{
- "logentry": {"message": "hello world"},
- "transaction": "sentry.tasks.process",
- }
- )
- )
- manager.normalize()
- # Normalizing nukes any metadata we might pass when creating the event and event
- # manager, so we have to add it in here
- manager._data["metadata"] = {"dogs": "are great", "maisey": "silly", "charlie": "goofy"}
- event = manager.save(
- self.project.id,
- )
- search_message = event.search_message
- assert "hello world" in search_message
- assert "sentry.tasks.process" in search_message
- assert "silly" in search_message
- assert "goofy" in search_message
- assert "are great" not in search_message # "dogs" key is skipped
- def test_search_message_skips_bools_and_numbers(self) -> None:
- from sentry.eventstore import models
- with patch.object(models, "SEARCH_MESSAGE_SKIPPED_KEYS", ("dogs",)):
- manager = EventManager(
- make_event(
- **{
- "logentry": {"message": "hello world"},
- "transaction": "sentry.tasks.process",
- }
- )
- )
- manager.normalize()
- # Normalizing nukes any metadata we might pass when creating the event and event
- # manager, so we have to add it in here
- manager._data["metadata"] = {
- "dogs are great": True,
- "maisey": 12312012,
- "charlie": 1121.2012,
- "adopt": "don't shop",
- }
- event = manager.save(
- self.project.id,
- )
- search_message = event.search_message
- assert "hello world" in search_message
- assert "sentry.tasks.process" in search_message
- assert "True" not in search_message # skipped because it's a boolean
- assert "12312012" not in search_message # skipped because it's an int
- assert "1121.2012" not in search_message # skipped because it's a float
- assert "don't shop" in search_message
- def test_stringified_message(self) -> None:
- manager = EventManager(make_event(**{"message": 1234}))
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.data["logentry"] == {"formatted": "1234", "message": None, "params": None}
- def test_bad_message(self) -> None:
- # test that invalid messages are rejected
- manager = EventManager(make_event(**{"message": ["asdf"]}))
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.message == '["asdf"]'
- assert "logentry" in event.data
- def test_message_attribute_goes_to_interface(self) -> None:
- manager = EventManager(make_event(**{"message": "hello world"}))
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.data["logentry"] == {
- "formatted": "hello world",
- "message": None,
- "params": None,
- }
- def test_message_attribute_shadowing(self) -> None:
- # Logentry shadows the legacy message attribute.
- manager = EventManager(
- make_event(**{"message": "world hello", "logentry": {"message": "hello world"}})
- )
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.data["logentry"] == {
- "formatted": "hello world",
- "message": None,
- "params": None,
- }
- def test_message_attribute_interface_both_strings(self) -> None:
- manager = EventManager(
- make_event(**{"logentry": "a plain string", "message": "another string"})
- )
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.data["logentry"] == {
- "formatted": "a plain string",
- "message": None,
- "params": None,
- }
- def test_throws_when_matches_discarded_hash(self) -> None:
- manager = EventManager(make_event(message="foo", event_id="a" * 32, fingerprint=["a" * 32]))
- with self.tasks():
- event = manager.save(self.project.id)
- group = Group.objects.get(id=event.group_id)
- tombstone = GroupTombstone.objects.create(
- project_id=group.project_id,
- level=group.level,
- message=group.message,
- culprit=group.culprit,
- data=group.data,
- previous_group_id=group.id,
- )
- GroupHash.objects.filter(group=group).update(group=None, group_tombstone_id=tombstone.id)
- manager = EventManager(
- make_event(message="foo", event_id="b" * 32, fingerprint=["a" * 32]),
- project=self.project,
- )
- manager.normalize()
- a1 = CachedAttachment(name="a1", data=b"hello")
- a2 = CachedAttachment(name="a2", data=b"world")
- cache_key = cache_key_for_event(manager.get_data())
- attachment_cache.set(cache_key, attachments=[a1, a2])
- from sentry.utils.outcomes import track_outcome
- mock_track_outcome = mock.Mock(wraps=track_outcome)
- with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
- with self.feature("organizations:event-attachments"):
- with self.tasks():
- with pytest.raises(HashDiscarded):
- event = manager.save(
- self.project.id, cache_key=cache_key, has_attachments=True
- )
- assert mock_track_outcome.call_count == 3
- for o in mock_track_outcome.mock_calls:
- assert o.kwargs["outcome"] == Outcome.FILTERED
- assert o.kwargs["reason"] == FilterStatKeys.DISCARDED_HASH
        o = mock_track_outcome.mock_calls[0]
        assert o.kwargs["category"] == DataCategory.ERROR

        for o in mock_track_outcome.mock_calls[1:]:
            assert o.kwargs["category"] == DataCategory.ATTACHMENT
            assert o.kwargs["quantity"] == 5

    def test_honors_crash_report_limit(self) -> None:
        from sentry.utils.outcomes import track_outcome

        mock_track_outcome = mock.Mock(wraps=track_outcome)

        # Allow exactly one crash report
        self.project.update_option("sentry:store_crash_reports", 1)

        manager = EventManager(
            make_event(message="foo", event_id="a" * 32, fingerprint=["a" * 32]),
            project=self.project,
        )
        manager.normalize()

        a1 = CachedAttachment(name="a1", data=b"hello", type="event.minidump")
        a2 = CachedAttachment(name="a2", data=b"world")
        cache_key = cache_key_for_event(manager.get_data())
        attachment_cache.set(cache_key, attachments=[a1, a2])

        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:event-attachments"):
                with self.tasks():
                    manager.save(self.project.id, cache_key=cache_key, has_attachments=True)

        # The first minidump should be accepted, since the limit is 1
        assert mock_track_outcome.call_count == 3
        for o in mock_track_outcome.mock_calls:
            assert o.kwargs["outcome"] == Outcome.ACCEPTED

        mock_track_outcome.reset_mock()

        manager = EventManager(
            make_event(message="foo", event_id="b" * 32, fingerprint=["a" * 32]),
            project=self.project,
        )
        manager.normalize()

        cache_key = cache_key_for_event(manager.get_data())
        attachment_cache.set(cache_key, attachments=[a1, a2])

        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:event-attachments"):
                with self.tasks():
                    event = manager.save(
                        self.project.id, cache_key=cache_key, has_attachments=True
                    )

        assert event.data["metadata"]["stripped_crash"] is True

        assert mock_track_outcome.call_count == 3

        o = mock_track_outcome.mock_calls[0]
        assert o.kwargs["outcome"] == Outcome.FILTERED
        assert o.kwargs["category"] == DataCategory.ATTACHMENT
        assert o.kwargs["reason"] == FilterStatKeys.CRASH_REPORT_LIMIT

        for o in mock_track_outcome.mock_calls[1:]:
            assert o.kwargs["outcome"] == Outcome.ACCEPTED

    def test_event_accepted_outcome(self) -> None:
        manager = EventManager(make_event(message="foo"))
        manager.normalize()

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            manager.save(self.project.id)

        assert_mock_called_once_with_partial(
            mock_track_outcome, outcome=Outcome.ACCEPTED, category=DataCategory.ERROR
        )

    def test_attachment_accepted_outcomes(self) -> None:
        manager = EventManager(make_event(message="foo"), project=self.project)
        manager.normalize()

        a1 = CachedAttachment(name="a1", data=b"hello")
        a2 = CachedAttachment(name="a2", data=b"limited", rate_limited=True)
        a3 = CachedAttachment(name="a3", data=b"world")

        cache_key = cache_key_for_event(manager.get_data())
        attachment_cache.set(cache_key, attachments=[a1, a2, a3])

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:event-attachments"):
                manager.save(self.project.id, cache_key=cache_key, has_attachments=True)

        assert mock_track_outcome.call_count == 3

        for o in mock_track_outcome.mock_calls:
            assert o.kwargs["outcome"] == Outcome.ACCEPTED

        for o in mock_track_outcome.mock_calls[:2]:
            assert o.kwargs["category"] == DataCategory.ATTACHMENT
            assert o.kwargs["quantity"] == 5

        final = mock_track_outcome.mock_calls[2]
        assert final.kwargs["category"] == DataCategory.ERROR

    def test_attachment_filtered_outcomes(self) -> None:
        manager = EventManager(make_event(message="foo"), project=self.project)
        manager.normalize()

        # Disable storing all crash reports, which will drop the minidump but save the other
        a1 = CachedAttachment(name="a1", data=b"minidump", type="event.minidump")
        a2 = CachedAttachment(name="a2", data=b"limited", rate_limited=True)
        a3 = CachedAttachment(name="a3", data=b"world")

        cache_key = cache_key_for_event(manager.get_data())
        attachment_cache.set(cache_key, attachments=[a1, a2, a3])

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:event-attachments"):
                manager.save(self.project.id, cache_key=cache_key, has_attachments=True)

        assert mock_track_outcome.call_count == 3

        # First outcome is the rejection of the minidump
        o = mock_track_outcome.mock_calls[0]
        assert o.kwargs["outcome"] == Outcome.FILTERED
        assert o.kwargs["category"] == DataCategory.ATTACHMENT
        assert o.kwargs["reason"] == FilterStatKeys.CRASH_REPORT_LIMIT

        # Second outcome is acceptance of the "a3" attachment
        o = mock_track_outcome.mock_calls[1]
        assert o.kwargs["outcome"] == Outcome.ACCEPTED
        assert o.kwargs["category"] == DataCategory.ATTACHMENT
        assert o.kwargs["quantity"] == 5

        # Last outcome is the event
        o = mock_track_outcome.mock_calls[2]
        assert o.kwargs["outcome"] == Outcome.ACCEPTED
        assert o.kwargs["category"] == DataCategory.ERROR

    def test_transaction_outcome_accepted(self) -> None:
        """
        Without metrics extraction, we count the number of accepted transaction
        events in the TRANSACTION data category. This maintains compatibility
        with Sentry installations that do not have a metrics pipeline.
        """
        manager = EventManager(
            make_event(
                transaction="wait",
                contexts={
                    "trace": {
                        "parent_span_id": "bce14471e0e9654d",
                        "op": "foobar",
                        "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                        "span_id": "bf5be759039ede9a",
                    }
                },
                spans=[],
                timestamp=iso_format(before_now(minutes=5)),
                start_timestamp=iso_format(before_now(minutes=5)),
                type="transaction",
                platform="python",
            )
        )
        manager.normalize()

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature({"organizations:transaction-metrics-extraction": False}):
                manager.save(self.project.id)

        assert_mock_called_once_with_partial(
            mock_track_outcome, outcome=Outcome.ACCEPTED, category=DataCategory.TRANSACTION
        )

    def test_transaction_indexed_outcome_accepted(self) -> None:
        """
        With metrics extraction, we count the number of accepted transaction
        events in the TRANSACTION_INDEXED data category. The TRANSACTION data
        category contains the number of metrics from
        ``billing_metrics_consumer``.
        """
        manager = EventManager(
            make_event(
                transaction="wait",
                contexts={
                    "trace": {
                        "parent_span_id": "bce14471e0e9654d",
                        "op": "foobar",
                        "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                        "span_id": "bf5be759039ede9a",
                    }
                },
                spans=[],
                timestamp=iso_format(before_now(minutes=5)),
                start_timestamp=iso_format(before_now(minutes=5)),
                type="transaction",
                platform="python",
            )
        )
        manager.normalize()

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:transaction-metrics-extraction"):
                manager.save(self.project.id)

        assert_mock_called_once_with_partial(
            mock_track_outcome, outcome=Outcome.ACCEPTED, category=DataCategory.TRANSACTION_INDEXED
        )

    def test_checksum_rehashed(self) -> None:
        checksum = "invalid checksum hash"
        manager = EventManager(make_event(**{"checksum": checksum}))
        manager.normalize()
        event = manager.save(self.project.id)
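        # A checksum that is not a valid hash is itself rehashed; both the raw
        # value and its hash end up stored as group hashes.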
        hashes = [gh.hash for gh in GroupHash.objects.filter(group=event.group)]
        assert sorted(hashes) == sorted([hash_from_values(checksum), checksum])

    def test_legacy_attributes_moved(self) -> None:
        event_params = make_event(
            release="my-release",
            environment="my-environment",
            site="whatever",
            server_name="foo.com",
            event_id=uuid.uuid1().hex,
        )
        manager = EventManager(event_params)
        event = manager.save(self.project.id)

        # release and environment stay toplevel
        assert event.data["release"] == "my-release"
        assert event.data["environment"] == "my-environment"

        # site is a legacy attribute that is just a tag
        assert event.data.get("site") is None
        tags = dict(event.tags)
        assert tags["site"] == "whatever"
        assert event.data.get("server_name") is None
        tags = dict(event.tags)
        assert tags["server_name"] == "foo.com"

    @freeze_time()
    def test_save_issueless_event(self) -> None:
        manager = EventManager(
            make_event(
                transaction="wait",
                contexts={
                    "trace": {
                        "parent_span_id": "bce14471e0e9654d",
                        "op": "foobar",
                        "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                        "span_id": "bf5be759039ede9a",
                    }
                },
                spans=[],
                timestamp=iso_format(before_now(minutes=5)),
                start_timestamp=iso_format(before_now(minutes=5)),
                type="transaction",
                platform="python",
            )
        )

        event = manager.save(self.project.id)

        assert event.group is None
        assert (
            tsdb.backend.get_sums(
                TSDBModel.project,
                [self.project.id],
                event.datetime,
                event.datetime,
                tenant_ids={"organization_id": 123, "referrer": "r"},
            )[self.project.id]
            == 0
        )

    def test_category_match_in_app(self) -> None:
        """
        Regression test to ensure that grouping in-app enhancements work in
        principle.
        """
        from sentry.grouping.enhancer import Enhancements

        enhancement = Enhancements.from_config_string(
            """
            function:foo category=bar
            function:foo2 category=bar
            category:bar -app
            """,
        )

        event_params = make_event(
            platform="native",
            exception={
                "values": [
                    {
                        "type": "Hello",
                        "stacktrace": {
                            "frames": [
                                {
                                    "function": "foo",
                                    "in_app": True,
                                },
                                {"function": "bar"},
                            ]
                        },
                    }
                ]
            },
        )

        manager = EventManager(event_params)
        manager.normalize()
        manager.get_data()["grouping_config"] = {
            "enhancements": enhancement.dumps(),
            "id": "mobile:2021-02-12",
        }
        event1 = manager.save(self.project.id)
        assert event1.data["exception"]["values"][0]["stacktrace"]["frames"][0]["in_app"] is False

        event_params = make_event(
            platform="native",
            exception={
                "values": [
                    {
                        "type": "Hello",
                        "stacktrace": {
                            "frames": [
                                {
                                    "function": "foo2",
                                    "in_app": True,
                                },
                                {"function": "bar"},
                            ]
                        },
                    }
                ]
            },
        )

        manager = EventManager(event_params)
        manager.normalize()
        manager.get_data()["grouping_config"] = {
            "enhancements": enhancement.dumps(),
            "id": "mobile:2021-02-12",
        }
        event2 = manager.save(self.project.id)
        assert event2.data["exception"]["values"][0]["stacktrace"]["frames"][0]["in_app"] is False
        assert event1.group_id == event2.group_id

    def test_category_match_group(self) -> None:
        """
        Regression test to ensure categories are applied consistently and don't
        produce hash mismatches.
        """
        from sentry.grouping.enhancer import Enhancements

        enhancement = Enhancements.from_config_string(
            """
            function:foo category=foo_like
            category:foo_like -group
            """,
        )

        event_params = make_event(
            platform="native",
            exception={
                "values": [
                    {
                        "type": "Hello",
                        "stacktrace": {
                            "frames": [
                                {
                                    "function": "foo",
                                },
                                {
                                    "function": "bar",
                                },
                            ]
                        },
                    }
                ]
            },
        )

        manager = EventManager(event_params)
        manager.normalize()

        grouping_config: GroupingConfig = {
            "enhancements": enhancement.dumps(),
            "id": "mobile:2021-02-12",
        }

        manager.get_data()["grouping_config"] = grouping_config
        event1 = manager.save(self.project.id)

        event2 = Event(event1.project_id, event1.event_id, data=event1.data)

        assert (
            event1.get_hashes().hashes
            == event2.get_hashes(load_grouping_config(grouping_config)).hashes
        )

    def test_write_none_tree_labels(self) -> None:
        """Write tree labels even if None"""
        event_params = make_event(
            platform="native",
            exception={
                "values": [
                    {
                        "type": "Hello",
                        "stacktrace": {
                            "frames": [
                                {
                                    "function": "<redacted>",
                                },
                                {
                                    "function": "<redacted>",
                                },
                            ]
                        },
                    }
                ]
            },
        )

        manager = EventManager(event_params)
        manager.normalize()
        manager.get_data()["grouping_config"] = {
            "id": "mobile:2021-02-12",
        }
        event = manager.save(self.project.id)

        assert event.data["hierarchical_tree_labels"] == [None]

    def test_synthetic_exception_detection(self) -> None:
        manager = EventManager(
            make_event(
                message="foo",
                event_id="b" * 32,
                exception={
                    "values": [
                        {
                            "type": "SIGABRT",
                            "mechanism": {"handled": False},
                            "stacktrace": {"frames": [{"function": "foo"}]},
                        }
                    ]
                },
            ),
            project=self.project,
        )
        manager.normalize()

        manager.get_data()["grouping_config"] = {
            "id": "mobile:2021-02-12",
        }
        event = manager.save(self.project.id)

        mechanism = event.interfaces["exception"].values[0].mechanism
        assert mechanism is not None
        assert mechanism.synthetic is True
        assert event.title == "foo"
    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_creation(self) -> None:
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
            )
            data = event.data
            assert event.get_event_type() == "transaction"
            assert event.transaction == "/books/"
            assert data["span_grouping_config"]["id"] == "default:2022-10-27"
            span_hashes = [span["hash"] for span in data["spans"]]
            assert span_hashes == [
                "0f43fb6f6e01ca52",
                "3dc5dd68b38e1730",
                "424c6ae1641f0f0e",
                "d5da18d7274b34a1",
                "ac72fc0a4f5fe381",
                "ac1468d8e11a0553",
                "d8681423cab4275f",
                "e853d2eb7fb9ebb0",
                "6a992d5529f459a4",
                "b640a0ce465fa2a4",
                "a3605e201eaf6c45",
                "061710eb39a66089",
                "c031296784b22ea9",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
            ]
            assert event.group
            group = event.group
            assert group is not None
            assert group.title == "N+1 Query"
            assert (
                group.message
                == "/books/ N+1 Query SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21"
            )
            assert group.culprit == "/books/"
            assert group.get_event_type() == "transaction"
            description = "SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21"
            assert group.get_event_metadata() == {
                "location": "/books/",
                "title": "N+1 Query",
                "value": description,
                "initial_priority": PriorityLevel.LOW,
            }
            assert (
                event.search_message
                == "/books/ N+1 Query SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21"
            )
            assert group.location() == "/books/"
            assert group.level == 40
            assert group.issue_category == GroupCategory.PERFORMANCE
            assert group.issue_type == PerformanceNPlusOneGroupType
            assert event.occurrence
            assert event.occurrence.evidence_display == [
                IssueEvidence(
                    name="Offending Spans",
                    value="db - SELECT `books_author`.`id`, `books_author`.`name` "
                    "FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
                    important=True,
                )
            ]
            assert event.occurrence.evidence_data == {
                "transaction_name": "/books/",
                "op": "db",
                "parent_span_ids": ["8dd7a5869a4f4583"],
                "parent_span": "django.view - index",
                "cause_span_ids": ["9179e43ae844b174"],
                "offender_span_ids": [
                    "b8be6138369491dd",
                    "b2d4826e7b618f1b",
                    "b3fdeea42536dbf1",
                    "b409e78a092e642f",
                    "86d2ede57bbf48d4",
                    "8e554c84cdc9731e",
                    "94d6230f3f910e12",
                    "a210b87a2191ceb6",
                    "88a5ccaf25b9bd8f",
                    "bb32cf50fc56b296",
                ],
                "repeating_spans": "db - SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
                "repeating_spans_compact": "SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
                "num_repeating_spans": "10",
            }

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_update(self) -> None:
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
            )
            group = event.group
            assert group is not None
            assert group.issue_category == GroupCategory.PERFORMANCE
            assert group.issue_type == PerformanceNPlusOneGroupType
            group.data["metadata"] = {
                "location": "hi",
                "title": "lol",
            }
            group.culprit = "wat"
            group.message = "nope"
            group.save()
            assert group.location() == "hi"
            assert group.title == "lol"

            with self.tasks():
                self.create_performance_issue(
                    event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
                )

            # Make sure the original group is updated via buffers
            group.refresh_from_db()
            assert group.title == "N+1 Query"

            assert group.get_event_metadata() == {
                "location": "/books/",
                "title": "N+1 Query",
                "value": "SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
                "initial_priority": PriorityLevel.LOW,
            }
            assert group.location() == "/books/"
            assert group.message == "nope"
            assert group.culprit == "/books/"

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_error_issue_no_associate_perf_event(self) -> None:
        """Test that you can't associate a performance event with an error issue"""
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
            )
            assert event.group is not None

            # sneakily make the group type wrong
            group = event.group
            assert group is not None
            group.type = ErrorGroupType.type_id
            group.save()
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
            )

            assert event.group is None

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_no_associate_error_event(self) -> None:
        """Test that you can't associate an error event with a performance issue"""
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            manager = EventManager(make_event())
            manager.normalize()
            event = manager.save(self.project.id)
            assert len(event.groups) == 1

            # sneakily make the group type wrong
            group = event.group
            assert group is not None
            group.type = PerformanceNPlusOneGroupType.type_id
            group.save()
            manager = EventManager(make_event())
            manager.normalize()
            event = manager.save(self.project.id)

            assert not event.group

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_creation_ignored(self) -> None:
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view")),
                noise_limit=2,
            )
            assert event.get_event_type() == "transaction"
            assert event.group is None

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_creation_over_ignored_threshold(self) -> None:
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            event_1 = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=3
            )
            event_2 = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=3
            )
            event_3 = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=3
            )
            assert event_1.get_event_type() == "transaction"
            assert event_2.get_event_type() == "transaction"
            assert event_3.get_event_type() == "transaction"
            # only the third occurrence of the hash should create the group
            assert event_1.group is None
            assert event_2.group is None
            assert event_3.group is not None

    @override_options(
        {
            "performance.issues.slow_db_query.problem-creation": 1.0,
            "performance_issue_creation_rate": 1.0,
            "performance.issues.all.problem-detection": 1.0,
        }
    )
    def test_perf_issue_slow_db_issue_is_created(self) -> None:
        def attempt_to_generate_slow_db_issue() -> Event:
            return self.create_performance_issue(
                event_data=make_event(**get_event("slow-db-spans")),
                issue_type=PerformanceSlowDBQueryGroupType,
            )

        last_event = attempt_to_generate_slow_db_issue()
        assert last_event.group
        assert last_event.group.type == PerformanceSlowDBQueryGroupType.type_id

    @patch("sentry.event_manager.metrics.incr")
    def test_new_group_metrics_logging(self, mock_metrics_incr: MagicMock) -> None:
        manager = EventManager(
            make_event(platform="javascript", sdk={"name": "sentry.javascript.nextjs"})
        )
        manager.normalize()
        manager.save(self.project.id)

        mock_metrics_incr.assert_any_call(
            "group.created",
            skip_internal=True,
            tags={
                "platform": "javascript",
                "sdk": "sentry.javascript.nextjs",
            },
        )

    @patch("sentry.event_manager.metrics.incr")
    def test_new_group_metrics_logging_no_platform_no_sdk(
        self, mock_metrics_incr: MagicMock
    ) -> None:
        manager = EventManager(make_event(platform=None, sdk=None))
        manager.normalize()
        manager.save(self.project.id)

        mock_metrics_incr.assert_any_call(
            "group.created",
            skip_internal=True,
            tags={
                "platform": "other",
                "sdk": "other",
            },
        )

    @patch("sentry.event_manager.metrics.incr")
    def test_new_group_metrics_logging_sdk_exist_but_null(
        self, mock_metrics_incr: MagicMock
    ) -> None:
        manager = EventManager(make_event(platform=None, sdk={"name": None}))
        manager.normalize()
        manager.save(self.project.id)

        mock_metrics_incr.assert_any_call(
            "group.created",
            skip_internal=True,
            tags={
                "platform": "other",
                "sdk": "other",
            },
        )

    def test_new_group_metrics_logging_with_frame_mix(self) -> None:
        with patch("sentry.event_manager.metrics.incr") as mock_metrics_incr:
            manager = EventManager(
                make_event(platform="javascript", sdk={"name": "sentry.javascript.nextjs"})
            )
            manager.normalize()
            # IRL, `normalize_stacktraces_for_grouping` adds frame mix metadata to the event, but we
            # can't mock that because it's imported inside its calling function to avoid circular imports
            manager._data["metadata"] = {"in_app_frame_mix": "in-app-only"}
            manager.save(self.project.id)

            mock_metrics_incr.assert_any_call(
                "grouping.in_app_frame_mix",
                sample_rate=1.0,
                tags={
                    "platform": "javascript",
                    "frame_mix": "in-app-only",
                    "sdk": "sentry.javascript.nextjs",
                },
            )

    def test_new_group_metrics_logging_without_frame_mix(self) -> None:
        with patch("sentry.event_manager.metrics.incr") as mock_metrics_incr:
            manager = EventManager(make_event(platform="javascript"))
            event = manager.save(self.project.id)

            assert event.get_event_metadata().get("in_app_frame_mix") is None

            metrics_logged = [call.args[0] for call in mock_metrics_incr.mock_calls]
            assert "grouping.in_app_frame_mix" not in metrics_logged
class AutoAssociateCommitTest(TestCase, EventManagerTestMixin):
    def setUp(self) -> None:
        super().setUp()
        self.repo_name = "example"
        self.project = self.create_project(name="foo")
        with assume_test_silo_mode_of(Integration):
            self.org_integration = self.integration.add_organization(
                self.project.organization, self.user
            )
        self.repo = self.create_repo(
            project=self.project,
            name=self.repo_name,
            provider="integrations:github",
            integration_id=self.integration.id,
        )
        self.repo.update(config={"name": self.repo_name})
        self.create_code_mapping(
            project=self.project,
            repo=self.repo,
            organization_integration=self.org_integration,
            stack_root="/stack/root",
            source_root="/source/root",
            default_branch="main",
        )
        responses.add(
            "GET",
            f"https://api.github.com/repos/{self.repo_name}/commits/{LATER_COMMIT_SHA}",
            json=json.loads(GET_COMMIT_EXAMPLE),
        )
        responses.add(
            "GET",
            f"https://api.github.com/repos/{self.repo_name}/commits/{EARLIER_COMMIT_SHA}",
            json=json.loads(GET_PRIOR_COMMIT_EXAMPLE),
        )
        self.dummy_commit_sha = "a" * 40
        responses.add(
            responses.GET,
            f"https://api.github.com/repos/{self.repo_name}/compare/{self.dummy_commit_sha}...{LATER_COMMIT_SHA}",
            json=json.loads(COMPARE_COMMITS_EXAMPLE_WITH_INTERMEDIATE),
        )
        responses.add(
            responses.GET,
            f"https://api.github.com/repos/{self.repo_name}/commits?sha={LATER_COMMIT_SHA}",
            json=json.loads(GET_LAST_2_COMMITS_EXAMPLE),
        )

    def _create_first_release_commit(self) -> None:
        # Create a release
        release = self.create_release(project=self.project, version="abcabcabc")
        # Create a commit
        commit = self.create_commit(
            repo=self.repo,
            key=self.dummy_commit_sha,
        )
        # Make a release head commit
        ReleaseHeadCommit.objects.create(
            organization_id=self.project.organization.id,
            repository_id=self.repo.id,
            release=release,
            commit=commit,
        )


class ReleaseIssueTest(TestCase):
    def setUp(self) -> None:
        self.project = self.create_project()
        self.release = Release.get_or_create(self.project, "1.0")
        self.environment1 = Environment.get_or_create(self.project, "prod")
        self.environment2 = Environment.get_or_create(self.project, "staging")
        self.timestamp = float(int(time() - 300))

    def make_event(self, **kwargs: Any) -> dict[str, Any]:
        result = {
            "event_id": "a" * 32,
            "message": "foo",
            "timestamp": self.timestamp + 0.23,
            "level": logging.ERROR,
            "logger": "default",
            "tags": [],
        }
        result.update(kwargs)
        return result

    def make_release_event(
        self,
        release_version: str = "1.0",
        environment_name: str | None = "prod",
        project_id: int = 1,
        **kwargs: Any,
    ) -> Event:
        event_params = make_event(
            release=release_version, environment=environment_name, event_id=uuid.uuid1().hex
        )
        event_params.update(kwargs)
        manager = EventManager(event_params)
        with self.tasks():
            event = manager.save(project_id)
        return event

    def convert_timestamp(self, timestamp: float) -> datetime:
        return datetime.fromtimestamp(timestamp, tz=UTC)

    def assert_release_project_environment(
        self, event: Event, new_issues_count: int, first_seen: float, last_seen: float
    ) -> None:
        release = Release.objects.get(
            organization=event.project.organization.id, version=event.get_tag("sentry:release")
        )
        release_project_envs = ReleaseProjectEnvironment.objects.filter(
            release=release, project=event.project, environment=event.get_environment()
        )
        assert len(release_project_envs) == 1

        release_project_env = release_project_envs[0]
        assert release_project_env.new_issues_count == new_issues_count
        assert release_project_env.first_seen == self.convert_timestamp(first_seen)
        assert release_project_env.last_seen == self.convert_timestamp(last_seen)

    def test_different_groups(self) -> None:
        event1 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        self.assert_release_project_environment(
            event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
        )

        event2 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="b" * 32,
            timestamp=self.timestamp + 100,
        )
        self.assert_release_project_environment(
            event=event2,
            new_issues_count=2,
            last_seen=self.timestamp + 100,
            first_seen=self.timestamp,
        )

    def test_same_group(self) -> None:
        event1 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        self.assert_release_project_environment(
            event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
        )

        event2 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp + 100,
        )
        self.assert_release_project_environment(
            event=event2,
            new_issues_count=1,
            last_seen=self.timestamp + 100,
            first_seen=self.timestamp,
        )

    def test_same_group_different_environment(self) -> None:
        event1 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        self.assert_release_project_environment(
            event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
        )

        event2 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment2.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp + 100,
        )
        self.assert_release_project_environment(
            event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
        )
        self.assert_release_project_environment(
            event=event2,
            new_issues_count=1,
            last_seen=self.timestamp + 100,
            first_seen=self.timestamp + 100,
        )
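

# A minimal, self-contained model of the bookkeeping the assertions above
# exercise (illustrative only; the real logic lives in the
# ReleaseProjectEnvironment model): new_issues_count grows once per previously
# unseen group, first_seen is fixed by the first event, and last_seen advances
# with each later event. ``_model_release_project_env`` is a hypothetical
# helper, not part of the production code.
def _model_release_project_env(events: list[tuple[str, float]]) -> tuple[int, float, float]:
    seen_groups: set[str] = set()
    new_issues = 0
    first_seen = events[0][1]
    last_seen = first_seen
    for group_key, ts in events:
        if group_key not in seen_groups:
            seen_groups.add(group_key)
            new_issues += 1
        last_seen = max(last_seen, ts)
    return new_issues, first_seen, last_seen


# For example, two events in different groups 100s apart yield (2, t, t + 100),
# matching test_different_groups above:
assert _model_release_project_env([("a" * 32, 0.0), ("b" * 32, 100.0)]) == (2, 0.0, 100.0)

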
@apply_feature_flag_on_cls("organizations:dynamic-sampling")
class DSLatestReleaseBoostTest(TestCase):
    def setUp(self) -> None:
        self.environment1 = Environment.get_or_create(self.project, "prod")
        self.environment2 = Environment.get_or_create(self.project, "staging")
        self.timestamp = float(int(time() - 300))
        self.redis_client = get_redis_client_for_ds()

    def make_transaction_event(self, **kwargs: Any) -> dict[str, Any]:
        result = {
            "transaction": "wait",
            "contexts": {
                "trace": {
                    "parent_span_id": "bce14471e0e9654d",
                    "op": "foobar",
                    "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                    "span_id": "bf5be759039ede9a",
                }
            },
            "spans": [],
            "timestamp": self.timestamp + 0.23,
            "start_timestamp": "2019-06-14T14:01:40Z",
            "type": "transaction",
        }
        result.update(kwargs)
        return result

    def make_release_transaction(
        self,
        release_version: str = "1.0",
        environment_name: str | None = "prod",
        project_id: int = 1,
        **kwargs: Any,
    ) -> Event:
        transaction = (
            self.make_transaction_event(
                release=release_version, environment=environment_name, event_id=uuid.uuid1().hex
            )
            if environment_name is not None
            else self.make_transaction_event(release=release_version, event_id=uuid.uuid1().hex)
        )
        transaction.update(kwargs)
        manager = EventManager(transaction)
        with self.tasks():
            event = manager.save(project_id)
        return event

    @freeze_time("2022-11-03 10:00:00")
    def test_boost_release_with_non_observed_release(self) -> None:
        ts = timezone.now().timestamp()

        project = self.create_project(platform="python")
        release_1 = Release.get_or_create(project=project, version="1.0", date_added=timezone.now())
        release_2 = Release.get_or_create(
            project=project, version="2.0", date_added=timezone.now() + timedelta(hours=1)
        )
        release_3 = Release.get_or_create(
            project=project, version="3.0", date_added=timezone.now() + timedelta(hours=2)
        )

        for release, environment in (
            (release_1, None),
            (release_2, "prod"),
            (release_3, "dev"),
        ):
            self.make_release_transaction(
                release_version=release.version,
                environment_name=environment,
                project_id=project.id,
                checksum="a" * 32,
                timestamp=self.timestamp,
            )
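            # Observed-release markers live at ds::p:{project}:r:{release},
            # with an :e:{environment} suffix only when an environment was sent.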
            env_postfix = f":e:{environment}" if environment is not None else ""
            assert self.redis_client.get(f"ds::p:{project.id}:r:{release.id}{env_postfix}") == "1"

        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release_1.id}": str(ts),
            f"ds::r:{release_2.id}:e:prod": str(ts),
            f"ds::r:{release_3.id}:e:dev": str(ts),
        }
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release_1.id,
                timestamp=ts,
                environment=None,
                cache_key=f"ds::r:{release_1.id}",
                version=release_1.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_2.id,
                timestamp=ts,
                environment="prod",
                cache_key=f"ds::r:{release_2.id}:e:prod",
                version=release_2.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_3.id,
                timestamp=ts,
                environment="dev",
                cache_key=f"ds::r:{release_3.id}:e:dev",
                version=release_3.version,
                platform=Platform(project.platform),
            ),
        ]

    @freeze_time("2022-11-03 10:00:00")
    def test_boost_release_boosts_only_latest_release(self) -> None:
        ts = timezone.now().timestamp()

        project = self.create_project(platform="python")
        release_1 = Release.get_or_create(project=project, version="1.0", date_added=timezone.now())
        release_2 = Release.get_or_create(
            project=project,
            version="2.0",
            # We must make sure the new release_2.date_added > release_1.date_added.
            date_added=timezone.now() + timedelta(hours=1),
        )

        # We add a transaction for latest release release_2.
        self.make_release_transaction(
            release_version=release_2.version,
            environment_name=self.environment1.name,
            project_id=project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )

        # We add a transaction for release_1 which is not anymore the latest release, therefore we should skip this.
        self.make_release_transaction(
            release_version=release_1.version,
            environment_name=self.environment1.name,
            project_id=project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )

        assert (
            self.redis_client.get(f"ds::p:{project.id}:r:{release_2.id}:e:{self.environment1.name}")
            == "1"
        )
        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release_2.id}:e:{self.environment1.name}": str(ts),
        }
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release_2.id,
                timestamp=ts,
                environment=self.environment1.name,
                cache_key=f"ds::r:{release_2.id}:e:{self.environment1.name}",
                version=release_2.version,
                platform=Platform(project.platform),
            )
        ]

    @freeze_time("2022-11-03 10:00:00")
    def test_boost_release_with_observed_release_and_different_environment(self) -> None:
        project = self.create_project(platform="python")
        release = Release.get_or_create(project=project, version="1.0", date_added=timezone.now())

        self.make_release_transaction(
            release_version=release.version,
            environment_name=self.environment1.name,
            project_id=project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )

        ts_1 = time()

        assert (
            self.redis_client.get(f"ds::p:{project.id}:r:{release.id}:e:{self.environment1.name}")
            == "1"
        )
        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release.id}:e:{self.environment1.name}": str(ts_1)
        }
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release.id,
                timestamp=ts_1,
                environment=self.environment1.name,
                cache_key=f"ds::r:{release.id}:e:{self.environment1.name}",
                version=release.version,
                platform=Platform(project.platform),
            )
        ]

        # We simulate that a new transaction with same release but with a different environment value comes after
        # 30 minutes to show that we expect the entry for that release-env to be added to the boosted releases.
        with freeze_time("2022-11-03 10:30:00"):
            self.make_release_transaction(
                release_version=release.version,
                environment_name=self.environment2.name,
                project_id=project.id,
                checksum="b" * 32,
                timestamp=self.timestamp,
            )

            ts_2 = time()

            assert (
                self.redis_client.get(
                    f"ds::p:{project.id}:r:{release.id}:e:{self.environment2.name}"
                )
                == "1"
            )
            assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
                f"ds::r:{release.id}:e:{self.environment1.name}": str(ts_1),
                f"ds::r:{release.id}:e:{self.environment2.name}": str(ts_2),
            }
            assert ProjectBoostedReleases(
                project_id=project.id
            ).get_extended_boosted_releases() == [
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_1,
                    environment=self.environment1.name,
                    cache_key=f"ds::r:{release.id}:e:{self.environment1.name}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_2,
                    environment=self.environment2.name,
                    cache_key=f"ds::r:{release.id}:e:{self.environment2.name}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
            ]

        # We also test the case in which no environment is set, which can be the case as per
        # https://docs.sentry.io/platforms/javascript/configuration/options/#environment.
        with freeze_time("2022-11-03 11:00:00"):
            self.make_release_transaction(
                release_version=release.version,
                environment_name=None,
                project_id=project.id,
                checksum="b" * 32,
                timestamp=self.timestamp,
            )

            ts_3 = time()

            assert self.redis_client.get(f"ds::p:{project.id}:r:{release.id}") == "1"
            assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
                f"ds::r:{release.id}:e:{self.environment1.name}": str(ts_1),
                f"ds::r:{release.id}:e:{self.environment2.name}": str(ts_2),
                f"ds::r:{release.id}": str(ts_3),
            }
            assert ProjectBoostedReleases(
                project_id=project.id
            ).get_extended_boosted_releases() == [
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_1,
                    environment=self.environment1.name,
                    cache_key=f"ds::r:{release.id}:e:{self.environment1.name}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_2,
                    environment=self.environment2.name,
                    cache_key=f"ds::r:{release.id}:e:{self.environment2.name}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_3,
                    environment=None,
                    cache_key=f"ds::r:{release.id}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
            ]
- @freeze_time("2022-11-03 10:00:00")
- def test_release_not_boosted_with_observed_release_and_same_environment(self) -> None:
- project = self.create_project(platform="python")
- release = Release.get_or_create(project=project, version="1.0", date_added=timezone.now())
- for environment in (self.environment1.name, self.environment2.name):
- self.redis_client.set(
- f"ds::p:{project.id}:r:{release.id}:e:{environment}", 1, 60 * 60 * 24
- )
- self.make_release_transaction(
- release_version=release.version,
- environment_name=environment,
- project_id=project.id,
- checksum="b" * 32,
- timestamp=self.timestamp,
- )
- assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {}
- assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == []
- @freeze_time("2022-11-03 10:00:00")
- def test_release_not_boosted_with_deleted_release_after_event_received(self) -> None:
- ts = timezone.now().timestamp()
- project = self.create_project(platform="python")
- release_1 = Release.get_or_create(project=project, version="1.0", date_added=timezone.now())
- release_2 = Release.get_or_create(
- project=project, version="2.0", date_added=timezone.now() + timedelta(hours=1)
- )
- self.make_release_transaction(
- release_version=release_1.version,
- environment_name=None,
- project_id=project.id,
- checksum="a" * 32,
- timestamp=self.timestamp,
- )
- assert self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}") == "1"
- self.make_release_transaction(
- release_version=release_2.version,
- environment_name=None,
- project_id=project.id,
- checksum="a" * 32,
- timestamp=self.timestamp,
- )
- assert self.redis_client.get(f"ds::p:{project.id}:r:{release_2.id}") == "1"
- # We simulate that the release_2 is deleted after the boost has been inserted.
- release_2_id = release_2.id
- release_2.delete()
- # We expect the boosted release to be kept in Redis, if not queried by the ProjectBoostedReleases.
- assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
- f"ds::r:{release_1.id}": str(ts),
- f"ds::r:{release_2_id}": str(ts),
- }
- # We expect to not see the release 2 because it will not be in the database anymore, thus we mark it as
- # expired.
- assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
- ExtendedBoostedRelease(
- id=release_1.id,
- timestamp=ts,
- environment=None,
- cache_key=f"ds::r:{release_1.id}",
- version=release_1.version,
- platform=Platform(project.platform),
- ),
- ]
- @freeze_time("2022-11-03 10:00:00")
- def test_get_boosted_releases_with_old_and_new_cache_keys(self) -> None:
- ts = timezone.now().timestamp()
- project = self.create_project(platform="python")
- # Old cache key
- release_1 = Release.get_or_create(project=project, version="1.0", date_added=timezone.now())
- self.redis_client.hset(
- f"ds::p:{project.id}:boosted_releases",
- f"{release_1.id}",
- ts,
- )
- # New cache key
- release_2 = Release.get_or_create(
- project=project, version="2.0", date_added=timezone.now() + timedelta(hours=1)
- )
- self.redis_client.hset(
- f"ds::p:{project.id}:boosted_releases",
- f"ds::r:{release_2.id}",
- ts,
- )
- self.redis_client.hset(
- f"ds::p:{project.id}:boosted_releases",
- f"ds::r:{release_2.id}:e:{self.environment1.name}",
- ts,
- )
- self.redis_client.hset(
- f"ds::p:{project.id}:boosted_releases",
- f"ds::r:{release_2.id}:e:{self.environment2.name}",
- ts,
- )
- assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
- ExtendedBoostedRelease(
- id=release_1.id,
- timestamp=ts,
- environment=None,
- # This item has the old cache key.
- cache_key=f"{release_1.id}",
- version=release_1.version,
- platform=Platform(project.platform),
- ),
- ExtendedBoostedRelease(
- id=release_2.id,
- timestamp=ts,
- environment=None,
- cache_key=f"ds::r:{release_2.id}",
- version=release_2.version,
- platform=Platform(project.platform),
- ),
- ExtendedBoostedRelease(
- id=release_2.id,
- timestamp=ts,
- environment=self.environment1.name,
- cache_key=f"ds::r:{release_2.id}:e:{self.environment1.name}",
- version=release_2.version,
- platform=Platform(project.platform),
- ),
- ExtendedBoostedRelease(
- id=release_2.id,
- timestamp=ts,
- environment=self.environment2.name,
- cache_key=f"ds::r:{release_2.id}:e:{self.environment2.name}",
- version=release_2.version,
- platform=Platform(project.platform),
- ),
- ]
- @freeze_time("2022-11-03 10:00:00")
- def test_expired_boosted_releases_are_removed(self) -> None:
- ts = timezone.now().timestamp()
- # We want to test with multiple platforms.
- for platform in ("python", "java", None):
- project = self.create_project(platform=platform)
- for index, (release_version, environment) in enumerate(
- (
- (f"1.0-{platform}", self.environment1.name),
- (f"2.0-{platform}", self.environment2.name),
- )
- ):
- release = Release.get_or_create(
- project=project,
- version=release_version,
- date_added=timezone.now() + timedelta(hours=index),
- )
- self.redis_client.set(
- f"ds::p:{project.id}:r:{release.id}:e:{environment}", 1, 60 * 60 * 24
- )
- self.redis_client.hset(
- f"ds::p:{project.id}:boosted_releases",
- f"ds::r:{release.id}:e:{environment}",
- # We set the creation time in order to expire it by 1 second.
- ts - Platform(platform).time_to_adoption - 1,
- )
- # We add a new boosted release that is not expired.
- release_3 = Release.get_or_create(
- project=project,
- version=f"3.0-{platform}",
- date_added=timezone.now() + timedelta(hours=2),
- )
- self.make_release_transaction(
- release_version=release_3.version,
- environment_name=self.environment1.name,
- project_id=project.id,
- checksum="b" * 32,
- timestamp=self.timestamp,
- )
- assert (
- self.redis_client.get(
- f"ds::p:{project.id}:r:{release_3.id}:e:{self.environment1.name}"
- )
- == "1"
- )
- assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
- f"ds::r:{release_3.id}:e:{self.environment1.name}": str(ts)
- }
- assert ProjectBoostedReleases(
- project_id=project.id
- ).get_extended_boosted_releases() == [
- ExtendedBoostedRelease(
- id=release_3.id,
- timestamp=ts,
- environment=self.environment1.name,
- cache_key=f"ds::r:{release_3.id}:e:{self.environment1.name}",
- version=release_3.version,
- platform=Platform(project.platform),
- )
- ]
- @mock.patch("sentry.event_manager.schedule_invalidate_project_config")
- def test_project_config_invalidation_is_triggered_when_new_release_is_observed(
- self, mocked_invalidate: mock.MagicMock
- ) -> None:
- self.make_release_transaction(
- release_version=self.release.version,
- environment_name=self.environment1.name,
- project_id=self.project.id,
- checksum="a" * 32,
- timestamp=self.timestamp,
- )
- assert any(
- o.kwargs["trigger"] == "dynamic_sampling:boost_release"
- for o in mocked_invalidate.mock_calls
- )
- @freeze_time("2022-11-03 10:00:00")
- @mock.patch("sentry.dynamic_sampling.rules.helpers.latest_releases.BOOSTED_RELEASES_LIMIT", 2)
- def test_least_recently_boosted_release_is_removed_if_limit_is_exceeded(self) -> None:
- ts = timezone.now().timestamp()
- project = self.create_project(platform="python")
- release_1 = Release.get_or_create(
- project=project,
- version="1.0",
- date_added=timezone.now(),
- )
- release_2 = Release.get_or_create(
- project=project,
- version="2.0",
- date_added=timezone.now() + timedelta(hours=1),
- )
- # We boost with increasing timestamps, so that the boost with the smallest (oldest) timestamp is evicted.
- for release, boost_time in ((release_1, ts - 2), (release_2, ts - 1)):
- self.redis_client.set(
- f"ds::p:{project.id}:r:{release.id}",
- 1,
- 60 * 60 * 24,
- )
- self.redis_client.hset(
- f"ds::p:{project.id}:boosted_releases",
- f"ds::r:{release.id}",
- boost_time,
- )
- release_3 = Release.get_or_create(
- project=project,
- version="3.0",
- date_added=timezone.now() + timedelta(hours=2),
- )
- self.make_release_transaction(
- release_version=release_3.version,
- environment_name=self.environment1.name,
- project_id=project.id,
- checksum="b" * 32,
- timestamp=self.timestamp,
- )
- assert (
- self.redis_client.get(f"ds::p:{project.id}:r:{release_3.id}:e:{self.environment1.name}")
- == "1"
- )
- assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
- f"ds::r:{release_2.id}": str(ts - 1),
- f"ds::r:{release_3.id}:e:{self.environment1.name}": str(ts),
- }
- assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
- ExtendedBoostedRelease(
- id=release_2.id,
- timestamp=ts - 1,
- environment=None,
- cache_key=f"ds::r:{release_2.id}",
- version=release_2.version,
- platform=Platform(project.platform),
- ),
- ExtendedBoostedRelease(
- id=release_3.id,
- timestamp=ts,
- environment=self.environment1.name,
- cache_key=f"ds::r:{release_3.id}:e:{self.environment1.name}",
- version=release_3.version,
- platform=Platform(project.platform),
- ),
- ]
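Editor's note: what this test asserts is a smallest-timestamp eviction policy — at capacity, the oldest boost makes room for the new one. A dict-based sketch of that policy (the real store is the Redis hash shown above):

    BOOSTED_RELEASES_LIMIT = 2  # matches the patched limit in the test

    def add_boost(boosts: dict[str, float], key: str, ts: float) -> None:
        if len(boosts) >= BOOSTED_RELEASES_LIMIT:
            # Evict the entry with the smallest (least recently boosted) timestamp.
            del boosts[min(boosts, key=boosts.__getitem__)]
        boosts[key] = ts

    boosts = {"ds::r:1": 100.0, "ds::r:2": 101.0}
    add_boost(boosts, "ds::r:3:e:prod", 102.0)
    assert boosts == {"ds::r:2": 101.0, "ds::r:3:e:prod": 102.0}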
- @freeze_time()
- @mock.patch("sentry.dynamic_sampling.rules.helpers.latest_releases.BOOSTED_RELEASES_LIMIT", 2)
- def test_removed_boost_not_added_again_if_limit_is_exceeded(self) -> None:
- ts = timezone.now().timestamp()
- project = self.create_project(platform="python")
- release_1 = Release.get_or_create(project=project, version="1.0", date_added=timezone.now())
- # We want to test the case where transactions for the same release arrive with different environments,
- # pushing us over the limit so that one (release, env) boost is evicted, and then a transaction arrives
- # with the evicted environment again.
- #
- # As an example, suppose the following history of transactions received, in the form (release, env):
- # (1, production) -> (1, staging) -> (1, None) -> (1, production)
- #
- # Once we receive the first two, we have reached maximum capacity. Then we receive (1, None) and evict the
- # boost for (1, production), which leaves the boosts (1, staging) and (1, None). After that we receive
- # (1, production) again, but in this case we don't want to evict (1, staging), because that would put us
- # in an infinite eviction loop. Instead, we expect (1, production) to stay marked as observed, and only to
- # be un-observed if it receives no transactions within the next 24 hours.
- environments_sequence = [
- self.environment1.name,
- self.environment2.name,
- None,
- self.environment1.name,
- ]
- for environment in environments_sequence:
- self.make_release_transaction(
- release_version=release_1.version,
- environment_name=environment,
- project_id=project.id,
- checksum="b" * 32,
- timestamp=self.timestamp,
- )
- # We assert that all environments have been observed.
- assert (
- self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}:e:{self.environment1.name}")
- == "1"
- )
- assert (
- self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}:e:{self.environment2.name}")
- == "1"
- )
- assert self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}") == "1"
- # We assert that only the last 2 unseen (release, env) pairs are boosted.
- assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
- f"ds::r:{release_1.id}:e:{self.environment2.name}": str(ts),
- f"ds::r:{release_1.id}": str(ts),
- }
- assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
- ExtendedBoostedRelease(
- id=release_1.id,
- timestamp=ts,
- environment=self.environment2.name,
- cache_key=f"ds::r:{release_1.id}:e:{self.environment2.name}",
- version=release_1.version,
- platform=Platform(project.platform),
- ),
- ExtendedBoostedRelease(
- id=release_1.id,
- timestamp=ts,
- environment=None,
- cache_key=f"ds::r:{release_1.id}",
- version=release_1.version,
- platform=Platform(project.platform),
- ),
- ]
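Editor's note: the comment in this test describes a guard against eviction ping-pong — a pair that was already observed is never re-boosted, even after its boost was evicted. A compact sketch of that rule; all names are invented for illustration:

    def maybe_boost(
        observed: set[str], boosts: dict[str, float], key: str, ts: float, limit: int
    ) -> None:
        if key in observed:
            # Seen within the observation window: skip, even if this pair's
            # boost was evicted earlier. This is what breaks the eviction loop.
            return
        observed.add(key)
        if len(boosts) >= limit:
            del boosts[min(boosts, key=boosts.__getitem__)]
        boosts[key] = ts

    observed: set[str] = set()
    boosts: dict[str, float] = {}
    for i, key in enumerate(["r1:prod", "r1:staging", "r1", "r1:prod"]):
        maybe_boost(observed, boosts, key, float(i), limit=2)
    # "r1:prod" was evicted when "r1" arrived and is not re-added afterwards.
    assert boosts == {"r1:staging": 1.0, "r1": 2.0}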
- class TestSaveGroupHashAndGroup(TransactionTestCase):
- def test(self) -> None:
- perf_data = load_data("transaction-n-plus-one", timestamp=before_now(minutes=10))
- event = _get_event_instance(perf_data, project_id=self.project.id)
- group_hash = "some_group"
- group, created = save_grouphash_and_group(self.project, event, group_hash)
- assert created
- group_2, created = save_grouphash_and_group(self.project, event, group_hash)
- assert group.id == group_2.id
- assert not created
- assert Group.objects.filter(grouphash__hash=group_hash).count() == 1
- group_3, created = save_grouphash_and_group(self.project, event, "new_hash")
- assert created
- assert group_2.id != group_3.id
- assert Group.objects.filter(grouphash__hash=group_hash).count() == 1
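Editor's note: the contract under test is a get-or-create — the first save of a hash creates the group, repeat saves return the existing group with created=False, and a new hash creates a fresh group. An in-memory sketch of that contract, with a dict standing in for the GroupHash table:

    class GroupStore:
        def __init__(self) -> None:
            self._groups: dict[str, int] = {}
            self._next_id = 0

        def save(self, group_hash: str) -> tuple[int, bool]:
            if group_hash in self._groups:
                return self._groups[group_hash], False
            self._next_id += 1
            self._groups[group_hash] = self._next_id
            return self._next_id, True

    store = GroupStore()
    g1, created = store.save("some_group")
    assert created
    g2, created = store.save("some_group")
    assert g2 == g1 and not created
    g3, created = store.save("new_hash")
    assert created and g3 != g1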
- example_transaction_event = {
- "type": "transaction",
- "timestamp": datetime.now().isoformat(),
- "start_timestamp": (datetime.now() - timedelta(seconds=1)).isoformat(),
- "spans": [],
- "contexts": {
- "trace": {
- "parent_span_id": "8988cec7cc0779c1",
- "type": "trace",
- "op": "foobar",
- "trace_id": "a7d67cf796774551a95be6543cacd459",
- "span_id": "babaae0d4b7512d9",
- "status": "ok",
- }
- },
- }
-
- example_error_event = {
- "event_id": "80e3496eff734ab0ac993167aaa0d1cd",
- "release": "5.222.5",
- "type": "error",
- "level": "fatal",
- "platform": "cocoa",
- "tags": {"level": "fatal"},
- "environment": "test-app",
- "sdk": {
- "name": "sentry.cocoa",
- "version": "8.2.0",
- "integrations": [
- "Crash",
- "PerformanceTracking",
- "MetricKit",
- "WatchdogTerminationTracking",
- "ViewHierarchy",
- "NetworkTracking",
- "ANRTracking",
- "AutoBreadcrumbTracking",
- "FramesTracking",
- "AppStartTracking",
- "Screenshot",
- "FileIOTracking",
- "UIEventTracking",
- "AutoSessionTracking",
- "CoreDataTracking",
- "PreWarmedAppStartTracing",
- ],
- },
- "user": {
- "id": "803F5C87-0F8B-41C7-8499-27BD71A92738",
- "ip_address": "192.168.0.1",
- "geo": {"country_code": "US", "region": "United States"},
- },
- "logger": "my.logger.name",
- }
-
-
- @pytest.mark.parametrize(
- "event_data,expected_type",
- [
- pytest.param(
- example_transaction_event,
- "transactions",
- id="transactions",
- ),
- pytest.param(
- example_error_event,
- "errors",
- id="errors",
- ),
- ],
- )
- @django_db_all
- def test_cogs_event_manager(
- default_project: int, event_data: Mapping[str, Any], expected_type: str
- ) -> None:
- storage: MemoryMessageStorage[KafkaPayload] = MemoryMessageStorage()
- broker = LocalBroker(storage)
- topic = Topic("shared-resources-usage")
- broker.create_topic(topic, 1)
- producer = broker.get_producer()
- set("shared_resources_accounting_enabled", [settings.COGS_EVENT_STORE_LABEL])
- accountant.init_backend(producer)
- raw_event_params = make_event(**event_data)
- manager = EventManager(raw_event_params)
- manager.normalize()
- normalized_data = dict(manager.get_data())
- _ = manager.save(default_project)
- expected_len = len(json.dumps(normalized_data))
- accountant._shutdown()
- accountant.reset_backend()
- msg1 = broker.consume(Partition(topic, 0), 0)
- assert msg1 is not None
- payload = msg1.payload
- assert payload is not None
- formatted = json.loads(payload.value.decode("utf-8"))
- assert formatted["shared_resource_id"] == settings.COGS_EVENT_STORE_LABEL
- assert formatted["app_feature"] == expected_type
- assert formatted["usage_unit"] == "bytes"
- # We cannot assert an exact length because the manager's save method adds extra fields to the event.
- # Instead, we assert that the recorded amount is at least the length of the normalized payload.
- assert formatted["amount"] >= expected_len
|