import logging
import uuid
from datetime import datetime, timedelta
from time import time
from unittest import mock
from unittest.mock import MagicMock, patch

import pytest
import responses
from django.core.cache import cache
from django.test.utils import override_settings
from django.utils import timezone
from freezegun import freeze_time
from rest_framework.status import HTTP_404_NOT_FOUND

from fixtures.github import (
    COMPARE_COMMITS_EXAMPLE_WITH_INTERMEDIATE,
    EARLIER_COMMIT_SHA,
    GET_COMMIT_EXAMPLE,
    GET_LAST_2_COMMITS_EXAMPLE,
    GET_PRIOR_COMMIT_EXAMPLE,
    LATER_COMMIT_SHA,
)
from sentry import audit_log, nodestore, tsdb
from sentry.attachments import CachedAttachment, attachment_cache
from sentry.constants import MAX_VERSION_LENGTH, DataCategory
from sentry.dynamic_sampling import (
    ExtendedBoostedRelease,
    Platform,
    ProjectBoostedReleases,
    get_redis_client_for_ds,
)
from sentry.event_manager import (
    EventManager,
    HashDiscarded,
    _get_event_instance,
    _save_grouphash_and_group,
    get_event_type,
    has_pending_commit_resolution,
    materialize_metadata,
)
from sentry.eventstore.models import Event
from sentry.grouping.utils import hash_from_values
from sentry.ingest.inbound_filters import FilterStatKeys
from sentry.issues.grouptype import (
    ErrorGroupType,
    GroupCategory,
    PerformanceNPlusOneGroupType,
    PerformanceSlowDBQueryGroupType,
)
from sentry.issues.issue_occurrence import IssueEvidence
from sentry.models import (
    Activity,
    Commit,
    CommitAuthor,
    Environment,
    ExternalIssue,
    Group,
    GroupEnvironment,
    GroupHash,
    GroupLink,
    GroupRelease,
    GroupResolution,
    GroupStatus,
    GroupTombstone,
    Integration,
    OrganizationIntegration,
    Project,
    PullRequest,
    PullRequestCommit,
    Release,
    ReleaseCommit,
    ReleaseHeadCommit,
    ReleaseProjectEnvironment,
    UserReport,
)
from sentry.models.auditlogentry import AuditLogEntry
from sentry.models.eventuser import EventUser
from sentry.projectoptions.defaults import DEFAULT_GROUPING_CONFIG, LEGACY_GROUPING_CONFIG
from sentry.spans.grouping.utils import hash_values
from sentry.testutils.asserts import assert_mock_called_once_with_partial
from sentry.testutils.cases import (
    PerformanceIssueTestCase,
    SnubaTestCase,
    TestCase,
    TransactionTestCase,
)
from sentry.testutils.helpers import apply_feature_flag_on_cls, override_options
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.testutils.performance_issues.event_generators import get_event
from sentry.testutils.silo import region_silo_test
from sentry.tsdb.base import TSDBModel
from sentry.types.activity import ActivityType
from sentry.utils import json
from sentry.utils.cache import cache_key_for_event
from sentry.utils.outcomes import Outcome
from sentry.utils.samples import load_data
from tests.sentry.integrations.github.test_repository import stub_installation_token
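

# Shared helper for the tests below: builds a minimal event payload;
# individual tests override fields (message, event_id, release, ...) via kwargs.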
def make_event(**kwargs):
    result = {
        "event_id": uuid.uuid1().hex,
        "level": logging.ERROR,
        "logger": "default",
        "tags": [],
    }
    result.update(kwargs)
    return result


class EventManagerTestMixin:
    def make_release_event(self, release_name, project_id):
        manager = EventManager(make_event(release=release_name))
        manager.normalize()
        event = manager.save(project_id)
        return event


@region_silo_test
class EventManagerTest(TestCase, SnubaTestCase, EventManagerTestMixin, PerformanceIssueTestCase):
    def test_similar_message_prefix_doesnt_group(self):
        # We had a regression that caused the default hash to be 'event.message'
        # instead of '[event.message]', which made it generate a hash per letter.
        manager = EventManager(make_event(event_id="a", message="foo bar"))
        manager.normalize()
        event1 = manager.save(self.project.id)

        manager = EventManager(make_event(event_id="b", message="foo baz"))
        manager.normalize()
        event2 = manager.save(self.project.id)

        assert event1.group_id != event2.group_id

    def test_ephemeral_interfaces_removed_on_save(self):
        manager = EventManager(make_event(platform="python"))
        manager.normalize()
        event = manager.save(self.project.id)

        group = event.group
        assert group is not None
        assert group.platform == "python"
        assert event.platform == "python"

    @mock.patch("sentry.event_manager.eventstream.backend.insert")
    def test_dupe_message_id(self, eventstream_insert):
        # Saves the latest event to nodestore and eventstream
        project_id = self.project.id
        event_id = "a" * 32
        node_id = Event.generate_node_id(project_id, event_id)

        manager = EventManager(make_event(event_id=event_id, message="first"))
        manager.normalize()
        manager.save(project_id)
        assert nodestore.backend.get(node_id)["logentry"]["formatted"] == "first"

        manager = EventManager(make_event(event_id=event_id, message="second"))
        manager.normalize()
        manager.save(project_id)
        assert nodestore.backend.get(node_id)["logentry"]["formatted"] == "second"

        assert eventstream_insert.call_count == 2

    def test_updates_group(self):
        timestamp = time() - 300
        manager = EventManager(
            make_event(message="foo", event_id="a" * 32, checksum="a" * 32, timestamp=timestamp)
        )
        manager.normalize()
        event = manager.save(self.project.id)

        manager = EventManager(
            make_event(
                message="foo bar", event_id="b" * 32, checksum="a" * 32, timestamp=timestamp + 2.0
            )
        )
        manager.normalize()

        with self.tasks():
            event2 = manager.save(self.project.id)

        group = Group.objects.get(id=event.group_id)

        assert group.times_seen == 2
        assert group.last_seen == event2.datetime
        assert group.message == event2.message
        assert group.data.get("type") == "default"
        assert group.data.get("metadata") == {"title": "foo bar"}

    def test_applies_secondary_grouping(self):
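        """Events keep grouping together via the secondary (fallback) grouping
        config while it is unexpired, even though the primary config now
        produces different hashes."""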
        project = self.project
        project.update_option("sentry:grouping_config", "legacy:2019-03-12")
        project.update_option("sentry:secondary_grouping_expiry", 0)

        timestamp = time() - 300
        manager = EventManager(
            make_event(message="foo 123", event_id="a" * 32, timestamp=timestamp)
        )
        manager.normalize()
        event = manager.save(project.id)

        project.update_option("sentry:grouping_config", "newstyle:2023-01-11")
        project.update_option("sentry:secondary_grouping_config", "legacy:2019-03-12")
        project.update_option("sentry:secondary_grouping_expiry", time() + (24 * 90 * 3600))

        # Switching to newstyle grouping changes the hashes, as the number "123"
        # is stripped from the message.
        manager = EventManager(
            make_event(message="foo 123", event_id="b" * 32, timestamp=timestamp + 2.0)
        )
        manager.normalize()

        with self.tasks():
            event2 = manager.save(project.id)

        # Ensure the events ended up in the same group because of fallback
        # (secondary) grouping, not because of matching primary hashes.
        assert not set(event.get_hashes().hashes) & set(event2.get_hashes().hashes)
        assert event.group_id == event2.group_id

        group = Group.objects.get(id=event.group_id)

        assert group.times_seen == 2
        assert group.last_seen == event2.datetime
        assert group.message == event2.message
        assert group.data.get("type") == "default"
        assert group.data.get("metadata") == {"title": "foo 123"}

        # After expiry, new events are still assigned to the same group:
        project.update_option("sentry:secondary_grouping_expiry", 0)
        manager = EventManager(
            make_event(message="foo 123", event_id="c" * 32, timestamp=timestamp + 4.0)
        )
        manager.normalize()

        with self.tasks():
            event3 = manager.save(project.id)
        assert event3.group_id == event2.group_id

    def test_applies_secondary_grouping_hierarchical(self):
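        """Same fallback behavior as above, but the primary config is switched
        to the hierarchical mobile:2021-02-12 config."""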
        project = self.project
        project.update_option("sentry:grouping_config", "legacy:2019-03-12")
        project.update_option("sentry:secondary_grouping_expiry", 0)

        timestamp = time() - 300

        def save_event(ts_offset):
            ts = timestamp + ts_offset
            manager = EventManager(
                make_event(
                    message="foo 123",
                    event_id=hex(2**127 + int(ts))[-32:],
                    timestamp=ts,
                    exception={
                        "values": [
                            {
                                "type": "Hello",
                                "stacktrace": {
                                    "frames": [
                                        {
                                            "function": "not_in_app_function",
                                        },
                                        {
                                            "function": "in_app_function",
                                        },
                                    ]
                                },
                            }
                        ]
                    },
                )
            )
            manager.normalize()
            with self.tasks():
                return manager.save(project.id)

        event = save_event(0)

        project.update_option("sentry:grouping_config", "mobile:2021-02-12")
        project.update_option("sentry:secondary_grouping_config", "legacy:2019-03-12")
        project.update_option("sentry:secondary_grouping_expiry", time() + (24 * 90 * 3600))

        # Switching the primary grouping config changes the hashes.
        event2 = save_event(2)

        # Ensure the events ended up in the same group because of fallback
        # (secondary) grouping, not because of matching primary hashes.
        assert not set(event.get_hashes().hashes) & set(event2.get_hashes().hashes)
        assert event.group_id == event2.group_id

        group = Group.objects.get(id=event.group_id)
        assert group.times_seen == 2
        assert group.last_seen == event2.datetime

        # After expiry, new events are still assigned to the same group:
        project.update_option("sentry:secondary_grouping_expiry", 0)
        event3 = save_event(4)
        assert event3.group_id == event2.group_id

    def test_applies_downgrade_hierarchical(self):
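        """Downgrading the primary config from the hierarchical (mobile) config
        back to legacy still groups new events into the existing group via
        secondary grouping."""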
        project = self.project
        project.update_option("sentry:grouping_config", "mobile:2021-02-12")
        project.update_option("sentry:secondary_grouping_expiry", 0)

        timestamp = time() - 300

        def save_event(ts_offset):
            ts = timestamp + ts_offset
            manager = EventManager(
                make_event(
                    message="foo 123",
                    event_id=hex(2**127 + int(ts))[-32:],
                    timestamp=ts,
                    exception={
                        "values": [
                            {
                                "type": "Hello",
                                "stacktrace": {
                                    "frames": [
                                        {
                                            "function": "not_in_app_function",
                                        },
                                        {
                                            "function": "in_app_function",
                                        },
                                    ]
                                },
                            }
                        ]
                    },
                )
            )
            manager.normalize()
            with self.tasks():
                return manager.save(project.id)

        event = save_event(0)

        project.update_option("sentry:grouping_config", "legacy:2019-03-12")
        project.update_option("sentry:secondary_grouping_config", "mobile:2021-02-12")
        project.update_option("sentry:secondary_grouping_expiry", time() + (24 * 90 * 3600))

        # Downgrading the primary grouping config changes the hashes.
        event2 = save_event(2)

        # Ensure the events ended up in the same group because of fallback
        # (secondary) grouping, not because of matching primary hashes.
        assert not set(event.get_hashes().hashes) & set(event2.get_hashes().hashes)
        assert event.group_id == event2.group_id

        group = Group.objects.get(id=event.group_id)
        group_hashes = GroupHash.objects.filter(
            project=self.project, hash__in=event.get_hashes().hashes
        )
        assert group_hashes
        for hash in group_hashes:
            assert hash.group_id == event.group_id

        assert group.times_seen == 2
        assert group.last_seen == event2.datetime

        # After expiry, new events are still assigned to the same group:
        project.update_option("sentry:secondary_grouping_expiry", 0)
        event3 = save_event(4)
        assert event3.group_id == event2.group_id

    @mock.patch("sentry.event_manager._calculate_background_grouping")
    def test_applies_background_grouping(self, mock_calc_grouping):
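        """Background grouping runs only once a background config id is set
        and the sample rate admits the event."""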
        timestamp = time() - 300
        manager = EventManager(
            make_event(message="foo 123", event_id="a" * 32, timestamp=timestamp)
        )
        manager.normalize()
        manager.save(self.project.id)

        assert mock_calc_grouping.call_count == 0

        with self.options(
            {
                "store.background-grouping-config-id": "mobile:2021-02-12",
                "store.background-grouping-sample-rate": 1.0,
            }
        ):
            manager.save(self.project.id)

        assert mock_calc_grouping.call_count == 1

    @mock.patch("sentry.event_manager._calculate_background_grouping")
    def test_background_grouping_sample_rate(self, mock_calc_grouping):
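        """A sample rate of 0.0 disables background grouping entirely."""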
        timestamp = time() - 300
        manager = EventManager(
            make_event(message="foo 123", event_id="a" * 32, timestamp=timestamp)
        )
        manager.normalize()
        manager.save(self.project.id)

        assert mock_calc_grouping.call_count == 0

        with self.options(
            {
                "store.background-grouping-config-id": "mobile:2021-02-12",
                "store.background-grouping-sample-rate": 0.0,
            }
        ):
            manager.save(self.project.id)

        manager.save(self.project.id)

        assert mock_calc_grouping.call_count == 0

    def test_updates_group_with_fingerprint(self):
        ts = time() - 200
        manager = EventManager(
            make_event(message="foo", event_id="a" * 32, fingerprint=["a" * 32], timestamp=ts)
        )
        with self.tasks():
            event = manager.save(self.project.id)

        manager = EventManager(
            make_event(message="foo bar", event_id="b" * 32, fingerprint=["a" * 32], timestamp=ts)
        )
        with self.tasks():
            event2 = manager.save(self.project.id)

        group = Group.objects.get(id=event.group_id)

        assert group.times_seen == 2
        assert group.last_seen == event.datetime
        assert group.message == event2.message

    def test_differentiates_with_fingerprint(self):
        manager = EventManager(
            make_event(message="foo", event_id="a" * 32, fingerprint=["{{ default }}", "a" * 32])
        )
        with self.tasks():
            manager.normalize()
            event = manager.save(self.project.id)

        manager = EventManager(
            make_event(message="foo bar", event_id="b" * 32, fingerprint=["a" * 32])
        )
        with self.tasks():
            manager.normalize()
            event2 = manager.save(self.project.id)

        assert event.group_id != event2.group_id

    def test_materialize_metadata_simple(self):
        manager = EventManager(make_event(transaction="/dogs/are/great/"))
        event = manager.save(self.project.id)

        event_type = get_event_type(event.data)
        event_metadata = event_type.get_metadata(event.data)

        assert materialize_metadata(event.data, event_type, event_metadata) == {
            "type": "default",
            "culprit": "/dogs/are/great/",
            "metadata": {"title": "<unlabeled event>"},
            "title": "<unlabeled event>",
            "location": None,
        }

    def test_materialize_metadata_preserves_existing_metadata(self):
        manager = EventManager(make_event())
        event = manager.save(self.project.id)

        event.data.setdefault("metadata", {})
        event.data["metadata"]["dogs"] = "are great"  # should not get clobbered

        event_type = get_event_type(event.data)
        event_metadata_from_type = event_type.get_metadata(event.data)
        materialized = materialize_metadata(event.data, event_type, event_metadata_from_type)

        assert materialized["metadata"] == {"title": "<unlabeled event>", "dogs": "are great"}

    @mock.patch("sentry.signals.issue_unresolved.send_robust")
    def test_unresolves_group(self, send_robust):
        ts = time() - 300

        # N.B. EventManager won't unresolve the group unless event2 has a
        # later timestamp than event1.
        manager = EventManager(make_event(event_id="a" * 32, checksum="a" * 32, timestamp=ts))
        with self.tasks():
            event = manager.save(self.project.id)

        group = Group.objects.get(id=event.group_id)
        group.status = GroupStatus.RESOLVED
        group.substatus = None
        group.save()
        assert group.is_resolved()

        manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=ts + 50))
        event2 = manager.save(self.project.id)
        assert event.group_id == event2.group_id

        group = Group.objects.get(id=group.id)
        assert not group.is_resolved()
        assert send_robust.called

    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_does_not_unresolve_group(self, plugin_is_regression):
        # N.B. EventManager won't unresolve the group unless event2 has a
        # later timestamp than event1.
        plugin_is_regression.return_value = False

        manager = EventManager(
            make_event(event_id="a" * 32, checksum="a" * 32, timestamp=1403007314)
        )
        with self.tasks():
            manager.normalize()
            event = manager.save(self.project.id)

        group = Group.objects.get(id=event.group_id)
        group.status = GroupStatus.RESOLVED
        group.substatus = None
        group.save()
        assert group.is_resolved()

        manager = EventManager(
            make_event(event_id="b" * 32, checksum="a" * 32, timestamp=1403007315)
        )
        manager.normalize()
        event2 = manager.save(self.project.id)
        assert event.group_id == event2.group_id

        group = Group.objects.get(id=group.id)
        assert group.is_resolved()

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_marks_as_unresolved_with_new_release(
        self, plugin_is_regression, mock_send_activity_notifications_delay
    ):
        plugin_is_regression.return_value = True

        old_release = Release.objects.create(
            version="a",
            organization_id=self.project.organization_id,
            date_added=timezone.now() - timedelta(minutes=30),
        )
        old_release.add_project(self.project)

        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group

        group.update(status=GroupStatus.RESOLVED, substatus=None)

        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": ""},
        )

        manager = EventManager(
            make_event(
                event_id="b" * 32, checksum="a" * 32, timestamp=time(), release=old_release.version
            )
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.RESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data["version"] == ""

        assert GroupResolution.objects.filter(group=group).exists()

        manager = EventManager(
            make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.UNRESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data["version"] == "b"

        assert not GroupResolution.objects.filter(group=group).exists()

        activity = Activity.objects.get(group=group, type=ActivityType.SET_REGRESSION.value)

        mock_send_activity_notifications_delay.assert_called_once_with(activity.id)

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_that_release_in_latest_activity_prior_to_regression_is_not_overridden(
        self, plugin_is_regression, mock_send_activity_notifications_delay
    ):
        """
        Ensure that when a regression occurs, the release recorded on the latest
        activity prior to that regression is not overridden. It should only be
        overridden if the activity was awaiting the upcoming release.
        """
        plugin_is_regression.return_value = True

        # Create a release and a group associated with it
        old_release = self.create_release(
            version="foobar", date_added=timezone.now() - timedelta(minutes=30)
        )
        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group
        group.update(status=GroupStatus.RESOLVED, substatus=None)

        # Resolve the group in old_release
        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": "foobar"},
        )

        # Create a regression
        manager = EventManager(
            make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.UNRESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data["version"] == "foobar"

        regressed_activity = Activity.objects.get(
            group=group, type=ActivityType.SET_REGRESSION.value
        )
        assert regressed_activity.data["version"] == "b"
        assert regressed_activity.data["follows_semver"] is False

        mock_send_activity_notifications_delay.assert_called_once_with(regressed_activity.id)

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_current_release_version_in_latest_activity_prior_to_regression_is_not_overridden(
        self, plugin_is_regression, mock_send_activity_notifications_delay
    ):
        """
        Ensure that when a regression occurs, the release on the latest activity
        prior to that regression is overridden with the release the regression
        occurred in, but the `current_release_version` value used for semver is
        not lost in the update.
        """
        plugin_is_regression.return_value = True

        # Create a release and a group associated with it
        old_release = self.create_release(
            version="a", date_added=timezone.now() - timedelta(minutes=30)
        )
        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group
        group.update(status=GroupStatus.RESOLVED, substatus=None)

        # Resolve the group in old_release
        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": "", "current_release_version": "pre foobar"},
        )

        # Create a regression
        manager = EventManager(
            make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.UNRESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data["version"] == "b"
        assert activity.data["current_release_version"] == "pre foobar"

        regressed_activity = Activity.objects.get(
            group=group, type=ActivityType.SET_REGRESSION.value
        )
        assert regressed_activity.data["version"] == "b"

        mock_send_activity_notifications_delay.assert_called_once_with(regressed_activity.id)

    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_resolved_in_release_regression_activity_follows_semver(self, plugin_is_regression):
        """
        Issue was marked resolved in 1.0.0, and a regression occurred in 2.0.0.
        If the project follows semver, the regression activity should have
        `follows_semver` set. We should also record the version the issue was
        resolved in as `resolved_in_version`. This allows the UI to say the
        issue was resolved in 1.0.0, regressed in 2.0.0, and that the versions
        were compared using semver.
        """
        plugin_is_regression.return_value = True

        # Create a release and a group associated with it
        old_release = self.create_release(
            version="foo@1.0.0", date_added=timezone.now() - timedelta(minutes=30)
        )
        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group
        group.update(status=GroupStatus.RESOLVED, substatus=None)

        # Resolve the group in old_release
        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": "foo@1.0.0"},
        )

        # Create a regression
        manager = EventManager(
            make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="foo@2.0.0")
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.UNRESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data["version"] == "foo@1.0.0"

        regressed_activity = Activity.objects.get(
            group=group, type=ActivityType.SET_REGRESSION.value
        )
        assert regressed_activity.data["version"] == "foo@2.0.0"
        assert regressed_activity.data["follows_semver"] is True
        assert regressed_activity.data["resolved_in_version"] == "foo@1.0.0"

    def test_has_pending_commit_resolution(self):
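        """A group counts as pending commit resolution when a resolving commit
        exists but is not yet part of any release."""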
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)

        group = event.group
        assert group is not None
        assert group.first_release.version == "1.0"
        assert not has_pending_commit_resolution(group)

        # Add a commit with no associated release
        repo = self.create_repo(project=group.project)
        commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="a" * 40
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=commit.id,
            relationship=GroupLink.Relationship.resolves,
        )
        assert has_pending_commit_resolution(group)

    def test_multiple_pending_commit_resolution(self):
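        """Once one of the resolving commits lands in a release, the group is
        no longer pending."""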
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None

        # Add a few commits with no associated release
        repo = self.create_repo(project=group.project)
        for key in ["a", "b", "c"]:
            commit = Commit.objects.create(
                organization_id=group.project.organization_id,
                repository_id=repo.id,
                key=key * 40,
            )
            GroupLink.objects.create(
                group_id=group.id,
                project_id=group.project_id,
                linked_type=GroupLink.LinkedType.commit,
                linked_id=commit.id,
                relationship=GroupLink.Relationship.resolves,
            )

        pending = has_pending_commit_resolution(group)
        assert pending

        # Most recent commit has been associated with a release
        latest_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="d" * 40
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=latest_commit.id,
            relationship=GroupLink.Relationship.resolves,
        )
        ReleaseCommit.objects.create(
            organization_id=group.project.organization_id,
            release=group.first_release,
            commit=latest_commit,
            order=0,
        )

        pending = has_pending_commit_resolution(group)
        assert pending is False

    def test_has_pending_commit_resolution_issue_regression(self):
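        """A resolving commit whose PR contains only unreleased commits still
        counts as pending."""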
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None
        repo = self.create_repo(project=group.project)

        # The commit that resolved the issue is part of a PR, but all commits
        # within the PR are unreleased.
        commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="a" * 40
        )
        second_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="b" * 40
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=commit.id,
            relationship=GroupLink.Relationship.resolves,
        )

        pr = PullRequest.objects.create(
            organization_id=group.project.organization_id,
            repository_id=repo.id,
            key="1",
        )

        PullRequestCommit.objects.create(pull_request_id=pr.id, commit_id=commit.id)
        PullRequestCommit.objects.create(pull_request_id=pr.id, commit_id=second_commit.id)

        assert PullRequestCommit.objects.filter(pull_request_id=pr.id, commit_id=commit.id).exists()
        assert PullRequestCommit.objects.filter(
            pull_request_id=pr.id, commit_id=second_commit.id
        ).exists()
        assert not ReleaseCommit.objects.filter(commit__pullrequestcommit__id=commit.id).exists()
        assert not ReleaseCommit.objects.filter(
            commit__pullrequestcommit__id=second_commit.id
        ).exists()

        pending = has_pending_commit_resolution(group)
        assert pending

    def test_has_pending_commit_resolution_issue_regression_released_commits(self):
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None
        release = self.create_release(project=self.project, version="1.1")
        repo = self.create_repo(project=group.project)

        # Commit 1 is part of the PR; it resolves the issue in its commit
        # message and is unreleased.
        commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="a" * 38
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=commit.id,
            relationship=GroupLink.Relationship.resolves,
        )

        # Commit 2 is part of the PR, does not resolve the issue, and is released.
        released_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="b" * 38
        )

        # Commit 3 is part of the PR, does not resolve the issue, and is unreleased.
        unreleased_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="c" * 38
        )

        pr = PullRequest.objects.create(
            organization_id=group.project.organization_id,
            repository_id=repo.id,
            key="19",
        )

        PullRequestCommit.objects.create(pull_request_id=pr.id, commit_id=commit.id)

        released_pr_commit = PullRequestCommit.objects.create(
            pull_request_id=pr.id, commit_id=released_commit.id
        )

        unreleased_pr_commit = PullRequestCommit.objects.create(
            pull_request_id=pr.id, commit_id=unreleased_commit.id
        )

        ReleaseCommit.objects.create(
            organization_id=group.project.organization_id,
            release=release,
            commit=released_commit,
            order=1,
        )

        assert Commit.objects.all().count() == 3
        assert PullRequestCommit.objects.filter(pull_request_id=pr.id, commit_id=commit.id).exists()
        assert PullRequestCommit.objects.filter(
            pull_request_id=pr.id, commit_id=released_commit.id
        ).exists()
        assert PullRequestCommit.objects.filter(commit__id=unreleased_pr_commit.commit.id).exists()
        assert ReleaseCommit.objects.filter(
            commit__pullrequestcommit__id=released_pr_commit.id
        ).exists()

        pending = has_pending_commit_resolution(group)
        assert pending is False

    @mock.patch("sentry.integrations.example.integration.ExampleIntegration.sync_status_outbound")
    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_marks_as_unresolved_with_new_release_with_integration(
        self,
        plugin_is_regression,
        mock_send_activity_notifications_delay,
        mock_sync_status_outbound,
    ):
        plugin_is_regression.return_value = True

        old_release = Release.objects.create(
            version="a",
            organization_id=self.project.organization_id,
            date_added=timezone.now() - timedelta(minutes=30),
        )
        old_release.add_project(self.project)

        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group

        org = group.organization

        integration = Integration.objects.create(provider="example", name="Example")
        integration.add_organization(org, self.user)
        OrganizationIntegration.objects.filter(
            integration_id=integration.id, organization_id=group.organization.id
        ).update(
            config={
                "sync_comments": True,
                "sync_status_outbound": True,
                "sync_status_inbound": True,
                "sync_assignee_outbound": True,
                "sync_assignee_inbound": True,
            }
        )

        external_issue = ExternalIssue.objects.get_or_create(
            organization_id=org.id, integration_id=integration.id, key="APP-%s" % group.id
        )[0]

        GroupLink.objects.get_or_create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.issue,
            linked_id=external_issue.id,
            relationship=GroupLink.Relationship.references,
        )[0]

        group.update(status=GroupStatus.RESOLVED, substatus=None)

        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": ""},
        )

        manager = EventManager(
            make_event(
                event_id="b" * 32, checksum="a" * 32, timestamp=time(), release=old_release.version
            )
        )

        with self.tasks():
            with self.feature({"organizations:integrations-issue-sync": True}):
                event = manager.save(self.project.id)
                assert event.group_id == group.id

                group = Group.objects.get(id=group.id)
                assert group.status == GroupStatus.RESOLVED

                activity = Activity.objects.get(id=activity.id)
                assert activity.data["version"] == ""

                assert GroupResolution.objects.filter(group=group).exists()

                manager = EventManager(
                    make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
                )
                event = manager.save(self.project.id)
                assert event.group is not None
                mock_sync_status_outbound.assert_called_once_with(
                    external_issue, False, event.group.project_id
                )
                assert event.group_id == group.id

                group = Group.objects.get(id=group.id)
                assert group.status == GroupStatus.UNRESOLVED

                activity = Activity.objects.get(id=activity.id)
                assert activity.data["version"] == "b"

                assert not GroupResolution.objects.filter(group=group).exists()

                activity = Activity.objects.get(
                    group=group, type=ActivityType.SET_REGRESSION.value
                )

                mock_send_activity_notifications_delay.assert_called_once_with(activity.id)

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_does_not_mark_as_unresolved_with_pending_commit(
        self, plugin_is_regression, mock_send_activity_notifications_delay
    ):
        plugin_is_regression.return_value = True

        repo = self.create_repo(project=self.project)
        commit = self.create_commit(repo=repo)

        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
            )
        )
        event = manager.save(self.project.id)
        group = event.group
        assert group is not None

        group.update(status=GroupStatus.RESOLVED, substatus=None)
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_id=commit.id,
            linked_type=GroupLink.LinkedType.commit,
            relationship=GroupLink.Relationship.resolves,
        )

        manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=time()))
        event = manager.save(self.project.id)
        assert event.group is not None
        assert event.group_id == group.id

        assert Group.objects.get(id=group.id).status == GroupStatus.RESOLVED

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_mark_as_unresolved_with_released_commit(
        self, plugin_is_regression, mock_send_activity_notifications_delay
    ):
        plugin_is_regression.return_value = True

        release = self.create_release(project=self.project)
        repo = self.create_repo(project=self.project)
        commit = self.create_commit(repo=repo, release=release, project=self.project)

        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
            )
        )
        event = manager.save(self.project.id)
        group = event.group
        assert group is not None

        group.update(status=GroupStatus.RESOLVED, substatus=None)

        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_id=commit.id,
            linked_type=GroupLink.LinkedType.commit,
            relationship=GroupLink.Relationship.resolves,
        )

        manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=time()))
        event = manager.save(self.project.id)
        assert event.group is not None
        assert event.group_id == group.id

        assert Group.objects.get(id=group.id).status == GroupStatus.UNRESOLVED

    @mock.patch("sentry.models.Group.is_resolved")
    def test_unresolves_group_with_auto_resolve(self, mock_is_resolved):
        ts = time() - 100
        mock_is_resolved.return_value = False
        manager = EventManager(make_event(event_id="a" * 32, checksum="a" * 32, timestamp=ts))
        with self.tasks():
            event = manager.save(self.project.id)
        assert event.group is not None

        mock_is_resolved.return_value = True
        manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=ts + 100))
        with self.tasks():
            event2 = manager.save(self.project.id)
        assert event2.group is not None
        assert event.group_id == event2.group_id

        group = Group.objects.get(id=event.group.id)
        assert group.active_at.replace(second=0) == event2.datetime.replace(second=0)
        assert group.active_at.replace(second=0) != event.datetime.replace(second=0)

    def test_invalid_transaction(self):
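        """A non-string transaction value is dropped during normalization."""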
        dict_input = {"messages": "foo"}
        manager = EventManager(make_event(transaction=dict_input))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.transaction is None

    def test_transaction_as_culprit(self):
        manager = EventManager(make_event(transaction="foobar"))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.transaction == "foobar"
        assert event.culprit == "foobar"

    def test_culprit_is_not_transaction(self):
        manager = EventManager(make_event(culprit="foobar"))
        manager.normalize()
        event1 = manager.save(self.project.id)
        assert event1.transaction is None
        assert event1.culprit == "foobar"

    def test_culprit_after_stacktrace_processing(self):
        from sentry.grouping.enhancer import Enhancements

        enhancement = Enhancements.from_config_string(
            """
            function:in_app_function +app
            function:not_in_app_function -app
            """,
        )

        manager = EventManager(
            make_event(
                platform="native",
                exception={
                    "values": [
                        {
                            "type": "Hello",
                            "stacktrace": {
                                "frames": [
                                    {
                                        "function": "not_in_app_function",
                                    },
                                    {
                                        "function": "in_app_function",
                                    },
                                ]
                            },
                        }
                    ]
                },
            )
        )
        manager.normalize()

        manager.get_data()["grouping_config"] = {
            "enhancements": enhancement.dumps(),
            "id": "legacy:2019-03-12",
        }
        event1 = manager.save(self.project.id)
        assert event1.transaction is None
        assert event1.culprit == "in_app_function"

    def test_inferred_culprit_from_empty_stacktrace(self):
        manager = EventManager(make_event(stacktrace={"frames": []}))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.culprit == ""

    def test_transaction_and_culprit(self):
        manager = EventManager(make_event(transaction="foobar", culprit="baz"))
        manager.normalize()
        event1 = manager.save(self.project.id)
        assert event1.transaction == "foobar"
        assert event1.culprit == "baz"

    def test_release_with_empty_version(self):
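        """Whitespace-only release versions are treated as no release at all."""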
        cases = ["", " ", "\t", "\n"]
        for case in cases:
            event = self.make_release_event(case, self.project.id)
            assert not event.group.first_release
            assert Release.objects.filter(projects__in=[self.project.id]).count() == 0
            assert Release.objects.filter(organization_id=self.project.organization_id).count() == 0

    def test_first_release(self):
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "1.0"

        event = self.make_release_event("2.0", project_id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "1.0"

    def test_release_project_slug(self):
        project = self.create_project(name="foo")
        release = Release.objects.create(version="foo-1.0", organization=project.organization)
        release.add_project(project)

        event = self.make_release_event("1.0", project.id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "foo-1.0"
        release_tag = [v for k, v in event.tags if k == "sentry:release"][0]
        assert release_tag == "foo-1.0"

        event = self.make_release_event("2.0", project.id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "foo-1.0"

    def test_release_project_slug_long(self):
        project = self.create_project(name="foo")
        partial_version_len = MAX_VERSION_LENGTH - 4
        release = Release.objects.create(
            version="foo-{}".format("a" * partial_version_len), organization=project.organization
        )
        release.add_project(project)

        event = self.make_release_event("a" * partial_version_len, project.id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "foo-{}".format("a" * partial_version_len)
        release_tag = [v for k, v in event.tags if k == "sentry:release"][0]
        assert release_tag == "foo-{}".format("a" * partial_version_len)

    def test_group_release_no_env(self):
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)

        release = Release.objects.get(version="1.0", projects=event.project_id)

        assert GroupRelease.objects.filter(
            release_id=release.id, group_id=event.group_id, environment=""
        ).exists()

        # ensure we're not erroring on second creation
        event = self.make_release_event("1.0", project_id)

    def test_group_release_with_env(self):
        manager = EventManager(make_event(release="1.0", environment="prod", event_id="a" * 32))
        manager.normalize()
        event = manager.save(self.project.id)

        release = Release.objects.get(version="1.0", projects=event.project_id)

        assert GroupRelease.objects.filter(
            release_id=release.id, group_id=event.group_id, environment="prod"
        ).exists()

        manager = EventManager(make_event(release="1.0", environment="staging", event_id="b" * 32))
        event = manager.save(self.project.id)

        release = Release.objects.get(version="1.0", projects=event.project_id)

        assert GroupRelease.objects.filter(
            release_id=release.id, group_id=event.group_id, environment="staging"
        ).exists()

    def test_tsdb(self):
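        """Saving an event increments TSDB counters for both the project and
        the group, with and without an environment filter."""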
        project = self.project
        manager = EventManager(
            make_event(
                fingerprint=["totally unique super duper fingerprint"],
                environment="totally unique super duper environment",
            )
        )
        event = manager.save(project.id)
        assert event.group is not None

        def query(model, key, **kwargs):
            return tsdb.backend.get_sums(
                model,
                [key],
                event.datetime,
                event.datetime,
                tenant_ids={"organization_id": 123, "referrer": "r"},
                **kwargs,
            )[key]

        assert query(TSDBModel.project, project.id) == 1
        assert query(TSDBModel.group, event.group.id) == 1

        environment_id = Environment.get_for_organization_id(
            event.project.organization_id, "totally unique super duper environment"
        ).id
        assert query(TSDBModel.project, project.id, environment_id=environment_id) == 1
        assert query(TSDBModel.group, event.group.id, environment_id=environment_id) == 1

    @pytest.mark.xfail
    def test_record_frequencies(self):
        project = self.project
        manager = EventManager(make_event())
        event = manager.save(project.id)

        assert tsdb.backend.get_most_frequent(
            TSDBModel.frequent_issues_by_project, (event.project.id,), event.datetime
        ) == {event.project.id: [(event.group_id, 1.0)]}

    def test_event_user(self):
- manager = EventManager(
- make_event(
- event_id="a", environment="totally unique environment", **{"user": {"id": "1"}}
- )
- )
- manager.normalize()
- with self.tasks():
- event = manager.save(self.project.id)
- assert event.group is not None
- environment_id = Environment.get_for_organization_id(
- event.project.organization_id, "totally unique environment"
- ).id
- assert tsdb.backend.get_distinct_counts_totals(
- TSDBModel.users_affected_by_group,
- (event.group.id,),
- event.datetime,
- event.datetime,
- tenant_ids={"referrer": "r", "organization_id": 123},
- ) == {event.group.id: 1}
- assert tsdb.backend.get_distinct_counts_totals(
- TSDBModel.users_affected_by_project,
- (event.project.id,),
- event.datetime,
- event.datetime,
- tenant_ids={"organization_id": 123, "referrer": "r"},
- ) == {event.project.id: 1}
- assert tsdb.backend.get_distinct_counts_totals(
- TSDBModel.users_affected_by_group,
- (event.group.id,),
- event.datetime,
- event.datetime,
- environment_id=environment_id,
- tenant_ids={"organization_id": 123, "referrer": "r"},
- ) == {event.group.id: 1}
- assert tsdb.backend.get_distinct_counts_totals(
- TSDBModel.users_affected_by_project,
- (event.project.id,),
- event.datetime,
- event.datetime,
- environment_id=environment_id,
- tenant_ids={"organization_id": 123, "referrer": "r"},
- ) == {event.project.id: 1}
- euser = EventUser.objects.get(project_id=self.project.id, ident="1")
- assert event.get_tag("sentry:user") == euser.tag_value
- # clear the cache, otherwise the EventUser cached by the previous
- # manager.save() would be used instead of jane
- cache.clear()
- # ensure event user is mapped to tags in second attempt
- manager = EventManager(make_event(event_id="b", **{"user": {"id": "1", "name": "jane"}}))
- manager.normalize()
- with self.tasks():
- event = manager.save(self.project.id)
- euser = EventUser.objects.get(id=euser.id)
- assert event.get_tag("sentry:user") == euser.tag_value
- assert euser.name == "jane"
- assert euser.ident == "1"
- def test_event_user_invalid_ip(self):
- manager = EventManager(
- make_event(
- event_id="a", environment="totally unique environment", **{"user": {"id": "1"}}
- )
- )
- manager.normalize()
- # This can happen as part of PII stripping, which happens after normalization
- manager._data["user"]["ip_address"] = "[ip]"
- with self.tasks():
- manager.save(self.project.id)
- euser = EventUser.objects.get(project_id=self.project.id)
- assert euser.ip_address is None
- def test_event_user_unicode_identifier(self):
- manager = EventManager(make_event(**{"user": {"username": "foô"}}))
- manager.normalize()
- with self.tasks():
- manager.save(self.project.id)
- euser = EventUser.objects.get(project_id=self.project.id)
- assert euser.username == "foô"
- def test_environment(self):
- manager = EventManager(make_event(**{"environment": "beta"}))
- manager.normalize()
- event = manager.save(self.project.id)
- assert dict(event.tags).get("environment") == "beta"
- def test_invalid_environment(self):
- manager = EventManager(make_event(**{"environment": "bad/name"}))
- manager.normalize()
- event = manager.save(self.project.id)
- assert dict(event.tags).get("environment") is None
- def test_invalid_tags(self):
- manager = EventManager(make_event(**{"tags": [42]}))
- manager.normalize()
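- # normalization replaces the invalid tag entry with None rather than silently dropping it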
- assert None in manager.get_data().get("tags", [])
- assert 42 not in manager.get_data().get("tags", [])
- event = manager.save(self.project.id)
- assert 42 not in event.tags
- assert None not in event.tags
- @mock.patch("sentry.event_manager.eventstream.backend.insert")
- def test_group_environment(self, eventstream_insert):
- release_version = "1.0"
- def save_event():
- manager = EventManager(
- make_event(
- **{
- "message": "foo",
- "event_id": uuid.uuid1().hex,
- "environment": "beta",
- "release": release_version,
- }
- )
- )
- manager.normalize()
- return manager.save(self.project.id)
- event = save_event()
- # Ensure the `GroupEnvironment` record was created.
- instance = GroupEnvironment.objects.get(
- group_id=event.group_id,
- environment_id=Environment.objects.get(
- organization_id=self.project.organization_id, name=event.get_tag("environment")
- ).id,
- )
- assert Release.objects.get(id=instance.first_release_id).version == release_version
- group_states1 = {
- "is_new": True,
- "is_regression": False,
- "is_new_group_environment": True,
- }
- # Ensure that the first event in the (group, environment) pair is
- # marked as being part of a new environment.
- eventstream_insert.assert_called_with(
- event=event,
- **group_states1,
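- # the expected primary hash equals md5("foo"), i.e. a hash of the event message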
- primary_hash="acbd18db4cc2f85cedef654fccc4a4d8",
- skip_consume=False,
- received_timestamp=event.data["received"],
- group_states=[{"id": event.group.id, **group_states1}],
- )
- event = save_event()
- group_states2 = {
- "is_new": False,
- "is_regression": False,
- "is_new_group_environment": False,
- }
- # Ensure that the next event in the (group, environment) pair is *not*
- # marked as being part of a new environment.
- eventstream_insert.assert_called_with(
- event=event,
- **group_states2,
- primary_hash="acbd18db4cc2f85cedef654fccc4a4d8",
- skip_consume=False,
- received_timestamp=event.data["received"],
- group_states=[{"id": event.group.id, **group_states2}],
- )
- def test_default_fingerprint(self):
- manager = EventManager(make_event())
- manager.normalize()
- event = manager.save(self.project.id)
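- # "{{ default }}" is the placeholder meaning the server-side grouping strategy decides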
- assert event.data.get("fingerprint") == ["{{ default }}"]
- def test_user_report_gets_environment(self):
- project = self.create_project()
- environment = Environment.objects.create(
- organization_id=project.organization_id, name="production"
- )
- environment.add_project(project)
- event_id = "a" * 32
- UserReport.objects.create(
- project_id=project.id,
- event_id=event_id,
- name="foo",
- email="bar@example.com",
- comments="It Broke!!!",
- )
- self.store_event(
- data=make_event(environment=environment.name, event_id=event_id), project_id=project.id
- )
- assert UserReport.objects.get(event_id=event_id).environment_id == environment.id
- def test_default_event_type(self):
- manager = EventManager(make_event(message="foo bar"))
- manager.normalize()
- data = manager.get_data()
- assert data["type"] == "default"
- event = manager.save(self.project.id)
- group = event.group
- assert group is not None
- assert group.data.get("type") == "default"
- assert group.data.get("metadata") == {"title": "foo bar"}
- def test_message_event_type(self):
- manager = EventManager(
- make_event(
- **{
- "message": "",
- "logentry": {"formatted": "foo bar", "message": "foo %s", "params": ["bar"]},
- }
- )
- )
- manager.normalize()
- data = manager.get_data()
- assert data["type"] == "default"
- event = manager.save(self.project.id)
- group = event.group
- assert group is not None
- assert group.data.get("type") == "default"
- assert group.data.get("metadata") == {"title": "foo bar"}
- def test_error_event_type(self):
- manager = EventManager(
- make_event(**{"exception": {"values": [{"type": "Foo", "value": "bar"}]}})
- )
- manager.normalize()
- data = manager.get_data()
- assert data["type"] == "error"
- event = manager.save(self.project.id)
- group = event.group
- assert group is not None
- assert group.data.get("type") == "error"
- assert group.data.get("metadata") == {
- "type": "Foo",
- "value": "bar",
- "display_title_with_tree_label": False,
- }
- def test_csp_event_type(self):
- manager = EventManager(
- make_event(
- **{
- "csp": {
- "effective_directive": "script-src",
- "blocked_uri": "http://example.com",
- },
- # this is normally normalized in Relay as part of ingest
- "logentry": {"message": "Blocked 'script' from 'example.com'"},
- }
- )
- )
- manager.normalize()
- data = manager.get_data()
- assert data["type"] == "csp"
- event = manager.save(self.project.id)
- group = event.group
- assert group is not None
- assert group.data.get("type") == "csp"
- assert group.data.get("metadata") == {
- "directive": "script-src",
- "uri": "example.com",
- "message": "Blocked 'script' from 'example.com'",
- }
- assert group.title == "Blocked 'script' from 'example.com'"
- def test_transaction_event_type(self):
- manager = EventManager(
- make_event(
- **{
- "transaction": "wait",
- "contexts": {
- "trace": {
- "parent_span_id": "bce14471e0e9654d",
- "op": "foobar",
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "span_id": "bf5be759039ede9a",
- }
- },
- "spans": [],
- "timestamp": "2019-06-14T14:01:40Z",
- "start_timestamp": "2019-06-14T14:01:40Z",
- "type": "transaction",
- }
- )
- )
- manager.normalize()
- data = manager.get_data()
- assert data["type"] == "transaction"
- def test_transaction_event_span_grouping(self):
- manager = EventManager(
- make_event(
- **{
- "transaction": "wait",
- "contexts": {
- "trace": {
- "parent_span_id": "bce14471e0e9654d",
- "op": "foobar",
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "span_id": "bf5be759039ede9a",
- }
- },
- "spans": [
- {
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "parent_span_id": "bf5be759039ede9a",
- "span_id": "a" * 16,
- "start_timestamp": 0,
- "timestamp": 1,
- "same_process_as_parent": True,
- "op": "default",
- "description": "span a",
- },
- {
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "parent_span_id": "bf5be759039ede9a",
- "span_id": "b" * 16,
- "start_timestamp": 0,
- "timestamp": 1,
- "same_process_as_parent": True,
- "op": "default",
- "description": "span a",
- },
- {
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "parent_span_id": "bf5be759039ede9a",
- "span_id": "c" * 16,
- "start_timestamp": 0,
- "timestamp": 1,
- "same_process_as_parent": True,
- "op": "default",
- "description": "span b",
- },
- ],
- "timestamp": "2019-06-14T14:01:40Z",
- "start_timestamp": "2019-06-14T14:01:40Z",
- "type": "transaction",
- }
- )
- )
- manager.normalize()
- event = manager.save(self.project.id)
- data = event.data
- assert data["type"] == "transaction"
- assert data["span_grouping_config"]["id"] == "default:2022-10-27"
- spans = [{"hash": span["hash"]} for span in data["spans"]]
- # the basic strategy is to simply use the description
- assert spans == [{"hash": hash_values([span["description"]])} for span in data["spans"]]
- def test_sdk(self):
- manager = EventManager(make_event(**{"sdk": {"name": "sentry-unity", "version": "1.0"}}))
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.data["sdk"] == {
- "name": "sentry-unity",
- "version": "1.0",
- "integrations": None,
- "packages": None,
- }
- def test_no_message(self):
- # test that a missing message is handled gracefully by falling back to the logentry
- manager = EventManager(
- make_event(**{"message": None, "logentry": {"message": "hello world"}})
- )
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.message == "hello world"
- def test_search_message_simple(self):
- manager = EventManager(
- make_event(
- **{
- "message": "test",
- "transaction": "sentry.tasks.process",
- }
- )
- )
- manager.normalize()
- event = manager.save(self.project.id)
- search_message = event.search_message
- assert "test" in search_message
- assert "sentry.tasks.process" in search_message
- def test_search_message_prefers_log_entry_message(self):
- manager = EventManager(
- make_event(
- **{
- "message": "test",
- "logentry": {"message": "hello world"},
- "transaction": "sentry.tasks.process",
- }
- )
- )
- manager.normalize()
- event = manager.save(self.project.id)
- search_message = event.search_message
- assert "test" not in search_message
- assert "hello world" in search_message
- assert "sentry.tasks.process" in search_message
- def test_stringified_message(self):
- manager = EventManager(make_event(**{"message": 1234}))
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.data["logentry"] == {"formatted": "1234", "message": None, "params": None}
- def test_bad_message(self):
- # test that non-string messages are coerced to strings rather than rejected outright
- manager = EventManager(make_event(**{"message": ["asdf"]}))
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.message == '["asdf"]'
- assert "logentry" in event.data
- def test_message_attribute_goes_to_interface(self):
- manager = EventManager(make_event(**{"message": "hello world"}))
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.data["logentry"] == {
- "formatted": "hello world",
- "message": None,
- "params": None,
- }
- def test_message_attribute_shadowing(self):
- # Logentry shadows the legacy message attribute.
- manager = EventManager(
- make_event(**{"message": "world hello", "logentry": {"message": "hello world"}})
- )
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.data["logentry"] == {
- "formatted": "hello world",
- "message": None,
- "params": None,
- }
- def test_message_attribute_interface_both_strings(self):
- manager = EventManager(
- make_event(**{"logentry": "a plain string", "message": "another string"})
- )
- manager.normalize()
- event = manager.save(self.project.id)
- assert event.data["logentry"] == {
- "formatted": "a plain string",
- "message": None,
- "params": None,
- }
- def test_throws_when_matches_discarded_hash(self):
- manager = EventManager(make_event(message="foo", event_id="a" * 32, fingerprint=["a" * 32]))
- with self.tasks():
- event = manager.save(self.project.id)
- group = Group.objects.get(id=event.group_id)
- tombstone = GroupTombstone.objects.create(
- project_id=group.project_id,
- level=group.level,
- message=group.message,
- culprit=group.culprit,
- data=group.data,
- previous_group_id=group.id,
- )
- GroupHash.objects.filter(group=group).update(group=None, group_tombstone_id=tombstone.id)
- manager = EventManager(
- make_event(message="foo", event_id="b" * 32, fingerprint=["a" * 32]),
- project=self.project,
- )
- manager.normalize()
- a1 = CachedAttachment(name="a1", data=b"hello")
- a2 = CachedAttachment(name="a2", data=b"world")
- cache_key = cache_key_for_event(manager.get_data())
- attachment_cache.set(cache_key, attachments=[a1, a2])
- from sentry.utils.outcomes import track_outcome
- mock_track_outcome = mock.Mock(wraps=track_outcome)
- with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
- with self.feature("organizations:event-attachments"):
- with self.tasks():
- with pytest.raises(HashDiscarded):
- event = manager.save(self.project.id, cache_key=cache_key)
- assert mock_track_outcome.call_count == 3
- for o in mock_track_outcome.mock_calls:
- assert o.kwargs["outcome"] == Outcome.FILTERED
- assert o.kwargs["reason"] == FilterStatKeys.DISCARDED_HASH
- o = mock_track_outcome.mock_calls[0]
- assert o.kwargs["category"] == DataCategory.ERROR
- for o in mock_track_outcome.mock_calls[1:]:
- assert o.kwargs["category"] == DataCategory.ATTACHMENT
- assert o.kwargs["quantity"] == 5
- def test_honors_crash_report_limit(self):
- from sentry.utils.outcomes import track_outcome
- mock_track_outcome = mock.Mock(wraps=track_outcome)
- # Allow exactly one crash report
- self.project.update_option("sentry:store_crash_reports", 1)
- manager = EventManager(
- make_event(message="foo", event_id="a" * 32, fingerprint=["a" * 32]),
- project=self.project,
- )
- manager.normalize()
- a1 = CachedAttachment(name="a1", data=b"hello", type="event.minidump")
- a2 = CachedAttachment(name="a2", data=b"world")
- cache_key = cache_key_for_event(manager.get_data())
- attachment_cache.set(cache_key, attachments=[a1, a2])
- with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
- with self.feature("organizations:event-attachments"):
- with self.tasks():
- manager.save(self.project.id, cache_key=cache_key)
- # The first minidump should be accepted, since the limit is 1
- assert mock_track_outcome.call_count == 3
- for o in mock_track_outcome.mock_calls:
- assert o.kwargs["outcome"] == Outcome.ACCEPTED
- mock_track_outcome.reset_mock()
- manager = EventManager(
- make_event(message="foo", event_id="b" * 32, fingerprint=["a" * 32]),
- project=self.project,
- )
- manager.normalize()
- cache_key = cache_key_for_event(manager.get_data())
- attachment_cache.set(cache_key, attachments=[a1, a2])
- with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
- with self.feature("organizations:event-attachments"):
- with self.tasks():
- event = manager.save(self.project.id, cache_key=cache_key)
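- # the second minidump exceeds the stored crash report limit of 1, which is recorded in event metadata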
- assert event.data["metadata"]["stripped_crash"] is True
- assert mock_track_outcome.call_count == 3
- o = mock_track_outcome.mock_calls[0]
- assert o.kwargs["outcome"] == Outcome.FILTERED
- assert o.kwargs["category"] == DataCategory.ATTACHMENT
- assert o.kwargs["reason"] == FilterStatKeys.CRASH_REPORT_LIMIT
- for o in mock_track_outcome.mock_calls[1:]:
- assert o.kwargs["outcome"] == Outcome.ACCEPTED
- def test_event_accepted_outcome(self):
- manager = EventManager(make_event(message="foo"))
- manager.normalize()
- mock_track_outcome = mock.Mock()
- with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
- manager.save(self.project.id)
- assert_mock_called_once_with_partial(
- mock_track_outcome, outcome=Outcome.ACCEPTED, category=DataCategory.ERROR
- )
- def test_attachment_accepted_outcomes(self):
- manager = EventManager(make_event(message="foo"), project=self.project)
- manager.normalize()
- a1 = CachedAttachment(name="a1", data=b"hello")
- a2 = CachedAttachment(name="a2", data=b"limited", rate_limited=True)
- a3 = CachedAttachment(name="a3", data=b"world")
- cache_key = cache_key_for_event(manager.get_data())
- attachment_cache.set(cache_key, attachments=[a1, a2, a3])
- mock_track_outcome = mock.Mock()
- with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
- with self.feature("organizations:event-attachments"):
- manager.save(self.project.id, cache_key=cache_key)
- assert mock_track_outcome.call_count == 3
- for o in mock_track_outcome.mock_calls:
- assert o.kwargs["outcome"] == Outcome.ACCEPTED
- for o in mock_track_outcome.mock_calls[:2]:
- assert o.kwargs["category"] == DataCategory.ATTACHMENT
- assert o.kwargs["quantity"] == 5
- final = mock_track_outcome.mock_calls[2]
- assert final.kwargs["category"] == DataCategory.ERROR
- def test_attachment_filtered_outcomes(self):
- manager = EventManager(make_event(message="foo"), project=self.project)
- manager.normalize()
- # Storing crash reports is disabled by default, which drops the minidump but saves the other attachments
- a1 = CachedAttachment(name="a1", data=b"minidump", type="event.minidump")
- a2 = CachedAttachment(name="a2", data=b"limited", rate_limited=True)
- a3 = CachedAttachment(name="a3", data=b"world")
- cache_key = cache_key_for_event(manager.get_data())
- attachment_cache.set(cache_key, attachments=[a1, a2, a3])
- mock_track_outcome = mock.Mock()
- with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
- with self.feature("organizations:event-attachments"):
- manager.save(self.project.id, cache_key=cache_key)
- assert mock_track_outcome.call_count == 3
- # First outcome is the rejection of the minidump
- o = mock_track_outcome.mock_calls[0]
- assert o.kwargs["outcome"] == Outcome.FILTERED
- assert o.kwargs["category"] == DataCategory.ATTACHMENT
- assert o.kwargs["reason"] == FilterStatKeys.CRASH_REPORT_LIMIT
- # Second outcome is acceptance of the "a3" attachment
- o = mock_track_outcome.mock_calls[1]
- assert o.kwargs["outcome"] == Outcome.ACCEPTED
- assert o.kwargs["category"] == DataCategory.ATTACHMENT
- assert o.kwargs["quantity"] == 5
- # Last outcome is the event
- o = mock_track_outcome.mock_calls[2]
- assert o.kwargs["outcome"] == Outcome.ACCEPTED
- assert o.kwargs["category"] == DataCategory.ERROR
- def test_transaction_outcome_accepted(self):
- """
- Without metrics extraction, we count the number of accepted transaction
- events in the TRANSACTION data category. This maintains compatibility
- with Sentry installations that do not have a metrics pipeline.
- """
- manager = EventManager(
- make_event(
- transaction="wait",
- contexts={
- "trace": {
- "parent_span_id": "bce14471e0e9654d",
- "op": "foobar",
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "span_id": "bf5be759039ede9a",
- }
- },
- spans=[],
- timestamp=iso_format(before_now(minutes=5)),
- start_timestamp=iso_format(before_now(minutes=5)),
- type="transaction",
- platform="python",
- )
- )
- manager.normalize()
- mock_track_outcome = mock.Mock()
- with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
- with self.feature({"organizations:transaction-metrics-extraction": False}):
- manager.save(self.project.id)
- assert_mock_called_once_with_partial(
- mock_track_outcome, outcome=Outcome.ACCEPTED, category=DataCategory.TRANSACTION
- )
- def test_transaction_indexed_outcome_accepted(self):
- """
- With metrics extraction, we count the number of accepted transaction
- events in the TRANSACTION_INDEXED data category. The TRANSACTION data
- category contains the number of metrics from
- ``billing_metrics_consumer``.
- """
- manager = EventManager(
- make_event(
- transaction="wait",
- contexts={
- "trace": {
- "parent_span_id": "bce14471e0e9654d",
- "op": "foobar",
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "span_id": "bf5be759039ede9a",
- }
- },
- spans=[],
- timestamp=iso_format(before_now(minutes=5)),
- start_timestamp=iso_format(before_now(minutes=5)),
- type="transaction",
- platform="python",
- )
- )
- manager.normalize()
- mock_track_outcome = mock.Mock()
- with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
- with self.feature("organizations:transaction-metrics-extraction"):
- manager.save(self.project.id)
- assert_mock_called_once_with_partial(
- mock_track_outcome, outcome=Outcome.ACCEPTED, category=DataCategory.TRANSACTION_INDEXED
- )
- def test_checksum_rehashed(self):
- checksum = "invalid checksum hash"
- manager = EventManager(make_event(**{"checksum": checksum}))
- manager.normalize()
- event = manager.save(self.project.id)
- hashes = [gh.hash for gh in GroupHash.objects.filter(group=event.group)]
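- # an invalid checksum is rehashed, while the raw value is also kept as a hash (both appear below)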
- assert sorted(hashes) == sorted([hash_from_values(checksum), checksum])
- def test_legacy_attributes_moved(self):
- event = make_event(
- release="my-release",
- environment="my-environment",
- site="whatever",
- server_name="foo.com",
- event_id=uuid.uuid1().hex,
- )
- manager = EventManager(event)
- event = manager.save(self.project.id)
- # release and environment stay top-level
- assert event.data["release"] == "my-release"
- assert event.data["environment"] == "my-environment"
- # site and server_name are legacy attributes that are moved into tags
- assert event.data.get("site") is None
- tags = dict(event.tags)
- assert tags["site"] == "whatever"
- assert event.data.get("server_name") is None
- assert tags["server_name"] == "foo.com"
- @freeze_time()
- def test_save_issueless_event(self):
- manager = EventManager(
- make_event(
- transaction="wait",
- contexts={
- "trace": {
- "parent_span_id": "bce14471e0e9654d",
- "op": "foobar",
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "span_id": "bf5be759039ede9a",
- }
- },
- spans=[],
- timestamp=iso_format(before_now(minutes=5)),
- start_timestamp=iso_format(before_now(minutes=5)),
- type="transaction",
- platform="python",
- )
- )
- event = manager.save(self.project.id)
- assert event.group is None
- assert (
- tsdb.backend.get_sums(
- TSDBModel.project,
- [self.project.id],
- event.datetime,
- event.datetime,
- tenant_ids={"organization_id": 123, "referrer": "r"},
- )[self.project.id]
- == 0
- )
- @freeze_time()
- def test_fingerprint_ignored(self):
- manager1 = EventManager(make_event(event_id="a" * 32, fingerprint="fingerprint1"))
- event1 = manager1.save(self.project.id)
- manager2 = EventManager(
- make_event(
- event_id="b" * 32,
- fingerprint="fingerprint1",
- transaction="wait",
- contexts={
- "trace": {
- "parent_span_id": "bce14471e0e9654d",
- "op": "foobar",
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "span_id": "bf5be759039ede9a",
- }
- },
- spans=[],
- timestamp=iso_format(before_now(minutes=1)),
- start_timestamp=iso_format(before_now(minutes=1)),
- type="transaction",
- platform="python",
- )
- )
- event2 = manager2.save(self.project.id)
- assert event1.group is not None
- assert event2.group is None
- assert (
- tsdb.backend.get_sums(
- TSDBModel.project,
- [self.project.id],
- event1.datetime,
- event1.datetime,
- tenant_ids={"organization_id": 123, "referrer": "r"},
- )[self.project.id]
- == 1
- )
- assert (
- tsdb.backend.get_sums(
- TSDBModel.group,
- [event1.group.id],
- event1.datetime,
- event1.datetime,
- tenant_ids={"organization_id": 123, "referrer": "r"},
- )[event1.group.id]
- == 1
- )
- def test_category_match_in_app(self):
- """
- Regression test to ensure that grouping in-app enhancements work in
- principle.
- """
- from sentry.grouping.enhancer import Enhancements
- enhancement = Enhancements.from_config_string(
- """
- function:foo category=bar
- function:foo2 category=bar
- category:bar -app
- """,
- )
- event = make_event(
- platform="native",
- exception={
- "values": [
- {
- "type": "Hello",
- "stacktrace": {
- "frames": [
- {
- "function": "foo",
- "in_app": True,
- },
- {"function": "bar"},
- ]
- },
- }
- ]
- },
- )
- manager = EventManager(event)
- manager.normalize()
- manager.get_data()["grouping_config"] = {
- "enhancements": enhancement.dumps(),
- "id": "mobile:2021-02-12",
- }
- event1 = manager.save(self.project.id)
- assert event1.data["exception"]["values"][0]["stacktrace"]["frames"][0]["in_app"] is False
- event = make_event(
- platform="native",
- exception={
- "values": [
- {
- "type": "Hello",
- "stacktrace": {
- "frames": [
- {
- "function": "foo2",
- "in_app": True,
- },
- {"function": "bar"},
- ]
- },
- }
- ]
- },
- )
- manager = EventManager(event)
- manager.normalize()
- manager.get_data()["grouping_config"] = {
- "enhancements": enhancement.dumps(),
- "id": "mobile:2021-02-12",
- }
- event2 = manager.save(self.project.id)
- assert event2.data["exception"]["values"][0]["stacktrace"]["frames"][0]["in_app"] is False
- assert event1.group_id == event2.group_id
- def test_category_match_group(self):
- """
- Regression test to ensure categories are applied consistently and don't
- produce hash mismatches.
- """
- from sentry.grouping.enhancer import Enhancements
- enhancement = Enhancements.from_config_string(
- """
- function:foo category=foo_like
- category:foo_like -group
- """,
- )
- event = make_event(
- platform="native",
- exception={
- "values": [
- {
- "type": "Hello",
- "stacktrace": {
- "frames": [
- {
- "function": "foo",
- },
- {
- "function": "bar",
- },
- ]
- },
- }
- ]
- },
- )
- manager = EventManager(event)
- manager.normalize()
- grouping_config = {
- "enhancements": enhancement.dumps(),
- "id": "mobile:2021-02-12",
- }
- manager.get_data()["grouping_config"] = grouping_config
- event1 = manager.save(self.project.id)
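- # re-create the event from the saved payload; hashing must be reproducible with the same config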
- event2 = Event(event1.project_id, event1.event_id, data=event1.data)
- assert event1.get_hashes().hashes == event2.get_hashes(grouping_config).hashes
- def test_write_none_tree_labels(self):
- """Write tree labels even if None"""
- event = make_event(
- platform="native",
- exception={
- "values": [
- {
- "type": "Hello",
- "stacktrace": {
- "frames": [
- {
- "function": "<redacted>",
- },
- {
- "function": "<redacted>",
- },
- ]
- },
- }
- ]
- },
- )
- manager = EventManager(event)
- manager.normalize()
- manager.get_data()["grouping_config"] = {
- "id": "mobile:2021-02-12",
- }
- event = manager.save(self.project.id)
- assert event.data["hierarchical_tree_labels"] == [None]
- def test_synthetic_exception_detection(self):
- manager = EventManager(
- make_event(
- message="foo",
- event_id="b" * 32,
- exception={
- "values": [
- {
- "type": "SIGABRT",
- "mechanism": {"handled": False},
- "stacktrace": {"frames": [{"function": "foo"}]},
- }
- ]
- },
- ),
- project=self.project,
- )
- manager.normalize()
- manager.get_data()["grouping_config"] = {
- "id": "mobile:2021-02-12",
- }
- event = manager.save(self.project.id)
- mechanism = event.interfaces["exception"].values[0].mechanism
- assert mechanism is not None
- assert mechanism.synthetic is True
- assert event.title == "foo"
- def test_auto_update_grouping(self):
- with override_settings(SENTRY_GROUPING_AUTO_UPDATE_ENABLED=False):
- # start out with legacy grouping; with auto-update disabled this must not change
- self.project.update_option("sentry:grouping_config", LEGACY_GROUPING_CONFIG)
- manager = EventManager(
- make_event(
- message="foo",
- event_id="c" * 32,
- ),
- project=self.project,
- )
- manager.normalize()
- manager.save(self.project.id)
- # No update yet
- project = Project.objects.get(id=self.project.id)
- assert project.get_option("sentry:grouping_config") == LEGACY_GROUPING_CONFIG
- with override_settings(SENTRY_GROUPING_AUTO_UPDATE_ENABLED=1.0):
- # start out with legacy grouping again; with auto-update enabled this should move us to the default config
- self.project.update_option("sentry:grouping_config", LEGACY_GROUPING_CONFIG)
- manager = EventManager(
- make_event(
- message="foo",
- event_id="c" * 32,
- ),
- project=self.project,
- )
- manager.normalize()
- manager.save(self.project.id)
- # This should have moved us back to the default grouping
- project = Project.objects.get(id=self.project.id)
- assert project.get_option("sentry:grouping_config") == DEFAULT_GROUPING_CONFIG
- # and we should see an audit log record.
- record = AuditLogEntry.objects.first()
- assert record.event == audit_log.get_event_id("PROJECT_EDIT")
- assert record.data["sentry:grouping_config"] == DEFAULT_GROUPING_CONFIG
- assert record.data["slug"] == self.project.slug
- @override_options({"performance.issues.all.problem-detection": 1.0})
- @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
- def test_perf_issue_creation(self):
- with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
- event = self.create_performance_issue(
- event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
- )
- data = event.data
- assert event.get_event_type() == "transaction"
- assert event.transaction == "/books/"
- assert data["span_grouping_config"]["id"] == "default:2022-10-27"
- span_hashes = [span["hash"] for span in data["spans"]]
- assert span_hashes == [
- "0f43fb6f6e01ca52",
- "3dc5dd68b38e1730",
- "424c6ae1641f0f0e",
- "d5da18d7274b34a1",
- "ac72fc0a4f5fe381",
- "ac1468d8e11a0553",
- "d8681423cab4275f",
- "e853d2eb7fb9ebb0",
- "6a992d5529f459a4",
- "b640a0ce465fa2a4",
- "a3605e201eaf6c45",
- "061710eb39a66089",
- "c031296784b22ea9",
- "d74ed7012596c3fb",
- "d74ed7012596c3fb",
- "d74ed7012596c3fb",
- "d74ed7012596c3fb",
- "d74ed7012596c3fb",
- "d74ed7012596c3fb",
- "d74ed7012596c3fb",
- "d74ed7012596c3fb",
- "d74ed7012596c3fb",
- "d74ed7012596c3fb",
- ]
- assert event.group
- group = event.group
- assert group is not None
- assert group.title == "N+1 Query"
- assert (
- group.message
- == "/books/ N+1 Query SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21"
- )
- assert group.culprit == "/books/"
- assert group.get_event_type() == "transaction"
- description = "SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21"
- assert group.get_event_metadata() == {
- "location": "/books/",
- "title": "N+1 Query",
- "value": description,
- }
- assert (
- event.search_message
- == "/books/ N+1 Query SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21"
- )
- assert group.location() == "/books/"
- assert group.level == 40
- assert group.issue_category == GroupCategory.PERFORMANCE
- assert group.issue_type == PerformanceNPlusOneGroupType
- assert event.occurrence
- assert event.occurrence.evidence_display == [
- IssueEvidence(
- name="Offending Spans",
- value="db - SELECT `books_author`.`id`, `books_author`.`name` "
- "FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
- important=True,
- )
- ]
- assert event.occurrence.evidence_data == {
- "transaction_name": "/books/",
- "op": "db",
- "parent_span_ids": ["8dd7a5869a4f4583"],
- "parent_span": "django.view - index",
- "cause_span_ids": ["9179e43ae844b174"],
- "offender_span_ids": [
- "b8be6138369491dd",
- "b2d4826e7b618f1b",
- "b3fdeea42536dbf1",
- "b409e78a092e642f",
- "86d2ede57bbf48d4",
- "8e554c84cdc9731e",
- "94d6230f3f910e12",
- "a210b87a2191ceb6",
- "88a5ccaf25b9bd8f",
- "bb32cf50fc56b296",
- ],
- "repeating_spans": "db - SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
- "repeating_spans_compact": "SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
- "num_repeating_spans": "10",
- }
- @override_options({"performance.issues.all.problem-detection": 1.0})
- @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
- def test_perf_issue_update(self):
- with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
- event = self.create_performance_issue(
- event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
- )
- group = event.group
- assert group is not None
- assert group.issue_category == GroupCategory.PERFORMANCE
- assert group.issue_type == PerformanceNPlusOneGroupType
- group.data["metadata"] = {
- "location": "hi",
- "title": "lol",
- }
- group.culprit = "wat"
- group.message = "nope"
- group.save()
- assert group.location() == "hi"
- assert group.title == "lol"
- with self.tasks():
- self.create_performance_issue(
- event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
- )
- # Make sure the original group is updated via buffers
- group.refresh_from_db()
- assert group.title == "N+1 Query"
- assert group.get_event_metadata() == {
- "location": "/books/",
- "title": "N+1 Query",
- "value": "SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
- }
- assert group.location() == "/books/"
- assert group.message == "nope"
- assert group.culprit == "/books/"
- @override_options({"performance.issues.all.problem-detection": 1.0})
- @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
- def test_error_issue_no_associate_perf_event(self):
- """Test that you can't associate a performance event with an error issue"""
- with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
- event = self.create_performance_issue(
- event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
- )
- assert event.group is not None
- # sneakily make the group type wrong
- group = event.group
- assert group is not None
- group.type = ErrorGroupType.type_id
- group.save()
- event = self.create_performance_issue(
- event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
- )
- assert event.group is None
- @override_options({"performance.issues.all.problem-detection": 1.0})
- @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
- def test_perf_issue_no_associate_error_event(self):
- """Test that you can't associate an error event with a performance issue"""
- with mock.patch("sentry_sdk.tracing.Span.containing_transaction"), self.feature(
- {
- "projects:performance-suspect-spans-ingestion": True,
- }
- ):
- manager = EventManager(make_event())
- manager.normalize()
- event = manager.save(self.project.id)
- assert len(event.groups) == 1
- # sneakily make the group type wrong
- group = event.group
- assert group is not None
- group.type = PerformanceNPlusOneGroupType.type_id
- group.save()
- manager = EventManager(make_event())
- manager.normalize()
- event = manager.save(self.project.id)
- assert not event.group
- @override_options({"performance.issues.all.problem-detection": 1.0})
- @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
- def test_perf_issue_creation_ignored(self):
- with mock.patch("sentry_sdk.tracing.Span.containing_transaction"), self.feature(
- {
- "projects:performance-suspect-spans-ingestion": True,
- }
- ):
- event = self.create_performance_issue(
- event_data=make_event(**get_event("n-plus-one-in-django-index-view")),
- noise_limit=2,
- )
- assert event.get_event_type() == "transaction"
- assert event.group is None
- @override_options({"performance.issues.all.problem-detection": 1.0})
- @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
- def test_perf_issue_creation_over_ignored_threshold(self):
- with mock.patch("sentry_sdk.tracing.Span.containing_transaction"), self.feature(
- {
- "projects:performance-suspect-spans-ingestion": True,
- }
- ):
- event_1 = self.create_performance_issue(
- event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=3
- )
- event_2 = self.create_performance_issue(
- event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=3
- )
- event_3 = self.create_performance_issue(
- event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=3
- )
- assert event_1.get_event_type() == "transaction"
- assert event_2.get_event_type() == "transaction"
- assert event_3.get_event_type() == "transaction"
- # only the third occurrence of the hash should create the group
- assert event_1.group is None
- assert event_2.group is None
- assert event_3.group is not None
- @override_options(
- {
- "performance.issues.slow_db_query.problem-creation": 1.0,
- "performance_issue_creation_rate": 1.0,
- "performance.issues.all.problem-detection": 1.0,
- }
- )
- def test_perf_issue_slow_db_issue_is_created(self):
- def attempt_to_generate_slow_db_issue() -> Event:
- for _ in range(100):
- last_event = self.create_performance_issue(
- event_data=make_event(**get_event("slow-db-spans")),
- issue_type=PerformanceSlowDBQueryGroupType,
- )
- return last_event
- # Should not create the group without the feature flag
- last_event = attempt_to_generate_slow_db_issue()
- assert not last_event.group
- with self.feature({"organizations:performance-slow-db-issue": True}):
- last_event = attempt_to_generate_slow_db_issue()
- assert last_event.group
- assert last_event.group.type == PerformanceSlowDBQueryGroupType.type_id
- @patch("sentry.event_manager.metrics.incr")
- def test_new_group_metrics_logging(self, mock_metrics_incr: MagicMock) -> None:
- manager = EventManager(make_event(platform="javascript"))
- manager.normalize()
- manager.save(self.project.id)
- mock_metrics_incr.assert_any_call(
- "group.created",
- skip_internal=True,
- tags={
- "platform": "javascript",
- },
- )
- class AutoAssociateCommitTest(TestCase, EventManagerTestMixin):
- def setUp(self):
- super().setUp()
- self.repo_name = "example"
- self.project = self.create_project(name="foo")
- self.integration = Integration.objects.create(
- provider="github", name=self.repo_name, external_id="654321"
- )
- self.org_integration = self.integration.add_organization(
- self.project.organization, self.user
- )
- self.repo = self.create_repo(
- project=self.project,
- name=self.repo_name,
- provider="integrations:github",
- integration_id=self.integration.id,
- )
- self.repo.update(config={"name": self.repo_name})
- self.create_code_mapping(
- project=self.project,
- repo=self.repo,
- organization_integration=self.org_integration,
- stack_root="/stack/root",
- source_root="/source/root",
- default_branch="main",
- )
- stub_installation_token()
- responses.add(
- "GET",
- f"https://api.github.com/repos/{self.repo_name}/commits/{LATER_COMMIT_SHA}",
- json=json.loads(GET_COMMIT_EXAMPLE),
- )
- responses.add(
- "GET",
- f"https://api.github.com/repos/{self.repo_name}/commits/{EARLIER_COMMIT_SHA}",
- json=json.loads(GET_PRIOR_COMMIT_EXAMPLE),
- )
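- # 40 hex characters, the length of a full git SHA-1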
- self.dummy_commit_sha = "a" * 40
- responses.add(
- responses.GET,
- f"https://api.github.com/repos/{self.repo_name}/compare/{self.dummy_commit_sha}...{LATER_COMMIT_SHA}",
- json=json.loads(COMPARE_COMMITS_EXAMPLE_WITH_INTERMEDIATE),
- )
- responses.add(
- responses.GET,
- f"https://api.github.com/repos/{self.repo_name}/commits?sha={LATER_COMMIT_SHA}",
- json=json.loads(GET_LAST_2_COMMITS_EXAMPLE),
- )
- def _create_first_release_commit(self):
- # Create a release
- release = self.create_release(project=self.project, version="abcabcabc")
- # Create a commit
- commit = self.create_commit(
- repo=self.repo,
- key=self.dummy_commit_sha,
- )
- # Make a release head commit
- ReleaseHeadCommit.objects.create(
- organization_id=self.project.organization.id,
- repository_id=self.repo.id,
- release=release,
- commit=commit,
- )
- @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
- @responses.activate
- def test_autoassign_commits_on_sha_release_version(self, get_jwt):
- with self.feature("projects:auto-associate-commits-to-release"):
- self._create_first_release_commit()
- # Make a new release with SHA checksum
- with self.tasks():
- _ = self.make_release_event(LATER_COMMIT_SHA, self.project.id)
- release2 = Release.objects.get(version=LATER_COMMIT_SHA)
- commit_list = list(
- Commit.objects.filter(releasecommit__release=release2).order_by(
- "releasecommit__order"
- )
- )
- assert len(commit_list) == 2
- assert commit_list[0].repository_id == self.repo.id
- assert commit_list[0].organization_id == self.project.organization.id
- assert commit_list[0].key == EARLIER_COMMIT_SHA
- assert commit_list[1].repository_id == self.repo.id
- assert commit_list[1].organization_id == self.project.organization.id
- assert commit_list[1].key == LATER_COMMIT_SHA
- @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
- @responses.activate
- def test_autoassign_commits_first_release(self, get_jwt):
- with self.feature("projects:auto-associate-commits-to-release"):
- with self.tasks():
- _ = self.make_release_event(LATER_COMMIT_SHA, self.project.id)
- release2 = Release.objects.get(version=LATER_COMMIT_SHA)
- commit_list = list(
- Commit.objects.filter(releasecommit__release=release2).order_by(
- "releasecommit__order"
- )
- )
- assert len(commit_list) == 2
- assert commit_list[0].repository_id == self.repo.id
- assert commit_list[0].organization_id == self.project.organization.id
- assert commit_list[0].key == EARLIER_COMMIT_SHA
- assert commit_list[1].repository_id == self.repo.id
- assert commit_list[1].organization_id == self.project.organization.id
- assert commit_list[1].key == LATER_COMMIT_SHA
- @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
- @responses.activate
- def test_autoassign_commits_not_a_sha(self, get_jwt):
- SHA = "not-a-sha"
- with self.feature("projects:auto-associate-commits-to-release"):
- with self.tasks():
- _ = self.make_release_event(SHA, self.project.id)
- release2 = Release.objects.get(version=SHA)
- commit_list = list(
- Commit.objects.filter(releasecommit__release=release2).order_by(
- "releasecommit__order"
- )
- )
- assert len(commit_list) == 0
- @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
- @responses.activate
- def test_autoassign_commit_not_found(self, get_jwt):
- SHA = "b" * 40
- responses.add(
- "GET",
- f"https://api.github.com/repos/{self.repo_name}/commits/{SHA}",
- status=HTTP_404_NOT_FOUND,
- )
- with self.feature("projects:auto-associate-commits-to-release"):
- with self.tasks():
- _ = self.make_release_event(SHA, self.project.id)
- release2 = Release.objects.get(version=SHA)
- commit_list = list(
- Commit.objects.filter(releasecommit__release=release2).order_by(
- "releasecommit__order"
- )
- )
- assert len(commit_list) == 0
- @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
- @responses.activate
- def test_autoassign_commits_release_conflict(self, get_jwt):
- # The release exists but none of its commits do; we should still associate the commits
- with self.feature("projects:auto-associate-commits-to-release"):
- preexisting_release = self.create_release(
- project=self.project, version=LATER_COMMIT_SHA
- )
- with self.tasks():
- _ = self.make_release_event(LATER_COMMIT_SHA, self.project.id)
- commit_releases = Release.objects.filter(version=LATER_COMMIT_SHA).all()
- assert len(commit_releases) == 1
- assert commit_releases[0].id == preexisting_release.id
- commit_list = list(
- Commit.objects.filter(releasecommit__release=preexisting_release).order_by(
- "releasecommit__order"
- )
- )
- assert len(commit_list) == 2
- assert commit_list[0].repository_id == self.repo.id
- assert commit_list[0].organization_id == self.project.organization.id
- assert commit_list[0].key == EARLIER_COMMIT_SHA
- assert commit_list[1].repository_id == self.repo.id
- assert commit_list[1].organization_id == self.project.organization.id
- assert commit_list[1].key == LATER_COMMIT_SHA
- @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
- @responses.activate
- def test_autoassign_commits_commit_conflict(self, get_jwt):
- # A commit tied to the release was somehow created before the release itself.
- # Auto-association should tie the existing commit to the new release.
- with self.feature("projects:auto-associate-commits-to-release"):
- author = CommitAuthor.objects.create(
- organization_id=self.organization.id,
- email="support@github.com",
- name="Monalisa Octocat",
- )
- # Values taken from the commit generated by the GH response fixtures
- preexisting_commit = self.create_commit(
- repo=self.repo,
- project=self.project,
- author=author,
- key=EARLIER_COMMIT_SHA,
- message="Fix all the bugs",
- date_added=datetime(2011, 4, 14, 16, 0, 49, tzinfo=timezone.utc),
- )
- with self.tasks():
- self.make_release_event(LATER_COMMIT_SHA, self.project.id)
- new_release = Release.objects.get(version=LATER_COMMIT_SHA)
- commit_list = list(
- Commit.objects.filter(releasecommit__release=new_release).order_by(
- "releasecommit__order"
- )
- )
- assert len(commit_list) == 2
- assert commit_list[0].id == preexisting_commit.id
- assert commit_list[0].repository_id == self.repo.id
- assert commit_list[0].organization_id == self.project.organization.id
- assert commit_list[0].key == EARLIER_COMMIT_SHA
- assert commit_list[1].repository_id == self.repo.id
- assert commit_list[1].organization_id == self.project.organization.id
- assert commit_list[1].key == LATER_COMMIT_SHA
- @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
- @responses.activate
- def test_autoassign_commits_feature_not_enabled(self, get_jwt):
- with self.feature({"projects:auto-associate-commits-to-release": False}):
- with self.tasks():
- _ = self.make_release_event(LATER_COMMIT_SHA, self.project.id)
- release2 = Release.objects.get(version=LATER_COMMIT_SHA)
- commit_list = list(
- Commit.objects.filter(releasecommit__release=release2).order_by(
- "releasecommit__order"
- )
- )
- assert len(commit_list) == 0
- @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
- @responses.activate
- def test_autoassign_commits_duplicate_events(self, get_jwt):
- with self.feature({"projects:auto-associate-commits-to-release": True}):
- with self.tasks():
- event1 = self.make_release_event(LATER_COMMIT_SHA, self.project.id)
- event2 = self.make_release_event(LATER_COMMIT_SHA, self.project.id)
- assert event1 != event2
- assert event1.release == event2.release
- releases = Release.objects.filter(version=LATER_COMMIT_SHA).all()
- assert len(releases) == 1
- commit_list = list(
- Commit.objects.filter(releasecommit__release=releases[0]).order_by(
- "releasecommit__order"
- )
- )
- assert len(commit_list) == 2
- assert commit_list[0].repository_id == self.repo.id
- assert commit_list[0].organization_id == self.project.organization.id
- assert commit_list[0].key == EARLIER_COMMIT_SHA
- assert commit_list[1].repository_id == self.repo.id
- assert commit_list[1].organization_id == self.project.organization.id
- assert commit_list[1].key == LATER_COMMIT_SHA
- @region_silo_test
- class ReleaseIssueTest(TestCase):
- def setUp(self):
- self.project = self.create_project()
- self.release = Release.get_or_create(self.project, "1.0")
- self.environment1 = Environment.get_or_create(self.project, "prod")
- self.environment2 = Environment.get_or_create(self.project, "staging")
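- # five minutes in the past, truncated to whole seconds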
- self.timestamp = float(int(time() - 300))
- def make_event(self, **kwargs):
- result = {
- "event_id": "a" * 32,
- "message": "foo",
- "timestamp": self.timestamp + 0.23,
- "level": logging.ERROR,
- "logger": "default",
- "tags": [],
- }
- result.update(kwargs)
- return result
- def make_release_event(
- self, release_version="1.0", environment_name="prod", project_id=1, **kwargs
- ):
- event = make_event(
- release=release_version, environment=environment_name, event_id=uuid.uuid1().hex
- )
- event.update(kwargs)
- manager = EventManager(event)
- with self.tasks():
- event = manager.save(project_id)
- return event
- def convert_timestamp(self, timestamp):
- # construct the datetime directly in UTC; fromtimestamp() without a tz yields local time
- return datetime.fromtimestamp(timestamp, timezone.utc)
- def assert_release_project_environment(self, event, new_issues_count, first_seen, last_seen):
- release = Release.objects.get(
- organization=event.project.organization.id, version=event.get_tag("sentry:release")
- )
- release_project_envs = ReleaseProjectEnvironment.objects.filter(
- release=release, project=event.project, environment=event.get_environment()
- )
- assert len(release_project_envs) == 1
- release_project_env = release_project_envs[0]
- assert release_project_env.new_issues_count == new_issues_count
- assert release_project_env.first_seen == self.convert_timestamp(first_seen)
- assert release_project_env.last_seen == self.convert_timestamp(last_seen)
- def test_different_groups(self):
- event1 = self.make_release_event(
- release_version=self.release.version,
- environment_name=self.environment1.name,
- project_id=self.project.id,
- checksum="a" * 32,
- timestamp=self.timestamp,
- )
- self.assert_release_project_environment(
- event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
- )
- event2 = self.make_release_event(
- release_version=self.release.version,
- environment_name=self.environment1.name,
- project_id=self.project.id,
- checksum="b" * 32,
- timestamp=self.timestamp + 100,
- )
- self.assert_release_project_environment(
- event=event2,
- new_issues_count=2,
- last_seen=self.timestamp + 100,
- first_seen=self.timestamp,
- )
- def test_same_group(self):
- event1 = self.make_release_event(
- release_version=self.release.version,
- environment_name=self.environment1.name,
- project_id=self.project.id,
- checksum="a" * 32,
- timestamp=self.timestamp,
- )
- self.assert_release_project_environment(
- event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
- )
- event2 = self.make_release_event(
- release_version=self.release.version,
- environment_name=self.environment1.name,
- project_id=self.project.id,
- checksum="a" * 32,
- timestamp=self.timestamp + 100,
- )
- self.assert_release_project_environment(
- event=event2,
- new_issues_count=1,
- last_seen=self.timestamp + 100,
- first_seen=self.timestamp,
- )
- def test_same_group_different_environment(self):
- event1 = self.make_release_event(
- release_version=self.release.version,
- environment_name=self.environment1.name,
- project_id=self.project.id,
- checksum="a" * 32,
- timestamp=self.timestamp,
- )
- self.assert_release_project_environment(
- event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
- )
- event2 = self.make_release_event(
- release_version=self.release.version,
- environment_name=self.environment2.name,
- project_id=self.project.id,
- checksum="a" * 32,
- timestamp=self.timestamp + 100,
- )
- self.assert_release_project_environment(
- event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
- )
- self.assert_release_project_environment(
- event=event2,
- new_issues_count=1,
- last_seen=self.timestamp + 100,
- first_seen=self.timestamp + 100,
- )
- @region_silo_test
- @apply_feature_flag_on_cls("organizations:dynamic-sampling")
- class DSLatestReleaseBoostTest(TestCase):
- def setUp(self):
- self.environment1 = Environment.get_or_create(self.project, "prod")
- self.environment2 = Environment.get_or_create(self.project, "staging")
- self.timestamp = float(int(time() - 300))
- self.redis_client = get_redis_client_for_ds()
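- # the assertions below rely on two redis layouts: a per-pair observation flag at
- # ds::p:<project>:r:<release>[:e:<env>] and a per-project hash ds::p:<project>:boosted_releases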
- def make_transaction_event(self, **kwargs):
- result = {
- "transaction": "wait",
- "contexts": {
- "trace": {
- "parent_span_id": "bce14471e0e9654d",
- "op": "foobar",
- "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
- "span_id": "bf5be759039ede9a",
- }
- },
- "spans": [],
- "timestamp": self.timestamp + 0.23,
- "start_timestamp": "2019-06-14T14:01:40Z",
- "type": "transaction",
- }
- result.update(kwargs)
- return result
- def make_release_transaction(
- self, release_version="1.0", environment_name="prod", project_id=1, **kwargs
- ):
- transaction = (
- self.make_transaction_event(
- release=release_version, environment=environment_name, event_id=uuid.uuid1().hex
- )
- if environment_name is not None
- else self.make_transaction_event(release=release_version, event_id=uuid.uuid1().hex)
- )
- transaction.update(kwargs)
- manager = EventManager(transaction)
- with self.tasks():
- event = manager.save(project_id)
- return event
- @freeze_time("2022-11-03 10:00:00")
- def test_boost_release_with_non_observed_release(self):
- ts = time()
- project = self.create_project(platform="python")
- release_1 = Release.get_or_create(project=project, version="1.0", date_added=datetime.now())
- release_2 = Release.get_or_create(
- project=project, version="2.0", date_added=datetime.now() + timedelta(hours=1)
- )
- release_3 = Release.get_or_create(
- project=project, version="3.0", date_added=datetime.now() + timedelta(hours=2)
- )
- for release, environment in (
- (release_1, None),
- (release_2, "prod"),
- (release_3, "dev"),
- ):
- self.make_release_transaction(
- release_version=release.version,
- environment_name=environment,
- project_id=project.id,
- checksum="a" * 32,
- timestamp=self.timestamp,
- )
- env_postfix = f":e:{environment}" if environment is not None else ""
- assert self.redis_client.get(f"ds::p:{project.id}:r:{release.id}{env_postfix}") == "1"
- assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
- f"ds::r:{release_1.id}": str(ts),
- f"ds::r:{release_2.id}:e:prod": str(ts),
- f"ds::r:{release_3.id}:e:dev": str(ts),
- }
- assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
- ExtendedBoostedRelease(
- id=release_1.id,
- timestamp=ts,
- environment=None,
- cache_key=f"ds::r:{release_1.id}",
- version=release_1.version,
- platform=Platform(project.platform),
- ),
- ExtendedBoostedRelease(
- id=release_2.id,
- timestamp=ts,
- environment="prod",
- cache_key=f"ds::r:{release_2.id}:e:prod",
- version=release_2.version,
- platform=Platform(project.platform),
- ),
- ExtendedBoostedRelease(
- id=release_3.id,
- timestamp=ts,
- environment="dev",
- cache_key=f"ds::r:{release_3.id}:e:dev",
- version=release_3.version,
- platform=Platform(project.platform),
- ),
- ]
- @freeze_time("2022-11-03 10:00:00")
- def test_boost_release_boosts_only_latest_release(self):
- ts = time()
- project = self.create_project(platform="python")
- release_1 = Release.get_or_create(project=project, version="1.0", date_added=datetime.now())
- release_2 = Release.get_or_create(
- project=project,
- version="2.0",
- # We must make sure the new release_2.date_added > release_1.date_added.
- date_added=datetime.now() + timedelta(hours=1),
- )
- # We add a transaction for latest release release_2.
- self.make_release_transaction(
- release_version=release_2.version,
- environment_name=self.environment1.name,
- project_id=project.id,
- checksum="a" * 32,
- timestamp=self.timestamp,
- )
- # We add a transaction for release_1, which is no longer the latest release, so it should be skipped.
- self.make_release_transaction(
- release_version=release_1.version,
- environment_name=self.environment1.name,
- project_id=project.id,
- checksum="a" * 32,
- timestamp=self.timestamp,
- )
- assert (
- self.redis_client.get(f"ds::p:{project.id}:r:{release_2.id}:e:{self.environment1.name}")
- == "1"
- )
- assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
- f"ds::r:{release_2.id}:e:{self.environment1.name}": str(ts),
- }
- assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
- ExtendedBoostedRelease(
- id=release_2.id,
- timestamp=ts,
- environment=self.environment1.name,
- cache_key=f"ds::r:{release_2.id}:e:{self.environment1.name}",
- version=release_2.version,
- platform=Platform(project.platform),
- )
- ]
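
The gate exercised here reduces to a date comparison: a transaction boosts its release only if no other release of the project is newer. A sketch under that assumption; the name is ours, and the production check also involves caching and ORM queries not shown in this diff.

def qualifies_for_boost(candidate, known_releases):
    # Only the most recently added release of the project may be boosted;
    # transactions for older releases are observed but skipped.
    return all(candidate.date_added >= other.date_added for other in known_releases)
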
- @freeze_time("2022-11-03 10:00:00")
- def test_boost_release_with_observed_release_and_different_environment(self):
- project = self.create_project(platform="python")
- release = Release.get_or_create(project=project, version="1.0", date_added=datetime.now())
- self.make_release_transaction(
- release_version=release.version,
- environment_name=self.environment1.name,
- project_id=project.id,
- checksum="a" * 32,
- timestamp=self.timestamp,
- )
- ts_1 = time()
- assert (
- self.redis_client.get(f"ds::p:{project.id}:r:{release.id}:e:{self.environment1.name}")
- == "1"
- )
- assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
- f"ds::r:{release.id}:e:{self.environment1.name}": str(ts_1)
- }
- assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
- ExtendedBoostedRelease(
- id=release.id,
- timestamp=ts_1,
- environment=self.environment1.name,
- cache_key=f"ds::r:{release.id}:e:{self.environment1.name}",
- version=release.version,
- platform=Platform(project.platform),
- )
- ]
- # We simulate that a new transaction with same release but with a different environment value comes after
- # 30 minutes to show that we expect the entry for that release-env to be added to the boosted releases.
- with freeze_time("2022-11-03 10:30:00"):
- self.make_release_transaction(
- release_version=release.version,
- environment_name=self.environment2.name,
- project_id=project.id,
- checksum="b" * 32,
- timestamp=self.timestamp,
- )
- ts_2 = time()
- assert (
- self.redis_client.get(
- f"ds::p:{project.id}:r:{release.id}:e:{self.environment2.name}"
- )
- == "1"
- )
- assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
- f"ds::r:{release.id}:e:{self.environment1.name}": str(ts_1),
- f"ds::r:{release.id}:e:{self.environment2.name}": str(ts_2),
- }
- assert ProjectBoostedReleases(
- project_id=project.id
- ).get_extended_boosted_releases() == [
- ExtendedBoostedRelease(
- id=release.id,
- timestamp=ts_1,
- environment=self.environment1.name,
- cache_key=f"ds::r:{release.id}:e:{self.environment1.name}",
- version=release.version,
- platform=Platform(project.platform),
- ),
- ExtendedBoostedRelease(
- id=release.id,
- timestamp=ts_2,
- environment=self.environment2.name,
- cache_key=f"ds::r:{release.id}:e:{self.environment2.name}",
- version=release.version,
- platform=Platform(project.platform),
- ),
- ]
- # We also test the case in which no environment is set, which can be the case as per
- # https://docs.sentry.io/platforms/javascript/configuration/options/#environment.
- with freeze_time("2022-11-03 11:00:00"):
- self.make_release_transaction(
- release_version=release.version,
- environment_name=None,
- project_id=project.id,
- checksum="b" * 32,
- timestamp=self.timestamp,
- )
- ts_3 = time()
- assert self.redis_client.get(f"ds::p:{project.id}:r:{release.id}") == "1"
- assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
- f"ds::r:{release.id}:e:{self.environment1.name}": str(ts_1),
- f"ds::r:{release.id}:e:{self.environment2.name}": str(ts_2),
- f"ds::r:{release.id}": str(ts_3),
- }
- assert ProjectBoostedReleases(
- project_id=project.id
- ).get_extended_boosted_releases() == [
- ExtendedBoostedRelease(
- id=release.id,
- timestamp=ts_1,
- environment=self.environment1.name,
- cache_key=f"ds::r:{release.id}:e:{self.environment1.name}",
- version=release.version,
- platform=Platform(project.platform),
- ),
- ExtendedBoostedRelease(
- id=release.id,
- timestamp=ts_2,
- environment=self.environment2.name,
- cache_key=f"ds::r:{release.id}:e:{self.environment2.name}",
- version=release.version,
- platform=Platform(project.platform),
- ),
- ExtendedBoostedRelease(
- id=release.id,
- timestamp=ts_3,
- environment=None,
- cache_key=f"ds::r:{release.id}",
- version=release.version,
- platform=Platform(project.platform),
- ),
- ]
- @freeze_time("2022-11-03 10:00:00")
- def test_release_not_boosted_with_observed_release_and_same_environment(self):
- project = self.create_project(platform="python")
- release = Release.get_or_create(project=project, version="1.0", date_added=datetime.now())
- for environment in (self.environment1.name, self.environment2.name):
- self.redis_client.set(
- f"ds::p:{project.id}:r:{release.id}:e:{environment}", 1, 60 * 60 * 24
- )
- self.make_release_transaction(
- release_version=release.version,
- environment_name=environment,
- project_id=project.id,
- checksum="b" * 32,
- timestamp=self.timestamp,
- )
- assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {}
- assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == []
- @freeze_time("2022-11-03 10:00:00")
- def test_release_not_boosted_with_deleted_release_after_event_received(self):
- ts = time()
- project = self.create_project(platform="python")
- release_1 = Release.get_or_create(project=project, version="1.0", date_added=datetime.now())
- release_2 = Release.get_or_create(
- project=project, version="2.0", date_added=datetime.now() + timedelta(hours=1)
- )
- self.make_release_transaction(
- release_version=release_1.version,
- environment_name=None,
- project_id=project.id,
- checksum="a" * 32,
- timestamp=self.timestamp,
- )
- assert self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}") == "1"
- self.make_release_transaction(
- release_version=release_2.version,
- environment_name=None,
- project_id=project.id,
- checksum="a" * 32,
- timestamp=self.timestamp,
- )
- assert self.redis_client.get(f"ds::p:{project.id}:r:{release_2.id}") == "1"
- # We simulate that the release_2 is deleted after the boost has been inserted.
- release_2_id = release_2.id
- release_2.delete()
- # We expect the boosted release to be kept in Redis, if not queried by the ProjectBoostedReleases.
- assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
- f"ds::r:{release_1.id}": str(ts),
- f"ds::r:{release_2_id}": str(ts),
- }
- # We expect to not see the release 2 because it will not be in the database anymore, thus we mark it as
- # expired.
- assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
- ExtendedBoostedRelease(
- id=release_1.id,
- timestamp=ts,
- environment=None,
- cache_key=f"ds::r:{release_1.id}",
- version=release_1.version,
- platform=Platform(project.platform),
- ),
- ]
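
The pair of assertions above pins down a lazy-cleanup contract: Redis keeps the stale field until the hash is read through ProjectBoostedReleases, which drops ids that no longer resolve to a database row. A rough sketch of that read-side filter; the function name and tuple shape are our own.

def extend_boosted_releases(cached_boosts, releases_by_id):
    # cached_boosts: iterable of (release_id, timestamp, environment, cache_key)
    # parsed from the Redis hash. Entries whose release no longer exists in
    # the database are dropped on read; the Redis field itself is left to
    # age out rather than being deleted eagerly.
    extended = []
    for release_id, timestamp, environment, cache_key in cached_boosts:
        release = releases_by_id.get(release_id)
        if release is not None:
            extended.append((release_id, timestamp, environment, cache_key, release.version))
    return extended
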
- @freeze_time("2022-11-03 10:00:00")
- def test_get_boosted_releases_with_old_and_new_cache_keys(self):
- ts = time()
- project = self.create_project(platform="python")
- # Old cache key
- release_1 = Release.get_or_create(project=project, version="1.0", date_added=datetime.now())
- self.redis_client.hset(
- f"ds::p:{project.id}:boosted_releases",
- f"{release_1.id}",
- ts,
- )
- # New cache key
- release_2 = Release.get_or_create(
- project=project, version="2.0", date_added=datetime.now() + timedelta(hours=1)
- )
- self.redis_client.hset(
- f"ds::p:{project.id}:boosted_releases",
- f"ds::r:{release_2.id}",
- ts,
- )
- self.redis_client.hset(
- f"ds::p:{project.id}:boosted_releases",
- f"ds::r:{release_2.id}:e:{self.environment1.name}",
- ts,
- )
- self.redis_client.hset(
- f"ds::p:{project.id}:boosted_releases",
- f"ds::r:{release_2.id}:e:{self.environment2.name}",
- ts,
- )
- assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
- ExtendedBoostedRelease(
- id=release_1.id,
- timestamp=ts,
- environment=None,
- # This item has the old cache key.
- cache_key=f"{release_1.id}",
- version=release_1.version,
- platform=Platform(project.platform),
- ),
- ExtendedBoostedRelease(
- id=release_2.id,
- timestamp=ts,
- environment=None,
- cache_key=f"ds::r:{release_2.id}",
- version=release_2.version,
- platform=Platform(project.platform),
- ),
- ExtendedBoostedRelease(
- id=release_2.id,
- timestamp=ts,
- environment=self.environment1.name,
- cache_key=f"ds::r:{release_2.id}:e:{self.environment1.name}",
- version=release_2.version,
- platform=Platform(project.platform),
- ),
- ExtendedBoostedRelease(
- id=release_2.id,
- timestamp=ts,
- environment=self.environment2.name,
- cache_key=f"ds::r:{release_2.id}:e:{self.environment2.name}",
- version=release_2.version,
- platform=Platform(project.platform),
- ),
- ]
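
Supporting both field formats in one hash means the reader has to dispatch on shape: new-style fields are namespaced and may carry an environment suffix, while old-style fields are the bare release id. A sketch of that parsing; the regex and helper name are ours, but the two formats are exactly those written above.

import re

# New-style field: "ds::r:<release_id>" plus an optional ":e:<environment>".
# Old-style field: the bare release id, e.g. "123".
BOOSTED_RELEASE_FIELD_RE = re.compile(r"^ds::r:(?P<release_id>\d+)(?::e:(?P<environment>.+))?$")

def parse_boosted_release_field(field):
    match = BOOSTED_RELEASE_FIELD_RE.match(field)
    if match:
        return int(match["release_id"]), match["environment"]
    return int(field), None  # legacy format: the field is just the release id
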
- @freeze_time("2022-11-03 10:00:00")
- def test_expired_boosted_releases_are_removed(self):
- ts = time()
- # We want to test with multiple platforms.
- for platform in ("python", "java", None):
- project = self.create_project(platform=platform)
- for index, (release_version, environment) in enumerate(
- (
- (f"1.0-{platform}", self.environment1.name),
- (f"2.0-{platform}", self.environment2.name),
- )
- ):
- release = Release.get_or_create(
- project=project,
- version=release_version,
- date_added=datetime.now() + timedelta(hours=index),
- )
- self.redis_client.set(
- f"ds::p:{project.id}:r:{release.id}:e:{environment}", 1, 60 * 60 * 24
- )
- self.redis_client.hset(
- f"ds::p:{project.id}:boosted_releases",
- f"ds::r:{release.id}:e:{environment}",
- # We set the creation time in order to expire it by 1 second.
- ts - Platform(platform).time_to_adoption - 1,
- )
- # We add a new boosted release that is not expired.
- release_3 = Release.get_or_create(
- project=project,
- version=f"3.0-{platform}",
- date_added=datetime.now() + timedelta(hours=2),
- )
- self.make_release_transaction(
- release_version=release_3.version,
- environment_name=self.environment1.name,
- project_id=project.id,
- checksum="b" * 32,
- timestamp=self.timestamp,
- )
- assert (
- self.redis_client.get(
- f"ds::p:{project.id}:r:{release_3.id}:e:{self.environment1.name}"
- )
- == "1"
- )
- assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
- f"ds::r:{release_3.id}:e:{self.environment1.name}": str(ts)
- }
- assert ProjectBoostedReleases(
- project_id=project.id
- ).get_extended_boosted_releases() == [
- ExtendedBoostedRelease(
- id=release_3.id,
- timestamp=ts,
- environment=self.environment1.name,
- cache_key=f"ds::r:{release_3.id}:e:{self.environment1.name}",
- version=release_3.version,
- platform=Platform(project.platform),
- )
- ]
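
Expiry is measured against the platform's adoption window, which is why the setup backdates each boost by exactly time_to_adoption plus one second. The read-side check this implies is the following sketch; the helper name is ours, while time_to_adoption is the Platform attribute used in the setup above.

def boost_expired(boost_timestamp, platform, now):
    # A boost older than the platform's adoption window is dropped when the
    # boosted_releases hash is read back.
    return now - boost_timestamp > platform.time_to_adoption
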
-
-     @mock.patch("sentry.event_manager.schedule_invalidate_project_config")
-     def test_project_config_invalidation_is_triggered_when_new_release_is_observed(
-         self, mocked_invalidate
-     ):
-         self.make_release_transaction(
-             release_version=self.release.version,
-             environment_name=self.environment1.name,
-             project_id=self.project.id,
-             checksum="a" * 32,
-             timestamp=self.timestamp,
-         )
-         assert any(
-             o.kwargs["trigger"] == "dynamic_sampling:boost_release"
-             for o in mocked_invalidate.mock_calls
-         )
-
-     @freeze_time()
-     @mock.patch("sentry.dynamic_sampling.rules.helpers.latest_releases.BOOSTED_RELEASES_LIMIT", 2)
-     def test_least_recently_boosted_release_is_removed_if_limit_is_exceeded(self):
-         ts = time()
-
-         project = self.create_project(platform="python")
-         release_1 = Release.get_or_create(
-             project=project,
-             version="1.0",
-             date_added=datetime.now(),
-         )
-         release_2 = Release.get_or_create(
-             project=project,
-             version="2.0",
-             date_added=datetime.now() + timedelta(hours=1),
-         )
-
-         # We boost with increasing timestamps so that we know the entry with the smallest timestamp
-         # will be evicted.
-         for release, boost_time in ((release_1, ts - 2), (release_2, ts - 1)):
-             self.redis_client.set(
-                 f"ds::p:{project.id}:r:{release.id}",
-                 1,
-                 60 * 60 * 24,
-             )
-             self.redis_client.hset(
-                 f"ds::p:{project.id}:boosted_releases",
-                 f"ds::r:{release.id}",
-                 boost_time,
-             )
-
-         release_3 = Release.get_or_create(
-             project=project,
-             version="3.0",
-             date_added=datetime.now() + timedelta(hours=2),
-         )
-         self.make_release_transaction(
-             release_version=release_3.version,
-             environment_name=self.environment1.name,
-             project_id=project.id,
-             checksum="b" * 32,
-             timestamp=self.timestamp,
-         )
-
-         assert (
-             self.redis_client.get(f"ds::p:{project.id}:r:{release_3.id}:e:{self.environment1.name}")
-             == "1"
-         )
-         assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
-             f"ds::r:{release_2.id}": str(ts - 1),
-             f"ds::r:{release_3.id}:e:{self.environment1.name}": str(ts),
-         }
-         assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
-             ExtendedBoostedRelease(
-                 id=release_2.id,
-                 timestamp=ts - 1,
-                 environment=None,
-                 cache_key=f"ds::r:{release_2.id}",
-                 version=release_2.version,
-                 platform=Platform(project.platform),
-             ),
-             ExtendedBoostedRelease(
-                 id=release_3.id,
-                 timestamp=ts,
-                 environment=self.environment1.name,
-                 cache_key=f"ds::r:{release_3.id}:e:{self.environment1.name}",
-                 version=release_3.version,
-                 platform=Platform(project.platform),
-             ),
-         ]
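
With BOOSTED_RELEASES_LIMIT patched down to 2, inserting a third boost must evict the entry with the smallest timestamp, i.e. the least recently boosted one. The capacity rule this asserts, as a sketch with names of our own choosing:

BOOSTED_RELEASES_LIMIT = 2  # patched down from the production default in this test

def insert_boost(boosts, field, now, limit=BOOSTED_RELEASES_LIMIT):
    # boosts maps cache field -> boost timestamp. When at capacity, evict the
    # least recently boosted entry (smallest timestamp) before inserting.
    if len(boosts) >= limit:
        del boosts[min(boosts, key=boosts.get)]
    boosts[field] = now

Replayed against the fixture above, release_1 at ts - 2 holds the smallest timestamp and is evicted, which is exactly what the hgetall assertion checks.
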
-
-     @freeze_time()
-     @mock.patch("sentry.dynamic_sampling.rules.helpers.latest_releases.BOOSTED_RELEASES_LIMIT", 2)
-     def test_removed_boost_not_added_again_if_limit_is_exceeded(self):
-         ts = time()
-
-         project = self.create_project(platform="python")
-         release_1 = Release.get_or_create(project=project, version="1.0", date_added=datetime.now())
-
-         # We want to test the case in which the same release arrives with enough distinct environments
-         # to go over the limit, an environment gets evicted, and then a transaction with the evicted
-         # environment arrives again.
-         #
-         # As an example, suppose the following history of transactions received in the form (release, env):
-         # (1, production) -> (1, staging) -> (1, None) -> (1, production)
-         #
-         # Once we receive the first two, we have reached maximum capacity. Then we receive (1, None) and
-         # evict the boost for (1, production), which results in the boosts (1, staging) and (1, None).
-         # After that we receive (1, production) again, but in this case we don't want to evict
-         # (1, staging), because that would end up in an infinite eviction loop. Instead, we expect
-         # (1, production) to stay marked as observed, and to be un-observed only if it receives no
-         # transactions within the next 24 hours.
-         environments_sequence = [
-             self.environment1.name,
-             self.environment2.name,
-             None,
-             self.environment1.name,
-         ]
-         for environment in environments_sequence:
-             self.make_release_transaction(
-                 release_version=release_1.version,
-                 environment_name=environment,
-                 project_id=project.id,
-                 checksum="b" * 32,
-                 timestamp=self.timestamp,
-             )
-
-         # We assert that all (release, environment) pairs have been observed.
-         assert (
-             self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}:e:{self.environment1.name}")
-             == "1"
-         )
-         assert (
-             self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}:e:{self.environment2.name}")
-             == "1"
-         )
-         assert self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}") == "1"
-
-         # We assert that only the last two newly observed (release, env) pairs are boosted.
-         assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
-             f"ds::r:{release_1.id}:e:{self.environment2.name}": str(ts),
-             f"ds::r:{release_1.id}": str(ts),
-         }
-         assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
-             ExtendedBoostedRelease(
-                 id=release_1.id,
-                 timestamp=ts,
-                 environment=self.environment2.name,
-                 cache_key=f"ds::r:{release_1.id}:e:{self.environment2.name}",
-                 version=release_1.version,
-                 platform=Platform(project.platform),
-             ),
-             ExtendedBoostedRelease(
-                 id=release_1.id,
-                 timestamp=ts,
-                 environment=None,
-                 cache_key=f"ds::r:{release_1.id}",
-                 version=release_1.version,
-                 platform=Platform(project.platform),
-             ),
-         ]
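
Combining the observed flag with the capacity rule explains the no-ping-pong property described in the comment above: eviction removes the boost but leaves the observed flag set, so a returning pair fails the observation gate instead of evicting someone else. A compact, self-contained sketch with a replay of the test's (release, env) sequence; all names here are ours.

def observe_and_maybe_boost(observed, boosts, field, now, limit=2):
    # Eviction removes a boost but leaves the observed flag in place, so a
    # previously evicted pair is not boosted again when it reappears.
    if field in observed:
        return False
    observed.add(field)
    if len(boosts) >= limit:
        del boosts[min(boosts, key=boosts.get)]
    boosts[field] = now
    return True

observed, boosts = set(), {}
for step, field in enumerate(["e:production", "e:staging", "no-env", "e:production"]):
    observe_and_maybe_boost(observed, boosts, field, now=step, limit=2)
assert set(boosts) == {"e:staging", "no-env"}  # (1, production) is not re-boosted
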
-
-
- class TestSaveGroupHashAndGroup(TransactionTestCase):
-     def test(self):
-         perf_data = load_data("transaction-n-plus-one", timestamp=before_now(minutes=10))
-         event = _get_event_instance(perf_data, project_id=self.project.id)
-
-         group_hash = "some_group"
-         group, created = _save_grouphash_and_group(self.project, event, group_hash)
-         assert created
-
-         group_2, created = _save_grouphash_and_group(self.project, event, group_hash)
-         assert group.id == group_2.id
-         assert not created
-         assert Group.objects.filter(grouphash__hash=group_hash).count() == 1
-
-         group_3, created = _save_grouphash_and_group(self.project, event, "new_hash")
-         assert created
-         assert group_2.id != group_3.id
-         assert Group.objects.filter(grouphash__hash=group_hash).count() == 1
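
The idempotency asserted by this test is a get-or-create keyed on (project, hash): the first save creates the group, and later saves with the same hash return the existing one. In outline, this is a sketch of the contract only, not the production implementation:

def save_grouphash_and_group(grouphash_to_group, project_id, hash_value):
    # Returns (group, created): the (project, hash) pair maps to exactly one
    # group, so a second save with the same hash reuses the existing group.
    key = (project_id, hash_value)
    if key in grouphash_to_group:
        return grouphash_to_group[key], False
    group = object()  # stand-in for the newly created Group row
    grouphash_to_group[key] = group
    return group, True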