test_event_manager.py

from __future__ import annotations

import logging
import uuid
from collections.abc import Mapping
from datetime import UTC, datetime, timedelta
from time import time
from typing import Any
from unittest import mock
from unittest.mock import MagicMock, patch

import pytest
import responses
from arroyo.backends.kafka.consumer import KafkaPayload
from arroyo.backends.local.backend import LocalBroker
from arroyo.backends.local.storages.memory import MemoryMessageStorage
from arroyo.types import Partition, Topic
from django.conf import settings
from django.core.cache import cache
from django.utils import timezone

from fixtures.github import (
    COMPARE_COMMITS_EXAMPLE_WITH_INTERMEDIATE,
    EARLIER_COMMIT_SHA,
    GET_COMMIT_EXAMPLE,
    GET_LAST_2_COMMITS_EXAMPLE,
    GET_PRIOR_COMMIT_EXAMPLE,
    LATER_COMMIT_SHA,
)
from sentry import eventstore, nodestore, tsdb
from sentry.attachments import CachedAttachment, attachment_cache
from sentry.constants import MAX_VERSION_LENGTH, DataCategory
from sentry.dynamic_sampling import (
    ExtendedBoostedRelease,
    Platform,
    ProjectBoostedReleases,
    get_redis_client_for_ds,
)
from sentry.event_manager import (
    EventManager,
    _get_event_instance,
    get_event_type,
    has_pending_commit_resolution,
    materialize_metadata,
    save_grouphash_and_group,
)
from sentry.eventstore.models import Event
from sentry.exceptions import HashDiscarded
from sentry.grouping.api import GroupingConfig, load_grouping_config
from sentry.grouping.utils import hash_from_values
from sentry.ingest.inbound_filters import FilterStatKeys
from sentry.issues.grouptype import (
    ErrorGroupType,
    GroupCategory,
    PerformanceNPlusOneGroupType,
    PerformanceSlowDBQueryGroupType,
)
from sentry.issues.issue_occurrence import IssueEvidence
from sentry.models.activity import Activity
from sentry.models.commit import Commit
from sentry.models.environment import Environment
from sentry.models.group import Group, GroupStatus
from sentry.models.groupenvironment import GroupEnvironment
from sentry.models.grouphash import GroupHash
from sentry.models.grouplink import GroupLink
from sentry.models.grouprelease import GroupRelease
from sentry.models.groupresolution import GroupResolution
from sentry.models.grouptombstone import GroupTombstone
from sentry.models.integrations import Integration
from sentry.models.integrations.external_issue import ExternalIssue
from sentry.models.pullrequest import PullRequest, PullRequestCommit
from sentry.models.release import Release
from sentry.models.releasecommit import ReleaseCommit
from sentry.models.releaseheadcommit import ReleaseHeadCommit
from sentry.models.releaseprojectenvironment import ReleaseProjectEnvironment
from sentry.options import set
from sentry.spans.grouping.utils import hash_values
from sentry.testutils.asserts import assert_mock_called_once_with_partial
from sentry.testutils.cases import (
    PerformanceIssueTestCase,
    SnubaTestCase,
    TestCase,
    TransactionTestCase,
)
from sentry.testutils.helpers import apply_feature_flag_on_cls, override_options
from sentry.testutils.helpers.datetime import before_now, freeze_time, iso_format
from sentry.testutils.performance_issues.event_generators import get_event
from sentry.testutils.pytest.fixtures import django_db_all
from sentry.testutils.silo import assume_test_silo_mode_of
from sentry.testutils.skips import requires_snuba
from sentry.tsdb.base import TSDBModel
from sentry.types.activity import ActivityType
from sentry.types.group import PriorityLevel
from sentry.usage_accountant import accountant
from sentry.utils import json
from sentry.utils.cache import cache_key_for_event
from sentry.utils.eventuser import EventUser
from sentry.utils.outcomes import Outcome
from sentry.utils.samples import load_data

pytestmark = [requires_snuba]
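
# These tests run against Snuba (hence the module-level `requires_snuba` mark)
# and exercise EventManager end to end: normalization, grouping, release and
# regression handling, and TSDB/eventstream side effects.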


def make_event(**kwargs: Any) -> dict[str, Any]:
    # Build a minimal event payload; uuid1() gives each call a unique event_id
    # unless one is passed in explicitly.
    result = {
        "event_id": uuid.uuid1().hex,
        "level": logging.ERROR,
        "logger": "default",
        "tags": [],
    }
    result.update(kwargs)
    return result


class EventManagerTestMixin:
    def make_release_event(self, release_name: str, project_id: int) -> Event:
        manager = EventManager(make_event(release=release_name))
        manager.normalize()
        event = manager.save(project_id)
        return event


class EventManagerTest(TestCase, SnubaTestCase, EventManagerTestMixin, PerformanceIssueTestCase):
    def test_ephemeral_interfaces_removed_on_save(self) -> None:
        manager = EventManager(make_event(platform="python"))
        manager.normalize()
        event = manager.save(self.project.id)

        group = event.group
        assert group is not None
        assert group.platform == "python"
        assert event.platform == "python"

    @mock.patch("sentry.event_manager.eventstream.backend.insert")
    def test_dupe_message_id(self, eventstream_insert: mock.MagicMock) -> None:
        # Saves the latest event to nodestore and eventstream
        project_id = self.project.id
        event_id = "a" * 32
        node_id = Event.generate_node_id(project_id, event_id)

        manager = EventManager(make_event(event_id=event_id, message="first"))
        manager.normalize()
        manager.save(project_id)
        assert nodestore.backend.get(node_id)["logentry"]["formatted"] == "first"

        manager = EventManager(make_event(event_id=event_id, message="second"))
        manager.normalize()
        manager.save(project_id)
        assert nodestore.backend.get(node_id)["logentry"]["formatted"] == "second"

        assert eventstream_insert.call_count == 2

    def test_materialize_metadata_simple(self) -> None:
        manager = EventManager(make_event(transaction="/dogs/are/great/"))
        event = manager.save(self.project.id)

        event_type = get_event_type(event.data)
        event_metadata = event_type.get_metadata(event.data)

        assert materialize_metadata(event.data, event_type, event_metadata) == {
            "type": "default",
            "culprit": "/dogs/are/great/",
            "metadata": {"title": "<unlabeled event>"},
            "title": "<unlabeled event>",
            "location": None,
        }

    def test_materialize_metadata_preserves_existing_metadata(self) -> None:
        manager = EventManager(make_event())
        event = manager.save(self.project.id)

        event.data.setdefault("metadata", {})
        event.data["metadata"]["dogs"] = "are great"  # should not get clobbered

        event_type = get_event_type(event.data)
        event_metadata_from_type = event_type.get_metadata(event.data)
        materialized = materialize_metadata(event.data, event_type, event_metadata_from_type)

        assert materialized["metadata"] == {"title": "<unlabeled event>", "dogs": "are great"}

    def test_react_error_picks_cause_error_title_subtitle(self) -> None:
        cause_error_value = "Load failed"
        # React 19 hydration errors include both the hydration error and a cause.
        # If we derive the title from the cause error, the developer can more
        # easily distinguish them.
        manager = EventManager(
            make_event(
                exception={
                    "values": [
                        {
                            "type": "TypeError",
                            "value": cause_error_value,
                            "mechanism": {
                                "type": "onerror",
                                "handled": False,
                                "source": "cause",
                                "exception_id": 1,
                                "parent_id": 0,
                            },
                        },
                        {
                            "type": "Error",
                            "value": "There was an error during concurrent rendering but React was able to recover by instead synchronously rendering the entire root.",
                            "mechanism": {
                                "type": "generic",
                                "handled": True,
                                "exception_id": 0,
                            },
                        },
                    ]
                },
            )
        )
        event = manager.save(self.project.id)
        assert event.data["metadata"]["value"] == cause_error_value
        assert event.data["metadata"]["type"] == "TypeError"
        assert event.group is not None
        assert event.group.title == f"TypeError: {cause_error_value}"

    def test_react_hydration_error_picks_cause_error_title_subtitle(self) -> None:
        cause_error_value = "Cannot read properties of undefined (reading 'nodeName')"
        # React 19 hydration errors include both the hydration error and a cause.
        # If we derive the title from the cause error, the developer can more
        # easily distinguish them.
        manager = EventManager(
            make_event(
                exception={
                    "values": [
                        {
                            "type": "TypeError",
                            "value": cause_error_value,
                            "mechanism": {
                                "type": "chained",
                                "source": "cause",
                                "exception_id": 1,
                                "parent_id": 0,
                            },
                        },
                        {
                            "type": "Error",
                            "value": "There was an error while hydrating but React was able to recover by instead client rendering from the nearest Suspense boundary.",
                            "mechanism": {
                                "type": "generic",
                                "exception_id": 0,
                            },
                        },
                    ]
                },
            )
        )
        event = manager.save(self.project.id)
        assert event.data["metadata"]["value"] == cause_error_value
        assert event.data["metadata"]["type"] == "TypeError"
        assert event.group is not None
        assert event.group.title == f"TypeError: {cause_error_value}"

    @mock.patch("sentry.signals.issue_unresolved.send_robust")
    def test_unresolves_group(self, send_robust: mock.MagicMock) -> None:
        ts = time() - 300

        # N.B. EventManager won't unresolve the group unless event2 has a later
        # timestamp than event1.
        manager = EventManager(make_event(event_id="a" * 32, checksum="a" * 32, timestamp=ts))
        with self.tasks():
            event = manager.save(self.project.id)

        group = Group.objects.get(id=event.group_id)
        group.status = GroupStatus.RESOLVED
        group.substatus = None
        group.save()
        assert group.is_resolved()

        manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=ts + 50))
        event2 = manager.save(self.project.id)
        assert event.group_id == event2.group_id

        group = Group.objects.get(id=group.id)
        assert not group.is_resolved()

        assert send_robust.called

    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_does_not_unresolve_group(self, plugin_is_regression: mock.MagicMock) -> None:
        # N.B. EventManager won't unresolve the group unless event2 has a later
        # timestamp than event1.
        plugin_is_regression.return_value = False

        manager = EventManager(
            make_event(event_id="a" * 32, checksum="a" * 32, timestamp=1403007314)
        )
        with self.tasks():
            manager.normalize()
            event = manager.save(self.project.id)

        group = Group.objects.get(id=event.group_id)
        group.status = GroupStatus.RESOLVED
        group.substatus = None
        group.save()
        assert group.is_resolved()

        manager = EventManager(
            make_event(event_id="b" * 32, checksum="a" * 32, timestamp=1403007315)
        )
        manager.normalize()
        event2 = manager.save(self.project.id)
        assert event.group_id == event2.group_id

        group = Group.objects.get(id=group.id)
        assert group.is_resolved()

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_marks_as_unresolved_with_new_release(
        self,
        plugin_is_regression: mock.MagicMock,
        mock_send_activity_notifications_delay: mock.MagicMock,
    ) -> None:
        plugin_is_regression.return_value = True

        old_release = Release.objects.create(
            version="a",
            organization_id=self.project.organization_id,
            date_added=timezone.now() - timedelta(minutes=30),
        )
        old_release.add_project(self.project)

        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group

        group.update(status=GroupStatus.RESOLVED, substatus=None)

        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": ""},
        )

        manager = EventManager(
            make_event(
                event_id="b" * 32, checksum="a" * 32, timestamp=time(), release=old_release.version
            )
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.RESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data is not None
        assert activity.data["version"] == ""

        assert GroupResolution.objects.filter(group=group).exists()

        manager = EventManager(
            make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.UNRESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data is not None
        assert activity.data["version"] == "b"

        assert not GroupResolution.objects.filter(group=group).exists()

        activity = Activity.objects.get(group=group, type=ActivityType.SET_REGRESSION.value)
        mock_send_activity_notifications_delay.assert_called_once_with(activity.id)

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_that_release_in_latest_activity_prior_to_regression_is_not_overridden(
        self,
        plugin_is_regression: mock.MagicMock,
        mock_send_activity_notifications_delay: mock.MagicMock,
    ) -> None:
        """
        Ensure that when a regression occurs, the release recorded on the latest
        activity prior to that regression is not overridden.
        It should only be overridden if the activity was awaiting the upcoming release.
        """
        plugin_is_regression.return_value = True

        # Create a release and a group associated with it
        old_release = self.create_release(
            version="foobar", date_added=timezone.now() - timedelta(minutes=30)
        )
        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group
        group.update(status=GroupStatus.RESOLVED, substatus=None)

        # Resolve the group in old_release
        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": "foobar"},
        )

        # Create a regression
        manager = EventManager(
            make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.UNRESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data is not None
        assert activity.data["version"] == "foobar"

        regressed_activity = Activity.objects.get(
            group=group, type=ActivityType.SET_REGRESSION.value
        )
        assert regressed_activity.data is not None
        assert regressed_activity.data["version"] == "b"
        assert regressed_activity.data["follows_semver"] is False

        mock_send_activity_notifications_delay.assert_called_once_with(regressed_activity.id)

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_current_release_version_in_latest_activity_prior_to_regression_is_not_overridden(
        self,
        plugin_is_regression: mock.MagicMock,
        mock_send_activity_notifications_delay: mock.MagicMock,
    ) -> None:
        """
        Ensure that when a regression occurs, the release on the latest activity
        prior to that regression is overridden with the release the regression
        occurred in, but the value of `current_release_version` used for semver
        is not lost in the update.
        """
        plugin_is_regression.return_value = True

        # Create a release and a group associated with it
        old_release = self.create_release(
            version="a", date_added=timezone.now() - timedelta(minutes=30)
        )
        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group
        group.update(status=GroupStatus.RESOLVED, substatus=None)

        # Resolve the group in old_release
        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": "", "current_release_version": "pre foobar"},
        )

        # Create a regression
        manager = EventManager(
            make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.UNRESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data is not None
        assert activity.data["version"] == "b"
        assert activity.data["current_release_version"] == "pre foobar"

        regressed_activity = Activity.objects.get(
            group=group, type=ActivityType.SET_REGRESSION.value
        )
        assert regressed_activity.data is not None
        assert regressed_activity.data["version"] == "b"

        mock_send_activity_notifications_delay.assert_called_once_with(regressed_activity.id)

    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_resolved_in_release_regression_activity_follows_semver(
        self, plugin_is_regression: mock.MagicMock
    ) -> None:
        """
        Issue was marked resolved in 1.0.0; the regression occurred in 2.0.0.
        If the project follows semver, the regression activity should have
        `follows_semver` set. We should also record which version the issue was
        resolved in as `resolved_in_version`.
        This allows the UI to say the issue was resolved in 1.0.0, regressed in
        2.0.0, and that the versions were compared using semver.
        """
        plugin_is_regression.return_value = True

        # Create a release and a group associated with it
        old_release = self.create_release(
            version="foo@1.0.0", date_added=timezone.now() - timedelta(minutes=30)
        )
        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group
        group.update(status=GroupStatus.RESOLVED, substatus=None)

        # Resolve the group in old_release
        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": "foo@1.0.0"},
        )

        # Create a regression
        manager = EventManager(
            make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="foo@2.0.0")
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.UNRESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data is not None
        assert activity.data["version"] == "foo@1.0.0"

        regressed_activity = Activity.objects.get(
            group=group, type=ActivityType.SET_REGRESSION.value
        )
        assert regressed_activity.data is not None
        assert regressed_activity.data["version"] == "foo@2.0.0"
        assert regressed_activity.data["follows_semver"] is True
        assert regressed_activity.data["resolved_in_version"] == "foo@1.0.0"

    def test_has_pending_commit_resolution(self) -> None:
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "1.0"
        assert not has_pending_commit_resolution(group)

        # Add a commit with no associated release
        repo = self.create_repo(project=group.project)
        commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="a" * 40
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=commit.id,
            relationship=GroupLink.Relationship.resolves,
        )
        assert has_pending_commit_resolution(group)

    def test_multiple_pending_commit_resolution(self) -> None:
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None

        # Add a few commits with no associated release
        repo = self.create_repo(project=group.project)
        for key in ["a", "b", "c"]:
            commit = Commit.objects.create(
                organization_id=group.project.organization_id,
                repository_id=repo.id,
                key=key * 40,
            )
            GroupLink.objects.create(
                group_id=group.id,
                project_id=group.project_id,
                linked_type=GroupLink.LinkedType.commit,
                linked_id=commit.id,
                relationship=GroupLink.Relationship.resolves,
            )

        pending = has_pending_commit_resolution(group)
        assert pending

        # Most recent commit has been associated with a release
        latest_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="d" * 40
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=latest_commit.id,
            relationship=GroupLink.Relationship.resolves,
        )
        ReleaseCommit.objects.create(
            organization_id=group.project.organization_id,
            release=group.first_release,
            commit=latest_commit,
            order=0,
        )

        pending = has_pending_commit_resolution(group)
        assert pending is False

    def test_has_pending_commit_resolution_issue_regression(self) -> None:
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None
        repo = self.create_repo(project=group.project)

        # commit that resolved the issue is part of a PR, but all commits
        # within the PR are unreleased
        commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="a" * 40
        )
        second_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="b" * 40
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=commit.id,
            relationship=GroupLink.Relationship.resolves,
        )

        pr = PullRequest.objects.create(
            organization_id=group.project.organization_id,
            repository_id=repo.id,
            key="1",
        )

        PullRequestCommit.objects.create(pull_request_id=pr.id, commit_id=commit.id)
        PullRequestCommit.objects.create(pull_request_id=pr.id, commit_id=second_commit.id)

        assert PullRequestCommit.objects.filter(pull_request_id=pr.id, commit_id=commit.id).exists()
        assert PullRequestCommit.objects.filter(
            pull_request_id=pr.id, commit_id=second_commit.id
        ).exists()
        assert not ReleaseCommit.objects.filter(commit__pullrequestcommit__id=commit.id).exists()
        assert not ReleaseCommit.objects.filter(
            commit__pullrequestcommit__id=second_commit.id
        ).exists()

        pending = has_pending_commit_resolution(group)
        assert pending

    def test_has_pending_commit_resolution_issue_regression_released_commits(self) -> None:
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None
        release = self.create_release(project=self.project, version="1.1")
        repo = self.create_repo(project=group.project)

        # commit 1 is part of the PR, it resolves the issue in the commit
        # message, and is unreleased
        commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="a" * 38
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=commit.id,
            relationship=GroupLink.Relationship.resolves,
        )

        # commit 2 is part of the PR, but does not resolve the issue, and is released
        released_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="b" * 38
        )

        # commit 3 is part of the PR, but does not resolve the issue, and is unreleased
        unreleased_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="c" * 38
        )

        pr = PullRequest.objects.create(
            organization_id=group.project.organization_id,
            repository_id=repo.id,
            key="19",
        )

        PullRequestCommit.objects.create(pull_request_id=pr.id, commit_id=commit.id)
        released_pr_commit = PullRequestCommit.objects.create(
            pull_request_id=pr.id, commit_id=released_commit.id
        )
        unreleased_pr_commit = PullRequestCommit.objects.create(
            pull_request_id=pr.id, commit_id=unreleased_commit.id
        )

        ReleaseCommit.objects.create(
            organization_id=group.project.organization_id,
            release=release,
            commit=released_commit,
            order=1,
        )

        assert Commit.objects.all().count() == 3
        assert PullRequestCommit.objects.filter(pull_request_id=pr.id, commit_id=commit.id).exists()
        assert PullRequestCommit.objects.filter(
            pull_request_id=pr.id, commit_id=released_commit.id
        ).exists()
        assert PullRequestCommit.objects.filter(commit__id=unreleased_pr_commit.commit.id).exists()
        assert ReleaseCommit.objects.filter(
            commit__pullrequestcommit__id=released_pr_commit.id
        ).exists()

        pending = has_pending_commit_resolution(group)
        assert pending is False

    @mock.patch("sentry.integrations.example.integration.ExampleIntegration.sync_status_outbound")
    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_marks_as_unresolved_with_new_release_with_integration(
        self,
        plugin_is_regression: mock.MagicMock,
        mock_send_activity_notifications_delay: mock.MagicMock,
        mock_sync_status_outbound: mock.MagicMock,
    ) -> None:
        plugin_is_regression.return_value = True

        old_release = Release.objects.create(
            version="a",
            organization_id=self.project.organization_id,
            date_added=timezone.now() - timedelta(minutes=30),
        )
        old_release.add_project(self.project)

        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group

        org = group.organization

        integration = self.create_integration(
            organization=org,
            external_id="example",
            oi_params={
                "config": {
                    "sync_comments": True,
                    "sync_status_outbound": True,
                    "sync_status_inbound": True,
                    "sync_assignee_outbound": True,
                    "sync_assignee_inbound": True,
                }
            },
            provider="example",
            name="Example",
        )

        external_issue = ExternalIssue.objects.get_or_create(
            organization_id=org.id, integration_id=integration.id, key="APP-%s" % group.id
        )[0]

        GroupLink.objects.get_or_create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.issue,
            linked_id=external_issue.id,
            relationship=GroupLink.Relationship.references,
        )[0]

        group.update(status=GroupStatus.RESOLVED, substatus=None)

        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": ""},
        )

        manager = EventManager(
            make_event(
                event_id="b" * 32, checksum="a" * 32, timestamp=time(), release=old_release.version
            )
        )

        with self.tasks():
            with self.feature({"organizations:integrations-issue-sync": True}):
                event = manager.save(self.project.id)
                assert event.group_id == group.id

                group = Group.objects.get(id=group.id)
                assert group.status == GroupStatus.RESOLVED

                activity = Activity.objects.get(id=activity.id)
                assert activity.data is not None
                assert activity.data["version"] == ""

                assert GroupResolution.objects.filter(group=group).exists()

                manager = EventManager(
                    make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
                )
                event = manager.save(self.project.id)
                assert event.group is not None
                mock_sync_status_outbound.assert_called_once_with(
                    external_issue, False, event.group.project_id
                )
                assert event.group_id == group.id

                group = Group.objects.get(id=group.id)
                assert group.status == GroupStatus.UNRESOLVED

                activity = Activity.objects.get(id=activity.id)
                assert activity.data is not None
                assert activity.data["version"] == "b"

                assert not GroupResolution.objects.filter(group=group).exists()

                activity = Activity.objects.get(
                    group=group, type=ActivityType.SET_REGRESSION.value
                )
                mock_send_activity_notifications_delay.assert_called_once_with(activity.id)

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_does_not_mark_as_unresolved_with_pending_commit(
        self,
        plugin_is_regression: mock.MagicMock,
        mock_send_activity_notifications_delay: mock.MagicMock,
    ) -> None:
        plugin_is_regression.return_value = True

        repo = self.create_repo(project=self.project)
        commit = self.create_commit(repo=repo)

        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
            )
        )
        event = manager.save(self.project.id)
        group = event.group
        assert group is not None

        group.update(status=GroupStatus.RESOLVED, substatus=None)

        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_id=commit.id,
            linked_type=GroupLink.LinkedType.commit,
            relationship=GroupLink.Relationship.resolves,
        )

        manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=time()))
        event = manager.save(self.project.id)
        assert event.group is not None
        assert event.group_id == group.id

        assert Group.objects.get(id=group.id).status == GroupStatus.RESOLVED

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_mark_as_unresolved_with_released_commit(
        self,
        plugin_is_regression: mock.MagicMock,
        mock_send_activity_notifications_delay: mock.MagicMock,
    ) -> None:
        plugin_is_regression.return_value = True

        release = self.create_release(project=self.project)
        repo = self.create_repo(project=self.project)
        commit = self.create_commit(repo=repo, release=release, project=self.project)

        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
            )
        )
        event = manager.save(self.project.id)
        group = event.group
        assert group is not None

        group.update(status=GroupStatus.RESOLVED, substatus=None)

        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_id=commit.id,
            linked_type=GroupLink.LinkedType.commit,
            relationship=GroupLink.Relationship.resolves,
        )

        manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=time()))
        event = manager.save(self.project.id)
        assert event.group is not None
        assert event.group_id == group.id

        assert Group.objects.get(id=group.id).status == GroupStatus.UNRESOLVED

    @mock.patch("sentry.models.Group.is_resolved")
    def test_unresolves_group_with_auto_resolve(self, mock_is_resolved: mock.MagicMock) -> None:
        ts = time() - 100
        mock_is_resolved.return_value = False
        manager = EventManager(make_event(event_id="a" * 32, checksum="a" * 32, timestamp=ts))
        with self.tasks():
            event = manager.save(self.project.id)
        assert event.group is not None

        mock_is_resolved.return_value = True
        manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=ts + 100))
        with self.tasks():
            event2 = manager.save(self.project.id)
        assert event2.group is not None
        assert event.group_id == event2.group_id

        group = Group.objects.get(id=event.group.id)
        assert group.active_at
        assert group.active_at.replace(second=0) == event2.datetime.replace(second=0)
        assert group.active_at.replace(second=0) != event.datetime.replace(second=0)

    def test_invalid_transaction(self) -> None:
        dict_input = {"messages": "foo"}
        manager = EventManager(make_event(transaction=dict_input))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.transaction is None

    def test_transaction_as_culprit(self) -> None:
        manager = EventManager(make_event(transaction="foobar"))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.transaction == "foobar"
        assert event.culprit == "foobar"

    def test_culprit_is_not_transaction(self) -> None:
        manager = EventManager(make_event(culprit="foobar"))
        manager.normalize()
        event1 = manager.save(self.project.id)
        assert event1.transaction is None
        assert event1.culprit == "foobar"

    def test_culprit_after_stacktrace_processing(self) -> None:
        from sentry.grouping.enhancer import Enhancements

        enhancement = Enhancements.from_config_string(
            """
            function:in_app_function +app
            function:not_in_app_function -app
            """,
        )

        manager = EventManager(
            make_event(
                platform="native",
                exception={
                    "values": [
                        {
                            "type": "Hello",
                            "stacktrace": {
                                "frames": [
                                    {
                                        "function": "not_in_app_function",
                                    },
                                    {
                                        "function": "in_app_function",
                                    },
                                ]
                            },
                        }
                    ]
                },
            )
        )
        manager.normalize()

        manager.get_data()["grouping_config"] = {
            "enhancements": enhancement.dumps(),
            "id": "legacy:2019-03-12",
        }
        event1 = manager.save(self.project.id)
        assert event1.transaction is None
        assert event1.culprit == "in_app_function"

    def test_inferred_culprit_from_empty_stacktrace(self) -> None:
        manager = EventManager(make_event(stacktrace={"frames": []}))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.culprit == ""

    def test_transaction_and_culprit(self) -> None:
        manager = EventManager(make_event(transaction="foobar", culprit="baz"))
        manager.normalize()
        event1 = manager.save(self.project.id)
        assert event1.transaction == "foobar"
        assert event1.culprit == "baz"

    def test_release_with_empty_version(self) -> None:
        cases = ["", " ", "\t", "\n"]
        for case in cases:
            event = self.make_release_event(case, self.project.id)
            assert event.group is not None
            assert not event.group.first_release
            assert Release.objects.filter(projects__in=[self.project.id]).count() == 0
            assert Release.objects.filter(organization_id=self.project.organization_id).count() == 0

    def test_first_release(self) -> None:
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "1.0"

        event = self.make_release_event("2.0", project_id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "1.0"

    def test_release_project_slug(self) -> None:
        project = self.create_project(name="foo")
        release = Release.objects.create(version="foo-1.0", organization=project.organization)
        release.add_project(project)

        event = self.make_release_event("1.0", project.id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "foo-1.0"
        release_tag = [v for k, v in event.tags if k == "sentry:release"][0]
        assert release_tag == "foo-1.0"

        event = self.make_release_event("2.0", project.id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "foo-1.0"

    def test_release_project_slug_long(self) -> None:
        project = self.create_project(name="foo")
        partial_version_len = MAX_VERSION_LENGTH - 4
        release = Release.objects.create(
            version="foo-{}".format("a" * partial_version_len), organization=project.organization
        )
        release.add_project(project)

        event = self.make_release_event("a" * partial_version_len, project.id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "foo-{}".format("a" * partial_version_len)
        release_tag = [v for k, v in event.tags if k == "sentry:release"][0]
        assert release_tag == "foo-{}".format("a" * partial_version_len)

    def test_group_release_no_env(self) -> None:
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        assert event.group_id is not None

        release = Release.objects.get(version="1.0", projects=event.project_id)
        assert GroupRelease.objects.filter(
            release_id=release.id, group_id=event.group_id, environment=""
        ).exists()

        # ensure we're not erroring on second creation
        event = self.make_release_event("1.0", project_id)

    def test_group_release_with_env(self) -> None:
        manager = EventManager(make_event(release="1.0", environment="prod", event_id="a" * 32))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.group_id is not None

        release = Release.objects.get(version="1.0", projects=event.project_id)
        assert GroupRelease.objects.filter(
            release_id=release.id, group_id=event.group_id, environment="prod"
        ).exists()

        manager = EventManager(make_event(release="1.0", environment="staging", event_id="b" * 32))
        event = manager.save(self.project.id)

        release = Release.objects.get(version="1.0", projects=event.project_id)
        assert event.group_id is not None
        assert GroupRelease.objects.filter(
            release_id=release.id, group_id=event.group_id, environment="staging"
        ).exists()

    def test_tsdb(self) -> None:
        project = self.project
        manager = EventManager(
            make_event(
                fingerprint=["totally unique super duper fingerprint"],
                environment="totally unique super duper environment",
            )
        )
        event = manager.save(project.id)
        assert event.group is not None

        def query(model: TSDBModel, key: int, **kwargs: Any) -> int:
            return tsdb.backend.get_sums(
                model,
                [key],
                event.datetime,
                event.datetime,
                tenant_ids={"organization_id": 123, "referrer": "r"},
                **kwargs,
            )[key]

        assert query(TSDBModel.project, project.id) == 1
        assert query(TSDBModel.group, event.group.id) == 1

        environment_id = Environment.get_for_organization_id(
            event.project.organization_id, "totally unique super duper environment"
        ).id
        assert query(TSDBModel.project, project.id, environment_id=environment_id) == 1
        assert query(TSDBModel.group, event.group.id, environment_id=environment_id) == 1

    @pytest.mark.xfail
    def test_record_frequencies(self) -> None:
        project = self.project
        manager = EventManager(make_event())
        event = manager.save(project.id)

        assert tsdb.backend.get_most_frequent(
            TSDBModel.frequent_issues_by_project, (event.project.id,), event.datetime
        ) == {event.project.id: [(event.group_id, 1.0)]}
  1004. def test_event_user(self) -> None:
  1005. event_id = uuid.uuid4().hex
  1006. manager = EventManager(
  1007. make_event(
  1008. event_id=event_id, environment="totally unique environment", **{"user": {"id": "1"}}
  1009. )
  1010. )
  1011. manager.normalize()
  1012. with self.tasks():
  1013. event = manager.save(self.project.id)
  1014. assert event.group is not None
  1015. environment_id = Environment.get_for_organization_id(
  1016. event.project.organization_id, "totally unique environment"
  1017. ).id
  1018. assert tsdb.backend.get_distinct_counts_totals(
  1019. TSDBModel.users_affected_by_group,
  1020. (event.group.id,),
  1021. event.datetime,
  1022. event.datetime,
  1023. tenant_ids={"referrer": "r", "organization_id": 123},
  1024. ) == {event.group.id: 1}
  1025. assert tsdb.backend.get_distinct_counts_totals(
  1026. TSDBModel.users_affected_by_project,
  1027. (event.project.id,),
  1028. event.datetime,
  1029. event.datetime,
  1030. tenant_ids={"organization_id": 123, "referrer": "r"},
  1031. ) == {event.project.id: 1}
  1032. assert tsdb.backend.get_distinct_counts_totals(
  1033. TSDBModel.users_affected_by_group,
  1034. (event.group.id,),
  1035. event.datetime,
  1036. event.datetime,
  1037. environment_id=environment_id,
  1038. tenant_ids={"organization_id": 123, "referrer": "r"},
  1039. ) == {event.group.id: 1}
  1040. assert tsdb.backend.get_distinct_counts_totals(
  1041. TSDBModel.users_affected_by_project,
  1042. (event.project.id,),
  1043. event.datetime,
  1044. event.datetime,
  1045. environment_id=environment_id,
  1046. tenant_ids={"organization_id": 123, "referrer": "r"},
  1047. ) == {event.project.id: 1}
  1048. saved_event = eventstore.backend.get_event_by_id(self.project.id, event_id)
  1049. euser = EventUser.from_event(saved_event)
  1050. assert event.get_tag("sentry:user") == euser.tag_value
  1051. # clear the cache otherwise the cached EventUser from prev
  1052. # manager.save() will be used instead of jane
  1053. cache.clear()
  1054. # ensure event user is mapped to tags in second attempt
  1055. event_id_2 = uuid.uuid4().hex
  1056. manager = EventManager(
  1057. make_event(event_id=event_id_2, **{"user": {"id": "1", "name": "jane"}})
  1058. )
  1059. manager.normalize()
  1060. with self.tasks():
  1061. manager.save(self.project.id)
  1062. saved_event = eventstore.backend.get_event_by_id(self.project.id, event_id_2)
  1063. euser = EventUser.from_event(saved_event)
  1064. assert event.get_tag("sentry:user") == euser.tag_value
  1065. assert euser.name == "jane"
  1066. assert euser.user_ident == "1"
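
    # Note (assumption, for illustration only): EventUser.tag_value serializes the
    # most specific identifier available, e.g. a user of {"id": "1"} would become
    # the tag value "id:1". The asserts above therefore tie the `sentry:user` tag
    # to the persisted EventUser rather than to any single raw attribute.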
    def test_event_user_invalid_ip(self) -> None:
        event_id = uuid.uuid4().hex
        manager = EventManager(
            make_event(
                event_id=event_id, environment="totally unique environment", **{"user": {"id": "1"}}
            )
        )
        manager.normalize()

        # This can happen as part of PII stripping, which happens after normalization
        manager._data["user"]["ip_address"] = "[ip]"

        with self.tasks():
            manager.save(self.project.id)

        saved_event = eventstore.backend.get_event_by_id(self.project.id, event_id)
        euser = EventUser.from_event(saved_event)

        assert euser.ip_address is None

    def test_event_user_unicode_identifier(self) -> None:
        event_id = uuid.uuid4().hex
        manager = EventManager(make_event(event_id=event_id, **{"user": {"username": "foô"}}))
        manager.normalize()
        with self.tasks():
            manager.save(self.project.id)

        saved_event = eventstore.backend.get_event_by_id(self.project.id, event_id)
        euser = EventUser.from_event(saved_event)
        assert euser.username == "foô"

    def test_environment(self) -> None:
        manager = EventManager(make_event(**{"environment": "beta"}))
        manager.normalize()
        event = manager.save(self.project.id)

        assert dict(event.tags).get("environment") == "beta"

    def test_invalid_environment(self) -> None:
        manager = EventManager(make_event(**{"environment": "bad/name"}))
        manager.normalize()
        event = manager.save(self.project.id)

        assert dict(event.tags).get("environment") is None

    def test_invalid_tags(self) -> None:
        manager = EventManager(make_event(**{"tags": [42]}))
        manager.normalize()
        assert None in manager.get_data().get("tags", [])
        assert 42 not in manager.get_data().get("tags", [])
        event = manager.save(self.project.id)
        assert 42 not in event.tags
        assert None not in event.tags

    @mock.patch("sentry.event_manager.eventstream.backend.insert")
    def test_group_environment(self, eventstream_insert: mock.MagicMock) -> None:
        release_version = "1.0"

        def save_event() -> Event:
            manager = EventManager(
                make_event(
                    **{
                        "message": "foo",
                        "event_id": uuid.uuid1().hex,
                        "environment": "beta",
                        "release": release_version,
                    }
                )
            )
            manager.normalize()
            return manager.save(self.project.id)

        event = save_event()
        assert event.group_id is not None

        # Ensure the `GroupEnvironment` record was created.
        instance = GroupEnvironment.objects.get(
            group_id=event.group_id,
            environment_id=Environment.objects.get(
                organization_id=self.project.organization_id, name=event.get_tag("environment")
            ).id,
        )

        assert Release.objects.get(id=instance.first_release_id).version == release_version

        group_states1 = {
            "is_new": True,
            "is_regression": False,
            "is_new_group_environment": True,
        }

        # Ensure that the first event in the (group, environment) pair is
        # marked as being part of a new environment.
        assert event.group is not None
        eventstream_insert.assert_called_with(
            event=event,
            **group_states1,
            primary_hash="acbd18db4cc2f85cedef654fccc4a4d8",
            skip_consume=False,
            received_timestamp=event.data["received"],
            group_states=[{"id": event.group.id, **group_states1}],
        )

        event = save_event()

        group_states2 = {
            "is_new": False,
            "is_regression": False,
            "is_new_group_environment": False,
        }

        # Ensure that the next event in the (group, environment) pair is *not*
        # marked as being part of a new environment.
        assert event.group is not None
        eventstream_insert.assert_called_with(
            event=event,
            **group_states2,
            primary_hash="acbd18db4cc2f85cedef654fccc4a4d8",
            skip_consume=False,
            received_timestamp=event.data["received"],
            group_states=[{"id": event.group.id, **group_states2}],
        )

    def test_default_event_type(self) -> None:
        manager = EventManager(make_event(message="foo bar"))
        manager.normalize()
        data = manager.get_data()
        assert data["type"] == "default"
        event = manager.save(self.project.id)
        group = event.group
        assert group is not None
        assert group.data["type"] == "default"
        assert group.data["metadata"]["title"] == "foo bar"

    def test_message_event_type(self) -> None:
        manager = EventManager(
            make_event(
                **{
                    "message": "",
                    "logentry": {"formatted": "foo bar", "message": "foo %s", "params": ["bar"]},
                }
            )
        )
        manager.normalize()
        data = manager.get_data()
        assert data["type"] == "default"
        event = manager.save(self.project.id)
        group = event.group
        assert group is not None
        assert group.data["type"] == "default"
        assert group.data["metadata"]["title"] == "foo bar"

    def test_error_event_type(self) -> None:
        manager = EventManager(
            make_event(**{"exception": {"values": [{"type": "Foo", "value": "bar"}]}})
        )
        manager.normalize()
        data = manager.get_data()
        assert data["type"] == "error"
        event = manager.save(self.project.id)
        group = event.group
        assert group is not None
        assert group.data.get("type") == "error"
        assert group.data.get("metadata") == {
            "type": "Foo",
            "value": "bar",
            "initial_priority": PriorityLevel.HIGH,
            "display_title_with_tree_label": False,
        }

    def test_csp_event_type(self) -> None:
        manager = EventManager(
            make_event(
                **{
                    "csp": {
                        "effective_directive": "script-src",
                        "blocked_uri": "http://example.com",
                    },
                    # this is normally normalized in relay as part of ingest
                    "logentry": {"message": "Blocked 'script' from 'example.com'"},
                }
            )
        )
        manager.normalize()
        data = manager.get_data()
        assert data["type"] == "csp"
        event = manager.save(self.project.id)
        group = event.group
        assert group is not None
        assert group.data.get("type") == "csp"
        assert group.data.get("metadata") == {
            "directive": "script-src",
            "initial_priority": PriorityLevel.HIGH,
            "uri": "example.com",
            "message": "Blocked 'script' from 'example.com'",
        }
        assert group.title == "Blocked 'script' from 'example.com'"

    def test_transaction_event_type(self) -> None:
        manager = EventManager(
            make_event(
                **{
                    "transaction": "wait",
                    "contexts": {
                        "trace": {
                            "parent_span_id": "bce14471e0e9654d",
                            "op": "foobar",
                            "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                            "span_id": "bf5be759039ede9a",
                        }
                    },
                    "spans": [],
                    "timestamp": "2019-06-14T14:01:40Z",
                    "start_timestamp": "2019-06-14T14:01:40Z",
                    "type": "transaction",
                }
            )
        )
        manager.normalize()
        data = manager.get_data()
        assert data["type"] == "transaction"

    def test_transaction_event_span_grouping(self) -> None:
        manager = EventManager(
            make_event(
                **{
                    "transaction": "wait",
                    "contexts": {
                        "trace": {
                            "parent_span_id": "bce14471e0e9654d",
                            "op": "foobar",
                            "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                            "span_id": "bf5be759039ede9a",
                        }
                    },
                    "spans": [
                        {
                            "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                            "parent_span_id": "bf5be759039ede9a",
                            "span_id": "a" * 16,
                            "start_timestamp": 0,
                            "timestamp": 1,
                            "same_process_as_parent": True,
                            "op": "default",
                            "description": "span a",
                        },
                        {
                            "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                            "parent_span_id": "bf5be759039ede9a",
                            "span_id": "b" * 16,
                            "start_timestamp": 0,
                            "timestamp": 1,
                            "same_process_as_parent": True,
                            "op": "default",
                            "description": "span a",
                        },
                        {
                            "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                            "parent_span_id": "bf5be759039ede9a",
                            "span_id": "c" * 16,
                            "start_timestamp": 0,
                            "timestamp": 1,
                            "same_process_as_parent": True,
                            "op": "default",
                            "description": "span b",
                        },
                    ],
                    "timestamp": "2019-06-14T14:01:40Z",
                    "start_timestamp": "2019-06-14T14:01:40Z",
                    "type": "transaction",
                }
            )
        )
        manager.normalize()
        event = manager.save(self.project.id)
        data = event.data
        assert data["type"] == "transaction"
        assert data["span_grouping_config"]["id"] == "default:2022-10-27"
        spans = [{"hash": span["hash"]} for span in data["spans"]]
        # the basic strategy is to simply use the description
        assert spans == [{"hash": hash_values([span["description"]])} for span in data["spans"]]
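
    # Illustrative sketch (assumption, not part of the suite): under the default
    # strategy, span hashes derive from the description alone, so the two "span a"
    # spans above collide while "span b" gets its own hash:
    #
    #     hash_values(["span a"]) == hash_values(["span a"])  # grouped together
    #     hash_values(["span a"]) != hash_values(["span b"])  # grouped apart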
    def test_sdk(self) -> None:
        manager = EventManager(make_event(**{"sdk": {"name": "sentry-unity", "version": "1.0"}}))
        manager.normalize()
        event = manager.save(self.project.id)

        assert event.data["sdk"] == {
            "name": "sentry-unity",
            "version": "1.0",
            "integrations": None,
            "packages": None,
        }

    def test_sdk_group_tagging(self) -> None:
        manager = EventManager(
            make_event(**{"sdk": {"name": "sentry-native-unity", "version": "1.0"}})
        )
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.group is not None

        sdk_metadata = event.group.data["metadata"]["sdk"]
        assert sdk_metadata["name"] == "sentry-native-unity"
        assert sdk_metadata["name_normalized"] == "sentry.native.unity"
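
    # Note (assumption, for illustration only): the normalized SDK name appears to
    # map the hyphenated wire name onto the dotted package family, e.g.
    # "sentry-native-unity" -> "sentry.native.unity", so groups can be keyed by
    # SDK family regardless of how the client spells its own name.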
    def test_no_message(self) -> None:
        # test that the message is handled gracefully
        manager = EventManager(
            make_event(**{"message": None, "logentry": {"message": "hello world"}})
        )
        manager.normalize()
        event = manager.save(self.project.id)

        assert event.message == "hello world"

    def test_search_message_simple(self) -> None:
        manager = EventManager(
            make_event(
                **{
                    "message": "test",
                    "transaction": "sentry.tasks.process",
                }
            )
        )
        manager.normalize()
        event = manager.save(self.project.id)
        search_message = event.search_message

        assert "test" in search_message
        assert "sentry.tasks.process" in search_message

    def test_search_message_prefers_log_entry_message(self) -> None:
        manager = EventManager(
            make_event(
                **{
                    "message": "test",
                    "logentry": {"message": "hello world"},
                    "transaction": "sentry.tasks.process",
                }
            )
        )
        manager.normalize()
        event = manager.save(self.project.id)
        search_message = event.search_message

        assert "test" not in search_message
        assert "hello world" in search_message
        assert "sentry.tasks.process" in search_message

    def test_search_message_skips_requested_keys(self) -> None:
        from sentry.eventstore import models

        with patch.object(models, "SEARCH_MESSAGE_SKIPPED_KEYS", ("dogs",)):
            manager = EventManager(
                make_event(
                    **{
                        "logentry": {"message": "hello world"},
                        "transaction": "sentry.tasks.process",
                    }
                )
            )
            manager.normalize()
            # Normalizing nukes any metadata we might pass when creating the event and event
            # manager, so we have to add it in here
            manager._data["metadata"] = {"dogs": "are great", "maisey": "silly", "charlie": "goofy"}
            event = manager.save(
                self.project.id,
            )
            search_message = event.search_message

            assert "hello world" in search_message
            assert "sentry.tasks.process" in search_message
            assert "silly" in search_message
            assert "goofy" in search_message
            assert "are great" not in search_message  # "dogs" key is skipped

    def test_search_message_skips_bools_and_numbers(self) -> None:
        from sentry.eventstore import models

        with patch.object(models, "SEARCH_MESSAGE_SKIPPED_KEYS", ("dogs",)):
            manager = EventManager(
                make_event(
                    **{
                        "logentry": {"message": "hello world"},
                        "transaction": "sentry.tasks.process",
                    }
                )
            )
            manager.normalize()
            # Normalizing nukes any metadata we might pass when creating the event and event
            # manager, so we have to add it in here
            manager._data["metadata"] = {
                "dogs are great": True,
                "maisey": 12312012,
                "charlie": 1121.2012,
                "adopt": "don't shop",
            }
            event = manager.save(
                self.project.id,
            )
            search_message = event.search_message

            assert "hello world" in search_message
            assert "sentry.tasks.process" in search_message
            assert "True" not in search_message  # skipped because it's a boolean
            assert "12312012" not in search_message  # skipped because it's an int
            assert "1121.2012" not in search_message  # skipped because it's a float
            assert "don't shop" in search_message
    def test_stringified_message(self) -> None:
        manager = EventManager(make_event(**{"message": 1234}))
        manager.normalize()
        event = manager.save(self.project.id)

        assert event.data["logentry"] == {"formatted": "1234", "message": None, "params": None}

    def test_bad_message(self) -> None:
        # test that non-string messages are coerced to strings rather than dropped
        manager = EventManager(make_event(**{"message": ["asdf"]}))
        manager.normalize()
        event = manager.save(self.project.id)

        assert event.message == '["asdf"]'
        assert "logentry" in event.data

    def test_message_attribute_goes_to_interface(self) -> None:
        manager = EventManager(make_event(**{"message": "hello world"}))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.data["logentry"] == {
            "formatted": "hello world",
            "message": None,
            "params": None,
        }

    def test_message_attribute_shadowing(self) -> None:
        # Logentry shadows the legacy message attribute.
        manager = EventManager(
            make_event(**{"message": "world hello", "logentry": {"message": "hello world"}})
        )
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.data["logentry"] == {
            "formatted": "hello world",
            "message": None,
            "params": None,
        }

    def test_message_attribute_interface_both_strings(self) -> None:
        manager = EventManager(
            make_event(**{"logentry": "a plain string", "message": "another string"})
        )
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.data["logentry"] == {
            "formatted": "a plain string",
            "message": None,
            "params": None,
        }

    def test_throws_when_matches_discarded_hash(self) -> None:
        manager = EventManager(make_event(message="foo", event_id="a" * 32, fingerprint=["a" * 32]))
        with self.tasks():
            event = manager.save(self.project.id)

        group = Group.objects.get(id=event.group_id)
        tombstone = GroupTombstone.objects.create(
            project_id=group.project_id,
            level=group.level,
            message=group.message,
            culprit=group.culprit,
            data=group.data,
            previous_group_id=group.id,
        )
        GroupHash.objects.filter(group=group).update(group=None, group_tombstone_id=tombstone.id)

        manager = EventManager(
            make_event(message="foo", event_id="b" * 32, fingerprint=["a" * 32]),
            project=self.project,
        )
        manager.normalize()

        a1 = CachedAttachment(name="a1", data=b"hello")
        a2 = CachedAttachment(name="a2", data=b"world")
        cache_key = cache_key_for_event(manager.get_data())
        attachment_cache.set(cache_key, attachments=[a1, a2])

        from sentry.utils.outcomes import track_outcome

        mock_track_outcome = mock.Mock(wraps=track_outcome)
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:event-attachments"):
                with self.tasks():
                    with pytest.raises(HashDiscarded):
                        event = manager.save(
                            self.project.id, cache_key=cache_key, has_attachments=True
                        )

        assert mock_track_outcome.call_count == 3

        for o in mock_track_outcome.mock_calls:
            assert o.kwargs["outcome"] == Outcome.FILTERED
            assert o.kwargs["reason"] == FilterStatKeys.DISCARDED_HASH

        o = mock_track_outcome.mock_calls[0]
        assert o.kwargs["category"] == DataCategory.ERROR

        for o in mock_track_outcome.mock_calls[1:]:
            assert o.kwargs["category"] == DataCategory.ATTACHMENT
            assert o.kwargs["quantity"] == 5
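
    # Note (assumption, for illustration only): the attachment outcomes above carry
    # quantity == 5 because each cached payload is five bytes long (b"hello",
    # b"world") — attachment outcomes appear to be counted in bytes, while the
    # error outcome counts the single event.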
    def test_honors_crash_report_limit(self) -> None:
        from sentry.utils.outcomes import track_outcome

        mock_track_outcome = mock.Mock(wraps=track_outcome)

        # Allow exactly one crash report
        self.project.update_option("sentry:store_crash_reports", 1)

        manager = EventManager(
            make_event(message="foo", event_id="a" * 32, fingerprint=["a" * 32]),
            project=self.project,
        )
        manager.normalize()

        a1 = CachedAttachment(name="a1", data=b"hello", type="event.minidump")
        a2 = CachedAttachment(name="a2", data=b"world")
        cache_key = cache_key_for_event(manager.get_data())
        attachment_cache.set(cache_key, attachments=[a1, a2])

        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:event-attachments"):
                with self.tasks():
                    manager.save(self.project.id, cache_key=cache_key, has_attachments=True)

        # The first minidump should be accepted, since the limit is 1
        assert mock_track_outcome.call_count == 3
        for o in mock_track_outcome.mock_calls:
            assert o.kwargs["outcome"] == Outcome.ACCEPTED

        mock_track_outcome.reset_mock()

        manager = EventManager(
            make_event(message="foo", event_id="b" * 32, fingerprint=["a" * 32]),
            project=self.project,
        )
        manager.normalize()

        cache_key = cache_key_for_event(manager.get_data())
        attachment_cache.set(cache_key, attachments=[a1, a2])

        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:event-attachments"):
                with self.tasks():
                    event = manager.save(self.project.id, cache_key=cache_key, has_attachments=True)

        assert event.data["metadata"]["stripped_crash"] is True

        assert mock_track_outcome.call_count == 3

        o = mock_track_outcome.mock_calls[0]
        assert o.kwargs["outcome"] == Outcome.FILTERED
        assert o.kwargs["category"] == DataCategory.ATTACHMENT
        assert o.kwargs["reason"] == FilterStatKeys.CRASH_REPORT_LIMIT

        for o in mock_track_outcome.mock_calls[1:]:
            assert o.kwargs["outcome"] == Outcome.ACCEPTED

    def test_event_accepted_outcome(self) -> None:
        manager = EventManager(make_event(message="foo"))
        manager.normalize()

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            manager.save(self.project.id)

        assert_mock_called_once_with_partial(
            mock_track_outcome, outcome=Outcome.ACCEPTED, category=DataCategory.ERROR
        )

    def test_attachment_accepted_outcomes(self) -> None:
        manager = EventManager(make_event(message="foo"), project=self.project)
        manager.normalize()

        a1 = CachedAttachment(name="a1", data=b"hello")
        a2 = CachedAttachment(name="a2", data=b"limited", rate_limited=True)
        a3 = CachedAttachment(name="a3", data=b"world")

        cache_key = cache_key_for_event(manager.get_data())
        attachment_cache.set(cache_key, attachments=[a1, a2, a3])

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:event-attachments"):
                manager.save(self.project.id, cache_key=cache_key, has_attachments=True)

        assert mock_track_outcome.call_count == 3

        for o in mock_track_outcome.mock_calls:
            assert o.kwargs["outcome"] == Outcome.ACCEPTED

        for o in mock_track_outcome.mock_calls[:2]:
            assert o.kwargs["category"] == DataCategory.ATTACHMENT
            assert o.kwargs["quantity"] == 5

        final = mock_track_outcome.mock_calls[2]
        assert final.kwargs["category"] == DataCategory.ERROR

    def test_attachment_filtered_outcomes(self) -> None:
        manager = EventManager(make_event(message="foo"), project=self.project)
        manager.normalize()

        # Disable storing all crash reports, which will drop the minidump but save the other
        a1 = CachedAttachment(name="a1", data=b"minidump", type="event.minidump")
        a2 = CachedAttachment(name="a2", data=b"limited", rate_limited=True)
        a3 = CachedAttachment(name="a3", data=b"world")

        cache_key = cache_key_for_event(manager.get_data())
        attachment_cache.set(cache_key, attachments=[a1, a2, a3])

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:event-attachments"):
                manager.save(self.project.id, cache_key=cache_key, has_attachments=True)

        assert mock_track_outcome.call_count == 3

        # First outcome is the rejection of the minidump
        o = mock_track_outcome.mock_calls[0]
        assert o.kwargs["outcome"] == Outcome.FILTERED
        assert o.kwargs["category"] == DataCategory.ATTACHMENT
        assert o.kwargs["reason"] == FilterStatKeys.CRASH_REPORT_LIMIT

        # Second outcome is acceptance of the "a3" attachment
        o = mock_track_outcome.mock_calls[1]
        assert o.kwargs["outcome"] == Outcome.ACCEPTED
        assert o.kwargs["category"] == DataCategory.ATTACHMENT
        assert o.kwargs["quantity"] == 5

        # Last outcome is the event
        o = mock_track_outcome.mock_calls[2]
        assert o.kwargs["outcome"] == Outcome.ACCEPTED
        assert o.kwargs["category"] == DataCategory.ERROR

    def test_transaction_outcome_accepted(self) -> None:
        """
        Without metrics extraction, we count the number of accepted transaction
        events in the TRANSACTION data category. This maintains compatibility
        with Sentry installations that do not have a metrics pipeline.
        """
        manager = EventManager(
            make_event(
                transaction="wait",
                contexts={
                    "trace": {
                        "parent_span_id": "bce14471e0e9654d",
                        "op": "foobar",
                        "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                        "span_id": "bf5be759039ede9a",
                    }
                },
                spans=[],
                timestamp=iso_format(before_now(minutes=5)),
                start_timestamp=iso_format(before_now(minutes=5)),
                type="transaction",
                platform="python",
            )
        )
        manager.normalize()

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature({"organizations:transaction-metrics-extraction": False}):
                manager.save(self.project.id)

        assert_mock_called_once_with_partial(
            mock_track_outcome, outcome=Outcome.ACCEPTED, category=DataCategory.TRANSACTION
        )

    def test_transaction_indexed_outcome_accepted(self) -> None:
        """
        With metrics extraction, we count the number of accepted transaction
        events in the TRANSACTION_INDEXED data category. The TRANSACTION data
        category contains the number of metrics from
        ``billing_metrics_consumer``.
        """
        manager = EventManager(
            make_event(
                transaction="wait",
                contexts={
                    "trace": {
                        "parent_span_id": "bce14471e0e9654d",
                        "op": "foobar",
                        "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                        "span_id": "bf5be759039ede9a",
                    }
                },
                spans=[],
                timestamp=iso_format(before_now(minutes=5)),
                start_timestamp=iso_format(before_now(minutes=5)),
                type="transaction",
                platform="python",
            )
        )
        manager.normalize()

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:transaction-metrics-extraction"):
                manager.save(self.project.id)

        assert_mock_called_once_with_partial(
            mock_track_outcome, outcome=Outcome.ACCEPTED, category=DataCategory.TRANSACTION_INDEXED
        )

    def test_checksum_rehashed(self) -> None:
        checksum = "invalid checksum hash"
        manager = EventManager(make_event(**{"checksum": checksum}))
        manager.normalize()
        event = manager.save(self.project.id)

        hashes = [gh.hash for gh in GroupHash.objects.filter(group=event.group)]
        assert sorted(hashes) == sorted([hash_from_values(checksum), checksum])
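
    # Illustrative sketch (assumption, not part of the suite): a checksum that is
    # not already a valid 32-character hex digest appears to be rehashed into one
    # via hash_from_values, while the raw value is also kept for lookups:
    #
    #     hash_from_values("invalid checksum hash")  # -> 32-char hex digest
    #     # both the derived digest and the raw checksum end up as GroupHash rows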
    def test_legacy_attributes_moved(self) -> None:
        event_params = make_event(
            release="my-release",
            environment="my-environment",
            site="whatever",
            server_name="foo.com",
            event_id=uuid.uuid1().hex,
        )
        manager = EventManager(event_params)
        event = manager.save(self.project.id)

        # release and environment stay toplevel
        assert event.data["release"] == "my-release"
        assert event.data["environment"] == "my-environment"

        # site is a legacy attribute that is just a tag
        assert event.data.get("site") is None
        tags = dict(event.tags)
        assert tags["site"] == "whatever"
        assert event.data.get("server_name") is None
        tags = dict(event.tags)
        assert tags["server_name"] == "foo.com"

    @freeze_time()
    def test_save_issueless_event(self) -> None:
        manager = EventManager(
            make_event(
                transaction="wait",
                contexts={
                    "trace": {
                        "parent_span_id": "bce14471e0e9654d",
                        "op": "foobar",
                        "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                        "span_id": "bf5be759039ede9a",
                    }
                },
                spans=[],
                timestamp=iso_format(before_now(minutes=5)),
                start_timestamp=iso_format(before_now(minutes=5)),
                type="transaction",
                platform="python",
            )
        )

        event = manager.save(self.project.id)

        assert event.group is None
        assert (
            tsdb.backend.get_sums(
                TSDBModel.project,
                [self.project.id],
                event.datetime,
                event.datetime,
                tenant_ids={"organization_id": 123, "referrer": "r"},
            )[self.project.id]
            == 0
        )

    def test_category_match_in_app(self) -> None:
        """
        Regression test to ensure that grouping in-app enhancements work in
        principle.
        """
        from sentry.grouping.enhancer import Enhancements

        enhancement = Enhancements.from_config_string(
            """
            function:foo category=bar
            function:foo2 category=bar
            category:bar -app
            """,
        )

        event_params = make_event(
            platform="native",
            exception={
                "values": [
                    {
                        "type": "Hello",
                        "stacktrace": {
                            "frames": [
                                {
                                    "function": "foo",
                                    "in_app": True,
                                },
                                {"function": "bar"},
                            ]
                        },
                    }
                ]
            },
        )

        manager = EventManager(event_params)
        manager.normalize()
        manager.get_data()["grouping_config"] = {
            "enhancements": enhancement.dumps(),
            "id": "mobile:2021-02-12",
        }
        event1 = manager.save(self.project.id)
        assert event1.data["exception"]["values"][0]["stacktrace"]["frames"][0]["in_app"] is False

        event_params = make_event(
            platform="native",
            exception={
                "values": [
                    {
                        "type": "Hello",
                        "stacktrace": {
                            "frames": [
                                {
                                    "function": "foo2",
                                    "in_app": True,
                                },
                                {"function": "bar"},
                            ]
                        },
                    }
                ]
            },
        )

        manager = EventManager(event_params)
        manager.normalize()
        manager.get_data()["grouping_config"] = {
            "enhancements": enhancement.dumps(),
            "id": "mobile:2021-02-12",
        }
        event2 = manager.save(self.project.id)
        assert event2.data["exception"]["values"][0]["stacktrace"]["frames"][0]["in_app"] is False
        assert event1.group_id == event2.group_id
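
    # Note on the enhancement DSL used above (assumption, for illustration only):
    # each rule reads "<matcher> <action>", so
    #
    #     function:foo category=bar   # tag frames in function foo with category bar
    #     category:bar -app           # then strip the in-app flag from that category
    #
    # which is why both events end up with in_app=False and land in the same group.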
    def test_category_match_group(self) -> None:
        """
        Regression test to ensure categories are applied consistently and don't
        produce hash mismatches.
        """
        from sentry.grouping.enhancer import Enhancements

        enhancement = Enhancements.from_config_string(
            """
            function:foo category=foo_like
            category:foo_like -group
            """,
        )

        event_params = make_event(
            platform="native",
            exception={
                "values": [
                    {
                        "type": "Hello",
                        "stacktrace": {
                            "frames": [
                                {
                                    "function": "foo",
                                },
                                {
                                    "function": "bar",
                                },
                            ]
                        },
                    }
                ]
            },
        )

        manager = EventManager(event_params)
        manager.normalize()

        grouping_config: GroupingConfig = {
            "enhancements": enhancement.dumps(),
            "id": "mobile:2021-02-12",
        }

        manager.get_data()["grouping_config"] = grouping_config
        event1 = manager.save(self.project.id)

        event2 = Event(event1.project_id, event1.event_id, data=event1.data)

        assert (
            event1.get_hashes().hashes
            == event2.get_hashes(load_grouping_config(grouping_config)).hashes
        )

    def test_write_none_tree_labels(self) -> None:
        """Write tree labels even if None"""
        event_params = make_event(
            platform="native",
            exception={
                "values": [
                    {
                        "type": "Hello",
                        "stacktrace": {
                            "frames": [
                                {
                                    "function": "<redacted>",
                                },
                                {
                                    "function": "<redacted>",
                                },
                            ]
                        },
                    }
                ]
            },
        )

        manager = EventManager(event_params)
        manager.normalize()
        manager.get_data()["grouping_config"] = {
            "id": "mobile:2021-02-12",
        }
        event = manager.save(self.project.id)

        assert event.data["hierarchical_tree_labels"] == [None]

    def test_synthetic_exception_detection(self) -> None:
        manager = EventManager(
            make_event(
                message="foo",
                event_id="b" * 32,
                exception={
                    "values": [
                        {
                            "type": "SIGABRT",
                            "mechanism": {"handled": False},
                            "stacktrace": {"frames": [{"function": "foo"}]},
                        }
                    ]
                },
            ),
            project=self.project,
        )
        manager.normalize()

        manager.get_data()["grouping_config"] = {
            "id": "mobile:2021-02-12",
        }
        event = manager.save(self.project.id)

        mechanism = event.interfaces["exception"].values[0].mechanism
        assert mechanism is not None
        assert mechanism.synthetic is True
        assert event.title == "foo"

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_creation(self) -> None:
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
            )
            data = event.data
            assert event.get_event_type() == "transaction"
            assert event.transaction == "/books/"
            assert data["span_grouping_config"]["id"] == "default:2022-10-27"
            span_hashes = [span["hash"] for span in data["spans"]]
            assert span_hashes == [
                "0f43fb6f6e01ca52",
                "3dc5dd68b38e1730",
                "424c6ae1641f0f0e",
                "d5da18d7274b34a1",
                "ac72fc0a4f5fe381",
                "ac1468d8e11a0553",
                "d8681423cab4275f",
                "e853d2eb7fb9ebb0",
                "6a992d5529f459a4",
                "b640a0ce465fa2a4",
                "a3605e201eaf6c45",
                "061710eb39a66089",
                "c031296784b22ea9",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
            ]
            assert event.group
            group = event.group
            assert group is not None
            assert group.title == "N+1 Query"
            assert (
                group.message
                == "/books/ N+1 Query SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21"
            )
            assert group.culprit == "/books/"
            assert group.get_event_type() == "transaction"
            description = "SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21"
            assert group.get_event_metadata() == {
                "location": "/books/",
                "title": "N+1 Query",
                "value": description,
                "initial_priority": PriorityLevel.LOW,
            }
            assert (
                event.search_message
                == "/books/ N+1 Query SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21"
            )
            assert group.location() == "/books/"
            assert group.level == 40
            assert group.issue_category == GroupCategory.PERFORMANCE
            assert group.issue_type == PerformanceNPlusOneGroupType
            assert event.occurrence
            assert event.occurrence.evidence_display == [
                IssueEvidence(
                    name="Offending Spans",
                    value="db - SELECT `books_author`.`id`, `books_author`.`name` "
                    "FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
                    important=True,
                )
            ]
            assert event.occurrence.evidence_data == {
                "transaction_name": "/books/",
                "op": "db",
                "parent_span_ids": ["8dd7a5869a4f4583"],
                "parent_span": "django.view - index",
                "cause_span_ids": ["9179e43ae844b174"],
                "offender_span_ids": [
                    "b8be6138369491dd",
                    "b2d4826e7b618f1b",
                    "b3fdeea42536dbf1",
                    "b409e78a092e642f",
                    "86d2ede57bbf48d4",
                    "8e554c84cdc9731e",
                    "94d6230f3f910e12",
                    "a210b87a2191ceb6",
                    "88a5ccaf25b9bd8f",
                    "bb32cf50fc56b296",
                ],
                "repeating_spans": "db - SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
                "repeating_spans_compact": "SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
                "num_repeating_spans": "10",
            }
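
    # Note (assumption, for illustration only): the trailing run of identical span
    # hashes above ("d74ed7012596c3fb" ten times) is the fingerprint of the N+1
    # pattern — ten spans issuing the same SELECT hash identically, which is what
    # the detector keys on to produce the single "N+1 Query" occurrence, with the
    # ten matching span ids reported as offender_span_ids.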
    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_update(self) -> None:
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
            )
            group = event.group
            assert group is not None
            assert group.issue_category == GroupCategory.PERFORMANCE
            assert group.issue_type == PerformanceNPlusOneGroupType
            group.data["metadata"] = {
                "location": "hi",
                "title": "lol",
            }
            group.culprit = "wat"
            group.message = "nope"
            group.save()
            assert group.location() == "hi"
            assert group.title == "lol"

            with self.tasks():
                self.create_performance_issue(
                    event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
                )

            # Make sure the original group is updated via buffers
            group.refresh_from_db()

            assert group.title == "N+1 Query"
            assert group.get_event_metadata() == {
                "location": "/books/",
                "title": "N+1 Query",
                "value": "SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
                "initial_priority": PriorityLevel.LOW,
            }
            assert group.location() == "/books/"
            assert group.message == "nope"
            assert group.culprit == "/books/"

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_error_issue_no_associate_perf_event(self) -> None:
        """Test that you can't associate a performance event with an error issue"""
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
            )
            assert event.group is not None

            # sneakily make the group type wrong
            group = event.group
            assert group is not None
            group.type = ErrorGroupType.type_id
            group.save()
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
            )

            assert event.group is None

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_no_associate_error_event(self) -> None:
        """Test that you can't associate an error event with a performance issue"""
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            manager = EventManager(make_event())
            manager.normalize()
            event = manager.save(self.project.id)
            assert len(event.groups) == 1

            # sneakily make the group type wrong
            group = event.group
            assert group is not None
            group.type = PerformanceNPlusOneGroupType.type_id
            group.save()
            manager = EventManager(make_event())
            manager.normalize()
            event = manager.save(self.project.id)

            assert not event.group

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_creation_ignored(self) -> None:
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view")),
                noise_limit=2,
            )
            assert event.get_event_type() == "transaction"
            assert event.group is None

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_creation_over_ignored_threshold(self) -> None:
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            event_1 = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=3
            )
            event_2 = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=3
            )
            event_3 = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=3
            )
            assert event_1.get_event_type() == "transaction"
            assert event_2.get_event_type() == "transaction"
            assert event_3.get_event_type() == "transaction"
            # only the third occurrence of the hash should create the group
            assert event_1.group is None
            assert event_2.group is None
            assert event_3.group is not None

    @override_options(
        {
            "performance.issues.slow_db_query.problem-creation": 1.0,
            "performance_issue_creation_rate": 1.0,
            "performance.issues.all.problem-detection": 1.0,
        }
    )
    def test_perf_issue_slow_db_issue_is_created(self) -> None:
        def attempt_to_generate_slow_db_issue() -> Event:
            return self.create_performance_issue(
                event_data=make_event(**get_event("slow-db-spans")),
                issue_type=PerformanceSlowDBQueryGroupType,
            )

        last_event = attempt_to_generate_slow_db_issue()
        assert last_event.group
        assert last_event.group.type == PerformanceSlowDBQueryGroupType.type_id

    @patch("sentry.event_manager.metrics.incr")
    def test_new_group_metrics_logging(self, mock_metrics_incr: MagicMock) -> None:
        manager = EventManager(
            make_event(platform="javascript", sdk={"name": "sentry.javascript.nextjs"})
        )
        manager.normalize()
        manager.save(self.project.id)

        mock_metrics_incr.assert_any_call(
            "group.created",
            skip_internal=True,
            tags={
                "platform": "javascript",
                "sdk": "sentry.javascript.nextjs",
            },
        )

    @patch("sentry.event_manager.metrics.incr")
    def test_new_group_metrics_logging_no_platform_no_sdk(
        self, mock_metrics_incr: MagicMock
    ) -> None:
        manager = EventManager(make_event(platform=None, sdk=None))
        manager.normalize()
        manager.save(self.project.id)

        mock_metrics_incr.assert_any_call(
            "group.created",
            skip_internal=True,
            tags={
                "platform": "other",
                "sdk": "other",
            },
        )

    @patch("sentry.event_manager.metrics.incr")
    def test_new_group_metrics_logging_sdk_exist_but_null(
        self, mock_metrics_incr: MagicMock
    ) -> None:
        manager = EventManager(make_event(platform=None, sdk={"name": None}))
        manager.normalize()
        manager.save(self.project.id)

        mock_metrics_incr.assert_any_call(
            "group.created",
            skip_internal=True,
            tags={
                "platform": "other",
                "sdk": "other",
            },
        )

    def test_new_group_metrics_logging_with_frame_mix(self) -> None:
        with patch("sentry.event_manager.metrics.incr") as mock_metrics_incr:
            manager = EventManager(
                make_event(platform="javascript", sdk={"name": "sentry.javascript.nextjs"})
            )
            manager.normalize()
            # IRL, `normalize_stacktraces_for_grouping` adds frame mix metadata to the event, but we
            # can't mock that because it's imported inside its calling function to avoid circular imports
            manager._data["metadata"] = {"in_app_frame_mix": "in-app-only"}
            manager.save(self.project.id)

            mock_metrics_incr.assert_any_call(
                "grouping.in_app_frame_mix",
                sample_rate=1.0,
                tags={
                    "platform": "javascript",
                    "frame_mix": "in-app-only",
                    "sdk": "sentry.javascript.nextjs",
                },
            )

    def test_new_group_metrics_logging_without_frame_mix(self) -> None:
        with patch("sentry.event_manager.metrics.incr") as mock_metrics_incr:
            manager = EventManager(make_event(platform="javascript"))
            event = manager.save(self.project.id)

            assert event.get_event_metadata().get("in_app_frame_mix") is None

            metrics_logged = [call.args[0] for call in mock_metrics_incr.mock_calls]
            assert "grouping.in_app_frame_mix" not in metrics_logged


class AutoAssociateCommitTest(TestCase, EventManagerTestMixin):
    def setUp(self) -> None:
        super().setUp()
        self.repo_name = "example"
        self.project = self.create_project(name="foo")
        with assume_test_silo_mode_of(Integration):
            self.org_integration = self.integration.add_organization(
                self.project.organization, self.user
            )
        self.repo = self.create_repo(
            project=self.project,
            name=self.repo_name,
            provider="integrations:github",
            integration_id=self.integration.id,
        )
        self.repo.update(config={"name": self.repo_name})
        self.create_code_mapping(
            project=self.project,
            repo=self.repo,
            organization_integration=self.org_integration,
            stack_root="/stack/root",
            source_root="/source/root",
            default_branch="main",
        )
        responses.add(
            "GET",
            f"https://api.github.com/repos/{self.repo_name}/commits/{LATER_COMMIT_SHA}",
            json=json.loads(GET_COMMIT_EXAMPLE),
        )
        responses.add(
            "GET",
            f"https://api.github.com/repos/{self.repo_name}/commits/{EARLIER_COMMIT_SHA}",
            json=json.loads(GET_PRIOR_COMMIT_EXAMPLE),
        )
        self.dummy_commit_sha = "a" * 40
        responses.add(
            responses.GET,
            f"https://api.github.com/repos/{self.repo_name}/compare/{self.dummy_commit_sha}...{LATER_COMMIT_SHA}",
            json=json.loads(COMPARE_COMMITS_EXAMPLE_WITH_INTERMEDIATE),
        )
        responses.add(
            responses.GET,
            f"https://api.github.com/repos/{self.repo_name}/commits?sha={LATER_COMMIT_SHA}",
            json=json.loads(GET_LAST_2_COMMITS_EXAMPLE),
        )

    def _create_first_release_commit(self) -> None:
        # Create a release
        release = self.create_release(project=self.project, version="abcabcabc")
        # Create a commit
        commit = self.create_commit(
            repo=self.repo,
            key=self.dummy_commit_sha,
        )
        # Make a release head commit
        ReleaseHeadCommit.objects.create(
            organization_id=self.project.organization.id,
            repository_id=self.repo.id,
            release=release,
            commit=commit,
        )


class ReleaseIssueTest(TestCase):
    def setUp(self) -> None:
        self.project = self.create_project()
        self.release = Release.get_or_create(self.project, "1.0")
        self.environment1 = Environment.get_or_create(self.project, "prod")
        self.environment2 = Environment.get_or_create(self.project, "staging")
        self.timestamp = float(int(time() - 300))

    def make_event(self, **kwargs: Any) -> dict[str, Any]:
        result = {
            "event_id": "a" * 32,
            "message": "foo",
            "timestamp": self.timestamp + 0.23,
            "level": logging.ERROR,
            "logger": "default",
            "tags": [],
        }
        result.update(kwargs)
        return result

    def make_release_event(
        self,
        release_version: str = "1.0",
        environment_name: str | None = "prod",
        project_id: int = 1,
        **kwargs: Any,
    ) -> Event:
        event_params = make_event(
            release=release_version, environment=environment_name, event_id=uuid.uuid1().hex
        )
        event_params.update(kwargs)
        manager = EventManager(event_params)
        with self.tasks():
            event = manager.save(project_id)
        return event

    def convert_timestamp(self, timestamp: float) -> datetime:
        return datetime.fromtimestamp(timestamp, tz=UTC)

    def assert_release_project_environment(
        self, event: Event, new_issues_count: int, first_seen: float, last_seen: float
    ) -> None:
        release = Release.objects.get(
            organization=event.project.organization.id, version=event.get_tag("sentry:release")
        )
        release_project_envs = ReleaseProjectEnvironment.objects.filter(
            release=release, project=event.project, environment=event.get_environment()
        )
        assert len(release_project_envs) == 1

        release_project_env = release_project_envs[0]
        assert release_project_env.new_issues_count == new_issues_count
        assert release_project_env.first_seen == self.convert_timestamp(first_seen)
        assert release_project_env.last_seen == self.convert_timestamp(last_seen)

    def test_different_groups(self) -> None:
        event1 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        self.assert_release_project_environment(
            event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
        )

        event2 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="b" * 32,
            timestamp=self.timestamp + 100,
        )
        self.assert_release_project_environment(
            event=event2,
            new_issues_count=2,
            last_seen=self.timestamp + 100,
            first_seen=self.timestamp,
        )

    def test_same_group(self) -> None:
        event1 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        self.assert_release_project_environment(
            event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
        )

        event2 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp + 100,
        )
        self.assert_release_project_environment(
            event=event2,
            new_issues_count=1,
            last_seen=self.timestamp + 100,
            first_seen=self.timestamp,
        )

    def test_same_group_different_environment(self) -> None:
        event1 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        self.assert_release_project_environment(
            event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
        )

        event2 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment2.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp + 100,
        )
        self.assert_release_project_environment(
            event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
        )
        self.assert_release_project_environment(
            event=event2,
            new_issues_count=1,
            last_seen=self.timestamp + 100,
            first_seen=self.timestamp + 100,
        )


@apply_feature_flag_on_cls("organizations:dynamic-sampling")
class DSLatestReleaseBoostTest(TestCase):
    def setUp(self) -> None:
        self.environment1 = Environment.get_or_create(self.project, "prod")
        self.environment2 = Environment.get_or_create(self.project, "staging")
        self.timestamp = float(int(time() - 300))
        self.redis_client = get_redis_client_for_ds()

    def make_transaction_event(self, **kwargs: Any) -> dict[str, Any]:
        result = {
            "transaction": "wait",
            "contexts": {
                "trace": {
                    "parent_span_id": "bce14471e0e9654d",
                    "op": "foobar",
                    "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                    "span_id": "bf5be759039ede9a",
                }
            },
            "spans": [],
            "timestamp": self.timestamp + 0.23,
            "start_timestamp": "2019-06-14T14:01:40Z",
            "type": "transaction",
        }
        result.update(kwargs)
        return result

    def make_release_transaction(
        self,
        release_version: str = "1.0",
        environment_name: str | None = "prod",
        project_id: int = 1,
        **kwargs: Any,
    ) -> Event:
        transaction = (
            self.make_transaction_event(
                release=release_version, environment=environment_name, event_id=uuid.uuid1().hex
            )
            if environment_name is not None
            else self.make_transaction_event(release=release_version, event_id=uuid.uuid1().hex)
        )
        transaction.update(kwargs)
        manager = EventManager(transaction)
        with self.tasks():
            event = manager.save(project_id)
        return event

    @freeze_time("2022-11-03 10:00:00")
    def test_boost_release_with_non_observed_release(self) -> None:
        ts = timezone.now().timestamp()

        project = self.create_project(platform="python")
        release_1 = Release.get_or_create(project=project, version="1.0", date_added=timezone.now())
        release_2 = Release.get_or_create(
            project=project, version="2.0", date_added=timezone.now() + timedelta(hours=1)
        )
        release_3 = Release.get_or_create(
            project=project, version="3.0", date_added=timezone.now() + timedelta(hours=2)
        )

        for release, environment in (
            (release_1, None),
            (release_2, "prod"),
            (release_3, "dev"),
        ):
            self.make_release_transaction(
                release_version=release.version,
                environment_name=environment,
                project_id=project.id,
                checksum="a" * 32,
                timestamp=self.timestamp,
            )

            env_postfix = f":e:{environment}" if environment is not None else ""
            assert self.redis_client.get(f"ds::p:{project.id}:r:{release.id}{env_postfix}") == "1"

        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release_1.id}": str(ts),
            f"ds::r:{release_2.id}:e:prod": str(ts),
            f"ds::r:{release_3.id}:e:dev": str(ts),
        }
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release_1.id,
                timestamp=ts,
                environment=None,
                cache_key=f"ds::r:{release_1.id}",
                version=release_1.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_2.id,
                timestamp=ts,
                environment="prod",
                cache_key=f"ds::r:{release_2.id}:e:prod",
                version=release_2.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_3.id,
                timestamp=ts,
                environment="dev",
                cache_key=f"ds::r:{release_3.id}:e:dev",
                version=release_3.version,
                platform=Platform(project.platform),
            ),
        ]

    @freeze_time("2022-11-03 10:00:00")
    def test_boost_release_boosts_only_latest_release(self) -> None:
        ts = timezone.now().timestamp()

        project = self.create_project(platform="python")
        release_1 = Release.get_or_create(project=project, version="1.0", date_added=timezone.now())
        release_2 = Release.get_or_create(
            project=project,
            version="2.0",
            # We must make sure that release_2.date_added > release_1.date_added.
            date_added=timezone.now() + timedelta(hours=1),
        )

        # We add a transaction for the latest release, release_2.
        self.make_release_transaction(
            release_version=release_2.version,
            environment_name=self.environment1.name,
            project_id=project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        # We add a transaction for release_1, which is no longer the latest release and should
        # therefore be skipped.
        self.make_release_transaction(
            release_version=release_1.version,
            environment_name=self.environment1.name,
            project_id=project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )

        assert (
            self.redis_client.get(f"ds::p:{project.id}:r:{release_2.id}:e:{self.environment1.name}")
            == "1"
        )
        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release_2.id}:e:{self.environment1.name}": str(ts),
        }
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release_2.id,
                timestamp=ts,
                environment=self.environment1.name,
                cache_key=f"ds::r:{release_2.id}:e:{self.environment1.name}",
                version=release_2.version,
                platform=Platform(project.platform),
            )
        ]
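
    # Boosts are tracked per (release, environment) pair, including the pair with no environment
    # at all, so several entries for the same release can coexist, as the next test shows.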

    @freeze_time("2022-11-03 10:00:00")
    def test_boost_release_with_observed_release_and_different_environment(self) -> None:
        project = self.create_project(platform="python")
        release = Release.get_or_create(project=project, version="1.0", date_added=timezone.now())

        self.make_release_transaction(
            release_version=release.version,
            environment_name=self.environment1.name,
            project_id=project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )

        ts_1 = time()

        assert (
            self.redis_client.get(f"ds::p:{project.id}:r:{release.id}:e:{self.environment1.name}")
            == "1"
        )
        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release.id}:e:{self.environment1.name}": str(ts_1)
        }
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release.id,
                timestamp=ts_1,
                environment=self.environment1.name,
                cache_key=f"ds::r:{release.id}:e:{self.environment1.name}",
                version=release.version,
                platform=Platform(project.platform),
            )
        ]

        # We simulate a new transaction with the same release but a different environment arriving
        # 30 minutes later; we expect an entry for that release-environment pair to be added to
        # the boosted releases.
        with freeze_time("2022-11-03 10:30:00"):
            self.make_release_transaction(
                release_version=release.version,
                environment_name=self.environment2.name,
                project_id=project.id,
                checksum="b" * 32,
                timestamp=self.timestamp,
            )

            ts_2 = time()

            assert (
                self.redis_client.get(
                    f"ds::p:{project.id}:r:{release.id}:e:{self.environment2.name}"
                )
                == "1"
            )
            assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
                f"ds::r:{release.id}:e:{self.environment1.name}": str(ts_1),
                f"ds::r:{release.id}:e:{self.environment2.name}": str(ts_2),
            }
            assert ProjectBoostedReleases(
                project_id=project.id
            ).get_extended_boosted_releases() == [
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_1,
                    environment=self.environment1.name,
                    cache_key=f"ds::r:{release.id}:e:{self.environment1.name}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_2,
                    environment=self.environment2.name,
                    cache_key=f"ds::r:{release.id}:e:{self.environment2.name}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
            ]

        # We also test the case in which no environment is set, which can happen as per
        # https://docs.sentry.io/platforms/javascript/configuration/options/#environment.
        with freeze_time("2022-11-03 11:00:00"):
            self.make_release_transaction(
                release_version=release.version,
                environment_name=None,
                project_id=project.id,
                checksum="b" * 32,
                timestamp=self.timestamp,
            )

            ts_3 = time()

            assert self.redis_client.get(f"ds::p:{project.id}:r:{release.id}") == "1"
            assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
                f"ds::r:{release.id}:e:{self.environment1.name}": str(ts_1),
                f"ds::r:{release.id}:e:{self.environment2.name}": str(ts_2),
                f"ds::r:{release.id}": str(ts_3),
            }
            assert ProjectBoostedReleases(
                project_id=project.id
            ).get_extended_boosted_releases() == [
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_1,
                    environment=self.environment1.name,
                    cache_key=f"ds::r:{release.id}:e:{self.environment1.name}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_2,
                    environment=self.environment2.name,
                    cache_key=f"ds::r:{release.id}:e:{self.environment2.name}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_3,
                    environment=None,
                    cache_key=f"ds::r:{release.id}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
            ]
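
    # Pre-setting the observed flag below simulates a (release, environment) pair that was
    # already counted, so no new boost entry should be written for it.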

    @freeze_time("2022-11-03 10:00:00")
    def test_release_not_boosted_with_observed_release_and_same_environment(self) -> None:
        project = self.create_project(platform="python")
        release = Release.get_or_create(project=project, version="1.0", date_added=timezone.now())

        for environment in (self.environment1.name, self.environment2.name):
            self.redis_client.set(
                f"ds::p:{project.id}:r:{release.id}:e:{environment}", 1, 60 * 60 * 24
            )
            self.make_release_transaction(
                release_version=release.version,
                environment_name=environment,
                project_id=project.id,
                checksum="b" * 32,
                timestamp=self.timestamp,
            )

        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {}
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == []
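
    # A release row can disappear from the database while its boost entry still lives in Redis;
    # the next test checks that the stale entry is filtered out when boosts are read back.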

    @freeze_time("2022-11-03 10:00:00")
    def test_release_not_boosted_with_deleted_release_after_event_received(self) -> None:
        ts = timezone.now().timestamp()

        project = self.create_project(platform="python")
        release_1 = Release.get_or_create(project=project, version="1.0", date_added=timezone.now())
        release_2 = Release.get_or_create(
            project=project, version="2.0", date_added=timezone.now() + timedelta(hours=1)
        )

        self.make_release_transaction(
            release_version=release_1.version,
            environment_name=None,
            project_id=project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        assert self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}") == "1"

        self.make_release_transaction(
            release_version=release_2.version,
            environment_name=None,
            project_id=project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        assert self.redis_client.get(f"ds::p:{project.id}:r:{release_2.id}") == "1"

        # We simulate that release_2 is deleted after its boost has been inserted.
        release_2_id = release_2.id
        release_2.delete()

        # The boosted release is expected to remain in Redis until it is queried through
        # ProjectBoostedReleases.
        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release_1.id}": str(ts),
            f"ds::r:{release_2_id}": str(ts),
        }
        # We expect not to see release_2 because it is no longer in the database, so it is
        # treated as expired.
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release_1.id,
                timestamp=ts,
                environment=None,
                cache_key=f"ds::r:{release_1.id}",
                version=release_1.version,
                platform=Platform(project.platform),
            ),
        ]
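
    # The boosted-releases hash historically stored the bare release id as the field name;
    # newer entries use the "ds::r:<release_id>[:e:<env>]" format. Readers are expected to
    # understand both, as the next test verifies.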

    @freeze_time("2022-11-03 10:00:00")
    def test_get_boosted_releases_with_old_and_new_cache_keys(self) -> None:
        ts = timezone.now().timestamp()

        project = self.create_project(platform="python")

        # Old cache key.
        release_1 = Release.get_or_create(project=project, version="1.0", date_added=timezone.now())
        self.redis_client.hset(
            f"ds::p:{project.id}:boosted_releases",
            f"{release_1.id}",
            ts,
        )

        # New cache key.
        release_2 = Release.get_or_create(
            project=project, version="2.0", date_added=timezone.now() + timedelta(hours=1)
        )
        self.redis_client.hset(
            f"ds::p:{project.id}:boosted_releases",
            f"ds::r:{release_2.id}",
            ts,
        )
        self.redis_client.hset(
            f"ds::p:{project.id}:boosted_releases",
            f"ds::r:{release_2.id}:e:{self.environment1.name}",
            ts,
        )
        self.redis_client.hset(
            f"ds::p:{project.id}:boosted_releases",
            f"ds::r:{release_2.id}:e:{self.environment2.name}",
            ts,
        )

        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release_1.id,
                timestamp=ts,
                environment=None,
                # This item has the old cache key.
                cache_key=f"{release_1.id}",
                version=release_1.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_2.id,
                timestamp=ts,
                environment=None,
                cache_key=f"ds::r:{release_2.id}",
                version=release_2.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_2.id,
                timestamp=ts,
                environment=self.environment1.name,
                cache_key=f"ds::r:{release_2.id}:e:{self.environment1.name}",
                version=release_2.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_2.id,
                timestamp=ts,
                environment=self.environment2.name,
                cache_key=f"ds::r:{release_2.id}:e:{self.environment2.name}",
                version=release_2.version,
                platform=Platform(project.platform),
            ),
        ]
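
    # A boost is presumably considered expired once now - timestamp exceeds the platform's
    # time_to_adoption; the next test backdates entries by exactly that amount plus one second.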

    @freeze_time("2022-11-03 10:00:00")
    def test_expired_boosted_releases_are_removed(self) -> None:
        ts = timezone.now().timestamp()

        # We want to test with multiple platforms.
        for platform in ("python", "java", None):
            project = self.create_project(platform=platform)

            for index, (release_version, environment) in enumerate(
                (
                    (f"1.0-{platform}", self.environment1.name),
                    (f"2.0-{platform}", self.environment2.name),
                )
            ):
                release = Release.get_or_create(
                    project=project,
                    version=release_version,
                    date_added=timezone.now() + timedelta(hours=index),
                )
                self.redis_client.set(
                    f"ds::p:{project.id}:r:{release.id}:e:{environment}", 1, 60 * 60 * 24
                )
                self.redis_client.hset(
                    f"ds::p:{project.id}:boosted_releases",
                    f"ds::r:{release.id}:e:{environment}",
                    # We backdate the boost so that it expired one second ago.
                    ts - Platform(platform).time_to_adoption - 1,
                )

            # We add a new boosted release that is not expired.
            release_3 = Release.get_or_create(
                project=project,
                version=f"3.0-{platform}",
                date_added=timezone.now() + timedelta(hours=2),
            )
            self.make_release_transaction(
                release_version=release_3.version,
                environment_name=self.environment1.name,
                project_id=project.id,
                checksum="b" * 32,
                timestamp=self.timestamp,
            )

            assert (
                self.redis_client.get(
                    f"ds::p:{project.id}:r:{release_3.id}:e:{self.environment1.name}"
                )
                == "1"
            )
            assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
                f"ds::r:{release_3.id}:e:{self.environment1.name}": str(ts)
            }
            assert ProjectBoostedReleases(
                project_id=project.id
            ).get_extended_boosted_releases() == [
                ExtendedBoostedRelease(
                    id=release_3.id,
                    timestamp=ts,
                    environment=self.environment1.name,
                    cache_key=f"ds::r:{release_3.id}:e:{self.environment1.name}",
                    version=release_3.version,
                    platform=Platform(project.platform),
                )
            ]
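
    # Observing a previously unseen release should schedule a project-config invalidation with
    # the "dynamic_sampling:boost_release" trigger.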

    @mock.patch("sentry.event_manager.schedule_invalidate_project_config")
    def test_project_config_invalidation_is_triggered_when_new_release_is_observed(
        self, mocked_invalidate: mock.MagicMock
    ) -> None:
        self.make_release_transaction(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        assert any(
            o.kwargs["trigger"] == "dynamic_sampling:boost_release"
            for o in mocked_invalidate.mock_calls
        )

    @freeze_time("2022-11-03 10:00:00")
    @mock.patch("sentry.dynamic_sampling.rules.helpers.latest_releases.BOOSTED_RELEASES_LIMIT", 2)
    def test_least_recently_boosted_release_is_removed_if_limit_is_exceeded(self) -> None:
        ts = timezone.now().timestamp()

        project = self.create_project(platform="python")
        release_1 = Release.get_or_create(
            project=project,
            version="1.0",
            date_added=timezone.now(),
        )
        release_2 = Release.get_or_create(
            project=project,
            version="2.0",
            date_added=timezone.now() + timedelta(hours=1),
        )

        # We boost with increasing timestamps so that we know the oldest boost will be evicted.
        for release, boost_time in ((release_1, ts - 2), (release_2, ts - 1)):
            self.redis_client.set(
                f"ds::p:{project.id}:r:{release.id}",
                1,
                60 * 60 * 24,
            )
            self.redis_client.hset(
                f"ds::p:{project.id}:boosted_releases",
                f"ds::r:{release.id}",
                boost_time,
            )

        release_3 = Release.get_or_create(
            project=project,
            version="3.0",
            date_added=timezone.now() + timedelta(hours=2),
        )
        self.make_release_transaction(
            release_version=release_3.version,
            environment_name=self.environment1.name,
            project_id=project.id,
            checksum="b" * 32,
            timestamp=self.timestamp,
        )

        assert (
            self.redis_client.get(f"ds::p:{project.id}:r:{release_3.id}:e:{self.environment1.name}")
            == "1"
        )
        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release_2.id}": str(ts - 1),
            f"ds::r:{release_3.id}:e:{self.environment1.name}": str(ts),
        }
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release_2.id,
                timestamp=ts - 1,
                environment=None,
                cache_key=f"ds::r:{release_2.id}",
                version=release_2.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_3.id,
                timestamp=ts,
                environment=self.environment1.name,
                cache_key=f"ds::r:{release_3.id}:e:{self.environment1.name}",
                version=release_3.version,
                platform=Platform(project.platform),
            ),
        ]

    @freeze_time()
    @mock.patch("sentry.dynamic_sampling.rules.helpers.latest_releases.BOOSTED_RELEASES_LIMIT", 2)
    def test_removed_boost_not_added_again_if_limit_is_exceeded(self) -> None:
        ts = timezone.now().timestamp()

        project = self.create_project(platform="python")
        release_1 = Release.get_or_create(project=project, version="1.0", date_added=timezone.now())

        # We want to test the case in which the same release is sent with different environments
        # that go over the limit: an environment boost is evicted, and then a transaction arrives
        # with the evicted environment.
        #
        # As an example, suppose the following history of transactions received in the form
        # (release, env):
        # (1, production) -> (1, staging) -> (1, None) -> (1, production)
        #
        # Once we receive the first two, we have reached maximum capacity. Then we receive
        # (1, None) and evict the boost for (1, production), which results in the boosts
        # (1, staging) and (1, None). After that we receive (1, production) again, but in this
        # case we don't want to remove (1, staging), because we would end up in an infinite loop.
        # Instead, we expect to mark (1, production) as observed and only un-observe it if it
        # does not receive transactions within the next 24 hours.
        environments_sequence = [
            self.environment1.name,
            self.environment2.name,
            None,
            self.environment1.name,
        ]
        for environment in environments_sequence:
            self.make_release_transaction(
                release_version=release_1.version,
                environment_name=environment,
                project_id=project.id,
                checksum="b" * 32,
                timestamp=self.timestamp,
            )

        # We assert that all environments have been observed.
        assert (
            self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}:e:{self.environment1.name}")
            == "1"
        )
        assert (
            self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}:e:{self.environment2.name}")
            == "1"
        )
        assert self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}") == "1"

        # We assert that only the last 2 unseen (release, env) pairs are boosted.
        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release_1.id}:e:{self.environment2.name}": str(ts),
            f"ds::r:{release_1.id}": str(ts),
        }
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release_1.id,
                timestamp=ts,
                environment=self.environment2.name,
                cache_key=f"ds::r:{release_1.id}:e:{self.environment2.name}",
                version=release_1.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_1.id,
                timestamp=ts,
                environment=None,
                cache_key=f"ds::r:{release_1.id}",
                version=release_1.version,
                platform=Platform(project.platform),
            ),
        ]
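
# save_grouphash_and_group is expected to create a new Group only for an unseen hash and to
# return the existing one (with created=False) when the hash has been seen before.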
class TestSaveGroupHashAndGroup(TransactionTestCase):
    def test(self) -> None:
        perf_data = load_data("transaction-n-plus-one", timestamp=before_now(minutes=10))
        event = _get_event_instance(perf_data, project_id=self.project.id)
        group_hash = "some_group"

        group, created = save_grouphash_and_group(self.project, event, group_hash)
        assert created

        group_2, created = save_grouphash_and_group(self.project, event, group_hash)
        assert group.id == group_2.id
        assert not created
        assert Group.objects.filter(grouphash__hash=group_hash).count() == 1

        group_3, created = save_grouphash_and_group(self.project, event, "new_hash")
        assert created
        assert group_2.id != group_3.id
        assert Group.objects.filter(grouphash__hash=group_hash).count() == 1
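
# Minimal payload fixtures for the COGS accounting test below: one transaction event and one
# error event.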
example_transaction_event = {
    "type": "transaction",
    "timestamp": datetime.now().isoformat(),
    "start_timestamp": (datetime.now() - timedelta(seconds=1)).isoformat(),
    "spans": [],
    "contexts": {
        "trace": {
            "parent_span_id": "8988cec7cc0779c1",
            "type": "trace",
            "op": "foobar",
            "trace_id": "a7d67cf796774551a95be6543cacd459",
            "span_id": "babaae0d4b7512d9",
            "status": "ok",
        }
    },
}


example_error_event = {
    "event_id": "80e3496eff734ab0ac993167aaa0d1cd",
    "release": "5.222.5",
    "type": "error",
    "level": "fatal",
    "platform": "cocoa",
    "tags": {"level": "fatal"},
    "environment": "test-app",
    "sdk": {
        "name": "sentry.cocoa",
        "version": "8.2.0",
        "integrations": [
            "Crash",
            "PerformanceTracking",
            "MetricKit",
            "WatchdogTerminationTracking",
            "ViewHierarchy",
            "NetworkTracking",
            "ANRTracking",
            "AutoBreadcrumbTracking",
            "FramesTracking",
            "AppStartTracking",
            "Screenshot",
            "FileIOTracking",
            "UIEventTracking",
            "AutoSessionTracking",
            "CoreDataTracking",
            "PreWarmedAppStartTracing",
        ],
    },
    "user": {
        "id": "803F5C87-0F8B-41C7-8499-27BD71A92738",
        "ip_address": "192.168.0.1",
        "geo": {"country_code": "US", "region": "United States"},
    },
    "logger": "my.logger.name",
}
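
# The test below routes COGS usage records through an in-memory broker (Arroyo's LocalBroker
# backed by MemoryMessageStorage) and checks that the byte amount reported for the saved event
# is at least the size of its normalized payload.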
@pytest.mark.parametrize(
    "event_data,expected_type",
    [
        pytest.param(
            example_transaction_event,
            "transactions",
            id="transactions",
        ),
        pytest.param(
            example_error_event,
            "errors",
            id="errors",
        ),
    ],
)
@django_db_all
def test_cogs_event_manager(
    default_project: int, event_data: Mapping[str, Any], expected_type: str
) -> None:
    storage: MemoryMessageStorage[KafkaPayload] = MemoryMessageStorage()
    broker = LocalBroker(storage)
    topic = Topic("shared-resources-usage")
    broker.create_topic(topic, 1)
    producer = broker.get_producer()

    set("shared_resources_accounting_enabled", [settings.COGS_EVENT_STORE_LABEL])

    accountant.init_backend(producer)

    raw_event_params = make_event(**event_data)

    manager = EventManager(raw_event_params)
    manager.normalize()
    normalized_data = dict(manager.get_data())
    _ = manager.save(default_project)

    expected_len = len(json.dumps(normalized_data))

    accountant._shutdown()
    accountant.reset_backend()

    msg1 = broker.consume(Partition(topic, 0), 0)
    assert msg1 is not None
    payload = msg1.payload
    assert payload is not None
    formatted = json.loads(payload.value.decode("utf-8"))

    assert formatted["shared_resource_id"] == settings.COGS_EVENT_STORE_LABEL
    assert formatted["app_feature"] == expected_type
    assert formatted["usage_unit"] == "bytes"
    # We cannot assert an exact length because the save method adds some extra fields, so we
    # assert that the reported amount is at least the expected length.
    assert formatted["amount"] >= expected_len