test_event_manager.py
import logging
import uuid
from datetime import datetime, timedelta
from time import time
from unittest import mock
from unittest.mock import MagicMock, patch

import pytest
import responses
from django.core.cache import cache
from django.test.utils import override_settings
from django.utils import timezone
from freezegun import freeze_time
from rest_framework.status import HTTP_404_NOT_FOUND

from fixtures.github import (
    COMPARE_COMMITS_EXAMPLE_WITH_INTERMEDIATE,
    EARLIER_COMMIT_SHA,
    GET_COMMIT_EXAMPLE,
    GET_LAST_2_COMMITS_EXAMPLE,
    GET_PRIOR_COMMIT_EXAMPLE,
    LATER_COMMIT_SHA,
)
from sentry import audit_log, nodestore, tsdb
from sentry.attachments import CachedAttachment, attachment_cache
from sentry.constants import MAX_VERSION_LENGTH, DataCategory
from sentry.dynamic_sampling import (
    ExtendedBoostedRelease,
    Platform,
    ProjectBoostedReleases,
    get_redis_client_for_ds,
)
from sentry.event_manager import (
    EventManager,
    HashDiscarded,
    _get_event_instance,
    _save_grouphash_and_group,
    get_event_type,
    has_pending_commit_resolution,
    materialize_metadata,
)
from sentry.eventstore.models import Event
from sentry.grouping.utils import hash_from_values
from sentry.ingest.inbound_filters import FilterStatKeys
from sentry.issues.grouptype import (
    ErrorGroupType,
    GroupCategory,
    PerformanceNPlusOneGroupType,
    PerformanceSlowDBQueryGroupType,
)
from sentry.issues.issue_occurrence import IssueEvidence
from sentry.models import (
    Activity,
    Commit,
    CommitAuthor,
    Environment,
    ExternalIssue,
    Group,
    GroupEnvironment,
    GroupHash,
    GroupLink,
    GroupRelease,
    GroupResolution,
    GroupStatus,
    GroupTombstone,
    Integration,
    OrganizationIntegration,
    Project,
    PullRequest,
    PullRequestCommit,
    Release,
    ReleaseCommit,
    ReleaseHeadCommit,
    ReleaseProjectEnvironment,
    UserReport,
)
from sentry.models.auditlogentry import AuditLogEntry
from sentry.models.eventuser import EventUser
from sentry.projectoptions.defaults import DEFAULT_GROUPING_CONFIG, LEGACY_GROUPING_CONFIG
from sentry.spans.grouping.utils import hash_values
from sentry.testutils.asserts import assert_mock_called_once_with_partial
from sentry.testutils.cases import (
    PerformanceIssueTestCase,
    SnubaTestCase,
    TestCase,
    TransactionTestCase,
)
from sentry.testutils.helpers import apply_feature_flag_on_cls, override_options
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.testutils.performance_issues.event_generators import get_event
from sentry.testutils.silo import region_silo_test
from sentry.tsdb.base import TSDBModel
from sentry.types.activity import ActivityType
from sentry.utils import json
from sentry.utils.cache import cache_key_for_event
from sentry.utils.outcomes import Outcome
from sentry.utils.samples import load_data
from tests.sentry.integrations.github.test_repository import stub_installation_token


def make_event(**kwargs):
    result = {
        "event_id": uuid.uuid1().hex,
        "level": logging.ERROR,
        "logger": "default",
        "tags": [],
    }
    result.update(kwargs)
    return result
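

# For reference: make_event(message="boom") produces a payload like
#     {"event_id": "<32-char hex>", "level": 40, "logger": "default",
#      "tags": [], "message": "boom"}
# since logging.ERROR == 40 and keyword arguments override or extend the
# defaults above.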


class EventManagerTestMixin:
    def make_release_event(self, release_name, project_id):
        manager = EventManager(make_event(release=release_name))
        manager.normalize()
        event = manager.save(project_id)
        return event


@region_silo_test
class EventManagerTest(TestCase, SnubaTestCase, EventManagerTestMixin, PerformanceIssueTestCase):
    def test_similar_message_prefix_doesnt_group(self):
        # We had a regression in which the default hash was computed from
        # 'event.message' instead of '[event.message]', which generated one
        # hash per letter of the message.
        manager = EventManager(make_event(event_id="a", message="foo bar"))
        manager.normalize()
        event1 = manager.save(self.project.id)

        manager = EventManager(make_event(event_id="b", message="foo baz"))
        manager.normalize()
        event2 = manager.save(self.project.id)

        assert event1.group_id != event2.group_id

    def test_ephemeral_interfaces_removed_on_save(self):
        manager = EventManager(make_event(platform="python"))
        manager.normalize()
        event = manager.save(self.project.id)

        group = event.group
        assert group is not None
        assert group.platform == "python"
        assert event.platform == "python"

    @mock.patch("sentry.event_manager.eventstream.backend.insert")
    def test_dupe_message_id(self, eventstream_insert):
        # Saves the latest event to nodestore and eventstream
        project_id = self.project.id
        event_id = "a" * 32
        node_id = Event.generate_node_id(project_id, event_id)

        manager = EventManager(make_event(event_id=event_id, message="first"))
        manager.normalize()
        manager.save(project_id)
        assert nodestore.backend.get(node_id)["logentry"]["formatted"] == "first"

        manager = EventManager(make_event(event_id=event_id, message="second"))
        manager.normalize()
        manager.save(project_id)
        assert nodestore.backend.get(node_id)["logentry"]["formatted"] == "second"

        assert eventstream_insert.call_count == 2
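
    # Note: Event.generate_node_id is deterministic in (project_id, event_id),
    # so saving a second payload with the same event_id overwrites the same
    # nodestore record -- which is what the assertions above rely on.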

    def test_updates_group(self):
        timestamp = time() - 300
        manager = EventManager(
            make_event(message="foo", event_id="a" * 32, checksum="a" * 32, timestamp=timestamp)
        )
        manager.normalize()
        event = manager.save(self.project.id)

        manager = EventManager(
            make_event(
                message="foo bar", event_id="b" * 32, checksum="a" * 32, timestamp=timestamp + 2.0
            )
        )
        manager.normalize()

        with self.tasks():
            event2 = manager.save(self.project.id)

        group = Group.objects.get(id=event.group_id)

        assert group.times_seen == 2
        assert group.last_seen == event2.datetime
        assert group.message == event2.message
        assert group.data.get("type") == "default"
        assert group.data.get("metadata") == {"title": "foo bar"}

    def test_applies_secondary_grouping(self):
        project = self.project
        project.update_option("sentry:grouping_config", "legacy:2019-03-12")
        project.update_option("sentry:secondary_grouping_expiry", 0)

        timestamp = time() - 300
        manager = EventManager(
            make_event(message="foo 123", event_id="a" * 32, timestamp=timestamp)
        )
        manager.normalize()
        event = manager.save(project.id)

        project.update_option("sentry:grouping_config", "newstyle:2023-01-11")
        project.update_option("sentry:secondary_grouping_config", "legacy:2019-03-12")
        project.update_option("sentry:secondary_grouping_expiry", time() + (24 * 90 * 3600))

        # Switching to newstyle grouping changes the hashes, as the "123" will
        # be trimmed from the message.
        manager = EventManager(
            make_event(message="foo 123", event_id="b" * 32, timestamp=timestamp + 2.0)
        )
        manager.normalize()

        with self.tasks():
            event2 = manager.save(project.id)

        # Make sure the events ended up in the same group because of the
        # secondary (fallback) grouping, not because their primary hashes
        # happen to overlap.
        assert not set(event.get_hashes().hashes) & set(event2.get_hashes().hashes)

        assert event.group_id == event2.group_id

        group = Group.objects.get(id=event.group_id)
        assert group.times_seen == 2
        assert group.last_seen == event2.datetime
        assert group.message == event2.message
        assert group.data.get("type") == "default"
        assert group.data.get("metadata") == {"title": "foo 123"}

        # After expiry, new events are still assigned to the same group:
        project.update_option("sentry:secondary_grouping_expiry", 0)
        manager = EventManager(
            make_event(message="foo 123", event_id="c" * 32, timestamp=timestamp + 4.0)
        )
        manager.normalize()

        with self.tasks():
            event3 = manager.save(project.id)
        assert event3.group_id == event2.group_id
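
    # In rough terms, the transition above works like this: while time() is
    # still before the "sentry:secondary_grouping_expiry" option, events are
    # hashed with both the primary and the secondary config, and an event
    # joins an existing group if either set of hashes matches. Because the
    # primary hashes get written to GroupHash along the way, matching keeps
    # working after the expiry, which is what the event3 assertion shows.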

    def test_applies_secondary_grouping_hierarchical(self):
        project = self.project
        project.update_option("sentry:grouping_config", "legacy:2019-03-12")
        project.update_option("sentry:secondary_grouping_expiry", 0)

        timestamp = time() - 300

        def save_event(ts_offset):
            ts = timestamp + ts_offset
            manager = EventManager(
                make_event(
                    message="foo 123",
                    event_id=hex(2**127 + int(ts))[-32:],
                    timestamp=ts,
                    exception={
                        "values": [
                            {
                                "type": "Hello",
                                "stacktrace": {
                                    "frames": [
                                        {
                                            "function": "not_in_app_function",
                                        },
                                        {
                                            "function": "in_app_function",
                                        },
                                    ]
                                },
                            }
                        ]
                    },
                )
            )
            manager.normalize()
            with self.tasks():
                return manager.save(project.id)

        event = save_event(0)

        project.update_option("sentry:grouping_config", "mobile:2021-02-12")
        project.update_option("sentry:secondary_grouping_config", "legacy:2019-03-12")
        project.update_option("sentry:secondary_grouping_expiry", time() + (24 * 90 * 3600))

        # Switching to the hierarchical mobile config changes the hashes.
        event2 = save_event(2)

        # Make sure the events ended up in the same group because of the
        # secondary (fallback) grouping, not because their primary hashes
        # happen to overlap.
        assert not set(event.get_hashes().hashes) & set(event2.get_hashes().hashes)

        assert event.group_id == event2.group_id

        group = Group.objects.get(id=event.group_id)
        assert group.times_seen == 2
        assert group.last_seen == event2.datetime

        # After expiry, new events are still assigned to the same group:
        project.update_option("sentry:secondary_grouping_expiry", 0)
        event3 = save_event(4)
        assert event3.group_id == event2.group_id

    def test_applies_downgrade_hierarchical(self):
        project = self.project
        project.update_option("sentry:grouping_config", "mobile:2021-02-12")
        project.update_option("sentry:secondary_grouping_expiry", 0)

        timestamp = time() - 300

        def save_event(ts_offset):
            ts = timestamp + ts_offset
            manager = EventManager(
                make_event(
                    message="foo 123",
                    event_id=hex(2**127 + int(ts))[-32:],
                    timestamp=ts,
                    exception={
                        "values": [
                            {
                                "type": "Hello",
                                "stacktrace": {
                                    "frames": [
                                        {
                                            "function": "not_in_app_function",
                                        },
                                        {
                                            "function": "in_app_function",
                                        },
                                    ]
                                },
                            }
                        ]
                    },
                )
            )
            manager.normalize()
            with self.tasks():
                return manager.save(project.id)

        event = save_event(0)

        project.update_option("sentry:grouping_config", "legacy:2019-03-12")
        project.update_option("sentry:secondary_grouping_config", "mobile:2021-02-12")
        project.update_option("sentry:secondary_grouping_expiry", time() + (24 * 90 * 3600))

        # Downgrading to the legacy config changes the hashes.
        event2 = save_event(2)

        # Make sure the events ended up in the same group because of the
        # secondary (fallback) grouping, not because their primary hashes
        # happen to overlap.
        assert not set(event.get_hashes().hashes) & set(event2.get_hashes().hashes)

        assert event.group_id == event2.group_id

        group = Group.objects.get(id=event.group_id)

        group_hashes = GroupHash.objects.filter(
            project=self.project, hash__in=event.get_hashes().hashes
        )
        assert group_hashes
        for group_hash in group_hashes:
            assert group_hash.group_id == event.group_id

        assert group.times_seen == 2
        assert group.last_seen == event2.datetime

        # After expiry, new events are still assigned to the same group:
        project.update_option("sentry:secondary_grouping_expiry", 0)
        event3 = save_event(4)
        assert event3.group_id == event2.group_id

    @mock.patch("sentry.event_manager._calculate_background_grouping")
    def test_applies_background_grouping(self, mock_calc_grouping):
        timestamp = time() - 300
        manager = EventManager(
            make_event(message="foo 123", event_id="a" * 32, timestamp=timestamp)
        )
        manager.normalize()
        manager.save(self.project.id)

        assert mock_calc_grouping.call_count == 0

        with self.options(
            {
                "store.background-grouping-config-id": "mobile:2021-02-12",
                "store.background-grouping-sample-rate": 1.0,
            }
        ):
            manager.save(self.project.id)

        assert mock_calc_grouping.call_count == 1

    @mock.patch("sentry.event_manager._calculate_background_grouping")
    def test_background_grouping_sample_rate(self, mock_calc_grouping):
        timestamp = time() - 300
        manager = EventManager(
            make_event(message="foo 123", event_id="a" * 32, timestamp=timestamp)
        )
        manager.normalize()
        manager.save(self.project.id)

        assert mock_calc_grouping.call_count == 0

        with self.options(
            {
                "store.background-grouping-config-id": "mobile:2021-02-12",
                "store.background-grouping-sample-rate": 0.0,
            }
        ):
            manager.save(self.project.id)

        manager.save(self.project.id)

        assert mock_calc_grouping.call_count == 0
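
    # Conceptually (a sketch, not the exact code in sentry.event_manager),
    # the gate exercised by the two tests above behaves like:
    #
    #     if random.random() <= options.get("store.background-grouping-sample-rate"):
    #         _calculate_background_grouping(...)
    #
    # so a rate of 1.0 always applies the background config and 0.0 never does.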

    def test_updates_group_with_fingerprint(self):
        ts = time() - 200
        manager = EventManager(
            make_event(message="foo", event_id="a" * 32, fingerprint=["a" * 32], timestamp=ts)
        )
        with self.tasks():
            event = manager.save(self.project.id)

        manager = EventManager(
            make_event(message="foo bar", event_id="b" * 32, fingerprint=["a" * 32], timestamp=ts)
        )
        with self.tasks():
            event2 = manager.save(self.project.id)

        group = Group.objects.get(id=event.group_id)

        assert group.times_seen == 2
        assert group.last_seen == event.datetime
        assert group.message == event2.message

    def test_differentiates_with_fingerprint(self):
        manager = EventManager(
            make_event(message="foo", event_id="a" * 32, fingerprint=["{{ default }}", "a" * 32])
        )
        with self.tasks():
            manager.normalize()
            event = manager.save(self.project.id)

        manager = EventManager(
            make_event(message="foo bar", event_id="b" * 32, fingerprint=["a" * 32])
        )
        with self.tasks():
            manager.normalize()
            event2 = manager.save(self.project.id)

        assert event.group_id != event2.group_id

    def test_materialize_metadata_simple(self):
        manager = EventManager(make_event(transaction="/dogs/are/great/"))
        event = manager.save(self.project.id)

        event_type = get_event_type(event.data)
        event_metadata = event_type.get_metadata(event.data)

        assert materialize_metadata(event.data, event_type, event_metadata) == {
            "type": "default",
            "culprit": "/dogs/are/great/",
            "metadata": {"title": "<unlabeled event>"},
            "title": "<unlabeled event>",
            "location": None,
        }

    def test_materialize_metadata_preserves_existing_metadata(self):
        manager = EventManager(make_event())
        event = manager.save(self.project.id)

        event.data.setdefault("metadata", {})
        event.data["metadata"]["dogs"] = "are great"  # should not get clobbered

        event_type = get_event_type(event.data)
        event_metadata_from_type = event_type.get_metadata(event.data)
        materialized = materialize_metadata(event.data, event_type, event_metadata_from_type)

        assert materialized["metadata"] == {"title": "<unlabeled event>", "dogs": "are great"}
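
    # materialize_metadata merges the event-type metadata into whatever the
    # event already carries, roughly {**event.data["metadata"], **event_metadata},
    # which is why the "dogs" key above survives materialization.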

    @mock.patch("sentry.signals.issue_unresolved.send_robust")
    def test_unresolves_group(self, send_robust):
        ts = time() - 300

        # N.B. EventManager won't unresolve the group unless event2 has a
        # later timestamp than event1.
        manager = EventManager(make_event(event_id="a" * 32, checksum="a" * 32, timestamp=ts))
        with self.tasks():
            event = manager.save(self.project.id)

        group = Group.objects.get(id=event.group_id)
        group.status = GroupStatus.RESOLVED
        group.substatus = None
        group.save()
        assert group.is_resolved()

        manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=ts + 50))
        event2 = manager.save(self.project.id)
        assert event.group_id == event2.group_id

        group = Group.objects.get(id=group.id)
        assert not group.is_resolved()
        assert send_robust.called

    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_does_not_unresolve_group(self, plugin_is_regression):
        # N.B. EventManager won't unresolve the group unless event2 has a
        # later timestamp than event1.
        plugin_is_regression.return_value = False

        manager = EventManager(
            make_event(event_id="a" * 32, checksum="a" * 32, timestamp=1403007314)
        )
        with self.tasks():
            manager.normalize()
            event = manager.save(self.project.id)

        group = Group.objects.get(id=event.group_id)
        group.status = GroupStatus.RESOLVED
        group.substatus = None
        group.save()
        assert group.is_resolved()

        manager = EventManager(
            make_event(event_id="b" * 32, checksum="a" * 32, timestamp=1403007315)
        )
        manager.normalize()
        event2 = manager.save(self.project.id)
        assert event.group_id == event2.group_id

        group = Group.objects.get(id=group.id)
        assert group.is_resolved()

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_marks_as_unresolved_with_new_release(
        self, plugin_is_regression, mock_send_activity_notifications_delay
    ):
        plugin_is_regression.return_value = True

        old_release = Release.objects.create(
            version="a",
            organization_id=self.project.organization_id,
            date_added=timezone.now() - timedelta(minutes=30),
        )
        old_release.add_project(self.project)

        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group

        group.update(status=GroupStatus.RESOLVED, substatus=None)

        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": ""},
        )

        manager = EventManager(
            make_event(
                event_id="b" * 32, checksum="a" * 32, timestamp=time(), release=old_release.version
            )
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.RESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data["version"] == ""

        assert GroupResolution.objects.filter(group=group).exists()

        manager = EventManager(
            make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.UNRESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data["version"] == "b"

        assert not GroupResolution.objects.filter(group=group).exists()

        activity = Activity.objects.get(group=group, type=ActivityType.SET_REGRESSION.value)
        mock_send_activity_notifications_delay.assert_called_once_with(activity.id)

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_that_release_in_latest_activity_prior_to_regression_is_not_overridden(
        self, plugin_is_regression, mock_send_activity_notifications_delay
    ):
        """
        Ensure that when a regression occurs, the release recorded on the latest
        resolution activity prior to that regression is not overridden. It should
        only be overridden if the activity was awaiting an upcoming release.
        """
        plugin_is_regression.return_value = True

        # Create a release and a group associated with it
        old_release = self.create_release(
            version="foobar", date_added=timezone.now() - timedelta(minutes=30)
        )
        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group
        group.update(status=GroupStatus.RESOLVED, substatus=None)

        # Resolve the group in old_release
        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": "foobar"},
        )

        # Create a regression
        manager = EventManager(
            make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.UNRESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data["version"] == "foobar"

        regressed_activity = Activity.objects.get(
            group=group, type=ActivityType.SET_REGRESSION.value
        )
        assert regressed_activity.data["version"] == "b"
        assert regressed_activity.data["follows_semver"] is False

        mock_send_activity_notifications_delay.assert_called_once_with(regressed_activity.id)

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_current_release_version_in_latest_activity_prior_to_regression_is_not_overridden(
        self, plugin_is_regression, mock_send_activity_notifications_delay
    ):
        """
        Ensure that when a regression occurs, the release on the latest resolution
        activity prior to that regression is overridden with the release the
        regression occurred in, but the `current_release_version` value used for
        semver is not lost in the update.
        """
        plugin_is_regression.return_value = True

        # Create a release and a group associated with it
        old_release = self.create_release(
            version="a", date_added=timezone.now() - timedelta(minutes=30)
        )
        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group
        group.update(status=GroupStatus.RESOLVED, substatus=None)

        # Resolve the group in old_release
        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": "", "current_release_version": "pre foobar"},
        )

        # Create a regression
        manager = EventManager(
            make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.UNRESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data["version"] == "b"
        assert activity.data["current_release_version"] == "pre foobar"

        regressed_activity = Activity.objects.get(
            group=group, type=ActivityType.SET_REGRESSION.value
        )
        assert regressed_activity.data["version"] == "b"

        mock_send_activity_notifications_delay.assert_called_once_with(regressed_activity.id)

    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_resolved_in_release_regression_activity_follows_semver(self, plugin_is_regression):
        """
        The issue was marked resolved in 1.0.0 and the regression occurred in 2.0.0.
        If the project follows semver, the regression activity should have
        `follows_semver` set. We also record which version the issue was resolved
        in as `resolved_in_version`. This allows the UI to say the issue was
        resolved in 1.0.0, regressed in 2.0.0, and that the versions were compared
        using semver.
        """
        plugin_is_regression.return_value = True

        # Create a release and a group associated with it
        old_release = self.create_release(
            version="foo@1.0.0", date_added=timezone.now() - timedelta(minutes=30)
        )
        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group
        group.update(status=GroupStatus.RESOLVED, substatus=None)

        # Resolve the group in old_release
        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": "foo@1.0.0"},
        )

        # Create a regression
        manager = EventManager(
            make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="foo@2.0.0")
        )
        event = manager.save(self.project.id)
        assert event.group_id == group.id

        group = Group.objects.get(id=group.id)
        assert group.status == GroupStatus.UNRESOLVED

        activity = Activity.objects.get(id=activity.id)
        assert activity.data["version"] == "foo@1.0.0"

        regressed_activity = Activity.objects.get(
            group=group, type=ActivityType.SET_REGRESSION.value
        )
        assert regressed_activity.data["version"] == "foo@2.0.0"
        assert regressed_activity.data["follows_semver"] is True
        assert regressed_activity.data["resolved_in_version"] == "foo@1.0.0"
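
    # Taken together with the previous test: follows_semver is True only when
    # the resolution release and the regressing release both parse as semver
    # for the project (foo@1.0.0 vs foo@2.0.0 here); non-semver versions such
    # as "foobar" vs "b" fall back to date-based comparison and the flag stays
    # False.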

    def test_has_pending_commit_resolution(self):
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None
        assert group.first_release.version == "1.0"
        assert not has_pending_commit_resolution(group)

        # Add a commit with no associated release
        repo = self.create_repo(project=group.project)
        commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="a" * 40
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=commit.id,
            relationship=GroupLink.Relationship.resolves,
        )
        assert has_pending_commit_resolution(group)

    def test_multiple_pending_commit_resolution(self):
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None

        # Add a few commits with no associated release
        repo = self.create_repo(project=group.project)
        for key in ["a", "b", "c"]:
            commit = Commit.objects.create(
                organization_id=group.project.organization_id,
                repository_id=repo.id,
                key=key * 40,
            )
            GroupLink.objects.create(
                group_id=group.id,
                project_id=group.project_id,
                linked_type=GroupLink.LinkedType.commit,
                linked_id=commit.id,
                relationship=GroupLink.Relationship.resolves,
            )

        pending = has_pending_commit_resolution(group)
        assert pending

        # Most recent commit has been associated with a release
        latest_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="d" * 40
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=latest_commit.id,
            relationship=GroupLink.Relationship.resolves,
        )
        ReleaseCommit.objects.create(
            organization_id=group.project.organization_id,
            release=group.first_release,
            commit=latest_commit,
            order=0,
        )

        pending = has_pending_commit_resolution(group)
        assert pending is False
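
    # In other words, a group stays "pending commit resolution" while it is
    # linked to resolving commits none of which has landed in a release; as
    # the tests below exercise, commits that share a pull request with the
    # resolving commit are considered too, and a single released commit is
    # enough to clear the pending state.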

    def test_has_pending_commit_resolution_issue_regression(self):
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None
        repo = self.create_repo(project=group.project)

        # The commit that resolved the issue is part of a PR, but all commits
        # within the PR are unreleased.
        commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="a" * 40
        )
        second_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="b" * 40
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=commit.id,
            relationship=GroupLink.Relationship.resolves,
        )

        pr = PullRequest.objects.create(
            organization_id=group.project.organization_id,
            repository_id=repo.id,
            key="1",
        )

        PullRequestCommit.objects.create(pull_request_id=pr.id, commit_id=commit.id)
        PullRequestCommit.objects.create(pull_request_id=pr.id, commit_id=second_commit.id)

        assert PullRequestCommit.objects.filter(pull_request_id=pr.id, commit_id=commit.id).exists()
        assert PullRequestCommit.objects.filter(
            pull_request_id=pr.id, commit_id=second_commit.id
        ).exists()
        assert not ReleaseCommit.objects.filter(commit__pullrequestcommit__id=commit.id).exists()
        assert not ReleaseCommit.objects.filter(
            commit__pullrequestcommit__id=second_commit.id
        ).exists()

        pending = has_pending_commit_resolution(group)
        assert pending

    def test_has_pending_commit_resolution_issue_regression_released_commits(self):
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)
        group = event.group
        assert group is not None
        release = self.create_release(project=self.project, version="1.1")

        repo = self.create_repo(project=group.project)

        # Commit 1 is part of the PR, resolves the issue in its commit message,
        # and is unreleased.
        commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="a" * 38
        )
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.commit,
            linked_id=commit.id,
            relationship=GroupLink.Relationship.resolves,
        )

        # Commit 2 is part of the PR, does not resolve the issue, and is released.
        released_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="b" * 38
        )

        # Commit 3 is part of the PR, does not resolve the issue, and is unreleased.
        unreleased_commit = Commit.objects.create(
            organization_id=group.project.organization_id, repository_id=repo.id, key="c" * 38
        )

        pr = PullRequest.objects.create(
            organization_id=group.project.organization_id,
            repository_id=repo.id,
            key="19",
        )

        PullRequestCommit.objects.create(pull_request_id=pr.id, commit_id=commit.id)

        released_pr_commit = PullRequestCommit.objects.create(
            pull_request_id=pr.id, commit_id=released_commit.id
        )

        unreleased_pr_commit = PullRequestCommit.objects.create(
            pull_request_id=pr.id, commit_id=unreleased_commit.id
        )

        ReleaseCommit.objects.create(
            organization_id=group.project.organization_id,
            release=release,
            commit=released_commit,
            order=1,
        )

        assert Commit.objects.all().count() == 3
        assert PullRequestCommit.objects.filter(pull_request_id=pr.id, commit_id=commit.id).exists()
        assert PullRequestCommit.objects.filter(
            pull_request_id=pr.id, commit_id=released_commit.id
        ).exists()
        assert PullRequestCommit.objects.filter(commit__id=unreleased_pr_commit.commit.id).exists()
        assert ReleaseCommit.objects.filter(
            commit__pullrequestcommit__id=released_pr_commit.id
        ).exists()

        pending = has_pending_commit_resolution(group)
        assert pending is False

    @mock.patch("sentry.integrations.example.integration.ExampleIntegration.sync_status_outbound")
    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_marks_as_unresolved_with_new_release_with_integration(
        self,
        plugin_is_regression,
        mock_send_activity_notifications_delay,
        mock_sync_status_outbound,
    ):
        plugin_is_regression.return_value = True

        old_release = Release.objects.create(
            version="a",
            organization_id=self.project.organization_id,
            date_added=timezone.now() - timedelta(minutes=30),
        )
        old_release.add_project(self.project)

        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
                release=old_release.version,
            )
        )
        event = manager.save(self.project.id)
        assert event.group is not None
        group = event.group

        org = group.organization

        integration = Integration.objects.create(provider="example", name="Example")
        integration.add_organization(org, self.user)
        OrganizationIntegration.objects.filter(
            integration_id=integration.id, organization_id=group.organization.id
        ).update(
            config={
                "sync_comments": True,
                "sync_status_outbound": True,
                "sync_status_inbound": True,
                "sync_assignee_outbound": True,
                "sync_assignee_inbound": True,
            }
        )

        external_issue = ExternalIssue.objects.get_or_create(
            organization_id=org.id, integration_id=integration.id, key="APP-%s" % group.id
        )[0]

        GroupLink.objects.get_or_create(
            group_id=group.id,
            project_id=group.project_id,
            linked_type=GroupLink.LinkedType.issue,
            linked_id=external_issue.id,
            relationship=GroupLink.Relationship.references,
        )[0]

        group.update(status=GroupStatus.RESOLVED, substatus=None)

        resolution = GroupResolution.objects.create(release=old_release, group=group)
        activity = Activity.objects.create(
            group=group,
            project=group.project,
            type=ActivityType.SET_RESOLVED_IN_RELEASE.value,
            ident=resolution.id,
            data={"version": ""},
        )

        manager = EventManager(
            make_event(
                event_id="b" * 32, checksum="a" * 32, timestamp=time(), release=old_release.version
            )
        )

        with self.tasks():
            with self.feature({"organizations:integrations-issue-sync": True}):
                event = manager.save(self.project.id)
                assert event.group_id == group.id

                group = Group.objects.get(id=group.id)
                assert group.status == GroupStatus.RESOLVED

                activity = Activity.objects.get(id=activity.id)
                assert activity.data["version"] == ""

                assert GroupResolution.objects.filter(group=group).exists()

                manager = EventManager(
                    make_event(event_id="c" * 32, checksum="a" * 32, timestamp=time(), release="b")
                )
                event = manager.save(self.project.id)
                assert event.group is not None
                mock_sync_status_outbound.assert_called_once_with(
                    external_issue, False, event.group.project_id
                )
                assert event.group_id == group.id

                group = Group.objects.get(id=group.id)
                assert group.status == GroupStatus.UNRESOLVED

                activity = Activity.objects.get(id=activity.id)
                assert activity.data["version"] == "b"

                assert not GroupResolution.objects.filter(group=group).exists()

                activity = Activity.objects.get(group=group, type=ActivityType.SET_REGRESSION.value)
                mock_send_activity_notifications_delay.assert_called_once_with(activity.id)

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_does_not_mark_as_unresolved_with_pending_commit(
        self, plugin_is_regression, mock_send_activity_notifications_delay
    ):
        plugin_is_regression.return_value = True

        repo = self.create_repo(project=self.project)
        commit = self.create_commit(repo=repo)

        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
            )
        )
        event = manager.save(self.project.id)
        group = event.group
        assert group is not None

        group.update(status=GroupStatus.RESOLVED, substatus=None)
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_id=commit.id,
            linked_type=GroupLink.LinkedType.commit,
            relationship=GroupLink.Relationship.resolves,
        )

        manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=time()))
        event = manager.save(self.project.id)
        assert event.group is not None
        assert event.group_id == group.id

        assert Group.objects.get(id=group.id).status == GroupStatus.RESOLVED

    @mock.patch("sentry.tasks.activity.send_activity_notifications.delay")
    @mock.patch("sentry.event_manager.plugin_is_regression")
    def test_mark_as_unresolved_with_released_commit(
        self, plugin_is_regression, mock_send_activity_notifications_delay
    ):
        plugin_is_regression.return_value = True

        release = self.create_release(project=self.project)
        repo = self.create_repo(project=self.project)
        commit = self.create_commit(repo=repo, release=release, project=self.project)

        manager = EventManager(
            make_event(
                event_id="a" * 32,
                checksum="a" * 32,
                timestamp=time() - 50000,  # need to work around active_at
            )
        )
        event = manager.save(self.project.id)
        group = event.group
        assert group is not None

        group.update(status=GroupStatus.RESOLVED, substatus=None)
        GroupLink.objects.create(
            group_id=group.id,
            project_id=group.project_id,
            linked_id=commit.id,
            linked_type=GroupLink.LinkedType.commit,
            relationship=GroupLink.Relationship.resolves,
        )

        manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=time()))
        event = manager.save(self.project.id)
        assert event.group is not None
        assert event.group_id == group.id

        assert Group.objects.get(id=group.id).status == GroupStatus.UNRESOLVED

    @mock.patch("sentry.models.Group.is_resolved")
    def test_unresolves_group_with_auto_resolve(self, mock_is_resolved):
        ts = time() - 100
        mock_is_resolved.return_value = False
        manager = EventManager(make_event(event_id="a" * 32, checksum="a" * 32, timestamp=ts))
        with self.tasks():
            event = manager.save(self.project.id)
        assert event.group is not None

        mock_is_resolved.return_value = True
        manager = EventManager(make_event(event_id="b" * 32, checksum="a" * 32, timestamp=ts + 100))
        with self.tasks():
            event2 = manager.save(self.project.id)
        assert event2.group is not None
        assert event.group_id == event2.group_id

        group = Group.objects.get(id=event.group.id)
        assert group.active_at.replace(second=0) == event2.datetime.replace(second=0)
        assert group.active_at.replace(second=0) != event.datetime.replace(second=0)

    def test_invalid_transaction(self):
        dict_input = {"messages": "foo"}
        manager = EventManager(make_event(transaction=dict_input))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.transaction is None

    def test_transaction_as_culprit(self):
        manager = EventManager(make_event(transaction="foobar"))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.transaction == "foobar"
        assert event.culprit == "foobar"

    def test_culprit_is_not_transaction(self):
        manager = EventManager(make_event(culprit="foobar"))
        manager.normalize()
        event1 = manager.save(self.project.id)
        assert event1.transaction is None
        assert event1.culprit == "foobar"
  994. def test_culprit_after_stacktrace_processing(self):
  995. from sentry.grouping.enhancer import Enhancements
  996. enhancement = Enhancements.from_config_string(
  997. """
  998. function:in_app_function +app
  999. function:not_in_app_function -app
  1000. """,
  1001. )
  1002. manager = EventManager(
  1003. make_event(
  1004. platform="native",
  1005. exception={
  1006. "values": [
  1007. {
  1008. "type": "Hello",
  1009. "stacktrace": {
  1010. "frames": [
  1011. {
  1012. "function": "not_in_app_function",
  1013. },
  1014. {
  1015. "function": "in_app_function",
  1016. },
  1017. ]
  1018. },
  1019. }
  1020. ]
  1021. },
  1022. )
  1023. )
  1024. manager.normalize()
  1025. manager.get_data()["grouping_config"] = {
  1026. "enhancements": enhancement.dumps(),
  1027. "id": "legacy:2019-03-12",
  1028. }
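        # Injecting `grouping_config` into the payload makes save() apply the
        # enhancement rules above, leaving `in_app_function` as the only
        # in-app frame, which is what the culprit is derived from.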
        event1 = manager.save(self.project.id)
        assert event1.transaction is None
        assert event1.culprit == "in_app_function"

    def test_inferred_culprit_from_empty_stacktrace(self):
        manager = EventManager(make_event(stacktrace={"frames": []}))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.culprit == ""

    def test_transaction_and_culprit(self):
        manager = EventManager(make_event(transaction="foobar", culprit="baz"))
        manager.normalize()
        event1 = manager.save(self.project.id)
        assert event1.transaction == "foobar"
        assert event1.culprit == "baz"

    def test_release_with_empty_version(self):
        cases = ["", " ", "\t", "\n"]
        for case in cases:
            event = self.make_release_event(case, self.project.id)
            assert not event.group.first_release
        assert Release.objects.filter(projects__in=[self.project.id]).count() == 0
        assert Release.objects.filter(organization_id=self.project.organization_id).count() == 0

    def test_first_release(self):
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)

        group = event.group
        assert group is not None
        assert group.first_release.version == "1.0"

        event = self.make_release_event("2.0", project_id)

        group = event.group
        assert group is not None
        assert group.first_release.version == "1.0"

    def test_release_project_slug(self):
        project = self.create_project(name="foo")
        release = Release.objects.create(version="foo-1.0", organization=project.organization)
        release.add_project(project)

        event = self.make_release_event("1.0", project.id)

        group = event.group
        assert group is not None
        assert group.first_release.version == "foo-1.0"
        release_tag = [v for k, v in event.tags if k == "sentry:release"][0]
        assert release_tag == "foo-1.0"

        event = self.make_release_event("2.0", project.id)

        group = event.group
        assert group is not None
        assert group.first_release.version == "foo-1.0"

    def test_release_project_slug_long(self):
        project = self.create_project(name="foo")
        partial_version_len = MAX_VERSION_LENGTH - 4
        release = Release.objects.create(
            version="foo-{}".format("a" * partial_version_len), organization=project.organization
        )
        release.add_project(project)

        event = self.make_release_event("a" * partial_version_len, project.id)

        group = event.group
        assert group is not None
        assert group.first_release.version == "foo-{}".format("a" * partial_version_len)
        release_tag = [v for k, v in event.tags if k == "sentry:release"][0]
        assert release_tag == "foo-{}".format("a" * partial_version_len)

    def test_group_release_no_env(self):
        project_id = self.project.id
        event = self.make_release_event("1.0", project_id)

        release = Release.objects.get(version="1.0", projects=event.project_id)
        assert GroupRelease.objects.filter(
            release_id=release.id, group_id=event.group_id, environment=""
        ).exists()

        # ensure we're not erroring on second creation
        event = self.make_release_event("1.0", project_id)

    def test_group_release_with_env(self):
        manager = EventManager(make_event(release="1.0", environment="prod", event_id="a" * 32))
        manager.normalize()
        event = manager.save(self.project.id)

        release = Release.objects.get(version="1.0", projects=event.project_id)
        assert GroupRelease.objects.filter(
            release_id=release.id, group_id=event.group_id, environment="prod"
        ).exists()

        manager = EventManager(make_event(release="1.0", environment="staging", event_id="b" * 32))
        event = manager.save(self.project.id)

        release = Release.objects.get(version="1.0", projects=event.project_id)
        assert GroupRelease.objects.filter(
            release_id=release.id, group_id=event.group_id, environment="staging"
        ).exists()

    def test_tsdb(self):
        project = self.project
        manager = EventManager(
            make_event(
                fingerprint=["totally unique super duper fingerprint"],
                environment="totally unique super duper environment",
            )
        )
        event = manager.save(project.id)
        assert event.group is not None
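
        # `query` sums tsdb counts over the single bucket containing the
        # event's timestamp, so each model should have counted exactly one
        # event.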
        def query(model, key, **kwargs):
            return tsdb.backend.get_sums(
                model,
                [key],
                event.datetime,
                event.datetime,
                tenant_ids={"organization_id": 123, "referrer": "r"},
                **kwargs,
            )[key]

        assert query(TSDBModel.project, project.id) == 1
        assert query(TSDBModel.group, event.group.id) == 1

        environment_id = Environment.get_for_organization_id(
            event.project.organization_id, "totally unique super duper environment"
        ).id
        assert query(TSDBModel.project, project.id, environment_id=environment_id) == 1
        assert query(TSDBModel.group, event.group.id, environment_id=environment_id) == 1

    @pytest.mark.xfail
    def test_record_frequencies(self):
        project = self.project
        manager = EventManager(make_event())
        event = manager.save(project.id)

        assert tsdb.backend.get_most_frequent(
            TSDBModel.frequent_issues_by_project, (event.project.id,), event.datetime
        ) == {event.project.id: [(event.group_id, 1.0)]}

    def test_event_user(self):
        manager = EventManager(
            make_event(
                event_id="a", environment="totally unique environment", **{"user": {"id": "1"}}
            )
        )
        manager.normalize()
        with self.tasks():
            event = manager.save(self.project.id)
        assert event.group is not None

        environment_id = Environment.get_for_organization_id(
            event.project.organization_id, "totally unique environment"
        ).id

        assert tsdb.backend.get_distinct_counts_totals(
            TSDBModel.users_affected_by_group,
            (event.group.id,),
            event.datetime,
            event.datetime,
            tenant_ids={"referrer": "r", "organization_id": 123},
        ) == {event.group.id: 1}
        assert tsdb.backend.get_distinct_counts_totals(
            TSDBModel.users_affected_by_project,
            (event.project.id,),
            event.datetime,
            event.datetime,
            tenant_ids={"organization_id": 123, "referrer": "r"},
        ) == {event.project.id: 1}
        assert tsdb.backend.get_distinct_counts_totals(
            TSDBModel.users_affected_by_group,
            (event.group.id,),
            event.datetime,
            event.datetime,
            environment_id=environment_id,
            tenant_ids={"organization_id": 123, "referrer": "r"},
        ) == {event.group.id: 1}
        assert tsdb.backend.get_distinct_counts_totals(
            TSDBModel.users_affected_by_project,
            (event.project.id,),
            event.datetime,
            event.datetime,
            environment_id=environment_id,
            tenant_ids={"organization_id": 123, "referrer": "r"},
        ) == {event.project.id: 1}

        euser = EventUser.objects.get(project_id=self.project.id, ident="1")
        assert event.get_tag("sentry:user") == euser.tag_value

        # clear the cache otherwise the cached EventUser from prev
        # manager.save() will be used instead of jane
        cache.clear()

        # ensure event user is mapped to tags in second attempt
        manager = EventManager(make_event(event_id="b", **{"user": {"id": "1", "name": "jane"}}))
        manager.normalize()
        with self.tasks():
            event = manager.save(self.project.id)

        euser = EventUser.objects.get(id=euser.id)
        assert event.get_tag("sentry:user") == euser.tag_value
        assert euser.name == "jane"
        assert euser.ident == "1"

    def test_event_user_invalid_ip(self):
        manager = EventManager(
            make_event(
                event_id="a", environment="totally unique environment", **{"user": {"id": "1"}}
            )
        )
        manager.normalize()

        # This can happen as part of PII stripping, which happens after normalization
        manager._data["user"]["ip_address"] = "[ip]"

        with self.tasks():
            manager.save(self.project.id)

        euser = EventUser.objects.get(project_id=self.project.id)
        assert euser.ip_address is None

    def test_event_user_unicode_identifier(self):
        manager = EventManager(make_event(**{"user": {"username": "foô"}}))
        manager.normalize()
        with self.tasks():
            manager.save(self.project.id)
        euser = EventUser.objects.get(project_id=self.project.id)
        assert euser.username == "foô"

    def test_environment(self):
        manager = EventManager(make_event(**{"environment": "beta"}))
        manager.normalize()
        event = manager.save(self.project.id)
        assert dict(event.tags).get("environment") == "beta"

    def test_invalid_environment(self):
        manager = EventManager(make_event(**{"environment": "bad/name"}))
        manager.normalize()
        event = manager.save(self.project.id)
        assert dict(event.tags).get("environment") is None

    def test_invalid_tags(self):
        manager = EventManager(make_event(**{"tags": [42]}))
        manager.normalize()
        assert None in manager.get_data().get("tags", [])
        assert 42 not in manager.get_data().get("tags", [])
        event = manager.save(self.project.id)
        assert 42 not in event.tags
        assert None not in event.tags

    @mock.patch("sentry.event_manager.eventstream.backend.insert")
    def test_group_environment(self, eventstream_insert):
        release_version = "1.0"

        def save_event():
            manager = EventManager(
                make_event(
                    **{
                        "message": "foo",
                        "event_id": uuid.uuid1().hex,
                        "environment": "beta",
                        "release": release_version,
                    }
                )
            )
            manager.normalize()
            return manager.save(self.project.id)

        event = save_event()

        # Ensure the `GroupEnvironment` record was created.
        instance = GroupEnvironment.objects.get(
            group_id=event.group_id,
            environment_id=Environment.objects.get(
                organization_id=self.project.organization_id, name=event.get_tag("environment")
            ).id,
        )

        assert Release.objects.get(id=instance.first_release_id).version == release_version

        group_states1 = {
            "is_new": True,
            "is_regression": False,
            "is_new_group_environment": True,
        }

        # Ensure that the first event in the (group, environment) pair is
        # marked as being part of a new environment.
        eventstream_insert.assert_called_with(
            event=event,
            **group_states1,
            primary_hash="acbd18db4cc2f85cedef654fccc4a4d8",
            skip_consume=False,
            received_timestamp=event.data["received"],
            group_states=[{"id": event.group.id, **group_states1}],
        )

        event = save_event()

        group_states2 = {
            "is_new": False,
            "is_regression": False,
            "is_new_group_environment": False,
        }

        # Ensure that the next event in the (group, environment) pair is *not*
        # marked as being part of a new environment.
        eventstream_insert.assert_called_with(
            event=event,
            **group_states2,
            primary_hash="acbd18db4cc2f85cedef654fccc4a4d8",
            skip_consume=False,
            received_timestamp=event.data["received"],
            group_states=[{"id": event.group.id, **group_states2}],
        )

    def test_default_fingerprint(self):
        manager = EventManager(make_event())
        manager.normalize()
        event = manager.save(self.project.id)
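        # Normalization fills in the implicit fingerprint; "{{ default }}"
        # delegates grouping to the default strategy.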
        assert event.data.get("fingerprint") == ["{{ default }}"]

    def test_user_report_gets_environment(self):
        project = self.create_project()
        environment = Environment.objects.create(
            organization_id=project.organization_id, name="production"
        )
        environment.add_project(project)

        event_id = "a" * 32
        UserReport.objects.create(
            project_id=project.id,
            event_id=event_id,
            name="foo",
            email="bar@example.com",
            comments="It Broke!!!",
        )

        self.store_event(
            data=make_event(environment=environment.name, event_id=event_id), project_id=project.id
        )
        assert UserReport.objects.get(event_id=event_id).environment_id == environment.id

    def test_default_event_type(self):
        manager = EventManager(make_event(message="foo bar"))
        manager.normalize()
        data = manager.get_data()
        assert data["type"] == "default"
        event = manager.save(self.project.id)
        group = event.group
        assert group is not None
        assert group.data.get("type") == "default"
        assert group.data.get("metadata") == {"title": "foo bar"}

    def test_message_event_type(self):
        manager = EventManager(
            make_event(
                **{
                    "message": "",
                    "logentry": {"formatted": "foo bar", "message": "foo %s", "params": ["bar"]},
                }
            )
        )
        manager.normalize()
        data = manager.get_data()
        assert data["type"] == "default"
        event = manager.save(self.project.id)
        group = event.group
        assert group is not None
        assert group.data.get("type") == "default"
        assert group.data.get("metadata") == {"title": "foo bar"}

    def test_error_event_type(self):
        manager = EventManager(
            make_event(**{"exception": {"values": [{"type": "Foo", "value": "bar"}]}})
        )
        manager.normalize()
        data = manager.get_data()
        assert data["type"] == "error"
        event = manager.save(self.project.id)
        group = event.group
        assert group is not None
        assert group.data.get("type") == "error"
        assert group.data.get("metadata") == {
            "type": "Foo",
            "value": "bar",
            "display_title_with_tree_label": False,
        }

    def test_csp_event_type(self):
        manager = EventManager(
            make_event(
                **{
                    "csp": {
                        "effective_directive": "script-src",
                        "blocked_uri": "http://example.com",
                    },
                    # this is normally normalized in relay as part of ingest
                    "logentry": {"message": "Blocked 'script' from 'example.com'"},
                }
            )
        )
        manager.normalize()
        data = manager.get_data()
        assert data["type"] == "csp"
        event = manager.save(self.project.id)
        group = event.group
        assert group is not None
        assert group.data.get("type") == "csp"
        assert group.data.get("metadata") == {
            "directive": "script-src",
            "uri": "example.com",
            "message": "Blocked 'script' from 'example.com'",
        }
        assert group.title == "Blocked 'script' from 'example.com'"

    def test_transaction_event_type(self):
        manager = EventManager(
            make_event(
                **{
                    "transaction": "wait",
                    "contexts": {
                        "trace": {
                            "parent_span_id": "bce14471e0e9654d",
                            "op": "foobar",
                            "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                            "span_id": "bf5be759039ede9a",
                        }
                    },
                    "spans": [],
                    "timestamp": "2019-06-14T14:01:40Z",
                    "start_timestamp": "2019-06-14T14:01:40Z",
                    "type": "transaction",
                }
            )
        )
        manager.normalize()
        data = manager.get_data()
        assert data["type"] == "transaction"

    def test_transaction_event_span_grouping(self):
        manager = EventManager(
            make_event(
                **{
                    "transaction": "wait",
                    "contexts": {
                        "trace": {
                            "parent_span_id": "bce14471e0e9654d",
                            "op": "foobar",
                            "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                            "span_id": "bf5be759039ede9a",
                        }
                    },
                    "spans": [
                        {
                            "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                            "parent_span_id": "bf5be759039ede9a",
                            "span_id": "a" * 16,
                            "start_timestamp": 0,
                            "timestamp": 1,
                            "same_process_as_parent": True,
                            "op": "default",
                            "description": "span a",
                        },
                        {
                            "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                            "parent_span_id": "bf5be759039ede9a",
                            "span_id": "b" * 16,
                            "start_timestamp": 0,
                            "timestamp": 1,
                            "same_process_as_parent": True,
                            "op": "default",
                            "description": "span a",
                        },
                        {
                            "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                            "parent_span_id": "bf5be759039ede9a",
                            "span_id": "c" * 16,
                            "start_timestamp": 0,
                            "timestamp": 1,
                            "same_process_as_parent": True,
                            "op": "default",
                            "description": "span b",
                        },
                    ],
                    "timestamp": "2019-06-14T14:01:40Z",
                    "start_timestamp": "2019-06-14T14:01:40Z",
                    "type": "transaction",
                }
            )
        )
        manager.normalize()
        event = manager.save(self.project.id)
        data = event.data
        assert data["type"] == "transaction"
        assert data["span_grouping_config"]["id"] == "default:2022-10-27"
        spans = [{"hash": span["hash"]} for span in data["spans"]]
        # the basic strategy is to simply use the description
        assert spans == [{"hash": hash_values([span["description"]])} for span in data["spans"]]

    def test_sdk(self):
        manager = EventManager(make_event(**{"sdk": {"name": "sentry-unity", "version": "1.0"}}))
        manager.normalize()
        event = manager.save(self.project.id)

        assert event.data["sdk"] == {
            "name": "sentry-unity",
            "version": "1.0",
            "integrations": None,
            "packages": None,
        }

    def test_no_message(self):
        # test that the message is handled gracefully
        manager = EventManager(
            make_event(**{"message": None, "logentry": {"message": "hello world"}})
        )
        manager.normalize()
        event = manager.save(self.project.id)

        assert event.message == "hello world"

    def test_search_message_simple(self):
        manager = EventManager(
            make_event(
                **{
                    "message": "test",
                    "transaction": "sentry.tasks.process",
                }
            )
        )
        manager.normalize()
        event = manager.save(self.project.id)

        search_message = event.search_message
        assert "test" in search_message
        assert "sentry.tasks.process" in search_message

    def test_search_message_prefers_log_entry_message(self):
        manager = EventManager(
            make_event(
                **{
                    "message": "test",
                    "logentry": {"message": "hello world"},
                    "transaction": "sentry.tasks.process",
                }
            )
        )
        manager.normalize()
        event = manager.save(self.project.id)

        search_message = event.search_message
        assert "test" not in search_message
        assert "hello world" in search_message
        assert "sentry.tasks.process" in search_message

    def test_stringified_message(self):
        manager = EventManager(make_event(**{"message": 1234}))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.data["logentry"] == {"formatted": "1234", "message": None, "params": None}

    def test_bad_message(self):
        # test that invalid (non-string) messages are coerced to strings
        manager = EventManager(make_event(**{"message": ["asdf"]}))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.message == '["asdf"]'
        assert "logentry" in event.data

    def test_message_attribute_goes_to_interface(self):
        manager = EventManager(make_event(**{"message": "hello world"}))
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.data["logentry"] == {
            "formatted": "hello world",
            "message": None,
            "params": None,
        }

    def test_message_attribute_shadowing(self):
        # Logentry shadows the legacy message attribute.
        manager = EventManager(
            make_event(**{"message": "world hello", "logentry": {"message": "hello world"}})
        )
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.data["logentry"] == {
            "formatted": "hello world",
            "message": None,
            "params": None,
        }

    def test_message_attribute_interface_both_strings(self):
        manager = EventManager(
            make_event(**{"logentry": "a plain string", "message": "another string"})
        )
        manager.normalize()
        event = manager.save(self.project.id)
        assert event.data["logentry"] == {
            "formatted": "a plain string",
            "message": None,
            "params": None,
        }

    def test_throws_when_matches_discarded_hash(self):
        manager = EventManager(make_event(message="foo", event_id="a" * 32, fingerprint=["a" * 32]))
        with self.tasks():
            event = manager.save(self.project.id)

        group = Group.objects.get(id=event.group_id)
        tombstone = GroupTombstone.objects.create(
            project_id=group.project_id,
            level=group.level,
            message=group.message,
            culprit=group.culprit,
            data=group.data,
            previous_group_id=group.id,
        )
        GroupHash.objects.filter(group=group).update(group=None, group_tombstone_id=tombstone.id)

        manager = EventManager(
            make_event(message="foo", event_id="b" * 32, fingerprint=["a" * 32]),
            project=self.project,
        )
        manager.normalize()

        a1 = CachedAttachment(name="a1", data=b"hello")
        a2 = CachedAttachment(name="a2", data=b"world")
        cache_key = cache_key_for_event(manager.get_data())
        attachment_cache.set(cache_key, attachments=[a1, a2])
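
        # Since the fingerprint now matches a tombstoned hash, saving should
        # raise HashDiscarded and emit FILTERED outcomes for the event and for
        # each of the two cached attachments.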
        from sentry.utils.outcomes import track_outcome

        mock_track_outcome = mock.Mock(wraps=track_outcome)
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:event-attachments"):
                with self.tasks():
                    with pytest.raises(HashDiscarded):
                        event = manager.save(self.project.id, cache_key=cache_key)

        assert mock_track_outcome.call_count == 3

        for o in mock_track_outcome.mock_calls:
            assert o.kwargs["outcome"] == Outcome.FILTERED
            assert o.kwargs["reason"] == FilterStatKeys.DISCARDED_HASH

        o = mock_track_outcome.mock_calls[0]
        assert o.kwargs["category"] == DataCategory.ERROR

        for o in mock_track_outcome.mock_calls[1:]:
            assert o.kwargs["category"] == DataCategory.ATTACHMENT
            assert o.kwargs["quantity"] == 5

    def test_honors_crash_report_limit(self):
        from sentry.utils.outcomes import track_outcome

        mock_track_outcome = mock.Mock(wraps=track_outcome)

        # Allow exactly one crash report
        self.project.update_option("sentry:store_crash_reports", 1)

        manager = EventManager(
            make_event(message="foo", event_id="a" * 32, fingerprint=["a" * 32]),
            project=self.project,
        )
        manager.normalize()

        a1 = CachedAttachment(name="a1", data=b"hello", type="event.minidump")
        a2 = CachedAttachment(name="a2", data=b"world")
        cache_key = cache_key_for_event(manager.get_data())
        attachment_cache.set(cache_key, attachments=[a1, a2])

        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:event-attachments"):
                with self.tasks():
                    manager.save(self.project.id, cache_key=cache_key)

        # The first minidump should be accepted, since the limit is 1
        assert mock_track_outcome.call_count == 3
        for o in mock_track_outcome.mock_calls:
            assert o.kwargs["outcome"] == Outcome.ACCEPTED

        mock_track_outcome.reset_mock()

        manager = EventManager(
            make_event(message="foo", event_id="b" * 32, fingerprint=["a" * 32]),
            project=self.project,
        )
        manager.normalize()

        cache_key = cache_key_for_event(manager.get_data())
        attachment_cache.set(cache_key, attachments=[a1, a2])

        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:event-attachments"):
                with self.tasks():
                    event = manager.save(self.project.id, cache_key=cache_key)

        assert event.data["metadata"]["stripped_crash"] is True
        assert mock_track_outcome.call_count == 3

        o = mock_track_outcome.mock_calls[0]
        assert o.kwargs["outcome"] == Outcome.FILTERED
        assert o.kwargs["category"] == DataCategory.ATTACHMENT
        assert o.kwargs["reason"] == FilterStatKeys.CRASH_REPORT_LIMIT

        for o in mock_track_outcome.mock_calls[1:]:
            assert o.kwargs["outcome"] == Outcome.ACCEPTED

    def test_event_accepted_outcome(self):
        manager = EventManager(make_event(message="foo"))
        manager.normalize()

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            manager.save(self.project.id)

        assert_mock_called_once_with_partial(
            mock_track_outcome, outcome=Outcome.ACCEPTED, category=DataCategory.ERROR
        )

    def test_attachment_accepted_outcomes(self):
        manager = EventManager(make_event(message="foo"), project=self.project)
        manager.normalize()

        a1 = CachedAttachment(name="a1", data=b"hello")
        a2 = CachedAttachment(name="a2", data=b"limited", rate_limited=True)
        a3 = CachedAttachment(name="a3", data=b"world")

        cache_key = cache_key_for_event(manager.get_data())
        attachment_cache.set(cache_key, attachments=[a1, a2, a3])

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:event-attachments"):
                manager.save(self.project.id, cache_key=cache_key)

        assert mock_track_outcome.call_count == 3

        for o in mock_track_outcome.mock_calls:
            assert o.kwargs["outcome"] == Outcome.ACCEPTED

        for o in mock_track_outcome.mock_calls[:2]:
            assert o.kwargs["category"] == DataCategory.ATTACHMENT
            assert o.kwargs["quantity"] == 5

        final = mock_track_outcome.mock_calls[2]
        assert final.kwargs["category"] == DataCategory.ERROR

    def test_attachment_filtered_outcomes(self):
        manager = EventManager(make_event(message="foo"), project=self.project)
        manager.normalize()

        # Storing crash reports is disabled, which drops the minidump but
        # saves the other attachments.
        a1 = CachedAttachment(name="a1", data=b"minidump", type="event.minidump")
        a2 = CachedAttachment(name="a2", data=b"limited", rate_limited=True)
        a3 = CachedAttachment(name="a3", data=b"world")

        cache_key = cache_key_for_event(manager.get_data())
        attachment_cache.set(cache_key, attachments=[a1, a2, a3])

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:event-attachments"):
                manager.save(self.project.id, cache_key=cache_key)

        assert mock_track_outcome.call_count == 3

        # First outcome is the rejection of the minidump
        o = mock_track_outcome.mock_calls[0]
        assert o.kwargs["outcome"] == Outcome.FILTERED
        assert o.kwargs["category"] == DataCategory.ATTACHMENT
        assert o.kwargs["reason"] == FilterStatKeys.CRASH_REPORT_LIMIT

        # Second outcome is acceptance of the "a3" attachment
        o = mock_track_outcome.mock_calls[1]
        assert o.kwargs["outcome"] == Outcome.ACCEPTED
        assert o.kwargs["category"] == DataCategory.ATTACHMENT
        assert o.kwargs["quantity"] == 5

        # Last outcome is the event
        o = mock_track_outcome.mock_calls[2]
        assert o.kwargs["outcome"] == Outcome.ACCEPTED
        assert o.kwargs["category"] == DataCategory.ERROR

    def test_transaction_outcome_accepted(self):
        """
        Without metrics extraction, we count the number of accepted transaction
        events in the TRANSACTION data category. This maintains compatibility
        with Sentry installations that do not have a metrics pipeline.
        """
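        # The extraction flag is explicitly disabled below, so the accepted
        # outcome is expected in DataCategory.TRANSACTION rather than
        # TRANSACTION_INDEXED.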
        manager = EventManager(
            make_event(
                transaction="wait",
                contexts={
                    "trace": {
                        "parent_span_id": "bce14471e0e9654d",
                        "op": "foobar",
                        "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                        "span_id": "bf5be759039ede9a",
                    }
                },
                spans=[],
                timestamp=iso_format(before_now(minutes=5)),
                start_timestamp=iso_format(before_now(minutes=5)),
                type="transaction",
                platform="python",
            )
        )
        manager.normalize()

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature({"organizations:transaction-metrics-extraction": False}):
                manager.save(self.project.id)

        assert_mock_called_once_with_partial(
            mock_track_outcome, outcome=Outcome.ACCEPTED, category=DataCategory.TRANSACTION
        )

    def test_transaction_indexed_outcome_accepted(self):
        """
        With metrics extraction, we count the number of accepted transaction
        events in the TRANSACTION_INDEXED data category. The TRANSACTION data
        category contains the number of metrics from
        ``billing_metrics_consumer``.
        """
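        # Mirror image of test_transaction_outcome_accepted: with the flag
        # enabled, the stored event is counted as TRANSACTION_INDEXED.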
        manager = EventManager(
            make_event(
                transaction="wait",
                contexts={
                    "trace": {
                        "parent_span_id": "bce14471e0e9654d",
                        "op": "foobar",
                        "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                        "span_id": "bf5be759039ede9a",
                    }
                },
                spans=[],
                timestamp=iso_format(before_now(minutes=5)),
                start_timestamp=iso_format(before_now(minutes=5)),
                type="transaction",
                platform="python",
            )
        )
        manager.normalize()

        mock_track_outcome = mock.Mock()
        with mock.patch("sentry.event_manager.track_outcome", mock_track_outcome):
            with self.feature("organizations:transaction-metrics-extraction"):
                manager.save(self.project.id)

        assert_mock_called_once_with_partial(
            mock_track_outcome, outcome=Outcome.ACCEPTED, category=DataCategory.TRANSACTION_INDEXED
        )

    def test_checksum_rehashed(self):
        checksum = "invalid checksum hash"
        manager = EventManager(make_event(**{"checksum": checksum}))
        manager.normalize()
        event = manager.save(self.project.id)
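        # A checksum that is not a valid 32-character hash gets rehashed; both
        # the derived hash and the raw checksum value are kept as group hashes.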
        hashes = [gh.hash for gh in GroupHash.objects.filter(group=event.group)]
        assert sorted(hashes) == sorted([hash_from_values(checksum), checksum])

    def test_legacy_attributes_moved(self):
        event = make_event(
            release="my-release",
            environment="my-environment",
            site="whatever",
            server_name="foo.com",
            event_id=uuid.uuid1().hex,
        )
        manager = EventManager(event)
        event = manager.save(self.project.id)

        # release and environment stay top-level
        assert event.data["release"] == "my-release"
        assert event.data["environment"] == "my-environment"

        # site is a legacy attribute that is just a tag
        assert event.data.get("site") is None
        tags = dict(event.tags)
        assert tags["site"] == "whatever"
        assert event.data.get("server_name") is None
        tags = dict(event.tags)
        assert tags["server_name"] == "foo.com"

    @freeze_time()
    def test_save_issueless_event(self):
        manager = EventManager(
            make_event(
                transaction="wait",
                contexts={
                    "trace": {
                        "parent_span_id": "bce14471e0e9654d",
                        "op": "foobar",
                        "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                        "span_id": "bf5be759039ede9a",
                    }
                },
                spans=[],
                timestamp=iso_format(before_now(minutes=5)),
                start_timestamp=iso_format(before_now(minutes=5)),
                type="transaction",
                platform="python",
            )
        )

        event = manager.save(self.project.id)

        assert event.group is None
        assert (
            tsdb.backend.get_sums(
                TSDBModel.project,
                [self.project.id],
                event.datetime,
                event.datetime,
                tenant_ids={"organization_id": 123, "referrer": "r"},
            )[self.project.id]
            == 0
        )

    @freeze_time()
    def test_fingerprint_ignored(self):
        manager1 = EventManager(make_event(event_id="a" * 32, fingerprint="fingerprint1"))
        event1 = manager1.save(self.project.id)

        manager2 = EventManager(
            make_event(
                event_id="b" * 32,
                fingerprint="fingerprint1",
                transaction="wait",
                contexts={
                    "trace": {
                        "parent_span_id": "bce14471e0e9654d",
                        "op": "foobar",
                        "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                        "span_id": "bf5be759039ede9a",
                    }
                },
                spans=[],
                timestamp=iso_format(before_now(minutes=1)),
                start_timestamp=iso_format(before_now(minutes=1)),
                type="transaction",
                platform="python",
            )
        )
        event2 = manager2.save(self.project.id)

        assert event1.group is not None
        assert event2.group is None
        assert (
            tsdb.backend.get_sums(
                TSDBModel.project,
                [self.project.id],
                event1.datetime,
                event1.datetime,
                tenant_ids={"organization_id": 123, "referrer": "r"},
            )[self.project.id]
            == 1
        )

        assert (
            tsdb.backend.get_sums(
                TSDBModel.group,
                [event1.group.id],
                event1.datetime,
                event1.datetime,
                tenant_ids={"organization_id": 123, "referrer": "r"},
            )[event1.group.id]
            == 1
        )

    def test_category_match_in_app(self):
        """
        Regression test to ensure that grouping in-app enhancements work in
        principle.
        """
        from sentry.grouping.enhancer import Enhancements

        enhancement = Enhancements.from_config_string(
            """
            function:foo category=bar
            function:foo2 category=bar
            category:bar -app
            """,
        )

        event = make_event(
            platform="native",
            exception={
                "values": [
                    {
                        "type": "Hello",
                        "stacktrace": {
                            "frames": [
                                {
                                    "function": "foo",
                                    "in_app": True,
                                },
                                {"function": "bar"},
                            ]
                        },
                    }
                ]
            },
        )

        manager = EventManager(event)
        manager.normalize()
        manager.get_data()["grouping_config"] = {
            "enhancements": enhancement.dumps(),
            "id": "mobile:2021-02-12",
        }
        event1 = manager.save(self.project.id)
        assert event1.data["exception"]["values"][0]["stacktrace"]["frames"][0]["in_app"] is False

        event = make_event(
            platform="native",
            exception={
                "values": [
                    {
                        "type": "Hello",
                        "stacktrace": {
                            "frames": [
                                {
                                    "function": "foo2",
                                    "in_app": True,
                                },
                                {"function": "bar"},
                            ]
                        },
                    }
                ]
            },
        )

        manager = EventManager(event)
        manager.normalize()
        manager.get_data()["grouping_config"] = {
            "enhancements": enhancement.dumps(),
            "id": "mobile:2021-02-12",
        }
        event2 = manager.save(self.project.id)
        assert event2.data["exception"]["values"][0]["stacktrace"]["frames"][0]["in_app"] is False
        assert event1.group_id == event2.group_id

    def test_category_match_group(self):
        """
        Regression test to ensure categories are applied consistently and don't
        produce hash mismatches.
        """
        from sentry.grouping.enhancer import Enhancements

        enhancement = Enhancements.from_config_string(
            """
            function:foo category=foo_like
            category:foo_like -group
            """,
        )

        event = make_event(
            platform="native",
            exception={
                "values": [
                    {
                        "type": "Hello",
                        "stacktrace": {
                            "frames": [
                                {
                                    "function": "foo",
                                },
                                {
                                    "function": "bar",
                                },
                            ]
                        },
                    }
                ]
            },
        )

        manager = EventManager(event)
        manager.normalize()

        grouping_config = {
            "enhancements": enhancement.dumps(),
            "id": "mobile:2021-02-12",
        }

        manager.get_data()["grouping_config"] = grouping_config
        event1 = manager.save(self.project.id)

        event2 = Event(event1.project_id, event1.event_id, data=event1.data)
        assert event1.get_hashes().hashes == event2.get_hashes(grouping_config).hashes

    def test_write_none_tree_labels(self):
        """Write tree labels even if None"""
        event = make_event(
            platform="native",
            exception={
                "values": [
                    {
                        "type": "Hello",
                        "stacktrace": {
                            "frames": [
                                {
                                    "function": "<redacted>",
                                },
                                {
                                    "function": "<redacted>",
                                },
                            ]
                        },
                    }
                ]
            },
        )

        manager = EventManager(event)
        manager.normalize()
        manager.get_data()["grouping_config"] = {
            "id": "mobile:2021-02-12",
        }
        event = manager.save(self.project.id)

        assert event.data["hierarchical_tree_labels"] == [None]

    def test_synthetic_exception_detection(self):
        manager = EventManager(
            make_event(
                message="foo",
                event_id="b" * 32,
                exception={
                    "values": [
                        {
                            "type": "SIGABRT",
                            "mechanism": {"handled": False},
                            "stacktrace": {"frames": [{"function": "foo"}]},
                        }
                    ]
                },
            ),
            project=self.project,
        )
        manager.normalize()

        manager.get_data()["grouping_config"] = {
            "id": "mobile:2021-02-12",
        }
        event = manager.save(self.project.id)

        mechanism = event.interfaces["exception"].values[0].mechanism
        assert mechanism is not None
        assert mechanism.synthetic is True
        assert event.title == "foo"

    def test_auto_update_grouping(self):
        with override_settings(SENTRY_GROUPING_AUTO_UPDATE_ENABLED=False):
            # Start out with legacy grouping; with auto-update disabled, the
            # config should not change.
            self.project.update_option("sentry:grouping_config", LEGACY_GROUPING_CONFIG)

            manager = EventManager(
                make_event(
                    message="foo",
                    event_id="c" * 32,
                ),
                project=self.project,
            )
            manager.normalize()
            manager.save(self.project.id)

            # No update yet
            project = Project.objects.get(id=self.project.id)
            assert project.get_option("sentry:grouping_config") == LEGACY_GROUPING_CONFIG

        with override_settings(SENTRY_GROUPING_AUTO_UPDATE_ENABLED=1.0):
            # start out with legacy grouping, this should update us
            self.project.update_option("sentry:grouping_config", LEGACY_GROUPING_CONFIG)

            manager = EventManager(
                make_event(
                    message="foo",
                    event_id="c" * 32,
                ),
                project=self.project,
            )
            manager.normalize()
            manager.save(self.project.id)

            # This should have moved us back to the default grouping
            project = Project.objects.get(id=self.project.id)
            assert project.get_option("sentry:grouping_config") == DEFAULT_GROUPING_CONFIG

            # and we should see an audit log record.
            record = AuditLogEntry.objects.first()
            assert record.event == audit_log.get_event_id("PROJECT_EDIT")
            assert record.data["sentry:grouping_config"] == DEFAULT_GROUPING_CONFIG
            assert record.data["slug"] == self.project.slug

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_creation(self):
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
            )
            data = event.data
            assert event.get_event_type() == "transaction"
            assert event.transaction == "/books/"
            assert data["span_grouping_config"]["id"] == "default:2022-10-27"
            span_hashes = [span["hash"] for span in data["spans"]]
            assert span_hashes == [
                "0f43fb6f6e01ca52",
                "3dc5dd68b38e1730",
                "424c6ae1641f0f0e",
                "d5da18d7274b34a1",
                "ac72fc0a4f5fe381",
                "ac1468d8e11a0553",
                "d8681423cab4275f",
                "e853d2eb7fb9ebb0",
                "6a992d5529f459a4",
                "b640a0ce465fa2a4",
                "a3605e201eaf6c45",
                "061710eb39a66089",
                "c031296784b22ea9",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
                "d74ed7012596c3fb",
            ]
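            # The repeated trailing hashes correspond to the ten offending N+1
            # query spans asserted in the evidence below.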
            assert event.group
            group = event.group
            assert group is not None
            assert group.title == "N+1 Query"
            assert (
                group.message
                == "/books/ N+1 Query SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21"
            )
            assert group.culprit == "/books/"
            assert group.get_event_type() == "transaction"
            description = "SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21"
            assert group.get_event_metadata() == {
                "location": "/books/",
                "title": "N+1 Query",
                "value": description,
            }
            assert (
                event.search_message
                == "/books/ N+1 Query SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21"
            )
            assert group.location() == "/books/"
            assert group.level == 40
            assert group.issue_category == GroupCategory.PERFORMANCE
            assert group.issue_type == PerformanceNPlusOneGroupType
            assert event.occurrence
            assert event.occurrence.evidence_display == [
                IssueEvidence(
                    name="Offending Spans",
                    value="db - SELECT `books_author`.`id`, `books_author`.`name` "
                    "FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
                    important=True,
                )
            ]
            assert event.occurrence.evidence_data == {
                "transaction_name": "/books/",
                "op": "db",
                "parent_span_ids": ["8dd7a5869a4f4583"],
                "parent_span": "django.view - index",
                "cause_span_ids": ["9179e43ae844b174"],
                "offender_span_ids": [
                    "b8be6138369491dd",
                    "b2d4826e7b618f1b",
                    "b3fdeea42536dbf1",
                    "b409e78a092e642f",
                    "86d2ede57bbf48d4",
                    "8e554c84cdc9731e",
                    "94d6230f3f910e12",
                    "a210b87a2191ceb6",
                    "88a5ccaf25b9bd8f",
                    "bb32cf50fc56b296",
                ],
                "repeating_spans": "db - SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
                "repeating_spans_compact": "SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
                "num_repeating_spans": "10",
            }

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_update(self):
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
            )
            group = event.group
            assert group is not None
            assert group.issue_category == GroupCategory.PERFORMANCE
            assert group.issue_type == PerformanceNPlusOneGroupType
            group.data["metadata"] = {
                "location": "hi",
                "title": "lol",
            }
            group.culprit = "wat"
            group.message = "nope"
            group.save()
            assert group.location() == "hi"
            assert group.title == "lol"

            with self.tasks():
                self.create_performance_issue(
                    event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
                )

            # Make sure the original group is updated via buffers
            group.refresh_from_db()
            assert group.title == "N+1 Query"

            assert group.get_event_metadata() == {
                "location": "/books/",
                "title": "N+1 Query",
                "value": "SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
            }
            assert group.location() == "/books/"
            assert group.message == "nope"
            assert group.culprit == "/books/"

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_error_issue_no_associate_perf_event(self):
        """Test that you can't associate a performance event with an error issue"""
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"):
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
            )
            assert event.group is not None

            # sneakily make the group type wrong
            group = event.group
            assert group is not None
            group.type = ErrorGroupType.type_id
            group.save()
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view"))
            )

            assert event.group is None

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_no_associate_error_event(self):
        """Test that you can't associate an error event with a performance issue"""
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"), self.feature(
            {
                "projects:performance-suspect-spans-ingestion": True,
            }
        ):
            manager = EventManager(make_event())
            manager.normalize()
            event = manager.save(self.project.id)
            assert len(event.groups) == 1

            # sneakily make the group type wrong
            group = event.group
            assert group is not None
            group.type = PerformanceNPlusOneGroupType.type_id
            group.save()

            manager = EventManager(make_event())
            manager.normalize()
            event = manager.save(self.project.id)

            assert not event.group

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_creation_ignored(self):
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"), self.feature(
            {
                "projects:performance-suspect-spans-ingestion": True,
            }
        ):
            event = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view")),
                noise_limit=2,
            )
            assert event.get_event_type() == "transaction"
            assert event.group is None

    @override_options({"performance.issues.all.problem-detection": 1.0})
    @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0})
    def test_perf_issue_creation_over_ignored_threshold(self):
        with mock.patch("sentry_sdk.tracing.Span.containing_transaction"), self.feature(
            {
                "projects:performance-suspect-spans-ingestion": True,
            }
        ):
            event_1 = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=3
            )
            event_2 = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=3
            )
            event_3 = self.create_performance_issue(
                event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=3
            )
            assert event_1.get_event_type() == "transaction"
            assert event_2.get_event_type() == "transaction"
            assert event_3.get_event_type() == "transaction"
            # only the third occurrence of the hash should create the group
            assert event_1.group is None
            assert event_2.group is None
            assert event_3.group is not None

    @override_options(
        {
            "performance.issues.slow_db_query.problem-creation": 1.0,
            "performance_issue_creation_rate": 1.0,
            "performance.issues.all.problem-detection": 1.0,
        }
    )
    def test_perf_issue_slow_db_issue_is_created(self):
        def attempt_to_generate_slow_db_issue() -> Event:
            for _ in range(100):
                event = self.create_performance_issue(
                    event_data=make_event(**get_event("slow-db-spans")),
                    issue_type=PerformanceSlowDBQueryGroupType,
                )
                last_event = event

            return last_event
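
        # The 100 attempts above appear to account for the detector's noise
        # threshold: a single slow-db occurrence is not expected to create a
        # group. (Assumption inferred from the loop, not from detector config.)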
        # Should not create the group without the feature flag
        last_event = attempt_to_generate_slow_db_issue()
        assert not last_event.group

        with self.feature({"organizations:performance-slow-db-issue": True}):
            last_event = attempt_to_generate_slow_db_issue()

        assert last_event.group
        assert last_event.group.type == PerformanceSlowDBQueryGroupType.type_id

    @patch("sentry.event_manager.metrics.incr")
    def test_new_group_metrics_logging(self, mock_metrics_incr: MagicMock) -> None:
        manager = EventManager(make_event(platform="javascript"))
        manager.normalize()
        manager.save(self.project.id)

        mock_metrics_incr.assert_any_call(
            "group.created",
            skip_internal=True,
            tags={
                "platform": "javascript",
            },
        )


class AutoAssociateCommitTest(TestCase, EventManagerTestMixin):
    def setUp(self):
        super().setUp()
        self.repo_name = "example"
        self.project = self.create_project(name="foo")
        self.integration = Integration.objects.create(
            provider="github", name=self.repo_name, external_id="654321"
        )
        self.org_integration = self.integration.add_organization(
            self.project.organization, self.user
        )
        self.repo = self.create_repo(
            project=self.project,
            name=self.repo_name,
            provider="integrations:github",
            integration_id=self.integration.id,
        )
        self.repo.update(config={"name": self.repo_name})
        self.create_code_mapping(
            project=self.project,
            repo=self.repo,
            organization_integration=self.org_integration,
            stack_root="/stack/root",
            source_root="/source/root",
            default_branch="main",
        )
        stub_installation_token()
        responses.add(
            "GET",
            f"https://api.github.com/repos/{self.repo_name}/commits/{LATER_COMMIT_SHA}",
            json=json.loads(GET_COMMIT_EXAMPLE),
        )
        responses.add(
            "GET",
            f"https://api.github.com/repos/{self.repo_name}/commits/{EARLIER_COMMIT_SHA}",
            json=json.loads(GET_PRIOR_COMMIT_EXAMPLE),
        )
        self.dummy_commit_sha = "a" * 40
        responses.add(
            responses.GET,
            f"https://api.github.com/repos/{self.repo_name}/compare/{self.dummy_commit_sha}...{LATER_COMMIT_SHA}",
            json=json.loads(COMPARE_COMMITS_EXAMPLE_WITH_INTERMEDIATE),
        )
        responses.add(
            responses.GET,
            f"https://api.github.com/repos/{self.repo_name}/commits?sha={LATER_COMMIT_SHA}",
            json=json.loads(GET_LAST_2_COMMITS_EXAMPLE),
        )
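
        # The stubs above cover the GitHub endpoints exercised by commit
        # auto-association: single-commit lookups for both SHAs, the compare
        # endpoint, and the commit listing for the later SHA.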
  2359. def _create_first_release_commit(self):
  2360. # Create a release
  2361. release = self.create_release(project=self.project, version="abcabcabc")
  2362. # Create a commit
  2363. commit = self.create_commit(
  2364. repo=self.repo,
  2365. key=self.dummy_commit_sha,
  2366. )
  2367. # Make a release head commit
  2368. ReleaseHeadCommit.objects.create(
  2369. organization_id=self.project.organization.id,
  2370. repository_id=self.repo.id,
  2371. release=release,
  2372. commit=commit,
  2373. )
  2374. @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
  2375. @responses.activate
  2376. def test_autoassign_commits_on_sha_release_version(self, get_jwt):
  2377. with self.feature("projects:auto-associate-commits-to-release"):
  2378. self._create_first_release_commit()
  2379. # Make a new release with SHA checksum
  2380. with self.tasks():
  2381. _ = self.make_release_event(LATER_COMMIT_SHA, self.project.id)
  2382. release2 = Release.objects.get(version=LATER_COMMIT_SHA)
  2383. commit_list = list(
  2384. Commit.objects.filter(releasecommit__release=release2).order_by(
  2385. "releasecommit__order"
  2386. )
  2387. )
  2388. assert len(commit_list) == 2
  2389. assert commit_list[0].repository_id == self.repo.id
  2390. assert commit_list[0].organization_id == self.project.organization.id
  2391. assert commit_list[0].key == EARLIER_COMMIT_SHA
  2392. assert commit_list[1].repository_id == self.repo.id
  2393. assert commit_list[1].organization_id == self.project.organization.id
  2394. assert commit_list[1].key == LATER_COMMIT_SHA
  2395. @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
  2396. @responses.activate
  2397. def test_autoassign_commits_first_release(self, get_jwt):
  2398. with self.feature("projects:auto-associate-commits-to-release"):
  2399. with self.tasks():
  2400. _ = self.make_release_event(LATER_COMMIT_SHA, self.project.id)
  2401. release2 = Release.objects.get(version=LATER_COMMIT_SHA)
  2402. commit_list = list(
  2403. Commit.objects.filter(releasecommit__release=release2).order_by(
  2404. "releasecommit__order"
  2405. )
  2406. )
  2407. assert len(commit_list) == 2
  2408. assert commit_list[0].repository_id == self.repo.id
  2409. assert commit_list[0].organization_id == self.project.organization.id
  2410. assert commit_list[0].key == EARLIER_COMMIT_SHA
  2411. assert commit_list[1].repository_id == self.repo.id
  2412. assert commit_list[1].organization_id == self.project.organization.id
  2413. assert commit_list[1].key == LATER_COMMIT_SHA
  2414. @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
  2415. @responses.activate
  2416. def test_autoassign_commits_not_a_sha(self, get_jwt):
  2417. SHA = "not-a-sha"
  2418. with self.feature("projects:auto-associate-commits-to-release"):
  2419. with self.tasks():
  2420. _ = self.make_release_event(SHA, self.project.id)
  2421. release2 = Release.objects.get(version=SHA)
  2422. commit_list = list(
  2423. Commit.objects.filter(releasecommit__release=release2).order_by(
  2424. "releasecommit__order"
  2425. )
  2426. )
  2427. assert len(commit_list) == 0

    @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
    @responses.activate
    def test_autoassign_commit_not_found(self, get_jwt):
        SHA = "b" * 40
        responses.add(
            "GET",
            f"https://api.github.com/repos/{self.repo_name}/commits/{SHA}",
            status=HTTP_404_NOT_FOUND,
        )
        with self.feature("projects:auto-associate-commits-to-release"):
            with self.tasks():
                _ = self.make_release_event(SHA, self.project.id)

            release2 = Release.objects.get(version=SHA)
            commit_list = list(
                Commit.objects.filter(releasecommit__release=release2).order_by(
                    "releasecommit__order"
                )
            )

            assert len(commit_list) == 0
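            # With the commit lookup mocked to 404, auto-association aborts cleanly:
            # the release row exists, but no commits are attached to it.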

    @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
    @responses.activate
    def test_autoassign_commits_release_conflict(self, get_jwt):
        # The release already exists but none of its commits do; we should still
        # associate the commits with the preexisting release.
        with self.feature("projects:auto-associate-commits-to-release"):
            preexisting_release = self.create_release(
                project=self.project, version=LATER_COMMIT_SHA
            )
            with self.tasks():
                _ = self.make_release_event(LATER_COMMIT_SHA, self.project.id)

            commit_releases = Release.objects.filter(version=LATER_COMMIT_SHA).all()
            assert len(commit_releases) == 1
            assert commit_releases[0].id == preexisting_release.id
            commit_list = list(
                Commit.objects.filter(releasecommit__release=preexisting_release).order_by(
                    "releasecommit__order"
                )
            )

            assert len(commit_list) == 2
            assert commit_list[0].repository_id == self.repo.id
            assert commit_list[0].organization_id == self.project.organization.id
            assert commit_list[0].key == EARLIER_COMMIT_SHA
            assert commit_list[1].repository_id == self.repo.id
            assert commit_list[1].organization_id == self.project.organization.id
            assert commit_list[1].key == LATER_COMMIT_SHA

    @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
    @responses.activate
    def test_autoassign_commits_commit_conflict(self, get_jwt):
        # A commit tied to the release somehow exists before the release itself is
        # created; auto-association should tie the existing commit to the new release.
        with self.feature("projects:auto-associate-commits-to-release"):
            author = CommitAuthor.objects.create(
                organization_id=self.organization.id,
                email="support@github.com",
                name="Monalisa Octocat",
            )

            # Values taken from the commit generated from the GitHub response fixtures.
            preexisting_commit = self.create_commit(
                repo=self.repo,
                project=self.project,
                author=author,
                key=EARLIER_COMMIT_SHA,
                message="Fix all the bugs",
                date_added=datetime(2011, 4, 14, 16, 0, 49, tzinfo=timezone.utc),
            )

            with self.tasks():
                self.make_release_event(LATER_COMMIT_SHA, self.project.id)

            new_release = Release.objects.get(version=LATER_COMMIT_SHA)
            commit_list = list(
                Commit.objects.filter(releasecommit__release=new_release).order_by(
                    "releasecommit__order"
                )
            )

            assert len(commit_list) == 2
            assert commit_list[0].id == preexisting_commit.id
            assert commit_list[0].repository_id == self.repo.id
            assert commit_list[0].organization_id == self.project.organization.id
            assert commit_list[0].key == EARLIER_COMMIT_SHA
            assert commit_list[1].repository_id == self.repo.id
            assert commit_list[1].organization_id == self.project.organization.id
            assert commit_list[1].key == LATER_COMMIT_SHA

    @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
    @responses.activate
    def test_autoassign_commits_feature_not_enabled(self, get_jwt):
        with self.feature({"projects:auto-associate-commits-to-release": False}):
            with self.tasks():
                _ = self.make_release_event(LATER_COMMIT_SHA, self.project.id)

            release2 = Release.objects.get(version=LATER_COMMIT_SHA)
            commit_list = list(
                Commit.objects.filter(releasecommit__release=release2).order_by(
                    "releasecommit__order"
                )
            )

            assert len(commit_list) == 0

    @mock.patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1")
    @responses.activate
    def test_autoassign_commits_duplicate_events(self, get_jwt):
        with self.feature({"projects:auto-associate-commits-to-release": True}):
            with self.tasks():
                event1 = self.make_release_event(LATER_COMMIT_SHA, self.project.id)
                event2 = self.make_release_event(LATER_COMMIT_SHA, self.project.id)

            assert event1 != event2
            assert event1.release == event2.release

            releases = Release.objects.filter(version=LATER_COMMIT_SHA).all()
            assert len(releases) == 1
            commit_list = list(
                Commit.objects.filter(releasecommit__release=releases[0]).order_by(
                    "releasecommit__order"
                )
            )

            assert len(commit_list) == 2
            assert commit_list[0].repository_id == self.repo.id
            assert commit_list[0].organization_id == self.project.organization.id
            assert commit_list[0].key == EARLIER_COMMIT_SHA
            assert commit_list[1].repository_id == self.repo.id
            assert commit_list[1].organization_id == self.project.organization.id
            assert commit_list[1].key == LATER_COMMIT_SHA


@region_silo_test
class ReleaseIssueTest(TestCase):
    def setUp(self):
        self.project = self.create_project()
        self.release = Release.get_or_create(self.project, "1.0")
        self.environment1 = Environment.get_or_create(self.project, "prod")
        self.environment2 = Environment.get_or_create(self.project, "staging")
        self.timestamp = float(int(time() - 300))

    def make_event(self, **kwargs):
        result = {
            "event_id": "a" * 32,
            "message": "foo",
            "timestamp": self.timestamp + 0.23,
            "level": logging.ERROR,
            "logger": "default",
            "tags": [],
        }
        result.update(kwargs)
        return result

    def make_release_event(
        self, release_version="1.0", environment_name="prod", project_id=1, **kwargs
    ):
        event = make_event(
            release=release_version, environment=environment_name, event_id=uuid.uuid1().hex
        )
        event.update(kwargs)

        manager = EventManager(event)
        with self.tasks():
            event = manager.save(project_id)
        return event

    def convert_timestamp(self, timestamp):
        date = datetime.fromtimestamp(timestamp)
        date = date.replace(tzinfo=timezone.utc)
        return date
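
    # Note: fromtimestamp() above converts in the runner's local timezone before the
    # tzinfo is overwritten, so this helper implicitly assumes the tests run under
    # UTC. A timezone-safe sketch of the same conversion would be:
    #
    #     def convert_timestamp(self, timestamp):
    #         return datetime.fromtimestamp(timestamp, tz=timezone.utc)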

    def assert_release_project_environment(self, event, new_issues_count, first_seen, last_seen):
        release = Release.objects.get(
            organization=event.project.organization.id, version=event.get_tag("sentry:release")
        )
        release_project_envs = ReleaseProjectEnvironment.objects.filter(
            release=release, project=event.project, environment=event.get_environment()
        )
        assert len(release_project_envs) == 1

        release_project_env = release_project_envs[0]
        assert release_project_env.new_issues_count == new_issues_count
        assert release_project_env.first_seen == self.convert_timestamp(first_seen)
        assert release_project_env.last_seen == self.convert_timestamp(last_seen)

    def test_different_groups(self):
        event1 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        self.assert_release_project_environment(
            event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
        )

        event2 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="b" * 32,
            timestamp=self.timestamp + 100,
        )
        self.assert_release_project_environment(
            event=event2,
            new_issues_count=2,
            last_seen=self.timestamp + 100,
            first_seen=self.timestamp,
        )

    def test_same_group(self):
        event1 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        self.assert_release_project_environment(
            event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
        )

        event2 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp + 100,
        )
        self.assert_release_project_environment(
            event=event2,
            new_issues_count=1,
            last_seen=self.timestamp + 100,
            first_seen=self.timestamp,
        )

    def test_same_group_different_environment(self):
        event1 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        self.assert_release_project_environment(
            event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
        )

        event2 = self.make_release_event(
            release_version=self.release.version,
            environment_name=self.environment2.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp + 100,
        )
        self.assert_release_project_environment(
            event=event1, new_issues_count=1, last_seen=self.timestamp, first_seen=self.timestamp
        )
        self.assert_release_project_environment(
            event=event2,
            new_issues_count=1,
            last_seen=self.timestamp + 100,
            first_seen=self.timestamp + 100,
        )
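
    # Taken together, these tests pin down that new_issues_count is tracked per
    # (release, project, environment): a repeated group does not increment it, while
    # the same group seen in a different environment starts its own counter.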


@region_silo_test
@apply_feature_flag_on_cls("organizations:dynamic-sampling")
class DSLatestReleaseBoostTest(TestCase):
    def setUp(self):
        self.environment1 = Environment.get_or_create(self.project, "prod")
        self.environment2 = Environment.get_or_create(self.project, "staging")
        self.timestamp = float(int(time() - 300))
        self.redis_client = get_redis_client_for_ds()

    def make_transaction_event(self, **kwargs):
        result = {
            "transaction": "wait",
            "contexts": {
                "trace": {
                    "parent_span_id": "bce14471e0e9654d",
                    "op": "foobar",
                    "trace_id": "a0fa8803753e40fd8124b21eeb2986b5",
                    "span_id": "bf5be759039ede9a",
                }
            },
            "spans": [],
            "timestamp": self.timestamp + 0.23,
            "start_timestamp": "2019-06-14T14:01:40Z",
            "type": "transaction",
        }
        result.update(kwargs)
        return result

    def make_release_transaction(
        self, release_version="1.0", environment_name="prod", project_id=1, **kwargs
    ):
        transaction = (
            self.make_transaction_event(
                release=release_version, environment=environment_name, event_id=uuid.uuid1().hex
            )
            if environment_name is not None
            else self.make_transaction_event(release=release_version, event_id=uuid.uuid1().hex)
        )
        transaction.update(kwargs)

        manager = EventManager(transaction)
        with self.tasks():
            event = manager.save(project_id)
        return event
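
    # The conditional above lets callers pass environment_name=None so a transaction
    # can be sent with no environment tag at all, exercising the bare-release path.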

    @freeze_time("2022-11-03 10:00:00")
    def test_boost_release_with_non_observed_release(self):
        ts = time()

        project = self.create_project(platform="python")
        release_1 = Release.get_or_create(project=project, version="1.0", date_added=datetime.now())
        release_2 = Release.get_or_create(
            project=project, version="2.0", date_added=datetime.now() + timedelta(hours=1)
        )
        release_3 = Release.get_or_create(
            project=project, version="3.0", date_added=datetime.now() + timedelta(hours=2)
        )

        for release, environment in (
            (release_1, None),
            (release_2, "prod"),
            (release_3, "dev"),
        ):
            self.make_release_transaction(
                release_version=release.version,
                environment_name=environment,
                project_id=project.id,
                checksum="a" * 32,
                timestamp=self.timestamp,
            )

            env_postfix = f":e:{environment}" if environment is not None else ""
            assert self.redis_client.get(f"ds::p:{project.id}:r:{release.id}{env_postfix}") == "1"

        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release_1.id}": str(ts),
            f"ds::r:{release_2.id}:e:prod": str(ts),
            f"ds::r:{release_3.id}:e:dev": str(ts),
        }
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release_1.id,
                timestamp=ts,
                environment=None,
                cache_key=f"ds::r:{release_1.id}",
                version=release_1.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_2.id,
                timestamp=ts,
                environment="prod",
                cache_key=f"ds::r:{release_2.id}:e:prod",
                version=release_2.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_3.id,
                timestamp=ts,
                environment="dev",
                cache_key=f"ds::r:{release_3.id}:e:dev",
                version=release_3.version,
                platform=Platform(project.platform),
            ),
        ]
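
    # Redis layout exercised by this class, as read back in the assertions above:
    #   ds::p:{project_id}:r:{release_id}[:e:{environment}]  ->  "1"
    #       per-(release, environment) observation flag (the tests set a 24h TTL)
    #   ds::p:{project_id}:boosted_releases  ->  hash of cache key -> boost timestamp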

    @freeze_time("2022-11-03 10:00:00")
    def test_boost_release_boosts_only_latest_release(self):
        ts = time()

        project = self.create_project(platform="python")
        release_1 = Release.get_or_create(project=project, version="1.0", date_added=datetime.now())
        release_2 = Release.get_or_create(
            project=project,
            version="2.0",
            # We must make sure that release_2.date_added > release_1.date_added.
            date_added=datetime.now() + timedelta(hours=1),
        )

        # We add a transaction for the latest release, release_2.
        self.make_release_transaction(
            release_version=release_2.version,
            environment_name=self.environment1.name,
            project_id=project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        # We add a transaction for release_1, which is no longer the latest release
        # and should therefore be skipped.
        self.make_release_transaction(
            release_version=release_1.version,
            environment_name=self.environment1.name,
            project_id=project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )

        assert (
            self.redis_client.get(f"ds::p:{project.id}:r:{release_2.id}:e:{self.environment1.name}")
            == "1"
        )
        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release_2.id}:e:{self.environment1.name}": str(ts),
        }
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release_2.id,
                timestamp=ts,
                environment=self.environment1.name,
                cache_key=f"ds::r:{release_2.id}:e:{self.environment1.name}",
                version=release_2.version,
                platform=Platform(project.platform),
            )
        ]

    @freeze_time("2022-11-03 10:00:00")
    def test_boost_release_with_observed_release_and_different_environment(self):
        project = self.create_project(platform="python")
        release = Release.get_or_create(project=project, version="1.0", date_added=datetime.now())

        self.make_release_transaction(
            release_version=release.version,
            environment_name=self.environment1.name,
            project_id=project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )

        ts_1 = time()

        assert (
            self.redis_client.get(f"ds::p:{project.id}:r:{release.id}:e:{self.environment1.name}")
            == "1"
        )
        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release.id}:e:{self.environment1.name}": str(ts_1)
        }
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release.id,
                timestamp=ts_1,
                environment=self.environment1.name,
                cache_key=f"ds::r:{release.id}:e:{self.environment1.name}",
                version=release.version,
                platform=Platform(project.platform),
            )
        ]

        # We simulate that a new transaction with the same release but a different
        # environment arrives 30 minutes later; we expect an entry for that
        # release/environment pair to be added to the boosted releases.
        with freeze_time("2022-11-03 10:30:00"):
            self.make_release_transaction(
                release_version=release.version,
                environment_name=self.environment2.name,
                project_id=project.id,
                checksum="b" * 32,
                timestamp=self.timestamp,
            )

            ts_2 = time()

            assert (
                self.redis_client.get(
                    f"ds::p:{project.id}:r:{release.id}:e:{self.environment2.name}"
                )
                == "1"
            )
            assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
                f"ds::r:{release.id}:e:{self.environment1.name}": str(ts_1),
                f"ds::r:{release.id}:e:{self.environment2.name}": str(ts_2),
            }
            assert ProjectBoostedReleases(
                project_id=project.id
            ).get_extended_boosted_releases() == [
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_1,
                    environment=self.environment1.name,
                    cache_key=f"ds::r:{release.id}:e:{self.environment1.name}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_2,
                    environment=self.environment2.name,
                    cache_key=f"ds::r:{release.id}:e:{self.environment2.name}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
            ]

        # We also test the case in which no environment is set, which can happen as per
        # https://docs.sentry.io/platforms/javascript/configuration/options/#environment.
        with freeze_time("2022-11-03 11:00:00"):
            self.make_release_transaction(
                release_version=release.version,
                environment_name=None,
                project_id=project.id,
                checksum="b" * 32,
                timestamp=self.timestamp,
            )

            ts_3 = time()

            assert self.redis_client.get(f"ds::p:{project.id}:r:{release.id}") == "1"
            assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
                f"ds::r:{release.id}:e:{self.environment1.name}": str(ts_1),
                f"ds::r:{release.id}:e:{self.environment2.name}": str(ts_2),
                f"ds::r:{release.id}": str(ts_3),
            }
            assert ProjectBoostedReleases(
                project_id=project.id
            ).get_extended_boosted_releases() == [
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_1,
                    environment=self.environment1.name,
                    cache_key=f"ds::r:{release.id}:e:{self.environment1.name}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_2,
                    environment=self.environment2.name,
                    cache_key=f"ds::r:{release.id}:e:{self.environment2.name}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
                ExtendedBoostedRelease(
                    id=release.id,
                    timestamp=ts_3,
                    environment=None,
                    cache_key=f"ds::r:{release.id}",
                    version=release.version,
                    platform=Platform(project.platform),
                ),
            ]

    @freeze_time("2022-11-03 10:00:00")
    def test_release_not_boosted_with_observed_release_and_same_environment(self):
        project = self.create_project(platform="python")
        release = Release.get_or_create(project=project, version="1.0", date_added=datetime.now())

        for environment in (self.environment1.name, self.environment2.name):
            self.redis_client.set(
                f"ds::p:{project.id}:r:{release.id}:e:{environment}", 1, 60 * 60 * 24
            )
            self.make_release_transaction(
                release_version=release.version,
                environment_name=environment,
                project_id=project.id,
                checksum="b" * 32,
                timestamp=self.timestamp,
            )

        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {}
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == []
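
    # Pre-setting the observation flags above makes both release/environment pairs
    # count as already seen, so no boost entries are written for them.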

    @freeze_time("2022-11-03 10:00:00")
    def test_release_not_boosted_with_deleted_release_after_event_received(self):
        ts = time()

        project = self.create_project(platform="python")
        release_1 = Release.get_or_create(project=project, version="1.0", date_added=datetime.now())
        release_2 = Release.get_or_create(
            project=project, version="2.0", date_added=datetime.now() + timedelta(hours=1)
        )

        self.make_release_transaction(
            release_version=release_1.version,
            environment_name=None,
            project_id=project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        assert self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}") == "1"

        self.make_release_transaction(
            release_version=release_2.version,
            environment_name=None,
            project_id=project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        assert self.redis_client.get(f"ds::p:{project.id}:r:{release_2.id}") == "1"

        # We simulate that release_2 is deleted after its boost has been inserted.
        release_2_id = release_2.id
        release_2.delete()

        # The boost entry is kept in Redis as long as it is not queried through
        # ProjectBoostedReleases.
        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release_1.id}": str(ts),
            f"ds::r:{release_2_id}": str(ts),
        }
        # release_2 no longer exists in the database, so it is treated as expired
        # and omitted from the extended boosted releases.
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release_1.id,
                timestamp=ts,
                environment=None,
                cache_key=f"ds::r:{release_1.id}",
                version=release_1.version,
                platform=Platform(project.platform),
            ),
        ]

    @freeze_time("2022-11-03 10:00:00")
    def test_get_boosted_releases_with_old_and_new_cache_keys(self):
        ts = time()

        project = self.create_project(platform="python")

        # Old-style cache key.
        release_1 = Release.get_or_create(project=project, version="1.0", date_added=datetime.now())
        self.redis_client.hset(
            f"ds::p:{project.id}:boosted_releases",
            f"{release_1.id}",
            ts,
        )

        # New-style cache keys.
        release_2 = Release.get_or_create(
            project=project, version="2.0", date_added=datetime.now() + timedelta(hours=1)
        )
        self.redis_client.hset(
            f"ds::p:{project.id}:boosted_releases",
            f"ds::r:{release_2.id}",
            ts,
        )
        self.redis_client.hset(
            f"ds::p:{project.id}:boosted_releases",
            f"ds::r:{release_2.id}:e:{self.environment1.name}",
            ts,
        )
        self.redis_client.hset(
            f"ds::p:{project.id}:boosted_releases",
            f"ds::r:{release_2.id}:e:{self.environment2.name}",
            ts,
        )

        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release_1.id,
                timestamp=ts,
                environment=None,
                # This entry uses the old-style cache key.
                cache_key=f"{release_1.id}",
                version=release_1.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_2.id,
                timestamp=ts,
                environment=None,
                cache_key=f"ds::r:{release_2.id}",
                version=release_2.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_2.id,
                timestamp=ts,
                environment=self.environment1.name,
                cache_key=f"ds::r:{release_2.id}:e:{self.environment1.name}",
                version=release_2.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_2.id,
                timestamp=ts,
                environment=self.environment2.name,
                cache_key=f"ds::r:{release_2.id}:e:{self.environment2.name}",
                version=release_2.version,
                platform=Platform(project.platform),
            ),
        ]
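
    # The bare "{release_id}" hash field is the legacy cache-key format; the
    # assertion above checks it is still decoded (as environment=None) alongside
    # the newer "ds::r:{release_id}[:e:{environment}]" keys.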

    @freeze_time("2022-11-03 10:00:00")
    def test_expired_boosted_releases_are_removed(self):
        ts = time()

        # We want to test with multiple platforms.
        for platform in ("python", "java", None):
            project = self.create_project(platform=platform)

            for index, (release_version, environment) in enumerate(
                (
                    (f"1.0-{platform}", self.environment1.name),
                    (f"2.0-{platform}", self.environment2.name),
                )
            ):
                release = Release.get_or_create(
                    project=project,
                    version=release_version,
                    date_added=datetime.now() + timedelta(hours=index),
                )
                self.redis_client.set(
                    f"ds::p:{project.id}:r:{release.id}:e:{environment}", 1, 60 * 60 * 24
                )
                self.redis_client.hset(
                    f"ds::p:{project.id}:boosted_releases",
                    f"ds::r:{release.id}:e:{environment}",
                    # We backdate the boost timestamp so that it is expired by one second.
                    ts - Platform(platform).time_to_adoption - 1,
                )

            # We add a new boosted release that is not expired.
            release_3 = Release.get_or_create(
                project=project,
                version=f"3.0-{platform}",
                date_added=datetime.now() + timedelta(hours=2),
            )
            self.make_release_transaction(
                release_version=release_3.version,
                environment_name=self.environment1.name,
                project_id=project.id,
                checksum="b" * 32,
                timestamp=self.timestamp,
            )

            assert (
                self.redis_client.get(
                    f"ds::p:{project.id}:r:{release_3.id}:e:{self.environment1.name}"
                )
                == "1"
            )
            assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
                f"ds::r:{release_3.id}:e:{self.environment1.name}": str(ts)
            }
            assert ProjectBoostedReleases(
                project_id=project.id
            ).get_extended_boosted_releases() == [
                ExtendedBoostedRelease(
                    id=release_3.id,
                    timestamp=ts,
                    environment=self.environment1.name,
                    cache_key=f"ds::r:{release_3.id}:e:{self.environment1.name}",
                    version=release_3.version,
                    platform=Platform(project.platform),
                )
            ]
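
    # Expiry is driven by Platform(platform).time_to_adoption: entries whose boost
    # timestamp is older than the platform's adoption window are pruned when the
    # boosted_releases hash is read back.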

    @mock.patch("sentry.event_manager.schedule_invalidate_project_config")
    def test_project_config_invalidation_is_triggered_when_new_release_is_observed(
        self, mocked_invalidate
    ):
        self.make_release_transaction(
            release_version=self.release.version,
            environment_name=self.environment1.name,
            project_id=self.project.id,
            checksum="a" * 32,
            timestamp=self.timestamp,
        )
        assert any(
            o.kwargs["trigger"] == "dynamic_sampling:boost_release"
            for o in mocked_invalidate.mock_calls
        )

    @freeze_time()
    @mock.patch("sentry.dynamic_sampling.rules.helpers.latest_releases.BOOSTED_RELEASES_LIMIT", 2)
    def test_least_recently_boosted_release_is_removed_if_limit_is_exceeded(self):
        ts = time()

        project = self.create_project(platform="python")
        release_1 = Release.get_or_create(
            project=project,
            version="1.0",
            date_added=datetime.now(),
        )
        release_2 = Release.get_or_create(
            project=project,
            version="2.0",
            date_added=datetime.now() + timedelta(hours=1),
        )

        # We insert boosts with increasing timestamps so that the entry with the
        # smallest timestamp is the one evicted.
        for release, boost_time in ((release_1, ts - 2), (release_2, ts - 1)):
            self.redis_client.set(
                f"ds::p:{project.id}:r:{release.id}",
                1,
                60 * 60 * 24,
            )
            self.redis_client.hset(
                f"ds::p:{project.id}:boosted_releases",
                f"ds::r:{release.id}",
                boost_time,
            )

        release_3 = Release.get_or_create(
            project=project,
            version="3.0",
            date_added=datetime.now() + timedelta(hours=2),
        )
        self.make_release_transaction(
            release_version=release_3.version,
            environment_name=self.environment1.name,
            project_id=project.id,
            checksum="b" * 32,
            timestamp=self.timestamp,
        )

        assert (
            self.redis_client.get(f"ds::p:{project.id}:r:{release_3.id}:e:{self.environment1.name}")
            == "1"
        )
        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release_2.id}": str(ts - 1),
            f"ds::r:{release_3.id}:e:{self.environment1.name}": str(ts),
        }
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release_2.id,
                timestamp=ts - 1,
                environment=None,
                cache_key=f"ds::r:{release_2.id}",
                version=release_2.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_3.id,
                timestamp=ts,
                environment=self.environment1.name,
                cache_key=f"ds::r:{release_3.id}:e:{self.environment1.name}",
                version=release_3.version,
                platform=Platform(project.platform),
            ),
        ]
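
    # With BOOSTED_RELEASES_LIMIT patched to 2, boosting release_3 evicts the entry
    # with the smallest timestamp (release_1), keeping only the two most recent boosts.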

    @freeze_time()
    @mock.patch("sentry.dynamic_sampling.rules.helpers.latest_releases.BOOSTED_RELEASES_LIMIT", 2)
    def test_removed_boost_not_added_again_if_limit_is_exceeded(self):
        ts = time()

        project = self.create_project(platform="python")
        release_1 = Release.get_or_create(project=project, version="1.0", date_added=datetime.now())

        # We test the same release with different environments going over the limit:
        # an environment is evicted, and then a transaction arrives again for the
        # evicted environment.
        #
        # As an example, suppose the following history of transactions in the form
        # (release, env):
        # (1, production) -> (1, staging) -> (1, None) -> (1, production)
        #
        # After the first two we are at maximum capacity. Receiving (1, None) evicts
        # the boost for (1, production), leaving (1, staging) and (1, None). When
        # (1, production) arrives again we must not evict (1, staging), or we would
        # end up in an infinite loop. Instead we expect (1, production) to stay
        # marked as observed, only becoming un-observed if it receives no
        # transactions within the next 24 hours.
        environments_sequence = [
            self.environment1.name,
            self.environment2.name,
            None,
            self.environment1.name,
        ]
        for environment in environments_sequence:
            self.make_release_transaction(
                release_version=release_1.version,
                environment_name=environment,
                project_id=project.id,
                checksum="b" * 32,
                timestamp=self.timestamp,
            )

        # We assert that all environments have been observed.
        assert (
            self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}:e:{self.environment1.name}")
            == "1"
        )
        assert (
            self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}:e:{self.environment2.name}")
            == "1"
        )
        assert self.redis_client.get(f"ds::p:{project.id}:r:{release_1.id}") == "1"

        # We assert that only the last two unseen (release, env) pairs are boosted.
        assert self.redis_client.hgetall(f"ds::p:{project.id}:boosted_releases") == {
            f"ds::r:{release_1.id}:e:{self.environment2.name}": str(ts),
            f"ds::r:{release_1.id}": str(ts),
        }
        assert ProjectBoostedReleases(project_id=project.id).get_extended_boosted_releases() == [
            ExtendedBoostedRelease(
                id=release_1.id,
                timestamp=ts,
                environment=self.environment2.name,
                cache_key=f"ds::r:{release_1.id}:e:{self.environment2.name}",
                version=release_1.version,
                platform=Platform(project.platform),
            ),
            ExtendedBoostedRelease(
                id=release_1.id,
                timestamp=ts,
                environment=None,
                cache_key=f"ds::r:{release_1.id}",
                version=release_1.version,
                platform=Platform(project.platform),
            ),
        ]


class TestSaveGroupHashAndGroup(TransactionTestCase):
    def test(self):
        perf_data = load_data("transaction-n-plus-one", timestamp=before_now(minutes=10))
        event = _get_event_instance(perf_data, project_id=self.project.id)

        group_hash = "some_group"
        group, created = _save_grouphash_and_group(self.project, event, group_hash)
        assert created

        group_2, created = _save_grouphash_and_group(self.project, event, group_hash)
        assert group.id == group_2.id
        assert not created
        assert Group.objects.filter(grouphash__hash=group_hash).count() == 1

        group_3, created = _save_grouphash_and_group(self.project, event, "new_hash")
        assert created
        assert group_2.id != group_3.id
        assert Group.objects.filter(grouphash__hash=group_hash).count() == 1
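

# _save_grouphash_and_group is exercised above for idempotency: saving the same hash
# twice must resolve to the same Group with created=False, while a new hash creates a
# new Group. TransactionTestCase (real transactions rather than a rolled-back wrapper)
# is presumably used because the helper relies on transactional semantics.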