test_backend.py 143 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889
  1. import uuid
  2. from datetime import datetime, timedelta
  3. from typing import Any
  4. from unittest import mock
  5. import pytest
  6. import pytz
  7. from django.utils import timezone
  8. from sentry import options
  9. from sentry.api.issue_search import convert_query_values, issue_search_config, parse_search_query
  10. from sentry.exceptions import InvalidSearchQuery
  11. from sentry.issues.grouptype import (
  12. ErrorGroupType,
  13. NoiseConfig,
  14. PerformanceNPlusOneGroupType,
  15. PerformanceRenderBlockingAssetSpanGroupType,
  16. ProfileFileIOGroupType,
  17. )
  18. from sentry.issues.ingest import send_issue_occurrence_to_eventstream
  19. from sentry.issues.occurrence_consumer import process_event_and_issue_occurrence
  20. from sentry.models import (
  21. Environment,
  22. Group,
  23. GroupAssignee,
  24. GroupBookmark,
  25. GroupEnvironment,
  26. GroupHistoryStatus,
  27. GroupStatus,
  28. GroupSubscription,
  29. Integration,
  30. record_group_history,
  31. )
  32. from sentry.models.groupowner import GroupOwner
  33. from sentry.search.snuba.backend import (
  34. CdcEventsDatasetSnubaSearchBackend,
  35. EventsDatasetSnubaSearchBackend,
  36. SnubaSearchBackendBase,
  37. )
  38. from sentry.search.snuba.executors import InvalidQueryForExecutor, PrioritySortWeights
  39. from sentry.snuba.dataset import Dataset
  40. from sentry.testutils.cases import SnubaTestCase, TestCase
  41. from sentry.testutils.helpers import Feature
  42. from sentry.testutils.helpers.datetime import before_now, iso_format
  43. from sentry.testutils.skips import xfail_if_not_postgres
  44. from sentry.types.group import GroupSubStatus
  45. from sentry.utils.snuba import SENTRY_SNUBA_MAP, SnubaError
  46. from tests.sentry.issues.test_utils import OccurrenceTestMixin
  47. def date_to_query_format(date):
  48. return date.strftime("%Y-%m-%dT%H:%M:%S")
  49. class SharedSnubaTest(TestCase, SnubaTestCase):
  50. @property
  51. def backend(self) -> SnubaSearchBackendBase:
  52. raise NotImplementedError(self)
  53. def build_search_filter(self, query, projects=None, user=None, environments=None):
  54. user = user if user is not None else self.user
  55. projects = projects if projects is not None else [self.project]
  56. return convert_query_values(parse_search_query(query), projects, user, environments)
  57. def make_query(
  58. self,
  59. projects=None,
  60. search_filter_query=None,
  61. user=None,
  62. environments=None,
  63. sort_by="date",
  64. limit=None,
  65. count_hits=False,
  66. date_from=None,
  67. date_to=None,
  68. cursor=None,
  69. aggregate_kwargs=None,
  70. ):
  71. search_filters = []
  72. projects = projects if projects is not None else [self.project]
  73. if search_filter_query is not None:
  74. search_filters = self.build_search_filter(
  75. search_filter_query, projects, user=user, environments=environments
  76. )
  77. kwargs = {}
  78. if limit is not None:
  79. kwargs["limit"] = limit
  80. if aggregate_kwargs:
  81. kwargs["aggregate_kwargs"] = {"priority": {**aggregate_kwargs}}
  82. return self.backend.query(
  83. projects,
  84. search_filters=search_filters,
  85. environments=environments,
  86. count_hits=count_hits,
  87. sort_by=sort_by,
  88. date_from=date_from,
  89. date_to=date_to,
  90. cursor=cursor,
  91. **kwargs,
  92. )
  93. def store_event(self, data, *args, **kwargs):
  94. event = super().store_event(data, *args, **kwargs)
  95. environment_name = data.get("environment")
  96. if environment_name:
  97. GroupEnvironment.objects.filter(
  98. group_id=event.group_id,
  99. environment__name=environment_name,
  100. first_seen__gt=event.datetime,
  101. ).update(first_seen=event.datetime)
  102. return event
  103. class EventsSnubaSearchTest(SharedSnubaTest):
    @property
    def backend(self):
        # Events-dataset implementation of the abstract ``backend`` property
        # declared on SharedSnubaTest.
        return EventsDatasetSnubaSearchBackend()
    def setUp(self):
        super().setUp()
        # Anchor all fixture timestamps three days in the past, in UTC.
        self.base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc)
        event1_timestamp = iso_format(self.base_datetime - timedelta(days=21))
        # event1 and event3 share a fingerprint, so both collapse into group1.
        self.event1 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "a" * 32,
                # Message deliberately exceeds 256 chars: Postgres truncates at
                # 256 while Clickhouse does not, so matching "santryrox" proves
                # the search ran through Snuba.
                "message": "foo. Also, this message is intended to be greater than 256 characters so that we can put some unique string identifier after that point in the string. The purpose of this is in order to verify we are using snuba to search messages instead of Postgres (postgres truncates at 256 characters and clickhouse does not). santryrox.",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "event1@example.com"},
                "timestamp": event1_timestamp,
                "stacktrace": {"frames": [{"module": "group1"}]},
                "level": "fatal",
            },
            project_id=self.project.id,
        )
        self.event3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "c" * 32,
                "message": "group1",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "event3@example.com"},
                "timestamp": iso_format(self.base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
                "level": "fatal",
            },
            project_id=self.project.id,
        )
        self.group1 = Group.objects.get(id=self.event1.group.id)
        # Sanity-check the fixture: both events landed in one group whose
        # first/last seen span the two event timestamps.
        assert self.group1.id == self.event1.group.id
        assert self.group1.id == self.event3.group.id
        assert self.group1.first_seen == self.event1.datetime
        assert self.group1.last_seen == self.event3.datetime
        self.group1.times_seen = 5
        self.group1.status = GroupStatus.UNRESOLVED
        self.group1.substatus = GroupSubStatus.ONGOING
        self.group1.update(type=ErrorGroupType.type_id)
        self.group1.save()
        self.store_group(self.group1)
        # event2 gets its own fingerprint (group2) in the "staging" environment.
        self.event2 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "event_id": "b" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
                "message": "bar",
                "stacktrace": {"frames": [{"module": "group2"}]},
                "environment": "staging",
                "tags": {
                    "server": "example.com",
                    "url": "http://example.com",
                    "sentry:user": "event2@example.com",
                },
                "level": "error",
            },
            project_id=self.project.id,
        )
        self.group2 = Group.objects.get(id=self.event2.group.id)
        assert self.group2.id == self.event2.group.id
        assert self.group2.first_seen == self.group2.last_seen == self.event2.datetime
        self.group2.status = GroupStatus.RESOLVED
        self.group2.substatus = None
        self.group2.times_seen = 10
        self.group2.update(type=ErrorGroupType.type_id)
        self.group2.save()
        self.store_group(self.group2)
        # Bookmark/assignee/subscription fixtures used by ownership tests:
        # group2 is bookmarked and assigned; group1 has an active subscription,
        # group2 an inactive one.
        GroupBookmark.objects.create(
            user_id=self.user.id, group=self.group2, project=self.group2.project
        )
        GroupAssignee.objects.create(
            user_id=self.user.id, group=self.group2, project=self.group2.project
        )
        GroupSubscription.objects.create(
            user_id=self.user.id, group=self.group1, project=self.group1.project, is_active=True
        )
        GroupSubscription.objects.create(
            user_id=self.user.id, group=self.group2, project=self.group2.project, is_active=False
        )
        # Environment objects keyed by name for convenient lookup in tests.
        self.environments = {
            "production": self.event1.get_environment(),
            "staging": self.event2.get_environment(),
        }
    def set_up_multi_project(self):
        # Create a second project in the same organization with one group
        # (group_p2) so cross-project queries have something to find.
        self.project2 = self.create_project(organization=self.project.organization)
        self.event_p2 = self.store_event(
            data={
                "event_id": "a" * 32,
                "fingerprint": ["put-me-in-groupP2"],
                "timestamp": iso_format(self.base_datetime - timedelta(days=21)),
                "message": "foo",
                "stacktrace": {"frames": [{"module": "group_p2"}]},
                "tags": {"server": "example.com"},
                "environment": "production",
            },
            project_id=self.project2.id,
        )
        self.group_p2 = Group.objects.get(id=self.event_p2.group.id)
        self.group_p2.times_seen = 6
        # Backdate last_seen relative to the shared fixture anchor so sort
        # orderings between projects are deterministic.
        self.group_p2.last_seen = self.base_datetime - timedelta(days=1)
        self.group_p2.save()
        self.store_group(self.group_p2)
  209. def create_group_with_integration_external_issue(self, environment="production"):
  210. event = self.store_event(
  211. data={
  212. "fingerprint": ["linked_group1"],
  213. "event_id": uuid.uuid4().hex,
  214. "timestamp": iso_format(self.base_datetime),
  215. "environment": environment,
  216. },
  217. project_id=self.project.id,
  218. )
  219. integration = Integration.objects.create(provider="example", name="Example")
  220. integration.add_organization(event.group.organization, self.user)
  221. self.create_integration_external_issue(
  222. group=event.group,
  223. integration=integration,
  224. key="APP-123",
  225. )
  226. return event.group
  227. def create_group_with_platform_external_issue(self, environment="production"):
  228. event = self.store_event(
  229. data={
  230. "fingerprint": ["linked_group2"],
  231. "event_id": uuid.uuid4().hex,
  232. "timestamp": iso_format(self.base_datetime),
  233. "environment": environment,
  234. },
  235. project_id=self.project.id,
  236. )
  237. self.create_platform_external_issue(
  238. group=event.group,
  239. service_type="sentry-app",
  240. display_name="App#issue-1",
  241. web_url="https://example.com/app/issues/1",
  242. )
  243. return event.group
  244. def run_test_query(
  245. self, query, expected_groups, expected_negative_groups=None, environments=None, user=None
  246. ):
  247. results = self.make_query(search_filter_query=query, environments=environments, user=user)
  248. def sort_key(result):
  249. return result.id
  250. assert sorted(results, key=sort_key) == sorted(expected_groups, key=sort_key)
  251. if expected_negative_groups is not None:
  252. results = self.make_query(search_filter_query=f"!{query}", user=user)
  253. assert sorted(results, key=sort_key) == sorted(expected_negative_groups, key=sort_key)
  254. def test_query(self):
  255. results = self.make_query(search_filter_query="foo")
  256. assert set(results) == {self.group1}
  257. results = self.make_query(search_filter_query="bar")
  258. assert set(results) == {self.group2}
  259. def test_query_multi_project(self):
  260. self.set_up_multi_project()
  261. results = self.make_query([self.project, self.project2], search_filter_query="foo")
  262. assert set(results) == {self.group1, self.group_p2}
  263. def test_query_with_environment(self):
  264. results = self.make_query(
  265. environments=[self.environments["production"]], search_filter_query="foo"
  266. )
  267. assert set(results) == {self.group1}
  268. results = self.make_query(
  269. environments=[self.environments["production"]], search_filter_query="bar"
  270. )
  271. assert set(results) == set()
  272. results = self.make_query(
  273. environments=[self.environments["staging"]], search_filter_query="bar"
  274. )
  275. assert set(results) == {self.group2}
  276. def test_query_for_text_in_long_message(self):
  277. results = self.make_query(
  278. [self.project],
  279. environments=[self.environments["production"]],
  280. search_filter_query="santryrox",
  281. )
  282. assert set(results) == {self.group1}
  283. def test_multi_environments(self):
  284. self.set_up_multi_project()
  285. results = self.make_query(
  286. [self.project, self.project2],
  287. environments=[self.environments["production"], self.environments["staging"]],
  288. )
  289. assert set(results) == {self.group1, self.group2, self.group_p2}
  290. def test_query_with_environment_multi_project(self):
  291. self.set_up_multi_project()
  292. results = self.make_query(
  293. [self.project, self.project2],
  294. environments=[self.environments["production"]],
  295. search_filter_query="foo",
  296. )
  297. assert set(results) == {self.group1, self.group_p2}
  298. results = self.make_query(
  299. [self.project, self.project2],
  300. environments=[self.environments["production"]],
  301. search_filter_query="bar",
  302. )
  303. assert set(results) == set()
  304. def test_query_timestamp(self):
  305. results = self.make_query(
  306. [self.project],
  307. environments=[self.environments["production"]],
  308. search_filter_query=f"timestamp:>{iso_format(self.event1.datetime)} timestamp:<{iso_format(self.event3.datetime)}",
  309. )
  310. assert set(results) == {self.group1}
  311. def test_sort(self):
  312. results = self.make_query(sort_by="date")
  313. assert list(results) == [self.group1, self.group2]
  314. results = self.make_query(sort_by="new")
  315. assert list(results) == [self.group2, self.group1]
  316. results = self.make_query(sort_by="freq")
  317. assert list(results) == [self.group1, self.group2]
  318. results = self.make_query(sort_by="priority")
  319. assert list(results) == [self.group2, self.group1]
  320. results = self.make_query(sort_by="user")
  321. assert list(results) == [self.group1, self.group2]
  322. def test_priority_sort(self):
  323. weights: PrioritySortWeights = {
  324. "log_level": 5,
  325. "has_stacktrace": 5,
  326. "relative_volume": 1,
  327. "event_halflife_hours": 4,
  328. "issue_halflife_hours": 24 * 7,
  329. "v2": False,
  330. "norm": False,
  331. }
  332. results = self.make_query(
  333. sort_by="priority",
  334. aggregate_kwargs=weights,
  335. )
  336. assert list(results) == [self.group2, self.group1]
  337. def test_sort_with_environment(self):
  338. for dt in [
  339. self.group1.first_seen + timedelta(days=1),
  340. self.group1.first_seen + timedelta(days=2),
  341. self.group1.last_seen + timedelta(days=1),
  342. ]:
  343. self.store_event(
  344. data={
  345. "fingerprint": ["put-me-in-group2"],
  346. "timestamp": iso_format(dt),
  347. "stacktrace": {"frames": [{"module": "group2"}]},
  348. "environment": "production",
  349. "message": "group2",
  350. },
  351. project_id=self.project.id,
  352. )
  353. results = self.make_query(environments=[self.environments["production"]], sort_by="date")
  354. assert list(results) == [self.group2, self.group1]
  355. results = self.make_query(environments=[self.environments["production"]], sort_by="new")
  356. assert list(results) == [self.group2, self.group1]
  357. results = self.make_query(environments=[self.environments["production"]], sort_by="freq")
  358. assert list(results) == [self.group2, self.group1]
  359. results = self.make_query(
  360. environments=[self.environments["production"]], sort_by="priority"
  361. )
  362. assert list(results) == [self.group2, self.group1]
  363. results = self.make_query(environments=[self.environments["production"]], sort_by="user")
  364. assert list(results) == [self.group1, self.group2]
  365. def test_status(self):
  366. results = self.make_query(search_filter_query="is:unresolved")
  367. assert set(results) == {self.group1}
  368. results = self.make_query(search_filter_query="is:resolved")
  369. assert set(results) == {self.group2}
  370. event_3 = self.store_event(
  371. data={
  372. "fingerprint": ["put-me-in-group3"],
  373. "event_id": "c" * 32,
  374. "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
  375. },
  376. project_id=self.project.id,
  377. )
  378. group_3 = event_3.group
  379. group_3.status = GroupStatus.MUTED
  380. group_3.substatus = None
  381. group_3.save()
  382. self.run_test_query("status:[unresolved, resolved]", [self.group1, self.group2], [group_3])
  383. self.run_test_query("status:[resolved, muted]", [self.group2, group_3], [self.group1])
    def test_substatus(self):
        # Substatus queries work only while the escalating-issues feature is on.
        with Feature("organizations:escalating-issues"):
            results = self.make_query(search_filter_query="is:ongoing")
            assert set(results) == {self.group1}
        # Outside the feature flag the same query must be rejected.
        with pytest.raises(
            InvalidSearchQuery, match="The substatus filter is not supported for this organization"
        ):
            self.make_query(search_filter_query="is:ongoing")
  392. def test_category(self):
  393. results = self.make_query(search_filter_query="issue.category:error")
  394. assert set(results) == {self.group1, self.group2}
  395. event_3 = self.store_event(
  396. data={
  397. "fingerprint": ["put-me-in-group3"],
  398. "event_id": "c" * 32,
  399. "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
  400. },
  401. project_id=self.project.id,
  402. )
  403. group_3 = event_3.group
  404. group_3.update(type=PerformanceNPlusOneGroupType.type_id)
  405. results = self.make_query(search_filter_query="issue.category:performance")
  406. assert set(results) == {group_3}
  407. results = self.make_query(search_filter_query="issue.category:[error, performance]")
  408. assert set(results) == {self.group1, self.group2, group_3}
  409. with pytest.raises(InvalidSearchQuery):
  410. self.make_query(search_filter_query="issue.category:hellboy")
  411. def test_not_perf_category(self):
  412. results = self.make_query(search_filter_query="issue.category:error foo")
  413. assert set(results) == {self.group1}
  414. not_results = self.make_query(search_filter_query="!issue.category:performance foo")
  415. assert set(not_results) == {self.group1}
def test_type(self):
    """issue.type matches specific error/performance group types; unknown types raise."""
    results = self.make_query(search_filter_query="issue.type:error")
    assert set(results) == {self.group1, self.group2}

    event_3 = self.store_event(
        data={
            "fingerprint": ["put-me-in-group3"],
            "event_id": "c" * 32,
            "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            "type": PerformanceNPlusOneGroupType.type_id,
        },
        project_id=self.project.id,
    )
    group_3 = event_3.group
    group_3.update(type=PerformanceNPlusOneGroupType.type_id)
    results = self.make_query(
        search_filter_query="issue.type:performance_n_plus_one_db_queries"
    )
    assert set(results) == {group_3}

    event_4 = self.store_event(
        data={
            "fingerprint": ["put-me-in-group4"],
            "event_id": "d" * 32,
            "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
        },
        project_id=self.project.id,
    )
    group_4 = event_4.group
    group_4.update(type=PerformanceRenderBlockingAssetSpanGroupType.type_id)
    results = self.make_query(
        search_filter_query="issue.type:performance_render_blocking_asset_span"
    )
    assert set(results) == {group_4}

    # The "in" syntax unions every listed type.
    results = self.make_query(
        search_filter_query="issue.type:[performance_render_blocking_asset_span, performance_n_plus_one_db_queries, error]"
    )
    assert set(results) == {self.group1, self.group2, group_3, group_4}

    # Unknown type names are a query error.
    with pytest.raises(InvalidSearchQuery):
        self.make_query(search_filter_query="issue.type:performance_i_dont_exist")
  454. def test_status_with_environment(self):
  455. results = self.make_query(
  456. environments=[self.environments["production"]], search_filter_query="is:unresolved"
  457. )
  458. assert set(results) == {self.group1}
  459. results = self.make_query(
  460. environments=[self.environments["staging"]], search_filter_query="is:resolved"
  461. )
  462. assert set(results) == {self.group2}
  463. results = self.make_query(
  464. environments=[self.environments["production"]], search_filter_query="is:resolved"
  465. )
  466. assert set(results) == set()
  467. def test_tags(self):
  468. results = self.make_query(search_filter_query="environment:staging")
  469. assert set(results) == {self.group2}
  470. results = self.make_query(search_filter_query="environment:example.com")
  471. assert set(results) == set()
  472. results = self.make_query(search_filter_query="has:environment")
  473. assert set(results) == {self.group2, self.group1}
  474. results = self.make_query(search_filter_query="environment:staging server:example.com")
  475. assert set(results) == {self.group2}
  476. results = self.make_query(search_filter_query='url:"http://example.com"')
  477. assert set(results) == {self.group2}
  478. results = self.make_query(search_filter_query="environment:staging has:server")
  479. assert set(results) == {self.group2}
  480. results = self.make_query(search_filter_query="environment:staging server:bar.example.com")
  481. assert set(results) == set()
  482. def test_tags_with_environment(self):
  483. results = self.make_query(
  484. environments=[self.environments["production"]], search_filter_query="server:example.com"
  485. )
  486. assert set(results) == {self.group1}
  487. results = self.make_query(
  488. environments=[self.environments["staging"]], search_filter_query="server:example.com"
  489. )
  490. assert set(results) == {self.group2}
  491. results = self.make_query(
  492. environments=[self.environments["staging"]], search_filter_query="has:server"
  493. )
  494. assert set(results) == {self.group2}
  495. results = self.make_query(
  496. environments=[self.environments["production"]],
  497. search_filter_query='url:"http://example.com"',
  498. )
  499. assert set(results) == set()
  500. results = self.make_query(
  501. environments=[self.environments["staging"]],
  502. search_filter_query='url:"http://example.com"',
  503. )
  504. assert set(results) == {self.group2}
  505. results = self.make_query(
  506. environments=[self.environments["staging"]],
  507. search_filter_query="server:bar.example.com",
  508. )
  509. assert set(results) == set()
  510. def test_bookmarked_by(self):
  511. results = self.make_query(search_filter_query="bookmarks:%s" % self.user.username)
  512. assert set(results) == {self.group2}
  513. def test_bookmarked_by_in_syntax(self):
  514. self.run_test_query(f"bookmarks:[{self.user.username}]", [self.group2], [self.group1])
  515. user_2 = self.create_user()
  516. GroupBookmark.objects.create(
  517. user_id=user_2.id, group=self.group1, project=self.group2.project
  518. )
  519. self.run_test_query(
  520. f"bookmarks:[{self.user.username}, {user_2.username}]", [self.group2, self.group1], []
  521. )
  522. def test_bookmarked_by_with_environment(self):
  523. results = self.make_query(
  524. environments=[self.environments["staging"]],
  525. search_filter_query="bookmarks:%s" % self.user.username,
  526. )
  527. assert set(results) == {self.group2}
  528. results = self.make_query(
  529. environments=[self.environments["production"]],
  530. search_filter_query="bookmarks:%s" % self.user.username,
  531. )
  532. assert set(results) == set()
def test_search_filter_query_with_custom_priority_tag(self):
    """A user-defined "priority" tag is searchable like any other tag."""
    priority = "high"
    self.store_event(
        data={
            "fingerprint": ["put-me-in-group2"],
            "timestamp": iso_format(self.group2.first_seen + timedelta(days=1)),
            "stacktrace": {"frames": [{"module": "group2"}]},
            "message": "group2",
            "tags": {"priority": priority},
        },
        project_id=self.project.id,
    )
    results = self.make_query(search_filter_query="priority:%s" % priority)
    assert set(results) == {self.group2}
def test_search_filter_query_with_custom_priority_tag_and_priority_sort(self):
    """A custom "priority" tag must not clash with the "priority" sort mode."""
    priority = "high"
    # Give group1 two tagged events and group2 one newer tagged event so the
    # sort has a deterministic order to produce.
    for i in range(1, 3):
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "timestamp": iso_format(self.group2.last_seen + timedelta(days=i)),
                "stacktrace": {"frames": [{"module": "group1"}]},
                "message": "group1",
                "tags": {"priority": priority},
            },
            project_id=self.project.id,
        )
    self.store_event(
        data={
            "fingerprint": ["put-me-in-group2"],
            "timestamp": iso_format(self.group2.last_seen + timedelta(days=2)),
            "stacktrace": {"frames": [{"module": "group2"}]},
            "message": "group2",
            "tags": {"priority": priority},
        },
        project_id=self.project.id,
    )
    # sort_by="priority" orders the results; the tag filter still selects both groups.
    results = self.make_query(search_filter_query="priority:%s" % priority, sort_by="priority")
    assert list(results) == [self.group2, self.group1]
def test_search_tag_overlapping_with_internal_fields(self):
    """A tag named like a promoted column must still be searched as a tag."""
    # Using a tag of email overlaps with the promoted user.email column in events.
    # We don't want to bypass public schema limits in issue search.
    self.store_event(
        data={
            "fingerprint": ["put-me-in-group2"],
            "timestamp": iso_format(self.group2.first_seen + timedelta(days=1)),
            "stacktrace": {"frames": [{"module": "group2"}]},
            "message": "group2",
            "tags": {"email": "tags@example.com"},
        },
        project_id=self.project.id,
    )
    results = self.make_query(search_filter_query="email:tags@example.com")
    assert set(results) == {self.group2}
  587. def test_project(self):
  588. results = self.make_query([self.create_project(name="other")])
  589. assert set(results) == set()
def test_pagination(self):
    """Cursor paging behaves identically with and without the pre-snuba candidate path."""
    for options_set in [
        {"snuba.search.min-pre-snuba-candidates": None},
        {"snuba.search.min-pre-snuba-candidates": 500},
    ]:
        with self.options(options_set):
            # Page 1 (date sort): newest group first; no previous page yet.
            results = self.backend.query([self.project], limit=1, sort_by="date")
            assert set(results) == {self.group1}
            assert not results.prev.has_results
            assert results.next.has_results

            # Page 2: last group; no further page.
            results = self.backend.query(
                [self.project], cursor=results.next, limit=1, sort_by="date"
            )
            assert set(results) == {self.group2}
            assert results.prev.has_results
            assert not results.next.has_results

            # note: previous cursor — step back to page 1.
            results = self.backend.query(
                [self.project], cursor=results.prev, limit=1, sort_by="date"
            )
            assert set(results) == {self.group1}
            assert results.prev.has_results
            assert results.next.has_results

            # note: previous cursor, paging too far into 0 results.
            results = self.backend.query(
                [self.project], cursor=results.prev, limit=1, sort_by="date"
            )
            assert set(results) == set()
            assert not results.prev.has_results
            assert results.next.has_results

            # Walk forward again through both pages and off the end.
            results = self.backend.query(
                [self.project], cursor=results.next, limit=1, sort_by="date"
            )
            assert set(results) == {self.group1}
            assert results.prev.has_results
            assert results.next.has_results

            results = self.backend.query(
                [self.project], cursor=results.next, limit=1, sort_by="date"
            )
            assert set(results) == {self.group2}
            assert results.prev.has_results
            assert not results.next.has_results

            results = self.backend.query(
                [self.project], cursor=results.next, limit=1, sort_by="date"
            )
            assert set(results) == set()
            assert results.prev.has_results
            assert not results.next.has_results
def test_pagination_with_environment(self):
    """Paging with count_hits reports the full environment-scoped hit count on every page."""
    # Make group2 the most recently seen group in production.
    for dt in [
        self.group1.first_seen + timedelta(days=1),
        self.group1.first_seen + timedelta(days=2),
        self.group1.last_seen + timedelta(days=1),
    ]:
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(dt),
                "environment": "production",
                "message": "group2",
                "stacktrace": {"frames": [{"module": "group2"}]},
            },
            project_id=self.project.id,
        )

    results = self.backend.query(
        [self.project],
        environments=[self.environments["production"]],
        sort_by="date",
        limit=1,
        count_hits=True,
    )
    assert list(results) == [self.group2]
    assert results.hits == 2

    results = self.backend.query(
        [self.project],
        environments=[self.environments["production"]],
        sort_by="date",
        limit=1,
        cursor=results.next,
        count_hits=True,
    )
    assert list(results) == [self.group1]
    assert results.hits == 2

    # Past the last page: an empty page, but hits still reports the total.
    results = self.backend.query(
        [self.project],
        environments=[self.environments["production"]],
        sort_by="date",
        limit=1,
        cursor=results.next,
        count_hits=True,
    )
    assert list(results) == []
    assert results.hits == 2
def test_age_filter(self):
    """firstSeen supports >=, <=, and combined range comparisons."""
    results = self.make_query(
        search_filter_query="firstSeen:>=%s" % date_to_query_format(self.group2.first_seen)
    )
    assert set(results) == {self.group2}

    results = self.make_query(
        search_filter_query="firstSeen:<=%s"
        % date_to_query_format(self.group1.first_seen + timedelta(minutes=1))
    )
    assert set(results) == {self.group1}

    # Both bounds combined select only the group inside the window.
    results = self.make_query(
        search_filter_query="firstSeen:>=%s firstSeen:<=%s"
        % (
            date_to_query_format(self.group1.first_seen),
            date_to_query_format(self.group1.first_seen + timedelta(minutes=1)),
        )
    )
    assert set(results) == {self.group1}
def test_age_filter_with_environment(self):
    """firstSeen comparisons use the per-environment first_seen, not the global one."""
    # add time instead to make it greater than or less than as needed.
    group1_first_seen = GroupEnvironment.objects.get(
        environment=self.environments["production"], group=self.group1
    ).first_seen
    assert group1_first_seen is not None

    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="firstSeen:>=%s" % date_to_query_format(group1_first_seen),
    )
    assert set(results) == {self.group1}

    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="firstSeen:<=%s" % date_to_query_format(group1_first_seen),
    )
    assert set(results) == {self.group1}

    # Strictly-greater-than excludes the boundary timestamp itself.
    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="firstSeen:>%s" % date_to_query_format(group1_first_seen),
    )
    assert set(results) == set()

    # A later event in a *different* environment must not leak into production.
    self.store_event(
        data={
            "fingerprint": ["put-me-in-group1"],
            "timestamp": iso_format(group1_first_seen + timedelta(days=1)),
            "message": "group1",
            "stacktrace": {"frames": [{"module": "group1"}]},
            "environment": "development",
        },
        project_id=self.project.id,
    )
    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="firstSeen:>%s" % date_to_query_format(group1_first_seen),
    )
    assert set(results) == set()

    results = self.make_query(
        environments=[Environment.objects.get(name="development")],
        search_filter_query="firstSeen:>%s" % date_to_query_format(group1_first_seen),
    )
    assert set(results) == {self.group1}
  742. def test_times_seen_filter(self):
  743. results = self.make_query([self.project], search_filter_query="times_seen:2")
  744. assert set(results) == {self.group1}
  745. results = self.make_query([self.project], search_filter_query="times_seen:>=2")
  746. assert set(results) == {self.group1}
  747. results = self.make_query([self.project], search_filter_query="times_seen:<=1")
  748. assert set(results) == {self.group2}
def test_last_seen_filter(self):
    """lastSeen supports >= and combined range comparisons."""
    results = self.make_query(
        search_filter_query="lastSeen:>=%s" % date_to_query_format(self.group1.last_seen)
    )
    assert set(results) == {self.group1}

    results = self.make_query(
        search_filter_query="lastSeen:>=%s lastSeen:<=%s"
        % (
            date_to_query_format(self.group1.last_seen),
            date_to_query_format(self.group1.last_seen + timedelta(minutes=1)),
        )
    )
    assert set(results) == {self.group1}
def test_last_seen_filter_with_environment(self):
    """lastSeen comparisons use the per-environment last_seen timestamp."""
    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="lastSeen:>=%s" % date_to_query_format(self.group1.last_seen),
    )
    assert set(results) == {self.group1}

    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="lastSeen:<=%s" % date_to_query_format(self.group1.last_seen),
    )
    assert set(results) == {self.group1}

    # Strictly-greater-than excludes the boundary timestamp itself.
    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="lastSeen:>%s" % date_to_query_format(self.group1.last_seen),
    )
    assert set(results) == set()

    # A newer event in "development" moves the group's global last_seen forward,
    # but must not affect the production-scoped query.
    self.store_event(
        data={
            "fingerprint": ["put-me-in-group1"],
            "timestamp": iso_format(self.group1.last_seen + timedelta(days=1)),
            "message": "group1",
            "stacktrace": {"frames": [{"module": "group1"}]},
            "environment": "development",
        },
        project_id=self.project.id,
    )
    self.group1.update(last_seen=self.group1.last_seen + timedelta(days=1))

    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="lastSeen:>%s" % date_to_query_format(self.group1.last_seen),
    )
    assert set(results) == set()

    results = self.make_query(
        environments=[Environment.objects.get(name="development")],
        search_filter_query="lastSeen:>%s" % date_to_query_format(self.group1.last_seen),
    )
    assert set(results) == set()

    results = self.make_query(
        environments=[Environment.objects.get(name="development")],
        search_filter_query="lastSeen:>=%s" % date_to_query_format(self.group1.last_seen),
    )
    assert set(results) == {self.group1}
def test_date_filter(self):
    """timestamp filters bound results by event time; a trailing Z marker is equivalent."""
    results = self.make_query(
        date_from=self.event2.datetime,
        search_filter_query="timestamp:>=%s" % date_to_query_format(self.event2.datetime),
    )
    assert set(results) == {self.group1, self.group2}

    results = self.make_query(
        date_to=self.event1.datetime + timedelta(minutes=1),
        search_filter_query="timestamp:<=%s"
        % date_to_query_format(self.event1.datetime + timedelta(minutes=1)),
    )
    assert set(results) == {self.group1}

    results = self.make_query(
        date_from=self.event1.datetime,
        date_to=self.event2.datetime + timedelta(minutes=1),
        search_filter_query="timestamp:>=%s timestamp:<=%s"
        % (
            date_to_query_format(self.event1.datetime),
            date_to_query_format(self.event2.datetime + timedelta(minutes=1)),
        ),
    )
    assert set(results) == {self.group1, self.group2}

    # Test with `Z` utc marker, should be equivalent
    results = self.make_query(
        date_from=self.event1.datetime,
        date_to=self.event2.datetime + timedelta(minutes=1),
        search_filter_query="timestamp:>=%s timestamp:<=%s"
        % (
            date_to_query_format(self.event1.datetime) + "Z",
            date_to_query_format(self.event2.datetime + timedelta(minutes=1)) + "Z",
        ),
    )
    assert set(results) == {self.group1, self.group2}
  837. def test_date_filter_with_environment(self):
  838. results = self.backend.query(
  839. [self.project],
  840. environments=[self.environments["production"]],
  841. date_from=self.event2.datetime,
  842. )
  843. assert set(results) == {self.group1}
  844. results = self.backend.query(
  845. [self.project],
  846. environments=[self.environments["production"]],
  847. date_to=self.event1.datetime + timedelta(minutes=1),
  848. )
  849. assert set(results) == {self.group1}
  850. results = self.backend.query(
  851. [self.project],
  852. environments=[self.environments["staging"]],
  853. date_from=self.event1.datetime,
  854. date_to=self.event2.datetime + timedelta(minutes=1),
  855. )
  856. assert set(results) == {self.group2}
  857. def test_linked(self):
  858. linked_group1 = self.create_group_with_integration_external_issue()
  859. linked_group2 = self.create_group_with_platform_external_issue()
  860. results = self.make_query(search_filter_query="is:unlinked")
  861. assert set(results) == {self.group1, self.group2}
  862. results = self.make_query(search_filter_query="is:linked")
  863. assert set(results) == {linked_group1, linked_group2}
  864. def test_linked_with_only_integration_external_issue(self):
  865. linked_group = self.create_group_with_integration_external_issue()
  866. results = self.make_query(search_filter_query="is:unlinked")
  867. assert set(results) == {self.group1, self.group2}
  868. results = self.make_query(search_filter_query="is:linked")
  869. assert set(results) == {linked_group}
  870. def test_linked_with_only_platform_external_issue(self):
  871. linked_group = self.create_group_with_platform_external_issue()
  872. results = self.make_query(search_filter_query="is:unlinked")
  873. assert set(results) == {self.group1, self.group2}
  874. results = self.make_query(search_filter_query="is:linked")
  875. assert set(results) == {linked_group}
  876. def test_linked_with_environment(self):
  877. linked_group1 = self.create_group_with_integration_external_issue(environment="production")
  878. linked_group2 = self.create_group_with_platform_external_issue(environment="staging")
  879. results = self.make_query(
  880. environments=[self.environments["production"]], search_filter_query="is:unlinked"
  881. )
  882. assert set(results) == {self.group1}
  883. results = self.make_query(
  884. environments=[self.environments["staging"]], search_filter_query="is:unlinked"
  885. )
  886. assert set(results) == {self.group2}
  887. results = self.make_query(
  888. environments=[self.environments["production"]], search_filter_query="is:linked"
  889. )
  890. assert set(results) == {linked_group1}
  891. results = self.make_query(
  892. environments=[self.environments["staging"]], search_filter_query="is:linked"
  893. )
  894. assert set(results) == {linked_group2}
  895. def test_unassigned(self):
  896. results = self.make_query(search_filter_query="is:unassigned")
  897. assert set(results) == {self.group1}
  898. results = self.make_query(search_filter_query="is:assigned")
  899. assert set(results) == {self.group2}
  900. def test_unassigned_with_environment(self):
  901. results = self.make_query(
  902. environments=[self.environments["production"]], search_filter_query="is:unassigned"
  903. )
  904. assert set(results) == {self.group1}
  905. results = self.make_query(
  906. environments=[self.environments["staging"]], search_filter_query="is:assigned"
  907. )
  908. assert set(results) == {self.group2}
  909. results = self.make_query(
  910. environments=[self.environments["production"]], search_filter_query="is:assigned"
  911. )
  912. assert set(results) == set()
def test_assigned_to(self):
    """assigned:<username> matches user assignment only, not team assignment or other orgs."""
    results = self.make_query(search_filter_query="assigned:%s" % self.user.username)
    assert set(results) == {self.group2}

    # test team assignee
    ga = GroupAssignee.objects.get(
        user_id=self.user.id, group=self.group2, project=self.group2.project
    )
    # Reassign from the user to their team; a username query must no longer match.
    ga.update(team=self.team, user_id=None)
    assert GroupAssignee.objects.get(id=ga.id).user_id is None
    results = self.make_query(search_filter_query="assigned:%s" % self.user.username)
    assert set(results) == set()

    # test when there should be no results
    other_user = self.create_user()
    results = self.make_query(search_filter_query="assigned:%s" % other_user.username)
    assert set(results) == set()

    owner = self.create_user()
    self.create_member(
        organization=self.project.organization, user=owner, role="owner", teams=[]
    )
    # test that owners don't see results for all teams
    results = self.make_query(search_filter_query="assigned:%s" % owner.username)
    assert set(results) == set()
def test_assigned_to_me_my_teams(self):
    """assigned:me matches direct user assignment; assigned:my_teams matches team assignment."""
    my_team_group = self.store_event(
        data={
            "fingerprint": ["put-me-in-group-my-teams"],
            "event_id": "f" * 32,
            "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            "message": "baz",
            "environment": "staging",
            "tags": {
                "server": "example.com",
                "url": "http://example.com",
                "sentry:user": "event2@example.com",
            },
            "level": "error",
        },
        project_id=self.project.id,
    ).group

    # assign the issue to my team instead of me
    GroupAssignee.objects.create(
        user_id=None, team_id=self.team.id, group=my_team_group, project=my_team_group.project
    )

    # "me" only picks up the direct user assignment (group2).
    self.run_test_query(
        "assigned:me",
        [self.group2],
        user=self.user,
    )
    # Sanity check: the team-assigned group has no user assignment for me.
    assert not GroupAssignee.objects.filter(user_id=self.user.id, group=my_team_group).exists()

    # "my_teams" only picks up the team assignment.
    self.run_test_query(
        "assigned:my_teams",
        [my_team_group],
        user=self.user,
    )
def test_assigned_to_me_my_teams_in_syntax(self):
    """assigned:[me], assigned:[my_teams], and assigned:[me, my_teams] behave like the scalar forms."""
    my_team_group = self.store_event(
        data={
            "fingerprint": ["put-me-in-group-my-teams"],
            "event_id": "f" * 32,
            "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            "message": "baz",
            "environment": "staging",
            "tags": {
                "server": "example.com",
                "url": "http://example.com",
                "sentry:user": "event2@example.com",
            },
            "level": "error",
        },
        project_id=self.project.id,
    ).group

    # assign the issue to my team instead of me
    GroupAssignee.objects.create(
        user_id=None, team_id=self.team.id, group=my_team_group, project=my_team_group.project
    )

    self.run_test_query(
        "assigned:[me]",
        [self.group2],
        user=self.user,
    )
    assert not GroupAssignee.objects.filter(user_id=self.user.id, group=my_team_group).exists()

    # NOTE(review): this repeats the identical "assigned:[me]" query from above —
    # presumably re-checking stability after the assertion; confirm it is intentional.
    self.run_test_query(
        "assigned:[me]",
        [self.group2],
        user=self.user,
    )
    self.run_test_query(
        "assigned:[my_teams]",
        [my_team_group],
        user=self.user,
    )
    # The union form matches both the user-assigned and team-assigned groups.
    self.run_test_query(
        "assigned:[me, my_teams]",
        [self.group2, my_team_group],
        user=self.user,
    )
def test_assigned_to_in_syntax(self):
    """assigned:[...] unions users, teams (#slug), "me", and "none" (unassigned)."""
    group_3 = self.store_event(
        data={
            "fingerprint": ["put-me-in-group3"],
            "event_id": "c" * 32,
            "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
        },
        project_id=self.project.id,
    ).group
    group_3.status = GroupStatus.MUTED
    group_3.substatus = None
    group_3.save()

    other_user = self.create_user()
    # Initially only group2 (assigned to self.user) matches either username.
    self.run_test_query(
        f"assigned:[{self.user.username}, {other_user.username}]",
        [self.group2],
        [self.group1, group_3],
    )

    # Assign group_3 to the other user; it now joins the result set.
    GroupAssignee.objects.create(project=self.project, group=group_3, user_id=other_user.id)
    self.run_test_query(
        f"assigned:[{self.user.username}, {other_user.username}]",
        [self.group2, group_3],
        [self.group1],
    )

    # Team syntax (#slug) does not match user-assigned groups.
    self.run_test_query(
        f"assigned:[#{self.team.slug}, {other_user.username}]",
        [group_3],
        [self.group1, self.group2],
    )

    # Move group2's assignment from the user to the team and re-check both forms.
    ga_2 = GroupAssignee.objects.get(
        user_id=self.user.id, group=self.group2, project=self.group2.project
    )
    ga_2.update(team=self.team, user_id=None)
    self.run_test_query(
        f"assigned:[{self.user.username}, {other_user.username}]",
        [group_3],
        [self.group1, self.group2],
    )
    self.run_test_query(
        f"assigned:[#{self.team.slug}, {other_user.username}]",
        [self.group2, group_3],
        [self.group1],
    )

    # "none" matches unassigned groups (group1); group2 is team-assigned so excluded.
    self.run_test_query(
        f"assigned:[me, none, {other_user.username}]",
        [self.group1, group_3],
        [self.group2],
    )
def test_assigned_or_suggested_in_syntax(self):
    """assigned_or_suggested:[...] matches explicit assignees, GroupOwner suggestions,
    teams (#slug), and "none"; an explicit assignment overrides suggestions."""
    # Start from a clean slate so only the five groups below exist.
    Group.objects.all().delete()
    group = self.store_event(
        data={
            "timestamp": iso_format(before_now(seconds=180)),
            "fingerprint": ["group-1"],
        },
        project_id=self.project.id,
    ).group
    group1 = self.store_event(
        data={
            "timestamp": iso_format(before_now(seconds=185)),
            "fingerprint": ["group-2"],
        },
        project_id=self.project.id,
    ).group
    group2 = self.store_event(
        data={
            "timestamp": iso_format(before_now(seconds=190)),
            "fingerprint": ["group-3"],
        },
        project_id=self.project.id,
    ).group
    assigned_group = self.store_event(
        data={
            "timestamp": iso_format(before_now(seconds=195)),
            "fingerprint": ["group-4"],
        },
        project_id=self.project.id,
    ).group
    assigned_to_other_group = self.store_event(
        data={
            "timestamp": iso_format(before_now(seconds=195)),
            "fingerprint": ["group-5"],
        },
        project_id=self.project.id,
    ).group

    # Nothing is assigned or suggested yet.
    self.run_test_query(
        "assigned_or_suggested:[me]",
        [],
        [group, group1, group2, assigned_group, assigned_to_other_group],
    )

    # Suggest (GroupOwner) two groups to me.
    GroupOwner.objects.create(
        group=assigned_to_other_group,
        project=self.project,
        organization=self.organization,
        type=0,
        team_id=None,
        user_id=self.user.id,
    )
    GroupOwner.objects.create(
        group=group,
        project=self.project,
        organization=self.organization,
        type=0,
        team_id=None,
        user_id=self.user.id,
    )
    self.run_test_query(
        "assigned_or_suggested:[me]",
        [group, assigned_to_other_group],
        [group1, group2, assigned_group],
    )

    # Because assigned_to_other_event is assigned to self.other_user, it should not show up in assigned_or_suggested search for anyone but self.other_user. (aka. they are now the only owner)
    other_user = self.create_user("other@user.com", is_superuser=False)
    GroupAssignee.objects.create(
        group=assigned_to_other_group,
        project=self.project,
        user_id=other_user.id,
    )
    self.run_test_query(
        "assigned_or_suggested:[me]",
        [group],
        [group1, group2, assigned_group, assigned_to_other_group],
    )
    self.run_test_query(
        f"assigned_or_suggested:[{other_user.email}]",
        [assigned_to_other_group],
        [group, group1, group2, assigned_group],
    )

    # Direct assignment to me also matches the filter.
    GroupAssignee.objects.create(
        group=assigned_group, project=self.project, user_id=self.user.id
    )
    self.run_test_query(
        f"assigned_or_suggested:[{self.user.email}]",
        [assigned_group, group],
    )

    # Team suggestion matches the #slug form.
    GroupOwner.objects.create(
        group=group,
        project=self.project,
        organization=self.organization,
        type=0,
        team_id=self.team.id,
        user_id=None,
    )
    self.run_test_query(
        f"assigned_or_suggested:[#{self.team.slug}]",
        [group],
    )

    # "none" adds the groups with no assignee and no suggestion.
    self.run_test_query(
        "assigned_or_suggested:[me, none]",
        [group, group1, group2, assigned_group],
        [assigned_to_other_group],
    )

    # A suggestion for someone else removes group2 from the "none" bucket.
    not_me = self.create_user(email="notme@sentry.io")
    GroupOwner.objects.create(
        group=group2,
        project=self.project,
        organization=self.organization,
        type=0,
        team_id=None,
        user_id=not_me.id,
    )
    self.run_test_query(
        "assigned_or_suggested:[me, none]",
        [group, group1, assigned_group],
        [assigned_to_other_group, group2],
    )

    # Drop my user-level suggestion for `group`; the team suggestion still matches it.
    GroupOwner.objects.filter(group=group, user_id=self.user.id).delete()
    self.run_test_query(
        f"assigned_or_suggested:[me, none, #{self.team.slug}]",
        [group, group1, assigned_group],
        [assigned_to_other_group, group2],
    )
    self.run_test_query(
        f"assigned_or_suggested:[me, none, #{self.team.slug}, {not_me.email}]",
        [group, group1, assigned_group, group2],
        [assigned_to_other_group],
    )
    def test_assigned_or_suggested_my_teams(self):
        """`assigned_or_suggested:my_teams` matches only groups assigned to or
        suggested for one of the requesting user's teams, while `me` matches
        only the user personally; a direct assignment to another user removes
        the group from everyone else's suggestions.
        """
        # Start from a clean slate so only the groups created below exist.
        Group.objects.all().delete()
        group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=180)),
                "fingerprint": ["group-1"],
            },
            project_id=self.project.id,
        ).group
        group1 = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=185)),
                "fingerprint": ["group-2"],
            },
            project_id=self.project.id,
        ).group
        group2 = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=190)),
                "fingerprint": ["group-3"],
            },
            project_id=self.project.id,
        ).group
        assigned_group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=195)),
                "fingerprint": ["group-4"],
            },
            project_id=self.project.id,
        ).group
        assigned_to_other_group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=195)),
                "fingerprint": ["group-5"],
            },
            project_id=self.project.id,
        ).group
        my_team_group = self.store_event(
            data={
                "fingerprint": ["put-me-in-group-my-teams"],
                "event_id": "f" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
                "message": "baz",
                "environment": "staging",
                "tags": {
                    "server": "example.com",
                    "url": "http://example.com",
                    "sentry:user": "event2@example.com",
                },
                "level": "error",
            },
            project_id=self.project.id,
        ).group
        # Nothing assigned or suggested yet: both filters return no groups.
        self.run_test_query(
            "assigned_or_suggested:me",
            [],
            [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:my_teams",
            [],
            [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group],
            user=self.user,
        )
        # Suggest two groups to the user personally (GroupOwner with a
        # user_id) and assign one group to the user's team.
        GroupOwner.objects.create(
            group=assigned_to_other_group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=self.user.id,
        )
        GroupOwner.objects.create(
            group=group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=self.user.id,
        )
        GroupAssignee.objects.create(
            user_id=None, team_id=self.team.id, group=my_team_group, project=my_team_group.project
        )
        # `me` now sees the personally-suggested groups; `my_teams` sees only
        # the team-assigned group.
        self.run_test_query(
            "assigned_or_suggested:me",
            [group, assigned_to_other_group],
            [group1, group2, assigned_group, my_team_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:my_teams",
            [my_team_group],
            [group, group1, group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
        # Because assigned_to_other_event is assigned to self.other_user, it should not show up in assigned_or_suggested search for anyone but self.other_user. (aka. they are now the only owner)
        other_user = self.create_user("other@user.com", is_superuser=False)
        GroupAssignee.objects.create(
            group=assigned_to_other_group,
            project=self.project,
            user_id=other_user.id,
        )
        self.run_test_query(
            "assigned_or_suggested:me",
            [group],
            [group1, group2, assigned_group, my_team_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:my_teams",
            [my_team_group],
            [group, group1, group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            f"assigned_or_suggested:{other_user.email}",
            [assigned_to_other_group],
            [group, group1, group2, assigned_group, my_team_group],
            user=self.user,
        )
        # A direct assignment to the user combines with their suggestions.
        GroupAssignee.objects.create(
            group=assigned_group, project=self.project, user_id=self.user.id
        )
        self.run_test_query(
            f"assigned_or_suggested:{self.user.email}",
            [assigned_group, group],
            [group1, group2, my_team_group, assigned_to_other_group],
            user=self.user,
        )
        # A team-level suggestion makes the group visible via the team filter.
        GroupOwner.objects.create(
            group=group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=self.team.id,
            user_id=None,
        )
        self.run_test_query(
            f"assigned_or_suggested:#{self.team.slug}",
            [group, my_team_group],
            [group1, group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
    def test_assigned_or_suggested_my_teams_in_syntax(self):
        """Bracketed `in` syntax for `assigned_or_suggested` with `me`,
        `my_teams`, `none`, team slugs, and user emails: results are the
        union of the listed values, and direct assignment to another user
        removes a group from everyone else's suggestions.
        """
        # Start from a clean slate so only the groups created below exist.
        Group.objects.all().delete()
        group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=180)),
                "fingerprint": ["group-1"],
            },
            project_id=self.project.id,
        ).group
        group1 = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=185)),
                "fingerprint": ["group-2"],
            },
            project_id=self.project.id,
        ).group
        group2 = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=190)),
                "fingerprint": ["group-3"],
            },
            project_id=self.project.id,
        ).group
        assigned_group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=195)),
                "fingerprint": ["group-4"],
            },
            project_id=self.project.id,
        ).group
        assigned_to_other_group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=195)),
                "fingerprint": ["group-5"],
            },
            project_id=self.project.id,
        ).group
        my_team_group = self.store_event(
            data={
                "fingerprint": ["put-me-in-group-my-teams"],
                "event_id": "f" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
                "message": "baz",
                "environment": "staging",
                "tags": {
                    "server": "example.com",
                    "url": "http://example.com",
                    "sentry:user": "event2@example.com",
                },
                "level": "error",
            },
            project_id=self.project.id,
        ).group
        # Nothing assigned or suggested yet: every combination is empty.
        self.run_test_query(
            "assigned_or_suggested:[me]",
            [],
            [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[my_teams]",
            [],
            [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[me, my_teams]",
            [],
            [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group],
            user=self.user,
        )
        # Suggest two groups to the user personally and assign one group to
        # the user's team.
        GroupOwner.objects.create(
            group=assigned_to_other_group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=self.user.id,
        )
        GroupOwner.objects.create(
            group=group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=self.user.id,
        )
        GroupAssignee.objects.create(
            user_id=None, team_id=self.team.id, group=my_team_group, project=my_team_group.project
        )
        # `[me]` sees personal suggestions, `[my_teams]` the team assignment,
        # and `[me, my_teams]` the union of both.
        self.run_test_query(
            "assigned_or_suggested:[me]",
            [group, assigned_to_other_group],
            [group1, group2, assigned_group, my_team_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[my_teams]",
            [my_team_group],
            [group, group1, group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[me, my_teams]",
            [group, assigned_to_other_group, my_team_group],
            [group1, group2, assigned_group],
            user=self.user,
        )
        # Because assigned_to_other_event is assigned to self.other_user, it should not show up in assigned_or_suggested search for anyone but self.other_user. (aka. they are now the only owner)
        other_user = self.create_user("other@user.com", is_superuser=False)
        GroupAssignee.objects.create(
            group=assigned_to_other_group,
            project=self.project,
            user_id=other_user.id,
        )
        self.run_test_query(
            "assigned_or_suggested:[me]",
            [group],
            [group1, group2, assigned_group, my_team_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[my_teams]",
            [my_team_group],
            [group, group1, group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[me, my_teams]",
            [group, my_team_group],
            [group1, group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            f"assigned_or_suggested:[{other_user.email}]",
            [assigned_to_other_group],
            [group, group1, group2, assigned_group, my_team_group],
            user=self.user,
        )
        # A direct assignment to the user combines with their suggestions.
        GroupAssignee.objects.create(
            group=assigned_group, project=self.project, user_id=self.user.id
        )
        self.run_test_query(
            f"assigned_or_suggested:[{self.user.email}]",
            [assigned_group, group],
            [group1, group2, my_team_group, assigned_to_other_group],
            user=self.user,
        )
        # A team-level suggestion makes the group visible via the team slug.
        GroupOwner.objects.create(
            group=group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=self.team.id,
            user_id=None,
        )
        self.run_test_query(
            f"assigned_or_suggested:[#{self.team.slug}]",
            [group, my_team_group],
            [group1, group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
        # `none` adds the groups with no assignee/owner at all.
        self.run_test_query(
            "assigned_or_suggested:[me, none]",
            [group, group1, group2, assigned_group],
            [my_team_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[my_teams, none]",
            [group, group1, group2, my_team_group],
            [assigned_to_other_group, assigned_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[me, my_teams, none]",
            [group, group1, group2, my_team_group, assigned_group],
            [assigned_to_other_group],
            user=self.user,
        )
        # A suggestion for another user removes group2 from the `none` bucket.
        not_me = self.create_user(email="notme@sentry.io")
        GroupOwner.objects.create(
            group=group2,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=not_me.id,
        )
        self.run_test_query(
            "assigned_or_suggested:[me, none]",
            [group, group1, assigned_group],
            [group2, my_team_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[my_teams, none]",
            [group, group1, my_team_group],
            [group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[me, my_teams, none]",
            [group, group1, my_team_group, assigned_group],
            [group2, assigned_to_other_group],
            user=self.user,
        )
        # Drop the personal suggestion on `group`; it stays visible only via
        # the remaining team-level suggestion.
        GroupOwner.objects.filter(group=group, user_id=self.user.id).delete()
        self.run_test_query(
            f"assigned_or_suggested:[me, none, #{self.team.slug}]",
            [group, group1, assigned_group, my_team_group],
            [assigned_to_other_group, group2],
            user=self.user,
        )
        self.run_test_query(
            f"assigned_or_suggested:[my_teams, none, #{self.team.slug}]",
            [group, group1, my_team_group],
            [assigned_to_other_group, group2, assigned_group],
            user=self.user,
        )
        self.run_test_query(
            f"assigned_or_suggested:[me, my_teams, none, #{self.team.slug}]",
            [group, group1, my_team_group, assigned_group],
            [assigned_to_other_group, group2],
            user=self.user,
        )
        self.run_test_query(
            f"assigned_or_suggested:[me, none, #{self.team.slug}, {not_me.email}]",
            [group, group1, group2, assigned_group, my_team_group],
            [assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            f"assigned_or_suggested:[my_teams, none, #{self.team.slug}, {not_me.email}]",
            [group, group1, group2, my_team_group],
            [assigned_to_other_group, assigned_group],
            user=self.user,
        )
        self.run_test_query(
            f"assigned_or_suggested:[me, my_teams, none, #{self.team.slug}, {not_me.email}]",
            [group, group1, group2, my_team_group, assigned_group],
            [assigned_to_other_group],
            user=self.user,
        )
  1574. def test_assigned_to_with_environment(self):
  1575. results = self.make_query(
  1576. environments=[self.environments["staging"]],
  1577. search_filter_query="assigned:%s" % self.user.username,
  1578. )
  1579. assert set(results) == {self.group2}
  1580. results = self.make_query(
  1581. environments=[self.environments["production"]],
  1582. search_filter_query="assigned:%s" % self.user.username,
  1583. )
  1584. assert set(results) == set()
  1585. def test_subscribed_by(self):
  1586. results = self.make_query(
  1587. [self.group1.project], search_filter_query="subscribed:%s" % self.user.username
  1588. )
  1589. assert set(results) == {self.group1}
  1590. def test_subscribed_by_in_syntax(self):
  1591. self.run_test_query(f"subscribed:[{self.user.username}]", [self.group1], [self.group2])
  1592. user_2 = self.create_user()
  1593. GroupSubscription.objects.create(
  1594. user_id=user_2.id, group=self.group2, project=self.project, is_active=True
  1595. )
  1596. self.run_test_query(
  1597. f"subscribed:[{self.user.username}, {user_2.username}]", [self.group1, self.group2], []
  1598. )
  1599. def test_subscribed_by_with_environment(self):
  1600. results = self.make_query(
  1601. [self.group1.project],
  1602. environments=[self.environments["production"]],
  1603. search_filter_query="subscribed:%s" % self.user.username,
  1604. )
  1605. assert set(results) == {self.group1}
  1606. results = self.make_query(
  1607. [self.group1.project],
  1608. environments=[self.environments["staging"]],
  1609. search_filter_query="subscribed:%s" % self.user.username,
  1610. )
  1611. assert set(results) == set()
  1612. @mock.patch("sentry.search.snuba.executors.bulk_raw_query")
  1613. def test_snuba_not_called_optimization(self, query_mock):
  1614. assert self.make_query(search_filter_query="status:unresolved").results == [self.group1]
  1615. assert not query_mock.called
  1616. assert (
  1617. self.make_query(
  1618. search_filter_query="last_seen:>%s" % date_to_query_format(timezone.now()),
  1619. sort_by="date",
  1620. ).results
  1621. == []
  1622. )
  1623. assert query_mock.called
  1624. @mock.patch("sentry.search.snuba.executors.bulk_raw_query")
  1625. def test_reduce_bulk_results_none_total(self, bulk_raw_query_mock):
  1626. bulk_raw_query_mock.return_value = [
  1627. {"data": [], "totals": {"total": None}},
  1628. {"data": [], "totals": {"total": None}},
  1629. ]
  1630. assert (
  1631. self.make_query(
  1632. search_filter_query="last_seen:>%s" % date_to_query_format(timezone.now()),
  1633. sort_by="date",
  1634. ).results
  1635. == []
  1636. )
  1637. assert bulk_raw_query_mock.called
  1638. @mock.patch("sentry.search.snuba.executors.bulk_raw_query")
  1639. def test_reduce_bulk_results_none_data(self, bulk_raw_query_mock):
  1640. bulk_raw_query_mock.return_value = [
  1641. {"data": None, "totals": {"total": 0}},
  1642. {"data": None, "totals": {"total": 0}},
  1643. ]
  1644. assert (
  1645. self.make_query(
  1646. search_filter_query="last_seen:>%s" % date_to_query_format(timezone.now()),
  1647. sort_by="date",
  1648. ).results
  1649. == []
  1650. )
  1651. assert bulk_raw_query_mock.called
  1652. def test_pre_and_post_filtering(self):
  1653. prev_max_pre = options.get("snuba.search.max-pre-snuba-candidates")
  1654. options.set("snuba.search.max-pre-snuba-candidates", 1)
  1655. try:
  1656. # normal queries work as expected
  1657. results = self.make_query(search_filter_query="foo")
  1658. assert set(results) == {self.group1}
  1659. results = self.make_query(search_filter_query="bar")
  1660. assert set(results) == {self.group2}
  1661. # no candidate matches in Sentry, immediately return empty paginator
  1662. results = self.make_query(search_filter_query="NO MATCHES IN SENTRY")
  1663. assert set(results) == set()
  1664. # too many candidates, skip pre-filter, requires >1 postfilter queries
  1665. results = self.make_query()
  1666. assert set(results) == {self.group1, self.group2}
  1667. finally:
  1668. options.set("snuba.search.max-pre-snuba-candidates", prev_max_pre)
  1669. def test_optimizer_enabled(self):
  1670. prev_optimizer_enabled = options.get("snuba.search.pre-snuba-candidates-optimizer")
  1671. options.set("snuba.search.pre-snuba-candidates-optimizer", True)
  1672. try:
  1673. results = self.make_query(
  1674. search_filter_query="server:example.com",
  1675. environments=[self.environments["production"]],
  1676. )
  1677. assert set(results) == {self.group1}
  1678. finally:
  1679. options.set("snuba.search.pre-snuba-candidates-optimizer", prev_optimizer_enabled)
  1680. def test_search_out_of_range(self):
  1681. the_date = datetime(2000, 1, 1, 0, 0, 0, tzinfo=pytz.utc)
  1682. results = self.make_query(
  1683. search_filter_query=f"event.timestamp:>{the_date} event.timestamp:<{the_date}",
  1684. date_from=the_date,
  1685. date_to=the_date,
  1686. )
  1687. assert set(results) == set()
  1688. def test_regressed_in_release(self):
  1689. # expect no groups within the results since there are no releases
  1690. results = self.make_query(search_filter_query="regressed_in_release:fake")
  1691. assert set(results) == set()
  1692. # expect no groups even though there is a release; since no group regressed in this release
  1693. release_1 = self.create_release()
  1694. results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
  1695. assert set(results) == set()
  1696. # Create a new event so that we get a group in this release
  1697. group = self.store_event(
  1698. data={
  1699. "release": release_1.version,
  1700. },
  1701. project_id=self.project.id,
  1702. ).group
  1703. # # Should still be no group since we didn't regress in this release
  1704. results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
  1705. assert set(results) == set()
  1706. record_group_history(group, GroupHistoryStatus.REGRESSED, release=release_1)
  1707. results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
  1708. assert set(results) == {group}
  1709. # Make sure this works correctly with multiple releases
  1710. release_2 = self.create_release()
  1711. group_2 = self.store_event(
  1712. data={
  1713. "fingerprint": ["put-me-in-group9001"],
  1714. "event_id": "a" * 32,
  1715. "release": release_2.version,
  1716. },
  1717. project_id=self.project.id,
  1718. ).group
  1719. record_group_history(group_2, GroupHistoryStatus.REGRESSED, release=release_2)
  1720. results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
  1721. assert set(results) == {group}
  1722. results = self.make_query(search_filter_query="regressed_in_release:%s" % release_2.version)
  1723. assert set(results) == {group_2}
  1724. def test_first_release(self):
  1725. # expect no groups within the results since there are no releases
  1726. results = self.make_query(search_filter_query="first_release:%s" % "fake")
  1727. assert set(results) == set()
  1728. # expect no groups even though there is a release; since no group
  1729. # is attached to a release
  1730. release_1 = self.create_release(self.project)
  1731. results = self.make_query(search_filter_query="first_release:%s" % release_1.version)
  1732. assert set(results) == set()
  1733. # Create a new event so that we get a group in this release
  1734. group = self.store_event(
  1735. data={
  1736. "fingerprint": ["put-me-in-group9001"],
  1737. "event_id": "a" * 32,
  1738. "message": "hello",
  1739. "environment": "production",
  1740. "tags": {"server": "example.com"},
  1741. "release": release_1.version,
  1742. "stacktrace": {"frames": [{"module": "group1"}]},
  1743. },
  1744. project_id=self.project.id,
  1745. ).group
  1746. results = self.make_query(search_filter_query="first_release:%s" % release_1.version)
  1747. assert set(results) == {group}
    def test_first_release_in_syntax(self):
        """`first_release:[a, b]` matches groups first seen in any of the
        listed releases (union semantics)."""
        # expect no groups within the results since there are no releases
        self.run_test_query("first_release:[fake, fake2]", [])
        # expect no groups even though there is a release; since no group
        # is attached to a release
        release_1 = self.create_release(self.project)
        release_2 = self.create_release(self.project)
        self.run_test_query(f"first_release:[{release_1.version}, {release_2.version}]", [])
        # Create a new event so that we get a group in this release
        group = self.store_event(
            data={
                "fingerprint": ["put-me-in-group9001"],
                "event_id": "a" * 32,
                "message": "hello",
                "environment": "production",
                "tags": {"server": "example.com"},
                "release": release_1.version,
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        ).group
        # Only the new group matches; the fixture groups have no release.
        self.run_test_query(
            f"first_release:[{release_1.version}, {release_2.version}]",
            [group],
            [self.group1, self.group2],
        )
        # Create a new event so that we get a group in this release
        group_2 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group9002"],
                "event_id": "a" * 32,
                "message": "hello",
                "environment": "production",
                "tags": {"server": "example.com"},
                "release": release_2.version,
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        ).group
        # Both groups now match, one via each release in the list.
        self.run_test_query(
            f"first_release:[{release_1.version}, {release_2.version}]",
            [group, group_2],
        )
  1791. def test_first_release_environments(self):
  1792. results = self.make_query(
  1793. environments=[self.environments["production"]],
  1794. search_filter_query="first_release:fake",
  1795. )
  1796. assert set(results) == set()
  1797. release = self.create_release(self.project)
  1798. group_env = GroupEnvironment.get_or_create(
  1799. group_id=self.group1.id, environment_id=self.environments["production"].id
  1800. )[0]
  1801. results = self.make_query(
  1802. environments=[self.environments["production"]],
  1803. search_filter_query=f"first_release:{release.version}",
  1804. )
  1805. assert set(results) == set()
  1806. group_env.first_release = release
  1807. group_env.save()
  1808. results = self.make_query(
  1809. environments=[self.environments["production"]],
  1810. search_filter_query=f"first_release:{release.version}",
  1811. )
  1812. assert set(results) == {self.group1}
    def test_first_release_environments_in_syntax(self):
        """`first_release:[...]` combined with environment filters resolves
        through GroupEnvironment.first_release, and a group seen in several
        environments is returned only once."""
        self.run_test_query(
            "first_release:[fake, fake2]",
            [],
            [self.group1, self.group2],
            environments=[self.environments["production"]],
        )
        release = self.create_release(self.project)
        # Tie group1's production environment to the release.
        group_1_env = GroupEnvironment.objects.get(
            group_id=self.group1.id, environment_id=self.environments["production"].id
        )
        group_1_env.update(first_release=release)
        self.run_test_query(
            f"first_release:[{release.version}, fake2]",
            [self.group1],
            [self.group2],
            environments=[self.environments["production"]],
        )
        # Tie group2's staging environment to the same release; searching
        # across both environments now finds both groups.
        group_2_env = GroupEnvironment.objects.get(
            group_id=self.group2.id, environment_id=self.environments["staging"].id
        )
        group_2_env.update(first_release=release)
        self.run_test_query(
            f"first_release:[{release.version}, fake2]",
            [self.group1, self.group2],
            [],
            environments=[self.environments["production"], self.environments["staging"]],
        )
        # Make sure we don't get duplicate groups
        GroupEnvironment.objects.create(
            group_id=self.group1.id,
            environment_id=self.environments["staging"].id,
            first_release=release,
        )
        self.run_test_query(
            f"first_release:[{release.version}, fake2]",
            [self.group1, self.group2],
            [],
            environments=[self.environments["production"], self.environments["staging"]],
        )
  1853. def test_query_enclosed_in_quotes(self):
  1854. results = self.make_query(search_filter_query='"foo"')
  1855. assert set(results) == {self.group1}
  1856. results = self.make_query(search_filter_query='"bar"')
  1857. assert set(results) == {self.group2}
    @xfail_if_not_postgres("Wildcard searching only supported in Postgres")
    def test_wildcard(self):
        """`*` wildcard matching in message and tag filters: case-insensitive,
        literal bracket characters, and negated tag wildcards."""
        escaped_event = self.store_event(
            data={
                "fingerprint": ["hello-there"],
                "event_id": "f" * 32,
                "message": "somet[hing]",
                "environment": "production",
                "tags": {"server": "example.net"},
                "timestamp": iso_format(self.base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        )
        # Note: Adding in `environment:production` so that we make sure we query
        # in both snuba and postgres
        results = self.make_query(search_filter_query="environment:production so*t")
        assert set(results) == {escaped_event.group}
        # Make sure it's case insensitive
        results = self.make_query(search_filter_query="environment:production SO*t")
        assert set(results) == {escaped_event.group}
        # A wildcard that matches nothing returns no groups.
        results = self.make_query(search_filter_query="environment:production so*zz")
        assert set(results) == set()
        # Bracket characters in the query are treated literally, matching the
        # literal "[hing]" in the stored message.
        results = self.make_query(search_filter_query="environment:production [hing]")
        assert set(results) == {escaped_event.group}
        results = self.make_query(search_filter_query="environment:production s*]")
        assert set(results) == {escaped_event.group}
        # Wildcards also work in tag value filters, including negation.
        results = self.make_query(search_filter_query="environment:production server:example.*")
        assert set(results) == {self.group1, escaped_event.group}
        results = self.make_query(search_filter_query="environment:production !server:*net")
        assert set(results) == {self.group1}
        # TODO: Disabling tests that use [] syntax for the moment. Re-enable
        # these if we decide to add back in, or remove if this comment has been
        # here a while.
        # results = self.make_query(
        #     search_filter_query='environment:production [s][of][mz]',
        # )
        # assert set(results) == set([escaped_event.group])
        # results = self.make_query(
        #     search_filter_query='environment:production [z][of][mz]',
        # )
        # assert set(results) == set()
  1900. def test_null_tags(self):
  1901. tag_event = self.store_event(
  1902. data={
  1903. "fingerprint": ["hello-there"],
  1904. "event_id": "f" * 32,
  1905. "message": "something",
  1906. "environment": "production",
  1907. "tags": {"server": "example.net"},
  1908. "timestamp": iso_format(self.base_datetime),
  1909. "stacktrace": {"frames": [{"module": "group1"}]},
  1910. },
  1911. project_id=self.project.id,
  1912. )
  1913. no_tag_event = self.store_event(
  1914. data={
  1915. "fingerprint": ["hello-there-2"],
  1916. "event_id": "5" * 32,
  1917. "message": "something",
  1918. "environment": "production",
  1919. "timestamp": iso_format(self.base_datetime),
  1920. "stacktrace": {"frames": [{"module": "group2"}]},
  1921. },
  1922. project_id=self.project.id,
  1923. )
  1924. results = self.make_query(search_filter_query="environment:production !server:*net")
  1925. assert set(results) == {self.group1, no_tag_event.group}
  1926. results = self.make_query(search_filter_query="environment:production server:*net")
  1927. assert set(results) == {tag_event.group}
  1928. results = self.make_query(search_filter_query="environment:production !server:example.net")
  1929. assert set(results) == {self.group1, no_tag_event.group}
  1930. results = self.make_query(search_filter_query="environment:production server:example.net")
  1931. assert set(results) == {tag_event.group}
  1932. results = self.make_query(search_filter_query="environment:production has:server")
  1933. assert set(results) == {self.group1, tag_event.group}
  1934. results = self.make_query(search_filter_query="environment:production !has:server")
  1935. assert set(results) == {no_tag_event.group}
  1936. def test_null_promoted_tags(self):
  1937. tag_event = self.store_event(
  1938. data={
  1939. "fingerprint": ["hello-there"],
  1940. "event_id": "f" * 32,
  1941. "message": "something",
  1942. "environment": "production",
  1943. "tags": {"logger": "csp"},
  1944. "timestamp": iso_format(self.base_datetime),
  1945. "stacktrace": {"frames": [{"module": "group1"}]},
  1946. },
  1947. project_id=self.project.id,
  1948. )
  1949. no_tag_event = self.store_event(
  1950. data={
  1951. "fingerprint": ["hello-there-2"],
  1952. "event_id": "5" * 32,
  1953. "message": "something",
  1954. "environment": "production",
  1955. "timestamp": iso_format(self.base_datetime),
  1956. "stacktrace": {"frames": [{"module": "group2"}]},
  1957. },
  1958. project_id=self.project.id,
  1959. )
  1960. results = self.make_query(search_filter_query="environment:production !logger:*sp")
  1961. assert set(results) == {self.group1, no_tag_event.group}
  1962. results = self.make_query(search_filter_query="environment:production logger:*sp")
  1963. assert set(results) == {tag_event.group}
  1964. results = self.make_query(search_filter_query="environment:production !logger:csp")
  1965. assert set(results) == {self.group1, no_tag_event.group}
  1966. results = self.make_query(search_filter_query="environment:production logger:csp")
  1967. assert set(results) == {tag_event.group}
  1968. results = self.make_query(search_filter_query="environment:production has:logger")
  1969. assert set(results) == {tag_event.group}
  1970. results = self.make_query(search_filter_query="environment:production !has:logger")
  1971. assert set(results) == {self.group1, no_tag_event.group}
  1972. def test_sort_multi_project(self):
  1973. self.set_up_multi_project()
  1974. results = self.make_query([self.project, self.project2], sort_by="date")
  1975. assert list(results) == [self.group1, self.group_p2, self.group2]
  1976. results = self.make_query([self.project, self.project2], sort_by="new")
  1977. assert list(results) == [self.group2, self.group_p2, self.group1]
  1978. results = self.make_query([self.project, self.project2], sort_by="freq")
  1979. assert list(results) == [self.group1, self.group_p2, self.group2]
  1980. results = self.make_query([self.project, self.project2], sort_by="priority")
  1981. assert list(results) == [
  1982. self.group_p2,
  1983. self.group2,
  1984. self.group1,
  1985. ]
  1986. results = self.make_query([self.project, self.project2], sort_by="user")
  1987. assert list(results) == [self.group1, self.group2, self.group_p2]
  1988. def test_in_syntax_is_invalid(self):
  1989. with pytest.raises(InvalidSearchQuery, match='"in" syntax invalid for "is" search'):
  1990. self.make_query(search_filter_query="is:[unresolved, resolved]")
    def test_first_release_any_or_no_environments(self):
        """Verify `first_release` filtering with and without environment scoping."""
        # test scenarios for tickets:
        # SEN-571
        # ISSUE-432
        # given the following setup:
        #
        # groups table:
        # group    first_release
        # A        1
        # B        1
        # C        2
        #
        # groupenvironments table:
        # group    environment    first_release
        # A        staging        1
        # A        production     2
        #
        # when querying by first release, the appropriate set of groups should be displayed:
        #
        # first_release: 1
        #     env=[]: A, B
        #     env=[production, staging]: A
        #     env=[staging]: A
        #     env=[production]: nothing
        #
        # first_release: 2
        #     env=[]: A, C
        #     env=[production, staging]: A
        #     env=[staging]: nothing
        #     env=[production]: A

        # create an issue/group whose events that occur in 2 distinct environments
        group_a_event_1 = self.store_event(
            data={
                "fingerprint": ["group_a"],
                "event_id": "aaa" + ("1" * 29),
                "environment": "example_staging",
                "release": "release_1",
            },
            project_id=self.project.id,
        )
        group_a_event_2 = self.store_event(
            data={
                "fingerprint": ["group_a"],
                "event_id": "aaa" + ("2" * 29),
                "environment": "example_production",
                "release": "release_2",
            },
            project_id=self.project.id,
        )
        group_a = group_a_event_1.group

        # get the environments for group_a
        prod_env = group_a_event_2.get_environment()
        staging_env = group_a_event_1.get_environment()

        # create an issue/group whose event that occur in no environments
        # but will be tied to release release_1
        group_b_event_1 = self.store_event(
            data={
                "fingerprint": ["group_b"],
                "event_id": "bbb" + ("1" * 29),
                "release": "release_1",
            },
            project_id=self.project.id,
        )
        assert group_b_event_1.get_environment().name == ""  # has no environment
        group_b = group_b_event_1.group

        # create an issue/group whose event that occur in no environments
        # but will be tied to release release_2
        group_c_event_1 = self.store_event(
            data={
                "fingerprint": ["group_c"],
                "event_id": "ccc" + ("1" * 29),
                "release": "release_2",
            },
            project_id=self.project.id,
        )
        assert group_c_event_1.get_environment().name == ""  # has no environment
        group_c = group_c_event_1.group

        # query by release release_1
        results = self.make_query(search_filter_query="first_release:%s" % "release_1")
        assert set(results) == {group_a, group_b}

        results = self.make_query(
            environments=[staging_env, prod_env],
            search_filter_query="first_release:%s" % "release_1",
        )
        assert set(results) == {group_a}

        results = self.make_query(
            environments=[staging_env], search_filter_query="first_release:%s" % "release_1"
        )
        assert set(results) == {group_a}

        results = self.make_query(
            environments=[prod_env], search_filter_query="first_release:%s" % "release_1"
        )
        assert set(results) == set()

        # query by release release_2
        results = self.make_query(search_filter_query="first_release:%s" % "release_2")
        assert set(results) == {group_a, group_c}

        results = self.make_query(
            environments=[staging_env, prod_env],
            search_filter_query="first_release:%s" % "release_2",
        )
        assert set(results) == {group_a}

        results = self.make_query(
            environments=[staging_env], search_filter_query="first_release:%s" % "release_2"
        )
        assert set(results) == set()

        results = self.make_query(
            environments=[prod_env], search_filter_query="first_release:%s" % "release_2"
        )
        assert set(results) == {group_a}
    def test_all_fields_do_not_error(self):
        # Just a sanity check to make sure that all fields can be successfully
        # searched on without returning type errors and other schema related
        # issues.
        def test_query(query):
            # Helper: the query only needs to execute; any SnubaError is a failure.
            try:
                self.make_query(search_filter_query=query)
            except SnubaError as e:
                self.fail(f"Query {query} errored. Error info: {e}")

        for key in SENTRY_SNUBA_MAP:
            if key in ["project.id", "issue.id", "performance.issue_ids"]:
                continue
            test_query("has:%s" % key)
            test_query("!has:%s" % key)
            # Pick a value of the right shape for this key's type so the query parses.
            if key == "error.handled":
                val: Any = 1
            elif key in issue_search_config.numeric_keys:
                val = "123"
            elif key in issue_search_config.date_keys:
                val = self.base_datetime.isoformat()
            elif key in issue_search_config.boolean_keys:
                val = "true"
            elif key in {"trace.span", "trace.parent_span"}:
                val = "abcdef1234abcdef"
                test_query(f"!{key}:{val}")
            else:
                val = "abadcafedeadbeefdeaffeedabadfeed"
                # NOTE(review): negation is only exercised for the span/free-form
                # branches here — confirm that is intentional for the typed keys.
                test_query(f"!{key}:{val}")
            test_query(f"{key}:{val}")
    def test_message_negation(self):
        """Running the same negated message query twice yields identical results.

        The query is deliberately issued twice — this guards against the negated
        message filter producing unstable/non-deterministic results on repeat.
        """
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "2" * 32,
                "message": "something",
                "timestamp": iso_format(self.base_datetime),
            },
            project_id=self.project.id,
        )
        results = self.make_query(search_filter_query="!message:else")
        results2 = self.make_query(search_filter_query="!message:else")
        assert list(results) == list(results2)
    def test_error_main_thread_true(self):
        """`error.main_thread:1` matches a group whose exception thread is marked main."""
        myProject = self.create_project(
            name="Foo", slug="foo", teams=[self.team], fire_project_created=True
        )
        event = self.store_event(
            data={
                "event_id": "1" * 32,
                "message": "something",
                "timestamp": iso_format(self.base_datetime),
                "exception": {
                    "values": [
                        {
                            "type": "SyntaxError",
                            "value": "hello world",
                            # ties the exception to the thread declared below
                            "thread_id": 1,
                        },
                    ],
                },
                "threads": {
                    "values": [
                        {
                            "id": 1,
                            "main": True,  # the crashing thread is the main thread
                        },
                    ],
                },
            },
            project_id=myProject.id,
        )
        myGroup = event.groups[0]
        results = self.make_query(
            projects=[myProject],
            search_filter_query="error.main_thread:1",
            sort_by="date",
        )
        assert list(results) == [myGroup]
  2178. def test_error_main_thread_false(self):
  2179. myProject = self.create_project(
  2180. name="Foo2", slug="foo2", teams=[self.team], fire_project_created=True
  2181. )
  2182. event = self.store_event(
  2183. data={
  2184. "event_id": "2" * 32,
  2185. "message": "something",
  2186. "timestamp": iso_format(self.base_datetime),
  2187. "exception": {
  2188. "values": [
  2189. {
  2190. "type": "SyntaxError",
  2191. "value": "hello world",
  2192. "thread_id": 1,
  2193. },
  2194. ],
  2195. },
  2196. "threads": {
  2197. "values": [
  2198. {
  2199. "id": 1,
  2200. "main": False,
  2201. },
  2202. ],
  2203. },
  2204. },
  2205. project_id=myProject.id,
  2206. )
  2207. myGroup = event.groups[0]
  2208. results = self.make_query(
  2209. projects=[myProject],
  2210. search_filter_query="error.main_thread:0",
  2211. sort_by="date",
  2212. )
  2213. assert list(results) == [myGroup]
    def test_error_main_thread_no_results(self):
        """Events whose thread omits the `main` flag do not match `error.main_thread` queries."""
        myProject = self.create_project(
            name="Foo3", slug="foo3", teams=[self.team], fire_project_created=True
        )
        self.store_event(
            data={
                "event_id": "3" * 32,
                "message": "something",
                "timestamp": iso_format(self.base_datetime),
                "exception": {
                    "values": [
                        {
                            "type": "SyntaxError",
                            "value": "hello world",
                            "thread_id": 1,
                        },
                    ],
                },
                "threads": {
                    "values": [
                        {
                            # no "main" key on purpose — the flag is absent, not False
                            "id": 1,
                        },
                    ],
                },
            },
            project_id=myProject.id,
        )
        results = self.make_query(
            projects=[myProject],
            search_filter_query="error.main_thread:1",
            sort_by="date",
        )
        assert len(results) == 0
class EventsPriorityTest(SharedSnubaTest, OccurrenceTestMixin):
    """Tests for the "priority" sort option of the events-dataset search backend."""

    @property
    def backend(self):
        # Priority sorting is exercised against the events dataset backend.
        return EventsDatasetSnubaSearchBackend()

    def test_priority_sort_old_and_new_events(self):
        """Test that an issue with only one old event is ranked lower than an issue with only one new event"""
        new_project = self.create_project(organization=self.project.organization)
        base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc)

        recent_event = self.store_event(
            data={
                "fingerprint": ["put-me-in-recent-group"],
                "event_id": "c" * 32,
                "message": "group1",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "event3@example.com"},
                "timestamp": iso_format(base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=new_project.id,
        )
        old_event = self.store_event(
            data={
                "fingerprint": ["put-me-in-old-group"],
                "event_id": "a" * 32,
                "message": "foo. Also, this message is intended to be greater than 256 characters so that we can put some unique string identifier after that point in the string. The purpose of this is in order to verify we are using snuba to search messages instead of Postgres (postgres truncates at 256 characters and clickhouse does not). santryrox.",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "old_event@example.com"},
                "timestamp": iso_format(base_datetime - timedelta(days=20)),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=new_project.id,
        )
        # datetime(2017, 9, 6, 0, 0) — push the old event far into the past
        old_event.data["timestamp"] = 1504656000.0

        weights: PrioritySortWeights = {
            "log_level": 0,
            "has_stacktrace": 0,
            "relative_volume": 1,
            "event_halflife_hours": 4,
            "issue_halflife_hours": 24 * 7,
            "v2": False,
            "norm": False,
        }
        results = self.make_query(
            sort_by="priority",
            projects=[new_project],
            aggregate_kwargs=weights,
        )
        recent_group = Group.objects.get(id=recent_event.group.id)
        old_group = Group.objects.get(id=old_event.group.id)
        # the group with the newer event must rank first
        assert list(results) == [recent_group, old_group]

    def test_priority_sort_v2(self):
        """Test that the v2 formula works."""
        new_project = self.create_project(organization=self.project.organization)
        base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc)

        recent_event = self.store_event(
            data={
                "fingerprint": ["put-me-in-recent-group"],
                "event_id": "c" * 32,
                "message": "group1",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "event3@example.com"},
                "timestamp": iso_format(base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=new_project.id,
        )
        old_event = self.store_event(
            data={
                "fingerprint": ["put-me-in-old-group"],
                "event_id": "a" * 32,
                "message": "foo. Also, this message is intended to be greater than 256 characters so that we can put some unique string identifier after that point in the string. The purpose of this is in order to verify we are using snuba to search messages instead of Postgres (postgres truncates at 256 characters and clickhouse does not). santryrox.",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "old_event@example.com"},
                "timestamp": iso_format(base_datetime - timedelta(days=20)),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=new_project.id,
        )
        # datetime(2017, 9, 6, 0, 0) — push the old event far into the past
        old_event.data["timestamp"] = 1504656000.0

        # identical to test_priority_sort_old_and_new_events except v2=True
        weights: PrioritySortWeights = {
            "log_level": 0,
            "has_stacktrace": 0,
            "relative_volume": 1,
            "event_halflife_hours": 4,
            "issue_halflife_hours": 24 * 7,
            "v2": True,
            "norm": False,
        }
        results = self.make_query(
            sort_by="priority",
            projects=[new_project],
            aggregate_kwargs=weights,
        )
        recent_group = Group.objects.get(id=recent_event.group.id)
        old_group = Group.objects.get(id=old_event.group.id)
        assert list(results) == [recent_group, old_group]

    def test_priority_log_level_results(self):
        """Test that the scoring results change when we pass in different log level weights"""
        base_datetime = (datetime.utcnow() - timedelta(hours=1)).replace(tzinfo=pytz.utc)
        # group1: older event but level=fatal; group2: newer event, level=error
        event1 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "c" * 32,
                "timestamp": iso_format(base_datetime - timedelta(hours=1)),
                "message": "foo",
                "stacktrace": {"frames": [{"module": "group1"}]},
                "environment": "staging",
                "level": "fatal",
            },
            project_id=self.project.id,
        )
        event2 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "event_id": "d" * 32,
                "timestamp": iso_format(base_datetime),
                "message": "bar",
                "stacktrace": {"frames": [{"module": "group2"}]},
                "environment": "staging",
                "level": "error",
            },
            project_id=self.project.id,
        )
        group1 = Group.objects.get(id=event1.group.id)
        group2 = Group.objects.get(id=event2.group.id)

        agg_kwargs = {
            "priority": {
                "log_level": 0,
                "has_stacktrace": 0,
                "relative_volume": 1,
                "event_halflife_hours": 4,
                "issue_halflife_hours": 24 * 7,
                "v2": False,
                "norm": False,
            }
        }
        query_executor = self.backend._get_query_executor()
        # snuba_search returns a tuple; [0] is the row list, and each row's
        # score is read from position [1].
        results_zero_log_level = query_executor.snuba_search(
            start=None,
            end=None,
            project_ids=[self.project.id],
            environment_ids=[],
            sort_field="priority",
            organization=self.organization,
            group_ids=[group1.id, group2.id],
            limit=150,
            aggregate_kwargs=agg_kwargs,
        )[0]
        # NOTE(review): assumes row order corresponds to [group1, group2] — confirm.
        group1_score_before = results_zero_log_level[0][1]
        group2_score_before = results_zero_log_level[1][1]
        # initially group 2's score is higher since it has a more recent event
        assert group2_score_before > group1_score_before

        # re-run with a non-zero log-level weight
        agg_kwargs["priority"].update({"log_level": 5})
        results2 = query_executor.snuba_search(
            start=None,
            end=None,
            project_ids=[self.project.id],
            environment_ids=[],
            sort_field="priority",
            organization=self.organization,
            group_ids=[group1.id, group2.id],
            limit=150,
            aggregate_kwargs=agg_kwargs,
        )[0]
        group1_score_after = results2[0][1]
        group2_score_after = results2[1][1]
        # ensure fatal has a higher score than error
        assert group1_score_after > group2_score_after

    def test_priority_has_stacktrace_results(self):
        """Test that the scoring results change when we pass in different has_stacktrace weights"""
        base_datetime = (datetime.utcnow() - timedelta(hours=1)).replace(tzinfo=pytz.utc)
        agg_kwargs = {
            "priority": {
                "log_level": 0,
                "has_stacktrace": 0,
                "relative_volume": 1,
                "event_halflife_hours": 4,
                "issue_halflife_hours": 24 * 7,
                "v2": False,
                "norm": False,
            }
        }
        query_executor = self.backend._get_query_executor()
        no_stacktrace_event = self.store_event(
            data={
                "event_id": "d" * 32,
                "message": "oh no",
                "timestamp": iso_format(base_datetime - timedelta(hours=1)),
            },
            project_id=self.project.id,
        )
        group1 = Group.objects.get(id=no_stacktrace_event.group.id)

        # NOTE(review): this reuses event_id "d" * 32 from the event above; the
        # two events land in different groups, but confirm the duplicate id is
        # intentional rather than a copy/paste slip.
        stacktrace_event = self.store_event(
            data={
                "event_id": "d" * 32,
                "exception": {
                    "values": [
                        {
                            "type": "AnError",
                            "value": "Bad request",
                            "stacktrace": {
                                "frames": [
                                    {
                                        "module": "<my module>",
                                    },
                                ]
                            },
                        }
                    ]
                },
                "timestamp": iso_format(base_datetime - timedelta(hours=1)),
            },
            project_id=self.project.id,
        )
        group2 = Group.objects.get(id=stacktrace_event.group.id)

        # with a zero has_stacktrace weight both groups score identically
        results = query_executor.snuba_search(
            start=None,
            end=None,
            project_ids=[self.project.id],
            environment_ids=[],
            sort_field="priority",
            organization=self.organization,
            group_ids=[group1.id, group2.id],
            limit=150,
            aggregate_kwargs=agg_kwargs,
        )[0]
        group1_score = results[0][1]
        group2_score = results[1][1]
        assert group1_score == group2_score

        agg_kwargs["priority"].update({"has_stacktrace": 3})
        results = query_executor.snuba_search(
            start=None,
            end=None,
            project_ids=[self.project.id],
            environment_ids=[],
            sort_field="priority",
            organization=self.organization,
            group_ids=[group1.id, group2.id],
            limit=150,
            aggregate_kwargs=agg_kwargs,
        )[0]
        group1_score = results[0][1]
        group2_score = results[1][1]
        # check that a group with an event with a stacktrace has a higher weight than one without
        assert group1_score < group2_score

    def test_priority_event_halflife_results(self):
        """Test that the scoring results change when we pass in different event halflife weights"""
        base_datetime = (datetime.utcnow() - timedelta(hours=1)).replace(tzinfo=pytz.utc)
        event1 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "a" * 32,
                "timestamp": iso_format(base_datetime - timedelta(hours=1)),
                "message": "foo",
                "stacktrace": {"frames": [{"module": "group1"}]},
                "environment": "staging",
                "level": "fatal",
            },
            project_id=self.project.id,
        )
        event2 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "event_id": "b" * 32,
                "timestamp": iso_format(base_datetime),
                "message": "bar",
                "stacktrace": {"frames": [{"module": "group2"}]},
                "environment": "staging",
                "level": "error",
            },
            project_id=self.project.id,
        )
        group1 = Group.objects.get(id=event1.group.id)
        group2 = Group.objects.get(id=event2.group.id)

        agg_kwargs = {
            "priority": {
                "log_level": 0,
                "has_stacktrace": 0,
                "relative_volume": 1,
                "event_halflife_hours": 4,
                "issue_halflife_hours": 24 * 7,
                "v2": False,
                "norm": False,
            }
        }
        query_executor = self.backend._get_query_executor()
        results = query_executor.snuba_search(
            start=None,
            end=None,
            project_ids=[self.project.id],
            environment_ids=[],
            sort_field="priority",
            organization=self.organization,
            group_ids=[group1.id, group2.id],
            limit=150,
            aggregate_kwargs=agg_kwargs,
        )[0]
        group1_score_before = results[0][1]
        group2_score_before = results[1][1]
        # initially group 2's score is higher since it has a more recent event
        assert group2_score_before > group1_score_before

        # shrink the event halflife and re-score
        agg_kwargs["priority"].update({"event_halflife_hours": 2})
        results = query_executor.snuba_search(
            start=None,
            end=None,
            project_ids=[self.project.id],
            environment_ids=[],
            sort_field="priority",
            organization=self.organization,
            group_ids=[group1.id, group2.id],
            limit=150,
            aggregate_kwargs=agg_kwargs,
        )[0]
        group1_score_after = results[0][1]
        group2_score_after = results[1][1]
        assert group1_score_after < group2_score_after

    def test_priority_mixed_group_types(self):
        """Both error groups and issue-platform (profile) groups receive priority scores."""
        base_datetime = (datetime.utcnow() - timedelta(hours=1)).replace(tzinfo=pytz.utc)

        error_event = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "a" * 32,
                "timestamp": iso_format(base_datetime - timedelta(hours=1)),
                "message": "foo",
                "stacktrace": {"frames": [{"module": "group1"}]},
                "environment": "staging",
                "level": "fatal",
            },
            project_id=self.project.id,
        )
        error_group = error_event.group

        # create an issue-platform occurrence (profile group) alongside the error group
        profile_event_id = uuid.uuid4().hex
        _, group_info = process_event_and_issue_occurrence(
            self.build_occurrence_data(event_id=profile_event_id),
            {
                "event_id": profile_event_id,
                "project_id": self.project.id,
                "title": "some problem",
                "platform": "python",
                "tags": {"my_tag": "1"},
                "timestamp": before_now(minutes=1).isoformat(),
                "received": before_now(minutes=1).isoformat(),
            },
        )
        assert group_info is not None
        profile_group_1 = group_info.group

        agg_kwargs = {
            "priority": {
                "log_level": 0,
                "has_stacktrace": 0,
                "relative_volume": 1,
                "event_halflife_hours": 4,
                "issue_halflife_hours": 24 * 7,
                "v2": False,
                "norm": False,
            }
        }
        query_executor = self.backend._get_query_executor()
        with self.feature(
            [
                "organizations:issue-platform",
                ProfileFileIOGroupType.build_visible_feature_name(),
            ]
        ):
            results = query_executor.snuba_search(
                start=None,
                end=None,
                project_ids=[self.project.id],
                environment_ids=[],
                sort_field="priority",
                organization=self.organization,
                group_ids=[profile_group_1.id, error_group.id],
                limit=150,
                aggregate_kwargs=agg_kwargs,
            )[0]
        error_group_score = results[0][1]
        profile_group_score = results[1][1]
        # both group types must receive a non-zero priority score
        assert error_group_score > 0
        assert profile_group_score > 0
  2629. class EventsTransactionsSnubaSearchTest(SharedSnubaTest):
    @property
    def backend(self):
        # This suite targets the events-dataset search backend specifically.
        return EventsDatasetSnubaSearchBackend()
    def setUp(self):
        """Create two performance (issue-platform) groups and two error groups.

        ``self.perf_group_1`` / ``self.perf_group_2`` are render-blocking-asset
        performance groups (tagged ``my_tag: 1``); ``self.error_group_1`` /
        ``self.error_group_2`` are plain error groups sharing the same tag.
        """
        super().setUp()
        self.base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc)

        transaction_event_data = {
            "level": "info",
            "message": "ayoo",
            "type": "transaction",
            "culprit": "app/components/events/eventEntries in map",
            "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
        }
        # Wrap the eventstream call so we can capture the Group created for each
        # occurrence; noise_config is overridden for the perf group type
        # (presumably to keep the occurrences from being throttled — confirm).
        with mock.patch(
            "sentry.issues.ingest.send_issue_occurrence_to_eventstream",
            side_effect=send_issue_occurrence_to_eventstream,
        ) as mock_eventstream, mock.patch.object(
            PerformanceRenderBlockingAssetSpanGroupType,
            "noise_config",
            new=NoiseConfig(0, timedelta(minutes=1)),
        ), self.feature(
            "organizations:issue-platform"
        ):
            self.store_event(
                data={
                    **transaction_event_data,
                    "event_id": "a" * 32,
                    "timestamp": iso_format(before_now(minutes=1)),
                    "start_timestamp": iso_format(before_now(minutes=1, seconds=5)),
                    "tags": {"my_tag": 1},
                    "fingerprint": [
                        f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group1"
                    ],
                },
                project_id=self.project.id,
            )
            # the group is the third positional arg of the captured eventstream call
            self.perf_group_1 = mock_eventstream.call_args[0][2].group
            self.store_event(
                data={
                    **transaction_event_data,
                    "event_id": "a" * 32,
                    "timestamp": iso_format(before_now(minutes=2)),
                    "start_timestamp": iso_format(before_now(minutes=2, seconds=5)),
                    "tags": {"my_tag": 1},
                    "fingerprint": [
                        f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group2"
                    ],
                },
                project_id=self.project.id,
            )
            self.perf_group_2 = mock_eventstream.call_args[0][2].group

        error_event_data = {
            "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            "message": "bar",
            "environment": "staging",
            "tags": {
                "server": "example.com",
                "url": "http://example.com",
                "sentry:user": "event2@example.com",
                "my_tag": 1,
            },
        }
        error_event = self.store_event(
            data={
                **error_event_data,
                "fingerprint": ["put-me-in-error_group_1"],
                "event_id": "c" * 32,
                "stacktrace": {"frames": [{"module": "error_group_1"}]},
            },
            project_id=self.project.id,
        )
        self.error_group_1 = error_event.group
        error_event_2 = self.store_event(
            data={
                **error_event_data,
                "fingerprint": ["put-me-in-error_group_2"],
                "event_id": "d" * 32,
                "stacktrace": {"frames": [{"module": "error_group_2"}]},
            },
            project_id=self.project.id,
        )
        self.error_group_2 = error_event_2.group
    def test_performance_query(self):
        """Performance groups are returned for both category and type filters."""
        with self.feature(
            [
                "organizations:issue-platform",
                self.perf_group_1.issue_type.build_visible_feature_name(),
            ]
        ):
            results = self.make_query(search_filter_query="issue.category:performance my_tag:1")
            assert list(results) == [self.perf_group_1, self.perf_group_2]

            # an issue.type list matches when any listed type applies
            results = self.make_query(
                search_filter_query="issue.type:[performance_n_plus_one_db_queries, performance_render_blocking_asset_span] my_tag:1"
            )
            assert list(results) == [self.perf_group_1, self.perf_group_2]
    def test_performance_query_no_duplicates(self):
        # Regression test to catch an issue we had with performance issues showing duplicated in the
        # issue stream. This was caused by us dual writing perf issues to transactions and to the
        # issue platform. We'd end up reading the same issue twice and duplicate it in the response.
        with self.feature(
            [
                "organizations:issue-platform",
                self.perf_group_1.issue_type.build_visible_feature_name(),
            ]
        ):
            results = self.make_query(search_filter_query="!issue.category:error my_tag:1")
            # each perf group must appear exactly once
            assert list(results) == [self.perf_group_1, self.perf_group_2]
  2737. def test_performance_issue_search_feature_off(self):
  2738. with Feature({"organizations:performance-issues-search": False}):
  2739. results = self.make_query(search_filter_query="issue.category:performance my_tag:1")
  2740. assert list(results) == []
  2741. with self.feature(
  2742. [
  2743. "organizations:issue-platform",
  2744. self.perf_group_1.issue_type.build_visible_feature_name(),
  2745. ]
  2746. ):
  2747. results = self.make_query(search_filter_query="issue.category:performance my_tag:1")
  2748. assert list(results) == [self.perf_group_1, self.perf_group_2]
    def test_error_performance_query(self):
        """Mixed queries return performance and error groups together."""
        with self.feature(
            [
                "organizations:issue-platform",
                self.perf_group_1.issue_type.build_visible_feature_name(),
            ]
        ):
            # no category filter: all four groups match the shared tag
            results = self.make_query(search_filter_query="my_tag:1")
            assert list(results) == [
                self.perf_group_1,
                self.perf_group_2,
                self.error_group_2,
                self.error_group_1,
            ]
            # explicit category list
            results = self.make_query(
                search_filter_query="issue.category:[performance, error] my_tag:1"
            )
            assert list(results) == [
                self.perf_group_1,
                self.perf_group_2,
                self.error_group_2,
                self.error_group_1,
            ]
            # explicit type list mixing a perf type with "error"
            results = self.make_query(
                search_filter_query="issue.type:[performance_render_blocking_asset_span, error] my_tag:1"
            )
            assert list(results) == [
                self.perf_group_1,
                self.perf_group_2,
                self.error_group_2,
                self.error_group_1,
            ]
    def test_cursor_performance_issues(self):
        """Cursor pagination walks performance groups one page at a time."""
        with self.feature(
            [
                "organizations:issue-platform",
                self.perf_group_1.issue_type.build_visible_feature_name(),
            ]
        ):
            # page 1
            results = self.make_query(
                projects=[self.project],
                search_filter_query="issue.category:performance my_tag:1",
                sort_by="date",
                limit=1,
                count_hits=True,
            )
            assert list(results) == [self.perf_group_1]
            assert results.hits == 2

            # page 2, via the cursor from page 1
            results = self.make_query(
                projects=[self.project],
                search_filter_query="issue.category:performance my_tag:1",
                sort_by="date",
                limit=1,
                cursor=results.next,
                count_hits=True,
            )
            assert list(results) == [self.perf_group_2]
            assert results.hits == 2

            # page 3 is empty but the total hit count is still reported
            results = self.make_query(
                projects=[self.project],
                search_filter_query="issue.category:performance my_tag:1",
                sort_by="date",
                limit=1,
                cursor=results.next,
                count_hits=True,
            )
            assert list(results) == []
            assert results.hits == 2
    def test_perf_issue_search_message_term_queries_postgres(self):
        """A free-text term must match a performance issue's transaction name."""
        from django.db.models import Q

        from sentry.utils import snuba

        transaction_name = "im a little tea pot"
        with mock.patch(
            "sentry.issues.ingest.send_issue_occurrence_to_eventstream",
            side_effect=send_issue_occurrence_to_eventstream,
        ) as mock_eventstream, mock.patch.object(
            PerformanceRenderBlockingAssetSpanGroupType,
            "noise_config",
            new=NoiseConfig(0, timedelta(minutes=1)),
        ), self.feature(
            "organizations:issue-platform"
        ):
            tx = self.store_event(
                data={
                    "level": "info",
                    "culprit": "app/components/events/eventEntries in map",
                    "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
                    "fingerprint": [
                        f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group12"
                    ],
                    "event_id": "e" * 32,
                    "timestamp": iso_format(self.base_datetime),
                    "start_timestamp": iso_format(self.base_datetime),
                    "type": "transaction",
                    "transaction": transaction_name,
                },
                project_id=self.project.id,
            )
            assert "tea" in tx.search_message
            # the group is the third positional arg of the captured eventstream call
            created_group = mock_eventstream.call_args[0][2].group

        # the perf group's message column (postgres) contains the transaction term
        find_group = Group.objects.filter(
            Q(type=PerformanceRenderBlockingAssetSpanGroupType.type_id, message__icontains="tea")
        ).first()
        assert created_group == find_group

        with self.feature(
            [
                "organizations:issue-platform",
                created_group.issue_type.build_visible_feature_name(),
            ]
        ):
            # sanity-check that the transaction row landed in the issue-platform dataset
            result = snuba.raw_query(
                dataset=Dataset.IssuePlatform,
                start=self.base_datetime - timedelta(hours=1),
                end=self.base_datetime + timedelta(hours=1),
                selected_columns=[
                    "event_id",
                    "group_id",
                    "transaction_name",
                ],
                groupby=None,
                filter_keys={"project_id": [self.project.id], "event_id": [tx.event_id]},
                referrer="_insert_transaction.verify_transaction",
            )
            assert result["data"][0]["transaction_name"] == transaction_name
            assert result["data"][0]["group_id"] == created_group.id

            # the term matches with and without the category filter
            results = self.make_query(search_filter_query="issue.category:performance tea")
            assert set(results) == {created_group}

            results2 = self.make_query(search_filter_query="tea")
            assert set(results2) == {created_group}
    def test_search_message_error_and_perf_issues(self):
        """A shared message term matches both an error group and a performance group."""
        with mock.patch(
            "sentry.issues.ingest.send_issue_occurrence_to_eventstream",
            side_effect=send_issue_occurrence_to_eventstream,
        ) as mock_eventstream, mock.patch.object(
            PerformanceRenderBlockingAssetSpanGroupType,
            "noise_config",
            new=NoiseConfig(0, timedelta(minutes=1)),
        ), self.feature(
            "organizations:issue-platform"
        ):
            self.store_event(
                data={
                    "level": "info",
                    "culprit": "app/components/events/eventEntries in map",
                    "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
                    "fingerprint": [
                        f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group12"
                    ],
                    "event_id": "e" * 32,
                    "timestamp": iso_format(self.base_datetime),
                    "start_timestamp": iso_format(self.base_datetime),
                    "type": "transaction",
                    # shares the "/api/0/events" term with the error event below
                    "transaction": "/api/0/events",
                },
                project_id=self.project.id,
            )
            perf_issue = mock_eventstream.call_args[0][2].group
            assert perf_issue

        error = self.store_event(
            data={
                "fingerprint": ["another-random-group"],
                "event_id": "d" * 32,
                "message": "Uncaught exception on api /api/0/events",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "event3@example.com"},
                "timestamp": iso_format(self.base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        )
        error_issue = error.group
        assert error_issue
        assert error_issue != perf_issue

        with self.feature(
            [
                "organizations:issue-platform",
                perf_issue.issue_type.build_visible_feature_name(),
            ]
        ):
            # both groups match the shared term, with or without a status filter
            assert set(self.make_query(search_filter_query="is:unresolved /api/0/events")) == {
                perf_issue,
                error_issue,
            }
            assert set(self.make_query(search_filter_query="/api/0/events")) == {
                error_issue,
                perf_issue,
            }
  2936. def test_compound_message_negation(self):
  2937. self.store_event(
  2938. data={
  2939. "fingerprint": ["put-me-in-group1"],
  2940. "event_id": "2" * 32,
  2941. "message": "something",
  2942. "timestamp": iso_format(self.base_datetime),
  2943. },
  2944. project_id=self.project.id,
  2945. )
  2946. self.store_event(
  2947. data={
  2948. "level": "info",
  2949. "culprit": "app/components/events/eventEntries in map",
  2950. "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
  2951. "fingerprint": [f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group12"],
  2952. "event_id": "e" * 32,
  2953. "timestamp": iso_format(self.base_datetime),
  2954. "start_timestamp": iso_format(self.base_datetime),
  2955. "type": "transaction",
  2956. "transaction": "something",
  2957. },
  2958. project_id=self.project.id,
  2959. )
  2960. error_issues_only = self.make_query(
  2961. search_filter_query="!message:else group.category:error"
  2962. )
  2963. error_and_perf_issues = self.make_query(search_filter_query="!message:else")
  2964. assert set(error_and_perf_issues) > set(error_issues_only)
  2965. class EventsGenericSnubaSearchTest(SharedSnubaTest, OccurrenceTestMixin):
  2966. @property
  2967. def backend(self):
  2968. return EventsDatasetSnubaSearchBackend()
  2969. def setUp(self):
  2970. super().setUp()
  2971. self.base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc)
  2972. event_id_1 = uuid.uuid4().hex
  2973. _, group_info = process_event_and_issue_occurrence(
  2974. self.build_occurrence_data(event_id=event_id_1, issue_title="File I/O on Main Thread"),
  2975. {
  2976. "event_id": event_id_1,
  2977. "project_id": self.project.id,
  2978. "title": "some problem",
  2979. "platform": "python",
  2980. "tags": {"my_tag": "1"},
  2981. "timestamp": before_now(minutes=1).isoformat(),
  2982. "received": before_now(minutes=1).isoformat(),
  2983. },
  2984. )
  2985. assert group_info is not None
  2986. self.profile_group_1 = group_info.group
  2987. event_id_2 = uuid.uuid4().hex
  2988. _, group_info = process_event_and_issue_occurrence(
  2989. self.build_occurrence_data(
  2990. event_id=event_id_2,
  2991. fingerprint=["put-me-in-group-2"],
  2992. issue_title="File I/O on Main Thread",
  2993. ),
  2994. {
  2995. "event_id": event_id_2,
  2996. "project_id": self.project.id,
  2997. "title": "some other problem",
  2998. "platform": "python",
  2999. "tags": {"my_tag": "1"},
  3000. "timestamp": before_now(minutes=2).isoformat(),
  3001. "received": before_now(minutes=2).isoformat(),
  3002. },
  3003. )
  3004. assert group_info is not None
  3005. self.profile_group_2 = group_info.group
  3006. event_id_3 = uuid.uuid4().hex
  3007. process_event_and_issue_occurrence(
  3008. self.build_occurrence_data(event_id=event_id_3, fingerprint=["put-me-in-group-3"]),
  3009. {
  3010. "event_id": event_id_3,
  3011. "project_id": self.project.id,
  3012. "title": "some other problem",
  3013. "platform": "python",
  3014. "tags": {"my_tag": "2"},
  3015. "timestamp": before_now(minutes=2).isoformat(),
  3016. "message_timestamp": before_now(minutes=2).isoformat(),
  3017. },
  3018. )
  3019. error_event_data = {
  3020. "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
  3021. "message": "bar",
  3022. "environment": "staging",
  3023. "tags": {
  3024. "server": "example.com",
  3025. "url": "http://example.com",
  3026. "sentry:user": "event2@example.com",
  3027. "my_tag": 1,
  3028. },
  3029. }
  3030. error_event = self.store_event(
  3031. data={
  3032. **error_event_data,
  3033. "fingerprint": ["put-me-in-error_group_1"],
  3034. "event_id": "c" * 32,
  3035. "stacktrace": {"frames": [{"module": "error_group_1"}]},
  3036. },
  3037. project_id=self.project.id,
  3038. )
  3039. self.error_group_1 = error_event.group
  3040. error_event_2 = self.store_event(
  3041. data={
  3042. **error_event_data,
  3043. "fingerprint": ["put-me-in-error_group_2"],
  3044. "event_id": "d" * 32,
  3045. "stacktrace": {"frames": [{"module": "error_group_2"}]},
  3046. },
  3047. project_id=self.project.id,
  3048. )
  3049. self.error_group_2 = error_event_2.group
  3050. def test_no_feature(self):
  3051. results = self.make_query(search_filter_query="issue.category:performance my_tag:1")
  3052. assert list(results) == []
  3053. def test_generic_query(self):
  3054. with self.feature(
  3055. ["organizations:issue-platform", ProfileFileIOGroupType.build_visible_feature_name()]
  3056. ):
  3057. results = self.make_query(search_filter_query="issue.category:performance my_tag:1")
  3058. assert list(results) == [self.profile_group_1, self.profile_group_2]
  3059. results = self.make_query(
  3060. search_filter_query="issue.type:profile_file_io_main_thread my_tag:1"
  3061. )
  3062. assert list(results) == [self.profile_group_1, self.profile_group_2]
  3063. def test_generic_query_message(self):
  3064. with self.feature(
  3065. ["organizations:issue-platform", ProfileFileIOGroupType.build_visible_feature_name()]
  3066. ):
  3067. results = self.make_query(search_filter_query="File I/O")
  3068. assert list(results) == [self.profile_group_1, self.profile_group_2]
  3069. def test_generic_query_perf(self):
  3070. event_id = uuid.uuid4().hex
  3071. group_type = PerformanceNPlusOneGroupType
  3072. with mock.patch.object(
  3073. PerformanceNPlusOneGroupType, "noise_config", new=NoiseConfig(0, timedelta(minutes=1))
  3074. ):
  3075. with self.feature(group_type.build_ingest_feature_name()):
  3076. _, group_info = process_event_and_issue_occurrence(
  3077. self.build_occurrence_data(
  3078. event_id=event_id, type=group_type.type_id, fingerprint=["some perf issue"]
  3079. ),
  3080. {
  3081. "event_id": event_id,
  3082. "project_id": self.project.id,
  3083. "title": "some problem",
  3084. "platform": "python",
  3085. "tags": {"my_tag": "2"},
  3086. "timestamp": before_now(minutes=1).isoformat(),
  3087. "received": before_now(minutes=1).isoformat(),
  3088. },
  3089. )
  3090. assert group_info is not None
  3091. results = self.make_query(search_filter_query="issue.category:performance my_tag:2")
  3092. assert list(results) == []
  3093. with self.feature(
  3094. [
  3095. "organizations:issue-platform",
  3096. group_type.build_visible_feature_name(),
  3097. "organizations:performance-issues-search",
  3098. ]
  3099. ):
  3100. results = self.make_query(search_filter_query="issue.category:performance my_tag:2")
  3101. assert list(results) == [group_info.group]
  3102. def test_error_generic_query(self):
  3103. with self.feature(
  3104. ["organizations:issue-platform", ProfileFileIOGroupType.build_visible_feature_name()]
  3105. ):
  3106. results = self.make_query(search_filter_query="my_tag:1")
  3107. assert list(results) == [
  3108. self.profile_group_1,
  3109. self.profile_group_2,
  3110. self.error_group_2,
  3111. self.error_group_1,
  3112. ]
  3113. results = self.make_query(
  3114. search_filter_query="issue.category:[performance, error] my_tag:1"
  3115. )
  3116. assert list(results) == [
  3117. self.profile_group_1,
  3118. self.profile_group_2,
  3119. self.error_group_2,
  3120. self.error_group_1,
  3121. ]
  3122. results = self.make_query(
  3123. search_filter_query="issue.type:[profile_file_io_main_thread, error] my_tag:1"
  3124. )
  3125. assert list(results) == [
  3126. self.profile_group_1,
  3127. self.profile_group_2,
  3128. self.error_group_2,
  3129. self.error_group_1,
  3130. ]
  3131. def test_cursor_profile_issues(self):
  3132. with self.feature(
  3133. ["organizations:issue-platform", ProfileFileIOGroupType.build_visible_feature_name()]
  3134. ):
  3135. results = self.make_query(
  3136. projects=[self.project],
  3137. search_filter_query="issue.category:performance my_tag:1",
  3138. sort_by="date",
  3139. limit=1,
  3140. count_hits=True,
  3141. )
  3142. assert list(results) == [self.profile_group_1]
  3143. assert results.hits == 2
  3144. results = self.make_query(
  3145. projects=[self.project],
  3146. search_filter_query="issue.category:performance my_tag:1",
  3147. sort_by="date",
  3148. limit=1,
  3149. cursor=results.next,
  3150. count_hits=True,
  3151. )
  3152. assert list(results) == [self.profile_group_2]
  3153. assert results.hits == 2
  3154. results = self.make_query(
  3155. projects=[self.project],
  3156. search_filter_query="issue.category:performance my_tag:1",
  3157. sort_by="date",
  3158. limit=1,
  3159. cursor=results.next,
  3160. count_hits=True,
  3161. )
  3162. assert list(results) == []
  3163. assert results.hits == 2
  3164. def test_rejected_filters(self):
  3165. """
  3166. Any queries with `error.handled` or `error.unhandled` filters querying the search_issues dataset
  3167. should be rejected and return empty results.
  3168. """
  3169. with self.feature(
  3170. ["organizations:issue-platform", ProfileFileIOGroupType.build_visible_feature_name()]
  3171. ):
  3172. results = self.make_query(
  3173. projects=[self.project],
  3174. search_filter_query="issue.category:performance error.unhandled:0",
  3175. sort_by="date",
  3176. limit=1,
  3177. count_hits=True,
  3178. )
  3179. results2 = self.make_query(
  3180. projects=[self.project],
  3181. search_filter_query="issue.category:performance error.unhandled:1",
  3182. sort_by="date",
  3183. limit=1,
  3184. count_hits=True,
  3185. )
  3186. result3 = self.make_query(
  3187. projects=[self.project],
  3188. search_filter_query="issue.category:performance error.handled:0",
  3189. sort_by="date",
  3190. limit=1,
  3191. count_hits=True,
  3192. )
  3193. results4 = self.make_query(
  3194. projects=[self.project],
  3195. search_filter_query="issue.category:performance error.handled:1",
  3196. sort_by="date",
  3197. limit=1,
  3198. count_hits=True,
  3199. )
  3200. results5 = self.make_query(
  3201. projects=[self.project],
  3202. search_filter_query="issue.category:performance error.main_thread:0",
  3203. sort_by="date",
  3204. limit=1,
  3205. count_hits=True,
  3206. )
  3207. results6 = self.make_query(
  3208. projects=[self.project],
  3209. search_filter_query="issue.category:performance error.main_thread:1",
  3210. sort_by="date",
  3211. limit=1,
  3212. count_hits=True,
  3213. )
  3214. assert (
  3215. list(results)
  3216. == list(results2)
  3217. == list(result3)
  3218. == list(results4)
  3219. == list(results5)
  3220. == list(results6)
  3221. == []
  3222. )
  3223. class CdcEventsSnubaSearchTest(SharedSnubaTest):
  3224. @property
  3225. def backend(self):
  3226. return CdcEventsDatasetSnubaSearchBackend()
    def setUp(self):
        """Seed two groups across two environments:

        - group1: a production event 21 days before base plus a staging
          event at base time (users user1 and user2).
        - group2: a single staging event 20 days before base (user1).
        """
        super().setUp()
        self.base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc)

        # Oldest event; establishes group1 and the production environment.
        self.event1 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "a" * 32,
                "environment": "production",
                "timestamp": iso_format(self.base_datetime - timedelta(days=21)),
                "tags": {"sentry:user": "user1"},
            },
            project_id=self.project.id,
        )
        self.env1 = self.event1.get_environment()
        self.group1 = self.event1.group
        # Newer event in the same group but in staging, so group1 is
        # visible from both environments.
        self.event3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "environment": "staging",
                "timestamp": iso_format(self.base_datetime),
                "tags": {"sentry:user": "user2"},
            },
            project_id=self.project.id,
        )
        # Second group, staging only.
        self.event2 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
                "environment": "staging",
                "tags": {"sentry:user": "user1"},
            },
            project_id=self.project.id,
        )
        self.group2 = self.event2.group
        self.env2 = self.event2.get_environment()
  3262. def run_test(
  3263. self,
  3264. search_filter_query,
  3265. expected_groups,
  3266. expected_hits,
  3267. projects=None,
  3268. environments=None,
  3269. sort_by="date",
  3270. limit=None,
  3271. count_hits=False,
  3272. date_from=None,
  3273. date_to=None,
  3274. cursor=None,
  3275. ):
  3276. results = self.make_query(
  3277. projects=projects,
  3278. search_filter_query=search_filter_query,
  3279. environments=environments,
  3280. sort_by=sort_by,
  3281. limit=limit,
  3282. count_hits=count_hits,
  3283. date_from=date_from,
  3284. date_to=date_to,
  3285. cursor=cursor,
  3286. )
  3287. assert list(results) == expected_groups
  3288. assert results.hits == expected_hits
  3289. return results
    def test(self):
        """Baseline: both seeded groups are unresolved."""
        self.run_test("is:unresolved", [self.group1, self.group2], None)
    def test_invalid(self):
        """Filters this executor cannot translate raise InvalidQueryForExecutor."""
        with pytest.raises(InvalidQueryForExecutor):
            self.make_query(search_filter_query="is:unresolved abc:123")
  3295. def test_resolved_group(self):
  3296. self.group2.status = GroupStatus.RESOLVED
  3297. self.group2.substatus = None
  3298. self.group2.save()
  3299. self.store_group(self.group2)
  3300. self.run_test("is:unresolved", [self.group1], None)
  3301. self.run_test("is:resolved", [self.group2], None)
  3302. self.run_test("is:unresolved is:resolved", [], None)
  3303. def test_environment(self):
  3304. self.run_test("is:unresolved", [self.group1], None, environments=[self.env1])
  3305. self.run_test("is:unresolved", [self.group1, self.group2], None, environments=[self.env2])
    def test_sort_times_seen(self):
        """Frequency sort reorders groups once extra events make group2 the
        most frequently seen."""
        self.run_test(
            "is:unresolved",
            [self.group1, self.group2],
            None,
            sort_by="freq",
            date_from=self.base_datetime - timedelta(days=30),
        )
        # Two additional events for group2 push its times-seen above group1's.
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.base_datetime - timedelta(days=15)),
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.base_datetime - timedelta(days=10)),
                "tags": {"sentry:user": "user2"},
            },
            project_id=self.project.id,
        )
        self.run_test(
            "is:unresolved",
            [self.group2, self.group1],
            None,
            sort_by="freq",
            # Change the date range to bust the cache
            date_from=self.base_datetime - timedelta(days=29),
        )
    def test_sort_first_seen(self):
        """Sorting by "new" orders groups by first-seen, newest first; a
        freshly created group3 leads the list."""
        self.run_test(
            "is:unresolved",
            [self.group2, self.group1],
            None,
            sort_by="new",
            date_from=self.base_datetime - timedelta(days=30),
        )
        group3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group3"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
            },
            project_id=self.project.id,
        ).group
        self.run_test(
            "is:unresolved",
            [group3, self.group2, self.group1],
            None,
            sort_by="new",
            # Change the date range to bust the cache
            date_from=self.base_datetime - timedelta(days=29),
        )
    def test_sort_user(self):
        """Sorting by user count orders groups by distinct users affected;
        a group with no user tags sorts last."""
        self.run_test(
            "is:unresolved",
            [self.group1, self.group2],
            None,
            sort_by="user",
            date_from=self.base_datetime - timedelta(days=30),
        )
        # Add user2 twice to group2 (duplicates should not inflate the
        # distinct-user count) ...
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
                "tags": {"sentry:user": "user2"},
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
                "tags": {"sentry:user": "user2"},
            },
            project_id=self.project.id,
        )
        # ... and user1 twice to group1.
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
                "tags": {"sentry:user": "user1"},
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
                "tags": {"sentry:user": "user1"},
            },
            project_id=self.project.id,
        )
        # Test group with no users, which can return a null count
        group3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group3"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
            },
            project_id=self.project.id,
        ).group
        self.run_test(
            "is:unresolved",
            [self.group2, self.group1, group3],
            None,
            sort_by="user",
            # Change the date range to bust the cache
            date_from=self.base_datetime - timedelta(days=29),
        )
    def test_sort_priority(self):
        """Priority sort returns group1 before group2 with the seeded data."""
        self.run_test(
            "is:unresolved",
            [self.group1, self.group2],
            None,
            sort_by="priority",
            date_from=self.base_datetime - timedelta(days=30),
        )
    def test_cursor(self):
        """Cursor pagination: page forward through four groups, step back
        with the prev cursor, then fetch two at once."""
        group3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group3"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
                "tags": {"sentry:user": "user2"},
            },
            project_id=self.project.id,
        ).group
        group4 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group7"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=2)),
                "tags": {"sentry:user": "user2"},
            },
            project_id=self.project.id,
        ).group
        # Newest group first; total hits is always 4.
        results = self.run_test("is:unresolved", [group4], 4, limit=1, count_hits=True)
        results = self.run_test(
            "is:unresolved", [group3], 4, limit=1, cursor=results.next, count_hits=True
        )
        # prev cursor steps back to the first page.
        results = self.run_test(
            "is:unresolved", [group4], 4, limit=1, cursor=results.prev, count_hits=True
        )
        self.run_test(
            "is:unresolved", [group3, self.group1], 4, limit=2, cursor=results.next, count_hits=True
        )
    def test_rechecking(self):
        """A group resolved only in Postgres (not synced to Clickhouse) must
        still be filtered out by the post-query recheck."""
        self.group2.status = GroupStatus.RESOLVED
        self.group2.substatus = None
        self.group2.save()
        # Explicitly avoid calling `store_group` here. This means that Clickhouse will still see
        # this group as `UNRESOLVED` and it will be returned in the snuba results. This group
        # should still be filtered out by our recheck.
        self.run_test("is:unresolved", [self.group1], None)