# test_backend.py
import time
import uuid
from datetime import UTC, datetime, timedelta
from typing import Any
from unittest import mock

import pytest
import urllib3
from django.utils import timezone
from sentry_kafka_schemas.schema_types.group_attributes_v1 import GroupAttributesSnapshot

from sentry import options
from sentry.api.issue_search import convert_query_values, issue_search_config, parse_search_query
from sentry.exceptions import InvalidSearchQuery
from sentry.issues.grouptype import (
    ErrorGroupType,
    FeedbackGroup,
    NoiseConfig,
    PerformanceNPlusOneGroupType,
    PerformanceRenderBlockingAssetSpanGroupType,
    ProfileFileIOGroupType,
)
from sentry.issues.ingest import send_issue_occurrence_to_eventstream
from sentry.models.environment import Environment
from sentry.models.group import Group, GroupStatus
from sentry.models.groupassignee import GroupAssignee
from sentry.models.groupbookmark import GroupBookmark
from sentry.models.groupenvironment import GroupEnvironment
from sentry.models.grouphistory import GroupHistoryStatus, record_group_history
from sentry.models.groupowner import GroupOwner
from sentry.models.groupsubscription import GroupSubscription
from sentry.search.snuba.backend import EventsDatasetSnubaSearchBackend, SnubaSearchBackendBase
from sentry.search.snuba.executors import TrendsSortWeights
from sentry.snuba.dataset import Dataset
from sentry.testutils.cases import SnubaTestCase, TestCase, TransactionTestCase
from sentry.testutils.helpers import Feature, apply_feature_flag_on_cls
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.testutils.skips import xfail_if_not_postgres
from sentry.types.group import GroupSubStatus, PriorityLevel
from sentry.utils import json
from sentry.utils.snuba import SENTRY_SNUBA_MAP, SnubaError
from tests.sentry.issues.test_utils import OccurrenceTestMixin
  41. def date_to_query_format(date):
  42. return date.strftime("%Y-%m-%dT%H:%M:%S")
  43. class SharedSnubaMixin(SnubaTestCase):
  44. @property
  45. def backend(self) -> SnubaSearchBackendBase:
  46. raise NotImplementedError(self)
  47. def build_search_filter(self, query, projects=None, user=None, environments=None):
  48. user = user if user is not None else self.user
  49. projects = projects if projects is not None else [self.project]
  50. return convert_query_values(parse_search_query(query), projects, user, environments)
  51. def make_query(
  52. self,
  53. projects=None,
  54. search_filter_query=None,
  55. user=None,
  56. environments=None,
  57. sort_by="date",
  58. limit=None,
  59. count_hits=False,
  60. date_from=None,
  61. date_to=None,
  62. cursor=None,
  63. aggregate_kwargs=None,
  64. ):
  65. search_filters = []
  66. projects = projects if projects is not None else [self.project]
  67. if search_filter_query is not None:
  68. search_filters = self.build_search_filter(
  69. search_filter_query, projects, user=user, environments=environments
  70. )
  71. kwargs = {}
  72. if limit is not None:
  73. kwargs["limit"] = limit
  74. if aggregate_kwargs:
  75. kwargs["aggregate_kwargs"] = {"trends": {**aggregate_kwargs}}
  76. return self.backend.query(
  77. projects,
  78. search_filters=search_filters,
  79. environments=environments,
  80. count_hits=count_hits,
  81. sort_by=sort_by,
  82. date_from=date_from,
  83. date_to=date_to,
  84. cursor=cursor,
  85. **kwargs,
  86. )
  87. def store_event(self, data, *args, **kwargs):
  88. event = super().store_event(data, *args, **kwargs)
  89. environment_name = data.get("environment")
  90. if environment_name:
  91. GroupEnvironment.objects.filter(
  92. group_id=event.group_id,
  93. environment__name=environment_name,
  94. first_seen__gt=event.datetime,
  95. ).update(first_seen=event.datetime)
  96. return event
  97. class EventsDatasetTestSetup(SharedSnubaMixin):
  98. @property
  99. def backend(self):
  100. return EventsDatasetSnubaSearchBackend()
  101. def setUp(self):
  102. super().setUp()
  103. self.base_datetime = before_now(days=3)
  104. event1_timestamp = iso_format(self.base_datetime - timedelta(days=21))
  105. self.event1 = self.store_event(
  106. data={
  107. "fingerprint": ["put-me-in-group1"],
  108. "event_id": "a" * 32,
  109. "message": "foo. Also, this message is intended to be greater than 256 characters so that we can put some unique string identifier after that point in the string. The purpose of this is in order to verify we are using snuba to search messages instead of Postgres (postgres truncates at 256 characters and clickhouse does not). santryrox.",
  110. "environment": "production",
  111. "tags": {"server": "example.com", "sentry:user": "event1@example.com"},
  112. "timestamp": event1_timestamp,
  113. "stacktrace": {"frames": [{"module": "group1"}]},
  114. "level": "fatal",
  115. },
  116. project_id=self.project.id,
  117. )
  118. self.event3 = self.store_event(
  119. data={
  120. "fingerprint": ["put-me-in-group1"],
  121. "event_id": "c" * 32,
  122. "message": "group1",
  123. "environment": "production",
  124. "tags": {"server": "example.com", "sentry:user": "event3@example.com"},
  125. "timestamp": iso_format(self.base_datetime),
  126. "stacktrace": {"frames": [{"module": "group1"}]},
  127. "level": "fatal",
  128. },
  129. project_id=self.project.id,
  130. )
  131. self.group1 = Group.objects.get(id=self.event1.group.id)
  132. assert self.group1.id == self.event1.group.id
  133. assert self.group1.id == self.event3.group.id
  134. assert self.group1.first_seen == self.event1.datetime
  135. assert self.group1.last_seen == self.event3.datetime
  136. self.group1.times_seen = 5
  137. self.group1.status = GroupStatus.UNRESOLVED
  138. self.group1.substatus = GroupSubStatus.ONGOING
  139. self.group1.priority = PriorityLevel.HIGH
  140. self.group1.update(type=ErrorGroupType.type_id)
  141. self.group1.save()
  142. self.store_group(self.group1)
  143. self.event2 = self.store_event(
  144. data={
  145. "fingerprint": ["put-me-in-group2"],
  146. "event_id": "b" * 32,
  147. "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
  148. "message": "bar",
  149. "stacktrace": {"frames": [{"module": "group2"}]},
  150. "environment": "staging",
  151. "tags": {
  152. "server": "example.com",
  153. "url": "http://example.com",
  154. "sentry:user": "event2@example.com",
  155. },
  156. "level": "error",
  157. },
  158. project_id=self.project.id,
  159. )
  160. self.group2 = Group.objects.get(id=self.event2.group.id)
  161. assert self.group2.id == self.event2.group.id
  162. assert self.group2.first_seen == self.group2.last_seen == self.event2.datetime
  163. self.group2.status = GroupStatus.RESOLVED
  164. self.group2.substatus = None
  165. self.group2.times_seen = 10
  166. self.group2.update(type=ErrorGroupType.type_id)
  167. self.group2.priority = PriorityLevel.HIGH
  168. self.group2.save()
  169. self.store_group(self.group2)
  170. GroupBookmark.objects.create(
  171. user_id=self.user.id, group=self.group2, project=self.group2.project
  172. )
  173. GroupAssignee.objects.create(
  174. user_id=self.user.id, group=self.group2, project=self.group2.project
  175. )
  176. GroupSubscription.objects.create(
  177. user_id=self.user.id, group=self.group1, project=self.group1.project, is_active=True
  178. )
  179. GroupSubscription.objects.create(
  180. user_id=self.user.id, group=self.group2, project=self.group2.project, is_active=False
  181. )
  182. self.environments = {
  183. "production": self.event1.get_environment(),
  184. "staging": self.event2.get_environment(),
  185. }
  186. def set_up_multi_project(self):
  187. self.project2 = self.create_project(organization=self.project.organization)
  188. self.event_p2 = self.store_event(
  189. data={
  190. "event_id": "a" * 32,
  191. "fingerprint": ["put-me-in-groupP2"],
  192. "timestamp": iso_format(self.base_datetime - timedelta(days=21)),
  193. "message": "foo",
  194. "stacktrace": {"frames": [{"module": "group_p2"}]},
  195. "tags": {"server": "example.com"},
  196. "environment": "production",
  197. },
  198. project_id=self.project2.id,
  199. )
  200. self.group_p2 = Group.objects.get(id=self.event_p2.group.id)
  201. self.group_p2.times_seen = 6
  202. self.group_p2.last_seen = self.base_datetime - timedelta(days=1)
  203. self.group_p2.save()
  204. self.store_group(self.group_p2)
  205. def create_group_with_integration_external_issue(self, environment="production"):
  206. event = self.store_event(
  207. data={
  208. "fingerprint": ["linked_group1"],
  209. "event_id": uuid.uuid4().hex,
  210. "timestamp": iso_format(self.base_datetime),
  211. "environment": environment,
  212. },
  213. project_id=self.project.id,
  214. )
  215. integration, _ = self.create_provider_integration_for(
  216. event.group.organization, self.user, provider="example", name="Example"
  217. )
  218. self.create_integration_external_issue(
  219. group=event.group,
  220. integration=integration,
  221. key="APP-123",
  222. )
  223. return event.group
  224. def create_group_with_platform_external_issue(self, environment="production"):
  225. event = self.store_event(
  226. data={
  227. "fingerprint": ["linked_group2"],
  228. "event_id": uuid.uuid4().hex,
  229. "timestamp": iso_format(self.base_datetime),
  230. "environment": environment,
  231. },
  232. project_id=self.project.id,
  233. )
  234. self.create_platform_external_issue(
  235. group=event.group,
  236. service_type="sentry-app",
  237. display_name="App#issue-1",
  238. web_url="https://example.com/app/issues/1",
  239. )
  240. return event.group
  241. def run_test_query(
  242. self, query, expected_groups, expected_negative_groups=None, environments=None, user=None
  243. ):
  244. results = self.make_query(search_filter_query=query, environments=environments, user=user)
  245. def sort_key(result):
  246. return result.id
  247. assert sorted(results, key=sort_key) == sorted(expected_groups, key=sort_key)
  248. if expected_negative_groups is not None:
  249. results = self.make_query(search_filter_query=f"!{query}", user=user)
  250. assert sorted(results, key=sort_key) == sorted(expected_negative_groups, key=sort_key)
  251. class EventsSnubaSearchTestCases(EventsDatasetTestSetup):
  252. def test_query(self):
  253. results = self.make_query(search_filter_query="foo")
  254. assert set(results) == {self.group1}
  255. results = self.make_query(search_filter_query="bar")
  256. assert set(results) == {self.group2}
  257. def test_query_multi_project(self):
  258. self.set_up_multi_project()
  259. results = self.make_query([self.project, self.project2], search_filter_query="foo")
  260. assert set(results) == {self.group1, self.group_p2}
  261. def test_query_with_environment(self):
  262. results = self.make_query(
  263. environments=[self.environments["production"]], search_filter_query="foo"
  264. )
  265. assert set(results) == {self.group1}
  266. results = self.make_query(
  267. environments=[self.environments["production"]], search_filter_query="bar"
  268. )
  269. assert set(results) == set()
  270. results = self.make_query(
  271. environments=[self.environments["staging"]], search_filter_query="bar"
  272. )
  273. assert set(results) == {self.group2}
  274. def test_query_for_text_in_long_message(self):
  275. results = self.make_query(
  276. [self.project],
  277. environments=[self.environments["production"]],
  278. search_filter_query="santryrox",
  279. )
  280. assert set(results) == {self.group1}
  281. def test_multi_environments(self):
  282. self.set_up_multi_project()
  283. results = self.make_query(
  284. [self.project, self.project2],
  285. environments=[self.environments["production"], self.environments["staging"]],
  286. )
  287. assert set(results) == {self.group1, self.group2, self.group_p2}
  288. def test_query_with_environment_multi_project(self):
  289. self.set_up_multi_project()
  290. results = self.make_query(
  291. [self.project, self.project2],
  292. environments=[self.environments["production"]],
  293. search_filter_query="foo",
  294. )
  295. assert set(results) == {self.group1, self.group_p2}
  296. results = self.make_query(
  297. [self.project, self.project2],
  298. environments=[self.environments["production"]],
  299. search_filter_query="bar",
  300. )
  301. assert set(results) == set()
  302. def test_query_timestamp(self):
  303. results = self.make_query(
  304. [self.project],
  305. environments=[self.environments["production"]],
  306. search_filter_query=f"timestamp:>{iso_format(self.event1.datetime)} timestamp:<{iso_format(self.event3.datetime)}",
  307. )
  308. assert set(results) == {self.group1}
  309. def test_sort(self):
  310. results = self.make_query(sort_by="date")
  311. assert list(results) == [self.group1, self.group2]
  312. results = self.make_query(sort_by="new")
  313. assert list(results) == [self.group2, self.group1]
  314. results = self.make_query(sort_by="freq")
  315. assert list(results) == [self.group1, self.group2]
  316. results = self.make_query(sort_by="trends")
  317. assert list(results) == [self.group2, self.group1]
  318. results = self.make_query(sort_by="user")
  319. assert list(results) == [self.group1, self.group2]
  320. def test_trends_sort(self):
  321. weights: TrendsSortWeights = {
  322. "log_level": 5,
  323. "has_stacktrace": 5,
  324. "relative_volume": 1,
  325. "event_halflife_hours": 4,
  326. "issue_halflife_hours": 24 * 7,
  327. "v2": False,
  328. "norm": False,
  329. }
  330. results = self.make_query(
  331. sort_by="trends",
  332. aggregate_kwargs=weights,
  333. )
  334. assert list(results) == [self.group2, self.group1]
  335. def test_sort_with_environment(self):
  336. for dt in [
  337. self.group1.first_seen + timedelta(days=1),
  338. self.group1.first_seen + timedelta(days=2),
  339. self.group1.last_seen + timedelta(days=1),
  340. ]:
  341. self.store_event(
  342. data={
  343. "fingerprint": ["put-me-in-group2"],
  344. "timestamp": iso_format(dt),
  345. "stacktrace": {"frames": [{"module": "group2"}]},
  346. "environment": "production",
  347. "message": "group2",
  348. },
  349. project_id=self.project.id,
  350. )
  351. results = self.make_query(environments=[self.environments["production"]], sort_by="date")
  352. assert list(results) == [self.group2, self.group1]
  353. results = self.make_query(environments=[self.environments["production"]], sort_by="new")
  354. assert list(results) == [self.group2, self.group1]
  355. results = self.make_query(environments=[self.environments["production"]], sort_by="freq")
  356. assert list(results) == [self.group2, self.group1]
  357. results = self.make_query(environments=[self.environments["production"]], sort_by="trends")
  358. assert list(results) == [self.group2, self.group1]
  359. results = self.make_query(environments=[self.environments["production"]], sort_by="user")
  360. assert list(results) == [self.group1, self.group2]
  361. def test_status(self):
  362. results = self.make_query(search_filter_query="is:unresolved")
  363. assert set(results) == {self.group1}
  364. results = self.make_query(search_filter_query="is:resolved")
  365. assert set(results) == {self.group2}
  366. event_3 = self.store_event(
  367. data={
  368. "fingerprint": ["put-me-in-group3"],
  369. "event_id": "c" * 32,
  370. "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
  371. },
  372. project_id=self.project.id,
  373. )
  374. group_3 = event_3.group
  375. group_3.status = GroupStatus.MUTED
  376. group_3.substatus = None
  377. group_3.save()
  378. self.run_test_query("status:[unresolved, resolved]", [self.group1, self.group2], [group_3])
  379. self.run_test_query("status:[resolved, muted]", [self.group2, group_3], [self.group1])
  380. def test_substatus(self):
  381. results = self.make_query(search_filter_query="is:ongoing")
  382. assert set(results) == {self.group1}
  383. def test_category(self):
  384. results = self.make_query(search_filter_query="issue.category:error")
  385. assert set(results) == {self.group1, self.group2}
  386. event_3 = self.store_event(
  387. data={
  388. "fingerprint": ["put-me-in-group3"],
  389. "event_id": "c" * 32,
  390. "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
  391. },
  392. project_id=self.project.id,
  393. )
  394. group_3 = event_3.group
  395. group_3.update(type=PerformanceNPlusOneGroupType.type_id)
  396. results = self.make_query(search_filter_query="issue.category:performance")
  397. assert set(results) == {group_3}
  398. results = self.make_query(search_filter_query="issue.category:[error, performance]")
  399. assert set(results) == {self.group1, self.group2, group_3}
  400. with pytest.raises(InvalidSearchQuery):
  401. self.make_query(search_filter_query="issue.category:hellboy")
  402. def test_not_perf_category(self):
  403. results = self.make_query(search_filter_query="issue.category:error foo")
  404. assert set(results) == {self.group1}
  405. not_results = self.make_query(search_filter_query="!issue.category:performance foo")
  406. assert set(not_results) == {self.group1}
def test_type(self):
    """issue.type filters groups by their specific type slug; unknown slugs raise
    InvalidSearchQuery."""
    results = self.make_query(search_filter_query="issue.type:error")
    assert set(results) == {self.group1, self.group2}

    event_3 = self.store_event(
        data={
            "fingerprint": ["put-me-in-group3"],
            "event_id": "c" * 32,
            "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            "type": PerformanceNPlusOneGroupType.type_id,
        },
        project_id=self.project.id,
    )
    group_3 = event_3.group
    group_3.update(type=PerformanceNPlusOneGroupType.type_id)
    results = self.make_query(
        search_filter_query="issue.type:performance_n_plus_one_db_queries"
    )
    assert set(results) == {group_3}

    event_4 = self.store_event(
        data={
            "fingerprint": ["put-me-in-group4"],
            "event_id": "d" * 32,
            "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
        },
        project_id=self.project.id,
    )
    group_4 = event_4.group
    group_4.update(type=PerformanceRenderBlockingAssetSpanGroupType.type_id)
    results = self.make_query(
        search_filter_query="issue.type:performance_render_blocking_asset_span"
    )
    assert set(results) == {group_4}

    # The in-list syntax unions all listed types.
    results = self.make_query(
        search_filter_query="issue.type:[performance_render_blocking_asset_span, performance_n_plus_one_db_queries, error]"
    )
    assert set(results) == {self.group1, self.group2, group_3, group_4}

    with pytest.raises(InvalidSearchQuery):
        self.make_query(search_filter_query="issue.type:performance_i_dont_exist")
  445. def test_status_with_environment(self):
  446. results = self.make_query(
  447. environments=[self.environments["production"]], search_filter_query="is:unresolved"
  448. )
  449. assert set(results) == {self.group1}
  450. results = self.make_query(
  451. environments=[self.environments["staging"]], search_filter_query="is:resolved"
  452. )
  453. assert set(results) == {self.group2}
  454. results = self.make_query(
  455. environments=[self.environments["production"]], search_filter_query="is:resolved"
  456. )
  457. assert set(results) == set()
  458. def test_tags(self):
  459. results = self.make_query(search_filter_query="environment:staging")
  460. assert set(results) == {self.group2}
  461. results = self.make_query(search_filter_query="environment:example.com")
  462. assert set(results) == set()
  463. results = self.make_query(search_filter_query="has:environment")
  464. assert set(results) == {self.group2, self.group1}
  465. results = self.make_query(search_filter_query="environment:staging server:example.com")
  466. assert set(results) == {self.group2}
  467. results = self.make_query(search_filter_query='url:"http://example.com"')
  468. assert set(results) == {self.group2}
  469. results = self.make_query(search_filter_query="environment:staging has:server")
  470. assert set(results) == {self.group2}
  471. results = self.make_query(search_filter_query="environment:staging server:bar.example.com")
  472. assert set(results) == set()
  473. def test_tags_with_environment(self):
  474. results = self.make_query(
  475. environments=[self.environments["production"]], search_filter_query="server:example.com"
  476. )
  477. assert set(results) == {self.group1}
  478. results = self.make_query(
  479. environments=[self.environments["staging"]], search_filter_query="server:example.com"
  480. )
  481. assert set(results) == {self.group2}
  482. results = self.make_query(
  483. environments=[self.environments["staging"]], search_filter_query="has:server"
  484. )
  485. assert set(results) == {self.group2}
  486. results = self.make_query(
  487. environments=[self.environments["production"]],
  488. search_filter_query='url:"http://example.com"',
  489. )
  490. assert set(results) == set()
  491. results = self.make_query(
  492. environments=[self.environments["staging"]],
  493. search_filter_query='url:"http://example.com"',
  494. )
  495. assert set(results) == {self.group2}
  496. results = self.make_query(
  497. environments=[self.environments["staging"]],
  498. search_filter_query="server:bar.example.com",
  499. )
  500. assert set(results) == set()
  501. def test_bookmarked_by(self):
  502. results = self.make_query(search_filter_query="bookmarks:%s" % self.user.username)
  503. assert set(results) == {self.group2}
  504. def test_bookmarked_by_in_syntax(self):
  505. self.run_test_query(f"bookmarks:[{self.user.username}]", [self.group2], [self.group1])
  506. user_2 = self.create_user()
  507. GroupBookmark.objects.create(
  508. user_id=user_2.id, group=self.group1, project=self.group2.project
  509. )
  510. self.run_test_query(
  511. f"bookmarks:[{self.user.username}, {user_2.username}]", [self.group2, self.group1], []
  512. )
  513. def test_bookmarked_by_with_environment(self):
  514. results = self.make_query(
  515. environments=[self.environments["staging"]],
  516. search_filter_query="bookmarks:%s" % self.user.username,
  517. )
  518. assert set(results) == {self.group2}
  519. results = self.make_query(
  520. environments=[self.environments["production"]],
  521. search_filter_query="bookmarks:%s" % self.user.username,
  522. )
  523. assert set(results) == set()
def test_search_filter_query_with_custom_trends_tag(self):
    """A user-defined "trends" tag is searchable like any other custom tag."""
    trends = "high"
    self.store_event(
        data={
            "fingerprint": ["put-me-in-group2"],
            "timestamp": iso_format(self.group2.first_seen + timedelta(days=1)),
            "stacktrace": {"frames": [{"module": "group2"}]},
            "message": "group2",
            "tags": {"trends": trends},
        },
        project_id=self.project.id,
    )

    results = self.make_query(search_filter_query="trends:%s" % trends)
    assert set(results) == {self.group2}
def test_search_filter_query_with_custom_trends_tag_and_trends_sort(self):
    """Filtering on a custom "trends" tag while sorting by "trends" does not
    conflict; results come back in trends-sort order."""
    trends = "high"
    # Give both groups newer events carrying the tag so both match the filter.
    for i in range(1, 3):
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "timestamp": iso_format(self.group2.last_seen + timedelta(days=i)),
                "stacktrace": {"frames": [{"module": "group1"}]},
                "message": "group1",
                "tags": {"trends": trends},
            },
            project_id=self.project.id,
        )
    self.store_event(
        data={
            "fingerprint": ["put-me-in-group2"],
            "timestamp": iso_format(self.group2.last_seen + timedelta(days=2)),
            "stacktrace": {"frames": [{"module": "group2"}]},
            "message": "group2",
            "tags": {"trends": trends},
        },
        project_id=self.project.id,
    )
    results = self.make_query(search_filter_query="trends:%s" % trends, sort_by="trends")
    # Ordered comparison: the sort determines which group comes first.
    assert list(results) == [self.group2, self.group1]
def test_search_tag_overlapping_with_internal_fields(self):
    """A custom tag whose name collides with a promoted event column is still
    treated as a plain tag in issue search."""
    # Using a tag of email overlaps with the promoted user.email column in events.
    # We don't want to bypass public schema limits in issue search.
    self.store_event(
        data={
            "fingerprint": ["put-me-in-group2"],
            "timestamp": iso_format(self.group2.first_seen + timedelta(days=1)),
            "stacktrace": {"frames": [{"module": "group2"}]},
            "message": "group2",
            "tags": {"email": "tags@example.com"},
        },
        project_id=self.project.id,
    )
    results = self.make_query(search_filter_query="email:tags@example.com")
    assert set(results) == {self.group2}
  578. def test_project(self):
  579. results = self.make_query([self.create_project(name="other")])
  580. assert set(results) == set()
def test_pagination(self):
    """Cursor pagination walks forward and backward over date-sorted results,
    with and without the pre-snuba candidate optimization enabled."""
    for options_set in [
        {"snuba.search.min-pre-snuba-candidates": None},
        {"snuba.search.min-pre-snuba-candidates": 500},
    ]:
        with self.options(options_set):
            # First page: newest group, nothing before it, more after it.
            results = self.backend.query([self.project], limit=1, sort_by="date")
            assert set(results) == {self.group1}
            assert not results.prev.has_results
            assert results.next.has_results

            # Second (last) page.
            results = self.backend.query(
                [self.project], cursor=results.next, limit=1, sort_by="date"
            )
            assert set(results) == {self.group2}
            assert results.prev.has_results
            assert not results.next.has_results

            # note: previous cursor
            results = self.backend.query(
                [self.project], cursor=results.prev, limit=1, sort_by="date"
            )
            assert set(results) == {self.group1}
            assert results.prev.has_results
            assert results.next.has_results

            # note: previous cursor, paging too far into 0 results
            results = self.backend.query(
                [self.project], cursor=results.prev, limit=1, sort_by="date"
            )
            assert set(results) == set()
            assert not results.prev.has_results
            assert results.next.has_results

            # Walk forward again from the empty page back through both groups.
            results = self.backend.query(
                [self.project], cursor=results.next, limit=1, sort_by="date"
            )
            assert set(results) == {self.group1}
            assert results.prev.has_results
            assert results.next.has_results

            results = self.backend.query(
                [self.project], cursor=results.next, limit=1, sort_by="date"
            )
            assert set(results) == {self.group2}
            assert results.prev.has_results
            assert not results.next.has_results

            # Paging past the end yields an empty page with only a prev cursor.
            results = self.backend.query(
                [self.project], cursor=results.next, limit=1, sort_by="date"
            )
            assert set(results) == set()
            assert results.prev.has_results
            assert not results.next.has_results
def test_pagination_with_environment(self):
    """Pagination and hit counting work when results are restricted to one
    environment."""
    # Add newer production events to group2 so it sorts first by date.
    for dt in [
        self.group1.first_seen + timedelta(days=1),
        self.group1.first_seen + timedelta(days=2),
        self.group1.last_seen + timedelta(days=1),
    ]:
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(dt),
                "environment": "production",
                "message": "group2",
                "stacktrace": {"frames": [{"module": "group2"}]},
            },
            project_id=self.project.id,
        )

    results = self.backend.query(
        [self.project],
        environments=[self.environments["production"]],
        sort_by="date",
        limit=1,
        count_hits=True,
    )
    assert list(results) == [self.group2]
    assert results.hits == 2

    results = self.backend.query(
        [self.project],
        environments=[self.environments["production"]],
        sort_by="date",
        limit=1,
        cursor=results.next,
        count_hits=True,
    )
    assert list(results) == [self.group1]
    assert results.hits == 2

    # Past the last page: empty, but the total hit count is unchanged.
    results = self.backend.query(
        [self.project],
        environments=[self.environments["production"]],
        sort_by="date",
        limit=1,
        cursor=results.next,
        count_hits=True,
    )
    assert list(results) == []
    assert results.hits == 2
  674. def test_age_filter(self):
  675. results = self.make_query(
  676. search_filter_query="firstSeen:>=%s" % date_to_query_format(self.group2.first_seen)
  677. )
  678. assert set(results) == {self.group2}
  679. results = self.make_query(
  680. search_filter_query="firstSeen:<=%s"
  681. % date_to_query_format(self.group1.first_seen + timedelta(minutes=1))
  682. )
  683. assert set(results) == {self.group1}
  684. results = self.make_query(
  685. search_filter_query="firstSeen:>=%s firstSeen:<=%s"
  686. % (
  687. date_to_query_format(self.group1.first_seen),
  688. date_to_query_format(self.group1.first_seen + timedelta(minutes=1)),
  689. )
  690. )
  691. assert set(results) == {self.group1}
def test_age_filter_with_environment(self):
    """firstSeen comparisons use the per-environment first_seen, not the
    group-wide one."""
    # add time instead to make it greater than or less than as needed.
    group1_first_seen = GroupEnvironment.objects.get(
        environment=self.environments["production"], group=self.group1
    ).first_seen
    assert group1_first_seen is not None

    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="firstSeen:>=%s" % date_to_query_format(group1_first_seen),
    )
    assert set(results) == {self.group1}

    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="firstSeen:<=%s" % date_to_query_format(group1_first_seen),
    )
    assert set(results) == {self.group1}

    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="firstSeen:>%s" % date_to_query_format(group1_first_seen),
    )
    assert set(results) == set()

    # A later event in "development" creates a newer first_seen for that
    # environment only; production's bound must be unaffected.
    self.store_event(
        data={
            "fingerprint": ["put-me-in-group1"],
            "timestamp": iso_format(group1_first_seen + timedelta(days=1)),
            "message": "group1",
            "stacktrace": {"frames": [{"module": "group1"}]},
            "environment": "development",
        },
        project_id=self.project.id,
    )

    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="firstSeen:>%s" % date_to_query_format(group1_first_seen),
    )
    assert set(results) == set()

    results = self.make_query(
        environments=[Environment.objects.get(name="development")],
        search_filter_query="firstSeen:>%s" % date_to_query_format(group1_first_seen),
    )
    assert set(results) == {self.group1}
  733. def test_times_seen_filter(self):
  734. results = self.make_query([self.project], search_filter_query="times_seen:2")
  735. assert set(results) == {self.group1}
  736. results = self.make_query([self.project], search_filter_query="times_seen:>=2")
  737. assert set(results) == {self.group1}
  738. results = self.make_query([self.project], search_filter_query="times_seen:<=1")
  739. assert set(results) == {self.group2}
  740. def test_last_seen_filter(self):
  741. results = self.make_query(
  742. search_filter_query="lastSeen:>=%s" % date_to_query_format(self.group1.last_seen)
  743. )
  744. assert set(results) == {self.group1}
  745. results = self.make_query(
  746. search_filter_query="lastSeen:>=%s lastSeen:<=%s"
  747. % (
  748. date_to_query_format(self.group1.last_seen),
  749. date_to_query_format(self.group1.last_seen + timedelta(minutes=1)),
  750. )
  751. )
  752. assert set(results) == {self.group1}
def test_last_seen_filter_with_environment(self):
    """lastSeen comparisons use the per-environment last_seen timestamp."""
    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="lastSeen:>=%s" % date_to_query_format(self.group1.last_seen),
    )
    assert set(results) == {self.group1}

    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="lastSeen:<=%s" % date_to_query_format(self.group1.last_seen),
    )
    assert set(results) == {self.group1}

    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="lastSeen:>%s" % date_to_query_format(self.group1.last_seen),
    )
    assert set(results) == set()

    # A newer "development" event advances last_seen for that environment only;
    # the group-wide last_seen is bumped to match the new event.
    self.store_event(
        data={
            "fingerprint": ["put-me-in-group1"],
            "timestamp": iso_format(self.group1.last_seen + timedelta(days=1)),
            "message": "group1",
            "stacktrace": {"frames": [{"module": "group1"}]},
            "environment": "development",
        },
        project_id=self.project.id,
    )
    self.group1.update(last_seen=self.group1.last_seen + timedelta(days=1))

    results = self.make_query(
        environments=[self.environments["production"]],
        search_filter_query="lastSeen:>%s" % date_to_query_format(self.group1.last_seen),
    )
    assert set(results) == set()

    # Strictly-greater excludes the exact timestamp; >= includes it.
    results = self.make_query(
        environments=[Environment.objects.get(name="development")],
        search_filter_query="lastSeen:>%s" % date_to_query_format(self.group1.last_seen),
    )
    assert set(results) == set()

    results = self.make_query(
        environments=[Environment.objects.get(name="development")],
        search_filter_query="lastSeen:>=%s" % date_to_query_format(self.group1.last_seen),
    )
    assert set(results) == {self.group1}
def test_date_filter(self):
    """timestamp filters bound results by event time; a trailing "Z" UTC marker
    is accepted as equivalent."""
    results = self.make_query(
        date_from=self.event2.datetime,
        search_filter_query="timestamp:>=%s" % date_to_query_format(self.event2.datetime),
    )
    assert set(results) == {self.group1, self.group2}

    results = self.make_query(
        date_to=self.event1.datetime + timedelta(minutes=1),
        search_filter_query="timestamp:<=%s"
        % date_to_query_format(self.event1.datetime + timedelta(minutes=1)),
    )
    assert set(results) == {self.group1}

    results = self.make_query(
        date_from=self.event1.datetime,
        date_to=self.event2.datetime + timedelta(minutes=1),
        search_filter_query="timestamp:>=%s timestamp:<=%s"
        % (
            date_to_query_format(self.event1.datetime),
            date_to_query_format(self.event2.datetime + timedelta(minutes=1)),
        ),
    )
    assert set(results) == {self.group1, self.group2}

    # Test with `Z` utc marker, should be equivalent
    results = self.make_query(
        date_from=self.event1.datetime,
        date_to=self.event2.datetime + timedelta(minutes=1),
        search_filter_query="timestamp:>=%s timestamp:<=%s"
        % (
            date_to_query_format(self.event1.datetime) + "Z",
            date_to_query_format(self.event2.datetime + timedelta(minutes=1)) + "Z",
        ),
    )
    assert set(results) == {self.group1, self.group2}
  828. def test_date_filter_with_environment(self):
  829. results = self.backend.query(
  830. [self.project],
  831. environments=[self.environments["production"]],
  832. date_from=self.event2.datetime,
  833. )
  834. assert set(results) == {self.group1}
  835. results = self.backend.query(
  836. [self.project],
  837. environments=[self.environments["production"]],
  838. date_to=self.event1.datetime + timedelta(minutes=1),
  839. )
  840. assert set(results) == {self.group1}
  841. results = self.backend.query(
  842. [self.project],
  843. environments=[self.environments["staging"]],
  844. date_from=self.event1.datetime,
  845. date_to=self.event2.datetime + timedelta(minutes=1),
  846. )
  847. assert set(results) == {self.group2}
  848. def test_linked(self):
  849. linked_group1 = self.create_group_with_integration_external_issue()
  850. linked_group2 = self.create_group_with_platform_external_issue()
  851. results = self.make_query(search_filter_query="is:unlinked")
  852. assert set(results) == {self.group1, self.group2}
  853. results = self.make_query(search_filter_query="is:linked")
  854. assert set(results) == {linked_group1, linked_group2}
  855. def test_linked_with_only_integration_external_issue(self):
  856. linked_group = self.create_group_with_integration_external_issue()
  857. results = self.make_query(search_filter_query="is:unlinked")
  858. assert set(results) == {self.group1, self.group2}
  859. results = self.make_query(search_filter_query="is:linked")
  860. assert set(results) == {linked_group}
  861. def test_linked_with_only_platform_external_issue(self):
  862. linked_group = self.create_group_with_platform_external_issue()
  863. results = self.make_query(search_filter_query="is:unlinked")
  864. assert set(results) == {self.group1, self.group2}
  865. results = self.make_query(search_filter_query="is:linked")
  866. assert set(results) == {linked_group}
  867. def test_linked_with_environment(self):
  868. linked_group1 = self.create_group_with_integration_external_issue(environment="production")
  869. linked_group2 = self.create_group_with_platform_external_issue(environment="staging")
  870. results = self.make_query(
  871. environments=[self.environments["production"]], search_filter_query="is:unlinked"
  872. )
  873. assert set(results) == {self.group1}
  874. results = self.make_query(
  875. environments=[self.environments["staging"]], search_filter_query="is:unlinked"
  876. )
  877. assert set(results) == {self.group2}
  878. results = self.make_query(
  879. environments=[self.environments["production"]], search_filter_query="is:linked"
  880. )
  881. assert set(results) == {linked_group1}
  882. results = self.make_query(
  883. environments=[self.environments["staging"]], search_filter_query="is:linked"
  884. )
  885. assert set(results) == {linked_group2}
  886. def test_unassigned(self):
  887. results = self.make_query(search_filter_query="is:unassigned")
  888. assert set(results) == {self.group1}
  889. results = self.make_query(search_filter_query="is:assigned")
  890. assert set(results) == {self.group2}
  891. def test_unassigned_with_environment(self):
  892. results = self.make_query(
  893. environments=[self.environments["production"]], search_filter_query="is:unassigned"
  894. )
  895. assert set(results) == {self.group1}
  896. results = self.make_query(
  897. environments=[self.environments["staging"]], search_filter_query="is:assigned"
  898. )
  899. assert set(results) == {self.group2}
  900. results = self.make_query(
  901. environments=[self.environments["production"]], search_filter_query="is:assigned"
  902. )
  903. assert set(results) == set()
def test_assigned_to(self):
    """assigned:<username> matches direct user assignment only — not team
    assignment, other users, or org owners."""
    results = self.make_query(search_filter_query="assigned:%s" % self.user.username)
    assert set(results) == {self.group2}

    # test team assignee
    ga = GroupAssignee.objects.get(
        user_id=self.user.id, group=self.group2, project=self.group2.project
    )
    # Reassign from the user to the team; the user filter should stop matching.
    ga.update(team=self.team, user_id=None)
    assert GroupAssignee.objects.get(id=ga.id).user_id is None

    results = self.make_query(search_filter_query="assigned:%s" % self.user.username)
    assert set(results) == set()

    # test when there should be no results
    other_user = self.create_user()
    results = self.make_query(search_filter_query="assigned:%s" % other_user.username)
    assert set(results) == set()

    owner = self.create_user()
    self.create_member(
        organization=self.project.organization, user=owner, role="owner", teams=[]
    )

    # test that owners don't see results for all teams
    results = self.make_query(search_filter_query="assigned:%s" % owner.username)
    assert set(results) == set()
def test_assigned_to_me_my_teams(self):
    """assigned:me matches only direct user assignment; assigned:my_teams matches
    only assignment to one of the user's teams."""
    my_team_group = self.store_event(
        data={
            "fingerprint": ["put-me-in-group-my-teams"],
            "event_id": "f" * 32,
            "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            "message": "baz",
            "environment": "staging",
            "tags": {
                "server": "example.com",
                "url": "http://example.com",
                "sentry:user": "event2@example.com",
            },
            "level": "error",
        },
        project_id=self.project.id,
    ).group
    # assign the issue to my team instead of me
    GroupAssignee.objects.create(
        user_id=None, team_id=self.team.id, group=my_team_group, project=my_team_group.project
    )

    # group2 is assigned directly to self.user; the team-assigned group must not match.
    self.run_test_query(
        "assigned:me",
        [self.group2],
        user=self.user,
    )
    # Sanity check: no direct user assignment exists on the team-assigned group.
    assert not GroupAssignee.objects.filter(user_id=self.user.id, group=my_team_group).exists()

    self.run_test_query(
        "assigned:my_teams",
        [my_team_group],
        user=self.user,
    )
  958. def test_assigned_to_me_my_teams_in_syntax(self):
  959. my_team_group = self.store_event(
  960. data={
  961. "fingerprint": ["put-me-in-group-my-teams"],
  962. "event_id": "f" * 32,
  963. "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
  964. "message": "baz",
  965. "environment": "staging",
  966. "tags": {
  967. "server": "example.com",
  968. "url": "http://example.com",
  969. "sentry:user": "event2@example.com",
  970. },
  971. "level": "error",
  972. },
  973. project_id=self.project.id,
  974. ).group
  975. # assign the issue to my team instead of me
  976. GroupAssignee.objects.create(
  977. user_id=None, team_id=self.team.id, group=my_team_group, project=my_team_group.project
  978. )
  979. self.run_test_query(
  980. "assigned:[me]",
  981. [self.group2],
  982. user=self.user,
  983. )
  984. assert not GroupAssignee.objects.filter(user_id=self.user.id, group=my_team_group).exists()
  985. self.run_test_query(
  986. "assigned:[me]",
  987. [self.group2],
  988. user=self.user,
  989. )
  990. self.run_test_query(
  991. "assigned:[my_teams]",
  992. [my_team_group],
  993. user=self.user,
  994. )
  995. self.run_test_query(
  996. "assigned:[me, my_teams]",
  997. [self.group2, my_team_group],
  998. user=self.user,
  999. )
def test_assigned_to_in_syntax(self):
    """assigned:[...] unions user, team (#slug), me, and none selectors."""
    group_3 = self.store_event(
        data={
            "fingerprint": ["put-me-in-group3"],
            "event_id": "c" * 32,
            "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
        },
        project_id=self.project.id,
    ).group
    group_3.status = GroupStatus.MUTED
    group_3.substatus = None
    group_3.save()

    other_user = self.create_user()
    # Only group2 is assigned (to self.user) at this point.
    self.run_test_query(
        f"assigned:[{self.user.username}, {other_user.username}]",
        [self.group2],
        [self.group1, group_3],
    )

    GroupAssignee.objects.create(project=self.project, group=group_3, user_id=other_user.id)
    self.run_test_query(
        f"assigned:[{self.user.username}, {other_user.username}]",
        [self.group2, group_3],
        [self.group1],
    )

    self.run_test_query(
        f"assigned:[#{self.team.slug}, {other_user.username}]",
        [group_3],
        [self.group1, self.group2],
    )

    # Move group2's assignment from the user to the team: user selectors stop
    # matching it and the #team selector starts.
    ga_2 = GroupAssignee.objects.get(
        user_id=self.user.id, group=self.group2, project=self.group2.project
    )
    ga_2.update(team=self.team, user_id=None)
    self.run_test_query(
        f"assigned:[{self.user.username}, {other_user.username}]",
        [group_3],
        [self.group1, self.group2],
    )
    self.run_test_query(
        f"assigned:[#{self.team.slug}, {other_user.username}]",
        [self.group2, group_3],
        [self.group1],
    )

    # "none" matches the unassigned group1; group2 (team-assigned) matches
    # neither "me" nor "none" nor the other user.
    self.run_test_query(
        f"assigned:[me, none, {other_user.username}]",
        [self.group1, group_3],
        [self.group2],
    )
def test_assigned_or_suggested_in_syntax(self):
    """assigned_or_suggested:[...] matches groups either assigned to or suggested
    (via GroupOwner) for the listed actors; a direct assignment to someone else
    overrides a suggestion."""
    # Start from a clean slate of five fresh groups.
    Group.objects.all().delete()
    group = self.store_event(
        data={
            "timestamp": iso_format(before_now(seconds=180)),
            "fingerprint": ["group-1"],
        },
        project_id=self.project.id,
    ).group
    group1 = self.store_event(
        data={
            "timestamp": iso_format(before_now(seconds=185)),
            "fingerprint": ["group-2"],
        },
        project_id=self.project.id,
    ).group
    group2 = self.store_event(
        data={
            "timestamp": iso_format(before_now(seconds=190)),
            "fingerprint": ["group-3"],
        },
        project_id=self.project.id,
    ).group
    assigned_group = self.store_event(
        data={
            "timestamp": iso_format(before_now(seconds=195)),
            "fingerprint": ["group-4"],
        },
        project_id=self.project.id,
    ).group
    assigned_to_other_group = self.store_event(
        data={
            "timestamp": iso_format(before_now(seconds=195)),
            "fingerprint": ["group-5"],
        },
        project_id=self.project.id,
    ).group

    # No suggestions or assignments yet: nothing matches.
    self.run_test_query(
        "assigned_or_suggested:[me]",
        [],
        [group, group1, group2, assigned_group, assigned_to_other_group],
    )

    # Suggest self.user (GroupOwner) on two groups; both now match "me".
    GroupOwner.objects.create(
        group=assigned_to_other_group,
        project=self.project,
        organization=self.organization,
        type=0,
        team_id=None,
        user_id=self.user.id,
    )
    GroupOwner.objects.create(
        group=group,
        project=self.project,
        organization=self.organization,
        type=0,
        team_id=None,
        user_id=self.user.id,
    )
    self.run_test_query(
        "assigned_or_suggested:[me]",
        [group, assigned_to_other_group],
        [group1, group2, assigned_group],
    )

    # Because assigned_to_other_event is assigned to self.other_user, it should not show up in assigned_or_suggested search for anyone but self.other_user. (aka. they are now the only owner)
    other_user = self.create_user("other@user.com", is_superuser=False)
    GroupAssignee.objects.create(
        group=assigned_to_other_group,
        project=self.project,
        user_id=other_user.id,
    )
    self.run_test_query(
        "assigned_or_suggested:[me]",
        [group],
        [group1, group2, assigned_group, assigned_to_other_group],
    )
    self.run_test_query(
        f"assigned_or_suggested:[{other_user.email}]",
        [assigned_to_other_group],
        [group, group1, group2, assigned_group],
    )

    # Direct assignment to self.user makes assigned_group match alongside the
    # suggested group.
    GroupAssignee.objects.create(
        group=assigned_group, project=self.project, user_id=self.user.id
    )
    self.run_test_query(
        f"assigned_or_suggested:[{self.user.email}]",
        [assigned_group, group],
    )

    # A team suggestion makes the #team selector match.
    GroupOwner.objects.create(
        group=group,
        project=self.project,
        organization=self.organization,
        type=0,
        team_id=self.team.id,
        user_id=None,
    )
    self.run_test_query(
        f"assigned_or_suggested:[#{self.team.slug}]",
        [group],
    )

    # "none" matches groups with neither an assignee nor an owner.
    self.run_test_query(
        "assigned_or_suggested:[me, none]",
        [group, group1, group2, assigned_group],
        [assigned_to_other_group],
    )

    # Suggesting a different user on group2 removes it from the "none" bucket.
    not_me = self.create_user(email="notme@sentry.io")
    GroupOwner.objects.create(
        group=group2,
        project=self.project,
        organization=self.organization,
        type=0,
        team_id=None,
        user_id=not_me.id,
    )
    self.run_test_query(
        "assigned_or_suggested:[me, none]",
        [group, group1, assigned_group],
        [assigned_to_other_group, group2],
    )

    # Dropping the user suggestion leaves the team suggestion on `group`.
    GroupOwner.objects.filter(group=group, user_id=self.user.id).delete()
    self.run_test_query(
        f"assigned_or_suggested:[me, none, #{self.team.slug}]",
        [group, group1, assigned_group],
        [assigned_to_other_group, group2],
    )
    self.run_test_query(
        f"assigned_or_suggested:[me, none, #{self.team.slug}, {not_me.email}]",
        [group, group1, assigned_group, group2],
        [assigned_to_other_group],
    )
    def test_assigned_or_suggested_my_teams(self):
        """Step `assigned_or_suggested:` through the `me`, `my_teams`,
        user-email and #team-slug syntaxes, mutating GroupOwner /
        GroupAssignee state between queries and asserting which groups
        match after each change."""
        Group.objects.all().delete()
        # Six fresh groups: four plain ones, one to be assigned to another
        # user, and one to be assigned to self.team.
        group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=180)),
                "fingerprint": ["group-1"],
            },
            project_id=self.project.id,
        ).group
        group1 = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=185)),
                "fingerprint": ["group-2"],
            },
            project_id=self.project.id,
        ).group
        group2 = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=190)),
                "fingerprint": ["group-3"],
            },
            project_id=self.project.id,
        ).group
        assigned_group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=195)),
                "fingerprint": ["group-4"],
            },
            project_id=self.project.id,
        ).group
        assigned_to_other_group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=195)),
                "fingerprint": ["group-5"],
            },
            project_id=self.project.id,
        ).group
        my_team_group = self.store_event(
            data={
                "fingerprint": ["put-me-in-group-my-teams"],
                "event_id": "f" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
                "message": "baz",
                "environment": "staging",
                "tags": {
                    "server": "example.com",
                    "url": "http://example.com",
                    "sentry:user": "event2@example.com",
                },
                "level": "error",
            },
            project_id=self.project.id,
        ).group
        # No ownership data exists yet, so neither syntax matches anything.
        self.run_test_query(
            "assigned_or_suggested:me",
            [],
            [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:my_teams",
            [],
            [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group],
            user=self.user,
        )
        # Suggest two groups to self.user (GroupOwner rows) and assign one
        # group to self.team (GroupAssignee row).
        GroupOwner.objects.create(
            group=assigned_to_other_group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=self.user.id,
        )
        GroupOwner.objects.create(
            group=group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=self.user.id,
        )
        GroupAssignee.objects.create(
            user_id=None, team_id=self.team.id, group=my_team_group, project=my_team_group.project
        )
        self.run_test_query(
            "assigned_or_suggested:me",
            [group, assigned_to_other_group],
            [group1, group2, assigned_group, my_team_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:my_teams",
            [my_team_group],
            [group, group1, group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
        # Because assigned_to_other_event is assigned to self.other_user, it
        # should not show up in assigned_or_suggested search for anyone but
        # self.other_user. (aka. they are now the only owner)
        other_user = self.create_user("other@user.com", is_superuser=False)
        GroupAssignee.objects.create(
            group=assigned_to_other_group,
            project=self.project,
            user_id=other_user.id,
        )
        self.run_test_query(
            "assigned_or_suggested:me",
            [group],
            [group1, group2, assigned_group, my_team_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:my_teams",
            [my_team_group],
            [group, group1, group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            f"assigned_or_suggested:{other_user.email}",
            [assigned_to_other_group],
            [group, group1, group2, assigned_group, my_team_group],
            user=self.user,
        )
        # A direct assignment also satisfies assigned_or_suggested:<email>.
        GroupAssignee.objects.create(
            group=assigned_group, project=self.project, user_id=self.user.id
        )
        self.run_test_query(
            f"assigned_or_suggested:{self.user.email}",
            [assigned_group, group],
            [group1, group2, my_team_group, assigned_to_other_group],
            user=self.user,
        )
        # A team-level suggestion makes the group match the #team-slug syntax.
        GroupOwner.objects.create(
            group=group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=self.team.id,
            user_id=None,
        )
        self.run_test_query(
            f"assigned_or_suggested:#{self.team.slug}",
            [group, my_team_group],
            [group1, group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
    def test_assigned_or_suggested_my_teams_in_syntax(self):
        """Same walk as test_assigned_or_suggested_my_teams but using the
        bracketed `assigned_or_suggested:[...]` in-syntax, including `none`
        and mixed-value combinations."""
        Group.objects.all().delete()
        # Six fresh groups: four plain ones, one to be assigned to another
        # user, and one to be assigned to self.team.
        group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=180)),
                "fingerprint": ["group-1"],
            },
            project_id=self.project.id,
        ).group
        group1 = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=185)),
                "fingerprint": ["group-2"],
            },
            project_id=self.project.id,
        ).group
        group2 = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=190)),
                "fingerprint": ["group-3"],
            },
            project_id=self.project.id,
        ).group
        assigned_group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=195)),
                "fingerprint": ["group-4"],
            },
            project_id=self.project.id,
        ).group
        assigned_to_other_group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=195)),
                "fingerprint": ["group-5"],
            },
            project_id=self.project.id,
        ).group
        my_team_group = self.store_event(
            data={
                "fingerprint": ["put-me-in-group-my-teams"],
                "event_id": "f" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
                "message": "baz",
                "environment": "staging",
                "tags": {
                    "server": "example.com",
                    "url": "http://example.com",
                    "sentry:user": "event2@example.com",
                },
                "level": "error",
            },
            project_id=self.project.id,
        ).group
        # No ownership data exists yet: every in-syntax variant is empty.
        self.run_test_query(
            "assigned_or_suggested:[me]",
            [],
            [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[my_teams]",
            [],
            [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[me, my_teams]",
            [],
            [group, group1, group2, assigned_group, assigned_to_other_group, my_team_group],
            user=self.user,
        )
        # Suggest two groups to self.user (GroupOwner rows) and assign one
        # group to self.team (GroupAssignee row).
        GroupOwner.objects.create(
            group=assigned_to_other_group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=self.user.id,
        )
        GroupOwner.objects.create(
            group=group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=self.user.id,
        )
        GroupAssignee.objects.create(
            user_id=None, team_id=self.team.id, group=my_team_group, project=my_team_group.project
        )
        self.run_test_query(
            "assigned_or_suggested:[me]",
            [group, assigned_to_other_group],
            [group1, group2, assigned_group, my_team_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[my_teams]",
            [my_team_group],
            [group, group1, group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
        # [me, my_teams] is the union of the two single-value queries above.
        self.run_test_query(
            "assigned_or_suggested:[me, my_teams]",
            [group, assigned_to_other_group, my_team_group],
            [group1, group2, assigned_group],
            user=self.user,
        )
        # Because assigned_to_other_event is assigned to self.other_user, it
        # should not show up in assigned_or_suggested search for anyone but
        # self.other_user. (aka. they are now the only owner)
        other_user = self.create_user("other@user.com", is_superuser=False)
        GroupAssignee.objects.create(
            group=assigned_to_other_group,
            project=self.project,
            user_id=other_user.id,
        )
        self.run_test_query(
            "assigned_or_suggested:[me]",
            [group],
            [group1, group2, assigned_group, my_team_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[my_teams]",
            [my_team_group],
            [group, group1, group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[me, my_teams]",
            [group, my_team_group],
            [group1, group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            f"assigned_or_suggested:[{other_user.email}]",
            [assigned_to_other_group],
            [group, group1, group2, assigned_group, my_team_group],
            user=self.user,
        )
        # A direct assignment also satisfies assigned_or_suggested:[<email>].
        GroupAssignee.objects.create(
            group=assigned_group, project=self.project, user_id=self.user.id
        )
        self.run_test_query(
            f"assigned_or_suggested:[{self.user.email}]",
            [assigned_group, group],
            [group1, group2, my_team_group, assigned_to_other_group],
            user=self.user,
        )
        # A team-level suggestion makes the group match the #team-slug syntax.
        GroupOwner.objects.create(
            group=group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=self.team.id,
            user_id=None,
        )
        self.run_test_query(
            f"assigned_or_suggested:[#{self.team.slug}]",
            [group, my_team_group],
            [group1, group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
        # `none` adds the groups with no owner/assignee at all.
        self.run_test_query(
            "assigned_or_suggested:[me, none]",
            [group, group1, group2, assigned_group],
            [my_team_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[my_teams, none]",
            [group, group1, group2, my_team_group],
            [assigned_to_other_group, assigned_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[me, my_teams, none]",
            [group, group1, group2, my_team_group, assigned_group],
            [assigned_to_other_group],
            user=self.user,
        )
        # Suggesting group2 to an unrelated user removes it from the `none`
        # bucket.
        not_me = self.create_user(email="notme@sentry.io")
        GroupOwner.objects.create(
            group=group2,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=not_me.id,
        )
        self.run_test_query(
            "assigned_or_suggested:[me, none]",
            [group, group1, assigned_group],
            [group2, my_team_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[my_teams, none]",
            [group, group1, my_team_group],
            [group2, assigned_group, assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            "assigned_or_suggested:[me, my_teams, none]",
            [group, group1, my_team_group, assigned_group],
            [group2, assigned_to_other_group],
            user=self.user,
        )
        # Drop the user-level suggestion on `group`; it still matches via the
        # remaining team-level GroupOwner row.
        GroupOwner.objects.filter(group=group, user_id=self.user.id).delete()
        self.run_test_query(
            f"assigned_or_suggested:[me, none, #{self.team.slug}]",
            [group, group1, assigned_group, my_team_group],
            [assigned_to_other_group, group2],
            user=self.user,
        )
        self.run_test_query(
            f"assigned_or_suggested:[my_teams, none, #{self.team.slug}]",
            [group, group1, my_team_group],
            [assigned_to_other_group, group2, assigned_group],
            user=self.user,
        )
        self.run_test_query(
            f"assigned_or_suggested:[me, my_teams, none, #{self.team.slug}]",
            [group, group1, my_team_group, assigned_group],
            [assigned_to_other_group, group2],
            user=self.user,
        )
        # Adding the unrelated user's email to the list pulls group2 back in.
        self.run_test_query(
            f"assigned_or_suggested:[me, none, #{self.team.slug}, {not_me.email}]",
            [group, group1, group2, assigned_group, my_team_group],
            [assigned_to_other_group],
            user=self.user,
        )
        self.run_test_query(
            f"assigned_or_suggested:[my_teams, none, #{self.team.slug}, {not_me.email}]",
            [group, group1, group2, my_team_group],
            [assigned_to_other_group, assigned_group],
            user=self.user,
        )
        self.run_test_query(
            f"assigned_or_suggested:[me, my_teams, none, #{self.team.slug}, {not_me.email}]",
            [group, group1, group2, my_team_group, assigned_group],
            [assigned_to_other_group],
            user=self.user,
        )
  1565. def test_assigned_to_with_environment(self):
  1566. results = self.make_query(
  1567. environments=[self.environments["staging"]],
  1568. search_filter_query="assigned:%s" % self.user.username,
  1569. )
  1570. assert set(results) == {self.group2}
  1571. results = self.make_query(
  1572. environments=[self.environments["production"]],
  1573. search_filter_query="assigned:%s" % self.user.username,
  1574. )
  1575. assert set(results) == set()
  1576. def test_subscribed_by(self):
  1577. results = self.make_query(
  1578. [self.group1.project], search_filter_query="subscribed:%s" % self.user.username
  1579. )
  1580. assert set(results) == {self.group1}
  1581. def test_subscribed_by_in_syntax(self):
  1582. self.run_test_query(f"subscribed:[{self.user.username}]", [self.group1], [self.group2])
  1583. user_2 = self.create_user()
  1584. GroupSubscription.objects.create(
  1585. user_id=user_2.id, group=self.group2, project=self.project, is_active=True
  1586. )
  1587. self.run_test_query(
  1588. f"subscribed:[{self.user.username}, {user_2.username}]", [self.group1, self.group2], []
  1589. )
  1590. def test_subscribed_by_with_environment(self):
  1591. results = self.make_query(
  1592. [self.group1.project],
  1593. environments=[self.environments["production"]],
  1594. search_filter_query="subscribed:%s" % self.user.username,
  1595. )
  1596. assert set(results) == {self.group1}
  1597. results = self.make_query(
  1598. [self.group1.project],
  1599. environments=[self.environments["staging"]],
  1600. search_filter_query="subscribed:%s" % self.user.username,
  1601. )
  1602. assert set(results) == set()
  1603. @mock.patch("sentry.search.snuba.executors.bulk_raw_query")
  1604. def test_snuba_not_called_optimization(self, query_mock):
  1605. assert self.make_query(search_filter_query="status:unresolved").results == [self.group1]
  1606. assert not query_mock.called
  1607. assert (
  1608. self.make_query(
  1609. search_filter_query="last_seen:>%s" % date_to_query_format(timezone.now()),
  1610. sort_by="date",
  1611. ).results
  1612. == []
  1613. )
  1614. assert query_mock.called
  1615. @mock.patch("sentry.search.snuba.executors.bulk_raw_query")
  1616. def test_reduce_bulk_results_none_total(self, bulk_raw_query_mock):
  1617. bulk_raw_query_mock.return_value = [
  1618. {"data": [], "totals": {"total": None}},
  1619. {"data": [], "totals": {"total": None}},
  1620. ]
  1621. assert (
  1622. self.make_query(
  1623. search_filter_query="last_seen:>%s" % date_to_query_format(timezone.now()),
  1624. sort_by="date",
  1625. ).results
  1626. == []
  1627. )
  1628. assert bulk_raw_query_mock.called
  1629. @mock.patch("sentry.search.snuba.executors.bulk_raw_query")
  1630. def test_reduce_bulk_results_none_data(self, bulk_raw_query_mock):
  1631. bulk_raw_query_mock.return_value = [
  1632. {"data": None, "totals": {"total": 0}},
  1633. {"data": None, "totals": {"total": 0}},
  1634. ]
  1635. assert (
  1636. self.make_query(
  1637. search_filter_query="last_seen:>%s" % date_to_query_format(timezone.now()),
  1638. sort_by="date",
  1639. ).results
  1640. == []
  1641. )
  1642. assert bulk_raw_query_mock.called
  1643. def test_pre_and_post_filtering(self):
  1644. prev_max_pre = options.get("snuba.search.max-pre-snuba-candidates")
  1645. options.set("snuba.search.max-pre-snuba-candidates", 1)
  1646. try:
  1647. # normal queries work as expected
  1648. results = self.make_query(search_filter_query="foo")
  1649. assert set(results) == {self.group1}
  1650. results = self.make_query(search_filter_query="bar")
  1651. assert set(results) == {self.group2}
  1652. # no candidate matches in Sentry, immediately return empty paginator
  1653. results = self.make_query(search_filter_query="NO MATCHES IN SENTRY")
  1654. assert set(results) == set()
  1655. # too many candidates, skip pre-filter, requires >1 postfilter queries
  1656. results = self.make_query()
  1657. assert set(results) == {self.group1, self.group2}
  1658. finally:
  1659. options.set("snuba.search.max-pre-snuba-candidates", prev_max_pre)
  1660. def test_optimizer_enabled(self):
  1661. prev_optimizer_enabled = options.get("snuba.search.pre-snuba-candidates-optimizer")
  1662. options.set("snuba.search.pre-snuba-candidates-optimizer", True)
  1663. try:
  1664. results = self.make_query(
  1665. search_filter_query="server:example.com",
  1666. environments=[self.environments["production"]],
  1667. )
  1668. assert set(results) == {self.group1}
  1669. finally:
  1670. options.set("snuba.search.pre-snuba-candidates-optimizer", prev_optimizer_enabled)
  1671. def test_search_out_of_range(self):
  1672. the_date = datetime(2000, 1, 1, 0, 0, 0, tzinfo=UTC)
  1673. results = self.make_query(
  1674. search_filter_query=f"event.timestamp:>{the_date} event.timestamp:<{the_date}",
  1675. date_from=the_date,
  1676. date_to=the_date,
  1677. )
  1678. assert set(results) == set()
  1679. def test_regressed_in_release(self):
  1680. # expect no groups within the results since there are no releases
  1681. results = self.make_query(search_filter_query="regressed_in_release:fake")
  1682. assert set(results) == set()
  1683. # expect no groups even though there is a release; since no group regressed in this release
  1684. release_1 = self.create_release()
  1685. results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
  1686. assert set(results) == set()
  1687. # Create a new event so that we get a group in this release
  1688. group = self.store_event(
  1689. data={
  1690. "release": release_1.version,
  1691. },
  1692. project_id=self.project.id,
  1693. ).group
  1694. # # Should still be no group since we didn't regress in this release
  1695. results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
  1696. assert set(results) == set()
  1697. record_group_history(group, GroupHistoryStatus.REGRESSED, release=release_1)
  1698. results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
  1699. assert set(results) == {group}
  1700. # Make sure this works correctly with multiple releases
  1701. release_2 = self.create_release()
  1702. group_2 = self.store_event(
  1703. data={
  1704. "fingerprint": ["put-me-in-group9001"],
  1705. "event_id": "a" * 32,
  1706. "release": release_2.version,
  1707. },
  1708. project_id=self.project.id,
  1709. ).group
  1710. record_group_history(group_2, GroupHistoryStatus.REGRESSED, release=release_2)
  1711. results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
  1712. assert set(results) == {group}
  1713. results = self.make_query(search_filter_query="regressed_in_release:%s" % release_2.version)
  1714. assert set(results) == {group_2}
  1715. def test_first_release(self):
  1716. # expect no groups within the results since there are no releases
  1717. results = self.make_query(search_filter_query="first_release:%s" % "fake")
  1718. assert set(results) == set()
  1719. # expect no groups even though there is a release; since no group
  1720. # is attached to a release
  1721. release_1 = self.create_release(self.project)
  1722. results = self.make_query(search_filter_query="first_release:%s" % release_1.version)
  1723. assert set(results) == set()
  1724. # Create a new event so that we get a group in this release
  1725. group = self.store_event(
  1726. data={
  1727. "fingerprint": ["put-me-in-group9001"],
  1728. "event_id": "a" * 32,
  1729. "message": "hello",
  1730. "environment": "production",
  1731. "tags": {"server": "example.com"},
  1732. "release": release_1.version,
  1733. "stacktrace": {"frames": [{"module": "group1"}]},
  1734. },
  1735. project_id=self.project.id,
  1736. ).group
  1737. results = self.make_query(search_filter_query="first_release:%s" % release_1.version)
  1738. assert set(results) == {group}
    def test_first_release_in_syntax(self):
        """`first_release:[...]` matches groups whose first release is any of
        the listed versions."""
        # expect no groups within the results since there are no releases
        self.run_test_query("first_release:[fake, fake2]", [])
        # expect no groups even though there is a release; since no group
        # is attached to a release
        release_1 = self.create_release(self.project)
        release_2 = self.create_release(self.project)
        self.run_test_query(f"first_release:[{release_1.version}, {release_2.version}]", [])
        # Create a new event so that we get a group in release 1
        group = self.store_event(
            data={
                "fingerprint": ["put-me-in-group9001"],
                "event_id": "a" * 32,
                "message": "hello",
                "environment": "production",
                "tags": {"server": "example.com"},
                "release": release_1.version,
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        ).group
        self.run_test_query(
            f"first_release:[{release_1.version}, {release_2.version}]",
            [group],
            [self.group1, self.group2],
        )
        # Create a new event so that we get a group in release 2
        group_2 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group9002"],
                "event_id": "a" * 32,
                "message": "hello",
                "environment": "production",
                "tags": {"server": "example.com"},
                "release": release_2.version,
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        ).group
        # Both groups now match the two-release in-syntax query.
        self.run_test_query(
            f"first_release:[{release_1.version}, {release_2.version}]",
            [group, group_2],
            [self.group1, self.group2],
        )
  1783. def test_first_release_environments(self):
  1784. results = self.make_query(
  1785. environments=[self.environments["production"]],
  1786. search_filter_query="first_release:fake",
  1787. )
  1788. assert set(results) == set()
  1789. release = self.create_release(self.project)
  1790. group_env = GroupEnvironment.get_or_create(
  1791. group_id=self.group1.id, environment_id=self.environments["production"].id
  1792. )[0]
  1793. results = self.make_query(
  1794. environments=[self.environments["production"]],
  1795. search_filter_query=f"first_release:{release.version}",
  1796. )
  1797. assert set(results) == set()
  1798. group_env.first_release = release
  1799. group_env.save()
  1800. results = self.make_query(
  1801. environments=[self.environments["production"]],
  1802. search_filter_query=f"first_release:{release.version}",
  1803. )
  1804. assert set(results) == {self.group1}
    def test_first_release_environments_in_syntax(self):
        """`first_release:[...]` combined with environment filters: groups
        match via GroupEnvironment rows carrying the release, and duplicate
        rows must not yield duplicate results."""
        self.run_test_query(
            "first_release:[fake, fake2]",
            [],
            [self.group1, self.group2],
            environments=[self.environments["production"]],
        )
        release = self.create_release(self.project)
        # Attach the release to group1's production GroupEnvironment.
        group_1_env = GroupEnvironment.objects.get(
            group_id=self.group1.id, environment_id=self.environments["production"].id
        )
        group_1_env.update(first_release=release)
        self.group1.first_release = release
        self.group1.save()
        self.run_test_query(
            f"first_release:[{release.version}, fake2]",
            [self.group1],
            [self.group2],
            environments=[self.environments["production"]],
        )
        # Attach the same release to group2's staging GroupEnvironment.
        group_2_env = GroupEnvironment.objects.get(
            group_id=self.group2.id, environment_id=self.environments["staging"].id
        )
        group_2_env.update(first_release=release)
        self.group2.first_release = release
        self.group2.save()
        self.run_test_query(
            f"first_release:[{release.version}, fake2]",
            [self.group1, self.group2],
            [],
            environments=[self.environments["production"], self.environments["staging"]],
        )
        # Make sure we don't get duplicate groups
        GroupEnvironment.objects.create(
            group_id=self.group1.id,
            environment_id=self.environments["staging"].id,
            first_release=release,
        )
        self.run_test_query(
            f"first_release:[{release.version}, fake2]",
            [self.group1, self.group2],
            [],
            environments=[self.environments["production"], self.environments["staging"]],
        )
  1849. def test_query_enclosed_in_quotes(self):
  1850. results = self.make_query(search_filter_query='"foo"')
  1851. assert set(results) == {self.group1}
  1852. results = self.make_query(search_filter_query='"bar"')
  1853. assert set(results) == {self.group2}
    @xfail_if_not_postgres("Wildcard searching only supported in Postgres")
    def test_wildcard(self):
        """Wildcard (`*`) matching in free text and tag values, including
        case-insensitivity, negation and literal bracket characters."""
        # The message deliberately contains literal [brackets] so we can
        # check they are not treated as a character class.
        escaped_event = self.store_event(
            data={
                "fingerprint": ["hello-there"],
                "event_id": "f" * 32,
                "message": "somet[hing]",
                "environment": "production",
                "tags": {"server": "example.net"},
                "timestamp": iso_format(self.base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        )
        # Note: Adding in `environment:production` so that we make sure we query
        # in both snuba and postgres
        results = self.make_query(search_filter_query="environment:production so*t")
        assert set(results) == {escaped_event.group}
        # Make sure it's case insensitive
        results = self.make_query(search_filter_query="environment:production SO*t")
        assert set(results) == {escaped_event.group}
        results = self.make_query(search_filter_query="environment:production so*zz")
        assert set(results) == set()
        # Literal brackets in the query match the literal brackets in the message.
        results = self.make_query(search_filter_query="environment:production [hing]")
        assert set(results) == {escaped_event.group}
        results = self.make_query(search_filter_query="environment:production s*]")
        assert set(results) == {escaped_event.group}
        # Wildcards also apply to tag values, including negated filters.
        results = self.make_query(search_filter_query="environment:production server:example.*")
        assert set(results) == {self.group1, escaped_event.group}
        results = self.make_query(search_filter_query="environment:production !server:*net")
        assert set(results) == {self.group1}
        # TODO: Disabling tests that use [] syntax for the moment. Re-enable
        # these if we decide to add back in, or remove if this comment has been
        # here a while.
        # results = self.make_query(
        #     search_filter_query='environment:production [s][of][mz]',
        # )
        # assert set(results) == set([escaped_event.group])
        # results = self.make_query(
        #     search_filter_query='environment:production [z][of][mz]',
        # )
        # assert set(results) == set()
  1896. def test_null_tags(self):
  1897. tag_event = self.store_event(
  1898. data={
  1899. "fingerprint": ["hello-there"],
  1900. "event_id": "f" * 32,
  1901. "message": "something",
  1902. "environment": "production",
  1903. "tags": {"server": "example.net"},
  1904. "timestamp": iso_format(self.base_datetime),
  1905. "stacktrace": {"frames": [{"module": "group1"}]},
  1906. },
  1907. project_id=self.project.id,
  1908. )
  1909. no_tag_event = self.store_event(
  1910. data={
  1911. "fingerprint": ["hello-there-2"],
  1912. "event_id": "5" * 32,
  1913. "message": "something",
  1914. "environment": "production",
  1915. "timestamp": iso_format(self.base_datetime),
  1916. "stacktrace": {"frames": [{"module": "group2"}]},
  1917. },
  1918. project_id=self.project.id,
  1919. )
  1920. results = self.make_query(search_filter_query="environment:production !server:*net")
  1921. assert set(results) == {self.group1, no_tag_event.group}
  1922. results = self.make_query(search_filter_query="environment:production server:*net")
  1923. assert set(results) == {tag_event.group}
  1924. results = self.make_query(search_filter_query="environment:production !server:example.net")
  1925. assert set(results) == {self.group1, no_tag_event.group}
  1926. results = self.make_query(search_filter_query="environment:production server:example.net")
  1927. assert set(results) == {tag_event.group}
  1928. results = self.make_query(search_filter_query="environment:production has:server")
  1929. assert set(results) == {self.group1, tag_event.group}
  1930. results = self.make_query(search_filter_query="environment:production !has:server")
  1931. assert set(results) == {no_tag_event.group}
  1932. def test_null_promoted_tags(self):
  1933. tag_event = self.store_event(
  1934. data={
  1935. "fingerprint": ["hello-there"],
  1936. "event_id": "f" * 32,
  1937. "message": "something",
  1938. "environment": "production",
  1939. "tags": {"logger": "csp"},
  1940. "timestamp": iso_format(self.base_datetime),
  1941. "stacktrace": {"frames": [{"module": "group1"}]},
  1942. },
  1943. project_id=self.project.id,
  1944. )
  1945. no_tag_event = self.store_event(
  1946. data={
  1947. "fingerprint": ["hello-there-2"],
  1948. "event_id": "5" * 32,
  1949. "message": "something",
  1950. "environment": "production",
  1951. "timestamp": iso_format(self.base_datetime),
  1952. "stacktrace": {"frames": [{"module": "group2"}]},
  1953. },
  1954. project_id=self.project.id,
  1955. )
  1956. results = self.make_query(search_filter_query="environment:production !logger:*sp")
  1957. assert set(results) == {self.group1, no_tag_event.group}
  1958. results = self.make_query(search_filter_query="environment:production logger:*sp")
  1959. assert set(results) == {tag_event.group}
  1960. results = self.make_query(search_filter_query="environment:production !logger:csp")
  1961. assert set(results) == {self.group1, no_tag_event.group}
  1962. results = self.make_query(search_filter_query="environment:production logger:csp")
  1963. assert set(results) == {tag_event.group}
  1964. results = self.make_query(search_filter_query="environment:production has:logger")
  1965. assert set(results) == {tag_event.group}
  1966. results = self.make_query(search_filter_query="environment:production !has:logger")
  1967. assert set(results) == {self.group1, no_tag_event.group}
  1968. def test_sort_multi_project(self):
  1969. self.set_up_multi_project()
  1970. results = self.make_query([self.project, self.project2], sort_by="date")
  1971. assert list(results) == [self.group1, self.group_p2, self.group2]
  1972. results = self.make_query([self.project, self.project2], sort_by="new")
  1973. assert list(results) == [self.group2, self.group_p2, self.group1]
  1974. results = self.make_query([self.project, self.project2], sort_by="freq")
  1975. assert list(results) == [self.group1, self.group_p2, self.group2]
  1976. results = self.make_query([self.project, self.project2], sort_by="trends")
  1977. assert list(results) == [
  1978. self.group_p2,
  1979. self.group2,
  1980. self.group1,
  1981. ]
  1982. results = self.make_query([self.project, self.project2], sort_by="user")
  1983. assert list(results) == [self.group1, self.group2, self.group_p2]
  1984. def test_in_syntax_is_invalid(self):
  1985. with pytest.raises(InvalidSearchQuery, match='"in" syntax invalid for "is" search'):
  1986. self.make_query(search_filter_query="is:[unresolved, resolved]")
def test_first_release_any_or_no_environments(self):
    """`first_release` filtering with and without an environment filter.

    Without environments the group-level first release decides membership;
    with environments the per-environment first release decides instead
    (see the scenario table in the comments below).
    """
    # test scenarios for tickets:
    # SEN-571
    # ISSUE-432

    # given the following setup:
    #
    # groups table:
    # group    first_release
    # A        1
    # B        1
    # C        2
    #
    # groupenvironments table:
    # group    environment    first_release
    # A        staging        1
    # A        production     2
    #
    # when querying by first release, the appropriate set of groups should be displayed:
    #
    #     first_release: 1
    #         env=[]: A, B
    #         env=[production, staging]: A
    #         env=[staging]: A
    #         env=[production]: nothing
    #
    #     first_release: 2
    #         env=[]: C
    #         env=[production, staging]: A
    #         env=[staging]: nothing
    #         env=[production]: A

    # create an issue/group whose events that occur in 2 distinct environments
    group_a_event_1 = self.store_event(
        data={
            "fingerprint": ["group_a"],
            "event_id": "aaa" + ("1" * 29),
            "environment": "example_staging",
            "release": "release_1",
        },
        project_id=self.project.id,
    )
    group_a_event_2 = self.store_event(
        data={
            "fingerprint": ["group_a"],
            "event_id": "aaa" + ("2" * 29),
            "environment": "example_production",
            "release": "release_2",
        },
        project_id=self.project.id,
    )
    group_a = group_a_event_1.group

    # get the environments for group_a
    prod_env = group_a_event_2.get_environment()
    staging_env = group_a_event_1.get_environment()

    # create an issue/group whose event that occur in no environments
    # but will be tied to release release_1
    group_b_event_1 = self.store_event(
        data={
            "fingerprint": ["group_b"],
            "event_id": "bbb" + ("1" * 29),
            "release": "release_1",
        },
        project_id=self.project.id,
    )
    assert group_b_event_1.get_environment().name == ""  # has no environment
    group_b = group_b_event_1.group

    # create an issue/group whose event that occur in no environments
    # but will be tied to release release_2
    group_c_event_1 = self.store_event(
        data={
            "fingerprint": ["group_c"],
            "event_id": "ccc" + ("1" * 29),
            "release": "release_2",
        },
        project_id=self.project.id,
    )
    assert group_c_event_1.get_environment().name == ""  # has no environment
    group_c = group_c_event_1.group

    # query by release release_1
    results = self.make_query(search_filter_query="first_release:release_1")
    assert set(results) == {group_a, group_b}

    results = self.make_query(
        environments=[staging_env, prod_env],
        search_filter_query="first_release:release_1",
    )
    assert set(results) == {group_a}

    results = self.make_query(
        environments=[staging_env], search_filter_query="first_release:release_1"
    )
    assert set(results) == {group_a}

    results = self.make_query(
        environments=[prod_env], search_filter_query="first_release:release_1"
    )
    assert set(results) == set()

    # query by release release_2
    results = self.make_query(search_filter_query="first_release:release_2")
    assert set(results) == {group_c}

    results = self.make_query(
        environments=[staging_env, prod_env],
        search_filter_query="first_release:release_2",
    )
    assert set(results) == {group_a}

    results = self.make_query(
        environments=[staging_env], search_filter_query="first_release:release_2"
    )
    assert set(results) == set()

    results = self.make_query(
        environments=[prod_env], search_filter_query="first_release:release_2"
    )
    assert set(results) == {group_a}
def test_all_fields_do_not_error(self):
    # Just a sanity check to make sure that all fields can be successfully
    # searched on without returning type errors and other schema related
    # issues.
    def test_query(query):
        # Any SnubaError here means the generated query was malformed for
        # this field; surface it as a test failure with the offending query.
        try:
            self.make_query(search_filter_query=query)
        except SnubaError as e:
            self.fail(f"Query {query} errored. Error info: {e}")  # type:ignore[attr-defined]

    for key in SENTRY_SNUBA_MAP:
        # These keys get special handling elsewhere, so they are skipped here.
        if key in ["project.id", "issue.id", "performance.issue_ids", "status"]:
            continue
        test_query("has:%s" % key)
        test_query("!has:%s" % key)

        # Pick a syntactically valid value for the key's type so the query parses.
        if key == "error.handled":
            val: Any = 1
        elif key in issue_search_config.numeric_keys:
            val = "123"
        elif key in issue_search_config.date_keys:
            val = self.base_datetime.isoformat()
        elif key in issue_search_config.boolean_keys:
            val = "true"
        elif key in {"trace.span", "trace.parent_span"}:
            val = "abcdef1234abcdef"
            # NOTE(review): negation is only exercised for span and free-form
            # string keys below — confirm that skipping `!` for the numeric /
            # date / boolean branches is intentional.
            test_query(f"!{key}:{val}")
        else:
            val = "abadcafedeadbeefdeaffeedabadfeed"
            test_query(f"!{key}:{val}")

        test_query(f"{key}:{val}")
  2125. def test_message_negation(self):
  2126. self.store_event(
  2127. data={
  2128. "fingerprint": ["put-me-in-group1"],
  2129. "event_id": "2" * 32,
  2130. "message": "something",
  2131. "timestamp": iso_format(self.base_datetime),
  2132. },
  2133. project_id=self.project.id,
  2134. )
  2135. results = self.make_query(search_filter_query="!message:else")
  2136. results2 = self.make_query(search_filter_query="!message:else")
  2137. assert list(results) == list(results2)
  2138. def test_error_main_thread_true(self):
  2139. myProject = self.create_project(
  2140. name="Foo", slug="foo", teams=[self.team], fire_project_created=True
  2141. )
  2142. event = self.store_event(
  2143. data={
  2144. "event_id": "1" * 32,
  2145. "message": "something",
  2146. "timestamp": iso_format(self.base_datetime),
  2147. "exception": {
  2148. "values": [
  2149. {
  2150. "type": "SyntaxError",
  2151. "value": "hello world",
  2152. "thread_id": 1,
  2153. },
  2154. ],
  2155. },
  2156. "threads": {
  2157. "values": [
  2158. {
  2159. "id": 1,
  2160. "main": True,
  2161. },
  2162. ],
  2163. },
  2164. },
  2165. project_id=myProject.id,
  2166. )
  2167. myGroup = event.groups[0]
  2168. results = self.make_query(
  2169. projects=[myProject],
  2170. search_filter_query="error.main_thread:1",
  2171. sort_by="date",
  2172. )
  2173. assert list(results) == [myGroup]
  2174. def test_error_main_thread_false(self):
  2175. myProject = self.create_project(
  2176. name="Foo2", slug="foo2", teams=[self.team], fire_project_created=True
  2177. )
  2178. event = self.store_event(
  2179. data={
  2180. "event_id": "2" * 32,
  2181. "message": "something",
  2182. "timestamp": iso_format(self.base_datetime),
  2183. "exception": {
  2184. "values": [
  2185. {
  2186. "type": "SyntaxError",
  2187. "value": "hello world",
  2188. "thread_id": 1,
  2189. },
  2190. ],
  2191. },
  2192. "threads": {
  2193. "values": [
  2194. {
  2195. "id": 1,
  2196. "main": False,
  2197. },
  2198. ],
  2199. },
  2200. },
  2201. project_id=myProject.id,
  2202. )
  2203. myGroup = event.groups[0]
  2204. results = self.make_query(
  2205. projects=[myProject],
  2206. search_filter_query="error.main_thread:0",
  2207. sort_by="date",
  2208. )
  2209. assert list(results) == [myGroup]
  2210. def test_error_main_thread_no_results(self):
  2211. myProject = self.create_project(
  2212. name="Foo3", slug="foo3", teams=[self.team], fire_project_created=True
  2213. )
  2214. self.store_event(
  2215. data={
  2216. "event_id": "3" * 32,
  2217. "message": "something",
  2218. "timestamp": iso_format(self.base_datetime),
  2219. "exception": {
  2220. "values": [
  2221. {
  2222. "type": "SyntaxError",
  2223. "value": "hello world",
  2224. "thread_id": 1,
  2225. },
  2226. ],
  2227. },
  2228. "threads": {
  2229. "values": [
  2230. {
  2231. "id": 1,
  2232. },
  2233. ],
  2234. },
  2235. },
  2236. project_id=myProject.id,
  2237. )
  2238. results = self.make_query(
  2239. projects=[myProject],
  2240. search_filter_query="error.main_thread:1",
  2241. sort_by="date",
  2242. )
  2243. assert len(results) == 0
class EventsSnubaSearchTest(TestCase, EventsSnubaSearchTestCases):
    """Runs the shared events-search test cases against the plain Snuba backend."""

    pass
  2246. @apply_feature_flag_on_cls("organizations:issue-search-group-attributes-side-query")
  2247. class EventsJoinedGroupAttributesSnubaSearchTest(TransactionTestCase, EventsSnubaSearchTestCases):
  2248. def setUp(self):
  2249. def post_insert(snapshot: GroupAttributesSnapshot):
  2250. from sentry.utils import snuba
  2251. try:
  2252. resp = snuba._snuba_pool.urlopen(
  2253. "POST",
  2254. "/tests/entities/group_attributes/insert",
  2255. body=json.dumps([snapshot]),
  2256. headers={},
  2257. )
  2258. if resp.status != 200:
  2259. raise snuba.SnubaError(
  2260. f"HTTP {resp.status} response from Snuba! {json.loads(resp.data)}"
  2261. )
  2262. return None
  2263. except urllib3.exceptions.HTTPError as err:
  2264. raise snuba.SnubaError(err)
  2265. with (
  2266. self.options({"issues.group_attributes.send_kafka": True}),
  2267. mock.patch("sentry.issues.attributes.produce_snapshot_to_kafka", post_insert),
  2268. ):
  2269. super().setUp()
  2270. @mock.patch("sentry.utils.metrics.timer")
  2271. @mock.patch("sentry.utils.metrics.incr")
  2272. def test_is_unresolved_query_logs_metric(self, metrics_incr, metrics_timer):
  2273. results = self.make_query(search_filter_query="is:unresolved")
  2274. assert set(results) == {self.group1}
  2275. # introduce a slight delay so the async future has time to run and log the metric
  2276. time.sleep(1)
  2277. metrics_incr_called = False
  2278. for call in metrics_incr.call_args_list:
  2279. args, kwargs = call
  2280. if "snuba.search.group_attributes_joined.events_compared" in set(args):
  2281. metrics_incr_called = True
  2282. assert metrics_incr_called
  2283. metrics_timer_called = False
  2284. for call in metrics_timer.call_args_list:
  2285. args, kwargs = call
  2286. if "snuba.search.group_attributes_joined.duration" in set(args):
  2287. metrics_timer_called = True
  2288. assert metrics_timer_called
  2289. def test_issue_priority(self):
  2290. results = self.make_query(search_filter_query="issue.priority:high")
  2291. assert set(results) == {self.group1, self.group2}
  2292. event_3 = self.store_event(
  2293. data={
  2294. "fingerprint": ["put-me-in-group3"],
  2295. "event_id": "c" * 32,
  2296. "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
  2297. },
  2298. project_id=self.project.id,
  2299. )
  2300. group_3 = event_3.group
  2301. group_3.update(priority=PriorityLevel.LOW)
  2302. results = self.make_query(search_filter_query="issue.priority:low")
  2303. assert set(results) == {group_3}
  2304. results = self.make_query(search_filter_query="issue.priority:[high, low]")
  2305. assert set(results) == {self.group1, self.group2, group_3}
  2306. with pytest.raises(InvalidSearchQuery):
  2307. self.make_query(search_filter_query="issue.category:wrong")
class EventsTrendsTest(TestCase, SharedSnubaMixin, OccurrenceTestMixin):
    """Tests for the "trends" sort: how event recency, log level, stacktrace
    presence, and halflife weights affect the per-group score returned by
    the query executor."""

    @property
    def backend(self):
        # Trends sorting is exercised against the events-dataset backend.
        return EventsDatasetSnubaSearchBackend()

    def test_trends_sort_old_and_new_events(self):
        """Test that an issue with only one old event is ranked lower than an issue with only one new event"""
        new_project = self.create_project(organization=self.project.organization)
        base_datetime = before_now(days=3)

        recent_event = self.store_event(
            data={
                "fingerprint": ["put-me-in-recent-group"],
                "event_id": "c" * 32,
                "message": "group1",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "event3@example.com"},
                "timestamp": iso_format(base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=new_project.id,
        )
        old_event = self.store_event(
            data={
                "fingerprint": ["put-me-in-old-group"],
                "event_id": "a" * 32,
                "message": "foo. Also, this message is intended to be greater than 256 characters so that we can put some unique string identifier after that point in the string. The purpose of this is in order to verify we are using snuba to search messages instead of Postgres (postgres truncates at 256 characters and clickhouse does not). santryrox.",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "old_event@example.com"},
                "timestamp": iso_format(base_datetime - timedelta(days=20)),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=new_project.id,
        )
        # datetime(2017, 9, 6, 0, 0)
        old_event.data["timestamp"] = 1504656000.0

        weights: TrendsSortWeights = {
            "log_level": 0,
            "has_stacktrace": 0,
            "relative_volume": 1,
            "event_halflife_hours": 4,
            "issue_halflife_hours": 24 * 7,
            "v2": False,
            "norm": False,
        }
        results = self.make_query(
            sort_by="trends",
            projects=[new_project],
            aggregate_kwargs=weights,
        )
        recent_group = Group.objects.get(id=recent_event.group.id)
        old_group = Group.objects.get(id=old_event.group.id)
        # The group with the newer event must rank first.
        assert list(results) == [recent_group, old_group]

    def test_trends_sort_v2(self):
        """Test that the v2 formula works."""
        new_project = self.create_project(organization=self.project.organization)
        base_datetime = before_now(days=3)

        recent_event = self.store_event(
            data={
                "fingerprint": ["put-me-in-recent-group"],
                "event_id": "c" * 32,
                "message": "group1",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "event3@example.com"},
                "timestamp": iso_format(base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=new_project.id,
        )
        old_event = self.store_event(
            data={
                "fingerprint": ["put-me-in-old-group"],
                "event_id": "a" * 32,
                "message": "foo. Also, this message is intended to be greater than 256 characters so that we can put some unique string identifier after that point in the string. The purpose of this is in order to verify we are using snuba to search messages instead of Postgres (postgres truncates at 256 characters and clickhouse does not). santryrox.",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "old_event@example.com"},
                "timestamp": iso_format(base_datetime - timedelta(days=20)),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=new_project.id,
        )
        # datetime(2017, 9, 6, 0, 0)
        old_event.data["timestamp"] = 1504656000.0

        # Identical setup to test_trends_sort_old_and_new_events, but with the
        # v2 scoring formula enabled; the ordering expectation is the same.
        weights: TrendsSortWeights = {
            "log_level": 0,
            "has_stacktrace": 0,
            "relative_volume": 1,
            "event_halflife_hours": 4,
            "issue_halflife_hours": 24 * 7,
            "v2": True,
            "norm": False,
        }
        results = self.make_query(
            sort_by="trends",
            projects=[new_project],
            aggregate_kwargs=weights,
        )
        recent_group = Group.objects.get(id=recent_event.group.id)
        old_group = Group.objects.get(id=old_event.group.id)
        assert list(results) == [recent_group, old_group]

    def test_trends_log_level_results(self):
        """Test that the scoring results change when we pass in different log level weights"""
        base_datetime = before_now(hours=1)
        # group1: older event but level "fatal"; group2: newer event, level "error".
        event1 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "c" * 32,
                "timestamp": iso_format(base_datetime - timedelta(hours=1)),
                "message": "foo",
                "stacktrace": {"frames": [{"module": "group1"}]},
                "environment": "staging",
                "level": "fatal",
            },
            project_id=self.project.id,
        )
        event2 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "event_id": "d" * 32,
                "timestamp": iso_format(base_datetime),
                "message": "bar",
                "stacktrace": {"frames": [{"module": "group2"}]},
                "environment": "staging",
                "level": "error",
            },
            project_id=self.project.id,
        )
        group1 = Group.objects.get(id=event1.group.id)
        group2 = Group.objects.get(id=event2.group.id)

        agg_kwargs = {
            "trends": {
                "log_level": 0,
                "has_stacktrace": 0,
                "relative_volume": 1,
                "event_halflife_hours": 4,
                "issue_halflife_hours": 24 * 7,
                "v2": False,
                "norm": False,
            }
        }
        query_executor = self.backend._get_query_executor()
        results_zero_log_level = query_executor.snuba_search(
            start=None,
            end=None,
            project_ids=[self.project.id],
            environment_ids=[],
            sort_field="trends",
            organization=self.organization,
            group_ids=[group1.id, group2.id],
            limit=150,
            aggregate_kwargs=agg_kwargs,
        )[0]
        group1_score_before = results_zero_log_level[0][1]
        group2_score_before = results_zero_log_level[1][1]
        # initially group 2's score is higher since it has a more recent event
        assert group2_score_before > group1_score_before

        agg_kwargs["trends"].update({"log_level": 5})

        results2 = query_executor.snuba_search(
            start=None,
            end=None,
            project_ids=[self.project.id],
            environment_ids=[],
            sort_field="trends",
            organization=self.organization,
            group_ids=[group1.id, group2.id],
            limit=150,
            aggregate_kwargs=agg_kwargs,
        )[0]
        group1_score_after = results2[0][1]
        group2_score_after = results2[1][1]
        # ensure fatal has a higher score than error
        assert group1_score_after > group2_score_after

    def test_trends_has_stacktrace_results(self):
        """Test that the scoring results change when we pass in different has_stacktrace weights"""
        base_datetime = before_now(hours=1)
        agg_kwargs = {
            "trends": {
                "log_level": 0,
                "has_stacktrace": 0,
                "relative_volume": 1,
                "event_halflife_hours": 4,
                "issue_halflife_hours": 24 * 7,
                "v2": False,
                "norm": False,
            }
        }
        query_executor = self.backend._get_query_executor()

        no_stacktrace_event = self.store_event(
            data={
                "event_id": "d" * 32,
                "message": "oh no",
                "timestamp": iso_format(base_datetime - timedelta(hours=1)),
            },
            project_id=self.project.id,
        )
        group1 = Group.objects.get(id=no_stacktrace_event.group.id)

        # NOTE(review): this event reuses event_id "d" * 32 from the event
        # above — confirm the duplicate id within one project is intentional.
        stacktrace_event = self.store_event(
            data={
                "event_id": "d" * 32,
                "exception": {
                    "values": [
                        {
                            "type": "AnError",
                            "value": "Bad request",
                            "stacktrace": {
                                "frames": [
                                    {
                                        "module": "<my module>",
                                    },
                                ]
                            },
                        }
                    ]
                },
                "timestamp": iso_format(base_datetime - timedelta(hours=1)),
            },
            project_id=self.project.id,
        )
        group2 = Group.objects.get(id=stacktrace_event.group.id)

        results = query_executor.snuba_search(
            start=None,
            end=None,
            project_ids=[self.project.id],
            environment_ids=[],
            sort_field="trends",
            organization=self.organization,
            group_ids=[group1.id, group2.id],
            limit=150,
            aggregate_kwargs=agg_kwargs,
        )[0]
        group1_score = results[0][1]
        group2_score = results[1][1]
        # With a zero has_stacktrace weight the two groups score identically.
        assert group1_score == group2_score

        agg_kwargs["trends"].update({"has_stacktrace": 3})
        results = query_executor.snuba_search(
            start=None,
            end=None,
            project_ids=[self.project.id],
            environment_ids=[],
            sort_field="trends",
            organization=self.organization,
            group_ids=[group1.id, group2.id],
            limit=150,
            aggregate_kwargs=agg_kwargs,
        )[0]
        group1_score = results[0][1]
        group2_score = results[1][1]
        # check that a group with an event with a stacktrace has a higher weight than one without
        assert group1_score < group2_score

    def test_trends_event_halflife_results(self):
        """Test that the scoring results change when we pass in different event halflife weights"""
        base_datetime = before_now(hours=1)
        event1 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "a" * 32,
                "timestamp": iso_format(base_datetime - timedelta(hours=1)),
                "message": "foo",
                "stacktrace": {"frames": [{"module": "group1"}]},
                "environment": "staging",
                "level": "fatal",
            },
            project_id=self.project.id,
        )
        event2 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "event_id": "b" * 32,
                "timestamp": iso_format(base_datetime),
                "message": "bar",
                "stacktrace": {"frames": [{"module": "group2"}]},
                "environment": "staging",
                "level": "error",
            },
            project_id=self.project.id,
        )
        group1 = Group.objects.get(id=event1.group.id)
        group2 = Group.objects.get(id=event2.group.id)

        agg_kwargs = {
            "trends": {
                "log_level": 0,
                "has_stacktrace": 0,
                "relative_volume": 1,
                "event_halflife_hours": 4,
                "issue_halflife_hours": 24 * 7,
                "v2": False,
                "norm": False,
            }
        }
        query_executor = self.backend._get_query_executor()
        results = query_executor.snuba_search(
            start=None,
            end=None,
            project_ids=[self.project.id],
            environment_ids=[],
            sort_field="trends",
            organization=self.organization,
            group_ids=[group1.id, group2.id],
            limit=150,
            aggregate_kwargs=agg_kwargs,
        )[0]
        group1_score_before = results[0][1]
        group2_score_before = results[1][1]
        # initially group 2's score is higher since it has a more recent event
        assert group2_score_before > group1_score_before

        # A shorter halflife decays the older event's contribution faster.
        agg_kwargs["trends"].update({"event_halflife_hours": 2})
        results = query_executor.snuba_search(
            start=None,
            end=None,
            project_ids=[self.project.id],
            environment_ids=[],
            sort_field="trends",
            organization=self.organization,
            group_ids=[group1.id, group2.id],
            limit=150,
            aggregate_kwargs=agg_kwargs,
        )[0]
        group1_score_after = results[0][1]
        group2_score_after = results[1][1]
        assert group1_score_after < group2_score_after

    def test_trends_mixed_group_types(self):
        # An error group and an issue-platform (profile occurrence) group
        # should both receive a positive trends score in the same search.
        base_datetime = before_now(hours=1)

        error_event = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "a" * 32,
                "timestamp": iso_format(base_datetime - timedelta(hours=1)),
                "message": "foo",
                "stacktrace": {"frames": [{"module": "group1"}]},
                "environment": "staging",
                "level": "fatal",
            },
            project_id=self.project.id,
        )
        error_group = error_event.group

        profile_event_id = uuid.uuid4().hex
        _, group_info = self.process_occurrence(
            event_id=profile_event_id,
            project_id=self.project.id,
            event_data={
                "title": "some problem",
                "platform": "python",
                "tags": {"my_tag": "1"},
                "timestamp": before_now(minutes=1).isoformat(),
                "received": before_now(minutes=1).isoformat(),
            },
        )
        assert group_info is not None
        profile_group_1 = group_info.group

        agg_kwargs = {
            "trends": {
                "log_level": 0,
                "has_stacktrace": 0,
                "relative_volume": 1,
                "event_halflife_hours": 4,
                "issue_halflife_hours": 24 * 7,
                "v2": False,
                "norm": False,
            }
        }
        query_executor = self.backend._get_query_executor()
        with self.feature(
            [
                ProfileFileIOGroupType.build_visible_feature_name(),
            ]
        ):
            results = query_executor.snuba_search(
                start=None,
                end=None,
                project_ids=[self.project.id],
                environment_ids=[],
                sort_field="trends",
                organization=self.organization,
                group_ids=[profile_group_1.id, error_group.id],
                limit=150,
                aggregate_kwargs=agg_kwargs,
            )[0]
        error_group_score = results[0][1]
        profile_group_score = results[1][1]
        assert error_group_score > 0
        assert profile_group_score > 0
  2687. class EventsTransactionsSnubaSearchTest(TestCase, SharedSnubaMixin):
@property
def backend(self):
    """The search backend under test: the events-dataset Snuba backend."""
    return EventsDatasetSnubaSearchBackend()
def setUp(self):
    """Create two performance (render-blocking-asset) groups and two error
    groups, all tagged my_tag:1, for the mixed-category search tests below.

    The perf groups are captured by patching the eventstream send so the
    created group object can be read off the mock's call args.
    """
    super().setUp()
    self.base_datetime = before_now(days=3)

    transaction_event_data = {
        "level": "info",
        "message": "ayoo",
        "type": "transaction",
        "culprit": "app/components/events/eventEntries in map",
        "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
    }
    with (
        mock.patch(
            "sentry.issues.ingest.send_issue_occurrence_to_eventstream",
            side_effect=send_issue_occurrence_to_eventstream,
        ) as mock_eventstream,
        # Zero noise threshold so a single occurrence creates a group immediately.
        mock.patch.object(
            PerformanceRenderBlockingAssetSpanGroupType,
            "noise_config",
            new=NoiseConfig(0, timedelta(minutes=1)),
        ),
    ):
        self.store_event(
            data={
                **transaction_event_data,
                "event_id": "a" * 32,
                "timestamp": iso_format(before_now(minutes=1)),
                "start_timestamp": iso_format(before_now(minutes=1, seconds=5)),
                "tags": {"my_tag": 1},
                "fingerprint": [
                    f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group1"
                ],
            },
            project_id=self.project.id,
        )
        self.perf_group_1 = mock_eventstream.call_args[0][2].group
        # NOTE(review): this second transaction reuses event_id "a" * 32 from
        # the first one — confirm the duplicate id is intentional.
        self.store_event(
            data={
                **transaction_event_data,
                "event_id": "a" * 32,
                "timestamp": iso_format(before_now(minutes=2)),
                "start_timestamp": iso_format(before_now(minutes=2, seconds=5)),
                "tags": {"my_tag": 1},
                "fingerprint": [
                    f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group2"
                ],
            },
            project_id=self.project.id,
        )
        self.perf_group_2 = mock_eventstream.call_args[0][2].group

    error_event_data = {
        "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
        "message": "bar",
        "environment": "staging",
        "tags": {
            "server": "example.com",
            "url": "http://example.com",
            "sentry:user": "event2@example.com",
            "my_tag": 1,
        },
    }
    error_event = self.store_event(
        data={
            **error_event_data,
            "fingerprint": ["put-me-in-error_group_1"],
            "event_id": "c" * 32,
            "stacktrace": {"frames": [{"module": "error_group_1"}]},
        },
        project_id=self.project.id,
    )
    self.error_group_1 = error_event.group
    error_event_2 = self.store_event(
        data={
            **error_event_data,
            "fingerprint": ["put-me-in-error_group_2"],
            "event_id": "d" * 32,
            "stacktrace": {"frames": [{"module": "error_group_2"}]},
        },
        project_id=self.project.id,
    )
    self.error_group_2 = error_event_2.group
  2771. def test_performance_query(self):
  2772. with self.feature(
  2773. [
  2774. self.perf_group_1.issue_type.build_visible_feature_name(),
  2775. ]
  2776. ):
  2777. results = self.make_query(search_filter_query="issue.category:performance my_tag:1")
  2778. assert list(results) == [self.perf_group_1, self.perf_group_2]
  2779. results = self.make_query(
  2780. search_filter_query="issue.type:[performance_n_plus_one_db_queries, performance_render_blocking_asset_span] my_tag:1"
  2781. )
  2782. assert list(results) == [self.perf_group_1, self.perf_group_2]
  2783. def test_performance_query_no_duplicates(self):
  2784. # Regression test to catch an issue we had with performance issues showing duplicated in the
  2785. # issue stream. This was caused by us dual writing perf issues to transactions and to the
  2786. # issue platform. We'd end up reading the same issue twice and duplicate it in the response.
  2787. with self.feature(
  2788. [
  2789. self.perf_group_1.issue_type.build_visible_feature_name(),
  2790. ]
  2791. ):
  2792. results = self.make_query(search_filter_query="!issue.category:error my_tag:1")
  2793. assert list(results) == [self.perf_group_1, self.perf_group_2]
  2794. def test_performance_issue_search_feature_off(self):
  2795. with Feature({"organizations:performance-issues-search": False}):
  2796. results = self.make_query(search_filter_query="issue.category:performance my_tag:1")
  2797. assert list(results) == []
  2798. with self.feature(
  2799. [
  2800. self.perf_group_1.issue_type.build_visible_feature_name(),
  2801. ]
  2802. ):
  2803. results = self.make_query(search_filter_query="issue.category:performance my_tag:1")
  2804. assert list(results) == [self.perf_group_1, self.perf_group_2]
  2805. def test_error_performance_query(self):
  2806. with self.feature(
  2807. [
  2808. self.perf_group_1.issue_type.build_visible_feature_name(),
  2809. ]
  2810. ):
  2811. results = self.make_query(search_filter_query="my_tag:1")
  2812. assert list(results) == [
  2813. self.perf_group_1,
  2814. self.perf_group_2,
  2815. self.error_group_2,
  2816. self.error_group_1,
  2817. ]
  2818. results = self.make_query(
  2819. search_filter_query="issue.category:[performance, error] my_tag:1"
  2820. )
  2821. assert list(results) == [
  2822. self.perf_group_1,
  2823. self.perf_group_2,
  2824. self.error_group_2,
  2825. self.error_group_1,
  2826. ]
  2827. results = self.make_query(
  2828. search_filter_query="issue.type:[performance_render_blocking_asset_span, error] my_tag:1"
  2829. )
  2830. assert list(results) == [
  2831. self.perf_group_1,
  2832. self.perf_group_2,
  2833. self.error_group_2,
  2834. self.error_group_1,
  2835. ]
  2836. def test_cursor_performance_issues(self):
  2837. with self.feature(
  2838. [
  2839. self.perf_group_1.issue_type.build_visible_feature_name(),
  2840. ]
  2841. ):
  2842. results = self.make_query(
  2843. projects=[self.project],
  2844. search_filter_query="issue.category:performance my_tag:1",
  2845. sort_by="date",
  2846. limit=1,
  2847. count_hits=True,
  2848. )
  2849. assert list(results) == [self.perf_group_1]
  2850. assert results.hits == 2
  2851. results = self.make_query(
  2852. projects=[self.project],
  2853. search_filter_query="issue.category:performance my_tag:1",
  2854. sort_by="date",
  2855. limit=1,
  2856. cursor=results.next,
  2857. count_hits=True,
  2858. )
  2859. assert list(results) == [self.perf_group_2]
  2860. assert results.hits == 2
  2861. results = self.make_query(
  2862. projects=[self.project],
  2863. search_filter_query="issue.category:performance my_tag:1",
  2864. sort_by="date",
  2865. limit=1,
  2866. cursor=results.next,
  2867. count_hits=True,
  2868. )
  2869. assert list(results) == []
  2870. assert results.hits == 2
def test_perf_issue_search_message_term_queries_postgres(self):
    """A message term ("tea") must find a perf issue via Postgres group lookup,
    the raw Snuba issue-platform dataset, and the search backend itself."""
    from django.db.models import Q
    from sentry.utils import snuba

    transaction_name = "im a little tea pot"
    with (
        mock.patch(
            "sentry.issues.ingest.send_issue_occurrence_to_eventstream",
            side_effect=send_issue_occurrence_to_eventstream,
        ) as mock_eventstream,
        # Zero noise threshold so the single occurrence creates a group.
        mock.patch.object(
            PerformanceRenderBlockingAssetSpanGroupType,
            "noise_config",
            new=NoiseConfig(0, timedelta(minutes=1)),
        ),
    ):
        tx = self.store_event(
            data={
                "level": "info",
                "culprit": "app/components/events/eventEntries in map",
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
                "fingerprint": [
                    f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group12"
                ],
                "event_id": "e" * 32,
                "timestamp": iso_format(self.base_datetime),
                "start_timestamp": iso_format(self.base_datetime),
                "type": "transaction",
                "transaction": transaction_name,
            },
            project_id=self.project.id,
        )
        assert "tea" in tx.search_message
        # The created group is the third positional arg of the eventstream call.
        created_group = mock_eventstream.call_args[0][2].group

    # Postgres-side check: the group message is searchable via icontains.
    find_group = Group.objects.filter(
        Q(type=PerformanceRenderBlockingAssetSpanGroupType.type_id, message__icontains="tea")
    ).first()
    assert created_group == find_group

    with self.feature(
        [
            created_group.issue_type.build_visible_feature_name(),
        ]
    ):
        # Snuba-side check: the transaction row landed in the issue-platform dataset.
        result = snuba.raw_query(
            dataset=Dataset.IssuePlatform,
            start=self.base_datetime - timedelta(hours=1),
            end=self.base_datetime + timedelta(hours=1),
            selected_columns=[
                "event_id",
                "group_id",
                "transaction_name",
            ],
            groupby=None,
            filter_keys={"project_id": [self.project.id], "event_id": [tx.event_id]},
            referrer="_insert_transaction.verify_transaction",
        )
        assert result["data"][0]["transaction_name"] == transaction_name
        assert result["data"][0]["group_id"] == created_group.id

        # Backend check: the message term matches with and without the category filter.
        results = self.make_query(search_filter_query="issue.category:performance tea")
        assert set(results) == {created_group}

        results2 = self.make_query(search_filter_query="tea")
        assert set(results2) == {created_group}
    def test_search_message_error_and_perf_issues(self):
        """A message term that appears in both an error event's message and a
        performance issue's transaction name should return both groups.
        """
        with (
            mock.patch(
                "sentry.issues.ingest.send_issue_occurrence_to_eventstream",
                side_effect=send_issue_occurrence_to_eventstream,
            ) as mock_eventstream,
            # Zero out the noise config so a single occurrence is enough to
            # create a group.
            mock.patch.object(
                PerformanceRenderBlockingAssetSpanGroupType,
                "noise_config",
                new=NoiseConfig(0, timedelta(minutes=1)),
            ),
        ):
            self.store_event(
                data={
                    "level": "info",
                    "culprit": "app/components/events/eventEntries in map",
                    "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
                    "fingerprint": [
                        f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group12"
                    ],
                    "event_id": "e" * 32,
                    "timestamp": iso_format(self.base_datetime),
                    "start_timestamp": iso_format(self.base_datetime),
                    "type": "transaction",
                    "transaction": "/api/0/events",
                },
                project_id=self.project.id,
            )
            # Recover the created group from the mocked eventstream call.
            perf_issue = mock_eventstream.call_args[0][2].group
            assert perf_issue

        # Error event whose message contains the same "/api/0/events" term.
        error = self.store_event(
            data={
                "fingerprint": ["another-random-group"],
                "event_id": "d" * 32,
                "message": "Uncaught exception on api /api/0/events",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "event3@example.com"},
                "timestamp": iso_format(self.base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        )
        error_issue = error.group
        assert error_issue
        assert error_issue != perf_issue

        with self.feature(
            [
                perf_issue.issue_type.build_visible_feature_name(),
            ]
        ):
            # The bare term matches both groups, with or without an is:
            # qualifier in front of it.
            assert set(self.make_query(search_filter_query="is:unresolved /api/0/events")) == {
                perf_issue,
                error_issue,
            }
            assert set(self.make_query(search_filter_query="/api/0/events")) == {
                error_issue,
                perf_issue,
            }
  2990. def test_compound_message_negation(self):
  2991. self.store_event(
  2992. data={
  2993. "fingerprint": ["put-me-in-group1"],
  2994. "event_id": "2" * 32,
  2995. "message": "something",
  2996. "timestamp": iso_format(self.base_datetime),
  2997. },
  2998. project_id=self.project.id,
  2999. )
  3000. self.store_event(
  3001. data={
  3002. "level": "info",
  3003. "culprit": "app/components/events/eventEntries in map",
  3004. "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
  3005. "fingerprint": [f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group12"],
  3006. "event_id": "e" * 32,
  3007. "timestamp": iso_format(self.base_datetime),
  3008. "start_timestamp": iso_format(self.base_datetime),
  3009. "type": "transaction",
  3010. "transaction": "something",
  3011. },
  3012. project_id=self.project.id,
  3013. )
  3014. error_issues_only = self.make_query(
  3015. search_filter_query="!message:else group.category:error"
  3016. )
  3017. error_and_perf_issues = self.make_query(search_filter_query="!message:else")
  3018. assert set(error_and_perf_issues) > set(error_issues_only)
class EventsGenericSnubaSearchTest(TestCase, SharedSnubaMixin, OccurrenceTestMixin):
    """Search-backend tests that run against the generic (issue-platform)
    events dataset, exercising profile/generic occurrences alongside errors.
    """

    @property
    def backend(self):
        # A fresh backend per access; the dataset-specific backend class is
        # what distinguishes this test class from its siblings.
        return EventsDatasetSnubaSearchBackend()
    def setUp(self):
        """Build the shared fixture: two profile groups tagged my_tag:1, a
        third profile occurrence tagged my_tag:2, and two error groups (also
        my_tag:1) stored ~20 days before ``base_datetime``.
        """
        super().setUp()
        self.base_datetime = before_now(days=3)

        event_id_1 = uuid.uuid4().hex
        _, group_info = self.process_occurrence(
            event_id=event_id_1,
            project_id=self.project.id,
            issue_title="File I/O on Main Thread",
            event_data={
                "title": "some problem",
                "platform": "python",
                "tags": {"my_tag": "1"},
                "timestamp": before_now(minutes=1).isoformat(),
                "received": before_now(minutes=1).isoformat(),
            },
        )
        assert group_info is not None
        self.profile_group_1 = group_info.group

        event_id_2 = uuid.uuid4().hex
        _, group_info = self.process_occurrence(
            event_id=event_id_2,
            project_id=self.project.id,
            fingerprint=["put-me-in-group-2"],
            issue_title="File I/O on Main Thread",
            event_data={
                "title": "some other problem",
                "platform": "python",
                "tags": {"my_tag": "1"},
                "timestamp": before_now(minutes=2).isoformat(),
                "received": before_now(minutes=2).isoformat(),
            },
        )
        assert group_info is not None
        self.profile_group_2 = group_info.group

        # Third occurrence carries my_tag:2 so tag-filtered queries exclude
        # it; its group is deliberately not kept on the test instance.
        event_id_3 = uuid.uuid4().hex
        self.process_occurrence(
            event_id=event_id_3,
            project_id=self.project.id,
            fingerprint=["put-me-in-group-3"],
            event_data={
                "title": "some other problem",
                "platform": "python",
                "tags": {"my_tag": "2"},
                "timestamp": before_now(minutes=2).isoformat(),
                "message_timestamp": before_now(minutes=2).isoformat(),
            },
        )

        # Two error events share this payload (and my_tag:1) but use distinct
        # fingerprints/stacktraces so they land in separate groups.
        error_event_data = {
            "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            "message": "bar",
            "environment": "staging",
            "tags": {
                "server": "example.com",
                "url": "http://example.com",
                "sentry:user": "event2@example.com",
                "my_tag": 1,
            },
        }
        error_event = self.store_event(
            data={
                **error_event_data,
                "fingerprint": ["put-me-in-error_group_1"],
                "event_id": "c" * 32,
                "stacktrace": {"frames": [{"module": "error_group_1"}]},
            },
            project_id=self.project.id,
        )
        self.error_group_1 = error_event.group

        error_event_2 = self.store_event(
            data={
                **error_event_data,
                "fingerprint": ["put-me-in-error_group_2"],
                "event_id": "d" * 32,
                "stacktrace": {"frames": [{"module": "error_group_2"}]},
            },
            project_id=self.project.id,
        )
        self.error_group_2 = error_event_2.group
  3101. def test_no_feature(self):
  3102. results = self.make_query(search_filter_query="issue.category:performance my_tag:1")
  3103. assert list(results) == []
  3104. def test_generic_query(self):
  3105. with self.feature([ProfileFileIOGroupType.build_visible_feature_name()]):
  3106. results = self.make_query(search_filter_query="issue.category:performance my_tag:1")
  3107. assert list(results) == [self.profile_group_1, self.profile_group_2]
  3108. results = self.make_query(
  3109. search_filter_query="issue.type:profile_file_io_main_thread my_tag:1"
  3110. )
  3111. assert list(results) == [self.profile_group_1, self.profile_group_2]
  3112. def test_generic_query_message(self):
  3113. with self.feature([ProfileFileIOGroupType.build_visible_feature_name()]):
  3114. results = self.make_query(search_filter_query="File I/O")
  3115. assert list(results) == [self.profile_group_1, self.profile_group_2]
    def test_generic_query_perf(self):
        """Performance (N+1 query) occurrences become searchable by category
        once the type's visibility feature and performance-issues-search are
        both enabled.
        """
        event_id = uuid.uuid4().hex
        group_type = PerformanceNPlusOneGroupType

        # Zero out the noise config so a single occurrence is enough to
        # create a group.
        with mock.patch.object(
            PerformanceNPlusOneGroupType, "noise_config", new=NoiseConfig(0, timedelta(minutes=1))
        ):
            # Ingest needs its own feature flag, separate from visibility.
            with self.feature(group_type.build_ingest_feature_name()):
                _, group_info = self.process_occurrence(
                    event_id=event_id,
                    project_id=self.project.id,
                    type=group_type.type_id,
                    fingerprint=["some perf issue"],
                    event_data={
                        "title": "some problem",
                        "platform": "python",
                        "tags": {"my_tag": "2"},
                        "timestamp": before_now(minutes=1).isoformat(),
                        "received": before_now(minutes=1).isoformat(),
                    },
                )
                assert group_info is not None

        with self.feature(
            [
                group_type.build_visible_feature_name(),
                "organizations:performance-issues-search",
            ]
        ):
            results = self.make_query(search_filter_query="issue.category:performance my_tag:2")
            assert list(results) == [group_info.group]
  3145. def test_error_generic_query(self):
  3146. with self.feature([ProfileFileIOGroupType.build_visible_feature_name()]):
  3147. results = self.make_query(search_filter_query="my_tag:1")
  3148. assert list(results) == [
  3149. self.profile_group_1,
  3150. self.profile_group_2,
  3151. self.error_group_2,
  3152. self.error_group_1,
  3153. ]
  3154. results = self.make_query(
  3155. search_filter_query="issue.category:[performance, error] my_tag:1"
  3156. )
  3157. assert list(results) == [
  3158. self.profile_group_1,
  3159. self.profile_group_2,
  3160. self.error_group_2,
  3161. self.error_group_1,
  3162. ]
  3163. results = self.make_query(
  3164. search_filter_query="issue.type:[profile_file_io_main_thread, error] my_tag:1"
  3165. )
  3166. assert list(results) == [
  3167. self.profile_group_1,
  3168. self.profile_group_2,
  3169. self.error_group_2,
  3170. self.error_group_1,
  3171. ]
  3172. def test_cursor_profile_issues(self):
  3173. with self.feature([ProfileFileIOGroupType.build_visible_feature_name()]):
  3174. results = self.make_query(
  3175. projects=[self.project],
  3176. search_filter_query="issue.category:performance my_tag:1",
  3177. sort_by="date",
  3178. limit=1,
  3179. count_hits=True,
  3180. )
  3181. assert list(results) == [self.profile_group_1]
  3182. assert results.hits == 2
  3183. results = self.make_query(
  3184. projects=[self.project],
  3185. search_filter_query="issue.category:performance my_tag:1",
  3186. sort_by="date",
  3187. limit=1,
  3188. cursor=results.next,
  3189. count_hits=True,
  3190. )
  3191. assert list(results) == [self.profile_group_2]
  3192. assert results.hits == 2
  3193. results = self.make_query(
  3194. projects=[self.project],
  3195. search_filter_query="issue.category:performance my_tag:1",
  3196. sort_by="date",
  3197. limit=1,
  3198. cursor=results.next,
  3199. count_hits=True,
  3200. )
  3201. assert list(results) == []
  3202. assert results.hits == 2
  3203. def test_rejected_filters(self):
  3204. """
  3205. Any queries with `error.handled` or `error.unhandled` filters querying the search_issues dataset
  3206. should be rejected and return empty results.
  3207. """
  3208. with self.feature([ProfileFileIOGroupType.build_visible_feature_name()]):
  3209. results = self.make_query(
  3210. projects=[self.project],
  3211. search_filter_query="issue.category:performance error.unhandled:0",
  3212. sort_by="date",
  3213. limit=1,
  3214. count_hits=True,
  3215. )
  3216. results2 = self.make_query(
  3217. projects=[self.project],
  3218. search_filter_query="issue.category:performance error.unhandled:1",
  3219. sort_by="date",
  3220. limit=1,
  3221. count_hits=True,
  3222. )
  3223. result3 = self.make_query(
  3224. projects=[self.project],
  3225. search_filter_query="issue.category:performance error.handled:0",
  3226. sort_by="date",
  3227. limit=1,
  3228. count_hits=True,
  3229. )
  3230. results4 = self.make_query(
  3231. projects=[self.project],
  3232. search_filter_query="issue.category:performance error.handled:1",
  3233. sort_by="date",
  3234. limit=1,
  3235. count_hits=True,
  3236. )
  3237. results5 = self.make_query(
  3238. projects=[self.project],
  3239. search_filter_query="issue.category:performance error.main_thread:0",
  3240. sort_by="date",
  3241. limit=1,
  3242. count_hits=True,
  3243. )
  3244. results6 = self.make_query(
  3245. projects=[self.project],
  3246. search_filter_query="issue.category:performance error.main_thread:1",
  3247. sort_by="date",
  3248. limit=1,
  3249. count_hits=True,
  3250. )
  3251. assert (
  3252. list(results)
  3253. == list(results2)
  3254. == list(result3)
  3255. == list(results4)
  3256. == list(results5)
  3257. == list(results6)
  3258. == []
  3259. )
  3260. def test_feedback_category_hidden_default(self):
  3261. with self.feature([FeedbackGroup.build_visible_feature_name()]):
  3262. event_id_1 = uuid.uuid4().hex
  3263. self.process_occurrence(
  3264. **{
  3265. "project_id": self.project.id,
  3266. "event_id": event_id_1,
  3267. "fingerprint": ["c" * 32],
  3268. "issue_title": "User Feedback",
  3269. "type": FeedbackGroup.type_id,
  3270. "detection_time": datetime.now().timestamp(),
  3271. "level": "info",
  3272. },
  3273. event_data={
  3274. "platform": "python",
  3275. "timestamp": before_now(minutes=1).isoformat(),
  3276. "received": before_now(minutes=1).isoformat(),
  3277. },
  3278. )
  3279. results = self.make_query(
  3280. date_from=self.base_datetime,
  3281. date_to=self.base_datetime + timedelta(days=10),
  3282. )
  3283. assert set(results) == set()
    def test_feedback_category_show_when_filtered_on(self):
        """Feedback issues do appear when the query explicitly filters on the
        feedback category (with ingest and visibility features enabled).
        """
        with self.feature(
            [
                FeedbackGroup.build_visible_feature_name(),
                FeedbackGroup.build_ingest_feature_name(),
            ]
        ):
            event_id_1 = uuid.uuid4().hex
            _, group_info = self.process_occurrence(
                **{
                    "project_id": self.project.id,
                    "event_id": event_id_1,
                    "fingerprint": ["c" * 32],
                    "issue_title": "User Feedback",
                    "type": FeedbackGroup.type_id,
                    "detection_time": datetime.now().timestamp(),
                    "level": "info",
                },
                event_data={
                    "platform": "python",
                    "timestamp": before_now(minutes=1).isoformat(),
                    "received": before_now(minutes=1).isoformat(),
                },
            )
            # The explicit category filter opts feedback groups back in.
            results = self.make_query(
                search_filter_query="issue.category:feedback",
                date_from=self.base_datetime,
                date_to=self.base_datetime + timedelta(days=10),
            )
            assert group_info is not None
            assert list(results) == [group_info.group]