# test_backend.py
import uuid
from datetime import datetime, timedelta
from unittest import mock

import pytest
import pytz
from django.utils import timezone

from sentry import options
from sentry.api.issue_search import convert_query_values, issue_search_config, parse_search_query
from sentry.exceptions import InvalidSearchQuery
from sentry.issues.grouptype import (
    ErrorGroupType,
    NoiseConfig,
    PerformanceNPlusOneGroupType,
    PerformanceRenderBlockingAssetSpanGroupType,
    ProfileFileIOGroupType,
)
from sentry.issues.occurrence_consumer import process_event_and_issue_occurrence
from sentry.models import (
    Environment,
    Group,
    GroupAssignee,
    GroupBookmark,
    GroupEnvironment,
    GroupHistoryStatus,
    GroupStatus,
    GroupSubscription,
    Integration,
    record_group_history,
)
from sentry.models.groupowner import GroupOwner
from sentry.search.snuba.backend import (
    CdcEventsDatasetSnubaSearchBackend,
    EventsDatasetSnubaSearchBackend,
)
from sentry.search.snuba.executors import InvalidQueryForExecutor
from sentry.testutils import SnubaTestCase, TestCase, xfail_if_not_postgres
from sentry.testutils.helpers import Feature
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.types.group import GroupSubStatus
from sentry.utils.snuba import SENTRY_SNUBA_MAP, SnubaError
from tests.sentry.issues.test_utils import OccurrenceTestMixin


def date_to_query_format(date):
    return date.strftime("%Y-%m-%dT%H:%M:%S")
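
# Shared base for the Snuba-backed search tests: provides helpers for building
# search filters and issuing queries against self.backend (defined by subclasses).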
class SharedSnubaTest(TestCase, SnubaTestCase):
    def build_search_filter(self, query, projects=None, user=None, environments=None):
        user = user if user is not None else self.user
        projects = projects if projects is not None else [self.project]
        return convert_query_values(parse_search_query(query), projects, user, environments)

    def make_query(
        self,
        projects=None,
        search_filter_query=None,
        environments=None,
        sort_by="date",
        limit=None,
        count_hits=False,
        date_from=None,
        date_to=None,
        cursor=None,
    ):
        search_filters = []
        projects = projects if projects is not None else [self.project]
        if search_filter_query is not None:
            search_filters = self.build_search_filter(
                search_filter_query, projects, environments=environments
            )

        kwargs = {}
        if limit is not None:
            kwargs["limit"] = limit

        return self.backend.query(
            projects,
            search_filters=search_filters,
            environments=environments,
            count_hits=count_hits,
            sort_by=sort_by,
            date_from=date_from,
            date_to=date_to,
            cursor=cursor,
            **kwargs,
        )

    def store_event(self, data, *args, **kwargs):
        event = super().store_event(data, *args, **kwargs)
        environment_name = data.get("environment")
        if environment_name:
            GroupEnvironment.objects.filter(
                group_id=event.group_id,
                environment__name=environment_name,
                first_seen__gt=event.datetime,
            ).update(first_seen=event.datetime)
        return event
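
# Tests for the events-dataset search backend. setUp builds two groups:
# group1 (unresolved/ongoing, two events in "production") and group2
# (resolved, one event in "staging", bookmarked and assigned to self.user).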
class EventsSnubaSearchTest(SharedSnubaTest):
    @property
    def backend(self):
        return EventsDatasetSnubaSearchBackend()

    def setUp(self):
        super().setUp()
        self.base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc)

        event1_timestamp = iso_format(self.base_datetime - timedelta(days=21))
        self.event1 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "a" * 32,
                "message": "foo. Also, this message is intended to be greater than 256 characters so that we can put some unique string identifier after that point in the string. The purpose of this is in order to verify we are using snuba to search messages instead of Postgres (postgres truncates at 256 characters and clickhouse does not). santryrox.",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "event1@example.com"},
                "timestamp": event1_timestamp,
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        )
        self.event3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "c" * 32,
                "message": "group1",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "event3@example.com"},
                "timestamp": iso_format(self.base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        )

        self.group1 = Group.objects.get(id=self.event1.group.id)
        assert self.group1.id == self.event1.group.id
        assert self.group1.id == self.event3.group.id
        assert self.group1.first_seen == self.event1.datetime
        assert self.group1.last_seen == self.event3.datetime

        self.group1.times_seen = 5
        self.group1.status = GroupStatus.UNRESOLVED
        self.group1.substatus = GroupSubStatus.ONGOING
        self.group1.update(type=ErrorGroupType.type_id)
        self.group1.save()
        self.store_group(self.group1)

        self.event2 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "event_id": "b" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
                "message": "bar",
                "stacktrace": {"frames": [{"module": "group2"}]},
                "environment": "staging",
                "tags": {
                    "server": "example.com",
                    "url": "http://example.com",
                    "sentry:user": "event2@example.com",
                },
            },
            project_id=self.project.id,
        )

        self.group2 = Group.objects.get(id=self.event2.group.id)
        assert self.group2.id == self.event2.group.id
        assert self.group2.first_seen == self.group2.last_seen == self.event2.datetime

        self.group2.status = GroupStatus.RESOLVED
        self.group2.substatus = None
        self.group2.times_seen = 10
        self.group2.update(type=ErrorGroupType.type_id)
        self.group2.save()
        self.store_group(self.group2)

        GroupBookmark.objects.create(
            user_id=self.user.id, group=self.group2, project=self.group2.project
        )

        GroupAssignee.objects.create(
            user_id=self.user.id, group=self.group2, project=self.group2.project
        )

        GroupSubscription.objects.create(
            user_id=self.user.id, group=self.group1, project=self.group1.project, is_active=True
        )

        GroupSubscription.objects.create(
            user_id=self.user.id, group=self.group2, project=self.group2.project, is_active=False
        )

        self.environments = {
            "production": self.event1.get_environment(),
            "staging": self.event2.get_environment(),
        }
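
    # Helper used by the multi-project tests: creates a second project in the
    # same organization with its own group (group_p2).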
    def set_up_multi_project(self):
        self.project2 = self.create_project(organization=self.project.organization)
        self.event_p2 = self.store_event(
            data={
                "event_id": "a" * 32,
                "fingerprint": ["put-me-in-groupP2"],
                "timestamp": iso_format(self.base_datetime - timedelta(days=21)),
                "message": "foo",
                "stacktrace": {"frames": [{"module": "group_p2"}]},
                "tags": {"server": "example.com"},
                "environment": "production",
            },
            project_id=self.project2.id,
        )
        self.group_p2 = Group.objects.get(id=self.event_p2.group.id)
        self.group_p2.times_seen = 6
        self.group_p2.last_seen = self.base_datetime - timedelta(days=1)
        self.group_p2.save()
        self.store_group(self.group_p2)

    def create_group_with_integration_external_issue(self, environment="production"):
        event = self.store_event(
            data={
                "fingerprint": ["linked_group1"],
                "event_id": uuid.uuid4().hex,
                "timestamp": iso_format(self.base_datetime),
                "environment": environment,
            },
            project_id=self.project.id,
        )
        integration = Integration.objects.create(provider="example", name="Example")
        integration.add_organization(event.group.organization, self.user)
        self.create_integration_external_issue(
            group=event.group,
            integration=integration,
            key="APP-123",
        )
        return event.group

    def create_group_with_platform_external_issue(self, environment="production"):
        event = self.store_event(
            data={
                "fingerprint": ["linked_group2"],
                "event_id": uuid.uuid4().hex,
                "timestamp": iso_format(self.base_datetime),
                "environment": environment,
            },
            project_id=self.project.id,
        )
        self.create_platform_external_issue(
            group=event.group,
            service_type="sentry-app",
            display_name="App#issue-1",
            web_url="https://example.com/app/issues/1",
        )
        return event.group
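
    # Runs the given query and asserts the expected groups; when
    # expected_negative_groups is provided, also asserts that the negated query
    # ("!<query>") returns exactly those groups.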
    def run_test_query_in_syntax(
        self, query, expected_groups, expected_negative_groups=None, environments=None
    ):
        results = self.make_query(search_filter_query=query, environments=environments)
        sort_key = lambda result: result.id
        assert sorted(results, key=sort_key) == sorted(expected_groups, key=sort_key)

        if expected_negative_groups is not None:
            results = self.make_query(search_filter_query=f"!{query}")
            assert sorted(results, key=sort_key) == sorted(expected_negative_groups, key=sort_key)

    def test_query(self):
        results = self.make_query(search_filter_query="foo")
        assert set(results) == {self.group1}

        results = self.make_query(search_filter_query="bar")
        assert set(results) == {self.group2}

    def test_query_multi_project(self):
        self.set_up_multi_project()
        results = self.make_query([self.project, self.project2], search_filter_query="foo")
        assert set(results) == {self.group1, self.group_p2}

    def test_query_with_environment(self):
        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="foo"
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="bar"
        )
        assert set(results) == set()

        results = self.make_query(
            environments=[self.environments["staging"]], search_filter_query="bar"
        )
        assert set(results) == {self.group2}

    def test_query_for_text_in_long_message(self):
        results = self.make_query(
            [self.project],
            environments=[self.environments["production"]],
            search_filter_query="santryrox",
        )
        assert set(results) == {self.group1}

    def test_multi_environments(self):
        self.set_up_multi_project()
        results = self.make_query(
            [self.project, self.project2],
            environments=[self.environments["production"], self.environments["staging"]],
        )
        assert set(results) == {self.group1, self.group2, self.group_p2}

    def test_query_with_environment_multi_project(self):
        self.set_up_multi_project()
        results = self.make_query(
            [self.project, self.project2],
            environments=[self.environments["production"]],
            search_filter_query="foo",
        )
        assert set(results) == {self.group1, self.group_p2}

        results = self.make_query(
            [self.project, self.project2],
            environments=[self.environments["production"]],
            search_filter_query="bar",
        )
        assert set(results) == set()

    def test_query_timestamp(self):
        results = self.make_query(
            [self.project],
            environments=[self.environments["production"]],
            search_filter_query=f"timestamp:>{iso_format(self.event1.datetime)} timestamp:<{iso_format(self.event3.datetime)}",
        )
        assert set(results) == {self.group1}

    def test_sort(self):
        results = self.make_query(sort_by="date")
        assert list(results) == [self.group1, self.group2]

        results = self.make_query(sort_by="new")
        assert list(results) == [self.group2, self.group1]

        results = self.make_query(sort_by="freq")
        assert list(results) == [self.group1, self.group2]

        results = self.make_query(sort_by="priority")
        assert list(results) == [self.group1, self.group2]

        results = self.make_query(sort_by="user")
        assert list(results) == [self.group1, self.group2]

    def test_sort_with_environment(self):
        for dt in [
            self.group1.first_seen + timedelta(days=1),
            self.group1.first_seen + timedelta(days=2),
            self.group1.last_seen + timedelta(days=1),
        ]:
            self.store_event(
                data={
                    "fingerprint": ["put-me-in-group2"],
                    "timestamp": iso_format(dt),
                    "stacktrace": {"frames": [{"module": "group2"}]},
                    "environment": "production",
                    "message": "group2",
                },
                project_id=self.project.id,
            )

        results = self.make_query(environments=[self.environments["production"]], sort_by="date")
        assert list(results) == [self.group2, self.group1]

        results = self.make_query(environments=[self.environments["production"]], sort_by="new")
        assert list(results) == [self.group2, self.group1]

        results = self.make_query(environments=[self.environments["production"]], sort_by="freq")
        assert list(results) == [self.group2, self.group1]

        results = self.make_query(
            environments=[self.environments["production"]], sort_by="priority"
        )
        assert list(results) == [self.group2, self.group1]

        results = self.make_query(environments=[self.environments["production"]], sort_by="user")
        assert list(results) == [self.group1, self.group2]

    def test_status(self):
        results = self.make_query(search_filter_query="is:unresolved")
        assert set(results) == {self.group1}

        results = self.make_query(search_filter_query="is:resolved")
        assert set(results) == {self.group2}

        event_3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group3"],
                "event_id": "c" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            },
            project_id=self.project.id,
        )
        group_3 = event_3.group
        group_3.status = GroupStatus.MUTED
        group_3.substatus = None
        group_3.save()

        self.run_test_query_in_syntax(
            "status:[unresolved, resolved]", [self.group1, self.group2], [group_3]
        )
        self.run_test_query_in_syntax(
            "status:[resolved, muted]", [self.group2, group_3], [self.group1]
        )

    def test_substatus(self):
        with Feature("organizations:issue-states"):
            results = self.make_query(search_filter_query="is:ongoing")
            assert set(results) == {self.group1}

        with pytest.raises(
            InvalidSearchQuery, match="The substatus filter is not supported for this organization"
        ):
            self.make_query(search_filter_query="is:ongoing")

    def test_category(self):
        results = self.make_query(search_filter_query="issue.category:error")
        assert set(results) == {self.group1, self.group2}

        event_3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group3"],
                "event_id": "c" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            },
            project_id=self.project.id,
        )
        group_3 = event_3.group
        group_3.update(type=PerformanceNPlusOneGroupType.type_id)
        results = self.make_query(search_filter_query="issue.category:performance")
        assert set(results) == {group_3}

        results = self.make_query(search_filter_query="issue.category:[error, performance]")
        assert set(results) == {self.group1, self.group2, group_3}

        with pytest.raises(InvalidSearchQuery):
            self.make_query(search_filter_query="issue.category:hellboy")

    def test_not_perf_category(self):
        results = self.make_query(search_filter_query="issue.category:error foo")
        assert set(results) == {self.group1}

        not_results = self.make_query(search_filter_query="!issue.category:performance foo")
        assert set(not_results) == {self.group1}

    def test_type(self):
        results = self.make_query(search_filter_query="issue.type:error")
        assert set(results) == {self.group1, self.group2}

        event_3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group3"],
                "event_id": "c" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
                "type": PerformanceNPlusOneGroupType.type_id,
            },
            project_id=self.project.id,
        )
        group_3 = event_3.group
        group_3.update(type=PerformanceNPlusOneGroupType.type_id)
        results = self.make_query(
            search_filter_query="issue.type:performance_n_plus_one_db_queries"
        )
        assert set(results) == {group_3}

        event_4 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group4"],
                "event_id": "d" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            },
            project_id=self.project.id,
        )
        group_4 = event_4.group
        group_4.update(type=PerformanceRenderBlockingAssetSpanGroupType.type_id)
        results = self.make_query(
            search_filter_query="issue.type:performance_render_blocking_asset_span"
        )
        assert set(results) == {group_4}

        results = self.make_query(
            search_filter_query="issue.type:[performance_render_blocking_asset_span, performance_n_plus_one_db_queries, error]"
        )
        assert set(results) == {self.group1, self.group2, group_3, group_4}

        with pytest.raises(InvalidSearchQuery):
            self.make_query(search_filter_query="issue.type:performance_i_dont_exist")

    def test_status_with_environment(self):
        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="is:unresolved"
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["staging"]], search_filter_query="is:resolved"
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="is:resolved"
        )
        assert set(results) == set()

    def test_tags(self):
        results = self.make_query(search_filter_query="environment:staging")
        assert set(results) == {self.group2}

        results = self.make_query(search_filter_query="environment:example.com")
        assert set(results) == set()

        results = self.make_query(search_filter_query="has:environment")
        assert set(results) == {self.group2, self.group1}

        results = self.make_query(search_filter_query="environment:staging server:example.com")
        assert set(results) == {self.group2}

        results = self.make_query(search_filter_query='url:"http://example.com"')
        assert set(results) == {self.group2}

        results = self.make_query(search_filter_query="environment:staging has:server")
        assert set(results) == {self.group2}

        results = self.make_query(search_filter_query="environment:staging server:bar.example.com")
        assert set(results) == set()

    def test_tags_with_environment(self):
        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="server:example.com"
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["staging"]], search_filter_query="server:example.com"
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["staging"]], search_filter_query="has:server"
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query='url:"http://example.com"',
        )
        assert set(results) == set()

        results = self.make_query(
            environments=[self.environments["staging"]],
            search_filter_query='url:"http://example.com"',
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["staging"]],
            search_filter_query="server:bar.example.com",
        )
        assert set(results) == set()

    def test_bookmarked_by(self):
        results = self.make_query(search_filter_query="bookmarks:%s" % self.user.username)
        assert set(results) == {self.group2}

    def test_bookmarked_by_in_syntax(self):
        self.run_test_query_in_syntax(
            f"bookmarks:[{self.user.username}]", [self.group2], [self.group1]
        )

        user_2 = self.create_user()
        GroupBookmark.objects.create(
            user_id=user_2.id, group=self.group1, project=self.group2.project
        )
        self.run_test_query_in_syntax(
            f"bookmarks:[{self.user.username}, {user_2.username}]", [self.group2, self.group1], []
        )

    def test_bookmarked_by_with_environment(self):
        results = self.make_query(
            environments=[self.environments["staging"]],
            search_filter_query="bookmarks:%s" % self.user.username,
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="bookmarks:%s" % self.user.username,
        )
        assert set(results) == set()

    def test_search_filter_query_with_custom_priority_tag(self):
        priority = "high"
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.group2.first_seen + timedelta(days=1)),
                "stacktrace": {"frames": [{"module": "group2"}]},
                "message": "group2",
                "tags": {"priority": priority},
            },
            project_id=self.project.id,
        )

        results = self.make_query(search_filter_query="priority:%s" % priority)
        assert set(results) == {self.group2}

    def test_search_filter_query_with_custom_priority_tag_and_priority_sort(self):
        priority = "high"
        for i in range(1, 3):
            self.store_event(
                data={
                    "fingerprint": ["put-me-in-group1"],
                    "timestamp": iso_format(self.group2.last_seen + timedelta(days=i)),
                    "stacktrace": {"frames": [{"module": "group1"}]},
                    "message": "group1",
                    "tags": {"priority": priority},
                },
                project_id=self.project.id,
            )

        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.group2.last_seen + timedelta(days=2)),
                "stacktrace": {"frames": [{"module": "group2"}]},
                "message": "group2",
                "tags": {"priority": priority},
            },
            project_id=self.project.id,
        )

        results = self.make_query(search_filter_query="priority:%s" % priority, sort_by="priority")
        assert list(results) == [self.group1, self.group2]

    def test_search_tag_overlapping_with_internal_fields(self):
        # Using a tag of email overlaps with the promoted user.email column in events.
        # We don't want to bypass public schema limits in issue search.
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.group2.first_seen + timedelta(days=1)),
                "stacktrace": {"frames": [{"module": "group2"}]},
                "message": "group2",
                "tags": {"email": "tags@example.com"},
            },
            project_id=self.project.id,
        )

        results = self.make_query(search_filter_query="email:tags@example.com")
        assert set(results) == {self.group2}

    def test_project(self):
        results = self.make_query([self.create_project(name="other")])
        assert set(results) == set()
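
    # Walks the date-sorted results forwards and backwards one item at a time,
    # checking cursor state at every step, under both settings of
    # "snuba.search.min-pre-snuba-candidates" configured below.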
    def test_pagination(self):
        for options_set in [
            {"snuba.search.min-pre-snuba-candidates": None},
            {"snuba.search.min-pre-snuba-candidates": 500},
        ]:
            with self.options(options_set):
                results = self.backend.query([self.project], limit=1, sort_by="date")
                assert set(results) == {self.group1}
                assert not results.prev.has_results
                assert results.next.has_results

                results = self.backend.query(
                    [self.project], cursor=results.next, limit=1, sort_by="date"
                )
                assert set(results) == {self.group2}
                assert results.prev.has_results
                assert not results.next.has_results

                # note: previous cursor
                results = self.backend.query(
                    [self.project], cursor=results.prev, limit=1, sort_by="date"
                )
                assert set(results) == {self.group1}
                assert results.prev.has_results
                assert results.next.has_results

                # note: previous cursor, paging too far into 0 results
                results = self.backend.query(
                    [self.project], cursor=results.prev, limit=1, sort_by="date"
                )
                assert set(results) == set()
                assert not results.prev.has_results
                assert results.next.has_results

                results = self.backend.query(
                    [self.project], cursor=results.next, limit=1, sort_by="date"
                )
                assert set(results) == {self.group1}
                assert results.prev.has_results
                assert results.next.has_results

                results = self.backend.query(
                    [self.project], cursor=results.next, limit=1, sort_by="date"
                )
                assert set(results) == {self.group2}
                assert results.prev.has_results
                assert not results.next.has_results

                results = self.backend.query(
                    [self.project], cursor=results.next, limit=1, sort_by="date"
                )
                assert set(results) == set()
                assert results.prev.has_results
                assert not results.next.has_results

    def test_pagination_with_environment(self):
        for dt in [
            self.group1.first_seen + timedelta(days=1),
            self.group1.first_seen + timedelta(days=2),
            self.group1.last_seen + timedelta(days=1),
        ]:
            self.store_event(
                data={
                    "fingerprint": ["put-me-in-group2"],
                    "timestamp": iso_format(dt),
                    "environment": "production",
                    "message": "group2",
                    "stacktrace": {"frames": [{"module": "group2"}]},
                },
                project_id=self.project.id,
            )

        results = self.backend.query(
            [self.project],
            environments=[self.environments["production"]],
            sort_by="date",
            limit=1,
            count_hits=True,
        )
        assert list(results) == [self.group2]
        assert results.hits == 2

        results = self.backend.query(
            [self.project],
            environments=[self.environments["production"]],
            sort_by="date",
            limit=1,
            cursor=results.next,
            count_hits=True,
        )
        assert list(results) == [self.group1]
        assert results.hits == 2

        results = self.backend.query(
            [self.project],
            environments=[self.environments["production"]],
            sort_by="date",
            limit=1,
            cursor=results.next,
            count_hits=True,
        )
        assert list(results) == []
        assert results.hits == 2
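
    # The firstSeen/lastSeen filters below compare against the group-level
    # first_seen/last_seen; the environment-scoped variants use the per-environment
    # values tracked on GroupEnvironment.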
    def test_age_filter(self):
        results = self.make_query(
            search_filter_query="firstSeen:>=%s" % date_to_query_format(self.group2.first_seen)
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            search_filter_query="firstSeen:<=%s"
            % date_to_query_format(self.group1.first_seen + timedelta(minutes=1))
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            search_filter_query="firstSeen:>=%s firstSeen:<=%s"
            % (
                date_to_query_format(self.group1.first_seen),
                date_to_query_format(self.group1.first_seen + timedelta(minutes=1)),
            )
        )
        assert set(results) == {self.group1}

    def test_age_filter_with_environment(self):
        # add time instead to make it greater than or less than as needed.
        group1_first_seen = GroupEnvironment.objects.get(
            environment=self.environments["production"], group=self.group1
        ).first_seen

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="firstSeen:>=%s" % date_to_query_format(group1_first_seen),
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="firstSeen:<=%s" % date_to_query_format(group1_first_seen),
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="firstSeen:>%s" % date_to_query_format(group1_first_seen),
        )
        assert set(results) == set()

        self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "timestamp": iso_format(group1_first_seen + timedelta(days=1)),
                "message": "group1",
                "stacktrace": {"frames": [{"module": "group1"}]},
                "environment": "development",
            },
            project_id=self.project.id,
        )

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="firstSeen:>%s" % date_to_query_format(group1_first_seen),
        )
        assert set(results) == set()

        results = self.make_query(
            environments=[Environment.objects.get(name="development")],
            search_filter_query="firstSeen:>%s" % date_to_query_format(group1_first_seen),
        )
        assert set(results) == {self.group1}

    def test_times_seen_filter(self):
        results = self.make_query([self.project], search_filter_query="times_seen:2")
        assert set(results) == {self.group1}

        results = self.make_query([self.project], search_filter_query="times_seen:>=2")
        assert set(results) == {self.group1}

        results = self.make_query([self.project], search_filter_query="times_seen:<=1")
        assert set(results) == {self.group2}

    def test_last_seen_filter(self):
        results = self.make_query(
            search_filter_query="lastSeen:>=%s" % date_to_query_format(self.group1.last_seen)
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            search_filter_query="lastSeen:>=%s lastSeen:<=%s"
            % (
                date_to_query_format(self.group1.last_seen),
                date_to_query_format(self.group1.last_seen + timedelta(minutes=1)),
            )
        )
        assert set(results) == {self.group1}

    def test_last_seen_filter_with_environment(self):
        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="lastSeen:>=%s" % date_to_query_format(self.group1.last_seen),
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="lastSeen:<=%s" % date_to_query_format(self.group1.last_seen),
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="lastSeen:>%s" % date_to_query_format(self.group1.last_seen),
        )
        assert set(results) == set()

        self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "timestamp": iso_format(self.group1.last_seen + timedelta(days=1)),
                "message": "group1",
                "stacktrace": {"frames": [{"module": "group1"}]},
                "environment": "development",
            },
            project_id=self.project.id,
        )

        self.group1.update(last_seen=self.group1.last_seen + timedelta(days=1))

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="lastSeen:>%s" % date_to_query_format(self.group1.last_seen),
        )
        assert set(results) == set()

        results = self.make_query(
            environments=[Environment.objects.get(name="development")],
            search_filter_query="lastSeen:>%s" % date_to_query_format(self.group1.last_seen),
        )
        assert set(results) == set()

        results = self.make_query(
            environments=[Environment.objects.get(name="development")],
            search_filter_query="lastSeen:>=%s" % date_to_query_format(self.group1.last_seen),
        )
        assert set(results) == {self.group1}

    def test_date_filter(self):
        results = self.make_query(
            date_from=self.event2.datetime,
            search_filter_query="timestamp:>=%s" % date_to_query_format(self.event2.datetime),
        )
        assert set(results) == {self.group1, self.group2}

        results = self.make_query(
            date_to=self.event1.datetime + timedelta(minutes=1),
            search_filter_query="timestamp:<=%s"
            % date_to_query_format(self.event1.datetime + timedelta(minutes=1)),
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            date_from=self.event1.datetime,
            date_to=self.event2.datetime + timedelta(minutes=1),
            search_filter_query="timestamp:>=%s timestamp:<=%s"
            % (
                date_to_query_format(self.event1.datetime),
                date_to_query_format(self.event2.datetime + timedelta(minutes=1)),
            ),
        )
        assert set(results) == {self.group1, self.group2}

        # Test with `Z` utc marker, should be equivalent
        results = self.make_query(
            date_from=self.event1.datetime,
            date_to=self.event2.datetime + timedelta(minutes=1),
            search_filter_query="timestamp:>=%s timestamp:<=%s"
            % (
                date_to_query_format(self.event1.datetime) + "Z",
                date_to_query_format(self.event2.datetime + timedelta(minutes=1)) + "Z",
            ),
        )
        assert set(results) == {self.group1, self.group2}

    def test_date_filter_with_environment(self):
        results = self.backend.query(
            [self.project],
            environments=[self.environments["production"]],
            date_from=self.event2.datetime,
        )
        assert set(results) == {self.group1}

        results = self.backend.query(
            [self.project],
            environments=[self.environments["production"]],
            date_to=self.event1.datetime + timedelta(minutes=1),
        )
        assert set(results) == {self.group1}

        results = self.backend.query(
            [self.project],
            environments=[self.environments["staging"]],
            date_from=self.event1.datetime,
            date_to=self.event2.datetime + timedelta(minutes=1),
        )
        assert set(results) == {self.group2}

    def test_linked(self):
        linked_group1 = self.create_group_with_integration_external_issue()
        linked_group2 = self.create_group_with_platform_external_issue()

        results = self.make_query(search_filter_query="is:unlinked")
        assert set(results) == {self.group1, self.group2}

        results = self.make_query(search_filter_query="is:linked")
        assert set(results) == {linked_group1, linked_group2}

    def test_linked_with_only_integration_external_issue(self):
        linked_group = self.create_group_with_integration_external_issue()

        results = self.make_query(search_filter_query="is:unlinked")
        assert set(results) == {self.group1, self.group2}

        results = self.make_query(search_filter_query="is:linked")
        assert set(results) == {linked_group}

    def test_linked_with_only_platform_external_issue(self):
        linked_group = self.create_group_with_platform_external_issue()

        results = self.make_query(search_filter_query="is:unlinked")
        assert set(results) == {self.group1, self.group2}

        results = self.make_query(search_filter_query="is:linked")
        assert set(results) == {linked_group}

    def test_linked_with_environment(self):
        linked_group1 = self.create_group_with_integration_external_issue(environment="production")
        linked_group2 = self.create_group_with_platform_external_issue(environment="staging")

        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="is:unlinked"
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["staging"]], search_filter_query="is:unlinked"
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="is:linked"
        )
        assert set(results) == {linked_group1}

        results = self.make_query(
            environments=[self.environments["staging"]], search_filter_query="is:linked"
        )
        assert set(results) == {linked_group2}

    def test_unassigned(self):
        results = self.make_query(search_filter_query="is:unassigned")
        assert set(results) == {self.group1}

        results = self.make_query(search_filter_query="is:assigned")
        assert set(results) == {self.group2}

    def test_unassigned_with_environment(self):
        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="is:unassigned"
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["staging"]], search_filter_query="is:assigned"
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="is:assigned"
        )
        assert set(results) == set()

    def test_assigned_to(self):
        results = self.make_query(search_filter_query="assigned:%s" % self.user.username)
        assert set(results) == {self.group2}

        # test team assignee
        ga = GroupAssignee.objects.get(
            user_id=self.user.id, group=self.group2, project=self.group2.project
        )
        ga.update(team=self.team, user_id=None)
        assert GroupAssignee.objects.get(id=ga.id).user_id is None

        results = self.make_query(search_filter_query="assigned:%s" % self.user.username)
        assert set(results) == {self.group2}

        # test when there should be no results
        other_user = self.create_user()
        results = self.make_query(search_filter_query="assigned:%s" % other_user.username)
        assert set(results) == set()

        owner = self.create_user()
        self.create_member(
            organization=self.project.organization, user=owner, role="owner", teams=[]
        )

        # test that owners don't see results for all teams
        results = self.make_query(search_filter_query="assigned:%s" % owner.username)
        assert set(results) == set()

    def test_assigned_to_in_syntax(self):
        group_3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group3"],
                "event_id": "c" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            },
            project_id=self.project.id,
        ).group
        group_3.status = GroupStatus.MUTED
        group_3.substatus = None
        group_3.save()
        other_user = self.create_user()

        self.run_test_query_in_syntax(
            f"assigned:[{self.user.username}, {other_user.username}]",
            [self.group2],
            [self.group1, group_3],
        )

        GroupAssignee.objects.create(project=self.project, group=group_3, user_id=other_user.id)
        self.run_test_query_in_syntax(
            f"assigned:[{self.user.username}, {other_user.username}]",
            [self.group2, group_3],
            [self.group1],
        )

        self.run_test_query_in_syntax(
            f"assigned:[#{self.team.slug}, {other_user.username}]",
            [group_3],
            [self.group1, self.group2],
        )

        ga_2 = GroupAssignee.objects.get(
            user_id=self.user.id, group=self.group2, project=self.group2.project
        )
        ga_2.update(team=self.team, user_id=None)
        self.run_test_query_in_syntax(
            f"assigned:[{self.user.username}, {other_user.username}]",
            [self.group2, group_3],
            [self.group1],
        )
        self.run_test_query_in_syntax(
            f"assigned:[#{self.team.slug}, {other_user.username}]",
            [self.group2, group_3],
            [self.group1],
        )
        self.run_test_query_in_syntax(
            f"assigned:[me, none, {other_user.username}]",
            [self.group1, self.group2, group_3],
            [],
        )
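
    # assigned_or_suggested matches both explicit assignees (GroupAssignee) and
    # suggested owners (GroupOwner); once a group is assigned to someone else,
    # suggestions for other users no longer match, as exercised with
    # assigned_to_other_group below.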
    def test_assigned_or_suggested_in_syntax(self):
        Group.objects.all().delete()
        group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=180)),
                "fingerprint": ["group-1"],
            },
            project_id=self.project.id,
        ).group
        group1 = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=185)),
                "fingerprint": ["group-2"],
            },
            project_id=self.project.id,
        ).group
        group2 = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=190)),
                "fingerprint": ["group-3"],
            },
            project_id=self.project.id,
        ).group
        assigned_group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=195)),
                "fingerprint": ["group-4"],
            },
            project_id=self.project.id,
        ).group
        assigned_to_other_group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=195)),
                "fingerprint": ["group-5"],
            },
            project_id=self.project.id,
        ).group

        self.run_test_query_in_syntax(
            "assigned_or_suggested:[me]",
            [],
            [group, group1, group2, assigned_group, assigned_to_other_group],
        )

        GroupOwner.objects.create(
            group=assigned_to_other_group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=self.user.id,
        )
        GroupOwner.objects.create(
            group=group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=self.user.id,
        )
        self.run_test_query_in_syntax(
            "assigned_or_suggested:[me]",
            [group, assigned_to_other_group],
            [group1, group2, assigned_group],
        )

        # Once assigned_to_other_group is assigned to other_user (below), it should
        # not show up in an assigned_or_suggested search for anyone but other_user,
        # since the assignee is now the only owner.
        other_user = self.create_user("other@user.com", is_superuser=False)
        GroupAssignee.objects.create(
            group=assigned_to_other_group,
            project=self.project,
            user_id=other_user.id,
        )
        self.run_test_query_in_syntax(
            "assigned_or_suggested:[me]",
            [group],
            [group1, group2, assigned_group, assigned_to_other_group],
        )

        self.run_test_query_in_syntax(
            f"assigned_or_suggested:[{other_user.email}]",
            [assigned_to_other_group],
            [group, group1, group2, assigned_group],
        )

        GroupAssignee.objects.create(
            group=assigned_group, project=self.project, user_id=self.user.id
        )
        self.run_test_query_in_syntax(
            f"assigned_or_suggested:[{self.user.email}]",
            [assigned_group, group],
        )

        GroupOwner.objects.create(
            group=group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=self.team.id,
            user_id=None,
        )
        self.run_test_query_in_syntax(
            f"assigned_or_suggested:[#{self.team.slug}]",
            [group],
        )

        self.run_test_query_in_syntax(
            "assigned_or_suggested:[me, none]",
            [group, group1, group2, assigned_group],
            [assigned_to_other_group],
        )

        not_me = self.create_user(email="notme@sentry.io")
        GroupOwner.objects.create(
            group=group2,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=not_me.id,
        )
        self.run_test_query_in_syntax(
            "assigned_or_suggested:[me, none]",
            [group, group1, assigned_group],
            [assigned_to_other_group, group2],
        )

        GroupOwner.objects.filter(group=group, user_id=self.user.id).delete()
        self.run_test_query_in_syntax(
            f"assigned_or_suggested:[me, none, #{self.team.slug}]",
            [group, group1, assigned_group],
            [assigned_to_other_group, group2],
        )
        self.run_test_query_in_syntax(
            f"assigned_or_suggested:[me, none, #{self.team.slug}, {not_me.email}]",
            [group, group1, assigned_group, group2],
            [assigned_to_other_group],
        )
  1086. def test_assigned_to_with_environment(self):
  1087. results = self.make_query(
  1088. environments=[self.environments["staging"]],
  1089. search_filter_query="assigned:%s" % self.user.username,
  1090. )
  1091. assert set(results) == {self.group2}
  1092. results = self.make_query(
  1093. environments=[self.environments["production"]],
  1094. search_filter_query="assigned:%s" % self.user.username,
  1095. )
  1096. assert set(results) == set()
  1097. def test_subscribed_by(self):
  1098. results = self.make_query(
  1099. [self.group1.project], search_filter_query="subscribed:%s" % self.user.username
  1100. )
  1101. assert set(results) == {self.group1}
  1102. def test_subscribed_by_in_syntax(self):
  1103. self.run_test_query_in_syntax(
  1104. f"subscribed:[{self.user.username}]", [self.group1], [self.group2]
  1105. )
  1106. user_2 = self.create_user()
  1107. GroupSubscription.objects.create(
  1108. user_id=user_2.id, group=self.group2, project=self.project, is_active=True
  1109. )
  1110. self.run_test_query_in_syntax(
  1111. f"subscribed:[{self.user.username}, {user_2.username}]", [self.group1, self.group2], []
  1112. )
    def test_subscribed_by_with_environment(self):
        results = self.make_query(
            [self.group1.project],
            environments=[self.environments["production"]],
            search_filter_query="subscribed:%s" % self.user.username,
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            [self.group1.project],
            environments=[self.environments["staging"]],
            search_filter_query="subscribed:%s" % self.user.username,
        )
        assert set(results) == set()
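    # A Postgres-only filter like `status:unresolved` should be answered without calling Snuba,
    # while a Snuba-backed filter like `last_seen:` must hit it.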
    @mock.patch("sentry.search.snuba.executors.bulk_raw_query")
    def test_snuba_not_called_optimization(self, query_mock):
        assert self.make_query(search_filter_query="status:unresolved").results == [self.group1]
        assert not query_mock.called

        assert (
            self.make_query(
                search_filter_query="last_seen:>%s" % date_to_query_format(timezone.now()),
                sort_by="date",
            ).results
            == []
        )
        assert query_mock.called
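    # Snuba can return `None` for totals/data in bulk results; the executor should reduce these
    # to empty results instead of raising.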
    @mock.patch("sentry.search.snuba.executors.bulk_raw_query")
    def test_reduce_bulk_results_none_total(self, bulk_raw_query_mock):
        bulk_raw_query_mock.return_value = [
            {"data": [], "totals": {"total": None}},
            {"data": [], "totals": {"total": None}},
        ]

        assert (
            self.make_query(
                search_filter_query="last_seen:>%s" % date_to_query_format(timezone.now()),
                sort_by="date",
            ).results
            == []
        )
        assert bulk_raw_query_mock.called

    @mock.patch("sentry.search.snuba.executors.bulk_raw_query")
    def test_reduce_bulk_results_none_data(self, bulk_raw_query_mock):
        bulk_raw_query_mock.return_value = [
            {"data": None, "totals": {"total": 0}},
            {"data": None, "totals": {"total": 0}},
        ]

        assert (
            self.make_query(
                search_filter_query="last_seen:>%s" % date_to_query_format(timezone.now()),
                sort_by="date",
            ).results
            == []
        )
        assert bulk_raw_query_mock.called
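    # Drop the pre-Snuba candidate limit to 1 so that both the pre-filter and the post-filter
    # code paths are exercised by a single test.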
    def test_pre_and_post_filtering(self):
        prev_max_pre = options.get("snuba.search.max-pre-snuba-candidates")
        options.set("snuba.search.max-pre-snuba-candidates", 1)
        try:
            # normal queries work as expected
            results = self.make_query(search_filter_query="foo")
            assert set(results) == {self.group1}
            results = self.make_query(search_filter_query="bar")
            assert set(results) == {self.group2}

            # no candidate matches in Sentry, so an empty paginator is returned immediately
            results = self.make_query(search_filter_query="NO MATCHES IN SENTRY")
            assert set(results) == set()

            # too many candidates: the pre-filter is skipped, which then requires more than one
            # post-filter query
            results = self.make_query()
            assert set(results) == {self.group1, self.group2}
        finally:
            options.set("snuba.search.max-pre-snuba-candidates", prev_max_pre)

    def test_optimizer_enabled(self):
        prev_optimizer_enabled = options.get("snuba.search.pre-snuba-candidates-optimizer")
        options.set("snuba.search.pre-snuba-candidates-optimizer", True)
        try:
            results = self.make_query(
                search_filter_query="server:example.com",
                environments=[self.environments["production"]],
            )
            assert set(results) == {self.group1}
        finally:
            options.set("snuba.search.pre-snuba-candidates-optimizer", prev_optimizer_enabled)
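    # A window requiring `event.timestamp` to be both greater than and less than the same instant
    # can never match, so no groups should be returned.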
    def test_search_out_of_range(self):
        the_date = datetime(2000, 1, 1, 0, 0, 0, tzinfo=pytz.utc)
        results = self.make_query(
            search_filter_query=f"event.timestamp:>{the_date} event.timestamp:<{the_date}",
            date_from=the_date,
            date_to=the_date,
        )
        assert set(results) == set()
    def test_regressed_in_release(self):
        # expect no groups within the results since there are no releases
        results = self.make_query(search_filter_query="regressed_in_release:fake")
        assert set(results) == set()

        # expect no groups even though there is a release, since no group regressed in this release
        release_1 = self.create_release()
        results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
        assert set(results) == set()

        # Create a new event so that we get a group in this release
        group = self.store_event(
            data={
                "release": release_1.version,
            },
            project_id=self.project.id,
        ).group

        # Should still be no group since we didn't regress in this release
        results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
        assert set(results) == set()

        record_group_history(group, GroupHistoryStatus.REGRESSED, release=release_1)
        results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
        assert set(results) == {group}

        # Make sure this works correctly with multiple releases
        release_2 = self.create_release()
        group_2 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group9001"],
                "event_id": "a" * 32,
                "release": release_2.version,
            },
            project_id=self.project.id,
        ).group
        record_group_history(group_2, GroupHistoryStatus.REGRESSED, release=release_2)

        results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
        assert set(results) == {group}
        results = self.make_query(search_filter_query="regressed_in_release:%s" % release_2.version)
        assert set(results) == {group_2}
  1238. def test_first_release(self):
  1239. # expect no groups within the results since there are no releases
  1240. results = self.make_query(search_filter_query="first_release:%s" % "fake")
  1241. assert set(results) == set()
# expect no groups even though there is a release, since no group
# is attached to a release
  1244. release_1 = self.create_release(self.project)
  1245. results = self.make_query(search_filter_query="first_release:%s" % release_1.version)
  1246. assert set(results) == set()
  1247. # Create a new event so that we get a group in this release
  1248. group = self.store_event(
  1249. data={
  1250. "fingerprint": ["put-me-in-group9001"],
  1251. "event_id": "a" * 32,
  1252. "message": "hello",
  1253. "environment": "production",
  1254. "tags": {"server": "example.com"},
  1255. "release": release_1.version,
  1256. "stacktrace": {"frames": [{"module": "group1"}]},
  1257. },
  1258. project_id=self.project.id,
  1259. ).group
  1260. results = self.make_query(search_filter_query="first_release:%s" % release_1.version)
  1261. assert set(results) == {group}
  1262. def test_first_release_in_syntax(self):
  1263. # expect no groups within the results since there are no releases
  1264. self.run_test_query_in_syntax("first_release:[fake, fake2]", [])
# expect no groups even though there is a release, since no group
# is attached to a release
  1267. release_1 = self.create_release(self.project)
  1268. release_2 = self.create_release(self.project)
  1269. self.run_test_query_in_syntax(
  1270. f"first_release:[{release_1.version}, {release_2.version}]", []
  1271. )
  1272. # Create a new event so that we get a group in this release
  1273. group = self.store_event(
  1274. data={
  1275. "fingerprint": ["put-me-in-group9001"],
  1276. "event_id": "a" * 32,
  1277. "message": "hello",
  1278. "environment": "production",
  1279. "tags": {"server": "example.com"},
  1280. "release": release_1.version,
  1281. "stacktrace": {"frames": [{"module": "group1"}]},
  1282. },
  1283. project_id=self.project.id,
  1284. ).group
  1285. self.run_test_query_in_syntax(
  1286. f"first_release:[{release_1.version}, {release_2.version}]",
  1287. [group],
  1288. [self.group1, self.group2],
  1289. )
  1290. # Create a new event so that we get a group in this release
  1291. group_2 = self.store_event(
  1292. data={
  1293. "fingerprint": ["put-me-in-group9002"],
  1294. "event_id": "a" * 32,
  1295. "message": "hello",
  1296. "environment": "production",
  1297. "tags": {"server": "example.com"},
  1298. "release": release_2.version,
  1299. "stacktrace": {"frames": [{"module": "group1"}]},
  1300. },
  1301. project_id=self.project.id,
  1302. ).group
  1303. self.run_test_query_in_syntax(
  1304. f"first_release:[{release_1.version}, {release_2.version}]",
  1305. [group, group_2],
  1306. )
  1307. def test_first_release_environments(self):
  1308. results = self.make_query(
  1309. environments=[self.environments["production"]],
  1310. search_filter_query="first_release:fake",
  1311. )
  1312. assert set(results) == set()
  1313. release = self.create_release(self.project)
  1314. group_env = GroupEnvironment.get_or_create(
  1315. group_id=self.group1.id, environment_id=self.environments["production"].id
  1316. )[0]
  1317. results = self.make_query(
  1318. environments=[self.environments["production"]],
  1319. search_filter_query=f"first_release:{release.version}",
  1320. )
  1321. assert set(results) == set()
  1322. group_env.first_release = release
  1323. group_env.save()
  1324. results = self.make_query(
  1325. environments=[self.environments["production"]],
  1326. search_filter_query=f"first_release:{release.version}",
  1327. )
  1328. assert set(results) == {self.group1}
  1329. def test_first_release_environments_in_syntax(self):
  1330. self.run_test_query_in_syntax(
  1331. "first_release:[fake, fake2]",
  1332. [],
  1333. [self.group1, self.group2],
  1334. environments=[self.environments["production"]],
  1335. )
  1336. release = self.create_release(self.project)
  1337. group_1_env = GroupEnvironment.objects.get(
  1338. group_id=self.group1.id, environment_id=self.environments["production"].id
  1339. )
  1340. group_1_env.update(first_release=release)
  1341. self.run_test_query_in_syntax(
  1342. f"first_release:[{release.version}, fake2]",
  1343. [self.group1],
  1344. [self.group2],
  1345. environments=[self.environments["production"]],
  1346. )
  1347. group_2_env = GroupEnvironment.objects.get(
  1348. group_id=self.group2.id, environment_id=self.environments["staging"].id
  1349. )
  1350. group_2_env.update(first_release=release)
  1351. self.run_test_query_in_syntax(
  1352. f"first_release:[{release.version}, fake2]",
  1353. [self.group1, self.group2],
  1354. [],
  1355. environments=[self.environments["production"], self.environments["staging"]],
  1356. )
  1357. # Make sure we don't get duplicate groups
  1358. GroupEnvironment.objects.create(
  1359. group_id=self.group1.id,
  1360. environment_id=self.environments["staging"].id,
  1361. first_release=release,
  1362. )
  1363. self.run_test_query_in_syntax(
  1364. f"first_release:[{release.version}, fake2]",
  1365. [self.group1, self.group2],
  1366. [],
  1367. environments=[self.environments["production"], self.environments["staging"]],
  1368. )
  1369. def test_query_enclosed_in_quotes(self):
  1370. results = self.make_query(search_filter_query='"foo"')
  1371. assert set(results) == {self.group1}
  1372. results = self.make_query(search_filter_query='"bar"')
  1373. assert set(results) == {self.group2}
  1374. @xfail_if_not_postgres("Wildcard searching only supported in Postgres")
  1375. def test_wildcard(self):
  1376. escaped_event = self.store_event(
  1377. data={
  1378. "fingerprint": ["hello-there"],
  1379. "event_id": "f" * 32,
  1380. "message": "somet[hing]",
  1381. "environment": "production",
  1382. "tags": {"server": "example.net"},
  1383. "timestamp": iso_format(self.base_datetime),
  1384. "stacktrace": {"frames": [{"module": "group1"}]},
  1385. },
  1386. project_id=self.project.id,
  1387. )
  1388. # Note: Adding in `environment:production` so that we make sure we query
  1389. # in both snuba and postgres
  1390. results = self.make_query(search_filter_query="environment:production so*t")
  1391. assert set(results) == {escaped_event.group}
  1392. # Make sure it's case insensitive
  1393. results = self.make_query(search_filter_query="environment:production SO*t")
  1394. assert set(results) == {escaped_event.group}
  1395. results = self.make_query(search_filter_query="environment:production so*zz")
  1396. assert set(results) == set()
  1397. results = self.make_query(search_filter_query="environment:production [hing]")
  1398. assert set(results) == {escaped_event.group}
  1399. results = self.make_query(search_filter_query="environment:production s*]")
  1400. assert set(results) == {escaped_event.group}
  1401. results = self.make_query(search_filter_query="environment:production server:example.*")
  1402. assert set(results) == {self.group1, escaped_event.group}
  1403. results = self.make_query(search_filter_query="environment:production !server:*net")
  1404. assert set(results) == {self.group1}
# TODO: Tests that use the [] syntax are disabled for the moment. Re-enable
# them if we decide to add the syntax back, or remove them (and this comment)
# if it has been here a while.
  1408. # results = self.make_query(
  1409. # search_filter_query='environment:production [s][of][mz]',
  1410. # )
  1411. # assert set(results) == set([escaped_event.group])
  1412. # results = self.make_query(
  1413. # search_filter_query='environment:production [z][of][mz]',
  1414. # )
  1415. # assert set(results) == set()
  1416. def test_null_tags(self):
  1417. tag_event = self.store_event(
  1418. data={
  1419. "fingerprint": ["hello-there"],
  1420. "event_id": "f" * 32,
  1421. "message": "something",
  1422. "environment": "production",
  1423. "tags": {"server": "example.net"},
  1424. "timestamp": iso_format(self.base_datetime),
  1425. "stacktrace": {"frames": [{"module": "group1"}]},
  1426. },
  1427. project_id=self.project.id,
  1428. )
  1429. no_tag_event = self.store_event(
  1430. data={
  1431. "fingerprint": ["hello-there-2"],
  1432. "event_id": "5" * 32,
  1433. "message": "something",
  1434. "environment": "production",
  1435. "timestamp": iso_format(self.base_datetime),
  1436. "stacktrace": {"frames": [{"module": "group2"}]},
  1437. },
  1438. project_id=self.project.id,
  1439. )
  1440. results = self.make_query(search_filter_query="environment:production !server:*net")
  1441. assert set(results) == {self.group1, no_tag_event.group}
  1442. results = self.make_query(search_filter_query="environment:production server:*net")
  1443. assert set(results) == {tag_event.group}
  1444. results = self.make_query(search_filter_query="environment:production !server:example.net")
  1445. assert set(results) == {self.group1, no_tag_event.group}
  1446. results = self.make_query(search_filter_query="environment:production server:example.net")
  1447. assert set(results) == {tag_event.group}
  1448. results = self.make_query(search_filter_query="environment:production has:server")
  1449. assert set(results) == {self.group1, tag_event.group}
  1450. results = self.make_query(search_filter_query="environment:production !has:server")
  1451. assert set(results) == {no_tag_event.group}
  1452. def test_null_promoted_tags(self):
  1453. tag_event = self.store_event(
  1454. data={
  1455. "fingerprint": ["hello-there"],
  1456. "event_id": "f" * 32,
  1457. "message": "something",
  1458. "environment": "production",
  1459. "tags": {"logger": "csp"},
  1460. "timestamp": iso_format(self.base_datetime),
  1461. "stacktrace": {"frames": [{"module": "group1"}]},
  1462. },
  1463. project_id=self.project.id,
  1464. )
  1465. no_tag_event = self.store_event(
  1466. data={
  1467. "fingerprint": ["hello-there-2"],
  1468. "event_id": "5" * 32,
  1469. "message": "something",
  1470. "environment": "production",
  1471. "timestamp": iso_format(self.base_datetime),
  1472. "stacktrace": {"frames": [{"module": "group2"}]},
  1473. },
  1474. project_id=self.project.id,
  1475. )
  1476. results = self.make_query(search_filter_query="environment:production !logger:*sp")
  1477. assert set(results) == {self.group1, no_tag_event.group}
  1478. results = self.make_query(search_filter_query="environment:production logger:*sp")
  1479. assert set(results) == {tag_event.group}
  1480. results = self.make_query(search_filter_query="environment:production !logger:csp")
  1481. assert set(results) == {self.group1, no_tag_event.group}
  1482. results = self.make_query(search_filter_query="environment:production logger:csp")
  1483. assert set(results) == {tag_event.group}
  1484. results = self.make_query(search_filter_query="environment:production has:logger")
  1485. assert set(results) == {tag_event.group}
  1486. results = self.make_query(search_filter_query="environment:production !has:logger")
  1487. assert set(results) == {self.group1, no_tag_event.group}
  1488. def test_sort_multi_project(self):
  1489. self.set_up_multi_project()
  1490. results = self.make_query([self.project, self.project2], sort_by="date")
  1491. assert list(results) == [self.group1, self.group_p2, self.group2]
  1492. results = self.make_query([self.project, self.project2], sort_by="new")
  1493. assert list(results) == [self.group2, self.group_p2, self.group1]
  1494. results = self.make_query([self.project, self.project2], sort_by="freq")
  1495. assert list(results) == [self.group1, self.group_p2, self.group2]
  1496. results = self.make_query([self.project, self.project2], sort_by="priority")
  1497. assert list(results) == [self.group1, self.group2, self.group_p2]
  1498. results = self.make_query([self.project, self.project2], sort_by="user")
  1499. assert list(results) == [self.group1, self.group2, self.group_p2]
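    # Fixture for trend sorting: extra events are stored on either side of the window midpoint so
    # that group1 and fewer_events_group sort first, while the remaining groups all end up with a
    # trend of 0 (see the assertion at the end of this test).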
  1500. def test_sort_trend(self):
  1501. start = self.group1.first_seen - timedelta(days=1)
  1502. end = before_now(days=1).replace(tzinfo=pytz.utc)
  1503. middle = start + ((end - start) / 2)
  1504. self.store_event(
  1505. data={
  1506. "fingerprint": ["put-me-in-group1"],
  1507. "event_id": "2" * 32,
  1508. "message": "something",
  1509. "timestamp": iso_format(self.base_datetime),
  1510. },
  1511. project_id=self.project.id,
  1512. )
  1513. self.store_event(
  1514. data={
  1515. "fingerprint": ["put-me-in-group1"],
  1516. "event_id": "3" * 32,
  1517. "message": "something",
  1518. "timestamp": iso_format(self.base_datetime),
  1519. },
  1520. project_id=self.project.id,
  1521. )
  1522. fewer_events_group = self.store_event(
  1523. data={
  1524. "fingerprint": ["put-me-in-group4"],
  1525. "event_id": "4" * 32,
  1526. "message": "something",
  1527. "timestamp": iso_format(middle - timedelta(days=1)),
  1528. },
  1529. project_id=self.project.id,
  1530. ).group
  1531. self.store_event(
  1532. data={
  1533. "fingerprint": ["put-me-in-group4"],
  1534. "event_id": "5" * 32,
  1535. "message": "something",
  1536. "timestamp": iso_format(middle - timedelta(days=1)),
  1537. },
  1538. project_id=self.project.id,
  1539. )
  1540. self.store_event(
  1541. data={
  1542. "fingerprint": ["put-me-in-group4"],
  1543. "event_id": "6" * 32,
  1544. "message": "something",
  1545. "timestamp": iso_format(self.base_datetime),
  1546. },
  1547. project_id=self.project.id,
  1548. )
  1549. no_before_group = self.store_event(
  1550. data={
  1551. "fingerprint": ["put-me-in-group5"],
  1552. "event_id": "3" * 32,
  1553. "message": "something",
  1554. "timestamp": iso_format(self.base_datetime),
  1555. },
  1556. project_id=self.project.id,
  1557. ).group
  1558. no_after_group = self.store_event(
  1559. data={
  1560. "fingerprint": ["put-me-in-group6"],
  1561. "event_id": "4" * 32,
  1562. "message": "something",
  1563. "timestamp": iso_format(middle - timedelta(days=1)),
  1564. },
  1565. project_id=self.project.id,
  1566. ).group
  1567. self.set_up_multi_project()
  1568. results = self.make_query([self.project], sort_by="trend", date_from=start, date_to=end)
  1569. assert results[:2] == [self.group1, fewer_events_group]
  1570. # These will be arbitrarily ordered since their trend values are all 0
  1571. assert set(results[2:]) == {self.group2, no_before_group, no_after_group}
  1572. def test_in_syntax_is_invalid(self):
  1573. with pytest.raises(InvalidSearchQuery, match='"in" syntax invalid for "is" search'):
  1574. self.make_query(search_filter_query="is:[unresolved, resolved]")
  1575. def test_first_release_any_or_no_environments(self):
  1576. # test scenarios for tickets:
  1577. # SEN-571
  1578. # ISSUE-432
  1579. # given the following setup:
  1580. #
  1581. # groups table:
  1582. # group first_release
  1583. # A 1
  1584. # B 1
  1585. # C 2
  1586. #
  1587. # groupenvironments table:
  1588. # group environment first_release
  1589. # A staging 1
  1590. # A production 2
  1591. #
  1592. # when querying by first release, the appropriate set of groups should be displayed:
  1593. #
  1594. # first_release: 1
  1595. # env=[]: A, B
  1596. # env=[production, staging]: A
  1597. # env=[staging]: A
  1598. # env=[production]: nothing
  1599. #
  1600. # first_release: 2
  1601. # env=[]: A, C
  1602. # env=[production, staging]: A
  1603. # env=[staging]: nothing
  1604. # env=[production]: A
# create an issue/group whose events occur in 2 distinct environments
  1606. group_a_event_1 = self.store_event(
  1607. data={
  1608. "fingerprint": ["group_a"],
  1609. "event_id": "aaa" + ("1" * 29),
  1610. "environment": "example_staging",
  1611. "release": "release_1",
  1612. },
  1613. project_id=self.project.id,
  1614. )
  1615. group_a_event_2 = self.store_event(
  1616. data={
  1617. "fingerprint": ["group_a"],
  1618. "event_id": "aaa" + ("2" * 29),
  1619. "environment": "example_production",
  1620. "release": "release_2",
  1621. },
  1622. project_id=self.project.id,
  1623. )
  1624. group_a = group_a_event_1.group
  1625. # get the environments for group_a
  1626. prod_env = group_a_event_2.get_environment()
  1627. staging_env = group_a_event_1.get_environment()
# create an issue/group whose events occur in no environment,
# but which will be tied to release_1
  1630. group_b_event_1 = self.store_event(
  1631. data={
  1632. "fingerprint": ["group_b"],
  1633. "event_id": "bbb" + ("1" * 29),
  1634. "release": "release_1",
  1635. },
  1636. project_id=self.project.id,
  1637. )
  1638. assert group_b_event_1.get_environment().name == "" # has no environment
  1639. group_b = group_b_event_1.group
# create an issue/group whose events occur in no environment,
# but which will be tied to release_2
  1642. group_c_event_1 = self.store_event(
  1643. data={
  1644. "fingerprint": ["group_c"],
  1645. "event_id": "ccc" + ("1" * 29),
  1646. "release": "release_2",
  1647. },
  1648. project_id=self.project.id,
  1649. )
  1650. assert group_c_event_1.get_environment().name == "" # has no environment
  1651. group_c = group_c_event_1.group
  1652. # query by release release_1
  1653. results = self.make_query(search_filter_query="first_release:%s" % "release_1")
  1654. assert set(results) == {group_a, group_b}
  1655. results = self.make_query(
  1656. environments=[staging_env, prod_env],
  1657. search_filter_query="first_release:%s" % "release_1",
  1658. )
  1659. assert set(results) == {group_a}
  1660. results = self.make_query(
  1661. environments=[staging_env], search_filter_query="first_release:%s" % "release_1"
  1662. )
  1663. assert set(results) == {group_a}
  1664. results = self.make_query(
  1665. environments=[prod_env], search_filter_query="first_release:%s" % "release_1"
  1666. )
  1667. assert set(results) == set()
  1668. # query by release release_2
  1669. results = self.make_query(search_filter_query="first_release:%s" % "release_2")
  1670. assert set(results) == {group_a, group_c}
  1671. results = self.make_query(
  1672. environments=[staging_env, prod_env],
  1673. search_filter_query="first_release:%s" % "release_2",
  1674. )
  1675. assert set(results) == {group_a}
  1676. results = self.make_query(
  1677. environments=[staging_env], search_filter_query="first_release:%s" % "release_2"
  1678. )
  1679. assert set(results) == set()
  1680. results = self.make_query(
  1681. environments=[prod_env], search_filter_query="first_release:%s" % "release_2"
  1682. )
  1683. assert set(results) == {group_a}
    def test_all_fields_do_not_error(self):
        # Just a sanity check to make sure that all fields can be successfully
        # searched on without returning type errors and other schema-related
        # issues.
        def test_query(query):
            try:
                self.make_query(search_filter_query=query)
            except SnubaError as e:
                self.fail(f"Query {query} errored. Error info: {e}")

        for key in SENTRY_SNUBA_MAP:
            if key in ["project.id", "issue.id", "performance.issue_ids"]:
                continue
            test_query("has:%s" % key)
            test_query("!has:%s" % key)

            if key == "error.handled":
                val = 1
            elif key in issue_search_config.numeric_keys:
                val = "123"
            elif key in issue_search_config.date_keys:
                val = self.base_datetime.isoformat()
            elif key in issue_search_config.boolean_keys:
                val = "true"
            elif key in {"trace.span", "trace.parent_span"}:
                val = "abcdef1234abcdef"
                test_query(f"!{key}:{val}")
            else:
                val = "abadcafedeadbeefdeaffeedabadfeed"
                test_query(f"!{key}:{val}")

            test_query(f"{key}:{val}")
  1713. def test_message_negation(self):
  1714. self.store_event(
  1715. data={
  1716. "fingerprint": ["put-me-in-group1"],
  1717. "event_id": "2" * 32,
  1718. "message": "something",
  1719. "timestamp": iso_format(self.base_datetime),
  1720. },
  1721. project_id=self.project.id,
  1722. )
  1723. results = self.make_query(search_filter_query="!message:else")
  1724. results2 = self.make_query(search_filter_query="!message:else")
  1725. assert list(results) == list(results2)
  1726. def test_error_main_thread_true(self):
  1727. myProject = self.create_project(
  1728. name="Foo", slug="foo", teams=[self.team], fire_project_created=True
  1729. )
  1730. event = self.store_event(
  1731. data={
  1732. "event_id": "1" * 32,
  1733. "message": "something",
  1734. "timestamp": iso_format(self.base_datetime),
  1735. "exception": {
  1736. "values": [
  1737. {
  1738. "type": "SyntaxError",
  1739. "value": "hello world",
  1740. "thread_id": 1,
  1741. },
  1742. ],
  1743. },
  1744. "threads": {
  1745. "values": [
  1746. {
  1747. "id": 1,
  1748. "main": True,
  1749. },
  1750. ],
  1751. },
  1752. },
  1753. project_id=myProject.id,
  1754. )
  1755. myGroup = event.groups[0]
  1756. results = self.make_query(
  1757. projects=[myProject],
  1758. search_filter_query="error.main_thread:1",
  1759. sort_by="date",
  1760. )
  1761. assert list(results) == [myGroup]
  1762. def test_error_main_thread_false(self):
  1763. myProject = self.create_project(
  1764. name="Foo2", slug="foo2", teams=[self.team], fire_project_created=True
  1765. )
  1766. event = self.store_event(
  1767. data={
  1768. "event_id": "2" * 32,
  1769. "message": "something",
  1770. "timestamp": iso_format(self.base_datetime),
  1771. "exception": {
  1772. "values": [
  1773. {
  1774. "type": "SyntaxError",
  1775. "value": "hello world",
  1776. "thread_id": 1,
  1777. },
  1778. ],
  1779. },
  1780. "threads": {
  1781. "values": [
  1782. {
  1783. "id": 1,
  1784. "main": False,
  1785. },
  1786. ],
  1787. },
  1788. },
  1789. project_id=myProject.id,
  1790. )
  1791. myGroup = event.groups[0]
  1792. results = self.make_query(
  1793. projects=[myProject],
  1794. search_filter_query="error.main_thread:0",
  1795. sort_by="date",
  1796. )
  1797. assert list(results) == [myGroup]
  1798. def test_error_main_thread_no_results(self):
  1799. myProject = self.create_project(
  1800. name="Foo3", slug="foo3", teams=[self.team], fire_project_created=True
  1801. )
  1802. self.store_event(
  1803. data={
  1804. "event_id": "3" * 32,
  1805. "message": "something",
  1806. "timestamp": iso_format(self.base_datetime),
  1807. "exception": {
  1808. "values": [
  1809. {
  1810. "type": "SyntaxError",
  1811. "value": "hello world",
  1812. "thread_id": 1,
  1813. },
  1814. ],
  1815. },
  1816. "threads": {
  1817. "values": [
  1818. {
  1819. "id": 1,
  1820. },
  1821. ],
  1822. },
  1823. },
  1824. project_id=myProject.id,
  1825. )
  1826. results = self.make_query(
  1827. projects=[myProject],
  1828. search_filter_query="error.main_thread:1",
  1829. sort_by="date",
  1830. )
  1831. assert len(results) == 0
  1832. class EventsTransactionsSnubaSearchTest(SharedSnubaTest):
  1833. @property
  1834. def backend(self):
  1835. return EventsDatasetSnubaSearchBackend()
  1836. def setUp(self):
  1837. super().setUp()
  1838. self.base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc)
  1839. transaction_event_data = {
  1840. "level": "info",
  1841. "message": "ayoo",
  1842. "type": "transaction",
  1843. "culprit": "app/components/events/eventEntries in map",
  1844. "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
  1845. }
  1846. with self.options({"performance.issues.send_to_issues_platform": True}), self.feature(
  1847. "organizations:issue-platform"
  1848. ):
  1849. transaction_event_1 = self.store_event(
  1850. data={
  1851. **transaction_event_data,
  1852. "event_id": "a" * 32,
  1853. "timestamp": iso_format(before_now(minutes=1)),
  1854. "start_timestamp": iso_format(before_now(minutes=1, seconds=5)),
  1855. "tags": {"my_tag": 1},
  1856. "fingerprint": [
  1857. f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group1"
  1858. ],
  1859. },
  1860. project_id=self.project.id,
  1861. )
  1862. self.perf_group_1 = transaction_event_1.groups[0]
  1863. transaction_event_2 = self.store_event(
  1864. data={
  1865. **transaction_event_data,
  1866. "event_id": "a" * 32,
  1867. "timestamp": iso_format(before_now(minutes=2)),
  1868. "start_timestamp": iso_format(before_now(minutes=2, seconds=5)),
  1869. "tags": {"my_tag": 1},
  1870. "fingerprint": [
  1871. f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group2"
  1872. ],
  1873. },
  1874. project_id=self.project.id,
  1875. )
  1876. self.perf_group_2 = transaction_event_2.groups[0]
  1877. error_event_data = {
  1878. "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
  1879. "message": "bar",
  1880. "environment": "staging",
  1881. "tags": {
  1882. "server": "example.com",
  1883. "url": "http://example.com",
  1884. "sentry:user": "event2@example.com",
  1885. "my_tag": 1,
  1886. },
  1887. }
  1888. error_event = self.store_event(
  1889. data={
  1890. **error_event_data,
  1891. "fingerprint": ["put-me-in-error_group_1"],
  1892. "event_id": "c" * 32,
  1893. "stacktrace": {"frames": [{"module": "error_group_1"}]},
  1894. },
  1895. project_id=self.project.id,
  1896. )
  1897. self.error_group_1 = error_event.group
  1898. error_event_2 = self.store_event(
  1899. data={
  1900. **error_event_data,
  1901. "fingerprint": ["put-me-in-error_group_2"],
  1902. "event_id": "d" * 32,
  1903. "stacktrace": {"frames": [{"module": "error_group_2"}]},
  1904. },
  1905. project_id=self.project.id,
  1906. )
  1907. self.error_group_2 = error_event_2.group
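    # Performance issues created in setUp should be searchable both by issue.category and by
    # specific issue.type values.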
    def test_performance_query(self):
        results = self.make_query(search_filter_query="issue.category:performance my_tag:1")
        assert list(results) == [self.perf_group_1, self.perf_group_2]

        results = self.make_query(
            search_filter_query="issue.type:[performance_n_plus_one_db_queries, performance_render_blocking_asset_span] my_tag:1"
        )
        assert list(results) == [self.perf_group_1, self.perf_group_2]
    def test_performance_query_no_duplicates(self):
        # Regression test to catch an issue we had with performance issues showing up duplicated in
        # the issue stream. This was caused by us dual-writing perf issues to transactions and to
        # the issue platform. We'd end up reading the same issue twice and duplicating it in the
        # response.
        with self.feature("organizations:issue-platform"), self.options(
            {"performance.issues.send_to_issues_platform": True}
        ):
            results = self.make_query(search_filter_query="!issue.category:error my_tag:1")
            assert list(results) == [self.perf_group_1, self.perf_group_2]
  1924. def test_performance_issue_search_feature_off(self):
  1925. with Feature({"organizations:performance-issues-search": False}):
  1926. results = self.make_query(search_filter_query="issue.category:performance my_tag:1")
  1927. assert list(results) == []
  1928. with Feature({"organizations:performance-issues-search": True}):
  1929. results = self.make_query(search_filter_query="issue.category:performance my_tag:1")
  1930. assert list(results) == [self.perf_group_1, self.perf_group_2]
  1931. def test_error_performance_query(self):
  1932. results = self.make_query(search_filter_query="my_tag:1")
  1933. assert list(results) == [
  1934. self.perf_group_1,
  1935. self.perf_group_2,
  1936. self.error_group_2,
  1937. self.error_group_1,
  1938. ]
  1939. results = self.make_query(
  1940. search_filter_query="issue.category:[performance, error] my_tag:1"
  1941. )
  1942. assert list(results) == [
  1943. self.perf_group_1,
  1944. self.perf_group_2,
  1945. self.error_group_2,
  1946. self.error_group_1,
  1947. ]
  1948. results = self.make_query(
  1949. search_filter_query="issue.type:[performance_render_blocking_asset_span, error] my_tag:1"
  1950. )
  1951. assert list(results) == [
  1952. self.perf_group_1,
  1953. self.perf_group_2,
  1954. self.error_group_2,
  1955. self.error_group_1,
  1956. ]
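    # Paginate performance issues one at a time via cursors; `hits` should report the full count (2)
    # on every page, including the final empty one.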
    def test_cursor_performance_issues(self):
        results = self.make_query(
            projects=[self.project],
            search_filter_query="issue.category:performance my_tag:1",
            sort_by="date",
            limit=1,
            count_hits=True,
        )
        assert list(results) == [self.perf_group_1]
        assert results.hits == 2

        results = self.make_query(
            projects=[self.project],
            search_filter_query="issue.category:performance my_tag:1",
            sort_by="date",
            limit=1,
            cursor=results.next,
            count_hits=True,
        )
        assert list(results) == [self.perf_group_2]
        assert results.hits == 2

        results = self.make_query(
            projects=[self.project],
            search_filter_query="issue.category:performance my_tag:1",
            sort_by="date",
            limit=1,
            cursor=results.next,
            count_hits=True,
        )
        assert list(results) == []
        assert results.hits == 2
  1987. def test_perf_issue_search_message_term_queries_postgres(self):
  1988. from django.db.models import Q
  1989. from sentry.utils import snuba
  1990. transaction_name = "im a little tea pot"
  1991. tx = self.store_event(
  1992. data={
  1993. "level": "info",
  1994. "culprit": "app/components/events/eventEntries in map",
  1995. "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
  1996. "fingerprint": [f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group12"],
  1997. "event_id": "e" * 32,
  1998. "timestamp": iso_format(self.base_datetime),
  1999. "start_timestamp": iso_format(self.base_datetime),
  2000. "type": "transaction",
  2001. "transaction": transaction_name,
  2002. },
  2003. project_id=self.project.id,
  2004. )
  2005. assert "tea" in tx.search_message
  2006. created_group = tx.groups[0]
  2007. find_group = Group.objects.filter(
  2008. Q(type=PerformanceRenderBlockingAssetSpanGroupType.type_id, message__icontains="tea")
  2009. ).first()
  2010. assert created_group == find_group
  2011. result = snuba.raw_query(
  2012. dataset=snuba.Dataset.Transactions,
  2013. start=self.base_datetime - timedelta(hours=1),
  2014. end=self.base_datetime + timedelta(hours=1),
  2015. selected_columns=[
  2016. "event_id",
  2017. "group_ids",
  2018. "transaction_name",
  2019. ],
  2020. groupby=None,
  2021. filter_keys={"project_id": [self.project.id], "event_id": [tx.event_id]},
  2022. referrer="_insert_transaction.verify_transaction",
  2023. )
  2024. assert result["data"][0]["transaction_name"] == transaction_name
  2025. assert result["data"][0]["group_ids"] == [created_group.id]
  2026. results = self.make_query(search_filter_query="issue.category:performance tea")
  2027. assert set(results) == {created_group}
  2028. results2 = self.make_query(search_filter_query="tea")
  2029. assert set(results2) == {created_group}
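    # A bare text term should match both error messages and transaction names, so the error group
    # and the performance group are returned together.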
  2030. def test_search_message_error_and_perf_issues(self):
  2031. tx = self.store_event(
  2032. data={
  2033. "level": "info",
  2034. "culprit": "app/components/events/eventEntries in map",
  2035. "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
  2036. "fingerprint": [f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group12"],
  2037. "event_id": "e" * 32,
  2038. "timestamp": iso_format(self.base_datetime),
  2039. "start_timestamp": iso_format(self.base_datetime),
  2040. "type": "transaction",
  2041. "transaction": "/api/0/events",
  2042. },
  2043. project_id=self.project.id,
  2044. )
  2045. perf_issue = tx.groups[0]
  2046. assert perf_issue
  2047. error = self.store_event(
  2048. data={
  2049. "fingerprint": ["another-random-group"],
  2050. "event_id": "d" * 32,
  2051. "message": "Uncaught exception on api /api/0/events",
  2052. "environment": "production",
  2053. "tags": {"server": "example.com", "sentry:user": "event3@example.com"},
  2054. "timestamp": iso_format(self.base_datetime),
  2055. "stacktrace": {"frames": [{"module": "group1"}]},
  2056. },
  2057. project_id=self.project.id,
  2058. )
  2059. error_issue = error.group
  2060. assert error_issue
  2061. assert error_issue != perf_issue
  2062. assert set(self.make_query(search_filter_query="is:unresolved /api/0/events")) == {
  2063. perf_issue,
  2064. error_issue,
  2065. }
  2066. assert set(self.make_query(search_filter_query="/api/0/events")) == {
  2067. error_issue,
  2068. perf_issue,
  2069. }
  2070. def test_compound_message_negation(self):
  2071. self.store_event(
  2072. data={
  2073. "fingerprint": ["put-me-in-group1"],
  2074. "event_id": "2" * 32,
  2075. "message": "something",
  2076. "timestamp": iso_format(self.base_datetime),
  2077. },
  2078. project_id=self.project.id,
  2079. )
  2080. self.store_event(
  2081. data={
  2082. "level": "info",
  2083. "culprit": "app/components/events/eventEntries in map",
  2084. "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
  2085. "fingerprint": [f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group12"],
  2086. "event_id": "e" * 32,
  2087. "timestamp": iso_format(self.base_datetime),
  2088. "start_timestamp": iso_format(self.base_datetime),
  2089. "type": "transaction",
  2090. "transaction": "something",
  2091. },
  2092. project_id=self.project.id,
  2093. )
  2094. error_issues_only = self.make_query(
  2095. search_filter_query="!message:else group.category:error"
  2096. )
  2097. error_and_perf_issues = self.make_query(search_filter_query="!message:else")
  2098. assert set(error_and_perf_issues) > set(error_issues_only)
  2099. class EventsGenericSnubaSearchTest(SharedSnubaTest, OccurrenceTestMixin):
  2100. @property
  2101. def backend(self):
  2102. return EventsDatasetSnubaSearchBackend()
  2103. def setUp(self):
  2104. super().setUp()
  2105. self.base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc)
  2106. event_id_1 = uuid.uuid4().hex
  2107. _, group_info = process_event_and_issue_occurrence(
  2108. self.build_occurrence_data(event_id=event_id_1),
  2109. {
  2110. "event_id": event_id_1,
  2111. "project_id": self.project.id,
  2112. "title": "some problem",
  2113. "platform": "python",
  2114. "tags": {"my_tag": "1"},
  2115. "timestamp": before_now(minutes=1).isoformat(),
  2116. "received": before_now(minutes=1).isoformat(),
  2117. },
  2118. )
  2119. self.profile_group_1 = group_info.group
  2120. event_id_2 = uuid.uuid4().hex
  2121. _, group_info = process_event_and_issue_occurrence(
  2122. self.build_occurrence_data(event_id=event_id_2, fingerprint=["put-me-in-group-2"]),
  2123. {
  2124. "event_id": event_id_2,
  2125. "project_id": self.project.id,
  2126. "title": "some other problem",
  2127. "platform": "python",
  2128. "tags": {"my_tag": "1"},
  2129. "timestamp": before_now(minutes=2).isoformat(),
  2130. "received": before_now(minutes=2).isoformat(),
  2131. },
  2132. )
  2133. self.profile_group_2 = group_info.group
  2134. event_id_3 = uuid.uuid4().hex
  2135. process_event_and_issue_occurrence(
  2136. self.build_occurrence_data(event_id=event_id_3, fingerprint=["put-me-in-group-3"]),
  2137. {
  2138. "event_id": event_id_3,
  2139. "project_id": self.project.id,
  2140. "title": "some other problem",
  2141. "platform": "python",
  2142. "tags": {"my_tag": "2"},
  2143. "timestamp": before_now(minutes=2).isoformat(),
  2144. "message_timestamp": before_now(minutes=2).isoformat(),
  2145. },
  2146. )
  2147. error_event_data = {
  2148. "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
  2149. "message": "bar",
  2150. "environment": "staging",
  2151. "tags": {
  2152. "server": "example.com",
  2153. "url": "http://example.com",
  2154. "sentry:user": "event2@example.com",
  2155. "my_tag": 1,
  2156. },
  2157. }
  2158. error_event = self.store_event(
  2159. data={
  2160. **error_event_data,
  2161. "fingerprint": ["put-me-in-error_group_1"],
  2162. "event_id": "c" * 32,
  2163. "stacktrace": {"frames": [{"module": "error_group_1"}]},
  2164. },
  2165. project_id=self.project.id,
  2166. )
  2167. self.error_group_1 = error_event.group
  2168. error_event_2 = self.store_event(
  2169. data={
  2170. **error_event_data,
  2171. "fingerprint": ["put-me-in-error_group_2"],
  2172. "event_id": "d" * 32,
  2173. "stacktrace": {"frames": [{"module": "error_group_2"}]},
  2174. },
  2175. project_id=self.project.id,
  2176. )
  2177. self.error_group_2 = error_event_2.group
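    # Without the issue-platform feature enabled, profile issues should not be returned at all.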
    def test_no_feature(self):
        results = self.make_query(search_filter_query="issue.category:profile my_tag:1")
        assert list(results) == []
  2181. def test_generic_query(self):
  2182. with self.feature(
  2183. ["organizations:issue-platform", ProfileFileIOGroupType.build_visible_feature_name()]
  2184. ):
  2185. results = self.make_query(search_filter_query="issue.category:profile my_tag:1")
  2186. assert list(results) == [self.profile_group_1, self.profile_group_2]
  2187. results = self.make_query(
  2188. search_filter_query="issue.type:profile_file_io_main_thread my_tag:1"
  2189. )
  2190. assert list(results) == [self.profile_group_1, self.profile_group_2]
  2191. def test_generic_query_perf(self):
  2192. event_id = uuid.uuid4().hex
  2193. group_type = PerformanceNPlusOneGroupType
  2194. self.project.update_option("sentry:performance_issue_create_issue_through_platform", True)
  2195. with self.feature("organizations:issue-platform-search-perf-issues"), self.options(
  2196. {"performance.issues.create_issues_through_platform": True}
  2197. ), mock.patch.object(
  2198. PerformanceNPlusOneGroupType, "noise_config", new=NoiseConfig(0, timedelta(minutes=1))
  2199. ):
  2200. with self.feature(group_type.build_ingest_feature_name()):
  2201. _, group_info = process_event_and_issue_occurrence(
  2202. self.build_occurrence_data(
  2203. event_id=event_id, type=group_type.type_id, fingerprint=["some perf issue"]
  2204. ),
  2205. {
  2206. "event_id": event_id,
  2207. "project_id": self.project.id,
  2208. "title": "some problem",
  2209. "platform": "python",
  2210. "tags": {"my_tag": "2"},
  2211. "timestamp": before_now(minutes=1).isoformat(),
  2212. "received": before_now(minutes=1).isoformat(),
  2213. },
  2214. )
  2215. results = self.make_query(search_filter_query="issue.category:performance my_tag:2")
  2216. assert list(results) == []
  2217. with self.feature(
  2218. [
  2219. "organizations:issue-platform",
  2220. group_type.build_visible_feature_name(),
  2221. "organizations:performance-issues-search",
  2222. ]
  2223. ):
  2224. results = self.make_query(search_filter_query="issue.category:performance my_tag:2")
  2225. assert list(results) == [group_info.group]
  2226. def test_error_generic_query(self):
  2227. with self.feature(
  2228. ["organizations:issue-platform", ProfileFileIOGroupType.build_visible_feature_name()]
  2229. ):
  2230. results = self.make_query(search_filter_query="my_tag:1")
  2231. assert list(results) == [
  2232. self.profile_group_1,
  2233. self.profile_group_2,
  2234. self.error_group_2,
  2235. self.error_group_1,
  2236. ]
  2237. results = self.make_query(
  2238. search_filter_query="issue.category:[profile, error] my_tag:1"
  2239. )
  2240. assert list(results) == [
  2241. self.profile_group_1,
  2242. self.profile_group_2,
  2243. self.error_group_2,
  2244. self.error_group_1,
  2245. ]
  2246. results = self.make_query(
  2247. search_filter_query="issue.type:[profile_file_io_main_thread, error] my_tag:1"
  2248. )
  2249. assert list(results) == [
  2250. self.profile_group_1,
  2251. self.profile_group_2,
  2252. self.error_group_2,
  2253. self.error_group_1,
  2254. ]
  2255. def test_cursor_profile_issues(self):
  2256. with self.feature(
  2257. ["organizations:issue-platform", ProfileFileIOGroupType.build_visible_feature_name()]
  2258. ):
  2259. results = self.make_query(
  2260. projects=[self.project],
  2261. search_filter_query="issue.category:profile my_tag:1",
  2262. sort_by="date",
  2263. limit=1,
  2264. count_hits=True,
  2265. )
  2266. assert list(results) == [self.profile_group_1]
  2267. assert results.hits == 2
  2268. results = self.make_query(
  2269. projects=[self.project],
  2270. search_filter_query="issue.category:profile my_tag:1",
  2271. sort_by="date",
  2272. limit=1,
  2273. cursor=results.next,
  2274. count_hits=True,
  2275. )
  2276. assert list(results) == [self.profile_group_2]
  2277. assert results.hits == 2
  2278. results = self.make_query(
  2279. projects=[self.project],
  2280. search_filter_query="issue.category:profile my_tag:1",
  2281. sort_by="date",
  2282. limit=1,
  2283. cursor=results.next,
  2284. count_hits=True,
  2285. )
  2286. assert list(results) == []
  2287. assert results.hits == 2
  2288. def test_rejected_filters(self):
  2289. """
  2290. Any queries with `error.handled` or `error.unhandled` filters querying the search_issues dataset
  2291. should be rejected and return empty results.
  2292. """
  2293. with self.feature(
  2294. ["organizations:issue-platform", ProfileFileIOGroupType.build_visible_feature_name()]
  2295. ):
  2296. results = self.make_query(
  2297. projects=[self.project],
  2298. search_filter_query="issue.category:profile error.unhandled:0",
  2299. sort_by="date",
  2300. limit=1,
  2301. count_hits=True,
  2302. )
  2303. results2 = self.make_query(
  2304. projects=[self.project],
  2305. search_filter_query="issue.category:profile error.unhandled:1",
  2306. sort_by="date",
  2307. limit=1,
  2308. count_hits=True,
  2309. )
  2310. result3 = self.make_query(
  2311. projects=[self.project],
  2312. search_filter_query="issue.category:profile error.handled:0",
  2313. sort_by="date",
  2314. limit=1,
  2315. count_hits=True,
  2316. )
  2317. results4 = self.make_query(
  2318. projects=[self.project],
  2319. search_filter_query="issue.category:profile error.handled:1",
  2320. sort_by="date",
  2321. limit=1,
  2322. count_hits=True,
  2323. )
  2324. results5 = self.make_query(
  2325. projects=[self.project],
  2326. search_filter_query="issue.category:profile error.main_thread:0",
  2327. sort_by="date",
  2328. limit=1,
  2329. count_hits=True,
  2330. )
  2331. results6 = self.make_query(
  2332. projects=[self.project],
  2333. search_filter_query="issue.category:profile error.main_thread:1",
  2334. sort_by="date",
  2335. limit=1,
  2336. count_hits=True,
  2337. )
  2338. assert (
  2339. list(results)
  2340. == list(results2)
  2341. == list(result3)
  2342. == list(results4)
  2343. == list(results5)
  2344. == list(results6)
  2345. == []
  2346. )
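# Tests that exercise the CDC-backed search backend (CdcEventsDatasetSnubaSearchBackend).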
  2347. class CdcEventsSnubaSearchTest(SharedSnubaTest):
  2348. @property
  2349. def backend(self):
  2350. return CdcEventsDatasetSnubaSearchBackend()
  2351. def setUp(self):
  2352. super().setUp()
  2353. self.base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc)
  2354. self.event1 = self.store_event(
  2355. data={
  2356. "fingerprint": ["put-me-in-group1"],
  2357. "event_id": "a" * 32,
  2358. "environment": "production",
  2359. "timestamp": iso_format(self.base_datetime - timedelta(days=21)),
  2360. "tags": {"sentry:user": "user1"},
  2361. },
  2362. project_id=self.project.id,
  2363. )
  2364. self.env1 = self.event1.get_environment()
  2365. self.group1 = self.event1.group
  2366. self.event3 = self.store_event(
  2367. data={
  2368. "fingerprint": ["put-me-in-group1"],
  2369. "environment": "staging",
  2370. "timestamp": iso_format(self.base_datetime),
  2371. "tags": {"sentry:user": "user2"},
  2372. },
  2373. project_id=self.project.id,
  2374. )
  2375. self.event2 = self.store_event(
  2376. data={
  2377. "fingerprint": ["put-me-in-group2"],
  2378. "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
  2379. "environment": "staging",
  2380. "tags": {"sentry:user": "user1"},
  2381. },
  2382. project_id=self.project.id,
  2383. )
  2384. self.group2 = self.event2.group
  2385. self.env2 = self.event2.get_environment()
  2386. def run_test(
  2387. self,
  2388. search_filter_query,
  2389. expected_groups,
  2390. expected_hits,
  2391. projects=None,
  2392. environments=None,
  2393. sort_by="date",
  2394. limit=None,
  2395. count_hits=False,
  2396. date_from=None,
  2397. date_to=None,
  2398. cursor=None,
  2399. ):
  2400. results = self.make_query(
  2401. projects=projects,
  2402. search_filter_query=search_filter_query,
  2403. environments=environments,
  2404. sort_by=sort_by,
  2405. limit=limit,
  2406. count_hits=count_hits,
  2407. date_from=date_from,
  2408. date_to=date_to,
  2409. cursor=cursor,
  2410. )
  2411. assert list(results) == expected_groups
  2412. assert results.hits == expected_hits
  2413. return results
  2414. def test(self):
  2415. self.run_test("is:unresolved", [self.group1, self.group2], None)
  2416. def test_invalid(self):
  2417. with pytest.raises(InvalidQueryForExecutor):
  2418. self.make_query(search_filter_query="is:unresolved abc:123")
  2419. def test_resolved_group(self):
  2420. self.group2.status = GroupStatus.RESOLVED
  2421. self.group2.substatus = None
  2422. self.group2.save()
  2423. self.store_group(self.group2)
  2424. self.run_test("is:unresolved", [self.group1], None)
  2425. self.run_test("is:resolved", [self.group2], None)
  2426. self.run_test("is:unresolved is:resolved", [], None)
  2427. def test_environment(self):
  2428. self.run_test("is:unresolved", [self.group1], None, environments=[self.env1])
  2429. self.run_test("is:unresolved", [self.group1, self.group2], None, environments=[self.env2])
  2430. def test_sort_times_seen(self):
  2431. self.run_test(
  2432. "is:unresolved",
  2433. [self.group1, self.group2],
  2434. None,
  2435. sort_by="freq",
  2436. date_from=self.base_datetime - timedelta(days=30),
  2437. )
  2438. self.store_event(
  2439. data={
  2440. "fingerprint": ["put-me-in-group2"],
  2441. "timestamp": iso_format(self.base_datetime - timedelta(days=15)),
  2442. },
  2443. project_id=self.project.id,
  2444. )
  2445. self.store_event(
  2446. data={
  2447. "fingerprint": ["put-me-in-group2"],
  2448. "timestamp": iso_format(self.base_datetime - timedelta(days=10)),
  2449. "tags": {"sentry:user": "user2"},
  2450. },
  2451. project_id=self.project.id,
  2452. )
  2453. self.run_test(
  2454. "is:unresolved",
  2455. [self.group2, self.group1],
  2456. None,
  2457. sort_by="freq",
# Change the date range to bust the cache
  2459. date_from=self.base_datetime - timedelta(days=29),
  2460. )
  2461. def test_sort_first_seen(self):
  2462. self.run_test(
  2463. "is:unresolved",
  2464. [self.group2, self.group1],
  2465. None,
  2466. sort_by="new",
  2467. date_from=self.base_datetime - timedelta(days=30),
  2468. )
  2469. group3 = self.store_event(
  2470. data={
  2471. "fingerprint": ["put-me-in-group3"],
  2472. "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
  2473. },
  2474. project_id=self.project.id,
  2475. ).group
  2476. self.run_test(
  2477. "is:unresolved",
  2478. [group3, self.group2, self.group1],
  2479. None,
  2480. sort_by="new",
# Change the date range to bust the cache
  2482. date_from=self.base_datetime - timedelta(days=29),
  2483. )
  2484. def test_sort_user(self):
  2485. self.run_test(
  2486. "is:unresolved",
  2487. [self.group1, self.group2],
  2488. None,
  2489. sort_by="user",
  2490. date_from=self.base_datetime - timedelta(days=30),
  2491. )
  2492. self.store_event(
  2493. data={
  2494. "fingerprint": ["put-me-in-group2"],
  2495. "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
  2496. "tags": {"sentry:user": "user2"},
  2497. },
  2498. project_id=self.project.id,
  2499. )
  2500. self.store_event(
  2501. data={
  2502. "fingerprint": ["put-me-in-group2"],
  2503. "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
  2504. "tags": {"sentry:user": "user2"},
  2505. },
  2506. project_id=self.project.id,
  2507. )
  2508. self.store_event(
  2509. data={
  2510. "fingerprint": ["put-me-in-group1"],
  2511. "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
  2512. "tags": {"sentry:user": "user1"},
  2513. },
  2514. project_id=self.project.id,
  2515. )
  2516. self.store_event(
  2517. data={
  2518. "fingerprint": ["put-me-in-group1"],
  2519. "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
  2520. "tags": {"sentry:user": "user1"},
  2521. },
  2522. project_id=self.project.id,
  2523. )
  2524. # Test group with no users, which can return a null count
  2525. group3 = self.store_event(
  2526. data={
  2527. "fingerprint": ["put-me-in-group3"],
  2528. "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
  2529. },
  2530. project_id=self.project.id,
  2531. ).group
  2532. self.run_test(
  2533. "is:unresolved",
  2534. [self.group2, self.group1, group3],
  2535. None,
  2536. sort_by="user",
# Change the date range to bust the cache
  2538. date_from=self.base_datetime - timedelta(days=29),
  2539. )
  2540. def test_sort_priority(self):
  2541. self.run_test(
  2542. "is:unresolved",
  2543. [self.group1, self.group2],
  2544. None,
  2545. sort_by="priority",
  2546. date_from=self.base_datetime - timedelta(days=30),
  2547. )
  2548. def test_cursor(self):
  2549. group3 = self.store_event(
  2550. data={
  2551. "fingerprint": ["put-me-in-group3"],
  2552. "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
  2553. "tags": {"sentry:user": "user2"},
  2554. },
  2555. project_id=self.project.id,
  2556. ).group
  2557. group4 = self.store_event(
  2558. data={
  2559. "fingerprint": ["put-me-in-group7"],
  2560. "timestamp": iso_format(self.base_datetime + timedelta(days=2)),
  2561. "tags": {"sentry:user": "user2"},
  2562. },
  2563. project_id=self.project.id,
  2564. ).group
  2565. results = self.run_test("is:unresolved", [group4], 4, limit=1, count_hits=True)
  2566. results = self.run_test(
  2567. "is:unresolved", [group3], 4, limit=1, cursor=results.next, count_hits=True
  2568. )
  2569. results = self.run_test(
  2570. "is:unresolved", [group4], 4, limit=1, cursor=results.prev, count_hits=True
  2571. )
  2572. self.run_test(
  2573. "is:unresolved", [group3, self.group1], 4, limit=2, cursor=results.next, count_hits=True
  2574. )
  2575. def test_rechecking(self):
  2576. self.group2.status = GroupStatus.RESOLVED
  2577. self.group2.substatus = None
  2578. self.group2.save()
  2579. # Explicitly avoid calling `store_group` here. This means that Clickhouse will still see
  2580. # this group as `UNRESOLVED` and it will be returned in the snuba results. This group
  2581. # should still be filtered out by our recheck.
  2582. self.run_test("is:unresolved", [self.group1], None)