test_backend.py

import uuid
from datetime import datetime, timedelta
from hashlib import md5
from unittest import mock

import pytest
import pytz
from django.utils import timezone

from sentry import options
from sentry.api.issue_search import convert_query_values, issue_search_config, parse_search_query
from sentry.exceptions import InvalidSearchQuery
from sentry.models import (
    Environment,
    Group,
    GroupAssignee,
    GroupBookmark,
    GroupEnvironment,
    GroupHistoryStatus,
    GroupStatus,
    GroupSubscription,
    Integration,
    record_group_history,
)
from sentry.models.groupowner import GroupOwner
from sentry.search.snuba.backend import (
    CdcEventsDatasetSnubaSearchBackend,
    EventsDatasetSnubaSearchBackend,
)
from sentry.search.snuba.executors import InvalidQueryForExecutor
from sentry.testutils import SnubaTestCase, TestCase, xfail_if_not_postgres
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.testutils.helpers.faux import Any
from sentry.types.issues import GroupType
from sentry.utils.snuba import SENTRY_SNUBA_MAP, Dataset, SnubaError, get_snuba_column_name


def date_to_query_format(date):
    return date.strftime("%Y-%m-%dT%H:%M:%S")

class SharedSnubaTest(TestCase, SnubaTestCase):
    def build_search_filter(self, query, projects=None, user=None, environments=None):
        user = user if user is not None else self.user
        projects = projects if projects is not None else [self.project]
        return convert_query_values(parse_search_query(query), projects, user, environments)

    def make_query(
        self,
        projects=None,
        search_filter_query=None,
        environments=None,
        sort_by="date",
        limit=None,
        count_hits=False,
        date_from=None,
        date_to=None,
        cursor=None,
    ):
        search_filters = []
        projects = projects if projects is not None else [self.project]
        if search_filter_query is not None:
            search_filters = self.build_search_filter(
                search_filter_query, projects, environments=environments
            )

        kwargs = {}
        if limit is not None:
            kwargs["limit"] = limit

        return self.backend.query(
            projects,
            search_filters=search_filters,
            environments=environments,
            count_hits=count_hits,
            sort_by=sort_by,
            date_from=date_from,
            date_to=date_to,
            cursor=cursor,
            **kwargs,
        )

    def store_event(self, data, *args, **kwargs):
        event = super().store_event(data, *args, **kwargs)
        environment_name = data.get("environment")
        if environment_name:
            GroupEnvironment.objects.filter(
                group_id=event.group_id,
                environment__name=environment_name,
                first_seen__gt=event.datetime,
            ).update(first_seen=event.datetime)
        return event

class EventsSnubaSearchTest(SharedSnubaTest):
    @property
    def backend(self):
        return EventsDatasetSnubaSearchBackend()

    def setUp(self):
        super().setUp()
        self.base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc)

        event1_timestamp = iso_format(self.base_datetime - timedelta(days=21))
        self.event1 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "a" * 32,
                "message": "foo. Also, this message is intended to be greater than 256 characters so that we can put some unique string identifier after that point in the string. The purpose of this is in order to verify we are using snuba to search messages instead of Postgres (postgres truncates at 256 characters and clickhouse does not). santryrox.",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "event1@example.com"},
                "timestamp": event1_timestamp,
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        )
        self.event3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "c" * 32,
                "message": "group1",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "event3@example.com"},
                "timestamp": iso_format(self.base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        )

        self.group1 = Group.objects.get(id=self.event1.group.id)
        assert self.group1.id == self.event1.group.id
        assert self.group1.id == self.event3.group.id
        assert self.group1.first_seen == self.event1.datetime
        assert self.group1.last_seen == self.event3.datetime

        self.group1.times_seen = 5
        self.group1.status = GroupStatus.UNRESOLVED
        self.group1.update(type=GroupType.ERROR.value)
        self.group1.save()
        self.store_group(self.group1)

        self.event2 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "event_id": "b" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
                "message": "bar",
                "stacktrace": {"frames": [{"module": "group2"}]},
                "environment": "staging",
                "tags": {
                    "server": "example.com",
                    "url": "http://example.com",
                    "sentry:user": "event2@example.com",
                },
            },
            project_id=self.project.id,
        )

        self.group2 = Group.objects.get(id=self.event2.group.id)
        assert self.group2.id == self.event2.group.id
        assert self.group2.first_seen == self.group2.last_seen == self.event2.datetime

        self.group2.status = GroupStatus.RESOLVED
        self.group2.times_seen = 10
        self.group2.update(type=GroupType.ERROR.value)
        self.group2.save()
        self.store_group(self.group2)

        GroupBookmark.objects.create(user=self.user, group=self.group2, project=self.group2.project)
        GroupAssignee.objects.create(user=self.user, group=self.group2, project=self.group2.project)
        GroupSubscription.objects.create(
            user=self.user, group=self.group1, project=self.group1.project, is_active=True
        )
        GroupSubscription.objects.create(
            user=self.user, group=self.group2, project=self.group2.project, is_active=False
        )

        self.environments = {
            "production": self.event1.get_environment(),
            "staging": self.event2.get_environment(),
        }

    def set_up_multi_project(self):
        self.project2 = self.create_project(organization=self.project.organization)
        self.event_p2 = self.store_event(
            data={
                "event_id": "a" * 32,
                "fingerprint": ["put-me-in-groupP2"],
                "timestamp": iso_format(self.base_datetime - timedelta(days=21)),
                "message": "foo",
                "stacktrace": {"frames": [{"module": "group_p2"}]},
                "tags": {"server": "example.com"},
                "environment": "production",
            },
            project_id=self.project2.id,
        )

        self.group_p2 = Group.objects.get(id=self.event_p2.group.id)
        self.group_p2.times_seen = 6
        self.group_p2.last_seen = self.base_datetime - timedelta(days=1)
        self.group_p2.save()
        self.store_group(self.group_p2)

    def create_group_with_integration_external_issue(self, environment="production"):
        event = self.store_event(
            data={
                "fingerprint": ["linked_group1"],
                "event_id": uuid.uuid4().hex,
                "timestamp": iso_format(self.base_datetime),
                "environment": environment,
            },
            project_id=self.project.id,
        )
        integration = Integration.objects.create(provider="example", name="Example")
        integration.add_organization(event.group.organization, self.user)
        self.create_integration_external_issue(
            group=event.group,
            integration=integration,
            key="APP-123",
        )
        return event.group

    def create_group_with_platform_external_issue(self, environment="production"):
        event = self.store_event(
            data={
                "fingerprint": ["linked_group2"],
                "event_id": uuid.uuid4().hex,
                "timestamp": iso_format(self.base_datetime),
                "environment": environment,
            },
            project_id=self.project.id,
        )
        self.create_platform_external_issue(
            group=event.group,
            service_type="sentry-app",
            display_name="App#issue-1",
            web_url="https://example.com/app/issues/1",
        )
        return event.group

    def run_test_query_in_syntax(
        self, query, expected_groups, expected_negative_groups=None, environments=None
    ):
        results = self.make_query(search_filter_query=query, environments=environments)
        sort_key = lambda result: result.id
        assert sorted(results, key=sort_key) == sorted(expected_groups, key=sort_key)

        if expected_negative_groups is not None:
            results = self.make_query(search_filter_query=f"!{query}")
            assert sorted(results, key=sort_key) == sorted(expected_negative_groups, key=sort_key)

    def test_query(self):
        results = self.make_query(search_filter_query="foo")
        assert set(results) == {self.group1}

        results = self.make_query(search_filter_query="bar")
        assert set(results) == {self.group2}

    def test_query_multi_project(self):
        self.set_up_multi_project()
        results = self.make_query([self.project, self.project2], search_filter_query="foo")
        assert set(results) == {self.group1, self.group_p2}

    def test_query_with_environment(self):
        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="foo"
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="bar"
        )
        assert set(results) == set()

        results = self.make_query(
            environments=[self.environments["staging"]], search_filter_query="bar"
        )
        assert set(results) == {self.group2}

    def test_query_for_text_in_long_message(self):
        results = self.make_query(
            [self.project],
            environments=[self.environments["production"]],
            search_filter_query="santryrox",
        )
        assert set(results) == {self.group1}

    def test_multi_environments(self):
        self.set_up_multi_project()
        results = self.make_query(
            [self.project, self.project2],
            environments=[self.environments["production"], self.environments["staging"]],
        )
        assert set(results) == {self.group1, self.group2, self.group_p2}

    def test_query_with_environment_multi_project(self):
        self.set_up_multi_project()
        results = self.make_query(
            [self.project, self.project2],
            environments=[self.environments["production"]],
            search_filter_query="foo",
        )
        assert set(results) == {self.group1, self.group_p2}

        results = self.make_query(
            [self.project, self.project2],
            environments=[self.environments["production"]],
            search_filter_query="bar",
        )
        assert set(results) == set()

    def test_sort(self):
        results = self.make_query(sort_by="date")
        assert list(results) == [self.group1, self.group2]

        results = self.make_query(sort_by="new")
        assert list(results) == [self.group2, self.group1]

        results = self.make_query(sort_by="freq")
        assert list(results) == [self.group1, self.group2]

        results = self.make_query(sort_by="priority")
        assert list(results) == [self.group1, self.group2]

        results = self.make_query(sort_by="user")
        assert list(results) == [self.group1, self.group2]

    def test_sort_with_environment(self):
        for dt in [
            self.group1.first_seen + timedelta(days=1),
            self.group1.first_seen + timedelta(days=2),
            self.group1.last_seen + timedelta(days=1),
        ]:
            self.store_event(
                data={
                    "fingerprint": ["put-me-in-group2"],
                    "timestamp": iso_format(dt),
                    "stacktrace": {"frames": [{"module": "group2"}]},
                    "environment": "production",
                    "message": "group2",
                },
                project_id=self.project.id,
            )

        results = self.make_query(environments=[self.environments["production"]], sort_by="date")
        assert list(results) == [self.group2, self.group1]

        results = self.make_query(environments=[self.environments["production"]], sort_by="new")
        assert list(results) == [self.group2, self.group1]

        results = self.make_query(environments=[self.environments["production"]], sort_by="freq")
        assert list(results) == [self.group2, self.group1]

        results = self.make_query(
            environments=[self.environments["production"]], sort_by="priority"
        )
        assert list(results) == [self.group2, self.group1]

        results = self.make_query(environments=[self.environments["production"]], sort_by="user")
        assert list(results) == [self.group1, self.group2]

    def test_status(self):
        results = self.make_query(search_filter_query="is:unresolved")
        assert set(results) == {self.group1}

        results = self.make_query(search_filter_query="is:resolved")
        assert set(results) == {self.group2}

        event_3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group3"],
                "event_id": "c" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            },
            project_id=self.project.id,
        )
        group_3 = event_3.group
        group_3.status = GroupStatus.MUTED
        group_3.save()

        self.run_test_query_in_syntax(
            "status:[unresolved, resolved]", [self.group1, self.group2], [group_3]
        )
        self.run_test_query_in_syntax(
            "status:[resolved, muted]", [self.group2, group_3], [self.group1]
        )

    def test_category(self):
        with self.feature("organizations:performance-issues"):
            results = self.make_query(search_filter_query="issue.category:error")
            assert set(results) == {self.group1, self.group2}

        event_3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group3"],
                "event_id": "c" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            },
            project_id=self.project.id,
        )
        group_3 = event_3.group
        group_3.update(type=GroupType.PERFORMANCE_N_PLUS_ONE.value)

        with self.feature("organizations:performance-issues"):
            results = self.make_query(search_filter_query="issue.category:performance")
            assert set(results) == {group_3}

        with self.feature("organizations:performance-issues"):
            results = self.make_query(search_filter_query="issue.category:[error, performance]")
            assert set(results) == {self.group1, self.group2, group_3}

        with pytest.raises(InvalidSearchQuery):
            with self.feature("organizations:performance-issues"):
                self.make_query(search_filter_query="issue.category:hellboy")

    def test_not_perf_category(self):
        with self.feature("organizations:performance-issues"):
            results = self.make_query(search_filter_query="issue.category:error foo")
            assert set(results) == {self.group1}

        with self.feature("organizations:performance-issues"):
            not_results = self.make_query(search_filter_query="!issue.category:performance foo")
            assert set(not_results) == {self.group1}

    def test_type(self):
        with self.feature("organizations:performance-issues"):
            results = self.make_query(search_filter_query="issue.type:error")
            assert set(results) == {self.group1, self.group2}

        event_3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group3"],
                "event_id": "c" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
                "type": GroupType.PERFORMANCE_N_PLUS_ONE.value,
            },
            project_id=self.project.id,
        )
        group_3 = event_3.group
        group_3.update(type=GroupType.PERFORMANCE_N_PLUS_ONE.value)

        with self.feature("organizations:performance-issues"):
            results = self.make_query(search_filter_query="issue.type:performance_n_plus_one")
            assert set(results) == {group_3}

        event_4 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group4"],
                "event_id": "d" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            },
            project_id=self.project.id,
        )
        group_4 = event_4.group
        group_4.update(type=GroupType.PERFORMANCE_SLOW_SPAN.value)

        with self.feature("organizations:performance-issues"):
            results = self.make_query(search_filter_query="issue.type:performance_slow_span")
            assert set(results) == {group_4}

        with self.feature("organizations:performance-issues"):
            results = self.make_query(
                search_filter_query="issue.type:[performance_slow_span, performance_n_plus_one, error]"
            )
            assert set(results) == {self.group1, self.group2, group_3, group_4}

        with pytest.raises(InvalidSearchQuery):
            with self.feature("organizations:performance-issues"):
                self.make_query(search_filter_query="issue.type:performance_i_dont_exist")

    def test_status_with_environment(self):
        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="is:unresolved"
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["staging"]], search_filter_query="is:resolved"
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="is:resolved"
        )
        assert set(results) == set()

    def test_tags(self):
        results = self.make_query(search_filter_query="environment:staging")
        assert set(results) == {self.group2}

        results = self.make_query(search_filter_query="environment:example.com")
        assert set(results) == set()

        results = self.make_query(search_filter_query="has:environment")
        assert set(results) == {self.group2, self.group1}

        results = self.make_query(search_filter_query="environment:staging server:example.com")
        assert set(results) == {self.group2}

        results = self.make_query(search_filter_query='url:"http://example.com"')
        assert set(results) == {self.group2}

        results = self.make_query(search_filter_query="environment:staging has:server")
        assert set(results) == {self.group2}

        results = self.make_query(search_filter_query="environment:staging server:bar.example.com")
        assert set(results) == set()

    def test_tags_with_environment(self):
        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="server:example.com"
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["staging"]], search_filter_query="server:example.com"
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["staging"]], search_filter_query="has:server"
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query='url:"http://example.com"',
        )
        assert set(results) == set()

        results = self.make_query(
            environments=[self.environments["staging"]],
            search_filter_query='url:"http://example.com"',
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["staging"]],
            search_filter_query="server:bar.example.com",
        )
        assert set(results) == set()

    def test_bookmarked_by(self):
        results = self.make_query(search_filter_query="bookmarks:%s" % self.user.username)
        assert set(results) == {self.group2}

    def test_bookmarked_by_in_syntax(self):
        self.run_test_query_in_syntax(
            f"bookmarks:[{self.user.username}]", [self.group2], [self.group1]
        )

        user_2 = self.create_user()
        GroupBookmark.objects.create(user=user_2, group=self.group1, project=self.group2.project)
        self.run_test_query_in_syntax(
            f"bookmarks:[{self.user.username}, {user_2.username}]", [self.group2, self.group1], []
        )

    def test_bookmarked_by_with_environment(self):
        results = self.make_query(
            environments=[self.environments["staging"]],
            search_filter_query="bookmarks:%s" % self.user.username,
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="bookmarks:%s" % self.user.username,
        )
        assert set(results) == set()

    def test_search_filter_query_with_custom_priority_tag(self):
        priority = "high"
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.group2.first_seen + timedelta(days=1)),
                "stacktrace": {"frames": [{"module": "group2"}]},
                "message": "group2",
                "tags": {"priority": priority},
            },
            project_id=self.project.id,
        )

        results = self.make_query(search_filter_query="priority:%s" % priority)
        assert set(results) == {self.group2}

    def test_search_filter_query_with_custom_priority_tag_and_priority_sort(self):
        priority = "high"
        for i in range(1, 3):
            self.store_event(
                data={
                    "fingerprint": ["put-me-in-group1"],
                    "timestamp": iso_format(self.group2.last_seen + timedelta(days=i)),
                    "stacktrace": {"frames": [{"module": "group1"}]},
                    "message": "group1",
                    "tags": {"priority": priority},
                },
                project_id=self.project.id,
            )
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.group2.last_seen + timedelta(days=2)),
                "stacktrace": {"frames": [{"module": "group2"}]},
                "message": "group2",
                "tags": {"priority": priority},
            },
            project_id=self.project.id,
        )

        results = self.make_query(search_filter_query="priority:%s" % priority, sort_by="priority")
        assert list(results) == [self.group1, self.group2]

    def test_search_tag_overlapping_with_internal_fields(self):
        # Using a tag of email overlaps with the promoted user.email column in events.
        # We don't want to bypass public schema limits in issue search.
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.group2.first_seen + timedelta(days=1)),
                "stacktrace": {"frames": [{"module": "group2"}]},
                "message": "group2",
                "tags": {"email": "tags@example.com"},
            },
            project_id=self.project.id,
        )

        results = self.make_query(search_filter_query="email:tags@example.com")
        assert set(results) == {self.group2}

    def test_project(self):
        results = self.make_query([self.create_project(name="other")])
        assert set(results) == set()

    def test_pagination(self):
        for options_set in [
            {"snuba.search.min-pre-snuba-candidates": None},
            {"snuba.search.min-pre-snuba-candidates": 500},
        ]:
            with self.options(options_set):
                results = self.backend.query([self.project], limit=1, sort_by="date")
                assert set(results) == {self.group1}
                assert not results.prev.has_results
                assert results.next.has_results

                results = self.backend.query(
                    [self.project], cursor=results.next, limit=1, sort_by="date"
                )
                assert set(results) == {self.group2}
                assert results.prev.has_results
                assert not results.next.has_results

                # note: previous cursor
                results = self.backend.query(
                    [self.project], cursor=results.prev, limit=1, sort_by="date"
                )
                assert set(results) == {self.group1}
                assert results.prev.has_results
                assert results.next.has_results

                # note: previous cursor, paging too far into 0 results
                results = self.backend.query(
                    [self.project], cursor=results.prev, limit=1, sort_by="date"
                )
                assert set(results) == set()
                assert not results.prev.has_results
                assert results.next.has_results

                results = self.backend.query(
                    [self.project], cursor=results.next, limit=1, sort_by="date"
                )
                assert set(results) == {self.group1}
                assert results.prev.has_results
                assert results.next.has_results

                results = self.backend.query(
                    [self.project], cursor=results.next, limit=1, sort_by="date"
                )
                assert set(results) == {self.group2}
                assert results.prev.has_results
                assert not results.next.has_results

                results = self.backend.query(
                    [self.project], cursor=results.next, limit=1, sort_by="date"
                )
                assert set(results) == set()
                assert results.prev.has_results
                assert not results.next.has_results

    def test_pagination_with_environment(self):
        for dt in [
            self.group1.first_seen + timedelta(days=1),
            self.group1.first_seen + timedelta(days=2),
            self.group1.last_seen + timedelta(days=1),
        ]:
            self.store_event(
                data={
                    "fingerprint": ["put-me-in-group2"],
                    "timestamp": iso_format(dt),
                    "environment": "production",
                    "message": "group2",
                    "stacktrace": {"frames": [{"module": "group2"}]},
                },
                project_id=self.project.id,
            )

        results = self.backend.query(
            [self.project],
            environments=[self.environments["production"]],
            sort_by="date",
            limit=1,
            count_hits=True,
        )
        assert list(results) == [self.group2]
        assert results.hits == 2

        results = self.backend.query(
            [self.project],
            environments=[self.environments["production"]],
            sort_by="date",
            limit=1,
            cursor=results.next,
            count_hits=True,
        )
        assert list(results) == [self.group1]
        assert results.hits == 2

        results = self.backend.query(
            [self.project],
            environments=[self.environments["production"]],
            sort_by="date",
            limit=1,
            cursor=results.next,
            count_hits=True,
        )
        assert list(results) == []
        assert results.hits == 2

    def test_age_filter(self):
        results = self.make_query(
            search_filter_query="firstSeen:>=%s" % date_to_query_format(self.group2.first_seen)
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            search_filter_query="firstSeen:<=%s"
            % date_to_query_format(self.group1.first_seen + timedelta(minutes=1))
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            search_filter_query="firstSeen:>=%s firstSeen:<=%s"
            % (
                date_to_query_format(self.group1.first_seen),
                date_to_query_format(self.group1.first_seen + timedelta(minutes=1)),
            )
        )
        assert set(results) == {self.group1}

    def test_age_filter_with_environment(self):
        # add time instead to make it greater than or less than as needed.
        group1_first_seen = GroupEnvironment.objects.get(
            environment=self.environments["production"], group=self.group1
        ).first_seen

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="firstSeen:>=%s" % date_to_query_format(group1_first_seen),
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="firstSeen:<=%s" % date_to_query_format(group1_first_seen),
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="firstSeen:>%s" % date_to_query_format(group1_first_seen),
        )
        assert set(results) == set()

        self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "timestamp": iso_format(group1_first_seen + timedelta(days=1)),
                "message": "group1",
                "stacktrace": {"frames": [{"module": "group1"}]},
                "environment": "development",
            },
            project_id=self.project.id,
        )

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="firstSeen:>%s" % date_to_query_format(group1_first_seen),
        )
        assert set(results) == set()

        results = self.make_query(
            environments=[Environment.objects.get(name="development")],
            search_filter_query="firstSeen:>%s" % date_to_query_format(group1_first_seen),
        )
        assert set(results) == {self.group1}

    def test_times_seen_filter(self):
        results = self.make_query([self.project], search_filter_query="times_seen:2")
        assert set(results) == {self.group1}

        results = self.make_query([self.project], search_filter_query="times_seen:>=2")
        assert set(results) == {self.group1}

        results = self.make_query([self.project], search_filter_query="times_seen:<=1")
        assert set(results) == {self.group2}

    def test_last_seen_filter(self):
        results = self.make_query(
            search_filter_query="lastSeen:>=%s" % date_to_query_format(self.group1.last_seen)
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            search_filter_query="lastSeen:>=%s lastSeen:<=%s"
            % (
                date_to_query_format(self.group1.last_seen),
                date_to_query_format(self.group1.last_seen + timedelta(minutes=1)),
            )
        )
        assert set(results) == {self.group1}

    def test_last_seen_filter_with_environment(self):
        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="lastSeen:>=%s" % date_to_query_format(self.group1.last_seen),
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="lastSeen:<=%s" % date_to_query_format(self.group1.last_seen),
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="lastSeen:>%s" % date_to_query_format(self.group1.last_seen),
        )
        assert set(results) == set()

        self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "timestamp": iso_format(self.group1.last_seen + timedelta(days=1)),
                "message": "group1",
                "stacktrace": {"frames": [{"module": "group1"}]},
                "environment": "development",
            },
            project_id=self.project.id,
        )

        self.group1.update(last_seen=self.group1.last_seen + timedelta(days=1))

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="lastSeen:>%s" % date_to_query_format(self.group1.last_seen),
        )
        assert set(results) == set()

        results = self.make_query(
            environments=[Environment.objects.get(name="development")],
            search_filter_query="lastSeen:>%s" % date_to_query_format(self.group1.last_seen),
        )
        assert set(results) == set()

        results = self.make_query(
            environments=[Environment.objects.get(name="development")],
            search_filter_query="lastSeen:>=%s" % date_to_query_format(self.group1.last_seen),
        )
        assert set(results) == {self.group1}

    def test_date_filter(self):
        results = self.make_query(
            date_from=self.event2.datetime,
            search_filter_query="timestamp:>=%s" % date_to_query_format(self.event2.datetime),
        )
        assert set(results) == {self.group1, self.group2}

        results = self.make_query(
            date_to=self.event1.datetime + timedelta(minutes=1),
            search_filter_query="timestamp:<=%s"
            % date_to_query_format(self.event1.datetime + timedelta(minutes=1)),
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            date_from=self.event1.datetime,
            date_to=self.event2.datetime + timedelta(minutes=1),
            search_filter_query="timestamp:>=%s timestamp:<=%s"
            % (
                date_to_query_format(self.event1.datetime),
                date_to_query_format(self.event2.datetime + timedelta(minutes=1)),
            ),
        )
        assert set(results) == {self.group1, self.group2}

        # Test with `Z` utc marker, should be equivalent
        results = self.make_query(
            date_from=self.event1.datetime,
            date_to=self.event2.datetime + timedelta(minutes=1),
            search_filter_query="timestamp:>=%s timestamp:<=%s"
            % (
                date_to_query_format(self.event1.datetime) + "Z",
                date_to_query_format(self.event2.datetime + timedelta(minutes=1)) + "Z",
            ),
        )
        assert set(results) == {self.group1, self.group2}

    def test_date_filter_with_environment(self):
        results = self.backend.query(
            [self.project],
            environments=[self.environments["production"]],
            date_from=self.event2.datetime,
        )
        assert set(results) == {self.group1}

        results = self.backend.query(
            [self.project],
            environments=[self.environments["production"]],
            date_to=self.event1.datetime + timedelta(minutes=1),
        )
        assert set(results) == {self.group1}

        results = self.backend.query(
            [self.project],
            environments=[self.environments["staging"]],
            date_from=self.event1.datetime,
            date_to=self.event2.datetime + timedelta(minutes=1),
        )
        assert set(results) == {self.group2}

    def test_linked(self):
        linked_group1 = self.create_group_with_integration_external_issue()
        linked_group2 = self.create_group_with_platform_external_issue()

        results = self.make_query(search_filter_query="is:unlinked")
        assert set(results) == {self.group1, self.group2}

        results = self.make_query(search_filter_query="is:linked")
        assert set(results) == {linked_group1, linked_group2}

    def test_linked_with_only_integration_external_issue(self):
        linked_group = self.create_group_with_integration_external_issue()

        results = self.make_query(search_filter_query="is:unlinked")
        assert set(results) == {self.group1, self.group2}

        results = self.make_query(search_filter_query="is:linked")
        assert set(results) == {linked_group}

    def test_linked_with_only_platform_external_issue(self):
        linked_group = self.create_group_with_platform_external_issue()

        results = self.make_query(search_filter_query="is:unlinked")
        assert set(results) == {self.group1, self.group2}

        results = self.make_query(search_filter_query="is:linked")
        assert set(results) == {linked_group}

    def test_linked_with_environment(self):
        linked_group1 = self.create_group_with_integration_external_issue(environment="production")
        linked_group2 = self.create_group_with_platform_external_issue(environment="staging")

        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="is:unlinked"
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["staging"]], search_filter_query="is:unlinked"
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="is:linked"
        )
        assert set(results) == {linked_group1}

        results = self.make_query(
            environments=[self.environments["staging"]], search_filter_query="is:linked"
        )
        assert set(results) == {linked_group2}

    def test_unassigned(self):
        results = self.make_query(search_filter_query="is:unassigned")
        assert set(results) == {self.group1}

        results = self.make_query(search_filter_query="is:assigned")
        assert set(results) == {self.group2}

    def test_unassigned_with_environment(self):
        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="is:unassigned"
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            environments=[self.environments["staging"]], search_filter_query="is:assigned"
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["production"]], search_filter_query="is:assigned"
        )
        assert set(results) == set()

    def test_assigned_to(self):
        results = self.make_query(search_filter_query="assigned:%s" % self.user.username)
        assert set(results) == {self.group2}

        # test team assignee
        ga = GroupAssignee.objects.get(
            user=self.user, group=self.group2, project=self.group2.project
        )
        ga.update(team=self.team, user=None)
        assert GroupAssignee.objects.get(id=ga.id).user is None

        results = self.make_query(search_filter_query="assigned:%s" % self.user.username)
        assert set(results) == {self.group2}

        # test when there should be no results
        other_user = self.create_user()
        results = self.make_query(search_filter_query="assigned:%s" % other_user.username)
        assert set(results) == set()

        owner = self.create_user()
        self.create_member(
            organization=self.project.organization, user=owner, role="owner", teams=[]
        )

        # test that owners don't see results for all teams
        results = self.make_query(search_filter_query="assigned:%s" % owner.username)
        assert set(results) == set()

    def test_assigned_to_in_syntax(self):
        group_3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group3"],
                "event_id": "c" * 32,
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            },
            project_id=self.project.id,
        ).group
        group_3.status = GroupStatus.MUTED
        group_3.save()
        other_user = self.create_user()

        self.run_test_query_in_syntax(
            f"assigned:[{self.user.username}, {other_user.username}]",
            [self.group2],
            [self.group1, group_3],
        )

        GroupAssignee.objects.create(project=self.project, group=group_3, user=other_user)
        self.run_test_query_in_syntax(
            f"assigned:[{self.user.username}, {other_user.username}]",
            [self.group2, group_3],
            [self.group1],
        )
        self.run_test_query_in_syntax(
            f"assigned:[#{self.team.slug}, {other_user.username}]",
            [group_3],
            [self.group1, self.group2],
        )

        ga_2 = GroupAssignee.objects.get(
            user=self.user, group=self.group2, project=self.group2.project
        )
        ga_2.update(team=self.team, user=None)

        self.run_test_query_in_syntax(
            f"assigned:[{self.user.username}, {other_user.username}]",
            [self.group2, group_3],
            [self.group1],
        )
        self.run_test_query_in_syntax(
            f"assigned:[#{self.team.slug}, {other_user.username}]",
            [self.group2, group_3],
            [self.group1],
        )
        self.run_test_query_in_syntax(
            f"assigned:[me, none, {other_user.username}]",
            [self.group1, self.group2, group_3],
            [],
        )

    def test_assigned_or_suggested_in_syntax(self):
        Group.objects.all().delete()
        group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=180)),
                "fingerprint": ["group-1"],
            },
            project_id=self.project.id,
        ).group
        group1 = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=185)),
                "fingerprint": ["group-2"],
            },
            project_id=self.project.id,
        ).group
        group2 = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=190)),
                "fingerprint": ["group-3"],
            },
            project_id=self.project.id,
        ).group
        assigned_group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=195)),
                "fingerprint": ["group-4"],
            },
            project_id=self.project.id,
        ).group
        assigned_to_other_group = self.store_event(
            data={
                "timestamp": iso_format(before_now(seconds=195)),
                "fingerprint": ["group-5"],
            },
            project_id=self.project.id,
        ).group

        self.run_test_query_in_syntax(
            "assigned_or_suggested:[me]",
            [],
            [group, group1, group2, assigned_group, assigned_to_other_group],
        )

        GroupOwner.objects.create(
            group=assigned_to_other_group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=self.user.id,
        )
        GroupOwner.objects.create(
            group=group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=self.user.id,
        )
        self.run_test_query_in_syntax(
            "assigned_or_suggested:[me]",
            [group, assigned_to_other_group],
            [group1, group2, assigned_group],
        )

        # Once assigned_to_other_group is assigned to other_user, it should no longer show up in
        # an assigned_or_suggested search for anyone but other_user (they are now the only owner).
        other_user = self.create_user("other@user.com", is_superuser=False)
        GroupAssignee.objects.create(
            group=assigned_to_other_group,
            project=self.project,
            user=other_user,
        )
        self.run_test_query_in_syntax(
            "assigned_or_suggested:[me]",
            [group],
            [group1, group2, assigned_group, assigned_to_other_group],
        )
        self.run_test_query_in_syntax(
            f"assigned_or_suggested:[{other_user.email}]",
            [assigned_to_other_group],
            [group, group1, group2, assigned_group],
        )

        GroupAssignee.objects.create(group=assigned_group, project=self.project, user=self.user)
        self.run_test_query_in_syntax(
            f"assigned_or_suggested:[{self.user.email}]",
            [assigned_group, group],
        )

        GroupOwner.objects.create(
            group=group,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=self.team.id,
            user_id=None,
        )
        self.run_test_query_in_syntax(
            f"assigned_or_suggested:[#{self.team.slug}]",
            [group],
        )
        self.run_test_query_in_syntax(
            "assigned_or_suggested:[me, none]",
            [group, group1, group2, assigned_group],
            [assigned_to_other_group],
        )

        not_me = self.create_user(email="notme@sentry.io")
        GroupOwner.objects.create(
            group=group2,
            project=self.project,
            organization=self.organization,
            type=0,
            team_id=None,
            user_id=not_me.id,
        )
        self.run_test_query_in_syntax(
            "assigned_or_suggested:[me, none]",
            [group, group1, assigned_group],
            [assigned_to_other_group, group2],
        )

        GroupOwner.objects.filter(group=group, user=self.user).delete()
        self.run_test_query_in_syntax(
            f"assigned_or_suggested:[me, none, #{self.team.slug}]",
            [group, group1, assigned_group],
            [assigned_to_other_group, group2],
        )
        self.run_test_query_in_syntax(
            f"assigned_or_suggested:[me, none, #{self.team.slug}, {not_me.email}]",
            [group, group1, assigned_group, group2],
            [assigned_to_other_group],
        )

    def test_assigned_to_with_environment(self):
        results = self.make_query(
            environments=[self.environments["staging"]],
            search_filter_query="assigned:%s" % self.user.username,
        )
        assert set(results) == {self.group2}

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="assigned:%s" % self.user.username,
        )
        assert set(results) == set()

    def test_subscribed_by(self):
        results = self.make_query(
            [self.group1.project], search_filter_query="subscribed:%s" % self.user.username
        )
        assert set(results) == {self.group1}

    def test_subscribed_by_in_syntax(self):
        self.run_test_query_in_syntax(
            f"subscribed:[{self.user.username}]", [self.group1], [self.group2]
        )

        user_2 = self.create_user()
        GroupSubscription.objects.create(
            user=user_2, group=self.group2, project=self.project, is_active=True
        )
        self.run_test_query_in_syntax(
            f"subscribed:[{self.user.username}, {user_2.username}]", [self.group1, self.group2], []
        )

    def test_subscribed_by_with_environment(self):
        results = self.make_query(
            [self.group1.project],
            environments=[self.environments["production"]],
            search_filter_query="subscribed:%s" % self.user.username,
        )
        assert set(results) == {self.group1}

        results = self.make_query(
            [self.group1.project],
            environments=[self.environments["staging"]],
            search_filter_query="subscribed:%s" % self.user.username,
        )
        assert set(results) == set()

    @mock.patch("sentry.search.snuba.executors.bulk_raw_query")
    def test_snuba_not_called_optimization(self, query_mock):
        assert self.make_query(search_filter_query="status:unresolved").results == [self.group1]
        assert not query_mock.called

        assert (
            self.make_query(
                search_filter_query="last_seen:>%s" % date_to_query_format(timezone.now()),
                sort_by="date",
            ).results
            == []
        )
        assert query_mock.called

    @mock.patch("sentry.issues.search.SnubaQueryParams")
    @mock.patch("sentry.search.snuba.executors.bulk_raw_query")
    def test_optimized_aggregates(self, bulk_raw_query_mock, snuba_query_params_mock):
        # TODO this test is annoyingly fragile and breaks in hard-to-see ways
        # any time anything about the snuba query changes
        bulk_raw_query_mock.return_value = [{"data": [], "totals": {"total": 0}}]

        DEFAULT_LIMIT = 100
        chunk_growth = options.get("snuba.search.chunk-growth-rate")
        limit = int(DEFAULT_LIMIT * chunk_growth)

        common_args = {
            "arrayjoin": None,
            "dataset": Dataset.Discover,
            "start": Any(datetime),
            "end": Any(datetime),
            "filter_keys": {
                "project_id": [self.project.id],
                "group_id": [self.group1.id, self.group2.id],
            },
            "referrer": "search",
            "groupby": ["group_id"],
            "conditions": [
                [["positionCaseInsensitive", ["message", "'foo'"]], "!=", 0],
                ["type", "!=", "transaction"],
            ],
            "selected_columns": [],
            "limit": limit,
            "offset": 0,
            "totals": True,
            "turbo": False,
            "sample": 1,
            "condition_resolver": get_snuba_column_name,
        }

        self.make_query(search_filter_query="status:unresolved")
        assert not snuba_query_params_mock.called

        self.make_query(
            search_filter_query="last_seen:>=%s foo" % date_to_query_format(timezone.now()),
            sort_by="date",
        )
        assert snuba_query_params_mock.called

        snuba_query_params_mock.call_args[1]["aggregations"].sort()
        assert snuba_query_params_mock.call_args == mock.call(
            orderby=["-last_seen", "group_id"],
            aggregations=[
                ["multiply(toUInt64(max(timestamp)), 1000)", "", "last_seen"],
                ["uniq", "group_id", "total"],
            ],
            having=[["last_seen", ">=", Any(int)]],
            **common_args,
        )

        self.make_query(search_filter_query="foo", sort_by="priority")
        snuba_query_params_mock.call_args[1]["aggregations"].sort()
        assert snuba_query_params_mock.call_args == mock.call(
            orderby=["-priority", "group_id"],
            aggregations=[
                ["count()", "", "times_seen"],
                ["multiply(toUInt64(max(timestamp)), 1000)", "", "last_seen"],
                ["toUInt64(plus(multiply(log(times_seen), 600), last_seen))", "", "priority"],
                ["uniq", "group_id", "total"],
            ],
            having=[],
            **common_args,
        )

        self.make_query(search_filter_query="times_seen:5 foo", sort_by="freq")
        snuba_query_params_mock.call_args[1]["aggregations"].sort()
        assert snuba_query_params_mock.call_args == mock.call(
            orderby=["-times_seen", "group_id"],
            aggregations=[
                ["count()", "", "times_seen"],
                ["uniq", "group_id", "total"],
            ],
            having=[["times_seen", "=", 5]],
            **common_args,
        )

        self.make_query(search_filter_query="foo", sort_by="user")
        snuba_query_params_mock.call_args[1]["aggregations"].sort()
        assert snuba_query_params_mock.call_args == mock.call(
            orderby=["-user_count", "group_id"],
            aggregations=[
                ["uniq", "group_id", "total"],
                ["uniq", "tags[sentry:user]", "user_count"],
            ],
            having=[],
            **common_args,
        )


    @mock.patch("sentry.search.snuba.executors.bulk_raw_query")
    def test_reduce_bulk_results_none_total(self, bulk_raw_query_mock):
        bulk_raw_query_mock.return_value = [
            {"data": [], "totals": {"total": None}},
            {"data": [], "totals": {"total": None}},
        ]

        assert (
            self.make_query(
                search_filter_query="last_seen:>%s" % date_to_query_format(timezone.now()),
                sort_by="date",
            ).results
            == []
        )
        assert bulk_raw_query_mock.called

    @mock.patch("sentry.search.snuba.executors.bulk_raw_query")
    def test_reduce_bulk_results_none_data(self, bulk_raw_query_mock):
        bulk_raw_query_mock.return_value = [
            {"data": None, "totals": {"total": 0}},
            {"data": None, "totals": {"total": 0}},
        ]

        assert (
            self.make_query(
                search_filter_query="last_seen:>%s" % date_to_query_format(timezone.now()),
                sort_by="date",
            ).results
            == []
        )
        assert bulk_raw_query_mock.called

    def test_pre_and_post_filtering(self):
        prev_max_pre = options.get("snuba.search.max-pre-snuba-candidates")
        options.set("snuba.search.max-pre-snuba-candidates", 1)
        try:
            # normal queries work as expected
            results = self.make_query(search_filter_query="foo")
            assert set(results) == {self.group1}

            results = self.make_query(search_filter_query="bar")
            assert set(results) == {self.group2}

            # no candidate matches in Sentry, immediately return empty paginator
            results = self.make_query(search_filter_query="NO MATCHES IN SENTRY")
            assert set(results) == set()

            # too many candidates, skip pre-filter, requires >1 postfilter queries
            results = self.make_query()
            assert set(results) == {self.group1, self.group2}
        finally:
            options.set("snuba.search.max-pre-snuba-candidates", prev_max_pre)

    def test_optimizer_enabled(self):
        prev_optimizer_enabled = options.get("snuba.search.pre-snuba-candidates-optimizer")
        options.set("snuba.search.pre-snuba-candidates-optimizer", True)
        try:
            results = self.make_query(
                search_filter_query="server:example.com",
                environments=[self.environments["production"]],
            )
            assert set(results) == {self.group1}
        finally:
            options.set("snuba.search.pre-snuba-candidates-optimizer", prev_optimizer_enabled)

    def test_search_out_of_range(self):
        the_date = datetime(2000, 1, 1, 0, 0, 0, tzinfo=pytz.utc)
        results = self.make_query(
            search_filter_query=f"event.timestamp:>{the_date} event.timestamp:<{the_date}",
            date_from=the_date,
            date_to=the_date,
        )
        assert set(results) == set()

    def test_hits_estimate(self):
        # 400 Groups/Events
        # Every 3rd one is Unresolved
        # Every 2nd one has tag match=1
        for i in range(400):
            event = self.store_event(
                data={
                    "event_id": md5(f"event {i}".encode()).hexdigest(),
                    "fingerprint": [f"put-me-in-group{i}"],
                    "timestamp": iso_format(self.base_datetime - timedelta(days=21)),
                    "message": f"group {i} event",
                    "stacktrace": {"frames": [{"module": f"module {i}"}]},
                    "tags": {"match": f"{i % 2}"},
                    "environment": "production",
                },
                project_id=self.project.id,
            )

            group = event.group
            group.times_seen = 5
            group.status = GroupStatus.UNRESOLVED if i % 3 == 0 else GroupStatus.RESOLVED
            group.save()
            self.store_group(group)

        # Sample should estimate there are roughly 66 overall matching groups
        # based on a random sample of 100 (or $sample_size) of the total 200
        # snuba matches, of which 33% should pass the postgres filter.
        with self.options(
            {
                # Too small to pass all django candidates down to snuba
                "snuba.search.max-pre-snuba-candidates": 5,
                "snuba.search.hits-sample-size": 50,
            }
        ):
            first_results = self.make_query(
                search_filter_query="is:unresolved match:1", limit=10, count_hits=True
            )

            # Deliberately do not assert that the value is within some margin
            # of error, as this will fail tests at some rate corresponding to
            # our confidence interval.
            assert first_results.hits > 10

            # When searching for the same tags, we should get the same set of
            # hits as the sampling is based on the hash of the query.
            second_results = self.make_query(
                search_filter_query="is:unresolved match:1", limit=10, count_hits=True
            )
            assert first_results.results == second_results.results

            # When using a different search, we should get a different sample
            # but still should have some hits.
            third_results = self.make_query(
                search_filter_query="is:unresolved match:0", limit=10, count_hits=True
            )
            assert third_results.hits > 10
            assert third_results.results != second_results.results

    def test_regressed_in_release(self):
        # expect no groups within the results since there are no releases
        results = self.make_query(search_filter_query="regressed_in_release:fake")
        assert set(results) == set()

        # expect no groups even though there is a release; since no group regressed in this release
        release_1 = self.create_release()

        results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
        assert set(results) == set()

        # Create a new event so that we get a group in this release
        group = self.store_event(
            data={
                "release": release_1.version,
            },
            project_id=self.project.id,
        ).group

        # Should still be no group since we didn't regress in this release
        results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
        assert set(results) == set()

        record_group_history(group, GroupHistoryStatus.REGRESSED, release=release_1)
        results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
        assert set(results) == {group}

        # Make sure this works correctly with multiple releases
        release_2 = self.create_release()
        group_2 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group9001"],
                "event_id": "a" * 32,
                "release": release_2.version,
            },
            project_id=self.project.id,
        ).group
        record_group_history(group_2, GroupHistoryStatus.REGRESSED, release=release_2)

        results = self.make_query(search_filter_query="regressed_in_release:%s" % release_1.version)
        assert set(results) == {group}
        results = self.make_query(search_filter_query="regressed_in_release:%s" % release_2.version)
        assert set(results) == {group_2}

    def test_first_release(self):
        # expect no groups within the results since there are no releases
        results = self.make_query(search_filter_query="first_release:%s" % "fake")
        assert set(results) == set()

        # expect no groups even though there is a release; since no group
        # is attached to a release
        release_1 = self.create_release(self.project)

        results = self.make_query(search_filter_query="first_release:%s" % release_1.version)
        assert set(results) == set()

        # Create a new event so that we get a group in this release
        group = self.store_event(
            data={
                "fingerprint": ["put-me-in-group9001"],
                "event_id": "a" * 32,
                "message": "hello",
                "environment": "production",
                "tags": {"server": "example.com"},
                "release": release_1.version,
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        ).group

        results = self.make_query(search_filter_query="first_release:%s" % release_1.version)
        assert set(results) == {group}

    def test_first_release_in_syntax(self):
        # expect no groups within the results since there are no releases
        self.run_test_query_in_syntax("first_release:[fake, fake2]", [])

        # expect no groups even though there is a release; since no group
        # is attached to a release
        release_1 = self.create_release(self.project)
        release_2 = self.create_release(self.project)

        self.run_test_query_in_syntax(
            f"first_release:[{release_1.version}, {release_2.version}]", []
        )

        # Create a new event so that we get a group in this release
        group = self.store_event(
            data={
                "fingerprint": ["put-me-in-group9001"],
                "event_id": "a" * 32,
                "message": "hello",
                "environment": "production",
                "tags": {"server": "example.com"},
                "release": release_1.version,
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        ).group

        self.run_test_query_in_syntax(
            f"first_release:[{release_1.version}, {release_2.version}]",
            [group],
            [self.group1, self.group2],
        )

        # Create a new event so that we get a group in this release
        group_2 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group9002"],
                "event_id": "a" * 32,
                "message": "hello",
                "environment": "production",
                "tags": {"server": "example.com"},
                "release": release_2.version,
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        ).group

        self.run_test_query_in_syntax(
            f"first_release:[{release_1.version}, {release_2.version}]",
            [group, group_2],
        )

    def test_first_release_environments(self):
        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query="first_release:fake",
        )
        assert set(results) == set()

        release = self.create_release(self.project)
        group_env = GroupEnvironment.get_or_create(
            group_id=self.group1.id, environment_id=self.environments["production"].id
        )[0]

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query=f"first_release:{release.version}",
        )
        assert set(results) == set()

        group_env.first_release = release
        group_env.save()

        results = self.make_query(
            environments=[self.environments["production"]],
            search_filter_query=f"first_release:{release.version}",
        )
        assert set(results) == {self.group1}

    def test_first_release_environments_in_syntax(self):
        self.run_test_query_in_syntax(
            "first_release:[fake, fake2]",
            [],
            [self.group1, self.group2],
            environments=[self.environments["production"]],
        )

        release = self.create_release(self.project)
        group_1_env = GroupEnvironment.objects.get(
            group_id=self.group1.id, environment_id=self.environments["production"].id
        )
        group_1_env.update(first_release=release)

        self.run_test_query_in_syntax(
            f"first_release:[{release.version}, fake2]",
            [self.group1],
            [self.group2],
            environments=[self.environments["production"]],
        )

        group_2_env = GroupEnvironment.objects.get(
            group_id=self.group2.id, environment_id=self.environments["staging"].id
        )
        group_2_env.update(first_release=release)

        self.run_test_query_in_syntax(
            f"first_release:[{release.version}, fake2]",
            [self.group1, self.group2],
            [],
            environments=[self.environments["production"], self.environments["staging"]],
        )

        # Make sure we don't get duplicate groups
        GroupEnvironment.objects.create(
            group_id=self.group1.id,
            environment_id=self.environments["staging"].id,
            first_release=release,
        )
        self.run_test_query_in_syntax(
            f"first_release:[{release.version}, fake2]",
            [self.group1, self.group2],
            [],
            environments=[self.environments["production"], self.environments["staging"]],
        )

    def test_query_enclosed_in_quotes(self):
        results = self.make_query(search_filter_query='"foo"')
        assert set(results) == {self.group1}

        results = self.make_query(search_filter_query='"bar"')
        assert set(results) == {self.group2}

    @xfail_if_not_postgres("Wildcard searching only supported in Postgres")
    def test_wildcard(self):
        escaped_event = self.store_event(
            data={
                "fingerprint": ["hello-there"],
                "event_id": "f" * 32,
                "message": "somet[hing]",
                "environment": "production",
                "tags": {"server": "example.net"},
                "timestamp": iso_format(self.base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        )

        # Note: Adding in `environment:production` so that we make sure we query
        # in both snuba and postgres
        results = self.make_query(search_filter_query="environment:production so*t")
        assert set(results) == {escaped_event.group}
        # Make sure it's case insensitive
        results = self.make_query(search_filter_query="environment:production SO*t")
        assert set(results) == {escaped_event.group}
        results = self.make_query(search_filter_query="environment:production so*zz")
        assert set(results) == set()
        results = self.make_query(search_filter_query="environment:production [hing]")
        assert set(results) == {escaped_event.group}
        results = self.make_query(search_filter_query="environment:production s*]")
        assert set(results) == {escaped_event.group}
        results = self.make_query(search_filter_query="environment:production server:example.*")
        assert set(results) == {self.group1, escaped_event.group}
        results = self.make_query(search_filter_query="environment:production !server:*net")
        assert set(results) == {self.group1}

        # TODO: Disabling tests that use [] syntax for the moment. Re-enable
        # these if we decide to add back in, or remove if this comment has been
        # here a while.
        # results = self.make_query(
        #     search_filter_query='environment:production [s][of][mz]',
        # )
        # assert set(results) == set([escaped_event.group])

        # results = self.make_query(
        #     search_filter_query='environment:production [z][of][mz]',
        # )
        # assert set(results) == set()

    def test_null_tags(self):
        tag_event = self.store_event(
            data={
                "fingerprint": ["hello-there"],
                "event_id": "f" * 32,
                "message": "something",
                "environment": "production",
                "tags": {"server": "example.net"},
                "timestamp": iso_format(self.base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        )
        no_tag_event = self.store_event(
            data={
                "fingerprint": ["hello-there-2"],
                "event_id": "5" * 32,
                "message": "something",
                "environment": "production",
                "timestamp": iso_format(self.base_datetime),
                "stacktrace": {"frames": [{"module": "group2"}]},
            },
            project_id=self.project.id,
        )

        results = self.make_query(search_filter_query="environment:production !server:*net")
        assert set(results) == {self.group1, no_tag_event.group}

        results = self.make_query(search_filter_query="environment:production server:*net")
        assert set(results) == {tag_event.group}

        results = self.make_query(search_filter_query="environment:production !server:example.net")
        assert set(results) == {self.group1, no_tag_event.group}

        results = self.make_query(search_filter_query="environment:production server:example.net")
        assert set(results) == {tag_event.group}

        results = self.make_query(search_filter_query="environment:production has:server")
        assert set(results) == {self.group1, tag_event.group}

        results = self.make_query(search_filter_query="environment:production !has:server")
        assert set(results) == {no_tag_event.group}

    def test_null_promoted_tags(self):
        tag_event = self.store_event(
            data={
                "fingerprint": ["hello-there"],
                "event_id": "f" * 32,
                "message": "something",
                "environment": "production",
                "tags": {"logger": "csp"},
                "timestamp": iso_format(self.base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        )
        no_tag_event = self.store_event(
            data={
                "fingerprint": ["hello-there-2"],
                "event_id": "5" * 32,
                "message": "something",
                "environment": "production",
                "timestamp": iso_format(self.base_datetime),
                "stacktrace": {"frames": [{"module": "group2"}]},
            },
            project_id=self.project.id,
        )

        results = self.make_query(search_filter_query="environment:production !logger:*sp")
        assert set(results) == {self.group1, no_tag_event.group}

        results = self.make_query(search_filter_query="environment:production logger:*sp")
        assert set(results) == {tag_event.group}

        results = self.make_query(search_filter_query="environment:production !logger:csp")
        assert set(results) == {self.group1, no_tag_event.group}

        results = self.make_query(search_filter_query="environment:production logger:csp")
        assert set(results) == {tag_event.group}

        results = self.make_query(search_filter_query="environment:production has:logger")
        assert set(results) == {tag_event.group}

        results = self.make_query(search_filter_query="environment:production !has:logger")
        assert set(results) == {self.group1, no_tag_event.group}

    def test_sort_multi_project(self):
        self.set_up_multi_project()
        results = self.make_query([self.project, self.project2], sort_by="date")
        assert list(results) == [self.group1, self.group_p2, self.group2]

        results = self.make_query([self.project, self.project2], sort_by="new")
        assert list(results) == [self.group2, self.group_p2, self.group1]

        results = self.make_query([self.project, self.project2], sort_by="freq")
        assert list(results) == [self.group1, self.group_p2, self.group2]

        results = self.make_query([self.project, self.project2], sort_by="priority")
        assert list(results) == [self.group1, self.group2, self.group_p2]

        results = self.make_query([self.project, self.project2], sort_by="user")
        assert list(results) == [self.group1, self.group2, self.group_p2]

    def test_sort_trend(self):
        start = self.group1.first_seen - timedelta(days=1)
        end = before_now(days=1).replace(tzinfo=pytz.utc)
        middle = start + ((end - start) / 2)
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "2" * 32,
                "message": "something",
                "timestamp": iso_format(self.base_datetime),
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "3" * 32,
                "message": "something",
                "timestamp": iso_format(self.base_datetime),
            },
            project_id=self.project.id,
        )

        fewer_events_group = self.store_event(
            data={
                "fingerprint": ["put-me-in-group4"],
                "event_id": "4" * 32,
                "message": "something",
                "timestamp": iso_format(middle - timedelta(days=1)),
            },
            project_id=self.project.id,
        ).group
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group4"],
                "event_id": "5" * 32,
                "message": "something",
                "timestamp": iso_format(middle - timedelta(days=1)),
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group4"],
                "event_id": "6" * 32,
                "message": "something",
                "timestamp": iso_format(self.base_datetime),
            },
            project_id=self.project.id,
        )

        no_before_group = self.store_event(
            data={
                "fingerprint": ["put-me-in-group5"],
                "event_id": "3" * 32,
                "message": "something",
                "timestamp": iso_format(self.base_datetime),
            },
            project_id=self.project.id,
        ).group
        no_after_group = self.store_event(
            data={
                "fingerprint": ["put-me-in-group6"],
                "event_id": "4" * 32,
                "message": "something",
                "timestamp": iso_format(middle - timedelta(days=1)),
            },
            project_id=self.project.id,
        ).group

        self.set_up_multi_project()
        results = self.make_query([self.project], sort_by="trend", date_from=start, date_to=end)
        assert results[:2] == [self.group1, fewer_events_group]
        # These will be arbitrarily ordered since their trend values are all 0
        assert set(results[2:]) == {self.group2, no_before_group, no_after_group}

    def test_in_syntax_is_invalid(self):
        with pytest.raises(InvalidSearchQuery, match='"in" syntax invalid for "is" search'):
            self.make_query(search_filter_query="is:[unresolved, resolved]")

    def test_first_release_any_or_no_environments(self):
        # test scenarios for tickets:
        # SEN-571
        # ISSUE-432
        #
        # given the following setup:
        #
        # groups table:
        # group    first_release
        # A        1
        # B        1
        # C        2
        #
        # groupenvironments table:
        # group    environment    first_release
        # A        staging        1
        # A        production     2
        #
        # when querying by first release, the appropriate set of groups should be displayed:
        #
        #     first_release: 1
        #         env=[]: A, B
        #         env=[production, staging]: A
        #         env=[staging]: A
        #         env=[production]: nothing
        #
        #     first_release: 2
        #         env=[]: A, C
        #         env=[production, staging]: A
        #         env=[staging]: nothing
        #         env=[production]: A

        # create an issue/group whose events occur in 2 distinct environments
        group_a_event_1 = self.store_event(
            data={
                "fingerprint": ["group_a"],
                "event_id": "aaa" + ("1" * 29),
                "environment": "example_staging",
                "release": "release_1",
            },
            project_id=self.project.id,
        )
        group_a_event_2 = self.store_event(
            data={
                "fingerprint": ["group_a"],
                "event_id": "aaa" + ("2" * 29),
                "environment": "example_production",
                "release": "release_2",
            },
            project_id=self.project.id,
        )
        group_a = group_a_event_1.group

        # get the environments for group_a
        prod_env = group_a_event_2.get_environment()
        staging_env = group_a_event_1.get_environment()

        # create an issue/group whose events occur in no environments
        # but will be tied to release release_1
        group_b_event_1 = self.store_event(
            data={
                "fingerprint": ["group_b"],
                "event_id": "bbb" + ("1" * 29),
                "release": "release_1",
            },
            project_id=self.project.id,
        )
        assert group_b_event_1.get_environment().name == ""  # has no environment
        group_b = group_b_event_1.group

        # create an issue/group whose events occur in no environments
        # but will be tied to release release_2
        group_c_event_1 = self.store_event(
            data={
                "fingerprint": ["group_c"],
                "event_id": "ccc" + ("1" * 29),
                "release": "release_2",
            },
            project_id=self.project.id,
        )
        assert group_c_event_1.get_environment().name == ""  # has no environment
        group_c = group_c_event_1.group

        # query by release release_1
        results = self.make_query(search_filter_query="first_release:%s" % "release_1")
        assert set(results) == {group_a, group_b}

        results = self.make_query(
            environments=[staging_env, prod_env],
            search_filter_query="first_release:%s" % "release_1",
        )
        assert set(results) == {group_a}

        results = self.make_query(
            environments=[staging_env], search_filter_query="first_release:%s" % "release_1"
        )
        assert set(results) == {group_a}

        results = self.make_query(
            environments=[prod_env], search_filter_query="first_release:%s" % "release_1"
        )
        assert set(results) == set()

        # query by release release_2
        results = self.make_query(search_filter_query="first_release:%s" % "release_2")
        assert set(results) == {group_a, group_c}

        results = self.make_query(
            environments=[staging_env, prod_env],
            search_filter_query="first_release:%s" % "release_2",
        )
        assert set(results) == {group_a}

        results = self.make_query(
            environments=[staging_env], search_filter_query="first_release:%s" % "release_2"
        )
        assert set(results) == set()

        results = self.make_query(
            environments=[prod_env], search_filter_query="first_release:%s" % "release_2"
        )
        assert set(results) == {group_a}

    def test_all_fields_do_not_error(self):
        # Just a sanity check to make sure that all fields can be successfully
        # searched on without returning type errors and other schema related
        # issues.
        def test_query(query):
            try:
                self.make_query(search_filter_query=query)
            except SnubaError as e:
                self.fail(f"Query {query} errored. Error info: {e}")

        for key in SENTRY_SNUBA_MAP:
            if key in ["project.id", "issue.id", "performance.issue_ids"]:
                continue
            test_query("has:%s" % key)
            test_query("!has:%s" % key)

            if key == "error.handled":
                val = 1
            elif key in issue_search_config.numeric_keys:
                val = "123"
            elif key in issue_search_config.date_keys:
                val = self.base_datetime.isoformat()
            elif key in issue_search_config.boolean_keys:
                val = "true"
            elif key in {"trace.span", "trace.parent_span"}:
                val = "abcdef1234abcdef"
                test_query(f"!{key}:{val}")
            else:
                val = "abadcafedeadbeefdeaffeedabadfeed"
                test_query(f"!{key}:{val}")

            test_query(f"{key}:{val}")
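

# Covers search across both error groups and transaction-based performance groups,
# using the same events dataset backend.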
class EventsTransactionsSnubaSearchTest(SharedSnubaTest):
    @property
    def backend(self):
        return EventsDatasetSnubaSearchBackend()

    def setUp(self):
        super().setUp()
        self.base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc)

        transaction_event_data = {
            "level": "info",
            "message": "ayoo",
            "type": "transaction",
            "culprit": "app/components/events/eventEntries in map",
            "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
        }
        transaction_event_1 = self.store_event(
            data={
                **transaction_event_data,
                "event_id": "a" * 32,
                "timestamp": iso_format(before_now(minutes=1)),
                "start_timestamp": iso_format(before_now(minutes=1)),
                "tags": {"my_tag": 1},
                "fingerprint": [f"{GroupType.PERFORMANCE_SLOW_SPAN.value}-group1"],
            },
            project_id=self.project.id,
        )
        self.perf_group_1 = transaction_event_1.groups[0]

        transaction_event_2 = self.store_event(
            data={
                **transaction_event_data,
                "event_id": "a" * 32,
                "timestamp": iso_format(before_now(minutes=2)),
                "start_timestamp": iso_format(before_now(minutes=2)),
                "tags": {"my_tag": 1},
                "fingerprint": [f"{GroupType.PERFORMANCE_SLOW_SPAN.value}-group2"],
            },
            project_id=self.project.id,
        )
        self.perf_group_2 = transaction_event_2.groups[0]

        error_event_data = {
            "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
            "message": "bar",
            "environment": "staging",
            "tags": {
                "server": "example.com",
                "url": "http://example.com",
                "sentry:user": "event2@example.com",
                "my_tag": 1,
            },
        }
        error_event = self.store_event(
            data={
                **error_event_data,
                "fingerprint": ["put-me-in-error_group_1"],
                "event_id": "c" * 32,
                "stacktrace": {"frames": [{"module": "error_group_1"}]},
            },
            project_id=self.project.id,
        )
        self.error_group_1 = error_event.group
        error_event_2 = self.store_event(
            data={
                **error_event_data,
                "fingerprint": ["put-me-in-error_group_2"],
                "event_id": "d" * 32,
                "stacktrace": {"frames": [{"module": "error_group_2"}]},
            },
            project_id=self.project.id,
        )
        self.error_group_2 = error_event_2.group

    def test_performance_query(self):
        with self.feature("organizations:performance-issues"):
            results = self.make_query(search_filter_query="issue.category:performance my_tag:1")
            assert list(results) == [self.perf_group_1, self.perf_group_2]

        with self.feature("organizations:performance-issues"):
            results = self.make_query(
                search_filter_query="issue.type:[performance_n_plus_one, performance_slow_span] my_tag:1"
            )
            assert list(results) == [self.perf_group_1, self.perf_group_2]

    def test_error_performance_query(self):
        with self.feature("organizations:performance-issues"):
            results = self.make_query(search_filter_query="my_tag:1")
            assert list(results) == [
                self.perf_group_1,
                self.perf_group_2,
                self.error_group_2,
                self.error_group_1,
            ]

        with self.feature("organizations:performance-issues"):
            results = self.make_query(
                search_filter_query="issue.category:[performance, error] my_tag:1"
            )
            assert list(results) == [
                self.perf_group_1,
                self.perf_group_2,
                self.error_group_2,
                self.error_group_1,
            ]

        with self.feature("organizations:performance-issues"):
            results = self.make_query(
                search_filter_query="issue.type:[performance_slow_span, error] my_tag:1"
            )
            assert list(results) == [
                self.perf_group_1,
                self.perf_group_2,
                self.error_group_2,
                self.error_group_1,
            ]

    def test_cursor_performance_issues(self):
        with self.feature("organizations:performance-issues"):
            results = self.make_query(
                projects=[self.project],
                search_filter_query="issue.category:performance my_tag:1",
                sort_by="date",
                limit=1,
                count_hits=True,
            )
            assert list(results) == [self.perf_group_1]
            assert results.hits == 2

        with self.feature("organizations:performance-issues"):
            results = self.make_query(
                projects=[self.project],
                search_filter_query="issue.category:performance my_tag:1",
                sort_by="date",
                limit=1,
                cursor=results.next,
                count_hits=True,
            )
            assert list(results) == [self.perf_group_2]
            assert results.hits == 2

        with self.feature("organizations:performance-issues"):
            results = self.make_query(
                projects=[self.project],
                search_filter_query="issue.category:performance my_tag:1",
                sort_by="date",
                limit=1,
                cursor=results.next,
                count_hits=True,
            )
            assert list(results) == []
            assert results.hits == 2

    def test_perf_issue_search_message_term_queries_postgres(self):
        from django.db.models import Q

        from sentry.utils import snuba

        transaction_name = "im a little tea pot"
        tx = self.store_event(
            data={
                "level": "info",
                "culprit": "app/components/events/eventEntries in map",
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
                "fingerprint": [f"{GroupType.PERFORMANCE_SLOW_SPAN.value}-group12"],
                "event_id": "e" * 32,
                "timestamp": iso_format(self.base_datetime),
                "start_timestamp": iso_format(self.base_datetime),
                "type": "transaction",
                "transaction": transaction_name,
            },
            project_id=self.project.id,
        )
        assert "tea" in tx.search_message
        created_group = tx.groups[0]

        find_group = Group.objects.filter(
            Q(type=GroupType.PERFORMANCE_SLOW_SPAN.value, message__icontains="tea")
        ).first()
        assert created_group == find_group

        result = snuba.raw_query(
            dataset=snuba.Dataset.Transactions,
            start=self.base_datetime - timedelta(hours=1),
            end=self.base_datetime + timedelta(hours=1),
            selected_columns=[
                "event_id",
                "group_ids",
                "transaction_name",
            ],
            groupby=None,
            filter_keys={"project_id": [self.project.id], "event_id": [tx.event_id]},
            referrer="_insert_transaction.verify_transaction",
        )
        assert result["data"][0]["transaction_name"] == transaction_name
        assert result["data"][0]["group_ids"] == [created_group.id]

        with self.feature("organizations:performance-issues"):
            results = self.make_query(search_filter_query="issue.category:performance tea")
            assert set(results) == {created_group}

            results2 = self.make_query(search_filter_query="tea")
            assert set(results2) == {created_group}

        # Without the feature flag the performance group should not be returned
        assert not self.make_query(search_filter_query="issue.category:performance tea")

    def test_search_message_error_and_perf_issues(self):
        tx = self.store_event(
            data={
                "level": "info",
                "culprit": "app/components/events/eventEntries in map",
                "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
                "fingerprint": [f"{GroupType.PERFORMANCE_SLOW_SPAN.value}-group12"],
                "event_id": "e" * 32,
                "timestamp": iso_format(self.base_datetime),
                "start_timestamp": iso_format(self.base_datetime),
                "type": "transaction",
                "transaction": "/api/0/events",
            },
            project_id=self.project.id,
        )
        perf_issue = tx.groups[0]
        assert perf_issue

        error = self.store_event(
            data={
                "fingerprint": ["another-random-group"],
                "event_id": "d" * 32,
                "message": "Uncaught exception on api /api/0/events",
                "environment": "production",
                "tags": {"server": "example.com", "sentry:user": "event3@example.com"},
                "timestamp": iso_format(self.base_datetime),
                "stacktrace": {"frames": [{"module": "group1"}]},
            },
            project_id=self.project.id,
        )
        error_issue = error.group
        assert error_issue
        assert error_issue != perf_issue

        with self.feature("organizations:performance-issues"):
            assert set(self.make_query(search_filter_query="is:unresolved /api/0/events")) == {
                perf_issue,
                error_issue,
            }

        assert set(self.make_query(search_filter_query="/api/0/events")) == {error_issue}
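

# Covers the CDC-backed search executor; `test_rechecking` below relies on group
# status being visible to Clickhouse via the replicated snuba results.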
class CdcEventsSnubaSearchTest(SharedSnubaTest):
    @property
    def backend(self):
        return CdcEventsDatasetSnubaSearchBackend()

    def setUp(self):
        super().setUp()
        self.base_datetime = (datetime.utcnow() - timedelta(days=3)).replace(tzinfo=pytz.utc)

        self.event1 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "event_id": "a" * 32,
                "environment": "production",
                "timestamp": iso_format(self.base_datetime - timedelta(days=21)),
                "tags": {"sentry:user": "user1"},
            },
            project_id=self.project.id,
        )
        self.env1 = self.event1.get_environment()
        self.group1 = self.event1.group
        self.event3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "environment": "staging",
                "timestamp": iso_format(self.base_datetime),
                "tags": {"sentry:user": "user2"},
            },
            project_id=self.project.id,
        )

        self.event2 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.base_datetime - timedelta(days=20)),
                "environment": "staging",
                "tags": {"sentry:user": "user1"},
            },
            project_id=self.project.id,
        )
        self.group2 = self.event2.group
        self.env2 = self.event2.get_environment()
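
    # Helper that runs a query through `make_query` and asserts both the returned
    # groups and the reported hit count.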
    def run_test(
        self,
        search_filter_query,
        expected_groups,
        expected_hits,
        projects=None,
        environments=None,
        sort_by="date",
        limit=None,
        count_hits=False,
        date_from=None,
        date_to=None,
        cursor=None,
    ):
        results = self.make_query(
            projects=projects,
            search_filter_query=search_filter_query,
            environments=environments,
            sort_by=sort_by,
            limit=limit,
            count_hits=count_hits,
            date_from=date_from,
            date_to=date_to,
            cursor=cursor,
        )
        assert list(results) == expected_groups
        assert results.hits == expected_hits
        return results

    def test(self):
        self.run_test("is:unresolved", [self.group1, self.group2], None)

    def test_invalid(self):
        with pytest.raises(InvalidQueryForExecutor):
            self.make_query(search_filter_query="is:unresolved abc:123")

    def test_resolved_group(self):
        self.group2.status = GroupStatus.RESOLVED
        self.group2.save()
        self.store_group(self.group2)

        self.run_test("is:unresolved", [self.group1], None)
        self.run_test("is:resolved", [self.group2], None)
        self.run_test("is:unresolved is:resolved", [], None)

    def test_environment(self):
        self.run_test("is:unresolved", [self.group1], None, environments=[self.env1])
        self.run_test("is:unresolved", [self.group1, self.group2], None, environments=[self.env2])

    def test_sort_times_seen(self):
        self.run_test(
            "is:unresolved",
            [self.group1, self.group2],
            None,
            sort_by="freq",
            date_from=self.base_datetime - timedelta(days=30),
        )

        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.base_datetime - timedelta(days=15)),
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.base_datetime - timedelta(days=10)),
                "tags": {"sentry:user": "user2"},
            },
            project_id=self.project.id,
        )

        self.run_test(
            "is:unresolved",
            [self.group2, self.group1],
            None,
            sort_by="freq",
            # Change the date range to bust the cache
            date_from=self.base_datetime - timedelta(days=29),
        )

    def test_sort_first_seen(self):
        self.run_test(
            "is:unresolved",
            [self.group2, self.group1],
            None,
            sort_by="new",
            date_from=self.base_datetime - timedelta(days=30),
        )

        group3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group3"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
            },
            project_id=self.project.id,
        ).group

        self.run_test(
            "is:unresolved",
            [group3, self.group2, self.group1],
            None,
            sort_by="new",
            # Change the date range to bust the cache
            date_from=self.base_datetime - timedelta(days=29),
        )

    def test_sort_user(self):
        self.run_test(
            "is:unresolved",
            [self.group1, self.group2],
            None,
            sort_by="user",
            date_from=self.base_datetime - timedelta(days=30),
        )

        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
                "tags": {"sentry:user": "user2"},
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group2"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
                "tags": {"sentry:user": "user2"},
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
                "tags": {"sentry:user": "user1"},
            },
            project_id=self.project.id,
        )
        self.store_event(
            data={
                "fingerprint": ["put-me-in-group1"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
                "tags": {"sentry:user": "user1"},
            },
            project_id=self.project.id,
        )
        # Test group with no users, which can return a null count
        group3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group3"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
            },
            project_id=self.project.id,
        ).group

        self.run_test(
            "is:unresolved",
            [self.group2, self.group1, group3],
            None,
            sort_by="user",
            # Change the date range to bust the cache
            date_from=self.base_datetime - timedelta(days=29),
        )

    def test_sort_priority(self):
        self.run_test(
            "is:unresolved",
            [self.group1, self.group2],
            None,
            sort_by="priority",
            date_from=self.base_datetime - timedelta(days=30),
        )

    def test_cursor(self):
        group3 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group3"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=1)),
                "tags": {"sentry:user": "user2"},
            },
            project_id=self.project.id,
        ).group
        group4 = self.store_event(
            data={
                "fingerprint": ["put-me-in-group7"],
                "timestamp": iso_format(self.base_datetime + timedelta(days=2)),
                "tags": {"sentry:user": "user2"},
            },
            project_id=self.project.id,
        ).group

        results = self.run_test("is:unresolved", [group4], 4, limit=1, count_hits=True)
        results = self.run_test(
            "is:unresolved", [group3], 4, limit=1, cursor=results.next, count_hits=True
        )
        results = self.run_test(
            "is:unresolved", [group4], 4, limit=1, cursor=results.prev, count_hits=True
        )
        self.run_test(
            "is:unresolved", [group3, self.group1], 4, limit=2, cursor=results.next, count_hits=True
        )

    def test_rechecking(self):
        self.group2.status = GroupStatus.RESOLVED
        self.group2.save()
        # Explicitly avoid calling `store_group` here. This means that Clickhouse will still see
        # this group as `UNRESOLVED` and it will be returned in the snuba results. This group
        # should still be filtered out by our recheck.
        self.run_test("is:unresolved", [self.group1], None)