# test_tsdb_backend.py

from datetime import datetime, timedelta, timezone
from unittest.mock import patch

import pytz
from snuba_sdk import Limit

from sentry.issues.grouptype import (
    PerformanceNPlusOneGroupType,
    PerformanceRenderBlockingAssetSpanGroupType,
    ProfileBlockedThreadGroupType,
)
from sentry.models import Environment, Group, GroupRelease, Release
from sentry.testutils import SnubaTestCase, TestCase
from sentry.testutils.helpers.datetime import iso_format
from sentry.testutils.performance_issues.store_transaction import PerfIssueTransactionTestMixin
from sentry.testutils.silo import region_silo_test
from sentry.tsdb.base import TSDBModel
from sentry.tsdb.snuba import SnubaTSDB
from sentry.utils.dates import to_datetime, to_timestamp
from sentry.utils.snuba import aliased_query
from tests.sentry.issues.test_utils import SearchIssueTestMixin


def timestamp(d):
    # Round a datetime down to the start of its hour, as a unix timestamp.
    t = int(to_timestamp(d))
    return t - (t % 3600)
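
# For illustration (hypothetical values): timestamp() maps any datetime between
# 10:00:00 and 10:59:59 UTC to the epoch second of 10:00:00, matching the hourly
# buckets that get_range(..., rollup=3600) returns in the tests below.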


def has_shape(data, shape, allow_empty=False):
    """
    Determine if a data object has the provided shape

    At any level, the object in `data` and in `shape` must have the same type.
    A dict is the same shape if all its keys and values have the same shape as the
    key/value in `shape`. The number of keys/values is not relevant.
    A list is the same shape if all its items have the same shape as the value
    in `shape`.
    A tuple is the same shape if it has the same length as `shape` and all the
    values have the same shape as the corresponding value in `shape`.
    Any other object simply has to have the same type.
    If `allow_empty` is set, lists and dicts in `data` will pass even if they are empty.
    """
    if not isinstance(data, type(shape)):
        return False
    if isinstance(data, dict):
        return (
            (allow_empty or len(data) > 0)
            and all(has_shape(k, list(shape.keys())[0]) for k in data.keys())
            and all(has_shape(v, list(shape.values())[0]) for v in data.values())
        )
    elif isinstance(data, list):
        return (allow_empty or len(data) > 0) and all(has_shape(v, shape[0]) for v in data)
    elif isinstance(data, tuple):
        return len(data) == len(shape) and all(
            has_shape(data[i], shape[i]) for i in range(len(data))
        )
    else:
        return True
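

# Minimal sanity checks for the shape contract documented above. These are
# illustrative (the values are hypothetical) and mirror the shapes asserted
# in test_result_shape below.
def test_has_shape_helper():
    assert has_shape({1: [(1, 1.0)]}, {1: [(1, 1.0)]})
    # Dict and list sizes are ignored; only the element shapes matter.
    assert has_shape({5: [(10, 0.5), (20, 2.0)], 6: [(30, 3.0)]}, {1: [(1, 1.0)]})
    # Tuple lengths must match exactly.
    assert not has_shape({1: [(1, 1.0, 1)]}, {1: [(1, 1.0)]})
    # Empty containers fail unless allow_empty is set; note it is not
    # forwarded to nested calls.
    assert not has_shape({}, {1: [(1, 1.0)]})
    assert has_shape({}, {1: [(1, 1.0)]}, allow_empty=True)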


class SnubaTSDBTest(TestCase, SnubaTestCase):
    def setUp(self):
        super().setUp()
        self.db = SnubaTSDB()
        self.now = (datetime.utcnow() - timedelta(hours=4)).replace(
            hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC
        )
        self.proj1 = self.create_project()
        env1 = "test"
        env2 = "dev"
        defaultenv = ""

        release1 = "1" * 10
        release2 = "2" * 10

        self.release1 = Release.objects.create(
            organization_id=self.organization.id, version=release1, date_added=self.now
        )
        self.release1.add_project(self.proj1)
        self.release2 = Release.objects.create(
            organization_id=self.organization.id, version=release2, date_added=self.now
        )
        self.release2.add_project(self.proj1)

        for r in range(0, 14400, 600):  # Every 10 min for 4 hours
            self.store_event(
                data={
                    "event_id": (str(r) * 32)[:32],
                    "message": "message 1",
                    "platform": "python",
                    "fingerprint": [["group-1"], ["group-2"]][
                        (r // 600) % 2
                    ],  # Switch every 10 mins
                    "timestamp": iso_format(self.now + timedelta(seconds=r)),
                    "tags": {
                        "foo": "bar",
                        "baz": "quux",
                        # Switch every 2 hours
                        "environment": [env1, None][(r // 7200) % 3],
                        "sentry:user": f"id:user{r // 3300}",
                    },
                    "user": {
                        # Change every 55 min so some hours have 1 user, some have 2
                        "id": f"user{r // 3300}",
                    },
                    "release": str(r // 3600) * 10,  # 1 per hour
                },
                project_id=self.proj1.id,
            )

        groups = Group.objects.filter(project=self.proj1).order_by("id")
        self.proj1group1 = groups[0]
        self.proj1group2 = groups[1]

        self.env1 = Environment.objects.get(name=env1)
        self.env2 = self.create_environment(name=env2)  # No events
        self.defaultenv = Environment.objects.get(name=defaultenv)

        self.group1release1env1 = GroupRelease.objects.get(
            project_id=self.proj1.id,
            group_id=self.proj1group1.id,
            release_id=self.release1.id,
            environment=env1,
        )
        self.group1release2env1 = GroupRelease.objects.create(
            project_id=self.proj1.id,
            group_id=self.proj1group1.id,
            release_id=self.release2.id,
            environment=env1,
        )
        self.group2release1env1 = GroupRelease.objects.get(
            project_id=self.proj1.id,
            group_id=self.proj1group2.id,
            release_id=self.release1.id,
            environment=env1,
        )

    def test_range_single(self):
        env1 = "test"
        project = self.create_project()
        for r in range(0, 600 * 6 * 4, 300):  # Every 5 min for 4 hours (12 events/hour)
            self.store_event(
                data={
                    "event_id": (str(r) * 32)[:32],
                    "message": "message 1",
                    "platform": "python",
                    "fingerprint": ["group-1"],
                    "timestamp": iso_format(self.now + timedelta(seconds=r)),
                    "tags": {
                        "foo": "bar",
                        "baz": "quux",
                        # Switch every 2 hours
                        "environment": [env1, None][(r // 7200) % 3],
                        "sentry:user": f"id:user{r // 3300}",
                    },
                    "user": {
                        # Change every 55 min so some hours have 1 user, some have 2
                        "id": f"user{r // 3300}",
                    },
                    "release": str(r // 3600) * 10,  # 1 per hour
                },
                project_id=project.id,
            )

        groups = Group.objects.filter(project=project).order_by("id")
        group = groups[0]

        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(TSDBModel.group, [group.id], dts[0], dts[-1], rollup=3600) == {
            group.id: [
                (timestamp(dts[0]), 6 * 2),
                (timestamp(dts[1]), 6 * 2),
                (timestamp(dts[2]), 6 * 2),
                (timestamp(dts[3]), 6 * 2),
            ]
        }

    def test_range_groups(self):
        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(
            TSDBModel.group, [self.proj1group1.id], dts[0], dts[-1], rollup=3600
        ) == {
            self.proj1group1.id: [
                (timestamp(dts[0]), 3),
                (timestamp(dts[1]), 3),
                (timestamp(dts[2]), 3),
                (timestamp(dts[3]), 3),
            ]
        }

        # Multiple groups
        assert self.db.get_range(
            TSDBModel.group,
            [self.proj1group1.id, self.proj1group2.id],
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            self.proj1group1.id: [
                (timestamp(dts[0]), 3),
                (timestamp(dts[1]), 3),
                (timestamp(dts[2]), 3),
                (timestamp(dts[3]), 3),
            ],
            self.proj1group2.id: [
                (timestamp(dts[0]), 3),
                (timestamp(dts[1]), 3),
                (timestamp(dts[2]), 3),
                (timestamp(dts[3]), 3),
            ],
        }

        assert self.db.get_range(TSDBModel.group, [], dts[0], dts[-1], rollup=3600) == {}

    def test_range_releases(self):
        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(
            TSDBModel.release, [self.release1.id], dts[0], dts[-1], rollup=3600
        ) == {
            self.release1.id: [
                (timestamp(dts[0]), 0),
                (timestamp(dts[1]), 6),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }

    def test_range_project(self):
        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(
            TSDBModel.project, [self.proj1.id], dts[0], dts[-1], rollup=3600
        ) == {
            self.proj1.id: [
                (timestamp(dts[0]), 6),
                (timestamp(dts[1]), 6),
                (timestamp(dts[2]), 6),
                (timestamp(dts[3]), 6),
            ]
        }

    def test_range_environment_filter(self):
        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(
            TSDBModel.project,
            [self.proj1.id],
            dts[0],
            dts[-1],
            rollup=3600,
            environment_ids=[self.env1.id],
        ) == {
            self.proj1.id: [
                (timestamp(dts[0]), 6),
                (timestamp(dts[1]), 6),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }

        # No events submitted for env2
        assert self.db.get_range(
            TSDBModel.project,
            [self.proj1.id],
            dts[0],
            dts[-1],
            rollup=3600,
            environment_ids=[self.env2.id],
        ) == {
            self.proj1.id: [
                (timestamp(dts[0]), 0),
                (timestamp(dts[1]), 0),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }

        # Events submitted with no environment should match the default environment
        assert self.db.get_range(
            TSDBModel.project,
            [self.proj1.id],
            dts[0],
            dts[-1],
            rollup=3600,
            environment_ids=[self.defaultenv.id],
        ) == {
            self.proj1.id: [
                (timestamp(dts[0]), 0),
                (timestamp(dts[1]), 0),
                (timestamp(dts[2]), 6),
                (timestamp(dts[3]), 6),
            ]
        }

    def test_range_rollups(self):
        # Daily
        daystart = self.now.replace(hour=0)  # Day buckets start on day boundaries
        dts = [daystart + timedelta(days=i) for i in range(2)]
        assert self.db.get_range(
            TSDBModel.project, [self.proj1.id], dts[0], dts[-1], rollup=86400
        ) == {self.proj1.id: [(timestamp(dts[0]), 24), (timestamp(dts[1]), 0)]}

        # Minutely
        dts = [self.now + timedelta(minutes=i) for i in range(120)]
        # Expect every 10th minute to have a 1, else 0
        expected = [(to_timestamp(d), 1 if i % 10 == 0 else 0) for i, d in enumerate(dts)]
        assert self.db.get_range(
            TSDBModel.project, [self.proj1.id], dts[0], dts[-1], rollup=60
        ) == {self.proj1.id: expected}

    def test_distinct_counts_series_users(self):
        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_distinct_counts_series(
            TSDBModel.users_affected_by_group, [self.proj1group1.id], dts[0], dts[-1], rollup=3600
        ) == {
            self.proj1group1.id: [
                (timestamp(dts[0]), 1),
                (timestamp(dts[1]), 1),
                (timestamp(dts[2]), 1),
                (timestamp(dts[3]), 2),
            ]
        }

        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_distinct_counts_series(
            TSDBModel.users_affected_by_project, [self.proj1.id], dts[0], dts[-1], rollup=3600
        ) == {
            self.proj1.id: [
                (timestamp(dts[0]), 1),
                (timestamp(dts[1]), 2),
                (timestamp(dts[2]), 2),
                (timestamp(dts[3]), 2),
            ]
        }

        assert (
            self.db.get_distinct_counts_series(
                TSDBModel.users_affected_by_group, [], dts[0], dts[-1], rollup=3600
            )
            == {}
        )

    def test_get_distinct_counts_totals_users(self):
        assert self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_group,
            [self.proj1group1.id],
            self.now,
            self.now + timedelta(hours=4),
            rollup=3600,
        ) == {
            self.proj1group1.id: 2  # 2 unique users overall
        }

        assert self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_group,
            [self.proj1group1.id],
            self.now,
            self.now,
            rollup=3600,
        ) == {
            self.proj1group1.id: 1  # Only 1 unique user in the first hour
        }

        assert self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_project,
            [self.proj1.id],
            self.now,
            self.now + timedelta(hours=4),
            rollup=3600,
        ) == {self.proj1.id: 2}

        assert (
            self.db.get_distinct_counts_totals(
                TSDBModel.users_affected_by_group,
                [],
                self.now,
                self.now + timedelta(hours=4),
                rollup=3600,
            )
            == {}
        )

    def test_most_frequent(self):
        assert self.db.get_most_frequent(
            TSDBModel.frequent_issues_by_project,
            [self.proj1.id],
            self.now,
            self.now + timedelta(hours=4),
            rollup=3600,
        ) in [
            {self.proj1.id: [(self.proj1group1.id, 2.0), (self.proj1group2.id, 1.0)]},
            {self.proj1.id: [(self.proj1group2.id, 2.0), (self.proj1group1.id, 1.0)]},
        ]  # Both issues equally frequent

        assert (
            self.db.get_most_frequent(
                TSDBModel.frequent_issues_by_project,
                [],
                self.now,
                self.now + timedelta(hours=4),
                rollup=3600,
            )
            == {}
        )

    def test_frequency_series(self):
        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_frequency_series(
            TSDBModel.frequent_releases_by_group,
            {
                self.proj1group1.id: (self.group1release1env1.id, self.group1release2env1.id),
                self.proj1group2.id: (self.group2release1env1.id,),
            },
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            self.proj1group1.id: [
                (timestamp(dts[0]), {self.group1release1env1.id: 0, self.group1release2env1.id: 0}),
                (timestamp(dts[1]), {self.group1release1env1.id: 3, self.group1release2env1.id: 0}),
                (timestamp(dts[2]), {self.group1release1env1.id: 0, self.group1release2env1.id: 3}),
                (timestamp(dts[3]), {self.group1release1env1.id: 0, self.group1release2env1.id: 0}),
            ],
            self.proj1group2.id: [
                (timestamp(dts[0]), {self.group2release1env1.id: 0}),
                (timestamp(dts[1]), {self.group2release1env1.id: 3}),
                (timestamp(dts[2]), {self.group2release1env1.id: 0}),
                (timestamp(dts[3]), {self.group2release1env1.id: 0}),
            ],
        }

        assert (
            self.db.get_frequency_series(
                TSDBModel.frequent_releases_by_group, {}, dts[0], dts[-1], rollup=3600
            )
            == {}
        )

    def test_result_shape(self):
        """
        Tests that the results from the different TSDB methods have the
        expected format.
        """
        project_id = self.proj1.id
        dts = [self.now + timedelta(hours=i) for i in range(4)]

        results = self.db.get_most_frequent(
            TSDBModel.frequent_issues_by_project, [project_id], dts[0], dts[0]
        )
        assert has_shape(results, {1: [(1, 1.0)]})

        results = self.db.get_most_frequent_series(
            TSDBModel.frequent_issues_by_project, [project_id], dts[0], dts[0]
        )
        assert has_shape(results, {1: [(1, {1: 1.0})]})

        items = {
            # {project_id: (issue_id, issue_id, ...)}
            project_id: (self.proj1group1.id, self.proj1group2.id)
        }
        results = self.db.get_frequency_series(
            TSDBModel.frequent_issues_by_project, items, dts[0], dts[-1]
        )
        assert has_shape(results, {1: [(1, {1: 1})]})

        results = self.db.get_frequency_totals(
            TSDBModel.frequent_issues_by_project, items, dts[0], dts[-1]
        )
        assert has_shape(results, {1: {1: 1}})

        results = self.db.get_range(TSDBModel.project, [project_id], dts[0], dts[-1])
        assert has_shape(results, {1: [(1, 1)]})

        results = self.db.get_distinct_counts_series(
            TSDBModel.users_affected_by_project, [project_id], dts[0], dts[-1]
        )
        assert has_shape(results, {1: [(1, 1)]})

        results = self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_project, [project_id], dts[0], dts[-1]
        )
        assert has_shape(results, {1: 1})

        results = self.db.get_distinct_counts_union(
            TSDBModel.users_affected_by_project, [project_id], dts[0], dts[-1]
        )
        assert has_shape(results, 1)

    def test_calculated_limit(self):
        # The calculated limit should be (number of keys) * (number of rollup
        # buckets in the interval, endpoints inclusive).
        with patch("sentry.tsdb.snuba.raw_snql_query") as snuba:
            # 24h test
            rollup = 3600
            end = self.now
            start = end + timedelta(days=-1, seconds=rollup)
            self.db.get_data(TSDBModel.group, [1, 2, 3, 4, 5], start, end, rollup=rollup)
            assert snuba.call_args.args[0].query.limit == Limit(120)  # 5 keys * 24 buckets

            # 14 day test
            rollup = 86400
            start = end + timedelta(days=-14, seconds=rollup)
            self.db.get_data(TSDBModel.group, [1, 2, 3, 4, 5], start, end, rollup=rollup)
            assert snuba.call_args.args[0].query.limit == Limit(70)  # 5 keys * 14 buckets

            # 1h test
            rollup = 3600
            end = self.now
            start = end + timedelta(hours=-1, seconds=rollup)
            self.db.get_data(TSDBModel.group, [1, 2, 3, 4, 5], start, end, rollup=rollup)
            assert snuba.call_args.args[0].query.limit == Limit(5)  # 5 keys * 1 bucket

    @patch("sentry.utils.snuba.OVERRIDE_OPTIONS", new={"consistent": True})
    def test_tsdb_with_consistent(self):
        with patch("sentry.utils.snuba._apply_cache_and_build_results") as snuba:
            rollup = 3600
            end = self.now
            start = end + timedelta(days=-1, seconds=rollup)
            self.db.get_data(TSDBModel.group, [1, 2, 3, 4, 5], start, end, rollup=rollup)
            assert snuba.call_args.args[0][0][0].query.limit == Limit(120)
            assert snuba.call_args.args[0][0][0].flags.consistent is True


@region_silo_test
class SnubaTSDBGroupPerformanceTest(TestCase, SnubaTestCase, PerfIssueTransactionTestMixin):
    def setUp(self):
        super().setUp()
        self.db = SnubaTSDB()
        self.now = (datetime.utcnow() - timedelta(hours=4)).replace(
            hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC
        )
        self.proj1 = self.create_project()

        self.env1 = Environment.objects.get_or_create(
            organization_id=self.proj1.organization_id, name="test"
        )[0]
        self.env2 = Environment.objects.get_or_create(
            organization_id=self.proj1.organization_id, name="dev"
        )[0]
        defaultenv = ""

        group1_fingerprint = f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group1"
        group2_fingerprint = f"{PerformanceNPlusOneGroupType.type_id}-group2"

        for r in range(0, 14400, 600):  # Every 10 min for 4 hours
            event = self.store_transaction(
                environment=[self.env1.name, None][(r // 7200) % 3],
                project_id=self.proj1.id,
                # Change every 55 min so some hours have 1 user, some have 2
                user_id=f"user{r // 3300}",
                # release_version=str(r // 3600) * 10,  # 1 per hour
                timestamp=self.now + timedelta(seconds=r),
                fingerprint=[group1_fingerprint, group2_fingerprint] if ((r // 600) % 2) else [],
            )

        self.proj1group1 = event.groups[0]
        self.proj1group2 = event.groups[1]
        self.defaultenv = Environment.objects.get(name=defaultenv)

    def test_range_groups_single(self):
        from sentry.eventstore.models import Event
        from sentry.snuba.dataset import Dataset

        now = (datetime.utcnow() - timedelta(days=1)).replace(
            hour=10, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC
        )
        dts = [now + timedelta(hours=i) for i in range(4)]
        project = self.create_project()
        group_fingerprint = f"{PerformanceNPlusOneGroupType.type_id}-group3"

        # Not sure what's going on here, but `times=1,2,3,4` work fine;
        # it fails with anything above 4.
        times = 4
        event_ids = []
        events = []
        for i in range(0, times):
            res = self.store_transaction(
                environment=None,
                project_id=project.id,
                user_id="my_user",
                timestamp=now + timedelta(minutes=i * 10),
                fingerprint=[group_fingerprint],
            )

            grouped_by_project = aliased_query(
                dataset=Dataset.Transactions,
                start=None,
                end=None,
                groupby=None,
                conditions=None,
                filter_keys={"project_id": [project.id], "event_id": [res.event_id]},
                selected_columns=["event_id", "project_id", "group_ids"],
                aggregations=None,
            )
            assert grouped_by_project["data"][0]["event_id"] == res.event_id

            event_from_nodestore = Event(project_id=project.id, event_id=res.event_id)
            assert event_from_nodestore.event_id == res.event_id
            event_ids.append(res.event_id)
            events.append(res)

        group = events[0].groups[0]

        transactions_for_project = aliased_query(
            dataset=Dataset.Transactions,
            start=None,
            end=None,
            groupby=None,
            conditions=None,
            filter_keys={"project_id": [project.id]},
            selected_columns=["project_id", "event_id"],
            aggregations=None,
        )
        assert len(transactions_for_project["data"]) == times

        transactions_by_group = aliased_query(
            dataset=Dataset.Transactions,
            start=None,
            end=None,
            # start=group.first_seen,
            # end=now + timedelta(hours=4),
            groupby=["group_id"],
            conditions=None,
            filter_keys={"project_id": [project.id], "group_id": [group.id]},
            aggregations=[
                ["arrayJoin", ["group_ids"], "group_id"],
                ["count()", "", "times_seen"],
            ],
        )
        assert transactions_by_group["data"][0]["times_seen"] == times  # 1 + (times % 5)

        assert self.db.get_range(
            TSDBModel.group_performance,
            [group.id],
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            group.id: [
                # (timestamp(dts[0]), 1 + (times % 5)),
                (timestamp(dts[0]), times),
                (timestamp(dts[1]), 0),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }

    def test_range_groups_mult(self):
        now = (datetime.utcnow() - timedelta(days=1)).replace(
            hour=10, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC
        )
        dts = [now + timedelta(hours=i) for i in range(4)]
        project = self.create_project()
        group_fingerprint = f"{PerformanceNPlusOneGroupType.type_id}-group4"

        # 11 events, 10 minutes apart: 6 land in the first hour, 5 in the second
        ids = ["a", "b", "c", "d", "e", "f", "1", "2", "3", "4", "5"]
        events = []
        for i, _ in enumerate(ids):
            event = self.store_transaction(
                environment=None,
                project_id=project.id,
                user_id="my_user",
                timestamp=now + timedelta(minutes=i * 10),
                fingerprint=[group_fingerprint],
            )
            events.append(event)
        group = events[0].groups[0]

        assert self.db.get_range(
            TSDBModel.group_performance,
            [group.id],
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            group.id: [
                (timestamp(dts[0]), 6),
                (timestamp(dts[1]), 5),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }

    def test_range_groups_simple(self):
        project = self.create_project()
        now = (datetime.utcnow() - timedelta(days=1)).replace(
            hour=10, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC
        )
        group_fingerprint = f"{PerformanceRenderBlockingAssetSpanGroupType.type_id}-group5"
        # for r in range(0, 14400, 600):  # Every 10 min for 4 hours
        # for r in [1, 2, 3, 4, 5, 6, 7, 8]:
        ids = ["a", "b", "c", "d", "e"]  # , "f"]
        events = []
        for r in ids:
            # for r in range(0, 9, 1):
            event = self.store_transaction(
                environment=None,
                project_id=project.id,
                # change every 55 min so some hours have 1 user, some have 2
                user_id=f"user{r}",
                # release_version=str(r // 3600) * 10,  # 1 per hour
                timestamp=now,
                fingerprint=[group_fingerprint],
            )
            events.append(event)
        group = events[0].groups[0]

        dts = [now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(
            TSDBModel.group_performance,
            [group.id],
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            group.id: [
                (timestamp(dts[0]), len(ids)),
                (timestamp(dts[1]), 0),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }

    def test_range_groups(self):
        dts = [self.now + timedelta(hours=i) for i in range(4)]

        # Multiple groups
        assert self.db.get_range(
            TSDBModel.group_performance,
            [self.proj1group1.id, self.proj1group2.id],
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            self.proj1group1.id: [
                (timestamp(dts[0]), 3),
                (timestamp(dts[1]), 3),
                (timestamp(dts[2]), 3),
                (timestamp(dts[3]), 3),
            ],
            self.proj1group2.id: [
                (timestamp(dts[0]), 3),
                (timestamp(dts[1]), 3),
                (timestamp(dts[2]), 3),
                (timestamp(dts[3]), 3),
            ],
        }

        assert (
            self.db.get_range(TSDBModel.group_performance, [], dts[0], dts[-1], rollup=3600) == {}
        )


@region_silo_test
class SnubaTSDBGroupProfilingTest(TestCase, SnubaTestCase, SearchIssueTestMixin):
    def setUp(self):
        super().setUp()
        self.db = SnubaTSDB()
        self.now = (datetime.utcnow() - timedelta(hours=4)).replace(
            hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC
        )
        self.proj1 = self.create_project()

        self.env1 = Environment.objects.get_or_create(
            organization_id=self.proj1.organization_id, name="test"
        )[0]
        self.env2 = Environment.objects.get_or_create(
            organization_id=self.proj1.organization_id, name="dev"
        )[0]
        defaultenv = ""

        group1_fingerprint = f"{ProfileBlockedThreadGroupType.type_id}-group1"
        group2_fingerprint = f"{ProfileBlockedThreadGroupType.type_id}-group2"

        groups = {}
        for r in range(0, 14400, 600):  # Every 10 min for 4 hours
            event, occurrence, group_info = self.store_search_issue(
                project_id=self.proj1.id,
                # Change every 55 min so some hours have 1 user, some have 2
                user_id=r // 3300,
                fingerprints=[group1_fingerprint] if ((r // 600) % 2) else [group2_fingerprint],
                # release_version=str(r // 3600) * 10,  # 1 per hour
                environment=[self.env1.name, None][(r // 7200) % 3],
                insert_time=self.now + timedelta(seconds=r),
            )
            if group_info:
                groups[group_info.group.id] = group_info.group

        all_groups = list(groups.values())
        self.proj1group1 = all_groups[0]
        self.proj1group2 = all_groups[1]
        self.defaultenv = Environment.objects.get(name=defaultenv)

    def test_range_group_manual_group_time_rollup(self):
        project = self.create_project()

        # These are the only granularities/rollups that can actually be used.
        # Each tuple is (rollup in seconds, bucket width, number of buckets).
        GRANULARITIES = [
            (10, timedelta(seconds=10), 5),
            (60 * 60, timedelta(hours=1), 6),
            (60 * 60 * 24, timedelta(days=1), 15),
        ]

        start = (datetime.now(timezone.utc) - timedelta(days=15)).replace(
            hour=0, minute=0, second=0
        )

        for step, delta, times in GRANULARITIES:
            series = [start + (delta * i) for i in range(times)]
            series_ts = [int(to_timestamp(ts)) for ts in series]

            assert self.db.get_optimal_rollup(series[0], series[-1]) == step
            assert self.db.get_optimal_rollup_series(series[0], end=series[-1], rollup=None) == (
                step,
                series_ts,
            )

            for time_step in series:
                _, _, group_info = self.store_search_issue(
                    project_id=project.id,
                    user_id=0,
                    fingerprints=[f"test_range_group_manual_group_time_rollup-{step}"],
                    environment=None,
                    insert_time=time_step,
                )

            assert self.db.get_range(
                TSDBModel.group_generic,
                [group_info.group.id],
                series[0],
                series[-1],
                rollup=None,
            ) == {group_info.group.id: [(ts, 1) for ts in series_ts]}

    def test_range_groups_mult(self):
        now = (datetime.utcnow() - timedelta(days=1)).replace(
            hour=10, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC
        )
        dts = [now + timedelta(hours=i) for i in range(4)]
        project = self.create_project()
        group_fingerprint = f"{ProfileBlockedThreadGroupType.type_id}-group4"
        groups = []
        # 11 occurrences, 10 minutes apart: 6 land in the first hour, 5 in the second
        for i in range(0, 11):
            _, _, group_info = self.store_search_issue(
                project_id=project.id,
                user_id=0,
                fingerprints=[group_fingerprint],
                environment=None,
                insert_time=now + timedelta(minutes=i * 10),
            )
            if group_info:
                groups.append(group_info.group)

        group = groups[0]
        assert self.db.get_range(
            TSDBModel.group_generic,
            [group.id],
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            group.id: [
                (timestamp(dts[0]), 6),
                (timestamp(dts[1]), 5),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }

    def test_range_groups_simple(self):
        project = self.create_project()
        now = (datetime.utcnow() - timedelta(days=1)).replace(
            hour=10, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC
        )
        group_fingerprint = f"{ProfileBlockedThreadGroupType.type_id}-group5"
        ids = [1, 2, 3, 4, 5]
        groups = []
        for r in ids:
            # for r in range(0, 9, 1):
            event, occurrence, group_info = self.store_search_issue(
                project_id=project.id,
                # change every 55 min so some hours have 1 user, some have 2
                user_id=r,
                fingerprints=[group_fingerprint],
                environment=None,
                # release_version=str(r // 3600) * 10,  # 1 per hour
                insert_time=now,
            )
            if group_info:
                groups.append(group_info.group)

        group = groups[0]
        dts = [now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(
            TSDBModel.group_generic,
            [group.id],
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            group.id: [
                (timestamp(dts[0]), len(ids)),
                (timestamp(dts[1]), 0),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }

    def test_range_groups(self):
        dts = [self.now + timedelta(hours=i) for i in range(4)]

        # Multiple groups
        assert self.db.get_range(
            TSDBModel.group_generic,
            [self.proj1group1.id, self.proj1group2.id],
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            self.proj1group1.id: [
                (timestamp(dts[0]), 3),
                (timestamp(dts[1]), 3),
                (timestamp(dts[2]), 3),
                (timestamp(dts[3]), 3),
            ],
            self.proj1group2.id: [
                (timestamp(dts[0]), 3),
                (timestamp(dts[1]), 3),
                (timestamp(dts[2]), 3),
                (timestamp(dts[3]), 3),
            ],
        }

        assert self.db.get_range(TSDBModel.group_generic, [], dts[0], dts[-1], rollup=3600) == {}

    def test_get_distinct_counts_totals_users(self):
        assert self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_generic_group,
            [self.proj1group1.id],
            self.now,
            self.now + timedelta(hours=4),
            rollup=3600,
        ) == {
            self.proj1group1.id: 5  # 5 unique users overall
        }

        assert self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_generic_group,
            [self.proj1group1.id],
            self.now,
            self.now,
            rollup=3600,
        ) == {
            self.proj1group1.id: 1  # Only 1 unique user in the first hour
        }

        assert (
            self.db.get_distinct_counts_totals(
                TSDBModel.users_affected_by_generic_group,
                [],
                self.now,
                self.now + timedelta(hours=4),
                rollup=3600,
            )
            == {}
        )

    def test_get_sums(self):
        assert self.db.get_sums(
            model=TSDBModel.group_generic,
            keys=[self.proj1group1.id, self.proj1group2.id],
            start=self.now,
            end=self.now + timedelta(hours=4),
        ) == {self.proj1group1.id: 12, self.proj1group2.id: 12}

    def test_get_data_or_conditions_parsed(self):
        """
        Verify that parsing the legacy format with nested OR conditions works.
        """
        conditions = [
            # OR conditions in the legacy format need open and close brackets for precedence;
            # there's some special casing when parsing conditions that specifically handles this
            [
                [["isNull", ["environment"]], "=", 1],
                ["environment", "IN", [self.env1.name]],
            ]
        ]
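        # The nesting above is equivalent to:
        #   isNull(environment) = 1 OR environment IN ('test')
        # which should match every issue stored in setUp, since each was
        # inserted with either no environment or env1 ("test").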
        data1 = self.db.get_data(
            model=TSDBModel.group_generic,
            keys=[self.proj1group1.id, self.proj1group2.id],
            conditions=conditions,
            start=self.now,
            end=self.now + timedelta(hours=4),
        )
        data2 = self.db.get_data(
            model=TSDBModel.group_generic,
            keys=[self.proj1group1.id, self.proj1group2.id],
            start=self.now,
            end=self.now + timedelta(hours=4),
        )

        # The two queries should return the same data, since every group has either
        # environment=None or environment=test, so the condition shouldn't filter anything.
        assert data1 == data2


class AddJitterToSeriesTest(TestCase):
    def setUp(self):
        self.db = SnubaTSDB()

    def run_test(self, end, interval, jitter, expected_start, expected_end):
        end = end.replace(tzinfo=pytz.UTC)
        start = end - interval
        rollup, rollup_series = self.db.get_optimal_rollup_series(start, end)
        series = self.db._add_jitter_to_series(rollup_series, start, rollup, jitter)
        assert to_datetime(series[0]) == expected_start.replace(tzinfo=pytz.UTC)
        assert to_datetime(series[-1]) == expected_end.replace(tzinfo=pytz.UTC)

    def test(self):
        self.run_test(
            end=datetime(2022, 5, 18, 10, 23, 4),
            interval=timedelta(hours=1),
            jitter=5,
            expected_start=datetime(2022, 5, 18, 9, 22, 55),
            expected_end=datetime(2022, 5, 18, 10, 22, 55),
        )
        self.run_test(
            end=datetime(2022, 5, 18, 10, 23, 8),
            interval=timedelta(hours=1),
            jitter=5,
            expected_start=datetime(2022, 5, 18, 9, 23, 5),
            expected_end=datetime(2022, 5, 18, 10, 23, 5),
        )
        # A larger jitter value should produce the same series as the previous case
        self.run_test(
            end=datetime(2022, 5, 18, 10, 23, 8),
            interval=timedelta(hours=1),
            jitter=55,
            expected_start=datetime(2022, 5, 18, 9, 23, 5),
            expected_end=datetime(2022, 5, 18, 10, 23, 5),
        )
        self.run_test(
            end=datetime(2022, 5, 18, 22, 33, 2),
            interval=timedelta(minutes=1),
            jitter=3,
            expected_start=datetime(2022, 5, 18, 22, 31, 53),
            expected_end=datetime(2022, 5, 18, 22, 32, 53),
        )

    def test_empty_series(self):
        assert self.db._add_jitter_to_series([], datetime(2022, 5, 18, 10, 23, 4), 60, 127) == []
        assert self.db._add_jitter_to_series([], datetime(2022, 5, 18, 10, 23, 4), 60, None) == []