12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939 |
- from datetime import UTC, datetime, timedelta
- from unittest.mock import patch
- from snuba_sdk import Limit
- from sentry.issues.grouptype import ProfileFileIOGroupType
- from sentry.models.environment import Environment
- from sentry.models.group import Group
- from sentry.models.grouprelease import GroupRelease
- from sentry.models.release import Release
- from sentry.testutils.cases import SnubaTestCase, TestCase
- from sentry.testutils.helpers.datetime import before_now, iso_format
- from sentry.testutils.silo import region_silo_test
- from sentry.tsdb.base import TSDBModel
- from sentry.tsdb.snuba import SnubaTSDB
- from sentry.utils.dates import to_datetime
- from tests.sentry.issues.test_utils import SearchIssueTestMixin
def timestamp(d):
    """Return ``d``'s POSIX timestamp floored to the start of its hour."""
    return (int(d.timestamp()) // 3600) * 3600
def has_shape(data, shape, allow_empty=False):
    """
    Determine if a data object has the provided shape

    At any level, the object in `data` and in `shape` must have the same type.
    A dict is the same shape if all its keys and values have the same shape as the
    key/value in `shape`. The number of keys/values is not relevant.
    A list is the same shape if all its items have the same shape as the value
    in `shape`
    A tuple is the same shape if it has the same length as `shape` and all the
    values have the same shape as the corresponding value in `shape`
    Any other object simply has to have the same type.
    If `allow_empty` is set, lists and dicts in `data` will pass even if they are empty.
    """
    if not isinstance(data, type(shape)):
        return False
    if isinstance(data, dict):
        # Bug fix: propagate `allow_empty` into the recursive calls. Previously
        # a nested empty container failed even when allow_empty=True, because
        # the recursion silently reset it to the default False.
        key_shape = list(shape.keys())[0]
        value_shape = list(shape.values())[0]
        return (
            (allow_empty or len(data) > 0)
            and all(has_shape(k, key_shape, allow_empty) for k in data.keys())
            and all(has_shape(v, value_shape, allow_empty) for v in data.values())
        )
    elif isinstance(data, list):
        return (allow_empty or len(data) > 0) and all(
            has_shape(v, shape[0], allow_empty) for v in data
        )
    elif isinstance(data, tuple):
        # Tuples are positional: length must match and each slot must match.
        return len(data) == len(shape) and all(
            has_shape(data[i], shape[i], allow_empty) for i in range(len(data))
        )
    else:
        return True
class SnubaTSDBTest(TestCase, SnubaTestCase):
    def setUp(self):
        """Seed Snuba with 4 hours of events (one every 10 minutes), split
        across two groups, two releases and two environments."""
        super().setUp()

        self.db = SnubaTSDB()
        # Align to midnight so hourly/daily rollup buckets line up exactly.
        self.now = before_now(hours=4).replace(hour=0, minute=0, second=0, microsecond=0)
        self.proj1 = self.create_project()
        env1 = "test"
        env2 = "dev"
        defaultenv = ""

        release1 = "1" * 10
        release2 = "2" * 10

        self.release1 = Release.objects.create(
            organization_id=self.organization.id, version=release1, date_added=self.now
        )
        self.release1.add_project(self.proj1)
        self.release2 = Release.objects.create(
            organization_id=self.organization.id, version=release2, date_added=self.now
        )
        self.release2.add_project(self.proj1)

        for r in range(0, 14400, 600):  # Every 10 min for 4 hours
            self.store_event(
                data={
                    "event_id": (str(r) * 32)[:32],
                    "message": "message 1",
                    "platform": "python",
                    "fingerprint": [["group-1"], ["group-2"]][
                        (r // 600) % 2
                    ],  # Switch every 10 mins
                    "timestamp": iso_format(self.now + timedelta(seconds=r)),
                    "tags": {
                        "foo": "bar",
                        "baz": "quux",
                        # Switch every 2 hours
                        # (r // 7200 only reaches 0 or 1 here, so % 3 never hits index 2)
                        "environment": [env1, None][(r // 7200) % 3],
                        "sentry:user": f"id:user{r // 3300}",
                    },
                    "user": {
                        # change every 55 min so some hours have 1 user, some have 2
                        "id": f"user{r // 3300}",
                    },
                    "release": str(r // 3600) * 10,  # 1 per hour,
                },
                project_id=self.proj1.id,
            )

        groups = Group.objects.filter(project=self.proj1).order_by("id")
        self.proj1group1 = groups[0]
        self.proj1group2 = groups[1]

        self.env1 = Environment.objects.get(name=env1)
        self.env2 = self.create_environment(name=env2)  # No events
        self.defaultenv = Environment.objects.get(name=defaultenv)

        # group1 x release1 x env1 is created implicitly by event ingestion.
        self.group1release1env1 = GroupRelease.objects.get(
            project_id=self.proj1.id,
            group_id=self.proj1group1.id,
            release_id=self.release1.id,
            environment=env1,
        )

        # group1 x release2 x env1 does not exist in ingested data, create it.
        self.group1release2env1 = GroupRelease.objects.create(
            project_id=self.proj1.id,
            group_id=self.proj1group1.id,
            release_id=self.release2.id,
            environment=env1,
        )

        self.group2release1env1 = GroupRelease.objects.get(
            project_id=self.proj1.id,
            group_id=self.proj1group2.id,
            release_id=self.release1.id,
            environment=env1,
        )
    def test_range_single(self):
        """Hourly series for a single group receiving one event every
        5 minutes for 4 hours: 12 events per hourly bucket."""
        env1 = "test"
        project = self.create_project()
        for r in range(0, 600 * 6 * 4, 300):  # Every 5 min for 4 hours
            self.store_event(
                data={
                    "event_id": (str(r) * 32)[:32],
                    "message": "message 1",
                    "platform": "python",
                    "fingerprint": ["group-1"],
                    "timestamp": iso_format(self.now + timedelta(seconds=r)),
                    "tags": {
                        "foo": "bar",
                        "baz": "quux",
                        # Switch every 2 hours
                        "environment": [env1, None][(r // 7200) % 3],
                        "sentry:user": f"id:user{r // 3300}",
                    },
                    "user": {
                        # change every 55 min so some hours have 1 user, some have 2
                        "id": f"user{r // 3300}",
                    },
                    "release": str(r // 3600) * 10,  # 1 per hour,
                },
                project_id=project.id,
            )

        groups = Group.objects.filter(project=project).order_by("id")
        group = groups[0]

        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(
            TSDBModel.group,
            [group.id],
            dts[0],
            dts[-1],
            rollup=3600,
            tenant_ids={"referrer": "r", "organization_id": 1234},
        ) == {
            group.id: [
                # Twice the event rate of the setUp fixture, hence 6 * 2.
                (timestamp(dts[0]), 6 * 2),
                (timestamp(dts[1]), 6 * 2),
                (timestamp(dts[2]), 6 * 2),
                (timestamp(dts[3]), 6 * 2),
            ]
        }
- def test_range_groups(self):
- dts = [self.now + timedelta(hours=i) for i in range(4)]
- assert self.db.get_range(
- TSDBModel.group,
- [self.proj1group1.id],
- dts[0],
- dts[-1],
- rollup=3600,
- tenant_ids={"referrer": "r", "organization_id": 1234},
- ) == {
- self.proj1group1.id: [
- (timestamp(dts[0]), 3),
- (timestamp(dts[1]), 3),
- (timestamp(dts[2]), 3),
- (timestamp(dts[3]), 3),
- ]
- }
- # Multiple groups
- assert self.db.get_range(
- TSDBModel.group,
- [self.proj1group1.id, self.proj1group2.id],
- dts[0],
- dts[-1],
- rollup=3600,
- tenant_ids={"referrer": "r", "organization_id": 1234},
- ) == {
- self.proj1group1.id: [
- (timestamp(dts[0]), 3),
- (timestamp(dts[1]), 3),
- (timestamp(dts[2]), 3),
- (timestamp(dts[3]), 3),
- ],
- self.proj1group2.id: [
- (timestamp(dts[0]), 3),
- (timestamp(dts[1]), 3),
- (timestamp(dts[2]), 3),
- (timestamp(dts[3]), 3),
- ],
- }
- assert (
- self.db.get_range(
- TSDBModel.group,
- [],
- dts[0],
- dts[-1],
- rollup=3600,
- tenant_ids={"referrer": "test", "organization_id": 1},
- )
- == {}
- )
- def test_range_releases(self):
- dts = [self.now + timedelta(hours=i) for i in range(4)]
- assert self.db.get_range(
- TSDBModel.release,
- [self.release1.id],
- dts[0],
- dts[-1],
- rollup=3600,
- tenant_ids={"referrer": "r", "organization_id": 1234},
- ) == {
- self.release1.id: [
- (timestamp(dts[0]), 0),
- (timestamp(dts[1]), 6),
- (timestamp(dts[2]), 0),
- (timestamp(dts[3]), 0),
- ]
- }
- def test_range_project(self):
- dts = [self.now + timedelta(hours=i) for i in range(4)]
- assert self.db.get_range(
- TSDBModel.project,
- [self.proj1.id],
- dts[0],
- dts[-1],
- rollup=3600,
- tenant_ids={"referrer": "r", "organization_id": 1234},
- ) == {
- self.proj1.id: [
- (timestamp(dts[0]), 6),
- (timestamp(dts[1]), 6),
- (timestamp(dts[2]), 6),
- (timestamp(dts[3]), 6),
- ]
- }
- def test_range_environment_filter(self):
- dts = [self.now + timedelta(hours=i) for i in range(4)]
- assert self.db.get_range(
- TSDBModel.project,
- [self.proj1.id],
- dts[0],
- dts[-1],
- rollup=3600,
- environment_ids=[self.env1.id],
- tenant_ids={"referrer": "r", "organization_id": 1234},
- ) == {
- self.proj1.id: [
- (timestamp(dts[0]), 6),
- (timestamp(dts[1]), 6),
- (timestamp(dts[2]), 0),
- (timestamp(dts[3]), 0),
- ]
- }
- # No events submitted for env2
- assert self.db.get_range(
- TSDBModel.project,
- [self.proj1.id],
- dts[0],
- dts[-1],
- rollup=3600,
- environment_ids=[self.env2.id],
- tenant_ids={"referrer": "r", "organization_id": 1234},
- ) == {
- self.proj1.id: [
- (timestamp(dts[0]), 0),
- (timestamp(dts[1]), 0),
- (timestamp(dts[2]), 0),
- (timestamp(dts[3]), 0),
- ]
- }
- # Events submitted with no environment should match default environment
- assert self.db.get_range(
- TSDBModel.project,
- [self.proj1.id],
- dts[0],
- dts[-1],
- rollup=3600,
- environment_ids=[self.defaultenv.id],
- tenant_ids={"referrer": "r", "organization_id": 1234},
- ) == {
- self.proj1.id: [
- (timestamp(dts[0]), 0),
- (timestamp(dts[1]), 0),
- (timestamp(dts[2]), 6),
- (timestamp(dts[3]), 6),
- ]
- }
    def test_range_rollups(self):
        """Daily and minutely rollups of the project series."""
        # Daily
        daystart = self.now.replace(hour=0)  # day buckets start on day boundaries
        dts = [daystart + timedelta(days=i) for i in range(2)]
        assert self.db.get_range(
            TSDBModel.project,
            [self.proj1.id],
            dts[0],
            dts[-1],
            rollup=86400,
            tenant_ids={"referrer": "r", "organization_id": 1234},
        ) == {self.proj1.id: [(timestamp(dts[0]), 24), (timestamp(dts[1]), 0)]}

        # Minutely
        dts = [self.now + timedelta(minutes=i) for i in range(120)]
        # Expect every 10th minute to have a 1, else 0
        # NOTE: d.timestamp() is a float here, but each value is whole-valued
        # (self.now is midnight-aligned) and compares equal to the int buckets.
        expected = [(d.timestamp(), 1 if i % 10 == 0 else 0) for i, d in enumerate(dts)]
        assert self.db.get_range(
            TSDBModel.project,
            [self.proj1.id],
            dts[0],
            dts[-1],
            rollup=60,
            tenant_ids={"referrer": "r", "organization_id": 1234},
        ) == {self.proj1.id: expected}
- def test_distinct_counts_series_users(self):
- dts = [self.now + timedelta(hours=i) for i in range(4)]
- assert self.db.get_distinct_counts_series(
- TSDBModel.users_affected_by_group,
- [self.proj1group1.id],
- dts[0],
- dts[-1],
- rollup=3600,
- tenant_ids={"referrer": "r", "organization_id": 1234},
- ) == {
- self.proj1group1.id: [
- (timestamp(dts[0]), 1),
- (timestamp(dts[1]), 1),
- (timestamp(dts[2]), 1),
- (timestamp(dts[3]), 2),
- ]
- }
- dts = [self.now + timedelta(hours=i) for i in range(4)]
- assert self.db.get_distinct_counts_series(
- TSDBModel.users_affected_by_project,
- [self.proj1.id],
- dts[0],
- dts[-1],
- rollup=3600,
- tenant_ids={"referrer": "r", "organization_id": 1234},
- ) == {
- self.proj1.id: [
- (timestamp(dts[0]), 1),
- (timestamp(dts[1]), 2),
- (timestamp(dts[2]), 2),
- (timestamp(dts[3]), 2),
- ]
- }
- assert (
- self.db.get_distinct_counts_series(
- TSDBModel.users_affected_by_group,
- [],
- dts[0],
- dts[-1],
- rollup=3600,
- tenant_ids={"referrer": "r", "organization_id": 1234},
- )
- == {}
- )
    def get_distinct_counts_totals_users(self):
        # NOTE(review): this method is missing the `test_` prefix, so pytest
        # never collects or runs it. The asserted totals also look stale
        # relative to the current setUp fixture, which rotates through 5
        # distinct users over the 4h window (cf. the generic-group variant
        # asserting 5) — verify the expected values before renaming it.
        assert self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_group,
            [self.proj1group1.id],
            self.now,
            self.now + timedelta(hours=4),
            rollup=3600,
            tenant_ids={"referrer": "r", "organization_id": 1234},
        ) == {
            self.proj1group1.id: 2  # 2 unique users overall
        }
        assert self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_group,
            [self.proj1group1.id],
            self.now,
            self.now,
            rollup=3600,
            tenant_ids={"referrer": "r", "organization_id": 1234},
        ) == {
            self.proj1group1.id: 1  # Only 1 unique user in the first hour
        }
        assert self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_project,
            [self.proj1.id],
            self.now,
            self.now + timedelta(hours=4),
            rollup=3600,
            tenant_ids={"referrer": "r", "organization_id": 1234},
        ) == {self.proj1.id: 2}
        # No keys requested -> empty mapping.
        assert (
            self.db.get_distinct_counts_totals(
                TSDBModel.users_affected_by_group,
                [],
                self.now,
                self.now + timedelta(hours=4),
                rollup=3600,
                tenant_ids={"referrer": "r", "organization_id": 1234},
            )
            == {}
        )
- def test_most_frequent(self):
- assert self.db.get_most_frequent(
- TSDBModel.frequent_issues_by_project,
- [self.proj1.id],
- self.now,
- self.now + timedelta(hours=4),
- rollup=3600,
- tenant_ids={"referrer": "r", "organization_id": 1234},
- ) in [
- {self.proj1.id: [(self.proj1group1.id, 2.0), (self.proj1group2.id, 1.0)]},
- {self.proj1.id: [(self.proj1group2.id, 2.0), (self.proj1group1.id, 1.0)]},
- ] # Both issues equally frequent
- assert (
- self.db.get_most_frequent(
- TSDBModel.frequent_issues_by_project,
- [],
- self.now,
- self.now + timedelta(hours=4),
- rollup=3600,
- tenant_ids={"referrer": "r", "organization_id": 1234},
- )
- == {}
- )
    def test_frequency_series(self):
        """Per-(group, release) hourly frequency series: release1's events all
        fall in hour 1, and group1's release2 events in hour 2."""
        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_frequency_series(
            TSDBModel.frequent_releases_by_group,
            {
                # {group_id: (group_release_id, ...)}
                self.proj1group1.id: (self.group1release1env1.id, self.group1release2env1.id),
                self.proj1group2.id: (self.group2release1env1.id,),
            },
            dts[0],
            dts[-1],
            rollup=3600,
            tenant_ids={"referrer": "r", "organization_id": 1234},
        ) == {
            self.proj1group1.id: [
                (timestamp(dts[0]), {self.group1release1env1.id: 0, self.group1release2env1.id: 0}),
                (timestamp(dts[1]), {self.group1release1env1.id: 3, self.group1release2env1.id: 0}),
                (timestamp(dts[2]), {self.group1release1env1.id: 0, self.group1release2env1.id: 3}),
                (timestamp(dts[3]), {self.group1release1env1.id: 0, self.group1release2env1.id: 0}),
            ],
            self.proj1group2.id: [
                (timestamp(dts[0]), {self.group2release1env1.id: 0}),
                (timestamp(dts[1]), {self.group2release1env1.id: 3}),
                (timestamp(dts[2]), {self.group2release1env1.id: 0}),
                (timestamp(dts[3]), {self.group2release1env1.id: 0}),
            ],
        }
        # No keys requested -> empty mapping.
        assert (
            self.db.get_frequency_series(
                TSDBModel.frequent_releases_by_group,
                {},
                dts[0],
                dts[-1],
                rollup=3600,
                tenant_ids={"referrer": "r", "organization_id": 1234},
            )
            == {}
        )
    def test_result_shape(self):
        """
        Tests that the results from the different TSDB methods have the
        expected format.
        """
        project_id = self.proj1.id
        dts = [self.now + timedelta(hours=i) for i in range(4)]

        # {key: [(member_id, score), ...]}
        results = self.db.get_most_frequent(
            TSDBModel.frequent_issues_by_project,
            [project_id],
            dts[0],
            dts[0],
            tenant_ids={"referrer": "r", "organization_id": 1234},
        )
        assert has_shape(results, {1: [(1, 1.0)]})

        # {key: [(timestamp, {member_id: score}), ...]}
        results = self.db.get_most_frequent_series(
            TSDBModel.frequent_issues_by_project,
            [project_id],
            dts[0],
            dts[0],
            tenant_ids={"referrer": "r", "organization_id": 1234},
        )
        assert has_shape(results, {1: [(1, {1: 1.0})]})

        items = {
            # {project_id: (issue_id, issue_id, ...)}
            project_id: (self.proj1group1.id, self.proj1group2.id)
        }
        results = self.db.get_frequency_series(
            TSDBModel.frequent_issues_by_project,
            items,
            dts[0],
            dts[-1],
            tenant_ids={"referrer": "r", "organization_id": 1234},
        )
        assert has_shape(results, {1: [(1, {1: 1})]})

        results = self.db.get_frequency_totals(
            TSDBModel.frequent_issues_by_project,
            items,
            dts[0],
            dts[-1],
            tenant_ids={"referrer": "r", "organization_id": 1234},
        )
        assert has_shape(results, {1: {1: 1}})

        # {key: [(timestamp, count), ...]}
        results = self.db.get_range(
            TSDBModel.project,
            [project_id],
            dts[0],
            dts[-1],
            tenant_ids={"referrer": "r", "organization_id": 1234},
        )
        assert has_shape(results, {1: [(1, 1)]})

        results = self.db.get_distinct_counts_series(
            TSDBModel.users_affected_by_project,
            [project_id],
            dts[0],
            dts[-1],
            tenant_ids={"referrer": "r", "organization_id": 1234},
        )
        assert has_shape(results, {1: [(1, 1)]})

        # {key: count}
        results = self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_project,
            [project_id],
            dts[0],
            dts[-1],
            tenant_ids={"referrer": "r", "organization_id": 1234},
        )
        assert has_shape(results, {1: 1})

        # The union variant returns a bare scalar count, not a mapping.
        results = self.db.get_distinct_counts_union(
            TSDBModel.users_affected_by_project,
            [project_id],
            dts[0],
            dts[-1],
            tenant_ids={"referrer": "r", "organization_id": 1234},
        )
        assert has_shape(results, 1)
    def test_calculated_limit(self):
        """The generated Snuba query limit scales with the request: it equals
        the number of keys times the number of rollup buckets in the window."""
        with patch("sentry.tsdb.snuba.raw_snql_query") as snuba:
            # 24h test
            rollup = 3600
            end = self.now
            start = end + timedelta(days=-1, seconds=rollup)
            self.db.get_data(TSDBModel.group, [1, 2, 3, 4, 5], start, end, rollup=rollup)
            # 5 keys * 24 hourly buckets
            assert snuba.call_args.args[0].query.limit == Limit(120)

            # 14 day test
            rollup = 86400
            start = end + timedelta(days=-14, seconds=rollup)
            self.db.get_data(TSDBModel.group, [1, 2, 3, 4, 5], start, end, rollup=rollup)
            # 5 keys * 14 daily buckets
            assert snuba.call_args.args[0].query.limit == Limit(70)

            # 1h test
            rollup = 3600
            end = self.now
            start = end + timedelta(hours=-1, seconds=rollup)
            self.db.get_data(TSDBModel.group, [1, 2, 3, 4, 5], start, end, rollup=rollup)
            # 5 keys * 1 hourly bucket
            assert snuba.call_args.args[0].query.limit == Limit(5)
- @patch("sentry.utils.snuba.OVERRIDE_OPTIONS", new={"consistent": True})
- def test_tsdb_with_consistent(self):
- with patch("sentry.utils.snuba._apply_cache_and_build_results") as snuba:
- rollup = 3600
- end = self.now
- start = end + timedelta(days=-1, seconds=rollup)
- self.db.get_data(TSDBModel.group, [1, 2, 3, 4, 5], start, end, rollup=rollup)
- assert snuba.call_args.args[0][0][0].query.limit == Limit(120)
- assert snuba.call_args.args[0][0][0].flags.consistent is True
@region_silo_test
class SnubaTSDBGroupProfilingTest(TestCase, SnubaTestCase, SearchIssueTestMixin):
    def setUp(self):
        """Seed 4 hours of generic (profile file I/O) search issues, one every
        10 minutes, alternating between two group fingerprints."""
        super().setUp()

        self.db = SnubaTSDB()
        # Align to midnight so hourly rollup buckets line up exactly.
        self.now = before_now(hours=4).replace(hour=0, minute=0, second=0, microsecond=0)
        self.proj1 = self.create_project()

        self.env1 = Environment.objects.get_or_create(
            organization_id=self.proj1.organization_id, name="test"
        )[0]
        self.env2 = Environment.objects.get_or_create(
            organization_id=self.proj1.organization_id, name="dev"
        )[0]
        defaultenv = ""

        group1_fingerprint = f"{ProfileFileIOGroupType.type_id}-group1"
        group2_fingerprint = f"{ProfileFileIOGroupType.type_id}-group2"

        groups = {}
        for r in range(0, 14400, 600):  # Every 10 min for 4 hours
            event, occurrence, group_info = self.store_search_issue(
                project_id=self.proj1.id,
                # change every 55 min so some hours have 1 user, some have 2
                user_id=r // 3300,
                fingerprints=[group1_fingerprint] if ((r // 600) % 2) else [group2_fingerprint],
                # release_version=str(r // 3600) * 10, # 1 per hour,
                environment=[self.env1.name, None][(r // 7200) % 3],
                insert_time=self.now + timedelta(seconds=r),
            )
            if group_info:
                groups[group_info.group.id] = group_info.group

        # Dict insertion order is preserved, so the groups come out in the
        # order they were first seen.
        all_groups = list(groups.values())
        self.proj1group1 = all_groups[0]
        self.proj1group2 = all_groups[1]
        self.defaultenv = Environment.objects.get(name=defaultenv)
    def test_range_group_manual_group_time_rollup(self):
        """With rollup=None, get_range picks the optimal granularity for the
        requested window and returns one bucket per series point."""
        project = self.create_project()

        # these are the only granularities/rollups that can actually be used
        # (step seconds, bucket width, number of buckets to generate)
        GRANULARITIES = [
            (10, timedelta(seconds=10), 5),
            (60 * 60, timedelta(hours=1), 6),
            (60 * 60 * 24, timedelta(days=1), 15),
        ]

        start = before_now(days=15).replace(hour=0, minute=0, second=0)

        for step, delta, times in GRANULARITIES:
            series = [start + (delta * i) for i in range(times)]
            series_ts = [int(ts.timestamp()) for ts in series]

            assert self.db.get_optimal_rollup(series[0], series[-1]) == step
            assert self.db.get_optimal_rollup_series(series[0], end=series[-1], rollup=None) == (
                step,
                series_ts,
            )

            # One issue per bucket, all for the same per-step fingerprint.
            for time_step in series:
                _, _, group_info = self.store_search_issue(
                    project_id=project.id,
                    user_id=0,
                    fingerprints=[f"test_range_group_manual_group_time_rollup-{step}"],
                    environment=None,
                    insert_time=time_step,
                )

            assert group_info is not None
            assert self.db.get_range(
                TSDBModel.group_generic,
                [group_info.group.id],
                series[0],
                series[-1],
                rollup=None,
                tenant_ids={"referrer": "test", "organization_id": 1},
            ) == {group_info.group.id: [(ts, 1) for ts in series_ts]}
    def test_range_groups_mult(self):
        """11 issues spaced 10 minutes apart split 6/5 across the first two
        hourly buckets."""
        now = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        dts = [now + timedelta(hours=i) for i in range(4)]
        project = self.create_project()
        group_fingerprint = f"{ProfileFileIOGroupType.type_id}-group4"

        groups = []
        for i in range(0, 11):
            _, _, group_info = self.store_search_issue(
                project_id=project.id,
                user_id=0,
                fingerprints=[group_fingerprint],
                environment=None,
                insert_time=now + timedelta(minutes=i * 10),
            )
            if group_info:
                groups.append(group_info.group)

        # All issues share one fingerprint, so every entry is the same group.
        group = groups[0]
        assert self.db.get_range(
            TSDBModel.group_generic,
            [group.id],
            dts[0],
            dts[-1],
            rollup=3600,
            tenant_ids={"referrer": "test", "organization_id": 1},
        ) == {
            group.id: [
                (timestamp(dts[0]), 6),
                (timestamp(dts[1]), 5),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }
    def test_range_groups_simple(self):
        """Five issues for one fingerprint, all at the same instant, land in
        the first hourly bucket."""
        project = self.create_project()
        now = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
        group_fingerprint = f"{ProfileFileIOGroupType.type_id}-group5"
        ids = [1, 2, 3, 4, 5]
        groups = []
        for r in ids:
            # one issue per distinct user id, all at the same insert time
            event, occurrence, group_info = self.store_search_issue(
                project_id=project.id,
                user_id=r,
                fingerprints=[group_fingerprint],
                environment=None,
                insert_time=now,
            )
            if group_info:
                groups.append(group_info.group)

        # All issues share one fingerprint, so every entry is the same group.
        group = groups[0]
        dts = [now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(
            TSDBModel.group_generic,
            [group.id],
            dts[0],
            dts[-1],
            rollup=3600,
            tenant_ids={"referrer": "test", "organization_id": 1},
        ) == {
            group.id: [
                (timestamp(dts[0]), len(ids)),
                (timestamp(dts[1]), 0),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }
- def test_range_groups(self):
- dts = [self.now + timedelta(hours=i) for i in range(4)]
- # Multiple groups
- assert self.db.get_range(
- TSDBModel.group_generic,
- [self.proj1group1.id, self.proj1group2.id],
- dts[0],
- dts[-1],
- rollup=3600,
- tenant_ids={"referrer": "test", "organization_id": 1},
- ) == {
- self.proj1group1.id: [
- (timestamp(dts[0]), 3),
- (timestamp(dts[1]), 3),
- (timestamp(dts[2]), 3),
- (timestamp(dts[3]), 3),
- ],
- self.proj1group2.id: [
- (timestamp(dts[0]), 3),
- (timestamp(dts[1]), 3),
- (timestamp(dts[2]), 3),
- (timestamp(dts[3]), 3),
- ],
- }
- assert (
- self.db.get_range(
- TSDBModel.group_generic,
- [],
- dts[0],
- dts[-1],
- rollup=3600,
- tenant_ids={"referrer": "test", "organization_id": 1},
- )
- == {}
- )
    def test_get_distinct_counts_totals_users(self):
        """Distinct-user totals per generic group: full window, first hour
        only, and no keys."""
        assert self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_generic_group,
            [self.proj1group1.id],
            self.now,
            self.now + timedelta(hours=4),
            rollup=3600,
            tenant_ids={"referrer": "test", "organization_id": 1},
        ) == {
            self.proj1group1.id: 5  # 5 unique users overall
        }
        assert self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_generic_group,
            [self.proj1group1.id],
            self.now,
            self.now,
            rollup=3600,
            tenant_ids={"referrer": "test", "organization_id": 1},
        ) == {
            self.proj1group1.id: 1  # Only 1 unique user in the first hour
        }
        # No keys requested -> empty mapping.
        assert (
            self.db.get_distinct_counts_totals(
                TSDBModel.users_affected_by_generic_group,
                [],
                self.now,
                self.now + timedelta(hours=4),
                rollup=3600,
                tenant_ids={"referrer": "test", "organization_id": 1},
            )
            == {}
        )
- def test_get_sums(self):
- assert self.db.get_sums(
- model=TSDBModel.group_generic,
- keys=[self.proj1group1.id, self.proj1group2.id],
- start=self.now,
- end=self.now + timedelta(hours=4),
- tenant_ids={"referrer": "test", "organization_id": 1},
- ) == {self.proj1group1.id: 12, self.proj1group2.id: 12}
    def test_get_data_or_conditions_parsed(self):
        """
        Verify parsing the legacy format with nested OR conditions works
        """
        conditions = [
            # or conditions in the legacy format needs open and close brackets for precedence
            # there's some special casing when parsing conditions that specifically handles this
            [
                [["isNull", ["environment"]], "=", 1],
                ["environment", "IN", [self.env1.name]],
            ]
        ]

        # Same query with and without the (non-filtering) OR condition.
        data1 = self.db.get_data(
            model=TSDBModel.group_generic,
            keys=[self.proj1group1.id, self.proj1group2.id],
            conditions=conditions,
            start=self.now,
            end=self.now + timedelta(hours=4),
            tenant_ids={"referrer": "test", "organization_id": 1},
        )
        data2 = self.db.get_data(
            model=TSDBModel.group_generic,
            keys=[self.proj1group1.id, self.proj1group2.id],
            start=self.now,
            end=self.now + timedelta(hours=4),
            tenant_ids={"referrer": "test", "organization_id": 1},
        )

        # the above queries should return the same data since all groups either have:
        # environment=None or environment=test
        # so the condition really shouldn't be filtering anything
        assert data1 == data2
class AddJitterToSeriesTest(TestCase):
    def setUp(self):
        # Only the TSDB helper itself is exercised; no Snuba fixture needed.
        self.db = SnubaTSDB()
- def run_test(self, end, interval, jitter, expected_start, expected_end):
- start = end - interval
- rollup, rollup_series = self.db.get_optimal_rollup_series(start, end)
- series = self.db._add_jitter_to_series(rollup_series, start, rollup, jitter)
- assert to_datetime(series[0]) == expected_start
- assert to_datetime(series[-1]) == expected_end
    def test(self):
        """Jitter shifts each series point backwards by a jitter-derived
        offset; jitter=5 and jitter=55 produce the same shift below."""
        self.run_test(
            end=datetime(2022, 5, 18, 10, 23, 4, tzinfo=UTC),
            interval=timedelta(hours=1),
            jitter=5,
            expected_start=datetime(2022, 5, 18, 9, 22, 55, tzinfo=UTC),
            expected_end=datetime(2022, 5, 18, 10, 22, 55, tzinfo=UTC),
        )
        self.run_test(
            end=datetime(2022, 5, 18, 10, 23, 8, tzinfo=UTC),
            interval=timedelta(hours=1),
            jitter=5,
            expected_start=datetime(2022, 5, 18, 9, 23, 5, tzinfo=UTC),
            expected_end=datetime(2022, 5, 18, 10, 23, 5, tzinfo=UTC),
        )
        # Jitter should be the same
        self.run_test(
            end=datetime(2022, 5, 18, 10, 23, 8, tzinfo=UTC),
            interval=timedelta(hours=1),
            jitter=55,
            expected_start=datetime(2022, 5, 18, 9, 23, 5, tzinfo=UTC),
            expected_end=datetime(2022, 5, 18, 10, 23, 5, tzinfo=UTC),
        )
        # Minutely rollup: the shift lands within the minute.
        self.run_test(
            end=datetime(2022, 5, 18, 22, 33, 2, tzinfo=UTC),
            interval=timedelta(minutes=1),
            jitter=3,
            expected_start=datetime(2022, 5, 18, 22, 31, 53, tzinfo=UTC),
            expected_end=datetime(2022, 5, 18, 22, 32, 53, tzinfo=UTC),
        )
- def test_empty_series(self):
- assert self.db._add_jitter_to_series([], datetime(2022, 5, 18, 10, 23, 4), 60, 127) == []
- assert self.db._add_jitter_to_series([], datetime(2022, 5, 18, 10, 23, 4), 60, None) == []
|