12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853 |
- import functools
- from datetime import datetime, timedelta
- from typing import Optional, Sequence
- from unittest import mock
- from unittest.mock import patch
- import pytz
- from sentry.event_manager import _pull_out_data
- from sentry.models import Environment, Group, GroupRelease, Release
- from sentry.testutils import SnubaTestCase, TestCase
- from sentry.testutils.helpers.datetime import iso_format
- from sentry.tsdb.base import TSDBModel
- from sentry.tsdb.snuba import SnubaTSDB
- from sentry.utils.dates import to_datetime, to_timestamp
- from sentry.utils.snuba import aliased_query
def timestamp(d):
    """Convert datetime ``d`` to a Unix timestamp truncated down to the hour."""
    return (int(to_timestamp(d)) // 3600) * 3600
def has_shape(data, shape, allow_empty=False):
    """
    Determine if a data object has the provided shape

    At any level, the object in `data` and in `shape` must have the same type.
    A dict is the same shape if all its keys and values have the same shape as the
    key/value in `shape`. The number of keys/values is not relevant.
    A list is the same shape if all its items have the same shape as the value
    in `shape`
    A tuple is the same shape if it has the same length as `shape` and all the
    values have the same shape as the corresponding value in `shape`
    Any other object simply has to have the same type.

    If `allow_empty` is set, lists and dicts in `data` will pass even if they are
    empty, at any nesting depth.
    """
    if not isinstance(data, type(shape)):
        return False
    if isinstance(data, dict):
        return (
            (allow_empty or len(data) > 0)
            # Fix: propagate allow_empty into the recursion; previously nested
            # empty containers failed even when allow_empty=True, contradicting
            # the docstring.
            and all(has_shape(k, list(shape.keys())[0], allow_empty) for k in data.keys())
            and all(has_shape(v, list(shape.values())[0], allow_empty) for v in data.values())
        )
    elif isinstance(data, list):
        return (allow_empty or len(data) > 0) and all(
            has_shape(v, shape[0], allow_empty) for v in data
        )
    elif isinstance(data, tuple):
        # Tuples must match element-for-element, so emptiness is governed by
        # the shape itself rather than allow_empty.
        return len(data) == len(shape) and all(
            has_shape(data[i], shape[i], allow_empty) for i in range(len(data))
        )
    else:
        return True
class SnubaTSDBTest(TestCase, SnubaTestCase):
    """Integration tests for SnubaTSDB error-event time series.

    setUp stores a deterministic event stream (one event every 10 minutes for
    4 hours, alternating between two fingerprints/groups, with environment,
    user and release varying on fixed schedules) and the tests assert exact
    per-bucket counts from the various SnubaTSDB query methods.
    """

    def setUp(self):
        super().setUp()
        self.db = SnubaTSDB()
        # Anchor "now" at the most recent day boundary at least 4 hours in the
        # past so every stored event lands strictly before wall-clock now.
        self.now = (datetime.utcnow() - timedelta(hours=4)).replace(
            hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC
        )
        self.proj1 = self.create_project()
        env1 = "test"
        env2 = "dev"
        defaultenv = ""

        release1 = "1" * 10
        release2 = "2" * 10

        self.release1 = Release.objects.create(
            organization_id=self.organization.id, version=release1, date_added=self.now
        )
        self.release1.add_project(self.proj1)
        self.release2 = Release.objects.create(
            organization_id=self.organization.id, version=release2, date_added=self.now
        )
        self.release2.add_project(self.proj1)

        for r in range(0, 14400, 600):  # Every 10 min for 4 hours
            self.store_event(
                data={
                    "event_id": (str(r) * 32)[:32],
                    "message": "message 1",
                    "platform": "python",
                    "fingerprint": [["group-1"], ["group-2"]][
                        (r // 600) % 2
                    ],  # Switch every 10 mins
                    "timestamp": iso_format(self.now + timedelta(seconds=r)),
                    "tags": {
                        "foo": "bar",
                        "baz": "quux",
                        # Switch every 2 hours
                        "environment": [env1, None][(r // 7200) % 3],
                        "sentry:user": f"id:user{r // 3300}",
                    },
                    "user": {
                        # change every 55 min so some hours have 1 user, some have 2
                        "id": f"user{r // 3300}",
                        "email": f"user{r}@sentry.io",
                    },
                    "release": str(r // 3600) * 10,  # 1 per hour,
                },
                project_id=self.proj1.id,
            )

        groups = Group.objects.filter(project=self.proj1).order_by("id")
        self.proj1group1 = groups[0]
        self.proj1group2 = groups[1]

        self.env1 = Environment.objects.get(name=env1)
        self.env2 = self.create_environment(name=env2)  # No events
        self.defaultenv = Environment.objects.get(name=defaultenv)

        self.group1release1env1 = GroupRelease.objects.get(
            project_id=self.proj1.id,
            group_id=self.proj1group1.id,
            release_id=self.release1.id,
            environment=env1,
        )

        self.group1release2env1 = GroupRelease.objects.create(
            project_id=self.proj1.id,
            group_id=self.proj1group1.id,
            release_id=self.release2.id,
            environment=env1,
        )

        self.group2release1env1 = GroupRelease.objects.get(
            project_id=self.proj1.id,
            group_id=self.proj1group2.id,
            release_id=self.release1.id,
            environment=env1,
        )

    def test_range_single(self):
        """Hourly counts for a single group receiving one event every 5 min."""
        env1 = "test"
        project = self.create_project()
        # NOTE: step is 300s, i.e. one event every 5 min for 4 hours
        # (the previous comment claiming "every 10 min" was stale).
        for r in range(0, 600 * 6 * 4, 300):
            self.store_event(
                data={
                    "event_id": (str(r) * 32)[:32],
                    "message": "message 1",
                    "platform": "python",
                    "fingerprint": ["group-1"],
                    "timestamp": iso_format(self.now + timedelta(seconds=r)),
                    "tags": {
                        "foo": "bar",
                        "baz": "quux",
                        # Switch every 2 hours
                        "environment": [env1, None][(r // 7200) % 3],
                        "sentry:user": f"id:user{r // 3300}",
                    },
                    "user": {
                        # change every 55 min so some hours have 1 user, some have 2
                        "id": f"user{r // 3300}",
                        "email": f"user{r}@sentry.io",
                    },
                    "release": str(r // 3600) * 10,  # 1 per hour,
                },
                project_id=project.id,
            )

        groups = Group.objects.filter(project=project).order_by("id")
        group = groups[0]

        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(TSDBModel.group, [group.id], dts[0], dts[-1], rollup=3600) == {
            group.id: [
                (timestamp(dts[0]), 6 * 2),
                (timestamp(dts[1]), 6 * 2),
                (timestamp(dts[2]), 6 * 2),
                (timestamp(dts[3]), 6 * 2),
            ]
        }

    def test_range_groups(self):
        """Hourly counts per group; empty key list yields an empty result."""
        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(
            TSDBModel.group, [self.proj1group1.id], dts[0], dts[-1], rollup=3600
        ) == {
            self.proj1group1.id: [
                (timestamp(dts[0]), 3),
                (timestamp(dts[1]), 3),
                (timestamp(dts[2]), 3),
                (timestamp(dts[3]), 3),
            ]
        }

        # Multiple groups
        assert self.db.get_range(
            TSDBModel.group,
            [self.proj1group1.id, self.proj1group2.id],
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            self.proj1group1.id: [
                (timestamp(dts[0]), 3),
                (timestamp(dts[1]), 3),
                (timestamp(dts[2]), 3),
                (timestamp(dts[3]), 3),
            ],
            self.proj1group2.id: [
                (timestamp(dts[0]), 3),
                (timestamp(dts[1]), 3),
                (timestamp(dts[2]), 3),
                (timestamp(dts[3]), 3),
            ],
        }

        assert self.db.get_range(TSDBModel.group, [], dts[0], dts[-1], rollup=3600) == {}

    def test_range_releases(self):
        """Only the hour matching release1's events has a non-zero count."""
        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(
            TSDBModel.release, [self.release1.id], dts[0], dts[-1], rollup=3600
        ) == {
            self.release1.id: [
                (timestamp(dts[0]), 0),
                (timestamp(dts[1]), 6),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }

    def test_range_project(self):
        """Project-level counts: 6 events in every hour bucket."""
        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(
            TSDBModel.project, [self.proj1.id], dts[0], dts[-1], rollup=3600
        ) == {
            self.proj1.id: [
                (timestamp(dts[0]), 6),
                (timestamp(dts[1]), 6),
                (timestamp(dts[2]), 6),
                (timestamp(dts[3]), 6),
            ]
        }

    def test_range_environment_filter(self):
        """environment_ids filtering: env1, an empty env, and the default env."""
        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(
            TSDBModel.project,
            [self.proj1.id],
            dts[0],
            dts[-1],
            rollup=3600,
            environment_ids=[self.env1.id],
        ) == {
            self.proj1.id: [
                (timestamp(dts[0]), 6),
                (timestamp(dts[1]), 6),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }

        # No events submitted for env2
        assert self.db.get_range(
            TSDBModel.project,
            [self.proj1.id],
            dts[0],
            dts[-1],
            rollup=3600,
            environment_ids=[self.env2.id],
        ) == {
            self.proj1.id: [
                (timestamp(dts[0]), 0),
                (timestamp(dts[1]), 0),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }

        # Events submitted with no environment should match default environment
        assert self.db.get_range(
            TSDBModel.project,
            [self.proj1.id],
            dts[0],
            dts[-1],
            rollup=3600,
            environment_ids=[self.defaultenv.id],
        ) == {
            self.proj1.id: [
                (timestamp(dts[0]), 0),
                (timestamp(dts[1]), 0),
                (timestamp(dts[2]), 6),
                (timestamp(dts[3]), 6),
            ]
        }

    def test_range_rollups(self):
        """Daily and minutely rollups bucket the same events correctly."""
        # Daily
        daystart = self.now.replace(hour=0)  # day buckets start on day boundaries
        dts = [daystart + timedelta(days=i) for i in range(2)]
        assert self.db.get_range(
            TSDBModel.project, [self.proj1.id], dts[0], dts[-1], rollup=86400
        ) == {self.proj1.id: [(timestamp(dts[0]), 24), (timestamp(dts[1]), 0)]}

        # Minutely
        dts = [self.now + timedelta(minutes=i) for i in range(120)]
        # Expect every 10th minute to have a 1, else 0
        expected = [(to_timestamp(d), 1 if i % 10 == 0 else 0) for i, d in enumerate(dts)]
        assert self.db.get_range(
            TSDBModel.project, [self.proj1.id], dts[0], dts[-1], rollup=60
        ) == {self.proj1.id: expected}

    def test_distinct_counts_series_users(self):
        """Unique-user counts per hour for a group and for the whole project."""
        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_distinct_counts_series(
            TSDBModel.users_affected_by_group, [self.proj1group1.id], dts[0], dts[-1], rollup=3600
        ) == {
            self.proj1group1.id: [
                (timestamp(dts[0]), 1),
                (timestamp(dts[1]), 1),
                (timestamp(dts[2]), 1),
                (timestamp(dts[3]), 2),
            ]
        }

        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_distinct_counts_series(
            TSDBModel.users_affected_by_project, [self.proj1.id], dts[0], dts[-1], rollup=3600
        ) == {
            self.proj1.id: [
                (timestamp(dts[0]), 1),
                (timestamp(dts[1]), 2),
                (timestamp(dts[2]), 2),
                (timestamp(dts[3]), 2),
            ]
        }

        assert (
            self.db.get_distinct_counts_series(
                TSDBModel.users_affected_by_group, [], dts[0], dts[-1], rollup=3600
            )
            == {}
        )

    def test_get_distinct_counts_totals_users(self):
        """Total unique-user counts over a time window.

        Fix: this method was previously named without the ``test_`` prefix, so
        pytest never collected or ran it.
        """
        assert self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_group,
            [self.proj1group1.id],
            self.now,
            self.now + timedelta(hours=4),
            rollup=3600,
        ) == {
            self.proj1group1.id: 2  # 2 unique users overall
        }

        assert self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_group,
            [self.proj1group1.id],
            self.now,
            self.now,
            rollup=3600,
        ) == {
            self.proj1group1.id: 1  # Only 1 unique user in the first hour
        }

        assert self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_project,
            [self.proj1.id],
            self.now,
            self.now + timedelta(hours=4),
            rollup=3600,
        ) == {self.proj1.id: 2}

        assert (
            self.db.get_distinct_counts_totals(
                TSDBModel.users_affected_by_group,
                [],
                self.now,
                self.now + timedelta(hours=4),
                rollup=3600,
            )
            == {}
        )

    def test_most_frequent(self):
        """Most frequent issues per project; ordering of ties is unspecified."""
        assert self.db.get_most_frequent(
            TSDBModel.frequent_issues_by_project,
            [self.proj1.id],
            self.now,
            self.now + timedelta(hours=4),
            rollup=3600,
        ) in [
            {self.proj1.id: [(self.proj1group1.id, 2.0), (self.proj1group2.id, 1.0)]},
            {self.proj1.id: [(self.proj1group2.id, 2.0), (self.proj1group1.id, 1.0)]},
        ]  # Both issues equally frequent

        assert (
            self.db.get_most_frequent(
                TSDBModel.frequent_issues_by_project,
                [],
                self.now,
                self.now + timedelta(hours=4),
                rollup=3600,
            )
            == {}
        )

    def test_frequency_series(self):
        """Per-hour release frequencies keyed by group then GroupRelease id."""
        dts = [self.now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_frequency_series(
            TSDBModel.frequent_releases_by_group,
            {
                self.proj1group1.id: (self.group1release1env1.id, self.group1release2env1.id),
                self.proj1group2.id: (self.group2release1env1.id,),
            },
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            self.proj1group1.id: [
                (timestamp(dts[0]), {self.group1release1env1.id: 0, self.group1release2env1.id: 0}),
                (timestamp(dts[1]), {self.group1release1env1.id: 3, self.group1release2env1.id: 0}),
                (timestamp(dts[2]), {self.group1release1env1.id: 0, self.group1release2env1.id: 3}),
                (timestamp(dts[3]), {self.group1release1env1.id: 0, self.group1release2env1.id: 0}),
            ],
            self.proj1group2.id: [
                (timestamp(dts[0]), {self.group2release1env1.id: 0}),
                (timestamp(dts[1]), {self.group2release1env1.id: 3}),
                (timestamp(dts[2]), {self.group2release1env1.id: 0}),
                (timestamp(dts[3]), {self.group2release1env1.id: 0}),
            ],
        }

        assert (
            self.db.get_frequency_series(
                TSDBModel.frequent_releases_by_group, {}, dts[0], dts[-1], rollup=3600
            )
            == {}
        )

    def test_result_shape(self):
        """
        Tests that the results from the different TSDB methods have the
        expected format.
        """
        project_id = self.proj1.id
        dts = [self.now + timedelta(hours=i) for i in range(4)]

        results = self.db.get_most_frequent(
            TSDBModel.frequent_issues_by_project, [project_id], dts[0], dts[0]
        )
        assert has_shape(results, {1: [(1, 1.0)]})

        results = self.db.get_most_frequent_series(
            TSDBModel.frequent_issues_by_project, [project_id], dts[0], dts[0]
        )
        assert has_shape(results, {1: [(1, {1: 1.0})]})

        items = {
            # {project_id: (issue_id, issue_id, ...)}
            project_id: (self.proj1group1.id, self.proj1group2.id)
        }
        results = self.db.get_frequency_series(
            TSDBModel.frequent_issues_by_project, items, dts[0], dts[-1]
        )
        assert has_shape(results, {1: [(1, {1: 1})]})

        results = self.db.get_frequency_totals(
            TSDBModel.frequent_issues_by_project, items, dts[0], dts[-1]
        )
        assert has_shape(results, {1: {1: 1}})

        results = self.db.get_range(TSDBModel.project, [project_id], dts[0], dts[-1])
        assert has_shape(results, {1: [(1, 1)]})

        results = self.db.get_distinct_counts_series(
            TSDBModel.users_affected_by_project, [project_id], dts[0], dts[-1]
        )
        assert has_shape(results, {1: [(1, 1)]})

        results = self.db.get_distinct_counts_totals(
            TSDBModel.users_affected_by_project, [project_id], dts[0], dts[-1]
        )
        assert has_shape(results, {1: 1})

        results = self.db.get_distinct_counts_union(
            TSDBModel.users_affected_by_project, [project_id], dts[0], dts[-1]
        )
        assert has_shape(results, 1)

    def test_calculated_limit(self):
        """The snuba query limit must be n_keys * n_rollup_buckets."""
        with patch("sentry.tsdb.snuba.snuba") as snuba:
            # 24h test
            rollup = 3600
            end = self.now
            start = end + timedelta(days=-1, seconds=rollup)
            self.db.get_data(TSDBModel.group, [1, 2, 3, 4, 5], start, end, rollup=rollup)
            assert snuba.query.call_args[1]["limit"] == 120

            # 14 day test
            rollup = 86400
            start = end + timedelta(days=-14, seconds=rollup)
            self.db.get_data(TSDBModel.group, [1, 2, 3, 4, 5], start, end, rollup=rollup)
            assert snuba.query.call_args[1]["limit"] == 70

            # 1h test
            rollup = 3600
            end = self.now
            start = end + timedelta(hours=-1, seconds=rollup)
            self.db.get_data(TSDBModel.group, [1, 2, 3, 4, 5], start, end, rollup=rollup)
            assert snuba.query.call_args[1]["limit"] == 5
class SnubaTSDBGroupPerformanceTest(TestCase, SnubaTestCase):
    """Tests for TSDBModel.group_performance: counting transaction events
    attached to groups, queried through the snuba Transactions dataset.

    Group attachment is forced by monkey-patching the event pipeline (see
    ``__insert_transaction``) rather than relying on normal grouping.
    """

    def setUp(self):
        super().setUp()
        self.db = SnubaTSDB()
        # Anchor "now" at the most recent day boundary at least 4 hours back so
        # all inserted transactions are strictly in the past.
        self.now = (datetime.utcnow() - timedelta(hours=4)).replace(
            hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC
        )
        self.proj1 = self.create_project()
        env1 = "test"
        env2 = "dev"
        defaultenv = ""

        self.proj1group1 = self.create_group(project=self.proj1)
        self.proj1group2 = self.create_group(project=self.proj1)

        for r in range(0, 14400, 600):  # Every 10 min for 4 hours
            self.__insert_transaction(
                environment=[env1, None][(r // 7200) % 3],
                project_id=self.proj1.id,
                # change every 55 min so some hours have 1 user, some have 2
                user_id=f"user{r // 3300}",
                email=f"user{r}@sentry.io",
                # release_version=str(r // 3600) * 10,  # 1 per hour,
                insert_timestamp=self.now + timedelta(seconds=r),
                groups=[[self.proj1group1], [self.proj1group2]][(r // 600) % 2],
                transaction_name=str(r),
            )

        self.env1 = Environment.objects.get(name=env1)
        self.env2 = self.create_environment(name=env2)  # No events
        self.defaultenv = Environment.objects.get(name=defaultenv)

    def __insert_transaction(
        self,
        environment: Optional[str],
        project_id: int,
        user_id: str,
        email: str,
        insert_timestamp: datetime,
        # NOTE: annotation fixed from Sequence[int] — callers pass Group model
        # instances (``g.id`` is read below), not ids.
        groups: Sequence[Group],
        transaction_name: str,
    ):
        """Store one transaction event attached to ``groups`` and verify via a
        raw snuba query that it was ingested with the expected columns.

        Returns the stored event.
        """

        def inject_group_ids(jobs, projects, _groups=None):
            # Run the real _pull_out_data, then force the event's group
            # attachment so the transaction is associated with ``_groups``.
            _pull_out_data(jobs, projects)
            if _groups:
                for job in jobs:
                    job["event"].groups = _groups
            return jobs, projects

        event_data = {
            "type": "transaction",
            "level": "info",
            "message": "transaction message",
            "tags": {
                "environment": environment,
                "sentry:user": f"id:{user_id}",
            },
            "user": {
                "id": user_id,
                "email": email,
            },
            "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
            "timestamp": insert_timestamp.timestamp(),
            "start_timestamp": insert_timestamp.timestamp(),
            "transaction": transaction_name,
        }

        with mock.patch(
            "sentry.event_manager._pull_out_data",
            functools.partial(
                inject_group_ids,
                _groups=groups,
            ),
        ):
            event = self.store_event(
                data=event_data,
                project_id=project_id,
            )

        assert event

        # Read the row back from snuba to confirm the transaction was ingested
        # with the forced group ids and expected tag/environment values.
        from sentry.utils import snuba

        result = snuba.raw_query(
            dataset=snuba.Dataset.Transactions,
            start=insert_timestamp - timedelta(days=1),
            end=insert_timestamp + timedelta(days=1),
            selected_columns=[
                "event_id",
                "project_id",
                "environment",
                "group_ids",
                "tags[sentry:user]",
                "timestamp",
            ],
            groupby=None,
            filter_keys={"project_id": [project_id], "event_id": [event.event_id]},
        )
        assert len(result["data"]) == 1
        assert result["data"][0]["event_id"] == event.event_id
        assert result["data"][0]["project_id"] == event.project_id
        assert result["data"][0]["group_ids"] == [g.id for g in groups]
        assert result["data"][0]["tags[sentry:user]"] == f"id:{user_id}"
        assert result["data"][0]["environment"] == (environment if environment else None)
        assert result["data"][0]["timestamp"] == insert_timestamp.isoformat()

        return event

    def test_range_groups_single(self):
        """Insert ``times`` transactions for one group in the first hour and
        check get_range counts them all into that hour's bucket."""
        from sentry.snuba.dataset import Dataset

        now = (datetime.utcnow() - timedelta(hours=4)).replace(
            hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC
        )
        dts = [now + timedelta(hours=i) for i in range(4)]
        project = self.create_project()
        group = self.create_group(project=project, first_seen=now)

        # not sure what's going on here, but `times=1,2,3,4` work fine
        # fails with anything above 4
        times = 4
        event_ids = []
        events = []
        for i in range(0, times):
            res = self.__insert_transaction(
                environment=None,
                project_id=project.id,
                user_id="my_user",
                email="test@email.com",
                insert_timestamp=now + timedelta(minutes=i * 10),
                groups=[group],
                transaction_name=str(i),
            )

            # Sanity-check each inserted event is queryable by id...
            grouped_by_project = aliased_query(
                dataset=Dataset.Transactions,
                start=None,
                end=None,
                groupby=None,
                conditions=None,
                filter_keys={"project_id": [project.id], "event_id": [res.event_id]},
                selected_columns=["event_id", "project_id", "group_ids"],
                aggregations=None,
            )
            assert grouped_by_project["data"][0]["event_id"] == res.event_id

            # ...and reconstructible from nodestore.
            from sentry.eventstore.models import Event

            event_from_nodestore = Event(project_id=project.id, event_id=res.event_id)
            assert event_from_nodestore.event_id == res.event_id
            event_ids.append(res.event_id)
            events.append(res)

        # All inserted transactions are visible for the project.
        transactions_for_project = aliased_query(
            dataset=Dataset.Transactions,
            start=None,
            end=None,
            groupby=None,
            conditions=None,
            filter_keys={"project_id": [project.id]},
            selected_columns=["project_id", "event_id"],
            aggregations=None,
        )
        assert len(transactions_for_project["data"]) == times

        # Grouping by arrayJoin(group_ids) reproduces the per-group count.
        transactions_by_group = aliased_query(
            dataset=Dataset.Transactions,
            start=None,
            end=None,
            # start=group.first_seen,
            # end=now + timedelta(hours=4),
            groupby=["group_id"],
            conditions=None,
            filter_keys={"project_id": [project.id], "group_id": [group.id]},
            aggregations=[
                ["arrayJoin", ["group_ids"], "group_id"],
                ["count()", "", "times_seen"],
            ],
        )
        assert transactions_by_group["data"][0]["times_seen"] == times  # 1 + (times % 5)

        assert self.db.get_range(
            TSDBModel.group_performance,
            [group.id],
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            group.id: [
                # (timestamp(dts[0]), 1 + (times % 5)),
                (timestamp(dts[0]), times),
                (timestamp(dts[1]), 0),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }

    def test_range_groups_mult(self):
        """11 transactions at 10-min spacing split 6/5 across the first two
        hour buckets."""
        now = (datetime.utcnow() - timedelta(hours=4)).replace(
            hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC
        )
        dts = [now + timedelta(hours=i) for i in range(4)]
        project = self.create_project()
        group = self.create_group(project=project)
        ids = ["a", "b", "c", "d", "e", "f", "1", "2", "3", "4", "5"]
        for i, _id in enumerate(ids):
            self.__insert_transaction(
                environment=None,
                project_id=project.id,
                user_id="my_user",
                email="test@email.com",
                insert_timestamp=now + timedelta(minutes=i * 10),
                groups=[group],
                transaction_name=_id,
            )

        assert self.db.get_range(
            TSDBModel.group_performance,
            [group.id],
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            group.id: [
                (timestamp(dts[0]), 6),
                (timestamp(dts[1]), 5),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }

    def test_range_groups_simple(self):
        """All transactions at the same instant land in a single hour bucket."""
        project = self.create_project()
        group = self.create_group(project=project)
        now = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC)
        # for r in range(0, 14400, 600):  # Every 10 min for 4 hours
        # for r in [1, 2, 3, 4, 5, 6, 7, 8]:
        ids = ["a", "b", "c", "d", "e"]  # , "f"]
        for r in ids:
            # for r in range(0, 9, 1):
            self.__insert_transaction(
                environment=None,
                project_id=project.id,
                # change every 55 min so some hours have 1 user, some have 2
                user_id=f"user{r}",
                email=f"user{r}@sentry.io",
                # release_version=str(r // 3600) * 10,  # 1 per hour,
                insert_timestamp=now,
                groups=[group],
                transaction_name=r,
            )

        dts = [now + timedelta(hours=i) for i in range(4)]
        assert self.db.get_range(
            TSDBModel.group_performance,
            [group.id],
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            group.id: [
                (timestamp(dts[0]), len(ids)),
                (timestamp(dts[1]), 0),
                (timestamp(dts[2]), 0),
                (timestamp(dts[3]), 0),
            ]
        }

    def test_range_groups(self):
        """Per-group hourly counts from the setUp fixture; empty key list
        yields an empty result."""
        dts = [self.now + timedelta(hours=i) for i in range(4)]

        # Multiple groups
        assert self.db.get_range(
            TSDBModel.group_performance,
            [self.proj1group1.id, self.proj1group2.id],
            dts[0],
            dts[-1],
            rollup=3600,
        ) == {
            self.proj1group1.id: [
                (timestamp(dts[0]), 3),
                (timestamp(dts[1]), 3),
                (timestamp(dts[2]), 3),
                (timestamp(dts[3]), 3),
            ],
            self.proj1group2.id: [
                (timestamp(dts[0]), 3),
                (timestamp(dts[1]), 3),
                (timestamp(dts[2]), 3),
                (timestamp(dts[3]), 3),
            ],
        }

        assert (
            self.db.get_range(TSDBModel.group_performance, [], dts[0], dts[-1], rollup=3600) == {}
        )
class AddJitterToSeriesTest(TestCase):
    """Unit tests for SnubaTSDB._add_jitter_to_series endpoint alignment."""

    def setUp(self):
        self.db = SnubaTSDB()

    def run_test(self, end, interval, jitter, expected_start, expected_end):
        """Build a rollup series over [end - interval, end], apply jitter, and
        check the first and last jittered points."""
        window_end = end.replace(tzinfo=pytz.UTC)
        window_start = window_end - interval
        bucket_size, buckets = self.db.get_optimal_rollup_series(window_start, window_end)
        jittered = self.db._add_jitter_to_series(buckets, window_start, bucket_size, jitter)
        assert to_datetime(jittered[0]) == expected_start.replace(tzinfo=pytz.UTC)
        assert to_datetime(jittered[-1]) == expected_end.replace(tzinfo=pytz.UTC)

    def test(self):
        cases = [
            # (end, interval, jitter, expected_start, expected_end)
            (
                datetime(2022, 5, 18, 10, 23, 4),
                timedelta(hours=1),
                5,
                datetime(2022, 5, 18, 9, 22, 55),
                datetime(2022, 5, 18, 10, 22, 55),
            ),
            (
                datetime(2022, 5, 18, 10, 23, 8),
                timedelta(hours=1),
                5,
                datetime(2022, 5, 18, 9, 23, 5),
                datetime(2022, 5, 18, 10, 23, 5),
            ),
            # Jitter should be the same
            (
                datetime(2022, 5, 18, 10, 23, 8),
                timedelta(hours=1),
                55,
                datetime(2022, 5, 18, 9, 23, 5),
                datetime(2022, 5, 18, 10, 23, 5),
            ),
            (
                datetime(2022, 5, 18, 22, 33, 2),
                timedelta(minutes=1),
                3,
                datetime(2022, 5, 18, 22, 31, 53),
                datetime(2022, 5, 18, 22, 32, 53),
            ),
        ]
        for end, interval, jitter, expected_start, expected_end in cases:
            self.run_test(
                end=end,
                interval=interval,
                jitter=jitter,
                expected_start=expected_start,
                expected_end=expected_end,
            )

    def test_empty_series(self):
        # An empty series stays empty regardless of the jitter value.
        anchor = datetime(2022, 5, 18, 10, 23, 4)
        for jitter in (127, None):
            assert self.db._add_jitter_to_series([], anchor, 60, jitter) == []
|