- #!/usr/bin/env python
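- # Seed a development Sentry install with mock data: orgs, teams, projects,
- # events, releases, cron monitors, alerts, and transaction traces.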
- import time
- from sentry.runner import configure
- from sentry.types.activity import ActivityType
- configure()
- import itertools
- import random
- from datetime import datetime, timedelta
- from hashlib import sha1
- from random import randint
- from uuid import uuid4
- from django.conf import settings
- from django.db import IntegrityError, transaction
- from django.db.models import F
- from django.utils import timezone
- from pytz import utc
- from sentry import buffer, roles, tsdb
- from sentry.event_manager import HashDiscarded
- from sentry.incidents.logic import create_alert_rule, create_alert_rule_trigger, create_incident
- from sentry.incidents.models import AlertRuleThresholdType, IncidentType
- from sentry.models import (
- TOMBSTONE_FIELDS_FROM_GROUP,
- Activity,
- Broadcast,
- Commit,
- CommitAuthor,
- CommitFileChange,
- Deploy,
- Environment,
- EventAttachment,
- File,
- Group,
- GroupRelease,
- GroupTombstone,
- Organization,
- OrganizationAccessRequest,
- OrganizationMember,
- Project,
- Release,
- ReleaseCommit,
- ReleaseEnvironment,
- ReleaseFile,
- ReleaseProjectEnvironment,
- Repository,
- Team,
- User,
- UserReport,
- )
- from sentry.monitors.models import (
- CheckInStatus,
- Monitor,
- MonitorCheckIn,
- MonitorEnvironment,
- MonitorStatus,
- MonitorType,
- )
- from sentry.signals import mocks_loaded
- from sentry.similarity import features
- from sentry.utils import loremipsum
- from sentry.utils.hashlib import md5_text
- from sentry.utils.samples import create_sample_event as _create_sample_event
- from sentry.utils.samples import create_trace, generate_user, random_normal
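- # Round-robin pools of fake values; next() walks each cycle indefinitely.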
- PLATFORMS = itertools.cycle(["ruby", "php", "python", "java", "javascript"])
- LEVELS = itertools.cycle(["error", "error", "error", "fatal", "warning"])
- ENVIRONMENTS = itertools.cycle(["production", "production", "staging", "alpha", "beta", ""])
- MONITOR_NAMES = itertools.cycle(settings.CELERYBEAT_SCHEDULE.keys())
- MONITOR_SCHEDULES = itertools.cycle(["* * * * *", "0 * * * *", "0 0 * * *"])
- LONG_MESSAGE = """Code: 0.
- DB::Exception: String is too long for DateTime: 2018-10-26T19:14:18+00:00. Stack trace:
- 0. clickhouse-server(StackTrace::StackTrace()+0x16) [0x99e9626]
- 1. clickhouse-server(DB::Exception::Exception(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, int)+0x22) [0x3087172]
- 2. clickhouse-server(DB::FunctionComparison<DB::EqualsOp, DB::NameEquals>::executeDateOrDateTimeOrEnumOrUUIDWithConstString(DB::Block&, unsigned long, DB::IColumn const*, DB::IColumn const*, std::shared_ptr<DB::IDataType const> const&, std::shared_ptr<DB::IDataType const> const&, bool, unsigned long)+0x13c8) [0x3b233d8]
- 3. clickhouse-server(DB::FunctionComparison<DB::EqualsOp, DB::NameEquals>::executeImpl(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x576) [0x3bafc86]
- 4. clickhouse-server(DB::PreparedFunctionImpl::defaultImplementationForNulls(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x174) [0x7953cd4]
- 5. clickhouse-server(DB::PreparedFunctionImpl::executeWithoutLowCardinalityColumns(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x54) [0x7953b04]
- 6. clickhouse-server(DB::PreparedFunctionImpl::execute(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x3e2) [0x7954222]
- 7. clickhouse-server(DB::ExpressionAction::execute(DB::Block&, std::unordered_map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long, std::hash<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::equal_to<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, unsigned long> > >&) const+0x69b) [0x7b021fb]
- 8. clickhouse-server(DB::ExpressionActions::execute(DB::Block&) const+0xe6) [0x7b03676]
- 9. clickhouse-server(DB::FilterBlockInputStream::FilterBlockInputStream(std::shared_ptr<DB::IBlockInputStream> const&, std::shared_ptr<DB::ExpressionActions> const&, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool)+0x711) [0x79b7e31]
- 10. clickhouse-server() [0x75e9443]
- 11. clickhouse-server(DB::InterpreterSelectQuery::executeImpl(DB::InterpreterSelectQuery::Pipeline&, std::shared_ptr<DB::IBlockInputStream> const&, bool)+0x118f) [0x75f212f]
- 12. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr<DB::IAST> const&, DB::Context const&, std::shared_ptr<DB::IBlockInputStream> const&, std::shared_ptr<DB::IStorage> const&, std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x5e6) [0x75f2d46]
- 13. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr<DB::IAST> const&, DB::Context const&, std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x56) [0x75f3aa6]
- 14. clickhouse-server(DB::InterpreterSelectWithUnionQuery::InterpreterSelectWithUnionQuery(std::shared_ptr<DB::IAST> const&, DB::Context const&, std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x7e7) [0x75ffab7]
- 15. clickhouse-server(DB::InterpreterFactory::get(std::shared_ptr<DB::IAST>&, DB::Context&, DB::QueryProcessingStage::Enum)+0x3a8) [0x75dc138]
- 16. clickhouse-server() [0x768fad9]
- 17. clickhouse-server(DB::executeQuery(std::__cxx11::basic..."""
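- # Build a random lorem-ipsum sentence; the Weibull draw keeps lengths around seven words.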
- def make_sentence(words=None):
- if words is None:
- words = int(random.weibullvariate(8, 3))
- return " ".join(random.choice(loremipsum.words) for _ in range(words))
- def create_sample_event(*args, **kwargs):
- try:
- event = _create_sample_event(*args, **kwargs)
- except HashDiscarded as e:
- print(f"> Skipping Event: {e.message}") # NOQA
- else:
- if event is not None:
- features.record([event])
- return event
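- # Fabricate 1-20 commit payloads; roughly one in six is attributed to the real user.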
- def generate_commits(user):
- commits = []
- for i in range(random.randint(1, 20)):
- if i == 1:
- filename = "raven/base.py"
- else:
- filename = random.choice(loremipsum.words) + ".js"
- if random.randint(0, 5) == 1:
- author = (user.name, user.email)
- else:
- author = (
- f"{random.choice(loremipsum.words)} {random.choice(loremipsum.words)}",
- f"{random.choice(loremipsum.words)}@example.com",
- )
- commits.append(
- {
- "key": sha1(uuid4().bytes).hexdigest(),
- "message": f"feat: Do something to {filename}\n{make_sentence()}",
- "author": author,
- "files": [(filename, "M")],
- }
- )
- return commits
- def generate_tombstones(project, user):
- # attempt to create a high enough previous_group_id
- # that it won't conflict with any group ids
- prev_group_id = 100000
- try:
- prev_group_id = (
- max(
- GroupTombstone.objects.order_by("-previous_group_id")[0].previous_group_id,
- prev_group_id,
- )
- + 1
- )
- except IndexError:
- pass
- for group in Group.objects.filter(project=project)[:5]:
- GroupTombstone.objects.create(
- previous_group_id=prev_group_id,
- actor_id=user.id,
- **{name: getattr(group, name) for name in TOMBSTONE_FIELDS_FROM_GROUP},
- )
- prev_group_id += 1
- def create_system_time_series():
- now = datetime.utcnow().replace(tzinfo=utc)
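- # One minute of per-second internal API counts (2xx/4xx/5xx split), walking backwards in time.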
- for _ in range(60):
- count = randint(1, 10)
- tsdb.incr_multi(
- (
- (tsdb.models.internal, "client-api.all-versions.responses.2xx"),
- (tsdb.models.internal, "client-api.all-versions.requests"),
- ),
- now,
- int(count * 0.9),
- )
- tsdb.incr_multi(
- ((tsdb.models.internal, "client-api.all-versions.responses.4xx"),),
- now,
- int(count * 0.05),
- )
- tsdb.incr_multi(
- ((tsdb.models.internal, "client-api.all-versions.responses.5xx"),),
- now,
- int(count * 0.1),
- )
- now = now - timedelta(seconds=1)
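- # Then 30 days of hourly counts at higher volume.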
- for _ in range(24 * 30):
- count = randint(100, 1000)
- tsdb.incr_multi(
- (
- (tsdb.models.internal, "client-api.all-versions.responses.2xx"),
- (tsdb.models.internal, "client-api.all-versions.requests"),
- ),
- now,
- int(count * 4.9),
- )
- tsdb.incr_multi(
- ((tsdb.models.internal, "client-api.all-versions.responses.4xx"),),
- now,
- int(count * 0.05),
- )
- tsdb.incr_multi(
- ((tsdb.models.internal, "client-api.all-versions.responses.5xx"),),
- now,
- int(count * 0.1),
- )
- now = now - timedelta(hours=1)
- def create_sample_time_series(event, release=None):
- if event is None:
- return
- group = event.group
- project = group.project
- key = project.key_set.all()[0]
- now = datetime.utcnow().replace(tzinfo=utc)
- environment = Environment.get_or_create(
- project=project, name=Environment.get_name_or_default(event.get_tag("environment"))
- )
- if release:
- ReleaseEnvironment.get_or_create(
- project=project, release=release, environment=environment, datetime=now
- )
- grouprelease = GroupRelease.get_or_create(
- group=group, release=release, environment=environment, datetime=now
- )
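- # Last minute of per-second project/group stats, mirrored into org/project/key totals and frequency tables.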
- for _ in range(60):
- count = randint(1, 10)
- tsdb.incr_multi(
- ((tsdb.models.project, project.id), (tsdb.models.group, group.id)),
- now,
- count,
- environment_id=environment.id,
- )
- tsdb.incr_multi(
- (
- (tsdb.models.organization_total_received, project.organization_id),
- (tsdb.models.project_total_received, project.id),
- (tsdb.models.key_total_received, key.id),
- ),
- now,
- int(count * 1.1),
- )
- tsdb.incr(
- tsdb.models.project_total_forwarded,
- project.id,
- now,
- int(count * 1.1),
- )
- tsdb.incr_multi(
- (
- (tsdb.models.organization_total_rejected, project.organization_id),
- (tsdb.models.project_total_rejected, project.id),
- (tsdb.models.key_total_rejected, key.id),
- ),
- now,
- int(count * 0.1),
- )
- frequencies = [
- (tsdb.models.frequent_issues_by_project, {project.id: {group.id: count}}),
- (tsdb.models.frequent_environments_by_group, {group.id: {environment.id: count}}),
- ]
- if release:
- frequencies.append(
- (tsdb.models.frequent_releases_by_group, {group.id: {grouprelease.id: count}})
- )
- tsdb.record_frequency_multi(frequencies, now)
- now = now - timedelta(seconds=1)
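- # Then 30 days of hourly stats at higher volume.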
- for _ in range(24 * 30):
- count = randint(100, 1000)
- tsdb.incr_multi(
- ((tsdb.models.project, group.project.id), (tsdb.models.group, group.id)),
- now,
- count,
- environment_id=environment.id,
- )
- tsdb.incr_multi(
- (
- (tsdb.models.organization_total_received, project.organization_id),
- (tsdb.models.project_total_received, project.id),
- (tsdb.models.key_total_received, key.id),
- ),
- now,
- int(count * 1.1),
- )
- tsdb.incr_multi(
- (
- (tsdb.models.organization_total_rejected, project.organization_id),
- (tsdb.models.project_total_rejected, project.id),
- (tsdb.models.key_total_rejected, key.id),
- ),
- now,
- int(count * 0.1),
- )
- frequencies = [
- (tsdb.models.frequent_issues_by_project, {project.id: {group.id: count}}),
- (tsdb.models.frequent_environments_by_group, {group.id: {environment.id: count}}),
- ]
- if release:
- frequencies.append(
- (tsdb.models.frequent_releases_by_group, {group.id: {grouprelease.id: count}})
- )
- tsdb.record_frequency_multi(frequencies, now)
- now = now - timedelta(hours=1)
- def main(
- skip_default_setup=False,
- num_events=1,
- extra_events=False,
- load_trends=False,
- load_performance_issues=False,
- slow=False,
- ):
- try:
- user = User.objects.filter(is_superuser=True)[0]
- except IndexError:
- raise Exception("No superuser exists (run `make bootstrap`)")
- dummy_user, _ = User.objects.get_or_create(
- username="dummy@example.com", defaults={"email": "dummy@example.com"}
- )
- dummy_user.set_password("dummy")
- dummy_user.save()
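- # (team name, (project names, ...)) fixtures used to build the mock org.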
- mocks = (
- ("Massive Dynamic", ("Ludic Science",)),
- ("Captain Planet", ("Earth", "Fire", "Wind", "Water", "Heart")),
- )
- project_map = {}
- Broadcast.objects.create(
- title="Learn about Source Maps",
- message="Source maps are JSON files that contain information on how to map your transpiled source code back to their original source.",
- link="https://docs.sentry.io/platforms/javascript/#source-maps",
- )
- if settings.SENTRY_SINGLE_ORGANIZATION:
- org = Organization.get_default()
- print(f"Mocking org {org.name}") # NOQA
- else:
- print("Mocking org {}".format("Default")) # NOQA
- org, _ = Organization.objects.get_or_create(slug="default")
- OrganizationMember.objects.get_or_create(
- user=user, organization=org, role=roles.get_top_dog().id
- )
- dummy_member, _ = OrganizationMember.objects.get_or_create(
- user=dummy_user, organization=org, defaults={"role": roles.get_default().id}
- )
- # Allow for 0 events, if you only want transactions
- event1 = event2 = event3 = event4 = event5 = None
- if skip_default_setup:
- # Quickly fetch/create the teams and projects
- for team_name, project_names in mocks:
- print(f"> Mocking team {team_name}") # NOQA
- team, _ = Team.objects.get_or_create(name=team_name, defaults={"organization": org})
- for project_name in project_names:
- print(f" > Mocking project {project_name}") # NOQA
- project, _ = Project.objects.get_or_create(
- name=project_name,
- defaults={
- "organization": org,
- "first_event": timezone.now(),
- "flags": Project.flags.has_releases,
- },
- )
- project_map[project_name] = project
- project.add_team(team)
- else:
- for team_name, project_names in mocks:
- print(f"> Mocking team {team_name}") # NOQA
- team, _ = Team.objects.get_or_create(name=team_name, defaults={"organization": org})
- for project_name in project_names:
- print(f" > Mocking project {project_name}") # NOQA
- project, _ = Project.objects.get_or_create(
- name=project_name,
- defaults={
- "organization": org,
- "first_event": timezone.now(),
- "flags": Project.flags.has_releases,
- },
- )
- project_map[project_name] = project
- project.add_team(team)
- if not project.first_event:
- project.update(first_event=project.date_added)
- if not project.flags.has_releases:
- project.update(flags=F("flags").bitor(Project.flags.has_releases))
- environment = Environment.get_or_create(project=project, name=next(ENVIRONMENTS))
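- # Give each project a cron monitor (named after a real celerybeat task) plus one synthetic check-in.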
- monitor, _ = Monitor.objects.get_or_create(
- name=next(MONITOR_NAMES),
- project_id=project.id,
- organization_id=org.id,
- type=MonitorType.CRON_JOB,
- defaults={
- "status": MonitorStatus.DISABLED,
- "config": {"schedule": next(MONITOR_SCHEDULES)},
- "next_checkin": timezone.now() + timedelta(minutes=60),
- "last_checkin": timezone.now(),
- },
- )
- monitor_env, _ = MonitorEnvironment.objects.get_or_create(
- monitor=monitor,
- environment=environment,
- defaults={
- "status": MonitorStatus.DISABLED,
- "next_checkin": timezone.now() + timedelta(minutes=60),
- "last_checkin": timezone.now(),
- },
- )
- MonitorCheckIn.objects.create(
- project_id=monitor.project_id,
- monitor=monitor,
- monitor_environment=monitor_env,
- status=CheckInStatus.OK
- if monitor_env.status == MonitorStatus.OK
- else CheckInStatus.ERROR,
- )
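- # Each filter below uses a fresh random sha1 as the version, so in practice
- # the exists() check is always false and a brand-new release is created per run.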
- with transaction.atomic():
- has_release = Release.objects.filter(
- version=sha1(uuid4().bytes).hexdigest(),
- organization_id=project.organization_id,
- projects=project,
- ).exists()
- if not has_release:
- release = Release.objects.filter(
- version=sha1(uuid4().bytes).hexdigest(),
- organization_id=project.organization_id,
- ).first()
- if not release:
- release = Release.objects.create(
- version=sha1(uuid4().bytes).hexdigest(),
- organization_id=project.organization_id,
- )
- release.add_project(project)
- generate_tombstones(project, user)
- raw_commits = generate_commits(user)
- try:
- with transaction.atomic():
- repo, _ = Repository.objects.get_or_create(
- organization_id=org.id,
- provider="integrations:github",
- external_id="example/example",
- defaults={
- "name": "Example Repo",
- "url": "https://github.com/example/example",
- },
- )
- except IntegrityError:
- # for users with legacy github plugin
- # upgrade to the new integration
- repo = Repository.objects.get(
- organization_id=org.id,
- provider="github",
- external_id="example/example",
- name="Example Repo",
- )
- repo.provider = "integrations:github"
- repo.save()
- authors = set()
- for commit_index, raw_commit in enumerate(raw_commits):
- author = CommitAuthor.objects.get_or_create(
- organization_id=org.id,
- email=raw_commit["author"][1],
- defaults={"name": raw_commit["author"][0]},
- )[0]
- commit = Commit.objects.get_or_create(
- organization_id=org.id,
- repository_id=repo.id,
- key=raw_commit["key"],
- defaults={"author": author, "message": raw_commit["message"]},
- )[0]
- authors.add(author)
- for file in raw_commit["files"]:
- ReleaseFile.objects.get_or_create(
- organization_id=project.organization_id,
- release_id=release.id,
- name=file[0],
- file=File.objects.get_or_create(
- name=file[0], type="release.file", checksum="abcde" * 8, size=13043
- )[0],
- defaults={"organization_id": project.organization_id},
- )
- CommitFileChange.objects.get_or_create(
- organization_id=org.id, commit=commit, filename=file[0], type=file[1]
- )
- ReleaseCommit.objects.get_or_create(
- organization_id=org.id, release=release, commit=commit, order=commit_index
- )
- # create an unreleased commit
- Commit.objects.get_or_create(
- organization_id=org.id,
- repository_id=repo.id,
- key=sha1(uuid4().bytes).hexdigest(),
- defaults={
- "author": CommitAuthor.objects.get_or_create(
- organization_id=org.id, email=user.email, defaults={"name": user.name}
- )[0],
- "message": "feat: Do something to {}\n{}".format(
- random.choice(loremipsum.words) + ".js", make_sentence()
- ),
- },
- )[0]
- Activity.objects.create(
- type=ActivityType.RELEASE.value,
- project=project,
- ident=release.version,
- user_id=user.id,
- data={"version": release.version},
- )
- deploy = Deploy.objects.create(
- organization_id=project.organization_id,
- release=release,
- environment_id=environment.id,
- )
- release.update(
- commit_count=len(raw_commits),
- last_commit_id=commit.id,
- total_deploys=Deploy.objects.filter(release=release).count(),
- last_deploy_id=deploy.id,
- authors=[str(a.id) for a in authors],
- )
- ReleaseProjectEnvironment.objects.create_or_update(
- project=project,
- environment=environment,
- release=release,
- defaults={"last_deploy_id": deploy.id},
- )
- Activity.objects.create(
- type=ActivityType.DEPLOY.value,
- project=project,
- ident=release.version,
- data={
- "version": release.version,
- "deploy_id": deploy.id,
- "environment": environment.name,
- },
- datetime=deploy.date_finished,
- )
- # Add a bunch of additional dummy events to support pagination
- if extra_events:
- for _ in range(45):
- platform = next(PLATFORMS)
- create_sample_event(
- project=project,
- platform=platform,
- release=release.version,
- level=next(LEVELS),
- environment=next(ENVIRONMENTS),
- message="This is a mostly useless example %s exception" % platform,
- checksum=md5_text(platform + str(_)).hexdigest(),
- user=generate_user(),
- )
- for _ in range(num_events):
- event1 = create_sample_event(
- project=project,
- platform="python",
- release=release.version,
- environment=next(ENVIRONMENTS),
- user=generate_user(),
- )
- EventAttachment.objects.create(
- project_id=project.id,
- event_id=event1.event_id,
- name="example-logfile.txt",
- file_id=File.objects.get_or_create(
- name="example-logfile.txt",
- type="text/plain",
- checksum="abcde" * 8,
- size=13043,
- )[0].id,
- )
- event2 = create_sample_event(
- project=project,
- platform="javascript",
- release=release.version,
- environment=next(ENVIRONMENTS),
- sdk={"name": "raven-js", "version": "2.1.0"},
- user=generate_user(),
- )
- event3 = create_sample_event(project, "java")
- event4 = create_sample_event(
- project=project,
- platform="ruby",
- release=release.version,
- environment=next(ENVIRONMENTS),
- user=generate_user(),
- )
- event5 = create_sample_event(
- project=project,
- platform="cocoa",
- release=release.version,
- environment=next(ENVIRONMENTS),
- user=generate_user(),
- )
- create_sample_event(
- project=project,
- platform="php",
- release=release.version,
- environment=next(ENVIRONMENTS),
- message=LONG_MESSAGE,
- user=generate_user(),
- )
- create_sample_event(
- project=project,
- platform="cocoa",
- sample_name="react-native",
- release=release.version,
- environment=next(ENVIRONMENTS),
- user=generate_user(),
- )
- create_sample_event(
- project=project,
- platform="pii",
- release=release.version,
- environment=next(ENVIRONMENTS),
- user=generate_user(),
- )
- if event5:
- Commit.objects.get_or_create(
- organization_id=org.id,
- repository_id=repo.id,
- key=sha1(uuid4().bytes).hexdigest(),
- defaults={
- "author": CommitAuthor.objects.get_or_create(
- organization_id=org.id,
- email=user.email,
- defaults={"name": user.name},
- )[0],
- "message": f"Ooops!\nFixes {event5.group.qualified_short_id}",
- },
- )[0]
- create_sample_event(project=project, environment=next(ENVIRONMENTS), platform="csp")
- if event3:
- UserReport.objects.create(
- project_id=project.id,
- event_id=event3.event_id,
- group_id=event3.group.id,
- name="Jane Bloggs",
- email="jane@example.com",
- comments=make_sentence(),
- )
- # Metric alerts
- alert_rule = create_alert_rule(
- org,
- [project],
- "My Alert Rule",
- "level:error",
- "count()",
- 10,
- AlertRuleThresholdType.ABOVE,
- 1,
- )
- create_alert_rule_trigger(alert_rule, "critical", 10)
- create_incident(
- org,
- type_=IncidentType.DETECTED,
- title="My Incident",
- date_started=datetime.utcnow().replace(tzinfo=utc),
- alert_rule=alert_rule,
- projects=[project],
- )
- print(f" > Loading time series data") # NOQA
- if event1:
- create_sample_time_series(event1, release=release)
- if event2:
- create_sample_time_series(event2, release=release)
- if event3:
- create_sample_time_series(event3)
- if event4:
- create_sample_time_series(event4, release=release)
- if event5:
- create_sample_time_series(event5, release=release)
- if hasattr(buffer, "process_pending"):
- print(" > Processing pending buffers") # NOQA
- buffer.process_pending()
- mocks_loaded.send(project=project, sender=__name__)
- OrganizationAccessRequest.objects.create_or_update(member=dummy_member, team=team)
- create_mock_transactions(project_map, load_trends, load_performance_issues, slow)
- Activity.objects.create(
- type=ActivityType.RELEASE.value,
- project=project,
- ident="4f38b65c62c4565aa94bba391ff8946922a8eed4",
- user_id=user.id,
- data={"version": "4f38b65c62c4565aa94bba391ff8946922a8eed4"},
- )
- create_system_time_series()
- def create_mock_transactions(
- project_map, load_trends=False, load_performance_issues=False, slow=False
- ):
- backend_project = project_map["Earth"]
- frontend_project = project_map["Fire"]
- service_projects = [
- project_map["Wind"],
- project_map["Water"],
- project_map["Heart"],
- ]
- for project in project_map.values():
- if not project.flags.has_transactions:
- project.update(flags=F("flags").bitor(Project.flags.has_transactions))
- timestamp = timezone.now()
- print(f" > Loading a trace") # NOQA
- create_trace(
- slow,
- timestamp - timedelta(milliseconds=random_normal(4000, 250, 1000)),
- timestamp,
- generate_user(),
- uuid4().hex,
- None,
- {
- "project": frontend_project,
- "transaction": "/plants/:plantId/",
- "frontend": True,
- "errors": 1,
- "children": [
- {
- "project": backend_project,
- "transaction": "/api/plants/",
- "children": [
- {
- "project": service_projects[0],
- "transaction": "/products/all/",
- "children": [],
- },
- {
- "project": service_projects[1],
- "transaction": "/analytics/",
- "children": [],
- },
- {
- "project": service_projects[2],
- "transaction": "tasks.create_invoice",
- "children": [
- {
- "project": service_projects[2],
- "transaction": "tasks.process_invoice",
- "children": [
- {
- "project": service_projects[2],
- "transaction": "tasks.process_invoice",
- "children": [
- {
- "project": service_projects[2],
- "transaction": "tasks.process_invoice",
- "children": [
- {
- "project": service_projects[2],
- "transaction": "tasks.process_invoice",
- "children": [],
- },
- ],
- },
- ],
- },
- ],
- },
- ],
- },
- ],
- },
- ],
- },
- )
- if load_trends:
- print(f" > Loading trends data") # NOQA
- for day in range(14):
- for hour in range(24):
- timestamp = timezone.now() - timedelta(days=day, hours=hour)
- transaction_user = generate_user()
- trace_id = uuid4().hex
- frontend_span_id = uuid4().hex[:16]
- frontend_root_span_id = uuid4().hex[:16]
- frontend_duration = random_normal(2000 - 50 * day, 250, 1000)
- create_sample_event(
- project=frontend_project,
- platform="javascript-transaction",
- transaction="/trends/:frontend/",
- event_id=uuid4().hex,
- user=transaction_user,
- timestamp=timestamp,
- # start_timestamp decreases based on day so that there's a trend
- start_timestamp=timestamp - timedelta(milliseconds=frontend_duration),
- measurements={
- "fp": {"value": random_normal(1250 - 50 * day, 200, 500)},
- "fcp": {"value": random_normal(1250 - 50 * day, 200, 500)},
- "lcp": {"value": random_normal(2800 - 50 * day, 400, 2000)},
- "fid": {"value": random_normal(5 - 0.125 * day, 2, 1)},
- },
- # Root
- parent_span_id=None,
- span_id=frontend_root_span_id,
- trace=trace_id,
- spans=[
- {
- "same_process_as_parent": True,
- "op": "http",
- "description": "GET /api/plants/?all_plants=1",
- "data": {
- "duration": random_normal(
- 1 - 0.05 * day, 0.25, 0.01, frontend_duration / 1000
- ),
- "offset": 0.02,
- },
- "span_id": frontend_span_id,
- "trace_id": trace_id,
- }
- ],
- )
- # try to give clickhouse some breathing room
- if slow:
- time.sleep(0.05)
- backend_duration = random_normal(1500 + 50 * day, 250, 500)
- create_sample_event(
- project=backend_project,
- platform="transaction",
- transaction="/trends/backend/",
- event_id=uuid4().hex,
- user=transaction_user,
- timestamp=timestamp,
- start_timestamp=timestamp - timedelta(milliseconds=backend_duration),
- # match the trace from the javascript transaction
- trace=trace_id,
- parent_span_id=frontend_root_span_id,
- spans=[],
- )
- # try to give clickhouse some breathing room
- if slow:
- time.sleep(0.05)
- if load_performance_issues:
- def load_n_plus_one_issue():
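- # N+1 query pattern: one source SELECT followed by ten repeated author SELECTs sharing a span hash.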
- trace_id = uuid4().hex
- transaction_user = generate_user()
- frontend_root_span_id = uuid4().hex[:16]
- n_plus_one_db_current_offset = timestamp
- n_plus_one_db_duration = timedelta(milliseconds=100)
- parent_span_id = uuid4().hex[:16]
- source_span = {
- "timestamp": (timestamp + n_plus_one_db_duration).timestamp(),
- "start_timestamp": (timestamp + timedelta(milliseconds=10)).timestamp(),
- "description": "SELECT `books_book`.`id`, `books_book`.`title`, `books_book`.`author_id` FROM `books_book` ORDER BY `books_book`.`id` DESC LIMIT 10",
- "op": "db",
- "parent_span_id": parent_span_id,
- "span_id": uuid4().hex[:16],
- "hash": "858fea692d4d93e8",
- }
- def make_repeating_span(duration):
- nonlocal timestamp
- nonlocal n_plus_one_db_current_offset
- nonlocal n_plus_one_db_duration
- n_plus_one_db_duration += timedelta(milliseconds=duration) + timedelta(
- milliseconds=1
- )
- n_plus_one_db_current_offset = timestamp + n_plus_one_db_duration
- return {
- "timestamp": (
- n_plus_one_db_current_offset + timedelta(milliseconds=duration)
- ).timestamp(),
- "start_timestamp": (
- n_plus_one_db_current_offset + timedelta(milliseconds=1)
- ).timestamp(),
- "description": "SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
- "op": "db",
- "span_id": uuid4().hex[:16],
- "parent_span_id": parent_span_id,
- "hash": "63f1e89e6a073441",
- }
- repeating_spans = [make_repeating_span(200) for _ in range(10)]
- parent_span = {
- "timestamp": (
- timestamp + n_plus_one_db_duration + timedelta(milliseconds=200)
- ).timestamp(),
- "start_timestamp": timestamp.timestamp(),
- "description": "new",
- "op": "django.view",
- "parent_span_id": uuid4().hex[:16],
- "span_id": parent_span_id,
- "hash": "0f43fb6f6e01ca52",
- }
- create_sample_event(
- project=backend_project,
- platform="transaction",
- transaction="/n_plus_one_db/backend/",
- event_id=uuid4().hex,
- user=transaction_user,
- timestamp=timestamp + n_plus_one_db_duration + timedelta(milliseconds=300),
- start_timestamp=timestamp,
- trace=trace_id,
- parent_span_id=frontend_root_span_id,
- spans=[
- parent_span,
- source_span,
- ]
- + repeating_spans,
- )
- time.sleep(1.0)
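- # File I/O on main thread: a file.write span flagged as blocking the UI thread.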
- create_sample_event(
- project=backend_project,
- platform="transaction",
- transaction="/file-io-main-thread/",
- event_id=uuid4().hex,
- user=transaction_user,
- timestamp=timestamp + timedelta(milliseconds=300),
- start_timestamp=timestamp,
- trace=trace_id,
- parent_span_id=frontend_root_span_id,
- spans=[
- parent_span,
- {
- "timestamp": (timestamp + timedelta(milliseconds=200)).timestamp(),
- "start_timestamp": timestamp.timestamp(),
- "description": "1669031858711_file.txt (4.0 kB)",
- "op": "file.write",
- "span_id": uuid4().hex[:16],
- "parent_span_id": parent_span_id,
- "status": "ok",
- "data": {
- "blocked_ui_thread": True,
- "call_stack": [
- {
- "function": "onClick",
- "in_app": True,
- "lineno": 2,
- "module": "io.sentry.samples.android.MainActivity$$ExternalSyntheticLambda6",
- "native": False,
- },
- {
- "filename": "MainActivity.java",
- "function": "lambda$onCreate$5$io-sentry-samples-android-MainActivity",
- "in_app": True,
- "lineno": 93,
- "module": "io.sentry.samples.android.MainActivity",
- "native": False,
- },
- ],
- "file.path": "/data/user/0/io.sentry.samples.android/files/1669031858711_file.txt",
- "file.size": 4010,
- },
- },
- ],
- )
- def load_uncompressed_asset_issue():
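- # Uncompressed asset: transfer size equals decoded size (1 MB), i.e. the script was served without compression.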
- time.sleep(1.0)
- transaction_user = generate_user()
- trace_id = uuid4().hex
- parent_span_id = uuid4().hex[:16]
- parent_span = {
- "timestamp": (timestamp + timedelta(milliseconds=300)).timestamp(),
- "start_timestamp": timestamp.timestamp(),
- "description": "new",
- "op": "pageload",
- "parent_span_id": uuid4().hex[:16],
- "span_id": parent_span_id,
- "hash": "0f43fb6f6e01ca52",
- }
- spans = [
- {
- "timestamp": (timestamp + timedelta(milliseconds=1000)).timestamp(),
- "start_timestamp": (timestamp + timedelta(milliseconds=300)).timestamp(),
- "description": "https://s1.sentry-cdn.com/_static/dist/sentry/entrypoints/app.js",
- "op": "resource.script",
- "parent_span_id": parent_span_id,
- "span_id": uuid4().hex[:16],
- "hash": "858fea692d4d93e9",
- "data": {
- "http.transfer_size": 1_000_000,
- "http.response_content_length": 1_000_000,
- "http.decoded_response_content_length": 1_000_000,
- },
- },
- ]
- create_sample_event(
- project=backend_project,
- platform="transaction",
- transaction="/uncompressed-asset/",
- event_id=uuid4().hex,
- user=transaction_user,
- timestamp=timestamp + timedelta(milliseconds=300),
- start_timestamp=timestamp,
- trace=trace_id,
- parent_span_id=parent_span_id,
- spans=[parent_span] + spans,
- )
- def load_consecutive_db_issue():
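- # Consecutive DB queries: three independent sequential SELECTs that could have run in parallel.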
- time.sleep(1.0)
- transaction_user = generate_user()
- trace_id = uuid4().hex
- parent_span_id = uuid4().hex[:16]
- parent_span = {
- "timestamp": (timestamp + timedelta(milliseconds=300)).timestamp(),
- "start_timestamp": timestamp.timestamp(),
- "description": "new",
- "op": "django.view",
- "parent_span_id": uuid4().hex[:16],
- "span_id": parent_span_id,
- "hash": "0f43fb6f6e01ca52",
- }
- spans = [
- {
- "timestamp": (timestamp + timedelta(milliseconds=1000)).timestamp(),
- "start_timestamp": (timestamp + timedelta(milliseconds=300)).timestamp(),
- "description": "SELECT `customer`.`id` FROM `customers` WHERE `customer`.`name` = 'customerName'",
- "op": "db",
- "parent_span_id": parent_span_id,
- "span_id": uuid4().hex[:16],
- "hash": "858fea692d4d93e9",
- },
- {
- "timestamp": (timestamp + timedelta(milliseconds=2000)).timestamp(),
- "start_timestamp": (timestamp + timedelta(milliseconds=1000)).timestamp(),
- "description": "SELECT COUNT(*) FROM `customers`",
- "op": "db",
- "parent_span_id": parent_span_id,
- "span_id": uuid4().hex[:16],
- "hash": "858fea692d4d93e7",
- },
- {
- "timestamp": (timestamp + timedelta(milliseconds=3000)).timestamp(),
- "start_timestamp": (timestamp + timedelta(milliseconds=2000)).timestamp(),
- "description": "SELECT COUNT(*) FROM `items`",
- "op": "db",
- "parent_span_id": parent_span_id,
- "span_id": uuid4().hex[:16],
- "hash": "858fea692d4d93e6",
- },
- ]
- create_sample_event(
- project=backend_project,
- platform="transaction",
- transaction="/consecutive-db/",
- event_id=uuid4().hex,
- user=transaction_user,
- timestamp=timestamp + timedelta(milliseconds=300),
- start_timestamp=timestamp,
- trace=trace_id,
- parent_span_id=parent_span_id,
- spans=[parent_span] + spans,
- )
- def load_render_blocking_asset_issue():
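- # Render-blocking asset: a ~1 MB script that finishes loading before the 2.5 s FCP.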
- transaction_user = generate_user()
- trace_id = uuid4().hex
- parent_span_id = uuid4().hex[:16]
- spans = [
- {
- "timestamp": (timestamp + timedelta(milliseconds=1300)).timestamp(),
- "start_timestamp": (timestamp + timedelta(milliseconds=300)).timestamp(),
- "description": "https://example.com/asset.js",
- "op": "resource.script",
- "parent_span_id": parent_span_id,
- "span_id": uuid4().hex[:16],
- "hash": "858fea692d4d93e8",
- "data": {"http.response_content_length": 1000001},
- }
- ]
- create_sample_event(
- project=frontend_project,
- platform="transaction",
- transaction="/render-blocking-asset/",
- event_id=uuid4().hex,
- user=transaction_user,
- timestamp=timestamp + timedelta(milliseconds=300),
- start_timestamp=timestamp,
- trace=trace_id,
- parent_span_id=parent_span_id,
- spans=spans,
- measurements={
- "fcp": {"value": 2500.0},
- },
- )
- def load_m_n_plus_one_issue():
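- # MN+1 pattern: alternating http/db spans repeated under a single graphql.execute parent.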
- trace_id = uuid4().hex
- transaction_user = generate_user()
- parent_span_id = uuid4().hex[:16]
- duration = 200
- def make_repeating_span(i):
- nonlocal timestamp
- nonlocal duration
- start_timestamp = timestamp + timedelta(milliseconds=i * (duration + 1))
- end_timestamp = start_timestamp + timedelta(milliseconds=duration)
- op = "http" if i % 2 == 0 else "db"
- description = "GET /" if i % 2 == 0 else "SELECT * FROM authors WHERE id = %s"
- hash = "63f1e89e6a073441" if i % 2 == 0 else "a109ff3ef40f7fb3"
- return {
- "timestamp": end_timestamp.timestamp(),
- "start_timestamp": start_timestamp.timestamp(),
- "description": description,
- "op": op,
- "span_id": uuid4().hex[:16],
- "parent_span_id": parent_span_id,
- "hash": hash,
- }
- span_count = 10
- repeating_spans = [make_repeating_span(i) for i in range(span_count)]
- parent_span = {
- "timestamp": (
- timestamp + timedelta(milliseconds=span_count * (duration + 1))
- ).timestamp(),
- "start_timestamp": timestamp.timestamp(),
- "description": "execute",
- "op": "graphql.execute",
- "parent_span_id": uuid4().hex[:16],
- "span_id": parent_span_id,
- "hash": "0f43fb6f6e01ca52",
- }
- create_sample_event(
- project=backend_project,
- platform="transaction",
- transaction="/m_n_plus_one_db/backend/",
- event_id=uuid4().hex,
- user=transaction_user,
- timestamp=timestamp + timedelta(milliseconds=span_count * (duration + 1) + 100),
- start_timestamp=timestamp,
- trace=trace_id,
- spans=[parent_span] + repeating_spans,
- )
- def load_performance_issues():
- print(f" > Loading performance issues data") # NOQA
- print(f" > Loading n plus one issue") # NOQA
- load_n_plus_one_issue()
- print(f" > Loading consecutive db issue") # NOQA
- load_consecutive_db_issue()
- print(f" > Loading uncompressed asset issue") # NOQA
- load_uncompressed_asset_issue()
- print(f" > Loading render blocking asset issue") # NOQA
- load_render_blocking_asset_issue()
- print(f" > Loading MN+1 issue") # NOQA
- load_m_n_plus_one_issue()
- load_performance_issues()
- if __name__ == "__main__":
- settings.CELERY_ALWAYS_EAGER = True
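- # Run Celery tasks inline so mock data is processed synchronously.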
- from optparse import OptionParser
- parser = OptionParser()
- parser.add_option("--events", default=1, type=int, help="number of events to generate")
- parser.add_option(
- "--skip-default-setup",
- default=False,
- action="store_true",
- help="Skips creating the default project, teams and timeseries, useful when only loading specific transactions",
- )
- parser.add_option(
- "--extra-events",
- default=False,
- action="store_true",
- help="add multiple events for each error group",
- )
- parser.add_option(
- "--load-trends",
- default=False,
- action="store_true",
- help="load multiple transactions for each id to show trends",
- )
- parser.add_option(
- "--load-performance-issues",
- default=False,
- action="store_true",
- help="load transactions with performance issues, still needs options/flags on for issues to appear.",
- )
- parser.add_option(
- "--slow",
- default=False,
- action="store_true",
- help="sleep between each transaction to let clickhouse rest",
- )
- (options, args) = parser.parse_args()
- try:
- main(
- skip_default_setup=options.skip_default_setup,
- num_events=options.events,
- extra_events=options.extra_events,
- load_trends=options.load_trends,
- load_performance_issues=options.load_performance_issues,
- slow=options.slow,
- )
- from sentry.issues.producer import get_occurrence_producer
- get_occurrence_producer().close()
- except Exception:
- # Avoid reporting any issues recursively back into Sentry
- import sys
- import traceback
- traceback.print_exc()
- sys.exit(1)