12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286 |
- #!/usr/bin/env python
- import time
- from sentry.runner import configure
- from sentry.types.activity import ActivityType
- configure()
- import itertools
- import random
- from datetime import datetime, timedelta
- from hashlib import sha1
- from random import randint
- from uuid import uuid4
- from django.conf import settings
- from django.db import IntegrityError, transaction
- from django.db.models import F
- from django.utils import timezone
- from pytz import utc
- from sentry import buffer, roles, tsdb
- from sentry.event_manager import HashDiscarded
- from sentry.incidents.logic import create_alert_rule, create_alert_rule_trigger, create_incident
- from sentry.incidents.models import AlertRuleThresholdType, IncidentType
- from sentry.models import (
- TOMBSTONE_FIELDS_FROM_GROUP,
- Activity,
- Broadcast,
- CheckInStatus,
- Commit,
- CommitAuthor,
- CommitFileChange,
- Deploy,
- Environment,
- EventAttachment,
- File,
- Group,
- GroupRelease,
- GroupTombstone,
- Monitor,
- MonitorCheckIn,
- MonitorStatus,
- MonitorType,
- Organization,
- OrganizationAccessRequest,
- OrganizationMember,
- Project,
- Release,
- ReleaseCommit,
- ReleaseEnvironment,
- ReleaseFile,
- ReleaseProjectEnvironment,
- Repository,
- Team,
- User,
- UserReport,
- )
- from sentry.signals import mocks_loaded
- from sentry.similarity import features
- from sentry.utils import loremipsum
- from sentry.utils.hashlib import md5_text
- from sentry.utils.samples import create_sample_event as _create_sample_event
- from sentry.utils.samples import create_trace, generate_user, random_normal
# Rotating pools of sample values; each next() call walks the cycle forever,
# so consecutive mock events get varied but deterministic-order attributes.
PLATFORMS = itertools.cycle(["ruby", "php", "python", "java", "javascript"])
# Weighted toward "error" so the generated issue stream looks realistic.
LEVELS = itertools.cycle(["error", "error", "error", "fatal", "warning"])
# The trailing "" exercises the empty/default environment code path.
ENVIRONMENTS = itertools.cycle(["production", "production", "staging", "alpha", "beta", ""])
# Cron monitors are named after real celerybeat task entries from settings.
MONITOR_NAMES = itertools.cycle(settings.CELERYBEAT_SCHEDULE.keys())
MONITOR_SCHEDULES = itertools.cycle(["* * * * *", "0 * * * *", "0 0 * * *"])
# Oversized sample message (a ClickHouse stack trace) used to exercise
# rendering/truncation of very long event messages.
LONG_MESSAGE = """Code: 0.
DB::Exception: String is too long for DateTime: 2018-10-26T19:14:18+00:00. Stack trace:
0. clickhouse-server(StackTrace::StackTrace()+0x16) [0x99e9626]
1. clickhouse-server(DB::Exception::Exception(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, int)+0x22) [0x3087172]
2. clickhouse-server(DB::FunctionComparison<DB::EqualsOp, DB::NameEquals>::executeDateOrDateTimeOrEnumOrUUIDWithConstString(DB::Block&, unsigned long, DB::IColumn const*, DB::IColumn const*, std::shared_ptr<DB::IDataType const> const&, std::shared_ptr<DB::IDataType const> const&, bool, unsigned long)+0x13c8) [0x3b233d8]
3. clickhouse-server(DB::FunctionComparison<DB::EqualsOp, DB::NameEquals>::executeImpl(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x576) [0x3bafc86]
4. clickhouse-server(DB::PreparedFunctionImpl::defaultImplementationForNulls(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x174) [0x7953cd4]
5. clickhouse-server(DB::PreparedFunctionImpl::executeWithoutLowCardinalityColumns(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x54) [0x7953b04]
6. clickhouse-server(DB::PreparedFunctionImpl::execute(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x3e2) [0x7954222]
7. clickhouse-server(DB::ExpressionAction::execute(DB::Block&, std::unordered_map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long, std::hash<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::equal_to<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, unsigned long> > >&) const+0x69b) [0x7b021fb]
8. clickhouse-server(DB::ExpressionActions::execute(DB::Block&) const+0xe6) [0x7b03676]
9. clickhouse-server(DB::FilterBlockInputStream::FilterBlockInputStream(std::shared_ptr<DB::IBlockInputStream> const&, std::shared_ptr<DB::ExpressionActions> const&, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool)+0x711) [0x79b7e31]
10. clickhouse-server() [0x75e9443]
11. clickhouse-server(DB::InterpreterSelectQuery::executeImpl(DB::InterpreterSelectQuery::Pipeline&, std::shared_ptr<DB::IBlockInputStream> const&, bool)+0x118f) [0x75f212f]
12. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr<DB::IAST> const&, DB::Context const&, std::shared_ptr<DB::IBlockInputStream> const&, std::shared_ptr<DB::IStorage> const&, std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x5e6) [0x75f2d46]
13. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr<DB::IAST> const&, DB::Context const&, std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x56) [0x75f3aa6]
14. clickhouse-server(DB::InterpreterSelectWithUnionQuery::InterpreterSelectWithUnionQuery(std::shared_ptr<DB::IAST> const&, DB::Context const&, std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x7e7) [0x75ffab7]
15. clickhouse-server(DB::InterpreterFactory::get(std::shared_ptr<DB::IAST>&, DB::Context&, DB::QueryProcessingStage::Enum)+0x3a8) [0x75dc138]
16. clickhouse-server() [0x768fad9]
17. clickhouse-server(DB::executeQuery(std::__cxx11::basic..."""
def make_sentence(words=None):
    """Return a pseudo-random lorem-ipsum sentence.

    When *words* is None, the word count is drawn from a Weibull
    distribution (shape 3, scale 8), which clusters around ~7 words.
    """
    word_count = int(random.weibullvariate(8, 3)) if words is None else words
    return " ".join(random.choice(loremipsum.words) for _ in range(word_count))
def create_sample_event(*args, **kwargs):
    """Create a sample event, tolerating discarded hashes.

    Thin wrapper around ``sentry.utils.samples.create_sample_event`` that
    skips (and logs) events rejected via ``HashDiscarded`` and records any
    successfully created event with the similarity feature backend.
    Returns the event, or None when it was skipped or not created.
    """
    try:
        event = _create_sample_event(*args, **kwargs)
    except HashDiscarded as e:
        print(f"> Skipping Event: {e.message}")  # NOQA
        return None
    if event is not None:
        features.record([event])
    return event
def generate_commits(user):
    """Build a list of 1-20 fake commit dicts for seeding a release.

    The commit at index 1 always touches ``raven/base.py``; the others touch
    random ``.js`` files. Roughly 1 in 6 commits is attributed to *user*,
    the rest to random lorem-ipsum authors.

    Each dict has keys: "key" (sha1 hex), "message", "author"
    ((name, email) tuple), and "files" (list of (filename, change-type)).
    """
    commits = []
    for i in range(random.randint(1, 20)):
        if i == 1:
            filename = "raven/base.py"
        else:
            filename = random.choice(loremipsum.words) + ".js"

        if random.randint(0, 5) == 1:
            author = (user.name, user.email)
        else:
            author = (
                f"{random.choice(loremipsum.words)} {random.choice(loremipsum.words)}",
                f"{random.choice(loremipsum.words)}@example.com",
            )

        commits.append(
            {
                "key": sha1(uuid4().bytes).hexdigest(),
                # Fix: reference the touched file in the message instead of a
                # literal "(unknown)" placeholder — matching the message format
                # used for the unreleased commit created in main().
                "message": f"feat: Do something to {filename}\n{make_sentence()}",
                "author": author,
                "files": [(filename, "M")],
            }
        )
    return commits
def generate_tombstones(project, user):
    """Create tombstones for up to five of *project*'s groups.

    Tombstone ids start at 100000 (or just above the current maximum
    previous_group_id, whichever is larger) to avoid colliding with
    real group ids.
    """
    next_previous_id = 100000
    try:
        highest = GroupTombstone.objects.order_by("-previous_group_id")[0]
        next_previous_id = max(highest.previous_group_id, next_previous_id) + 1
    except IndexError:
        # No tombstones exist yet; keep the floor value.
        pass

    for group in Group.objects.filter(project=project)[:5]:
        GroupTombstone.objects.create(
            previous_group_id=next_previous_id,
            actor_id=user.id,
            **{name: getattr(group, name) for name in TOMBSTONE_FIELDS_FROM_GROUP},
        )
        next_previous_id += 1
def create_system_time_series():
    """Seed internal client-API tsdb counters.

    Writes per-second counts for the last minute and per-hour counts for
    the last 30 days, splitting each sample across 2xx/requests, 4xx,
    and 5xx buckets.
    """

    def _record(ts, count, success_factor):
        # 2xx responses and total requests share one success-weighted count.
        tsdb.incr_multi(
            (
                (tsdb.models.internal, "client-api.all-versions.responses.2xx"),
                (tsdb.models.internal, "client-api.all-versions.requests"),
            ),
            ts,
            int(count * success_factor),
        )
        tsdb.incr_multi(
            ((tsdb.models.internal, "client-api.all-versions.responses.4xx"),),
            ts,
            int(count * 0.05),
        )
        tsdb.incr_multi(
            ((tsdb.models.internal, "client-api.all-versions.responses.5xx"),),
            ts,
            int(count * 0.1),
        )

    now = datetime.utcnow().replace(tzinfo=utc)
    # Fine-grained: one sample per second for the last minute.
    for _ in range(60):
        _record(now, randint(1, 10), 0.9)
        now = now - timedelta(seconds=1)
    # Coarse-grained: one sample per hour for the last 30 days.
    for _ in range(24 * 30):
        _record(now, randint(100, 1000), 4.9)
        now = now - timedelta(hours=1)
def create_sample_time_series(event, release=None):
    """Backfill tsdb counters and frequency tables around *event*.

    Writes per-second samples for the last minute and per-hour samples for
    the last 30 days against the event's project, group, org and key. When
    *release* is given, release-environment rows are ensured and release
    frequencies are recorded as well. No-op when *event* is None.
    """
    if event is None:
        return

    group = event.group
    project = group.project
    key = project.key_set.all()[0]

    now = datetime.utcnow().replace(tzinfo=utc)

    environment = Environment.get_or_create(
        project=project, name=Environment.get_name_or_default(event.get_tag("environment"))
    )

    if release:
        ReleaseEnvironment.get_or_create(
            project=project, release=release, environment=environment, datetime=now
        )

        # Needed below so release frequencies can be keyed by grouprelease.id.
        grouprelease = GroupRelease.get_or_create(
            group=group, release=release, environment=environment, datetime=now
        )

    # Fine-grained backfill: one sample per second for the last minute.
    for _ in range(60):
        count = randint(1, 10)
        tsdb.incr_multi(
            ((tsdb.models.project, project.id), (tsdb.models.group, group.id)),
            now,
            count,
            environment_id=environment.id,
        )
        # Received totals run slightly above the accepted count (~10% overhead).
        tsdb.incr_multi(
            (
                (tsdb.models.organization_total_received, project.organization_id),
                (tsdb.models.project_total_received, project.id),
                (tsdb.models.key_total_received, key.id),
            ),
            now,
            int(count * 1.1),
        )
        tsdb.incr(
            tsdb.models.project_total_forwarded,
            project.id,
            now,
            int(count * 1.1),
        )
        # A small fraction (~10%) is counted as rejected.
        tsdb.incr_multi(
            (
                (tsdb.models.organization_total_rejected, project.organization_id),
                (tsdb.models.project_total_rejected, project.id),
                (tsdb.models.key_total_rejected, key.id),
            ),
            now,
            int(count * 0.1),
        )

        frequencies = [
            (tsdb.models.frequent_issues_by_project, {project.id: {group.id: count}}),
            (tsdb.models.frequent_environments_by_group, {group.id: {environment.id: count}}),
        ]
        if release:
            frequencies.append(
                (tsdb.models.frequent_releases_by_group, {group.id: {grouprelease.id: count}})
            )
        tsdb.record_frequency_multi(frequencies, now)

        now = now - timedelta(seconds=1)

    # Coarse-grained backfill: one sample per hour for the last 30 days.
    # Same shape as above but larger counts and no project_total_forwarded.
    for _ in range(24 * 30):
        count = randint(100, 1000)
        tsdb.incr_multi(
            ((tsdb.models.project, group.project.id), (tsdb.models.group, group.id)),
            now,
            count,
            environment_id=environment.id,
        )
        tsdb.incr_multi(
            (
                (tsdb.models.organization_total_received, project.organization_id),
                (tsdb.models.project_total_received, project.id),
                (tsdb.models.key_total_received, key.id),
            ),
            now,
            int(count * 1.1),
        )
        tsdb.incr_multi(
            (
                (tsdb.models.organization_total_rejected, project.organization_id),
                (tsdb.models.project_total_rejected, project.id),
                (tsdb.models.key_total_rejected, key.id),
            ),
            now,
            int(count * 0.1),
        )

        frequencies = [
            (tsdb.models.frequent_issues_by_project, {project.id: {group.id: count}}),
            (tsdb.models.frequent_environments_by_group, {group.id: {environment.id: count}}),
        ]
        if release:
            frequencies.append(
                (tsdb.models.frequent_releases_by_group, {group.id: {grouprelease.id: count}})
            )
        tsdb.record_frequency_multi(frequencies, now)

        now = now - timedelta(hours=1)
def main(
    skip_default_setup=False,
    num_events=1,
    extra_events=False,
    load_trends=False,
    load_performance_issues=False,
    slow=False,
):
    """Seed the local Sentry install with mock data.

    Creates (or reuses) the default org, mock teams/projects, releases,
    commits, deploys, monitors, sample events, alerts/incidents and time
    series, then generates mock transactions. With skip_default_setup=True
    only teams/projects are ensured before jumping to transactions.
    """
    try:
        user = User.objects.filter(is_superuser=True)[0]
    except IndexError:
        raise Exception("No superuser exists (run `make bootstrap`)")

    dummy_user, _ = User.objects.get_or_create(
        username="dummy@example.com", defaults={"email": "dummy@example.com"}
    )
    dummy_user.set_password("dummy")
    dummy_user.save()

    # (team name, (project names...)) fixtures to mock.
    mocks = (
        ("Massive Dynamic", ("Ludic Science",)),
        ("Captain Planet", ("Earth", "Fire", "Wind", "Water", "Heart")),
    )

    project_map = {}

    Broadcast.objects.create(
        title="Learn about Source Maps",
        message="Source maps are JSON files that contain information on how to map your transpiled source code back to their original source.",
        link="https://docs.sentry.io/platforms/javascript/#source-maps",
    )

    if settings.SENTRY_SINGLE_ORGANIZATION:
        org = Organization.get_default()
        print(f"Mocking org {org.name}")  # NOQA
    else:
        print("Mocking org {}".format("Default"))  # NOQA
        org, _ = Organization.objects.get_or_create(slug="default")

    OrganizationMember.objects.get_or_create(
        user=user, organization=org, role=roles.get_top_dog().id
    )

    dummy_member, _ = OrganizationMember.objects.get_or_create(
        user=dummy_user, organization=org, defaults={"role": roles.get_default().id}
    )

    # Allow for 0 events, if you only want transactions
    event1 = event2 = event3 = event4 = event5 = None

    if skip_default_setup:
        # Quickly fetch/create the teams and projects
        for team_name, project_names in mocks:
            print(f"> Mocking team {team_name}")  # NOQA
            team, _ = Team.objects.get_or_create(name=team_name, defaults={"organization": org})

            for project_name in project_names:
                print(f" > Mocking project {project_name}")  # NOQA
                project, _ = Project.objects.get_or_create(
                    name=project_name,
                    defaults={
                        "organization": org,
                        "first_event": timezone.now(),
                        "flags": Project.flags.has_releases,
                    },
                )
                project_map[project_name] = project
                project.add_team(team)
    else:
        for team_name, project_names in mocks:
            print(f"> Mocking team {team_name}")  # NOQA
            team, _ = Team.objects.get_or_create(name=team_name, defaults={"organization": org})

            for project_name in project_names:
                print(f" > Mocking project {project_name}")  # NOQA
                project, _ = Project.objects.get_or_create(
                    name=project_name,
                    defaults={
                        "organization": org,
                        "first_event": timezone.now(),
                        "flags": Project.flags.has_releases,
                    },
                )
                project_map[project_name] = project
                project.add_team(team)
                if not project.first_event:
                    project.update(first_event=project.date_added)
                if not project.flags.has_releases:
                    project.update(flags=F("flags").bitor(Project.flags.has_releases))

                # One cron monitor per project, named after a celerybeat task.
                monitor, created = Monitor.objects.get_or_create(
                    name=next(MONITOR_NAMES),
                    project_id=project.id,
                    organization_id=org.id,
                    type=MonitorType.CRON_JOB,
                    defaults={
                        "config": {"schedule": next(MONITOR_SCHEDULES)},
                        "next_checkin": timezone.now() + timedelta(minutes=60),
                        "last_checkin": timezone.now(),
                    },
                )
                if not created:
                    # Refresh an existing monitor; ~70% chance of OK status.
                    if not (monitor.config or {}).get("schedule"):
                        monitor.config = {"schedule": next(MONITOR_SCHEDULES)}
                    monitor.update(
                        config=monitor.config,
                        status=MonitorStatus.OK if randint(0, 10) < 7 else MonitorStatus.ERROR,
                        last_checkin=timezone.now(),
                        next_checkin=monitor.get_next_scheduled_checkin(timezone.now()),
                    )

                MonitorCheckIn.objects.create(
                    project_id=monitor.project_id,
                    monitor=monitor,
                    status=CheckInStatus.OK
                    if monitor.status == MonitorStatus.OK
                    else CheckInStatus.ERROR,
                )

                with transaction.atomic():
                    # A fresh random sha1 is effectively never an existing
                    # version, so a new release is created on each run.
                    has_release = Release.objects.filter(
                        version=sha1(uuid4().bytes).hexdigest(),
                        organization_id=project.organization_id,
                        projects=project,
                    ).exists()
                    if not has_release:
                        release = Release.objects.filter(
                            version=sha1(uuid4().bytes).hexdigest(),
                            organization_id=project.organization_id,
                        ).first()
                        if not release:
                            release = Release.objects.create(
                                version=sha1(uuid4().bytes).hexdigest(),
                                organization_id=project.organization_id,
                            )
                        release.add_project(project)

                generate_tombstones(project, user)

                raw_commits = generate_commits(user)

                try:
                    with transaction.atomic():
                        repo, _ = Repository.objects.get_or_create(
                            organization_id=org.id,
                            provider="integrations:github",
                            external_id="example/example",
                            defaults={
                                "name": "Example Repo",
                                "url": "https://github.com/example/example",
                            },
                        )
                except IntegrityError:
                    # for users with legacy github plugin
                    # upgrade to the new integration
                    repo = Repository.objects.get(
                        organization_id=org.id,
                        provider="github",
                        external_id="example/example",
                        name="Example Repo",
                    )
                    repo.provider = "integrations:github"
                    repo.save()

                authors = set()

                for commit_index, raw_commit in enumerate(raw_commits):
                    author = CommitAuthor.objects.get_or_create(
                        organization_id=org.id,
                        email=raw_commit["author"][1],
                        defaults={"name": raw_commit["author"][0]},
                    )[0]
                    commit = Commit.objects.get_or_create(
                        organization_id=org.id,
                        repository_id=repo.id,
                        key=raw_commit["key"],
                        defaults={"author": author, "message": raw_commit["message"]},
                    )[0]
                    authors.add(author)

                    for file in raw_commit["files"]:
                        ReleaseFile.objects.get_or_create(
                            organization_id=project.organization_id,
                            release_id=release.id,
                            name=file[0],
                            file=File.objects.get_or_create(
                                name=file[0], type="release.file", checksum="abcde" * 8, size=13043
                            )[0],
                            defaults={"organization_id": project.organization_id},
                        )

                        CommitFileChange.objects.get_or_create(
                            organization_id=org.id, commit=commit, filename=file[0], type=file[1]
                        )

                    ReleaseCommit.objects.get_or_create(
                        organization_id=org.id, release=release, commit=commit, order=commit_index
                    )

                # create an unreleased commit
                Commit.objects.get_or_create(
                    organization_id=org.id,
                    repository_id=repo.id,
                    key=sha1(uuid4().bytes).hexdigest(),
                    defaults={
                        "author": CommitAuthor.objects.get_or_create(
                            organization_id=org.id, email=user.email, defaults={"name": user.name}
                        )[0],
                        "message": "feat: Do something to {}\n{}".format(
                            random.choice(loremipsum.words) + ".js", make_sentence()
                        ),
                    },
                )[0]

                Activity.objects.create(
                    type=ActivityType.RELEASE.value,
                    project=project,
                    ident=release.version,
                    user=user,
                    data={"version": release.version},
                )

                environment = Environment.get_or_create(project=project, name=next(ENVIRONMENTS))

                deploy = Deploy.objects.create(
                    organization_id=project.organization_id,
                    release=release,
                    environment_id=environment.id,
                )

                release.update(
                    commit_count=len(raw_commits),
                    last_commit_id=commit.id,
                    total_deploys=Deploy.objects.filter(release=release).count(),
                    last_deploy_id=deploy.id,
                    authors=[str(a.id) for a in authors],
                )

                ReleaseProjectEnvironment.objects.create_or_update(
                    project=project,
                    environment=environment,
                    release=release,
                    defaults={"last_deploy_id": deploy.id},
                )

                Activity.objects.create(
                    type=ActivityType.DEPLOY.value,
                    project=project,
                    ident=release.version,
                    data={
                        "version": release.version,
                        "deploy_id": deploy.id,
                        "environment": environment.name,
                    },
                    datetime=deploy.date_finished,
                )

                # Add a bunch of additional dummy events to support pagination
                if extra_events:
                    for _ in range(45):
                        platform = next(PLATFORMS)

                        create_sample_event(
                            project=project,
                            platform=platform,
                            release=release.version,
                            level=next(LEVELS),
                            environment=next(ENVIRONMENTS),
                            message="This is a mostly useless example %s exception" % platform,
                            # checksum varies per iteration so each event gets
                            # its own group instead of deduplicating.
                            checksum=md5_text(platform + str(_)).hexdigest(),
                            user=generate_user(),
                        )

                for _ in range(num_events):
                    event1 = create_sample_event(
                        project=project,
                        platform="python",
                        release=release.version,
                        environment=next(ENVIRONMENTS),
                        user=generate_user(),
                    )

                    EventAttachment.objects.create(
                        project_id=project.id,
                        event_id=event1.event_id,
                        name="example-logfile.txt",
                        file_id=File.objects.get_or_create(
                            name="example-logfile.txt",
                            type="text/plain",
                            checksum="abcde" * 8,
                            size=13043,
                        )[0].id,
                    )

                    event2 = create_sample_event(
                        project=project,
                        platform="javascript",
                        release=release.version,
                        environment=next(ENVIRONMENTS),
                        sdk={"name": "raven-js", "version": "2.1.0"},
                        user=generate_user(),
                    )

                    event3 = create_sample_event(project, "java")

                    event4 = create_sample_event(
                        project=project,
                        platform="ruby",
                        release=release.version,
                        environment=next(ENVIRONMENTS),
                        user=generate_user(),
                    )

                    event5 = create_sample_event(
                        project=project,
                        platform="cocoa",
                        release=release.version,
                        environment=next(ENVIRONMENTS),
                        user=generate_user(),
                    )

                    create_sample_event(
                        project=project,
                        platform="php",
                        release=release.version,
                        environment=next(ENVIRONMENTS),
                        message=LONG_MESSAGE,
                        user=generate_user(),
                    )

                    create_sample_event(
                        project=project,
                        platform="cocoa",
                        sample_name="react-native",
                        release=release.version,
                        environment=next(ENVIRONMENTS),
                        user=generate_user(),
                    )

                    create_sample_event(
                        project=project,
                        platform="pii",
                        release=release.version,
                        environment=next(ENVIRONMENTS),
                        user=generate_user(),
                    )

                # A commit whose message references event5's short id, so the
                # "Fixes XYZ-123" resolution flow has data to work with.
                if event5:
                    Commit.objects.get_or_create(
                        organization_id=org.id,
                        repository_id=repo.id,
                        key=sha1(uuid4().bytes).hexdigest(),
                        defaults={
                            "author": CommitAuthor.objects.get_or_create(
                                organization_id=org.id,
                                email=user.email,
                                defaults={"name": user.name},
                            )[0],
                            "message": f"Ooops!\nFixes {event5.group.qualified_short_id}",
                        },
                    )[0]

                create_sample_event(project=project, environment=next(ENVIRONMENTS), platform="csp")

                if event3:
                    UserReport.objects.create(
                        project_id=project.id,
                        event_id=event3.event_id,
                        group_id=event3.group.id,
                        name="Jane Bloggs",
                        email="jane@example.com",
                        comments=make_sentence(),
                    )

                # Metric alerts
                alert_rule = create_alert_rule(
                    org,
                    [project],
                    "My Alert Rule",
                    "level:error",
                    "count()",
                    10,
                    AlertRuleThresholdType.ABOVE,
                    1,
                )
                create_alert_rule_trigger(alert_rule, "critical", 10)
                create_incident(
                    org,
                    type_=IncidentType.DETECTED,
                    title="My Incident",
                    date_started=datetime.utcnow().replace(tzinfo=utc),
                    alert_rule=alert_rule,
                    projects=[project],
                )

                print(f" > Loading time series data")  # NOQA
                if event1:
                    create_sample_time_series(event1, release=release)
                if event2:
                    create_sample_time_series(event2, release=release)
                if event3:
                    create_sample_time_series(event3)
                if event4:
                    create_sample_time_series(event4, release=release)
                if event5:
                    create_sample_time_series(event5, release=release)

                if hasattr(buffer, "process_pending"):
                    print(" > Processing pending buffers")  # NOQA
                    buffer.process_pending()

                mocks_loaded.send(project=project, sender=__name__)

        # NOTE(review): uses the last `team` from the loop above — presumably
        # intentional (any team works for a demo access request); confirm.
        OrganizationAccessRequest.objects.create_or_update(member=dummy_member, team=team)

    create_mock_transactions(project_map, load_trends, load_performance_issues, slow)

    Activity.objects.create(
        type=ActivityType.RELEASE.value,
        project=project,
        ident="4f38b65c62c4565aa94bba391ff8946922a8eed4",
        user=user,
        data={"version": "4f38b65c62c4565aa94bba391ff8946922a8eed4"},
    )

    create_system_time_series()
- def create_mock_transactions(
- project_map, load_trends=False, load_performance_issues=False, slow=False
- ):
- backend_project = project_map["Earth"]
- frontend_project = project_map["Fire"]
- service_projects = [
- project_map["Wind"],
- project_map["Water"],
- project_map["Heart"],
- ]
- for project in project_map.values():
- if not project.flags.has_transactions:
- project.update(flags=F("flags").bitor(Project.flags.has_transactions))
- timestamp = timezone.now()
- print(f" > Loading a trace") # NOQA
- create_trace(
- slow,
- timestamp - timedelta(milliseconds=random_normal(4000, 250, 1000)),
- timestamp,
- generate_user(),
- uuid4().hex,
- None,
- {
- "project": frontend_project,
- "transaction": "/plants/:plantId/",
- "frontend": True,
- "errors": 1,
- "children": [
- {
- "project": backend_project,
- "transaction": "/api/plants/",
- "children": [
- {
- "project": service_projects[0],
- "transaction": "/products/all/",
- "children": [],
- },
- {
- "project": service_projects[1],
- "transaction": "/analytics/",
- "children": [],
- },
- {
- "project": service_projects[2],
- "transaction": "tasks.create_invoice",
- "children": [
- {
- "project": service_projects[2],
- "transaction": "tasks.process_invoice",
- "children": [
- {
- "project": service_projects[2],
- "transaction": "tasks.process_invoice",
- "children": [
- {
- "project": service_projects[2],
- "transaction": "tasks.process_invoice",
- "children": [
- {
- "project": service_projects[2],
- "transaction": "tasks.process_invoice",
- "children": [],
- },
- ],
- },
- ],
- },
- ],
- },
- ],
- },
- ],
- },
- ],
- },
- )
- if load_trends:
- print(f" > Loading trends data") # NOQA
- for day in range(14):
- for hour in range(24):
- timestamp = timezone.now() - timedelta(days=day, hours=hour)
- transaction_user = generate_user()
- trace_id = uuid4().hex
- frontend_span_id = uuid4().hex[:16]
- frontend_root_span_id = uuid4().hex[:16]
- frontend_duration = random_normal(2000 - 50 * day, 250, 1000)
- create_sample_event(
- project=frontend_project,
- platform="javascript-transaction",
- transaction="/trends/:frontend/",
- event_id=uuid4().hex,
- user=transaction_user,
- timestamp=timestamp,
- # start_timestamp decreases based on day so that there's a trend
- start_timestamp=timestamp - timedelta(milliseconds=frontend_duration),
- measurements={
- "fp": {"value": random_normal(1250 - 50 * day, 200, 500)},
- "fcp": {"value": random_normal(1250 - 50 * day, 200, 500)},
- "lcp": {"value": random_normal(2800 - 50 * day, 400, 2000)},
- "fid": {"value": random_normal(5 - 0.125 * day, 2, 1)},
- },
- # Root
- parent_span_id=None,
- span_id=frontend_root_span_id,
- trace=trace_id,
- spans=[
- {
- "same_process_as_parent": True,
- "op": "http",
- "description": "GET /api/plants/?all_plants=1",
- "data": {
- "duration": random_normal(
- 1 - 0.05 * day, 0.25, 0.01, frontend_duration / 1000
- ),
- "offset": 0.02,
- },
- "span_id": frontend_span_id,
- "trace_id": trace_id,
- }
- ],
- )
- # try to give clickhouse some breathing room
- if slow:
- time.sleep(0.05)
- backend_duration = random_normal(1500 + 50 * day, 250, 500)
- create_sample_event(
- project=backend_project,
- platform="transaction",
- transaction="/trends/backend/",
- event_id=uuid4().hex,
- user=transaction_user,
- timestamp=timestamp,
- start_timestamp=timestamp - timedelta(milliseconds=backend_duration),
- # match the trace from the javascript transaction
- trace=trace_id,
- parent_span_id=frontend_root_span_id,
- spans=[],
- )
- # try to give clickhouse some breathing room
- if slow:
- time.sleep(0.05)
- if load_performance_issues:
def load_n_plus_one_issue():
    """Create mock transactions intended to exercise N+1-style performance issues.

    Sends two events to the backend project:
      * ``/n_plus_one_db/backend/`` — a ``django.view`` parent span, one
        "source" query span, and 10 repeated author-lookup ``db`` spans laid
        back-to-back (the classic N+1 DB pattern).
      * ``/file-io-main-thread/`` — a ``file.write`` span whose data marks it
        as blocking the UI thread.

    Relies on ``timestamp``, ``backend_project``, ``generate_user`` and
    ``create_sample_event`` from the enclosing scope.
    """
    trace_id = uuid4().hex
    transaction_user = generate_user()
    frontend_root_span_id = uuid4().hex[:16]
    # Running offset/duration for the repeated queries; both are mutated by
    # make_repeating_span() below so each span starts where the last ended.
    n_plus_one_db_current_offset = timestamp
    n_plus_one_db_duration = timedelta(milliseconds=100)
    parent_span_id = uuid4().hex[:16]
    source_span = {
        "timestamp": (timestamp + n_plus_one_db_duration).timestamp(),
        "start_timestamp": (timestamp + timedelta(milliseconds=10)).timestamp(),
        "description": "SELECT `books_book`.`id`, `books_book`.`title`, `books_book`.`author_id` FROM `books_book` ORDER BY `books_book`.`id` DESC LIMIT 10",
        "op": "db",
        "parent_span_id": parent_span_id,
        "span_id": uuid4().hex[:16],
        "hash": "858fea692d4d93e8",
    }

    def make_repeating_span(duration):
        # ``timestamp`` is only read here, so no nonlocal declaration is
        # needed for it (the original one was spurious); the two names below
        # are assigned and therefore do need nonlocal.
        nonlocal n_plus_one_db_current_offset
        nonlocal n_plus_one_db_duration
        n_plus_one_db_duration += timedelta(milliseconds=duration) + timedelta(
            milliseconds=1
        )
        n_plus_one_db_current_offset = timestamp + n_plus_one_db_duration
        return {
            "timestamp": (
                n_plus_one_db_current_offset + timedelta(milliseconds=duration)
            ).timestamp(),
            "start_timestamp": (
                n_plus_one_db_current_offset + timedelta(milliseconds=1)
            ).timestamp(),
            "description": "SELECT `books_author`.`id`, `books_author`.`name` FROM `books_author` WHERE `books_author`.`id` = %s LIMIT 21",
            "op": "db",
            "span_id": uuid4().hex[:16],
            "parent_span_id": parent_span_id,
            "hash": "63f1e89e6a073441",
        }

    repeating_spans = [make_repeating_span(200) for _ in range(10)]
    parent_span = {
        "timestamp": (
            timestamp + n_plus_one_db_duration + timedelta(milliseconds=200)
        ).timestamp(),
        "start_timestamp": timestamp.timestamp(),
        "description": "new",
        "op": "django.view",
        "parent_span_id": uuid4().hex[:16],
        "span_id": parent_span_id,
        "hash": "0f43fb6f6e01ca52",
    }
    create_sample_event(
        project=backend_project,
        platform="transaction",
        transaction="/n_plus_one_db/backend/",
        event_id=uuid4().hex,
        user=transaction_user,
        timestamp=timestamp + n_plus_one_db_duration + timedelta(milliseconds=300),
        start_timestamp=timestamp,
        trace=trace_id,
        parent_span_id=frontend_root_span_id,
        spans=[
            parent_span,
            source_span,
        ]
        + repeating_spans,
    )

    time.sleep(1.0)

    # Second event reuses the same trace and parent span on purpose so both
    # issues appear within one trace.
    create_sample_event(
        project=backend_project,
        platform="transaction",
        transaction="/file-io-main-thread/",
        event_id=uuid4().hex,
        user=transaction_user,
        timestamp=timestamp + timedelta(milliseconds=300),
        start_timestamp=timestamp,
        trace=trace_id,
        parent_span_id=frontend_root_span_id,
        spans=[
            parent_span,
            {
                "timestamp": (timestamp + timedelta(milliseconds=200)).timestamp(),
                "start_timestamp": timestamp.timestamp(),
                "description": "1669031858711_file.txt (4.0 kB)",
                "op": "file.write",
                "span_id": uuid4().hex[:16],
                "parent_span_id": parent_span_id,
                "status": "ok",
                "data": {
                    "blocked_ui_thread": True,
                    "call_stack": [
                        {
                            "function": "onClick",
                            "in_app": True,
                            "lineno": 2,
                            "module": "io.sentry.samples.android.MainActivity$$ExternalSyntheticLambda6",
                            "native": False,
                        },
                        {
                            "filename": "MainActivity.java",
                            "function": "lambda$onCreate$5$io-sentry-samples-android-MainActivity",
                            "in_app": True,
                            "lineno": 93,
                            "module": "io.sentry.samples.android.MainActivity",
                            "native": False,
                        },
                    ],
                    "file.path": "/data/user/0/io.sentry.samples.android/files/1669031858711_file.txt",
                    "file.size": 4010,
                },
            },
        ],
    )
def load_uncompressed_asset_issue():
    """Send one ``/uncompressed-asset/`` transaction to the backend project.

    The resource.script span reports equal encoded and decoded body sizes,
    i.e. a large asset served without compression.

    Relies on ``timestamp``, ``backend_project``, ``generate_user`` and
    ``create_sample_event`` from the enclosing scope.
    """
    time.sleep(1.0)
    user = generate_user()
    root_span_id = uuid4().hex[:16]
    page_span = {
        "timestamp": (timestamp + timedelta(milliseconds=300)).timestamp(),
        "start_timestamp": timestamp.timestamp(),
        "description": "new",
        "op": "pageload",
        "parent_span_id": uuid4().hex[:16],
        "span_id": root_span_id,
        "hash": "0f43fb6f6e01ca52",
    }
    asset_span = {
        "timestamp": (timestamp + timedelta(milliseconds=1000)).timestamp(),
        "start_timestamp": (timestamp + timedelta(milliseconds=300)).timestamp(),
        "description": "https://s1.sentry-cdn.com/_static/dist/sentry/entrypoints/app.js",
        "op": "resource.script",
        "parent_span_id": root_span_id,
        "span_id": uuid4().hex[:16],
        "hash": "858fea692d4d93e9",
        # 1 MB transferred with encoded size == decoded size (no compression).
        "data": {
            "Transfer Size": 1_000_000,
            "Encoded Body Size": 1_000_000,
            "Decoded Body Size": 1_000_000,
        },
    }
    create_sample_event(
        project=backend_project,
        platform="transaction",
        transaction="/uncompressed-asset/",
        event_id=uuid4().hex,
        user=user,
        timestamp=timestamp + timedelta(milliseconds=300),
        start_timestamp=timestamp,
        trace=uuid4().hex,
        parent_span_id=root_span_id,
        spans=[page_span, asset_span],
    )
def load_consecutive_db_issue():
    """Send one ``/consecutive-db/`` transaction to the backend project.

    Contains three sequential, non-overlapping ``db`` spans under one
    ``django.view`` parent span.

    Relies on ``timestamp``, ``backend_project``, ``generate_user`` and
    ``create_sample_event`` from the enclosing scope.
    """
    time.sleep(1.0)
    user = generate_user()
    view_span_id = uuid4().hex[:16]
    view_span = {
        "timestamp": (timestamp + timedelta(milliseconds=300)).timestamp(),
        "start_timestamp": timestamp.timestamp(),
        "description": "new",
        "op": "django.view",
        "parent_span_id": uuid4().hex[:16],
        "span_id": view_span_id,
        "hash": "0f43fb6f6e01ca52",
    }
    # (start_ms, end_ms, SQL text, span hash) for each back-to-back query.
    queries = [
        (
            300,
            1000,
            "SELECT `customer`.`id` FROM `customers` WHERE `customer`.`name` = 'customerName'",
            "858fea692d4d93e9",
        ),
        (1000, 2000, "SELECT COUNT(*) FROM `customers`", "858fea692d4d93e7"),
        (2000, 3000, "SELECT COUNT(*) FROM `items`", "858fea692d4d93e6"),
    ]
    db_spans = [
        {
            "timestamp": (timestamp + timedelta(milliseconds=end_ms)).timestamp(),
            "start_timestamp": (timestamp + timedelta(milliseconds=start_ms)).timestamp(),
            "description": sql,
            "op": "db",
            "parent_span_id": view_span_id,
            "span_id": uuid4().hex[:16],
            "hash": span_hash,
        }
        for start_ms, end_ms, sql, span_hash in queries
    ]
    create_sample_event(
        project=backend_project,
        platform="transaction",
        transaction="/consecutive-db/",
        event_id=uuid4().hex,
        user=user,
        timestamp=timestamp + timedelta(milliseconds=300),
        start_timestamp=timestamp,
        trace=uuid4().hex,
        parent_span_id=view_span_id,
        spans=[view_span] + db_spans,
    )
def load_render_blocking_asset_issue():
    """Send one ``/render-blocking-asset/`` transaction to the frontend project.

    A one-second ``resource.script`` span combined with a 2500ms FCP
    measurement.

    Relies on ``timestamp``, ``frontend_project``, ``generate_user`` and
    ``create_sample_event`` from the enclosing scope.
    """
    user = generate_user()
    script_parent_id = uuid4().hex[:16]
    script_span = {
        "timestamp": (timestamp + timedelta(milliseconds=1300)).timestamp(),
        "start_timestamp": (timestamp + timedelta(milliseconds=300)).timestamp(),
        "description": "https://example.com/asset.js",
        "op": "resource.script",
        "parent_span_id": script_parent_id,
        "span_id": uuid4().hex[:16],
        "hash": "858fea692d4d93e8",
    }
    create_sample_event(
        project=frontend_project,
        platform="transaction",
        transaction="/render-blocking-asset/",
        event_id=uuid4().hex,
        user=user,
        timestamp=timestamp + timedelta(milliseconds=300),
        start_timestamp=timestamp,
        trace=uuid4().hex,
        parent_span_id=script_parent_id,
        spans=[script_span],
        measurements={"fcp": {"value": 2500.0}},
    )
def load_m_n_plus_one_issue():
    """Send one ``/m_n_plus_one_db/backend/`` transaction to the backend project.

    A ``graphql.execute`` parent span over 10 back-to-back child spans that
    alternate between ``http`` and ``db`` ops (the MN+1 query pattern).

    Relies on ``timestamp``, ``backend_project``, ``generate_user`` and
    ``create_sample_event`` from the enclosing scope.
    """
    trace_id = uuid4().hex
    transaction_user = generate_user()
    parent_span_id = uuid4().hex[:16]
    duration = 200

    def make_repeating_span(i):
        # ``timestamp`` and ``duration`` are only read here, so the original
        # nonlocal declarations for them were spurious and have been removed.
        # Spans are laid back-to-back with a 1ms gap; even indices are http
        # requests, odd indices are the repeated db query.
        start_timestamp = timestamp + timedelta(milliseconds=i * (duration + 1))
        end_timestamp = start_timestamp + timedelta(milliseconds=duration)
        op = "http" if i % 2 == 0 else "db"
        description = "GET /" if i % 2 == 0 else "SELECT * FROM authors WHERE id = %s"
        # Renamed from ``hash`` to avoid shadowing the builtin.
        span_hash = "63f1e89e6a073441" if i % 2 == 0 else "a109ff3ef40f7fb3"
        return {
            "timestamp": end_timestamp.timestamp(),
            "start_timestamp": start_timestamp.timestamp(),
            "description": description,
            "op": op,
            "span_id": uuid4().hex[:16],
            "parent_span_id": parent_span_id,
            "hash": span_hash,
        }

    span_count = 10
    repeating_spans = [make_repeating_span(i) for i in range(span_count)]
    parent_span = {
        "timestamp": (
            timestamp + timedelta(milliseconds=span_count * (duration + 1))
        ).timestamp(),
        "start_timestamp": timestamp.timestamp(),
        "description": "execute",
        "op": "graphql.execute",
        "parent_span_id": uuid4().hex[:16],
        "span_id": parent_span_id,
        "hash": "0f43fb6f6e01ca52",
    }
    create_sample_event(
        project=backend_project,
        platform="transaction",
        transaction="/m_n_plus_one_db/backend/",
        event_id=uuid4().hex,
        user=transaction_user,
        timestamp=timestamp + timedelta(milliseconds=span_count * (duration + 1) + 100),
        start_timestamp=timestamp,
        trace=trace_id,
        spans=[parent_span] + repeating_spans,
    )
def load_performance_issues():
    """Run every performance-issue loader defined above, with progress output."""
    # Plain string literals: these messages have no interpolation, so the
    # original f-prefixes were unnecessary (ruff F541). The NOQA markers are
    # kept for the print statements in this mock-data script.
    print(" > Loading performance issues data")  # NOQA
    print(" > Loading n plus one issue")  # NOQA
    load_n_plus_one_issue()
    print(" > Loading consecutive db issue")  # NOQA
    load_consecutive_db_issue()
    print(" > Loading uncompressed asset issue")  # NOQA
    load_uncompressed_asset_issue()
    print(" > Loading render blocking asset issue")  # NOQA
    load_render_blocking_asset_issue()
    print(" > Loading MN+1 issue")  # NOQA
    load_m_n_plus_one_issue()

load_performance_issues()
if __name__ == "__main__":
    # Execute celery tasks inline so the script needs no worker process.
    settings.CELERY_ALWAYS_EAGER = True

    # NOTE(review): optparse has been superseded by argparse; kept as-is to
    # preserve the existing CLI behavior exactly.
    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option("--events", default=1, type=int, help="number of events to generate")
    # The remaining options are all plain boolean flags.
    for flag, description in (
        (
            "--skip-default-setup",
            "Skips creating the default project, teams and timeseries, useful when only loading specific transactions",
        ),
        ("--extra-events", "add multiple events for each error group"),
        ("--load-trends", "load multiple transactions for each id to show trends"),
        (
            "--load-performance-issues",
            "load transactions with performance issues, still needs options/flags on for issues to appear.",
        ),
        ("--slow", "sleep between each transaction to let clickhouse rest"),
    ):
        parser.add_option(flag, default=False, action="store_true", help=description)

    options, args = parser.parse_args()

    try:
        main(
            skip_default_setup=options.skip_default_setup,
            num_events=options.events,
            extra_events=options.extra_events,
            load_trends=options.load_trends,
            load_performance_issues=options.load_performance_issues,
            slow=options.slow,
        )
    except Exception:
        # Avoid reporting any issues recursively back into Sentry
        import sys
        import traceback

        traceback.print_exc()
        sys.exit(1)
|