12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749 |
- #!/usr/bin/env python
- from __future__ import absolute_import
- from sentry.runner import configure
- configure()
- import itertools
- import random
- from datetime import datetime, timedelta
- from hashlib import sha1
- from random import randint
- from uuid import uuid4
- import six
- from django.conf import settings
- from django.db import IntegrityError, transaction
- from django.db.models import F
- from django.utils import timezone
- from loremipsum import Generator
- from pytz import utc
- from sentry import buffer, roles, tsdb
- from sentry.event_manager import HashDiscarded
- from sentry.models import (
- Activity, Broadcast, Commit, CommitAuthor, CommitFileChange, Deploy, EventAttachment, Event,
- Environment, File, Group, GroupRelease, GroupTombstone, Organization,
- OrganizationAccessRequest, OrganizationMember, Project, Release,
- ReleaseCommit, ReleaseEnvironment, ReleaseProjectEnvironment, ReleaseFile, Repository,
- Team, TOMBSTONE_FIELDS_FROM_GROUP, User, UserReport, Monitor, MonitorStatus, MonitorType, MonitorCheckIn, CheckInStatus
- )
- from sentry.signals import mocks_loaded
- from sentry.similarity import features
- from sentry.utils.hashlib import md5_text
- from sentry.utils.samples import create_sample_event as _create_sample_event
- from sentry.utils.samples import generate_user
# Shared lorem-ipsum generator used for fake file names, commit messages, etc.
loremipsum = Generator()

# Round-robin pools consumed while synthesizing sample data.
PLATFORMS = itertools.cycle(['ruby', 'php', 'python', 'java', 'javascript'])

# 'error' is repeated so it is drawn more often than 'fatal'/'warning'.
LEVELS = itertools.cycle(['error', 'error', 'error', 'fatal', 'warning'])

# '' means "no environment"; 'production' is weighted double.
ENVIRONMENTS = itertools.cycle(['production', 'production', 'staging', 'alpha', 'beta', ''])

# Cron monitors are named after real celerybeat task entries.
MONITOR_NAMES = itertools.cycle(settings.CELERYBEAT_SCHEDULE.keys())

# Crontab expressions: every minute / hourly / daily.
MONITOR_SCHEDULES = itertools.cycle(['* * * * *', '0 * * * *', '0 0 * * *'])
# Oversized event message (a real-world ClickHouse exception + stack trace)
# used to exercise rendering/truncation of very long messages in the UI.
LONG_MESSAGE = """Code: 0.
DB::Exception: String is too long for DateTime: 2018-10-26T19:14:18+00:00. Stack trace:
0. clickhouse-server(StackTrace::StackTrace()+0x16) [0x99e9626]
1. clickhouse-server(DB::Exception::Exception(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, int)+0x22) [0x3087172]
2. clickhouse-server(DB::FunctionComparison<DB::EqualsOp, DB::NameEquals>::executeDateOrDateTimeOrEnumOrUUIDWithConstString(DB::Block&, unsigned long, DB::IColumn const*, DB::IColumn const*, std::shared_ptr<DB::IDataType const> const&, std::shared_ptr<DB::IDataType const> const&, bool, unsigned long)+0x13c8) [0x3b233d8]
3. clickhouse-server(DB::FunctionComparison<DB::EqualsOp, DB::NameEquals>::executeImpl(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x576) [0x3bafc86]
4. clickhouse-server(DB::PreparedFunctionImpl::defaultImplementationForNulls(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x174) [0x7953cd4]
5. clickhouse-server(DB::PreparedFunctionImpl::executeWithoutLowCardinalityColumns(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x54) [0x7953b04]
6. clickhouse-server(DB::PreparedFunctionImpl::execute(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x3e2) [0x7954222]
7. clickhouse-server(DB::ExpressionAction::execute(DB::Block&, std::unordered_map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long, std::hash<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::equal_to<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, unsigned long> > >&) const+0x69b) [0x7b021fb]
8. clickhouse-server(DB::ExpressionActions::execute(DB::Block&) const+0xe6) [0x7b03676]
9. clickhouse-server(DB::FilterBlockInputStream::FilterBlockInputStream(std::shared_ptr<DB::IBlockInputStream> const&, std::shared_ptr<DB::ExpressionActions> const&, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool)+0x711) [0x79b7e31]
10. clickhouse-server() [0x75e9443]
11. clickhouse-server(DB::InterpreterSelectQuery::executeImpl(DB::InterpreterSelectQuery::Pipeline&, std::shared_ptr<DB::IBlockInputStream> const&, bool)+0x118f) [0x75f212f]
12. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr<DB::IAST> const&, DB::Context const&, std::shared_ptr<DB::IBlockInputStream> const&, std::shared_ptr<DB::IStorage> const&, std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x5e6) [0x75f2d46]
13. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr<DB::IAST> const&, DB::Context const&, std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x56) [0x75f3aa6]
14. clickhouse-server(DB::InterpreterSelectWithUnionQuery::InterpreterSelectWithUnionQuery(std::shared_ptr<DB::IAST> const&, DB::Context const&, std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x7e7) [0x75ffab7]
15. clickhouse-server(DB::InterpreterFactory::get(std::shared_ptr<DB::IAST>&, DB::Context&, DB::QueryProcessingStage::Enum)+0x3a8) [0x75dc138]
16. clickhouse-server() [0x768fad9]
17. clickhouse-server(DB::executeQuery(std::__cxx11::basic..."""
def make_sentence(words=None):
    """Return *words* random lorem-ipsum words joined by single spaces.

    When *words* is None, a pseudo-random length is drawn from a Weibull
    distribution (scale 8, shape 3).
    """
    if words is None:
        words = int(random.weibullvariate(8, 3))
    picked = [random.choice(loremipsum.words) for _ in range(words)]
    return ' '.join(picked)
def create_sample_event(*args, **kwargs):
    """Create a sample event, skipping ones discarded by a tombstone.

    Wraps sentry.utils.samples.create_sample_event; on success also records
    the event with the similarity feature set.  Returns the event, or None
    when the event was discarded or not created.
    """
    try:
        event = _create_sample_event(*args, **kwargs)
    except HashDiscarded as e:
        # BaseException.message is deprecated (and removed in Python 3);
        # render the exception itself instead.
        print("> Skipping Event: {}".format(six.text_type(e)))  # NOQA
    else:
        if event is not None:
            features.record([event])
        return event
def generate_commits(user):
    """Build a list of 1-20 fake raw-commit dicts for mock repositories.

    Roughly one in six commits is attributed to *user* (needs .name/.email);
    the rest get randomly generated lorem-ipsum authors.  Each commit touches
    a single file; the second commit (i == 1) always touches 'raven/base.py'
    so a well-known path shows up.
    """
    commits = []
    for i in range(random.randint(1, 20)):
        if i == 1:
            filename = 'raven/base.py'
        else:
            filename = random.choice(loremipsum.words) + '.js'

        if random.randint(0, 5) == 1:
            author = (user.name, user.email)
        else:
            author = (
                '{} {}'.format(
                    random.choice(loremipsum.words),
                    random.choice(loremipsum.words),
                ),
                '{}@example.com'.format(
                    random.choice(loremipsum.words),
                ),
            )
        commits.append({
            # encode: hashlib requires bytes on Python 3 (no-op change on 2).
            'key': sha1(uuid4().hex.encode('utf-8')).hexdigest(),
            'message': 'feat: Do something to {}\n{}'.format(filename, make_sentence()),
            'author': author,
            'files': [
                (filename, 'M'),
            ],
        })
    return commits
def generate_tombstones(project, user):
    """Create GroupTombstone rows mirroring up to five of *project*'s groups."""
    # Start previous_group_id high enough (or past the current maximum) so
    # the generated tombstones won't conflict with any real group ids.
    base_id = 100000
    try:
        base_id = max(
            GroupTombstone.objects.order_by('-previous_group_id')[0].previous_group_id,
            base_id,
        ) + 1
    except IndexError:
        # No tombstones exist yet; keep the default floor.
        pass

    for offset, group in enumerate(Group.objects.filter(project=project)[:5]):
        copied_fields = {name: getattr(group, name) for name in TOMBSTONE_FIELDS_FROM_GROUP}
        GroupTombstone.objects.create(
            previous_group_id=base_id + offset,
            actor_id=user.id,
            **copied_fields
        )
def create_system_time_series():
    """Backfill internal client-api TSDB counters.

    Writes 60 one-second data points, then continues backwards with 30 days
    of hourly data points, splitting each random count into ~90%% 2xx,
    ~5%% 4xx and ~10%% 5xx responses.
    """
    success_keys = (
        (tsdb.models.internal, 'client-api.all-versions.responses.2xx'),
        (tsdb.models.internal, 'client-api.all-versions.requests'),
    )
    client_error_keys = (
        (tsdb.models.internal, 'client-api.all-versions.responses.4xx'),
    )
    server_error_keys = (
        (tsdb.models.internal, 'client-api.all-versions.responses.5xx'),
    )

    cursor = datetime.utcnow().replace(tzinfo=utc)

    # Fine-grained recent data: one point per second for a minute.
    for _ in xrange(60):
        count = randint(1, 10)
        tsdb.incr_multi(success_keys, cursor, int(count * 0.9))
        tsdb.incr_multi(client_error_keys, cursor, int(count * 0.05))
        tsdb.incr_multi(server_error_keys, cursor, int(count * 0.1))
        cursor = cursor - timedelta(seconds=1)

    # Coarse history: one point per hour for 30 days, continuing backwards
    # from where the per-second backfill stopped.
    for _ in xrange(24 * 30):
        count = randint(100, 1000)
        tsdb.incr_multi(success_keys, cursor, int(count * 4.9))
        tsdb.incr_multi(client_error_keys, cursor, int(count * 0.05))
        tsdb.incr_multi(server_error_keys, cursor, int(count * 0.1))
        cursor = cursor - timedelta(hours=1)
def create_sample_time_series(event, release=None):
    """Backfill TSDB counters and frequency tables for *event*'s group.

    Writes 60 one-second data points followed by 30 days of hourly points,
    walking backwards from "now".  No-op when *event* is None (which
    create_sample_event returns for discarded events).

    :param event: the Event whose project/group/environment get the data.
    :param release: optional Release; when given, release-environment and
        group-release frequency data is recorded as well.
    """
    if event is None:
        return

    Event.objects.bind_nodes([event], 'data')

    group = event.group
    project = group.project
    key = project.key_set.all()[0]

    now = datetime.utcnow().replace(tzinfo=utc)

    environment = Environment.get_or_create(
        project=project,
        name=Environment.get_name_or_default(event.get_tag('environment')),
    )

    grouprelease = None
    if release:
        ReleaseEnvironment.get_or_create(
            project=project,
            release=release,
            environment=environment,
            datetime=now,
        )
        grouprelease = GroupRelease.get_or_create(
            group=group,
            release=release,
            environment=environment,
            datetime=now,
        )

    def _record_point(ts, count, received_keys):
        # One backfilled data point: raw event counters, accept/reject
        # totals, and the frequency tables powering "frequent X" queries.
        tsdb.incr_multi((
            (tsdb.models.project, project.id),
            (tsdb.models.group, group.id),
        ), ts, count, environment_id=environment.id)
        tsdb.incr_multi(received_keys, ts, int(count * 1.1))
        tsdb.incr_multi((
            (tsdb.models.organization_total_rejected, project.organization_id),
            (tsdb.models.project_total_rejected, project.id),
            (tsdb.models.key_total_rejected, key.id),
        ), ts, int(count * 0.1))

        frequencies = [
            (tsdb.models.frequent_issues_by_project, {
                project.id: {
                    group.id: count,
                },
            }),
            (tsdb.models.frequent_environments_by_group, {
                group.id: {
                    environment.id: count,
                },
            })
        ]
        if release:
            frequencies.append(
                (tsdb.models.frequent_releases_by_group, {
                    group.id: {
                        grouprelease.id: count,
                    },
                })
            )
        tsdb.record_frequency_multi(frequencies, ts)

    # Fine-grained recent data: one point per second for a minute.
    for _ in xrange(60):
        _record_point(now, randint(1, 10), (
            (tsdb.models.organization_total_received, project.organization_id),
            (tsdb.models.project_total_forwarded, project.id),
            (tsdb.models.project_total_received, project.id),
            (tsdb.models.key_total_received, key.id),
        ))
        now = now - timedelta(seconds=1)

    # Coarse history: one point per hour for 30 days.  NOTE: this loop has
    # historically omitted project_total_forwarded; preserved as-is.
    for _ in xrange(24 * 30):
        _record_point(now, randint(100, 1000), (
            (tsdb.models.organization_total_received, project.organization_id),
            (tsdb.models.project_total_received, project.id),
            (tsdb.models.key_total_received, key.id),
        ))
        now = now - timedelta(hours=1)
def main(num_events=1, extra_events=False):
    """Populate the database with mock orgs, teams, projects, releases,
    commits, monitors and sample events for local development.

    :param num_events: passes of sample-event creation per project.
    :param extra_events: when True, also create 45 throwaway events per
        project so list views have enough data to paginate.
    """
    user = User.objects.filter(is_superuser=True)[0]

    dummy_user, _ = User.objects.get_or_create(
        username='dummy@example.com',
        defaults={
            'email': 'dummy@example.com',
        }
    )
    dummy_user.set_password('dummy')
    dummy_user.save()

    mocks = (
        ('Massive Dynamic', ('Ludic Science',)),
        ('Captain Planet', ('Earth', 'Fire', 'Wind', 'Water', 'Heart')),
    )

    Broadcast.objects.create(
        title="Learn about Source Maps",
        message="Source maps are JSON files that contain information on how to map your transpiled source code back to their original source.",
        link="https://docs.sentry.io/hosted/clients/javascript/sourcemaps/#uploading-source-maps-to-sentry",
    )

    if settings.SENTRY_SINGLE_ORGANIZATION:
        org = Organization.get_default()
        print('Mocking org {}'.format(org.name))  # NOQA
    else:
        print('Mocking org {}'.format('Default'))  # NOQA
        org, _ = Organization.objects.get_or_create(
            slug='default',
        )

    OrganizationMember.objects.get_or_create(
        user=user,
        organization=org,
        role=roles.get_top_dog().id,
    )

    dummy_member, _ = OrganizationMember.objects.get_or_create(
        user=dummy_user,
        organization=org,
        defaults={
            'role': roles.get_default().id,
        }
    )

    for team_name, project_names in mocks:
        print('> Mocking team {}'.format(team_name))  # NOQA
        team, _ = Team.objects.get_or_create(
            name=team_name,
            defaults={
                'organization': org,
            },
        )

        for project_name in project_names:
            print(' > Mocking project {}'.format(project_name))  # NOQA
            project, _ = Project.objects.get_or_create(
                name=project_name,
                defaults={
                    'organization': org,
                    'first_event': timezone.now(),
                    'flags': Project.flags.has_releases,
                }
            )
            project.add_team(team)
            # Pre-existing projects may predate these fields; backfill them.
            if not project.first_event:
                project.update(
                    first_event=project.date_added,
                )
            if not project.flags.has_releases:
                project.update(
                    flags=F('flags').bitor(Project.flags.has_releases),
                )

            monitor, created = Monitor.objects.get_or_create(
                name=next(MONITOR_NAMES),
                project_id=project.id,
                organization_id=org.id,
                type=MonitorType.CRON_JOB,
                defaults={
                    'config': {
                        'schedule': next(MONITOR_SCHEDULES),
                    },
                    'next_checkin': timezone.now() + timedelta(minutes=60),
                    'last_checkin': timezone.now(),
                }
            )
            if not created:
                if not (monitor.config or {}).get('schedule'):
                    monitor.config = {'schedule': next(MONITOR_SCHEDULES)}
                monitor.update(
                    config=monitor.config,
                    status=MonitorStatus.OK if randint(0, 10) < 7 else MonitorStatus.ERROR,
                    last_checkin=timezone.now(),
                    next_checkin=monitor.get_next_scheduled_checkin(timezone.now()),
                )

            MonitorCheckIn.objects.create(
                project_id=monitor.project_id,
                monitor=monitor,
                status=CheckInStatus.OK if monitor.status == MonitorStatus.OK else CheckInStatus.ERROR,
            )

            with transaction.atomic():
                # The random-sha filter (nearly) never matches, so this
                # effectively always creates a fresh release per project.
                has_release = Release.objects.filter(
                    version=sha1(uuid4().bytes).hexdigest(),
                    organization_id=project.organization_id,
                    projects=project
                ).exists()
                if not has_release:
                    release = Release.objects.filter(
                        version=sha1(uuid4().bytes).hexdigest(),
                        organization_id=project.organization_id,
                    ).first()
                    if not release:
                        release = Release.objects.create(
                            version=sha1(uuid4().bytes).hexdigest(),
                            organization_id=project.organization_id
                        )
                    release.add_project(project)

            generate_tombstones(project, user)

            raw_commits = generate_commits(user)

            try:
                with transaction.atomic():
                    repo, _ = Repository.objects.get_or_create(
                        organization_id=org.id,
                        provider='integrations:github',
                        external_id='example/example',
                        defaults={
                            'name': 'Example Repo',
                            'url': 'https://github.com/example/example',
                        }
                    )
            except IntegrityError:
                # for users with legacy github plugin
                # upgrade to the new integration
                repo = Repository.objects.get(
                    organization_id=org.id,
                    provider='github',
                    external_id='example/example',
                    name='Example Repo',
                )
                repo.provider = 'integrations:github'
                repo.save()

            authors = set()

            for commit_index, raw_commit in enumerate(raw_commits):
                author = CommitAuthor.objects.get_or_create(
                    organization_id=org.id,
                    email=raw_commit['author'][1],
                    defaults={'name': raw_commit['author'][0]}
                )[0]
                commit = Commit.objects.get_or_create(
                    organization_id=org.id,
                    repository_id=repo.id,
                    key=raw_commit['key'],
                    defaults={
                        'author': author,
                        'message': raw_commit['message'],
                    },
                )[0]
                authors.add(author)

                for file in raw_commit['files']:
                    ReleaseFile.objects.get_or_create(
                        organization_id=project.organization_id,
                        release=release,
                        name=file[0],
                        file=File.objects.get_or_create(
                            name=file[0],
                            type='release.file',
                            checksum='abcde' * 8,
                            size=13043,
                        )[0],
                        defaults={'organization_id': project.organization_id}
                    )

                    CommitFileChange.objects.get_or_create(
                        organization_id=org.id,
                        commit=commit,
                        filename=file[0],
                        type=file[1],
                    )

                ReleaseCommit.objects.get_or_create(
                    organization_id=org.id,
                    release=release,
                    commit=commit,
                    order=commit_index,
                )

            # create an unreleased commit
            Commit.objects.get_or_create(
                organization_id=org.id,
                repository_id=repo.id,
                # encode: hashlib requires bytes on Python 3.
                key=sha1(uuid4().hex.encode('utf-8')).hexdigest(),
                defaults={
                    'author': CommitAuthor.objects.get_or_create(
                        organization_id=org.id,
                        email=user.email,
                        defaults={'name': user.name}
                    )[0],
                    'message': 'feat: Do something to {}\n{}'.format(random.choice(loremipsum.words) + '.js', make_sentence()),
                },
            )[0]

            Activity.objects.create(
                type=Activity.RELEASE,
                project=project,
                ident=release.version,
                user=user,
                data={'version': release.version},
            )

            environment = Environment.get_or_create(
                project=project,
                name=six.next(ENVIRONMENTS)
            )

            deploy = Deploy.objects.create(
                organization_id=project.organization_id,
                release=release,
                environment_id=environment.id,
            )

            release.update(
                commit_count=len(raw_commits),
                last_commit_id=commit.id,
                total_deploys=Deploy.objects.filter(release=release).count(),
                last_deploy_id=deploy.id,
                authors=[six.text_type(a.id) for a in authors],
            )

            ReleaseProjectEnvironment.objects.create_or_update(
                project=project,
                environment=environment,
                release=release,
                defaults={'last_deploy_id': deploy.id}
            )

            Activity.objects.create(
                type=Activity.DEPLOY,
                project=project,
                ident=release.version,
                data={
                    'version': release.version,
                    'deploy_id': deploy.id,
                    'environment': environment.name
                },
                datetime=deploy.date_finished,
            )

            # Add a bunch of additional dummy events to support pagination
            if extra_events:
                for _ in range(45):
                    platform = six.next(PLATFORMS)
                    create_sample_event(
                        project=project,
                        platform=platform,
                        release=release.version,
                        level=six.next(LEVELS),
                        environment=six.next(ENVIRONMENTS),
                        message='This is a mostly useless example %s exception' % platform,
                        checksum=md5_text(platform + six.text_type(_)).hexdigest(),
                        user=generate_user(),
                    )

            for _ in range(num_events):
                event1 = create_sample_event(
                    project=project,
                    platform='python',
                    release=release.version,
                    environment=six.next(ENVIRONMENTS),
                    user=generate_user(),
                )

                EventAttachment.objects.create(
                    project_id=project.id,
                    group_id=event1.group_id,
                    event_id=event1.event_id,
                    name='example-logfile.txt',
                    file=File.objects.get_or_create(
                        name='example-logfile.txt',
                        type='text/plain',
                        checksum='abcde' * 8,
                        size=13043,
                    )[0],
                )

                event2 = create_sample_event(
                    project=project,
                    platform='javascript',
                    release=release.version,
                    environment=six.next(ENVIRONMENTS),
                    sdk={
                        'name': 'raven-js',
                        'version': '2.1.0',
                    },
                    user=generate_user(),
                )

                event3 = create_sample_event(project, 'java')

                event4 = create_sample_event(
                    project=project,
                    platform='ruby',
                    release=release.version,
                    environment=six.next(ENVIRONMENTS),
                    user=generate_user(),
                )

                event5 = create_sample_event(
                    project=project,
                    platform='cocoa',
                    release=release.version,
                    environment=six.next(ENVIRONMENTS),
                    user=generate_user(),
                )

                create_sample_event(
                    project=project,
                    platform='php',
                    release=release.version,
                    environment=six.next(ENVIRONMENTS),
                    message=LONG_MESSAGE,
                    user=generate_user(),
                )

                create_sample_event(
                    project=project,
                    platform='cocoa',
                    sample_name='react-native',
                    release=release.version,
                    environment=six.next(ENVIRONMENTS),
                    user=generate_user(),
                )

                create_sample_event(
                    project=project,
                    platform='pii',
                    release=release.version,
                    environment=six.next(ENVIRONMENTS),
                    user=generate_user(),
                )

                if event5:
                    Commit.objects.get_or_create(
                        organization_id=org.id,
                        repository_id=repo.id,
                        # encode: hashlib requires bytes on Python 3.
                        key=sha1(uuid4().hex.encode('utf-8')).hexdigest(),
                        defaults={
                            'author': CommitAuthor.objects.get_or_create(
                                organization_id=org.id,
                                email=user.email,
                                defaults={'name': user.name}
                            )[0],
                            'message': 'Ooops!\nFixes {}'.format(event5.group.qualified_short_id),
                        },
                    )[0]

                create_sample_event(
                    project=project,
                    environment=six.next(ENVIRONMENTS),
                    platform='csp',
                )

                if event3:
                    UserReport.objects.create(
                        project=project,
                        event_id=event3.event_id,
                        group=event3.group,
                        name='Jane Doe',
                        email='jane@example.com',
                        comments=make_sentence(),
                    )

                # FIX: the original called .format(project_name) on a string
                # with no placeholder — a confusing no-op, now removed.
                print(' > Loading time series data')  # NOQA
                create_sample_time_series(event1, release=release)
                create_sample_time_series(event2, release=release)
                create_sample_time_series(event3)
                create_sample_time_series(event4, release=release)
                create_sample_time_series(event5, release=release)

            if hasattr(buffer, 'process_pending'):
                print(' > Processing pending buffers')  # NOQA
                buffer.process_pending()

            mocks_loaded.send(project=project, sender=__name__)

        OrganizationAccessRequest.objects.create_or_update(
            member=dummy_member,
            team=team,
        )

    Activity.objects.create(
        type=Activity.RELEASE,
        project=project,
        ident='4f38b65c62c4565aa94bba391ff8946922a8eed4',
        user=user,
        data={'version': '4f38b65c62c4565aa94bba391ff8946922a8eed4'},
    )

    create_system_time_series()
if __name__ == '__main__':
    # Run celery tasks inline so the script works without running workers.
    settings.CELERY_ALWAYS_EAGER = True

    from optparse import OptionParser

    option_parser = OptionParser()
    option_parser.add_option('--events', dest='num_events', default=1, type=int)
    option_parser.add_option(
        '--extra-events', dest='extra_events', default=False, action='store_true')
    options, _args = option_parser.parse_args()

    try:
        main(num_events=options.num_events, extra_events=options.extra_events)
    except Exception:
        # Avoid reporting any issues recursively back into Sentry
        import sys
        import traceback

        traceback.print_exc()
        sys.exit(1)
|