# load-mocks (~28 KB) — development script that seeds a Sentry install with mock data.
  1. #!/usr/bin/env python
  2. from __future__ import absolute_import
  3. from sentry.runner import configure
  4. configure()
  5. import itertools
  6. import random
  7. from datetime import datetime, timedelta
  8. from hashlib import sha1
  9. from random import randint
  10. from uuid import uuid4
  11. import six
  12. from django.conf import settings
  13. from django.db import IntegrityError, transaction
  14. from django.db.models import F
  15. from django.utils import timezone
  16. from pytz import utc
  17. from sentry import buffer, roles, tsdb
  18. from sentry.event_manager import HashDiscarded
  19. from sentry.incidents.logic import (
  20. AlertRuleNameAlreadyUsedError,
  21. create_alert_rule,
  22. create_alert_rule_trigger,
  23. create_incident,
  24. )
  25. from sentry.incidents.models import IncidentType, AlertRuleThresholdType
  26. from sentry.models import (
  27. Activity,
  28. Broadcast,
  29. Commit,
  30. CommitAuthor,
  31. CommitFileChange,
  32. Deploy,
  33. EventAttachment,
  34. Environment,
  35. File,
  36. Group,
  37. GroupRelease,
  38. GroupTombstone,
  39. Organization,
  40. OrganizationAccessRequest,
  41. OrganizationMember,
  42. Project,
  43. Release,
  44. ReleaseCommit,
  45. ReleaseEnvironment,
  46. ReleaseProjectEnvironment,
  47. ReleaseFile,
  48. Repository,
  49. Team,
  50. TOMBSTONE_FIELDS_FROM_GROUP,
  51. User,
  52. UserReport,
  53. Monitor,
  54. MonitorStatus,
  55. MonitorType,
  56. MonitorCheckIn,
  57. CheckInStatus,
  58. )
  59. from sentry.signals import mocks_loaded
  60. from sentry.similarity import features
  61. from sentry.snuba.models import QueryAggregations
  62. from sentry.utils import loremipsum
  63. from sentry.utils.hashlib import md5_text
  64. from sentry.utils.samples import create_sample_event as _create_sample_event
  65. from sentry.utils.samples import generate_user
# Endless round-robin pools drawn from while generating mock events/monitors.
PLATFORMS = itertools.cycle(["ruby", "php", "python", "java", "javascript"])
# Weighted toward "error" so most mock events resemble typical traffic.
LEVELS = itertools.cycle(["error", "error", "error", "fatal", "warning"])
# The empty string yields events with no environment set.
ENVIRONMENTS = itertools.cycle(["production", "production", "staging", "alpha", "beta", ""])
# Monitor names are borrowed from the configured celerybeat task names.
MONITOR_NAMES = itertools.cycle(settings.CELERYBEAT_SCHEDULE.keys())
# Cron expressions: every minute / hourly / daily.
MONITOR_SCHEDULES = itertools.cycle(["* * * * *", "0 * * * *", "0 0 * * *"])
# A realistic very long message (a ClickHouse stack trace) used to exercise
# handling/rendering of oversized event messages.
LONG_MESSAGE = """Code: 0.
DB::Exception: String is too long for DateTime: 2018-10-26T19:14:18+00:00. Stack trace:
0. clickhouse-server(StackTrace::StackTrace()+0x16) [0x99e9626]
1. clickhouse-server(DB::Exception::Exception(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, int)+0x22) [0x3087172]
2. clickhouse-server(DB::FunctionComparison<DB::EqualsOp, DB::NameEquals>::executeDateOrDateTimeOrEnumOrUUIDWithConstString(DB::Block&, unsigned long, DB::IColumn const*, DB::IColumn const*, std::shared_ptr<DB::IDataType const> const&, std::shared_ptr<DB::IDataType const> const&, bool, unsigned long)+0x13c8) [0x3b233d8]
3. clickhouse-server(DB::FunctionComparison<DB::EqualsOp, DB::NameEquals>::executeImpl(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x576) [0x3bafc86]
4. clickhouse-server(DB::PreparedFunctionImpl::defaultImplementationForNulls(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x174) [0x7953cd4]
5. clickhouse-server(DB::PreparedFunctionImpl::executeWithoutLowCardinalityColumns(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x54) [0x7953b04]
6. clickhouse-server(DB::PreparedFunctionImpl::execute(DB::Block&, std::vector<unsigned long, std::allocator<unsigned long> > const&, unsigned long, unsigned long)+0x3e2) [0x7954222]
7. clickhouse-server(DB::ExpressionAction::execute(DB::Block&, std::unordered_map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long, std::hash<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::equal_to<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, unsigned long> > >&) const+0x69b) [0x7b021fb]
8. clickhouse-server(DB::ExpressionActions::execute(DB::Block&) const+0xe6) [0x7b03676]
9. clickhouse-server(DB::FilterBlockInputStream::FilterBlockInputStream(std::shared_ptr<DB::IBlockInputStream> const&, std::shared_ptr<DB::ExpressionActions> const&, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool)+0x711) [0x79b7e31]
10. clickhouse-server() [0x75e9443]
11. clickhouse-server(DB::InterpreterSelectQuery::executeImpl(DB::InterpreterSelectQuery::Pipeline&, std::shared_ptr<DB::IBlockInputStream> const&, bool)+0x118f) [0x75f212f]
12. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr<DB::IAST> const&, DB::Context const&, std::shared_ptr<DB::IBlockInputStream> const&, std::shared_ptr<DB::IStorage> const&, std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x5e6) [0x75f2d46]
13. clickhouse-server(DB::InterpreterSelectQuery::InterpreterSelectQuery(std::shared_ptr<DB::IAST> const&, DB::Context const&, std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x56) [0x75f3aa6]
14. clickhouse-server(DB::InterpreterSelectWithUnionQuery::InterpreterSelectWithUnionQuery(std::shared_ptr<DB::IAST> const&, DB::Context const&, std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&, DB::QueryProcessingStage::Enum, unsigned long, bool)+0x7e7) [0x75ffab7]
15. clickhouse-server(DB::InterpreterFactory::get(std::shared_ptr<DB::IAST>&, DB::Context&, DB::QueryProcessingStage::Enum)+0x3a8) [0x75dc138]
16. clickhouse-server() [0x768fad9]
17. clickhouse-server(DB::executeQuery(std::__cxx11::basic..."""
  91. def make_sentence(words=None):
  92. if words is None:
  93. words = int(random.weibullvariate(8, 3))
  94. return " ".join(random.choice(loremipsum.words) for _ in range(words))
  95. def create_sample_event(*args, **kwargs):
  96. try:
  97. event = _create_sample_event(*args, **kwargs)
  98. except HashDiscarded as e:
  99. print("> Skipping Event: {}".format(e.message)) # NOQA
  100. else:
  101. if event is not None:
  102. features.record([event])
  103. return event
  104. def generate_commits(user):
  105. commits = []
  106. for i in range(random.randint(1, 20)):
  107. if i == 1:
  108. filename = "raven/base.py"
  109. else:
  110. filename = random.choice(loremipsum.words) + ".js"
  111. if random.randint(0, 5) == 1:
  112. author = (user.name, user.email)
  113. else:
  114. author = (
  115. "{} {}".format(random.choice(loremipsum.words), random.choice(loremipsum.words)),
  116. "{}@example.com".format(random.choice(loremipsum.words)),
  117. )
  118. commits.append(
  119. {
  120. "key": sha1(uuid4().hex).hexdigest(),
  121. "message": "feat: Do something to {}\n{}".format(filename, make_sentence()),
  122. "author": author,
  123. "files": [(filename, "M")],
  124. }
  125. )
  126. return commits
  127. def generate_tombstones(project, user):
  128. # attempt to create a high enough previous_group_id
  129. # that it won't conflict with any group ids
  130. prev_group_id = 100000
  131. try:
  132. prev_group_id = (
  133. max(
  134. GroupTombstone.objects.order_by("-previous_group_id")[0].previous_group_id,
  135. prev_group_id,
  136. )
  137. + 1
  138. )
  139. except IndexError:
  140. pass
  141. for group in Group.objects.filter(project=project)[:5]:
  142. GroupTombstone.objects.create(
  143. previous_group_id=prev_group_id,
  144. actor_id=user.id,
  145. **{name: getattr(group, name) for name in TOMBSTONE_FIELDS_FROM_GROUP}
  146. )
  147. prev_group_id += 1
  148. def create_system_time_series():
  149. now = datetime.utcnow().replace(tzinfo=utc)
  150. for _ in xrange(60):
  151. count = randint(1, 10)
  152. tsdb.incr_multi(
  153. (
  154. (tsdb.models.internal, "client-api.all-versions.responses.2xx"),
  155. (tsdb.models.internal, "client-api.all-versions.requests"),
  156. ),
  157. now,
  158. int(count * 0.9),
  159. )
  160. tsdb.incr_multi(
  161. ((tsdb.models.internal, "client-api.all-versions.responses.4xx"),),
  162. now,
  163. int(count * 0.05),
  164. )
  165. tsdb.incr_multi(
  166. ((tsdb.models.internal, "client-api.all-versions.responses.5xx"),),
  167. now,
  168. int(count * 0.1),
  169. )
  170. now = now - timedelta(seconds=1)
  171. for _ in xrange(24 * 30):
  172. count = randint(100, 1000)
  173. tsdb.incr_multi(
  174. (
  175. (tsdb.models.internal, "client-api.all-versions.responses.2xx"),
  176. (tsdb.models.internal, "client-api.all-versions.requests"),
  177. ),
  178. now,
  179. int(count * 4.9),
  180. )
  181. tsdb.incr_multi(
  182. ((tsdb.models.internal, "client-api.all-versions.responses.4xx"),),
  183. now,
  184. int(count * 0.05),
  185. )
  186. tsdb.incr_multi(
  187. ((tsdb.models.internal, "client-api.all-versions.responses.5xx"),),
  188. now,
  189. int(count * 0.1),
  190. )
  191. now = now - timedelta(hours=1)
  192. def create_sample_time_series(event, release=None):
  193. if event is None:
  194. return
  195. group = event.group
  196. project = group.project
  197. key = project.key_set.all()[0]
  198. now = datetime.utcnow().replace(tzinfo=utc)
  199. environment = Environment.get_or_create(
  200. project=project, name=Environment.get_name_or_default(event.get_tag("environment"))
  201. )
  202. if release:
  203. ReleaseEnvironment.get_or_create(
  204. project=project, release=release, environment=environment, datetime=now
  205. )
  206. grouprelease = GroupRelease.get_or_create(
  207. group=group, release=release, environment=environment, datetime=now
  208. )
  209. for _ in xrange(60):
  210. count = randint(1, 10)
  211. tsdb.incr_multi(
  212. ((tsdb.models.project, project.id), (tsdb.models.group, group.id)),
  213. now,
  214. count,
  215. environment_id=environment.id,
  216. )
  217. tsdb.incr_multi(
  218. (
  219. (tsdb.models.organization_total_received, project.organization_id),
  220. (tsdb.models.project_total_received, project.id),
  221. (tsdb.models.key_total_received, key.id),
  222. ),
  223. now,
  224. int(count * 1.1),
  225. )
  226. tsdb.incr(
  227. tsdb.models.project_total_forwarded, project.id, now, int(count * 1.1),
  228. )
  229. tsdb.incr_multi(
  230. (
  231. (tsdb.models.organization_total_rejected, project.organization_id),
  232. (tsdb.models.project_total_rejected, project.id),
  233. (tsdb.models.key_total_rejected, key.id),
  234. ),
  235. now,
  236. int(count * 0.1),
  237. )
  238. frequencies = [
  239. (tsdb.models.frequent_issues_by_project, {project.id: {group.id: count}}),
  240. (tsdb.models.frequent_environments_by_group, {group.id: {environment.id: count}}),
  241. ]
  242. if release:
  243. frequencies.append(
  244. (tsdb.models.frequent_releases_by_group, {group.id: {grouprelease.id: count}})
  245. )
  246. tsdb.record_frequency_multi(frequencies, now)
  247. now = now - timedelta(seconds=1)
  248. for _ in xrange(24 * 30):
  249. count = randint(100, 1000)
  250. tsdb.incr_multi(
  251. ((tsdb.models.project, group.project.id), (tsdb.models.group, group.id)),
  252. now,
  253. count,
  254. environment_id=environment.id,
  255. )
  256. tsdb.incr_multi(
  257. (
  258. (tsdb.models.organization_total_received, project.organization_id),
  259. (tsdb.models.project_total_received, project.id),
  260. (tsdb.models.key_total_received, key.id),
  261. ),
  262. now,
  263. int(count * 1.1),
  264. )
  265. tsdb.incr_multi(
  266. (
  267. (tsdb.models.organization_total_rejected, project.organization_id),
  268. (tsdb.models.project_total_rejected, project.id),
  269. (tsdb.models.key_total_rejected, key.id),
  270. ),
  271. now,
  272. int(count * 0.1),
  273. )
  274. frequencies = [
  275. (tsdb.models.frequent_issues_by_project, {project.id: {group.id: count}}),
  276. (tsdb.models.frequent_environments_by_group, {group.id: {environment.id: count}}),
  277. ]
  278. if release:
  279. frequencies.append(
  280. (tsdb.models.frequent_releases_by_group, {group.id: {grouprelease.id: count}})
  281. )
  282. tsdb.record_frequency_multi(frequencies, now)
  283. now = now - timedelta(hours=1)
def main(num_events=1, extra_events=False):
    """Populate the database with mock data for local development.

    Creates (or reuses) two mock teams and their projects under the default
    organization, then fills each project with a release, commits, a deploy,
    a cron monitor, sample events, a metric alert/incident and time-series
    data.  Fires the ``mocks_loaded`` signal per project when done.

    :param num_events: number of sample-event batches to create per project.
    :param extra_events: when True, also create 45 filler events per project
        to exercise pagination.
    """
    # The first superuser becomes the author/actor for all mock data.
    user = User.objects.filter(is_superuser=True)[0]
    dummy_user, _ = User.objects.get_or_create(
        username="dummy@example.com", defaults={"email": "dummy@example.com"}
    )
    dummy_user.set_password("dummy")
    dummy_user.save()
    # (team name, project names) pairs to mock.
    mocks = (
        ("Massive Dynamic", ("Ludic Science",)),
        ("Captain Planet", ("Earth", "Fire", "Wind", "Water", "Heart")),
    )
    Broadcast.objects.create(
        title="Learn about Source Maps",
        message="Source maps are JSON files that contain information on how to map your transpiled source code back to their original source.",
        link="https://docs.sentry.io/platforms/javascript/#source-maps",
    )
    if settings.SENTRY_SINGLE_ORGANIZATION:
        org = Organization.get_default()
        print("Mocking org {}".format(org.name))  # NOQA
    else:
        print("Mocking org {}".format("Default"))  # NOQA
        org, _ = Organization.objects.get_or_create(slug="default")
    # Superuser gets the top role; the dummy user gets the default role.
    OrganizationMember.objects.get_or_create(
        user=user, organization=org, role=roles.get_top_dog().id
    )
    dummy_member, _ = OrganizationMember.objects.get_or_create(
        user=dummy_user, organization=org, defaults={"role": roles.get_default().id}
    )
    for team_name, project_names in mocks:
        print("> Mocking team {}".format(team_name))  # NOQA
        team, _ = Team.objects.get_or_create(name=team_name, defaults={"organization": org})
        for project_name in project_names:
            print(" > Mocking project {}".format(project_name))  # NOQA
            project, _ = Project.objects.get_or_create(
                name=project_name,
                defaults={
                    "organization": org,
                    "first_event": timezone.now(),
                    "flags": Project.flags.has_releases,
                },
            )
            project.add_team(team)
            # Pre-existing projects may lack these; backfill them.
            if not project.first_event:
                project.update(first_event=project.date_added)
            if not project.flags.has_releases:
                project.update(flags=F("flags").bitor(Project.flags.has_releases))
            # One cron monitor per project, named after a celerybeat task.
            monitor, created = Monitor.objects.get_or_create(
                name=next(MONITOR_NAMES),
                project_id=project.id,
                organization_id=org.id,
                type=MonitorType.CRON_JOB,
                defaults={
                    "config": {"schedule": next(MONITOR_SCHEDULES)},
                    "next_checkin": timezone.now() + timedelta(minutes=60),
                    "last_checkin": timezone.now(),
                },
            )
            if not created:
                if not (monitor.config or {}).get("schedule"):
                    monitor.config = {"schedule": next(MONITOR_SCHEDULES)}
                # Refresh existing monitors; ~70% are marked healthy.
                monitor.update(
                    config=monitor.config,
                    status=MonitorStatus.OK if randint(0, 10) < 7 else MonitorStatus.ERROR,
                    last_checkin=timezone.now(),
                    next_checkin=monitor.get_next_scheduled_checkin(timezone.now()),
                )
            MonitorCheckIn.objects.create(
                project_id=monitor.project_id,
                monitor=monitor,
                status=CheckInStatus.OK
                if monitor.status == MonitorStatus.OK
                else CheckInStatus.ERROR,
            )
            with transaction.atomic():
                # NOTE(review): this filters on a *freshly generated* random
                # sha1, so `has_release` is effectively always False and a new
                # release is created on every run (which also keeps `release`
                # bound for the code below) — confirm intent.
                has_release = Release.objects.filter(
                    version=sha1(uuid4().bytes).hexdigest(),
                    organization_id=project.organization_id,
                    projects=project,
                ).exists()
                if not has_release:
                    release = Release.objects.filter(
                        version=sha1(uuid4().bytes).hexdigest(),
                        organization_id=project.organization_id,
                    ).first()
                    if not release:
                        release = Release.objects.create(
                            version=sha1(uuid4().bytes).hexdigest(),
                            organization_id=project.organization_id,
                        )
                    release.add_project(project)
            generate_tombstones(project, user)
            raw_commits = generate_commits(user)
            try:
                with transaction.atomic():
                    repo, _ = Repository.objects.get_or_create(
                        organization_id=org.id,
                        provider="integrations:github",
                        external_id="example/example",
                        defaults={
                            "name": "Example Repo",
                            "url": "https://github.com/example/example",
                        },
                    )
            except IntegrityError:
                # for users with legacy github plugin
                # upgrade to the new integration
                repo = Repository.objects.get(
                    organization_id=org.id,
                    provider="github",
                    external_id="example/example",
                    name="Example Repo",
                )
                repo.provider = "integrations:github"
                repo.save()
            # Persist the fake commits and attach them to the release.
            authors = set()
            for commit_index, raw_commit in enumerate(raw_commits):
                author = CommitAuthor.objects.get_or_create(
                    organization_id=org.id,
                    email=raw_commit["author"][1],
                    defaults={"name": raw_commit["author"][0]},
                )[0]
                commit = Commit.objects.get_or_create(
                    organization_id=org.id,
                    repository_id=repo.id,
                    key=raw_commit["key"],
                    defaults={"author": author, "message": raw_commit["message"]},
                )[0]
                authors.add(author)
                for file in raw_commit["files"]:
                    ReleaseFile.objects.get_or_create(
                        organization_id=project.organization_id,
                        release=release,
                        name=file[0],
                        file=File.objects.get_or_create(
                            name=file[0], type="release.file", checksum="abcde" * 8, size=13043
                        )[0],
                        defaults={"organization_id": project.organization_id},
                    )
                    CommitFileChange.objects.get_or_create(
                        organization_id=org.id, commit=commit, filename=file[0], type=file[1]
                    )
                ReleaseCommit.objects.get_or_create(
                    organization_id=org.id, release=release, commit=commit, order=commit_index
                )
            # create an unreleased commit
            Commit.objects.get_or_create(
                organization_id=org.id,
                repository_id=repo.id,
                key=sha1(uuid4().hex).hexdigest(),
                defaults={
                    "author": CommitAuthor.objects.get_or_create(
                        organization_id=org.id, email=user.email, defaults={"name": user.name}
                    )[0],
                    "message": "feat: Do something to {}\n{}".format(
                        random.choice(loremipsum.words) + ".js", make_sentence()
                    ),
                },
            )[0]
            Activity.objects.create(
                type=Activity.RELEASE,
                project=project,
                ident=release.version,
                user=user,
                data={"version": release.version},
            )
            # Fake a deploy of the release into a cycled environment.
            environment = Environment.get_or_create(project=project, name=six.next(ENVIRONMENTS))
            deploy = Deploy.objects.create(
                organization_id=project.organization_id,
                release=release,
                environment_id=environment.id,
            )
            # `commit` is the last commit created in the loop above
            # (raw_commits always has at least one entry).
            release.update(
                commit_count=len(raw_commits),
                last_commit_id=commit.id,
                total_deploys=Deploy.objects.filter(release=release).count(),
                last_deploy_id=deploy.id,
                authors=[six.text_type(a.id) for a in authors],
            )
            ReleaseProjectEnvironment.objects.create_or_update(
                project=project,
                environment=environment,
                release=release,
                defaults={"last_deploy_id": deploy.id},
            )
            Activity.objects.create(
                type=Activity.DEPLOY,
                project=project,
                ident=release.version,
                data={
                    "version": release.version,
                    "deploy_id": deploy.id,
                    "environment": environment.name,
                },
                datetime=deploy.date_finished,
            )
            # Add a bunch of additional dummy events to support pagination
            if extra_events:
                for _ in range(45):
                    platform = six.next(PLATFORMS)
                    create_sample_event(
                        project=project,
                        platform=platform,
                        release=release.version,
                        level=six.next(LEVELS),
                        environment=six.next(ENVIRONMENTS),
                        message="This is a mostly useless example %s exception" % platform,
                        checksum=md5_text(platform + six.text_type(_)).hexdigest(),
                        user=generate_user(),
                    )
            for _ in range(num_events):
                # create_sample_event may return None when the hash was
                # discarded, hence the truthiness checks below.
                event1 = create_sample_event(
                    project=project,
                    platform="python",
                    release=release.version,
                    environment=six.next(ENVIRONMENTS),
                    user=generate_user(),
                )
                EventAttachment.objects.create(
                    project_id=project.id,
                    event_id=event1.event_id,
                    name="example-logfile.txt",
                    file=File.objects.get_or_create(
                        name="example-logfile.txt",
                        type="text/plain",
                        checksum="abcde" * 8,
                        size=13043,
                    )[0],
                )
                event2 = create_sample_event(
                    project=project,
                    platform="javascript",
                    release=release.version,
                    environment=six.next(ENVIRONMENTS),
                    sdk={"name": "raven-js", "version": "2.1.0"},
                    user=generate_user(),
                )
                event3 = create_sample_event(project, "java")
                event4 = create_sample_event(
                    project=project,
                    platform="ruby",
                    release=release.version,
                    environment=six.next(ENVIRONMENTS),
                    user=generate_user(),
                )
                event5 = create_sample_event(
                    project=project,
                    platform="cocoa",
                    release=release.version,
                    environment=six.next(ENVIRONMENTS),
                    user=generate_user(),
                )
                # Oversized-message event (ClickHouse stack trace).
                create_sample_event(
                    project=project,
                    platform="php",
                    release=release.version,
                    environment=six.next(ENVIRONMENTS),
                    message=LONG_MESSAGE,
                    user=generate_user(),
                )
                create_sample_event(
                    project=project,
                    platform="cocoa",
                    sample_name="react-native",
                    release=release.version,
                    environment=six.next(ENVIRONMENTS),
                    user=generate_user(),
                )
                create_sample_event(
                    project=project,
                    platform="pii",
                    release=release.version,
                    environment=six.next(ENVIRONMENTS),
                    user=generate_user(),
                )
                # A commit whose message references the cocoa event's issue.
                if event5:
                    Commit.objects.get_or_create(
                        organization_id=org.id,
                        repository_id=repo.id,
                        key=sha1(uuid4().hex).hexdigest(),
                        defaults={
                            "author": CommitAuthor.objects.get_or_create(
                                organization_id=org.id, email=user.email, defaults={"name": user.name}
                            )[0],
                            "message": "Ooops!\nFixes {}".format(event5.group.qualified_short_id),
                        },
                    )[0]
                create_sample_event(project=project, environment=six.next(ENVIRONMENTS), platform="csp")
                if event3:
                    UserReport.objects.create(
                        project=project,
                        event_id=event3.event_id,
                        group=event3.group,
                        name="Jane Bloggs",
                        email="jane@example.com",
                        comments=make_sentence(),
                    )
                try:
                    # Metric alerts
                    alert_rule = create_alert_rule(
                        org,
                        [project],
                        "My Alert Rule",
                        "level:error",
                        "count()",
                        10,
                        AlertRuleThresholdType.ABOVE,
                        1,
                    )
                    create_alert_rule_trigger(alert_rule, "critical", 10)
                    create_incident(
                        org,
                        type_=IncidentType.DETECTED,
                        title="My Incident",
                        date_started=datetime.utcnow().replace(tzinfo=utc),
                        alert_rule=alert_rule,
                        projects=[project],
                    )
                except AlertRuleNameAlreadyUsedError:
                    # Repeat runs reuse the existing rule/incident.
                    pass
            # NOTE(review): `.format(project_name)` here has no placeholder in
            # the string, so the argument is ignored — harmless but likely a
            # leftover.
            print(" > Loading time series data".format(project_name))  # NOQA
            create_sample_time_series(event1, release=release)
            create_sample_time_series(event2, release=release)
            create_sample_time_series(event3)
            create_sample_time_series(event4, release=release)
            create_sample_time_series(event5, release=release)
            if hasattr(buffer, "process_pending"):
                print(" > Processing pending buffers")  # NOQA
                buffer.process_pending()
            mocks_loaded.send(project=project, sender=__name__)
    # `team`/`project` here are whatever the loops left bound last.
    OrganizationAccessRequest.objects.create_or_update(member=dummy_member, team=team)
    Activity.objects.create(
        type=Activity.RELEASE,
        project=project,
        ident="4f38b65c62c4565aa94bba391ff8946922a8eed4",
        user=user,
        data={"version": "4f38b65c62c4565aa94bba391ff8946922a8eed4"},
    )
    create_system_time_series()
  622. if __name__ == "__main__":
  623. settings.CELERY_ALWAYS_EAGER = True
  624. from optparse import OptionParser
  625. parser = OptionParser()
  626. parser.add_option("--events", dest="num_events", default=1, type=int)
  627. parser.add_option("--extra-events", dest="extra_events", default=False, action="store_true")
  628. (options, args) = parser.parse_args()
  629. try:
  630. main(num_events=options.num_events, extra_events=options.extra_events)
  631. except Exception:
  632. # Avoid reporting any issues recursively back into Sentry
  633. import traceback
  634. import sys
  635. traceback.print_exc()
  636. sys.exit(1)