feat(demo): releases and quick organization (#24603)

Stephen Cefali 4 years ago
parent commit 1f4b4d41d7

+ 7 - 2
src/sentry/conf/server.py

@@ -2154,12 +2154,17 @@ DEMO_ORG_OWNER_EMAIL = None
 DEMO_DATA_GEN_PARAMS = {
     "MAX_DAYS": 7,  # how many days of data
     "SCALE_FACTOR": 1,  # scales the frequency of events
-    "BASE_OFFSET": 0.5,  # higher values increases the minum number of events in an hour
-    "NAME_STEP_SIZE": 20,  # higher value means fewr possible test users in sample
+    "BASE_OFFSET": 0.5,  # higher values increases the minimum number of events in an hour
+    "NAME_STEP_SIZE": 10,  # higher value means fewer possible test users in sample
     "BREADCRUMB_LOOKBACK_TIME": 5,  # how far back should breadcrumbs go from the time of the event
     "DEFAULT_BACKOFF_TIME": 0,  # backoff time between sending events
     "ERROR_BACKOFF_TIME": 0.5,  # backoff time after a snuba error
+    "NUM_RELEASES": 3,
 }
 
+# parameters used when quickly generating an org synchronously
+DEMO_DATA_QUICK_GEN_PARAMS = DEMO_DATA_GEN_PARAMS.copy()
+DEMO_DATA_QUICK_GEN_PARAMS.update(MAX_DAYS=1, SCALE_FACTOR=0.5, NAME_STEP_SIZE=100)
+
 # adds an extra JS to HTML template
 INJECTED_SCRIPT_ASSETS = []
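
For reference, a minimal sketch of what the quick-gen config resolves to, assuming only the values shown in this hunk (nothing outside this diff):

# Sketch: effective quick-gen parameters, derived from the settings above.
DEMO_DATA_GEN_PARAMS = {
    "MAX_DAYS": 7,
    "SCALE_FACTOR": 1,
    "BASE_OFFSET": 0.5,
    "NAME_STEP_SIZE": 10,
    "BREADCRUMB_LOOKBACK_TIME": 5,
    "DEFAULT_BACKOFF_TIME": 0,
    "ERROR_BACKOFF_TIME": 0.5,
    "NUM_RELEASES": 3,
}

# copy() + update() only overrides the three keys passed to update();
# every other key (e.g. NUM_RELEASES) is inherited from the base dict.
DEMO_DATA_QUICK_GEN_PARAMS = DEMO_DATA_GEN_PARAMS.copy()
DEMO_DATA_QUICK_GEN_PARAMS.update(MAX_DAYS=1, SCALE_FACTOR=0.5, NAME_STEP_SIZE=100)

assert DEMO_DATA_QUICK_GEN_PARAMS["MAX_DAYS"] == 1
assert DEMO_DATA_QUICK_GEN_PARAMS["NUM_RELEASES"] == 3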

+ 196 - 25
src/sentry/demo/data_population.py

@@ -10,12 +10,23 @@ from collections import defaultdict
 from datetime import timedelta
 from django.conf import settings
 from django.utils import timezone
+from hashlib import sha1
 from uuid import uuid4
 from typing import List
 
 from sentry.interfaces.user import User as UserInterface
-from sentry.models import Project
-from sentry.utils import json
+from sentry.models import (
+    File,
+    Project,
+    Release,
+    Repository,
+    CommitAuthor,
+    Commit,
+    ReleaseFile,
+    CommitFileChange,
+    ReleaseCommit,
+)
+from sentry.utils import json, loremipsum
 from sentry.utils.dates import to_timestamp
 from sentry.utils.samples import (
     random_geo,
@@ -26,18 +37,35 @@ from sentry.utils.samples import (
 from sentry.utils.snuba import SnubaError
 
 
-MAX_DAYS = settings.DEMO_DATA_GEN_PARAMS["MAX_DAYS"]
-SCALE_FACTOR = settings.DEMO_DATA_GEN_PARAMS["SCALE_FACTOR"]
-BASE_OFFSET = settings.DEMO_DATA_GEN_PARAMS["BASE_OFFSET"]
-NAME_STEP_SIZE = settings.DEMO_DATA_GEN_PARAMS["NAME_STEP_SIZE"]
-BREADCRUMB_LOOKBACK_TIME = settings.DEMO_DATA_GEN_PARAMS["BREADCRUMB_LOOKBACK_TIME"]
-DEFAULT_BACKOFF_TIME = settings.DEMO_DATA_GEN_PARAMS["DEFAULT_BACKOFF_TIME"]
-ERROR_BACKOFF_TIME = settings.DEMO_DATA_GEN_PARAMS["ERROR_BACKOFF_TIME"]
+commit_message_base_messages = [
+    "feat: Do something to",
+    "feat: Update code in",
+    "ref: Refactor code in",
+    "fix: Fix bug in",
+]
+
+base_paths_by_file_type = {"js": ["components/", "views/"], "py": ["flask/", "routes/"]}
 
 
 logger = logging.getLogger(__name__)
 
 
+def get_config(quick):
+    """
+    Returns the data generation config,
+    depending on whether we are doing a quick-gen or not
+    """
+    if quick:
+        return settings.DEMO_DATA_QUICK_GEN_PARAMS
+    else:
+        return settings.DEMO_DATA_GEN_PARAMS
+
+
+def get_config_var(name, quick):
+    config = get_config(quick)
+    return config[name]
+
+
 def get_data_file_path(file_name):
     return os.path.join(os.path.dirname(__file__), "data", file_name)
 
@@ -106,22 +134,143 @@ def get_user_by_id(id_0_offset):
     ).to_json()
 
 
-def generate_user():
+def generate_user(quick=False):
+    NAME_STEP_SIZE = get_config_var("NAME_STEP_SIZE", quick)
     name_list = get_list_of_names()
     id_0_offset = random.randrange(0, len(name_list), NAME_STEP_SIZE)
     return get_user_by_id(id_0_offset)
 
 
-def safe_send_event(data):
+def gen_random_author():
+    author = "{} {}".format(random.choice(loremipsum.words), random.choice(loremipsum.words))
+    return (
+        author,
+        "{}@example.com".format(author.replace(" ", ".")),
+    )
+
+
+def get_release_from_time(org_id, timestamp):
+    """
+    Returns the most recent release at or before a specific time
+    """
+    return (
+        Release.objects.filter(organization_id=org_id, date_added__lte=timestamp)
+        .order_by("-date_added")
+        .first()
+    )
+
+
+def generate_commits(required_files, file_extensions):
+    """
+    Generate the JSON for commits that are a combination of randomly generated files
+    and a set of files (required_files) with specific file extensions
+    """
+    commits = []
+    for i in range(random.randint(len(required_files), 20)):
+        if i < len(required_files):
+            filename = required_files[i]
+        else:
+            # create a realistic file path based off the extension we choose
+            extension = random.choice(file_extensions)
+            base_path = random.choice(base_paths_by_file_type[extension])
+            filename = base_path + random.choice(loremipsum.words) + "." + extension
+
+        # TODO: pass in user list for commits
+        author = gen_random_author()
+
+        base_message = random.choice(commit_message_base_messages)
+
+        commits.append(
+            {
+                "key": sha1(uuid4().bytes).hexdigest(),
+                "message": f"{base_message} {filename}",
+                "author": author,
+                "files": [(filename, "M")],
+            }
+        )
+    return commits
+
+
+def generate_releases(projects, quick):
+    config = get_config(quick)
+    NUM_RELEASES = config["NUM_RELEASES"]
+    MAX_DAYS = config["MAX_DAYS"]
+    release_time = timezone.now() - timedelta(days=MAX_DAYS)
+    hourly_release_cadence = MAX_DAYS * 24.0 / NUM_RELEASES
+    org = projects[0].organization
+    org_id = org.id
+    for i in range(NUM_RELEASES):
+        release = Release.objects.create(
+            version=f"V{i + 1}",
+            organization_id=org_id,
+            date_added=release_time,
+        )
+        for project in projects:
+            release.add_project(project)
+
+        # TODO: unhardcode params when we add more scenarios
+        raw_commits = generate_commits(["components/ShoppingCart.js", "flask/app.py"], ["js", "py"])
+
+        repo, _ = Repository.objects.get_or_create(
+            organization_id=org.id,
+            external_id="example/example",
+            defaults={
+                "name": "Example Repo",
+            },
+        )
+        authors = set()
+
+        for commit_index, raw_commit in enumerate(raw_commits):
+            author = CommitAuthor.objects.get_or_create(
+                organization_id=org.id,
+                email=raw_commit["author"][1],
+                defaults={"name": raw_commit["author"][0]},
+            )[0]
+            commit = Commit.objects.get_or_create(
+                organization_id=org.id,
+                repository_id=repo.id,
+                key=raw_commit["key"],
+                defaults={
+                    "author": author,
+                    "message": raw_commit["message"],
+                    "date_added": release_time,
+                },
+            )[0]
+            authors.add(author)
+
+            for file in raw_commit["files"]:
+                ReleaseFile.objects.get_or_create(
+                    organization_id=project.organization_id,
+                    release=release,
+                    name=file[0],
+                    file=File.objects.get_or_create(
+                        name=file[0], type="release.file", checksum="abcde" * 8, size=13043
+                    )[0],
+                    defaults={"organization_id": project.organization_id},
+                )
+
+                CommitFileChange.objects.get_or_create(
+                    organization_id=org.id, commit=commit, filename=file[0], type=file[1]
+                )
+
+            ReleaseCommit.objects.get_or_create(
+                organization_id=org.id, release=release, commit=commit, order=commit_index
+            )
+
+        release_time += timedelta(hours=hourly_release_cadence)
+
+
+def safe_send_event(data, quick):
     project = data.pop("project")
+    config = get_config(quick)
     # TODO: make a batched update version of create_sample_event
     try:
         create_sample_event_basic(data, project.id)
-        time.sleep(DEFAULT_BACKOFF_TIME)
+        time.sleep(config["DEFAULT_BACKOFF_TIME"])
     except SnubaError:
         # if snuba fails, just back off and continue
         logger.info("safe_send_event.snuba_error")
-        time.sleep(ERROR_BACKOFF_TIME)
+        time.sleep(config["ERROR_BACKOFF_TIME"])
 
 
 def clean_event(event_json):
@@ -164,9 +313,9 @@ def fix_timestamps(event_json):
         event_json["start_timestamp"] = to_timestamp(start_timestamp)
 
 
-def fix_error_event(event_json):
+def fix_error_event(event_json, quick=False):
     fix_timestamps(event_json)
-    fix_breadrumbs(event_json)
+    fix_breadrumbs(event_json, quick)
 
 
 def fix_transaction_event(event_json, old_span_id):
@@ -283,11 +432,12 @@ def fix_measurements(event_json):
         measurements.update(measurement_markers)
 
 
-def fix_breadrumbs(event_json):
+def fix_breadrumbs(event_json, quick):
     """
     Fixes the timestamps on breadcrumbs to match the current time
     Evenly spaces out all breadcrumbs starting at BREADCRUMB_LOOKBACK_TIME ago
     """
+    BREADCRUMB_LOOKBACK_TIME = get_config_var("BREADCRUMB_LOOKBACK_TIME", quick)
     breadcrumbs = event_json.get("breadcrumbs", {}).get("values", [])
     num_breadcrumbs = len(breadcrumbs)
     breadcrumb_time_step = BREADCRUMB_LOOKBACK_TIME * 1.0 / num_breadcrumbs
@@ -298,7 +448,9 @@ def fix_breadrumbs(event_json):
         curr_time += breadcrumb_time_step
 
 
-def populate_connected_event_scenario_1(react_project: Project, python_project: Project):
+def populate_connected_event_scenario_1(
+    react_project: Project, python_project: Project, quick=False
+):
     """
     This function populates a set of four related events with the same trace id:
     - Front-end transaction
@@ -312,6 +464,12 @@ def populate_connected_event_scenario_1(react_project: Project, python_project:
     python_transaction = get_event_from_file("python_transaction_1.json")
     python_error = get_event_from_file("python_error_1.json")
 
+    config = get_config(quick)
+    MAX_DAYS = config["MAX_DAYS"]
+    SCALE_FACTOR = config["SCALE_FACTOR"]
+    BASE_OFFSET = config["BASE_OFFSET"]
+
+    start_time = timezone.now() - timedelta(days=MAX_DAYS)
     log_extra = {
         "organization_slug": react_project.organization.slug,
         "MAX_DAYS": MAX_DAYS,
@@ -324,6 +482,7 @@ def populate_connected_event_scenario_1(react_project: Project, python_project:
             base = distribution_v1(hour)
             # determine the number of events we want in this hour
             num_events = int((BASE_OFFSET + SCALE_FACTOR * base) * random.uniform(0.6, 1.0))
+            timestamps = []
             for i in range(num_events):
                 logger.info(
                     "populate_connected_event_scenario_1.send_event_series", extra=log_extra
@@ -331,10 +490,18 @@ def populate_connected_event_scenario_1(react_project: Project, python_project:
 
                 # pick the minutes randomly (which means events will be sent out of order)
                 minute = random.randint(0, 60)
-                timestamp = timezone.now() - timedelta(days=day, hours=hour, minutes=minute)
+                timestamp = start_time + timedelta(days=day, hours=hour, minutes=minute)
                 timestamp = timestamp.replace(tzinfo=pytz.utc)
-                transaction_user = generate_user()
+                timestamps.append(timestamp)
+
+            # sort the timestamps
+            timestamps.sort()
+
+            for timestamp in timestamps:
+                transaction_user = generate_user(quick)
                 trace_id = uuid4().hex
+                release = get_release_from_time(react_project.organization_id, timestamp)
+                release_sha = release.version
 
                 old_span_id = react_transaction["contexts"]["trace"]["span_id"]
                 frontend_root_span_id = uuid4().hex[:16]
@@ -355,6 +522,7 @@ def populate_connected_event_scenario_1(react_project: Project, python_project:
                     platform=react_project.platform,
                     event_id=uuid4().hex,
                     user=transaction_user,
+                    release=release_sha,
                     timestamp=timestamp,
                     # start_timestamp decreases based on day so that there's a trend
                     start_timestamp=timestamp - timedelta(seconds=frontend_duration),
@@ -368,7 +536,7 @@ def populate_connected_event_scenario_1(react_project: Project, python_project:
                 )
 
                 fix_transaction_event(local_event, old_span_id)
-                safe_send_event(local_event)
+                safe_send_event(local_event, quick)
 
                 # note picking the 0th span is arbitrary
                 backend_parent_id = local_event["spans"][0]["span_id"]
@@ -380,10 +548,11 @@ def populate_connected_event_scenario_1(react_project: Project, python_project:
                     platform=react_project.platform,
                     timestamp=timestamp,
                     user=transaction_user,
+                    release=release_sha,
                     contexts=frontend_context,
                 )
-                fix_error_event(local_event)
-                safe_send_event(local_event)
+                fix_error_event(local_event, quick)
+                safe_send_event(local_event, quick)
 
                 # python transaction
                 old_span_id = python_transaction["contexts"]["trace"]["span_id"]
@@ -405,10 +574,11 @@ def populate_connected_event_scenario_1(react_project: Project, python_project:
                     timestamp=timestamp,
                     start_timestamp=timestamp - timedelta(milliseconds=backend_duration),
                     user=transaction_user,
+                    release=release_sha,
                     contexts=backend_context,
                 )
                 fix_transaction_event(local_event, old_span_id)
-                safe_send_event(local_event)
+                safe_send_event(local_event, quick)
 
                 # python error
                 local_event = copy.deepcopy(python_error)
@@ -417,8 +587,9 @@ def populate_connected_event_scenario_1(react_project: Project, python_project:
                     platform=python_project.platform,
                     timestamp=timestamp,
                     user=transaction_user,
+                    release=release_sha,
                     contexts=backend_context,
                 )
-                fix_error_event(local_event)
-                safe_send_event(local_event)
+                fix_error_event(local_event, quick)
+                safe_send_event(local_event, quick)
     logger.info("populate_connected_event_scenario_1.finished", extra=log_extra)

+ 14 - 8
src/sentry/demo/demo_org_manager.py

@@ -18,16 +18,14 @@ from sentry.models import (
 )
 from sentry.utils.email import create_fake_email
 
-from .data_population import (
-    populate_connected_event_scenario_1,
-)
-from .utils import NoDemoOrgReady, generate_random_name
+from .data_population import populate_connected_event_scenario_1, generate_releases
+from .utils import generate_random_name
 from .models import DemoUser, DemoOrganization, DemoOrgStatus
 
 logger = logging.getLogger(__name__)
 
 
-def create_demo_org() -> Organization:
+def create_demo_org(quick=False) -> Organization:
     # wrap the main org setup in transaction
     with transaction.atomic():
         name = generate_random_name()
@@ -57,7 +55,11 @@ def create_demo_org() -> Organization:
         )
 
     # TODO: delete org if data population fails
-    populate_connected_event_scenario_1(react_project, python_project)
+    logger.info(
+        "create_demo_org.post-transaction", extra={"organization_slug": org.slug, "quick": quick}
+    )
+    generate_releases([react_project, python_project], quick=quick)
+    populate_connected_event_scenario_1(react_project, python_project, quick=quick)
 
     return org
 
@@ -65,14 +67,18 @@ def create_demo_org() -> Organization:
 def assign_demo_org() -> Tuple[Organization, User]:
     from .tasks import build_up_org_buffer
 
+    demo_org = None
     # option to skip the buffer when testing things out locally
     if settings.DEMO_NO_ORG_BUFFER:
         org = create_demo_org()
-        demo_org = DemoOrganization.objects.get(organization=org)
     else:
         demo_org = DemoOrganization.objects.filter(status=DemoOrgStatus.PENDING).first()
+        # if no org in buffer, make a quick one with fewer events
         if not demo_org:
-            raise NoDemoOrgReady()
+            org = create_demo_org(quick=True)
+
+    if not demo_org:
+        demo_org = DemoOrganization.objects.get(organization=org)
 
     org = demo_org.organization
 

+ 0 - 4
src/sentry/demo/utils.py

@@ -1,9 +1,5 @@
 import petname
 
 
-class NoDemoOrgReady(Exception):
-    pass
-
-
 def generate_random_name() -> str:
     return petname.Generate(2, " ", letters=10).title()

+ 14 - 7
src/sentry/web/frontend/demo_start.py

@@ -1,10 +1,13 @@
+import logging
+
 from django.http import Http404
 from django.conf import settings
 
-from sentry.demo.utils import NoDemoOrgReady
 from sentry.utils import auth
 from sentry.web.frontend.base import BaseView
 
+logger = logging.getLogger(__name__)
+
 
 class DemoStartView(BaseView):
     csrf_protect = False
@@ -15,22 +18,26 @@ class DemoStartView(BaseView):
         if not settings.DEMO_MODE:
             raise Http404
 
+        logger.info("post.start")
+
         # move this import here so Django doesn't discover the models
         # for demo mode except when demo mode is actually active
         from sentry.demo.demo_org_manager import assign_demo_org
 
         # assign the demo org and get the user
-        try:
-            _, user = assign_demo_org()
-        except NoDemoOrgReady:
-            # TODO: handle NoDemoOrgReady error
-            raise
+        org, user = assign_demo_org()
+
+        logger.info("post.assigned_org", extra={"organization_slug": org.slug})
 
         auth.login(request, user)
 
         resp = self.redirect(auth.get_login_redirect(request))
         # set a cookie of whether the user accepted tracking so we know
         # whether to initialize analytics when accepted_tracking=1
-        resp.set_cookie("accepted_tracking", request.POST.get("accepted_tracking"))
+        # 0 means don't show the footer to accept cookies (user already declined)
+        # no value means we show the footer to accept cookies (user has neither accepted nor declined)
+        accepted_tracking = request.POST.get("accepted_tracking")
+        if accepted_tracking in ["0", "1"]:
+            resp.set_cookie("accepted_tracking", accepted_tracking)
 
         return resp
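
The cookie handling above only persists an explicit choice. A small sketch of that rule with a plain dict standing in for the response cookies (apply_tracking_cookie is a hypothetical helper, not part of this diff):

def apply_tracking_cookie(cookies, accepted_tracking):
    # "1" = accepted, "0" = declined; any other value leaves the cookie unset
    # so the accept-tracking footer is shown again on the next page load
    if accepted_tracking in ["0", "1"]:
        cookies["accepted_tracking"] = accepted_tracking

cookies = {}
apply_tracking_cookie(cookies, None)  # no cookie set -> footer shown
apply_tracking_cookie(cookies, "0")   # "0" -> footer hidden, tracking declined
apply_tracking_cookie(cookies, "1")   # "1" -> analytics initialized
print(cookies)  # {'accepted_tracking': '1'}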

+ 12 - 5
tests/sentry/demo/test_demo_org_manager.py

@@ -6,7 +6,6 @@ from django.test import override_settings
 
 from sentry.demo.demo_org_manager import create_demo_org, assign_demo_org
 from sentry.demo.models import DemoOrganization, DemoUser, DemoOrgStatus
-from sentry.demo.utils import NoDemoOrgReady
 from sentry.models import (
     User,
     Organization,
@@ -26,8 +25,12 @@ org_name = "Org Name"
 
 @override_settings(DEMO_MODE=True, DEMO_ORG_OWNER_EMAIL=org_owner_email)
 class DemoOrgManagerTeest(TestCase):
+    @mock.patch("sentry.demo.demo_org_manager.generate_releases")
+    @mock.patch("sentry.demo.demo_org_manager.populate_connected_event_scenario_1")
     @mock.patch("sentry.demo.demo_org_manager.generate_random_name", return_value=org_name)
-    def test_create_demo_org(self, mock_generate_name):
+    def test_create_demo_org(
+        self, mock_generate_name, mock_populate_connected_event, mock_gen_releases
+    ):
         owner = User.objects.create(email=org_owner_email)
 
         create_demo_org()
@@ -43,6 +46,7 @@ class DemoOrgManagerTeest(TestCase):
 
         assert len(Project.objects.filter(organization=org)) == 2
         assert not ProjectKey.objects.filter(project__organization=org).exists()
+        mock_populate_connected_event.assert_called_once_with(mock.ANY, mock.ANY, quick=False)
 
     @mock.patch("sentry.demo.demo_org_manager.generate_random_name", return_value=org_name)
     def test_no_owner(self, mock_generate_name):
@@ -79,6 +83,9 @@ class DemoOrgManagerTeest(TestCase):
 
         mock_build_up_org_buffer.assert_called_once_with()
 
-    def test_no_org_ready(self):
-        with pytest.raises(NoDemoOrgReady):
-            assign_demo_org()
+    @mock.patch("sentry.demo.demo_org_manager.generate_releases")
+    @mock.patch("sentry.demo.demo_org_manager.populate_connected_event_scenario_1")
+    def test_no_org_ready(self, mock_populate_connected_event, mock_gen_releases):
+        User.objects.create(email=org_owner_email)
+        [org, user] = assign_demo_org()
+        mock_populate_connected_event.assert_called_once_with(mock.ANY, mock.ANY, quick=True)