
Feat/sentry plugin migration v1 (#15299)

Stephen Cefali, 5 years ago
commit 26c0d1593f

+ 6 - 0
.travis.yml

@@ -173,6 +173,11 @@ matrix:
       name: 'Django 1.9 Acceptance'
       env: DJANGO_VERSION=">=1.9,<1.10" TEST_SUITE=acceptance USE_SNUBA=1
 
+    # allowed to fail
+    - <<: *acceptance_default
+      name: 'Plugins'
+      env: TEST_SUITE=plugins DB=postgres PERCY_TOKEN=${PLUGIN_PERCY_TOKEN}
+
     - python: 2.7
       name: 'Frontend'
       env: TEST_SUITE=js
@@ -258,6 +263,7 @@ matrix:
     - name: 'Django 1.9 Backend [Postgres] (1/2)'
     - name: 'Django 1.9 Backend [Postgres] (2/2)'
     - name: 'Django 1.9 Acceptance'
+    - name: 'Plugins'
 
 notifications:
   webhooks:

+ 9 - 0
Makefile

@@ -180,6 +180,13 @@ endif
 
 	@echo ""
 
+test-plugins:
+	@echo "--> Building static assets"
+	@$(WEBPACK) --display errors-only
+	@echo "--> Running plugin tests"
+	py.test tests/sentry_plugins -vv --cov . --cov-report="xml:.artifacts/plugins.coverage.xml" --junit-xml=".artifacts/plugins.junit.xml"
+	@echo ""
+
 lint: lint-python lint-js
 
 # configuration for flake8 can be found in setup.cfg
@@ -230,6 +237,7 @@ travis-test-snuba: test-snuba
 travis-test-symbolicator: test-symbolicator
 travis-test-js: test-js
 travis-test-cli: test-cli
+travis-test-plugins: test-plugins
 travis-test-dist:
 	# NOTE: We quiet down output here to workaround an issue in travis that
 	# causes the build to fail with a EAGAIN when writing a large amount of
@@ -253,3 +261,4 @@ travis-scan-js: travis-noop
 travis-scan-cli: travis-noop
 travis-scan-dist: travis-noop
 travis-scan-lint: scan-python
+travis-scan-plugins: travis-noop

+ 31 - 0
conftest.py

@@ -18,6 +18,37 @@ def pytest_configure(config):
     # being used
     warnings.filterwarnings("error", "", Warning, r"^(?!(|kombu|raven|sentry))")
 
+    # if we are running any tests for plugins, we need to make sure we install them first
+    if any("tests/sentry_plugins" in s for s in config.getoption("file_or_dir")):
+        install_sentry_plugins()
+
+
+def install_sentry_plugins():
+    # Sentry's pytest plugin explicitly doesn't load plugins, so let's load all of them
+    # and ignore the fact that we're not *just* testing our own
+    # Note: We could manually register/configure INSTALLED_APPS by traversing our entry points
+    # or package directories, but this is easier assuming Sentry doesn't change APIs.
+    # Note: Order of operations matters here.
+    from sentry.runner.importer import install_plugin_apps
+    from django.conf import settings
+
+    install_plugin_apps("sentry.new_apps", settings)
+
+    from sentry.runner.initializer import register_plugins
+
+    register_plugins(settings, test_plugins=True)
+
+    settings.ASANA_CLIENT_ID = "abc"
+    settings.ASANA_CLIENT_SECRET = "123"
+    settings.BITBUCKET_CONSUMER_KEY = "abc"
+    settings.BITBUCKET_CONSUMER_SECRET = "123"
+    settings.GITHUB_APP_ID = "abc"
+    settings.GITHUB_API_SECRET = "123"
+    settings.GITHUB_APPS_APP_ID = "abc"
+    settings.GITHUB_APPS_API_SECRET = "123"
+    # this isn't the real secret
+    settings.SENTRY_OPTIONS["github.integration-hook-secret"] = "b3002c3e321d4b7880360d397db2ccfd"
+
 
 def pytest_collection_modifyitems(items):
     for item in items:
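
The conditional install matters because the plugin tests assume both the registered plugin entry points and the dummy credentials above are in place before collection finishes. A minimal sketch of such a test (the file name and assertions are illustrative, not part of this diff):

    # tests/sentry_plugins/test_setup.py -- illustrative sketch only, not code from this commit
    from __future__ import absolute_import

    from django.conf import settings


    def test_plugin_apps_installed():
        # install_plugin_apps("sentry.new_apps", settings) is expected to append the
        # entry-point modules declared in setup.py to INSTALLED_APPS.
        assert "new_sentry_plugins.jira" in settings.INSTALLED_APPS


    def test_dummy_plugin_credentials_available():
        # install_sentry_plugins() fills these in so OAuth-backed plugins can be
        # configured during tests without real secrets.
        assert settings.GITHUB_APP_ID == "abc"
        assert settings.SENTRY_OPTIONS["github.integration-hook-secret"]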

+ 4 - 0
requirements-base.txt

@@ -79,3 +79,7 @@ ua-parser>=0.6.1,<0.8.0
 unidiff>=0.5.4
 urllib3==1.24.2
 uwsgi>2.0.0,<2.1.0
+
+# sentry-plugins specific dependencies
+cached-property
+phabricator>=0.6.0,<1.0
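
Both additions back the migrated plugin clients: cached-property for lazily built API clients and phabricator for the Phabricator plugin. A rough usage sketch, not taken from this diff (the class, host, and token below are placeholders):

    # Illustrative only; the class and values are placeholders, not code from this commit.
    from cached_property import cached_property
    from phabricator import Phabricator


    class ExampleClient(object):
        @cached_property
        def api(self):
            # Built once on first access, then cached on the instance.
            return Phabricator(host="https://phabricator.example.com/api/", token="api-token-value")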

+ 29 - 1
setup.py

@@ -138,7 +138,35 @@ setup(
     cmdclass=cmdclass,
     license="BSD",
     include_package_data=True,
-    entry_points={"console_scripts": ["sentry = sentry.runner:main"]},
+    entry_points={
+        "console_scripts": ["sentry = sentry.runner:main"],
+        "sentry.new_apps": [
+            "jira_ac = new_sentry_plugins.jira_ac",
+            "jira = new_sentry_plugins.jira",
+            "sessionstack = new_sentry_plugins.sessionstack",
+        ],
+        "sentry.new_plugins": [
+            "amazon_sqs = new_sentry_plugins.amazon_sqs.plugin:AmazonSQSPlugin",
+            "asana = new_sentry_plugins.asana.plugin:AsanaPlugin",
+            "bitbucket = new_sentry_plugins.bitbucket.plugin:BitbucketPlugin",
+            "clubhouse = new_sentry_plugins.clubhouse.plugin:ClubhousePlugin",
+            "github = new_sentry_plugins.github.plugin:GitHubPlugin",
+            "gitlab = new_sentry_plugins.gitlab.plugin:GitLabPlugin",
+            "heroku = new_sentry_plugins.heroku.plugin:HerokuPlugin",
+            "jira = new_sentry_plugins.jira.plugin:JiraPlugin",
+            "jira_ac = new_sentry_plugins.jira_ac.plugin:JiraACPlugin",
+            "pagerduty = new_sentry_plugins.pagerduty.plugin:PagerDutyPlugin",
+            "phabricator = new_sentry_plugins.phabricator.plugin:PhabricatorPlugin",
+            "pivotal = new_sentry_plugins.pivotal.plugin:PivotalPlugin",
+            "pushover = new_sentry_plugins.pushover.plugin:PushoverPlugin",
+            "segment = new_sentry_plugins.segment.plugin:SegmentPlugin",
+            "sessionstack = new_sentry_plugins.sessionstack.plugin:SessionStackPlugin",
+            "slack = new_sentry_plugins.slack.plugin:SlackPlugin",
+            "splunk = new_sentry_plugins.splunk.plugin:SplunkPlugin",
+            "victorops = new_sentry_plugins.victorops.plugin:VictorOpsPlugin",
+            "vsts = new_sentry_plugins.vsts.plugin:VstsPlugin",
+        ],
+    },
     classifiers=[
         "Framework :: Django",
         "Intended Audience :: Developers",

+ 29 - 0
src/new_sentry_plugins/__init__.py

@@ -0,0 +1,29 @@
+from __future__ import absolute_import
+
+try:
+    VERSION = __import__("pkg_resources").get_distribution("sentry-plugins").version
+except Exception:
+    VERSION = "unknown"
+
+# Try to hook our webhook watcher into the rest of the watchers
+# iff this module is installed in editable mode.
+if "site-packages" not in __file__:
+    import os
+
+    root = os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+    node_modules = os.path.join(root, "node_modules")
+
+    if os.path.isdir(node_modules):
+        from django.conf import settings
+
+        settings.SENTRY_WATCHERS += (
+            (
+                "webpack.plugins",
+                [
+                    os.path.join(node_modules, ".bin", "webpack"),
+                    "--output-pathinfo",
+                    "--watch",
+                    "--config={}".format(os.path.join(root, "webpack.config.js")),
+                ],
+            ),
+        )

+ 1 - 0
src/new_sentry_plugins/amazon_sqs/__init__.py

@@ -0,0 +1 @@
+from __future__ import absolute_import

+ 138 - 0
src/new_sentry_plugins/amazon_sqs/plugin.py

@@ -0,0 +1,138 @@
+from __future__ import absolute_import
+
+import logging
+
+import boto3
+from botocore.client import ClientError
+from new_sentry_plugins.base import CorePluginMixin
+from sentry.plugins.bases.data_forwarding import DataForwardingPlugin
+from new_sentry_plugins.utils import get_secret_field_config
+from sentry.utils import json, metrics
+
+logger = logging.getLogger(__name__)
+
+
+def get_regions():
+    return boto3.session.Session().get_available_regions("sqs")
+
+
+class AmazonSQSPlugin(CorePluginMixin, DataForwardingPlugin):
+    title = "Amazon SQS"
+    slug = "amazon-sqs"
+    description = "Forward Sentry events to Amazon SQS."
+    conf_key = "amazon-sqs"
+
+    def get_config(self, project, **kwargs):
+        return [
+            {
+                "name": "queue_url",
+                "label": "Queue URL",
+                "type": "url",
+                "placeholder": "https://sqs-us-east-1.amazonaws.com/12345678/myqueue",
+            },
+            {
+                "name": "region",
+                "label": "Region",
+                "type": "select",
+                "choices": tuple((z, z) for z in get_regions()),
+            },
+            get_secret_field_config(
+                name="access_key", label="Access Key", secret=self.get_option("access_key", project)
+            ),
+            get_secret_field_config(
+                name="secret_key", label="Secret Key", secret=self.get_option("secret_key", project)
+            ),
+            {
+                "name": "message_group_id",
+                "label": "Message Group ID",
+                "type": "text",
+                "required": False,
+                "placeholder": "Required for FIFO queues, exclude for standard queues",
+            },
+        ]
+
+    def forward_event(self, event, payload):
+        queue_url = self.get_option("queue_url", event.project)
+        access_key = self.get_option("access_key", event.project)
+        secret_key = self.get_option("secret_key", event.project)
+        region = self.get_option("region", event.project)
+        message_group_id = self.get_option("message_group_id", event.project)
+
+        if not all((queue_url, access_key, secret_key, region)):
+            return
+
+        # TODO(dcramer): Amazon doesn't support payloads larger than 256KB
+        # We could support this by simply trimming it and allowing upload
+        # to S3
+        message = json.dumps(payload)
+        if len(message) > 256 * 1024:
+            return False
+
+        try:
+            client = boto3.client(
+                service_name="sqs",
+                aws_access_key_id=access_key,
+                aws_secret_access_key=secret_key,
+                region_name=region,
+            )
+
+            message = {"QueueUrl": queue_url, "MessageBody": message}
+
+            # need a MessageGroupId for FIFO queues
+            # note that if MessageGroupId is specified for non-FIFO, this will fail
+            if message_group_id:
+                from uuid import uuid4
+
+                message["MessageGroupId"] = message_group_id
+                # if content based de-duplication is not enabled, we need to provide a
+                # MessageDeduplicationId
+                message["MessageDeduplicationId"] = uuid4().hex
+
+            client.send_message(**message)
+        except ClientError as e:
+            if e.message.startswith("An error occurred (AccessDenied)"):
+                # If there's an issue with the user's token then we can't do
+                # anything to recover. Just log and continue.
+                metrics_name = "new_sentry_plugins.amazon_sqs.access_token_invalid"
+                logger.info(
+                    metrics_name,
+                    extra={
+                        "queue_url": queue_url,
+                        "access_key": access_key,
+                        "region": region,
+                        "project_id": event.project.id,
+                        "organization_id": event.project.organization_id,
+                    },
+                )
+                metrics.incr(
+                    metrics_name,
+                    tags={
+                        "project_id": event.project_id,
+                        "organization_id": event.project.organization_id,
+                    },
+                )
+                return False
+            elif e.message.endswith("must contain the parameter MessageGroupId."):
+                metrics_name = "new_sentry_plugins.amazon_sqs.missing_message_group_id"
+                logger.info(
+                    metrics_name,
+                    extra={
+                        "queue_url": queue_url,
+                        "access_key": access_key,
+                        "region": region,
+                        "project_id": event.project.id,
+                        "organization_id": event.project.organization_id,
+                        "message_group_id": message_group_id,
+                    },
+                )
+                metrics.incr(
+                    metrics_name,
+                    tags={
+                        "project_id": event.project_id,
+                        "organization_id": event.project.organization_id,
+                    },
+                )
+                return False
+            raise
+
+        return True
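
For reference, these are the two message shapes forward_event ends up passing to client.send_message, shown standalone (values are placeholders; this snippet is not part of the diff):

    # Standalone illustration of the send_message kwargs built above; values are placeholders.
    from uuid import uuid4

    body = '{"event_id": "..."}'  # the json.dumps(payload) result, capped at 256 KB

    standard_queue = {
        "QueueUrl": "https://sqs-us-east-1.amazonaws.com/12345678/myqueue",
        "MessageBody": body,
    }

    fifo_queue = {
        "QueueUrl": "https://sqs-us-east-1.amazonaws.com/12345678/myqueue.fifo",
        "MessageBody": body,
        # FIFO queues require a MessageGroupId; a MessageDeduplicationId is also
        # attached in case content-based deduplication is disabled on the queue.
        "MessageGroupId": "my-group-id",
        "MessageDeduplicationId": uuid4().hex,
    }

    # client.send_message(**standard_queue) or client.send_message(**fifo_queue)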

+ 126 - 0
src/new_sentry_plugins/anonymizeip.py

@@ -0,0 +1,126 @@
+# Port of https://github.com/samuelmeuli/anonymize-ip to Python 2
+"""
+MIT License
+
+Copyright (c) 2018 Samuel Meuli
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+"""
+from __future__ import absolute_import, unicode_literals
+
+import six
+
+from ipaddress import ip_address
+
+
+def anonymize_ip(
+    address, ipv4_mask="255.255.255.0", ipv6_mask="ffff:ffff:ffff:0000:0000:0000:0000:0000"
+):
+    """
+    Anonymize the provided IPv4 or IPv6 address by setting parts of the
+    address to 0
+    :param str|int address: IP address to be anonymized
+    :param str ipv4_mask: Mask that defines which parts of an IPv4 address are
+    set to 0 (default: "255.255.255.0")
+    :param str ipv6_mask: Mask that defines which parts of an IPv6 address are
+    set to 0 (default: "ffff:ffff:ffff:0000:0000:0000:0000:0000")
+    :return: Anonymized IP address
+    :rtype: str
+    """
+
+    # IP address to be anonymized
+    address_packed = ip_address(six.text_type(address)).packed
+    address_len = len(address_packed)
+
+    if address_len == 4:
+        # IPv4
+        ipv4_mask_packed = ip_address(ipv4_mask).packed
+        __validate_ipv4_mask(ipv4_mask_packed)
+        return __apply_mask(address_packed, ipv4_mask_packed, 4)
+    elif address_len == 16:
+        # IPv6
+        ipv6_mask_packed = ip_address(ipv6_mask).packed
+        __validate_ipv6_mask(ipv6_mask_packed)
+        return __apply_mask(address_packed, ipv6_mask_packed, 16)
+    else:
+        # Invalid address
+        raise ValueError("Address does not consist of 4 (IPv4) or 16 (IPv6) " "octets")
+
+
+def __apply_mask(address_packed, mask_packed, nr_bytes):
+    """
+    Perform a bitwise AND operation on all corresponding bytes between the
+    mask and the provided address. Mask parts set to 0 will become 0 in the
+    anonymized IP address as well
+    :param bytes address_packed: Binary representation of the IP address to
+    be anonymized
+    :param bytes mask_packed: Binary representation of the corresponding IP
+    address mask
+    :param int nr_bytes: Number of bytes in the address (4 for IPv4, 16 for
+    IPv6)
+    :return: Anonymized IP address
+    :rtype: str
+    """
+
+    anon_packed = bytearray()
+    for i in range(0, nr_bytes):
+        anon_packed.append(ord(mask_packed[i]) & ord(address_packed[i]))
+    return six.text_type(ip_address(six.binary_type(anon_packed)))
+
+
+def __validate_ipv4_mask(mask_packed):
+    # Test that mask only contains valid numbers
+    for byte in mask_packed:
+        if byte != b"\x00" and byte != b"\xff":
+            raise ValueError("ipv4_mask must only contain numbers 0 or 255")
+
+    # Test that IP address does not get anonymized completely
+    if mask_packed == b"\x00\x00\x00\x00":
+        raise ValueError(
+            'ipv4_mask cannot be set to "0.0.0.0" (all ' "anonymized addresses will be 0.0.0.0)"
+        )
+
+    # Test that IP address is changed by anonymization
+    if mask_packed == b"\xff\xff\xff\xff":
+        raise ValueError(
+            'ipv4_mask cannot be set to "255.255.255.255" ' "(addresses will not be anonymized)"
+        )
+
+
+def __validate_ipv6_mask(mask_packed):
+    # Test that mask only contains valid numbers
+    for byte in mask_packed:
+        if byte != b"\x00" and byte != b"\xff":
+            raise ValueError("ipv6_mask must only contain numbers 0 or ffff")
+
+    # Test that IP address does not get anonymized completely
+    if mask_packed == b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00":
+        raise ValueError(
+            "ipv6_mask cannot be set to "
+            '"0000:0000:0000:0000:0000:0000:0000:0000" (all '
+            "anonymized addresses will be 0.0.0.0)"
+        )
+
+    # Test that IP address is changed by anonymization
+    if mask_packed == b"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff":
+        raise ValueError(
+            "ipv6_mask cannot be set to "
+            '"ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff" '
+            "(addresses will not be anonymized)"
+        )
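
With the default masks this keeps the first three IPv4 octets and the first 48 bits of an IPv6 address. A quick usage sketch (addresses are illustrative):

    # Usage sketch for the helper above; the addresses are illustrative.
    from new_sentry_plugins.anonymizeip import anonymize_ip

    # Default IPv4 mask 255.255.255.0 zeroes the last octet.
    assert anonymize_ip("192.168.100.200") == "192.168.100.0"

    # Default IPv6 mask keeps the first three hextets and zeroes the rest.
    assert anonymize_ip("2001:db8:1234:5678::1") == "2001:db8:1234::"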

+ 8 - 0
src/new_sentry_plugins/asana/README.rst

@@ -0,0 +1,8 @@
+You'll have to create an application in Asana to get a client ID and secret. Use the following for the redirect URL::
+
+    <URL_TO_SENTRY>/account/settings/social/associate/complete/asana/
+
+Ensure you've configured Asana auth in Sentry::
+
+    ASANA_CLIENT_ID = 'Asana Client ID'
+    ASANA_CLIENT_SECRET = 'Asana Client Secret'

Some files were not shown because too many files changed in this diff