ref(nodestore): Remove option toggle (#16713)

These options were only used to facilitate a gradual rollout; we no
longer want to maintain a way to change the nodestore cache sample
rate or caching behaviour.
Lyn Nagara, 5 years ago
commit f8c75bee32
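
For context, the removed should_cache hook sampled node ids deterministically: it hashed the id with md5, mapped it into one of 1000 buckets, and cached only ids whose bucket fell below the nodedata.cache-sample-rate option. A minimal standalone sketch of that behaviour (adapted for Python 3, where md5 requires bytes; names and the assert usage are illustrative only):

    from hashlib import md5

    def should_cache(node_id, sample_rate):
        # Deterministic sampling: hash the id, map it to one of 1000
        # buckets, and cache only ids whose bucket falls below the rate.
        # A rate of 0.0 cached nothing; 1.0 cached every id.
        bucket = int(md5(node_id.encode("utf-8")).hexdigest(), 16) % 1000
        return bucket / 1000.0 < sample_rate

    # With the option removed, the code below behaves as if the rate were 1.0.
    assert should_cache("a" * 32, 1.0)
    assert not should_cache("a" * 32, 0.0)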

+ 5 - 17
src/sentry/nodestore/base.py

@@ -1,7 +1,6 @@
 from __future__ import absolute_import
 
 import six
-from hashlib import md5
 
 from base64 import b64encode
 from threading import local
@@ -96,22 +95,20 @@ class NodeStorage(local, Service):
         raise NotImplementedError
 
     def _get_cache_item(self, id):
-        if self.cache and self.should_cache(id):
+        if self.cache:
             return self.cache.get(id)
 
     def _get_cache_items(self, id_list):
-        cacheable_ids = [id for id in id_list if self.should_cache(id)]
-        if self.cache and self.sample_rate != 0.0:
-            return self.cache.get_many(cacheable_ids)
+        if self.cache:
+            return self.cache.get_many(id_list)
         return {}
 
     def _set_cache_item(self, id, data):
         if self.cache and data:
-            if self.should_cache(id):
-                self.cache.set(id, data)
+            self.cache.set(id, data)
 
     def _set_cache_items(self, items):
-        cacheable_items = {k: v for k, v in six.iteritems(items) if v and self.should_cache(k)}
+        cacheable_items = {k: v for k, v in six.iteritems(items) if v}
         if self.cache:
             self.cache.set_many(cacheable_items)
 
@@ -129,12 +126,3 @@ class NodeStorage(local, Service):
             return caches["nodedata"]
         except InvalidCacheBackendError:
             return None
-
-    @memoize
-    def sample_rate(self):
-        from sentry import options
-
-        return options.get("nodedata.cache-sample-rate", 0.0)
-
-    def should_cache(self, id):
-        return (int(md5(id).hexdigest(), 16) % 1000) / 1000.0 < self.sample_rate
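
Reassembled, the simplified helpers gate only on whether a cache backend is configured at all. A minimal sketch of the post-change behaviour, using a hypothetical in-memory DictCache in place of Django's "nodedata" cache backend (both class names below are illustrative, not part of the codebase):

    class DictCache(object):
        # Hypothetical in-memory stand-in for the Django "nodedata" cache.
        def __init__(self):
            self._data = {}

        def get(self, key):
            return self._data.get(key)

        def get_many(self, keys):
            return {k: self._data[k] for k in keys if k in self._data}

        def set(self, key, value):
            self._data[key] = value

        def set_many(self, mapping):
            self._data.update(mapping)

    class SimplifiedHelpers(object):
        # Sketch of the cache helpers after the sample-rate gate was removed.
        def __init__(self, cache=None):
            self.cache = cache

        def _get_cache_item(self, id):
            if self.cache:
                return self.cache.get(id)

        def _get_cache_items(self, id_list):
            if self.cache:
                return self.cache.get_many(id_list)
            return {}

        def _set_cache_item(self, id, data):
            # Every non-empty payload is cached; no md5 bucketing remains.
            if self.cache and data:
                self.cache.set(id, data)

    store = SimplifiedHelpers(cache=DictCache())
    store._set_cache_item("a" * 32, {"foo": "a"})
    assert store._get_cache_item("a" * 32) == {"foo": "a"}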

+ 1 - 4
src/sentry/nodestore/bigtable/backend.py

@@ -10,7 +10,6 @@ from google.cloud.bigtable.row_set import RowSet
 from simplejson import JSONEncoder, _default_decoder
 from django.utils import timezone
 
-from sentry import options
 from sentry.nodestore.base import NodeStorage
 
 
@@ -168,9 +167,7 @@ class BigtableNodeStorage(NodeStorage):
     def set(self, id, data, ttl=None):
         row = self.encode_row(id, data, ttl)
         row.commit()
-        cache_on_save = options.get("nodedata.cache-on-save")
-        if cache_on_save:
-            self._set_cache_item(id, data)
+        self._set_cache_item(id, data)
 
     def encode_row(self, id, data, ttl=None):
         data = json_dumps(data)

+ 1 - 4
src/sentry/nodestore/django/backend.py

@@ -4,7 +4,6 @@ import math
 
 from django.utils import timezone
 
-from sentry import options
 from sentry.db.models import create_or_update
 from sentry.nodestore.base import NodeStorage
 
@@ -44,9 +43,7 @@ class DjangoNodeStorage(NodeStorage):
 
     def set(self, id, data, ttl=None):
         create_or_update(Node, id=id, values={"data": data, "timestamp": timezone.now()})
-        cache_on_save = options.get("nodedata.cache-on-save")
-        if cache_on_save:
-            self._set_cache_item(id, data)
+        self._set_cache_item(id, data)
 
     def cleanup(self, cutoff_timestamp):
         from sentry.db.deletion import BulkDeleteQuery
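
With the nodedata.cache-on-save check gone, both the Bigtable and Django backends above now write through to the cache on every set(). A minimal sketch of that write-through pattern, with a plain dict standing in for the underlying storage and a hypothetical SimpleCache standing in for the nodedata cache (names are illustrative only):

    class SimpleCache(object):
        # Minimal cache stand-in with only the calls set() needs.
        def __init__(self):
            self._data = {}

        def set(self, key, value):
            self._data[key] = value

        def get(self, key):
            return self._data.get(key)

    class WriteThroughStore(object):
        # Sketch of the simplified set() path shared by both backends.
        def __init__(self, cache=None):
            self.cache = cache
            self.rows = {}  # stand-in for Bigtable rows / the Node table

        def _set_cache_item(self, id, data):
            if self.cache and data:
                self.cache.set(id, data)

        def set(self, id, data, ttl=None):
            # Persist first, then warm the cache unconditionally --
            # there is no longer a cache-on-save option to consult.
            self.rows[id] = data
            self._set_cache_item(id, data)

    store = WriteThroughStore(cache=SimpleCache())
    store.set("a" * 32, {"foo": "a"})
    assert store.rows["a" * 32] == {"foo": "a"}
    assert store.cache.get("a" * 32) == {"foo": "a"}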

+ 52 - 56
tests/sentry/nodestore/bigtable/backend/tests.py

@@ -53,60 +53,56 @@ class BigtableNodeStorageTest(TestCase):
         assert not self.ns.get(nodes[1][0])
 
     def test_cache(self):
-        with self.options({"nodedata.cache-sample-rate": 1.0, "nodedata.cache-on-save": True}):
-            node_1 = ("a" * 32, {"foo": "a"})
-            node_2 = ("b" * 32, {"foo": "b"})
-            node_3 = ("c" * 32, {"foo": "c"})
-
-            for node_id, data in [node_1, node_2, node_3]:
-                self.ns.set(node_id, data)
-
-            # Get / get multi populates cache
-            assert self.ns.get(node_1[0]) == node_1[1]
-            assert self.ns.get_multi([node_2[0], node_3[0]]) == {
-                node_2[0]: node_2[1],
-                node_3[0]: node_3[1],
-            }
-            with mock.patch.object(self.ns.connection, "read_row") as mock_read_row:
-                assert self.ns.get(node_1[0]) == node_1[1]
-                assert self.ns.get(node_2[0]) == node_2[1]
-                assert self.ns.get(node_3[0]) == node_3[1]
-                assert mock_read_row.call_count == 0
-
-            with mock.patch.object(self.ns.connection, "read_rows") as mock_read_rows:
-                assert self.ns.get_multi([node_1[0], node_2[0], node_3[0]])
-                assert mock_read_rows.call_count == 0
-
-            # Manually deleted item should still be retrievable from cache
-            row = self.ns.connection.row(node_1[0])
-            row.delete()
-            row.commit()
+        node_1 = ("a" * 32, {"foo": "a"})
+        node_2 = ("b" * 32, {"foo": "b"})
+        node_3 = ("c" * 32, {"foo": "c"})
+
+        for node_id, data in [node_1, node_2, node_3]:
+            self.ns.set(node_id, data)
+
+        # Get / get multi populates cache
+        assert self.ns.get(node_1[0]) == node_1[1]
+        assert self.ns.get_multi([node_2[0], node_3[0]]) == {
+            node_2[0]: node_2[1],
+            node_3[0]: node_3[1],
+        }
+        with mock.patch.object(self.ns.connection, "read_row") as mock_read_row:
             assert self.ns.get(node_1[0]) == node_1[1]
-            assert self.ns.get_multi([node_1[0], node_2[0]]) == {
-                node_1[0]: node_1[1],
-                node_2[0]: node_2[1],
-            }
-
-            # Deletion clears cache
-            self.ns.delete(node_1[0])
-            assert self.ns.get_multi([node_1[0], node_2[0]]) == {
-                node_1[0]: None,
-                node_2[0]: node_2[1],
-            }
-            self.ns.delete_multi([node_1[0], node_2[0]])
-            assert self.ns.get_multi([node_1[0], node_2[0]]) == {node_1[0]: None, node_2[0]: None}
-
-            # Setting the item updates cache
-            new_value = {"event_id": "d" * 32}
-            self.ns.set(node_1[0], new_value)
-            with mock.patch.object(self.ns.connection, "read_row") as mock_read_row:
-                assert self.ns.get(node_1[0]) == new_value
-                assert mock_read_row.call_count == 0
-
-            # Missing rows are never cached
-            assert self.ns.get("node_4") is None
-            with mock.patch.object(self.ns.connection, "read_row") as mock_read_row:
-                mock_read_row.return_value = None
-                self.ns.get("node_4")
-                self.ns.get("node_4")
-                assert mock_read_row.call_count == 2
+            assert self.ns.get(node_2[0]) == node_2[1]
+            assert self.ns.get(node_3[0]) == node_3[1]
+            assert mock_read_row.call_count == 0
+
+        with mock.patch.object(self.ns.connection, "read_rows") as mock_read_rows:
+            assert self.ns.get_multi([node_1[0], node_2[0], node_3[0]])
+            assert mock_read_rows.call_count == 0
+
+        # Manually deleted item should still be retrievable from cache
+        row = self.ns.connection.row(node_1[0])
+        row.delete()
+        row.commit()
+        assert self.ns.get(node_1[0]) == node_1[1]
+        assert self.ns.get_multi([node_1[0], node_2[0]]) == {
+            node_1[0]: node_1[1],
+            node_2[0]: node_2[1],
+        }
+
+        # Deletion clears cache
+        self.ns.delete(node_1[0])
+        assert self.ns.get_multi([node_1[0], node_2[0]]) == {node_1[0]: None, node_2[0]: node_2[1]}
+        self.ns.delete_multi([node_1[0], node_2[0]])
+        assert self.ns.get_multi([node_1[0], node_2[0]]) == {node_1[0]: None, node_2[0]: None}
+
+        # Setting the item updates cache
+        new_value = {"event_id": "d" * 32}
+        self.ns.set(node_1[0], new_value)
+        with mock.patch.object(self.ns.connection, "read_row") as mock_read_row:
+            assert self.ns.get(node_1[0]) == new_value
+            assert mock_read_row.call_count == 0
+
+        # Missing rows are never cached
+        assert self.ns.get("node_4") is None
+        with mock.patch.object(self.ns.connection, "read_row") as mock_read_row:
+            mock_read_row.return_value = None
+            self.ns.get("node_4")
+            self.ns.get("node_4")
+            assert mock_read_row.call_count == 2

+ 50 - 51
tests/sentry/nodestore/django/backend/tests.py

@@ -80,55 +80,54 @@ class DjangoNodeStorageTest(TestCase):
         assert not Node.objects.filter(id=node2.id).exists()
 
     def test_cache(self):
-        with self.options({"nodedata.cache-sample-rate": 1.0, "nodedata.cache-on-save": True}):
-            node_1 = ("a" * 32, {"foo": "a"})
-            node_2 = ("b" * 32, {"foo": "b"})
-            node_3 = ("c" * 32, {"foo": "c"})
-
-            for node_id, data in [node_1, node_2, node_3]:
-                Node.objects.create(id=node_id, data=data)
-
-            # Get / get multi populates cache
-            assert self.ns.get(node_1[0]) == node_1[1]
-            assert self.ns.get_multi([node_2[0], node_3[0]]) == {
-                node_2[0]: node_2[1],
-                node_3[0]: node_3[1],
-            }
-            with mock.patch.object(Node.objects, "get") as mock_get:
-                assert self.ns.get(node_1[0]) == node_1[1]
-                assert self.ns.get(node_2[0]) == node_2[1]
-                assert self.ns.get(node_3[0]) == node_3[1]
-                assert mock_get.call_count == 0
-
-            with mock.patch.object(Node.objects, "filter") as mock_filter:
-                assert self.ns.get_multi([node_1[0], node_2[0], node_3[0]])
-                assert mock_filter.call_count == 0
-
-            # Manually deleted item should still be retrievable from cache
-            Node.objects.get(id=node_1[0]).delete()
+        node_1 = ("a" * 32, {"foo": "a"})
+        node_2 = ("b" * 32, {"foo": "b"})
+        node_3 = ("c" * 32, {"foo": "c"})
+
+        for node_id, data in [node_1, node_2, node_3]:
+            Node.objects.create(id=node_id, data=data)
+
+        # Get / get multi populates cache
+        assert self.ns.get(node_1[0]) == node_1[1]
+        assert self.ns.get_multi([node_2[0], node_3[0]]) == {
+            node_2[0]: node_2[1],
+            node_3[0]: node_3[1],
+        }
+        with mock.patch.object(Node.objects, "get") as mock_get:
             assert self.ns.get(node_1[0]) == node_1[1]
-            assert self.ns.get_multi([node_1[0], node_2[0]]) == {
-                node_1[0]: node_1[1],
-                node_2[0]: node_2[1],
-            }
-
-            # Deletion clears cache
-            self.ns.delete(node_1[0])
-            assert self.ns.get_multi([node_1[0], node_2[0]]) == {node_2[0]: node_2[1]}
-            self.ns.delete_multi([node_1[0], node_2[0]])
-            assert self.ns.get_multi([node_1[0], node_2[0]]) == {}
-
-            # Setting the item updates cache
-            new_value = {"event_id": "d" * 32}
-            self.ns.set(node_1[0], new_value)
-            with mock.patch.object(Node.objects, "get") as mock_get:
-                assert self.ns.get(node_1[0]) == new_value
-                assert mock_get.call_count == 0
-
-            # Missing rows are never cached
-            assert self.ns.get("node_4") is None
-            with mock.patch.object(Node.objects, "get") as mock_get:
-                mock_get.side_effect = Node.DoesNotExist
-                self.ns.get("node_4")
-                self.ns.get("node_4")
-                assert mock_get.call_count == 2
+            assert self.ns.get(node_2[0]) == node_2[1]
+            assert self.ns.get(node_3[0]) == node_3[1]
+            assert mock_get.call_count == 0
+
+        with mock.patch.object(Node.objects, "filter") as mock_filter:
+            assert self.ns.get_multi([node_1[0], node_2[0], node_3[0]])
+            assert mock_filter.call_count == 0
+
+        # Manually deleted item should still be retrievable from cache
+        Node.objects.get(id=node_1[0]).delete()
+        assert self.ns.get(node_1[0]) == node_1[1]
+        assert self.ns.get_multi([node_1[0], node_2[0]]) == {
+            node_1[0]: node_1[1],
+            node_2[0]: node_2[1],
+        }
+
+        # Deletion clears cache
+        self.ns.delete(node_1[0])
+        assert self.ns.get_multi([node_1[0], node_2[0]]) == {node_2[0]: node_2[1]}
+        self.ns.delete_multi([node_1[0], node_2[0]])
+        assert self.ns.get_multi([node_1[0], node_2[0]]) == {}
+
+        # Setting the item updates cache
+        new_value = {"event_id": "d" * 32}
+        self.ns.set(node_1[0], new_value)
+        with mock.patch.object(Node.objects, "get") as mock_get:
+            assert self.ns.get(node_1[0]) == new_value
+            assert mock_get.call_count == 0
+
+        # Missing rows are never cached
+        assert self.ns.get("node_4") is None
+        with mock.patch.object(Node.objects, "get") as mock_get:
+            mock_get.side_effect = Node.DoesNotExist
+            self.ns.get("node_4")
+            self.ns.get("node_4")
+            assert mock_get.call_count == 2