
ref: first pass at removing some dead code in tests (#82792)

I only got through roughly 30% of the test files -- I'll follow up with
the rest.

some explanation of the patterns (a short sketch follows this list):
- deleted function -> nothing called it
- `raise SomeType` -> `raise AssertionError` -- `AssertionError` is
excluded from coverage
- `pass` -> `raise NotImplementedError` -- these functions are never
called; they're only used for their shape / name, and
`NotImplementedError` is excluded from coverage
- `try: some_test_code except: fail(...)` -> no need for the `try`; an
uncaught exception already fails the test
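
For reference, a minimal sketch of the before/after shape of these patterns. The names here (`compute_thing`, `on_signal`, `pick_model`) are made up for illustration and are not from the Sentry test suite; the coverage exclusions for `AssertionError` / `NotImplementedError` are assumed to come from the repo's coverage configuration:

```python
# hypothetical stand-in for real test setup / code under test
def compute_thing() -> int:
    return 42


# `pass` -> `raise NotImplementedError`: the function exists only for its
# shape / name and is never called; NotImplementedError is excluded from
# coverage, so the body no longer counts as an uncovered line.
def on_signal(sender, **kwargs) -> None:
    raise NotImplementedError


# `raise SomeType` -> `raise AssertionError`: the branch is unreachable in a
# passing test run, and AssertionError is excluded from coverage.
def pick_model(param: str) -> str:
    if param == "organization":
        return "org"
    elif param == "project":
        return "proj"
    else:
        raise AssertionError(param)


# `try: ... except: fail(...)` -> plain call: an uncaught exception already
# fails the test, so the wrapper adds nothing.
def test_compute_thing_does_not_raise() -> None:
    compute_thing()  # should not raise
```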

anthony sottile · 2 months ago · commit 777bb39118

+ 1 - 4
tests/sentry/sentry_metrics/consumers/test_slicing_router.py

@@ -169,10 +169,7 @@ def test_validate_slicing_consumer_config(monkeypatch) -> None:
         {"bootstrap.servers": "127.0.0.1:9092"},
     )
 
-    try:
-        _validate_slicing_consumer_config("generic_metrics")
-    except SlicingConfigurationException as e:
-        assert False, f"Should not raise exception: {e}"
+    _validate_slicing_consumer_config("generic_metrics")  # should not raise
 
 
 def test_validate_slicing_config(monkeypatch) -> None:

+ 1 - 9
tests/sentry/sentry_metrics/test_postgres_indexer.py

@@ -1,7 +1,5 @@
-from collections.abc import Mapping
-
 from sentry.sentry_metrics.configuration import UseCaseKey
-from sentry.sentry_metrics.indexer.base import FetchType, Metadata, UseCaseKeyCollection
+from sentry.sentry_metrics.indexer.base import UseCaseKeyCollection
 from sentry.sentry_metrics.indexer.cache import CachingIndexer
 from sentry.sentry_metrics.indexer.postgres.postgres_v2 import PGStringIndexerV2, indexer_cache
 from sentry.sentry_metrics.use_case_id_registry import UseCaseID
@@ -9,12 +7,6 @@ from sentry.testutils.cases import TestCase
 from sentry.utils.cache import cache
 
 
-def assert_fetch_type_for_tag_string_set(
-    meta: Mapping[str, Metadata], fetch_type: FetchType, str_set: set[str]
-):
-    assert all([meta[string].fetch_type == fetch_type for string in str_set])
-
-
 class PostgresIndexerV2Test(TestCase):
     def setUp(self) -> None:
         self.strings = {"hello", "hey", "hi"}

+ 0 - 4
tests/sentry/sentry_metrics/test_snuba.py

@@ -23,10 +23,6 @@ class SnubaMetricsInterfaceTest(MetricsInterfaceTestCase):
     This test is also very similar to those in the Metrics Layer.
     """
 
-    @property
-    def now(self):
-        return BaseMetricsLayerTestCase.MOCK_DATETIME
-
     def test_count_query(self):
         generic_metrics_backend.distribution(
             self.use_case_id,

+ 1 - 1
tests/sentry/sentry_metrics/test_strings.py

@@ -118,7 +118,7 @@ def test_shared_mri_string_range(mri, id):
             "metric_stats": (800, 899),
         }[parsed_mri.namespace]
     except KeyError:
-        raise Exception(f"Unknown namespace: {parsed_mri.namespace}")
+        raise AssertionError(f"Unknown namespace: {parsed_mri.namespace}")
 
     start += PREFIX
     end += PREFIX

+ 1 - 7
tests/sentry/snuba/metrics/test_metrics_layer/test_metrics_enhanced_performance.py

@@ -530,7 +530,7 @@ class PerformanceMetricsLayerTestCase(BaseMetricsLayerTestCase, TestCase):
             )
 
         with pytest.raises(InvalidParams):
-            metrics_query = self.build_metrics_query(
+            self.build_metrics_query(
                 before_now="1h",
                 granularity="1h",
                 select=[
@@ -550,12 +550,6 @@ class PerformanceMetricsLayerTestCase(BaseMetricsLayerTestCase, TestCase):
                 offset=Offset(offset=0),
                 include_series=False,
             )
-            get_series(
-                [self.project],
-                metrics_query=metrics_query,
-                include_meta=True,
-                use_case_id=UseCaseID.TRANSACTIONS,
-            )
 
     def test_query_with_sum_if_column(self):
         for value, transaction in ((10, "/foo"), (20, "/bar"), (30, "/lorem")):

+ 1 - 5
tests/sentry/snuba/metrics/test_utils.py

@@ -162,11 +162,7 @@ test_get_num_intervals_cases = [
     ids=[x[5] for x in test_get_num_intervals_cases],
 )
 def test_get_num_intervals(start, end, granularity, interval, expected, test_message):
-    if start is not None:
-        start_date = datetime.fromisoformat(start)
-    else:
-        start_date = None
-
+    start_date = datetime.fromisoformat(start)
     end_date = datetime.fromisoformat(end)
 
     actual = get_num_intervals(

+ 0 - 10
tests/sentry/tasks/test_relay.py

@@ -7,7 +7,6 @@ from django.db import router, transaction
 
 from sentry.db.postgres.transactions import in_test_hide_transaction_boundary
 from sentry.models.options.project_option import ProjectOption
-from sentry.models.project import Project
 from sentry.models.projectkey import ProjectKey, ProjectKeyStatus
 from sentry.relay.projectconfig_cache.redis import RedisProjectConfigCache
 from sentry.relay.projectconfig_debounce_cache.redis import RedisProjectConfigDebounceCache
@@ -28,15 +27,6 @@ def _cache_keys_for_project(project):
         yield key.public_key
 
 
-def _cache_keys_for_org(org):
-    # The `ProjectKey` model doesn't have any attribute we can use to filter by
-    # org, and the `Project` model doesn't have a project key exposed. So using
-    # the org we fetch the project, and then the project key.
-    for proj in Project.objects.filter(organization_id=org.id):
-        for key in ProjectKey.objects.filter(project_id=proj.id):
-            yield key.public_key
-
-
 @pytest.fixture(autouse=True)
 def disable_auto_on_commit():
     simulated_transaction_watermarks.state["default"] = -1

+ 1 - 1
tests/sentry/tasks/test_relocation.py

@@ -2192,7 +2192,7 @@ class PostprocessingTest(RelocationTaskTestCase):
 
     @staticmethod
     def noop_relocated_signal_receiver(sender, **kwargs) -> None:
-        pass
+        raise NotImplementedError
 
     def test_success(
         self,

+ 2 - 2
tests/sentry/tasks/test_reprocessing2.py

@@ -351,7 +351,7 @@ def test_max_events(
                     != group_id
                 )
             else:
-                raise ValueError(remaining_events)
+                raise AssertionError(remaining_events)
         else:
             assert event is not None
             assert event.group_id != group_id
@@ -367,7 +367,7 @@ def test_max_events(
         assert event.group is not None
         assert event.group.times_seen == 5
     else:
-        raise ValueError(remaining_events)
+        raise AssertionError(remaining_events)
 
     assert is_group_finished(group_id)
 

+ 2 - 8
tests/sentry/tasks/test_store.py

@@ -87,12 +87,6 @@ def mock_refund():
         yield m
 
 
-@pytest.fixture
-def mock_metrics_timing():
-    with mock.patch("sentry.tasks.store.metrics.timing") as m:
-        yield m
-
-
 @django_db_all
 def test_move_to_process_event(
     default_project, mock_process_event, mock_save_event, mock_symbolicate_event, register_plugin
@@ -242,7 +236,7 @@ def options_model(request, default_organization, default_project):
     elif request.param == "project":
         return default_project
     else:
-        raise ValueError(request.param)
+        raise AssertionError(request.param)
 
 
 @django_db_all
@@ -278,7 +272,7 @@ def test_scrubbing_after_processing(
             "sentry:relay_pii_config", '{"applications": {"extra.ooo": ["@anything:replace"]}}'
         )
     else:
-        raise ValueError(setting_method)
+        raise AssertionError(setting_method)
 
     data = {
         "project": default_project.id,

Some files were not shown because too many files changed in this diff