Browse Source

feat(alerts): add modified event user frequency condition (#78886)

Adds a special condition for evaluating alerts, ensuring that every
event counted for the unique-user query matches the tag filters
specified in the tag conditions on the alert.

This is a "special" alert condition that will be enabled for select
organizations; it is not meant to be a robust permanent feature, but
rather a quick hack.


This works by creating a new condition
`EventUniqueUserFrequencyConditionWithConditions` (a mouthful, I know.
open to naming suggestions).

This condition looks at all of the tag conditions on the alert and
applies them to the generated UserFrequency query. This ensures that if
a customer has a tag filter like "region = EU" on the alert, only users
whose error events carry the EU tag are counted.
Josh Ferge 4 months ago
parent
commit
97e7aed59f

+ 1 - 0
src/sentry/constants.py

@@ -272,6 +272,7 @@ _SENTRY_RULES = (
     "sentry.rules.conditions.tagged_event.TaggedEventCondition",
     "sentry.rules.conditions.event_frequency.EventFrequencyCondition",
     "sentry.rules.conditions.event_frequency.EventUniqueUserFrequencyCondition",
+    "sentry.rules.conditions.event_frequency.EventUniqueUserFrequencyConditionWithConditions",
     "sentry.rules.conditions.event_frequency.EventFrequencyPercentCondition",
     "sentry.rules.conditions.event_attribute.EventAttributeCondition",
     "sentry.rules.conditions.level.LevelCondition",

+ 1 - 1
src/sentry/features/temporary.py

@@ -529,7 +529,7 @@ def register_temporary_features(manager: FeatureManager):
     # Enabled unresolved issue webhook for organization
     manager.add("organizations:webhooks-unresolved", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=True)
     # Enable EventUniqueUserFrequencyConditionWithConditions special alert condition
-    manager.add("organizations:event-unique-user-frequency-condition-with-conditions", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
+    manager.add("organizations:event-unique-user-frequency-condition-with-conditions", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
     # Use spans instead of transactions for dynamic sampling calculations. This will become the new default.
     manager.add("organizations:dynamic-sampling-spans", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
     # NOTE: Don't add features down here! Add them to their specific group and sort

+ 202 - 1
src/sentry/rules/conditions/event_frequency.py

@@ -13,12 +13,14 @@ from django.core.cache import cache
 from django.db.models import QuerySet
 from django.db.models.enums import TextChoices
 from django.utils import timezone
+from snuba_sdk import Op
 
-from sentry import release_health, tsdb
+from sentry import features, release_health, tsdb
 from sentry.eventstore.models import GroupEvent
 from sentry.issues.constants import get_issue_tsdb_group_model, get_issue_tsdb_user_group_model
 from sentry.issues.grouptype import GroupCategory, get_group_type_by_type_id
 from sentry.models.group import Group
+from sentry.models.project import Project
 from sentry.rules import EventState
 from sentry.rules.conditions.base import EventCondition, GenericCondition
 from sentry.tsdb.base import TSDBModel
@@ -533,6 +535,205 @@ class EventUniqueUserFrequencyCondition(BaseEventFrequencyCondition):
         return "uniq", "user"
 
 
+class EventUniqueUserFrequencyConditionWithConditions(EventUniqueUserFrequencyCondition):
+    """Unique-user frequency condition that also applies the rule's tag filters.
+
+    Unlike the parent condition, the distinct-user count query is narrowed by
+    every TaggedEventFilter on the rule, so only events matching those tag
+    filters contribute to the count. Gated behind the
+    "organizations:event-unique-user-frequency-condition-with-conditions"
+    feature flag and only supported for filter_match == "all" (OR semantics
+    cannot be expressed as a single conjunctive query).
+    """
+
+    id = "sentry.rules.conditions.event_frequency.EventUniqueUserFrequencyConditionWithConditions"
+
+    def query_hook(
+        self, event: GroupEvent, start: datetime, end: datetime, environment_id: int
+    ) -> int:
+        """Return the distinct-user count for the event's group over [start, end],
+        restricted to events matching the rule's tag filters.
+
+        Raises NotImplementedError when the feature flag is off for the
+        organization, or when the rule uses filter_match == "any".
+        """
+        assert self.rule
+        if not features.has(
+            "organizations:event-unique-user-frequency-condition-with-conditions",
+            Project.objects.get(id=self.rule.project_id).organization,
+        ):
+            raise NotImplementedError(
+                "EventUniqueUserFrequencyConditionWithConditions is not enabled for this organization"
+            )
+        # NOTE(review): assumes "filter_match" is always present in rule.data -- confirm.
+        if self.rule.data["filter_match"] == "any":
+            raise NotImplementedError(
+                "EventUniqueUserFrequencyConditionWithConditions does not support filter_match == any"
+            )
+
+        conditions = []
+
+        # Translate the rule's other conditions (tag filters) into Snuba
+        # conditions; anything that is not a TaggedEventFilter is dropped.
+        for condition in self.rule.data["conditions"]:
+            if condition["id"] == self.id:
+                continue
+
+            snuba_condition = self.convert_rule_condition_to_snuba_condition(condition)
+            if snuba_condition:
+                conditions.append(snuba_condition)
+
+        total = self.get_chunked_result(
+            tsdb_function=self.tsdb.get_distinct_counts_totals_with_conditions,
+            model=get_issue_tsdb_user_group_model(GroupCategory.ERROR),
+            organization_id=event.group.project.organization_id,
+            group_ids=[event.group.id],
+            start=start,
+            end=end,
+            environment_id=environment_id,
+            referrer_suffix="batch_alert_event_uniq_user_frequency",
+            conditions=conditions,
+        )
+        return total[event.group.id]
+
+    def batch_query_hook(
+        self, group_ids: set[int], start: datetime, end: datetime, environment_id: int
+    ) -> dict[int, int]:
+        """Bulk variant used by delayed processing: return a per-group mapping of
+        distinct-user counts, restricted to events matching the rule's tag filters.
+
+        Error and generic (performance) issues are queried against separate
+        tsdb models and the results are merged into a single mapping.
+        """
+        assert self.rule
+        if not features.has(
+            "organizations:event-unique-user-frequency-condition-with-conditions",
+            self.rule.project.organization,
+        ):
+            raise NotImplementedError(
+                "EventUniqueUserFrequencyConditionWithConditions is not enabled for this organization"
+            )
+
+        if self.rule.data["filter_match"] == "any":
+            raise NotImplementedError(
+                "EventUniqueUserFrequencyConditionWithConditions does not support filter_match == any"
+            )
+        batch_totals: dict[int, int] = defaultdict(int)
+        groups = Group.objects.filter(id__in=group_ids).values(
+            "id", "type", "project_id", "project__organization_id"
+        )
+        error_issue_ids, generic_issue_ids = self.get_error_and_generic_group_ids(groups)
+        organization_id = self.get_value_from_groups(groups, "project__organization_id")
+
+        conditions = []
+
+        # Same tag-filter translation as query_hook; non-tag conditions are dropped.
+        for condition in self.rule.data["conditions"]:
+            if condition["id"] == self.id:
+                continue
+
+            snuba_condition = self.convert_rule_condition_to_snuba_condition(condition)
+            if snuba_condition:
+                conditions.append(snuba_condition)
+
+        if error_issue_ids and organization_id:
+            error_totals = self.get_chunked_result(
+                tsdb_function=self.tsdb.get_distinct_counts_totals_with_conditions,
+                model=get_issue_tsdb_user_group_model(GroupCategory.ERROR),
+                group_ids=error_issue_ids,
+                organization_id=organization_id,
+                start=start,
+                end=end,
+                environment_id=environment_id,
+                referrer_suffix="batch_alert_event_uniq_user_frequency",
+                conditions=conditions,
+            )
+            batch_totals.update(error_totals)
+
+        if generic_issue_ids and organization_id:
+            # NOTE(review): variable name "error_totals" is reused here for
+            # generic (performance) issue totals.
+            error_totals = self.get_chunked_result(
+                tsdb_function=self.tsdb.get_distinct_counts_totals_with_conditions,
+                model=get_issue_tsdb_user_group_model(GroupCategory.PERFORMANCE),
+                group_ids=generic_issue_ids,
+                organization_id=organization_id,
+                start=start,
+                end=end,
+                environment_id=environment_id,
+                referrer_suffix="batch_alert_event_uniq_user_frequency",
+                conditions=conditions,
+            )
+            batch_totals.update(error_totals)
+
+        return batch_totals
+
+    def get_snuba_query_result(
+        self,
+        tsdb_function: Callable[..., Any],
+        keys: list[int],
+        group_id: int,
+        organization_id: int,
+        model: TSDBModel,
+        start: datetime,
+        end: datetime,
+        environment_id: int,
+        referrer_suffix: str,
+        conditions: list[tuple[str, str, str | None]] | None = None,
+    ) -> Mapping[int, int]:
+        """Execute a single tsdb query for *keys*, forwarding the extra Snuba
+        *conditions*; *group_id* is only used as the cache jitter value.
+        """
+        result: Mapping[int, int] = tsdb_function(
+            model=model,
+            keys=keys,
+            start=start,
+            end=end,
+            environment_id=environment_id,
+            use_cache=True,
+            jitter_value=group_id,
+            tenant_ids={"organization_id": organization_id},
+            referrer_suffix=referrer_suffix,
+            conditions=conditions,
+        )
+        return result
+
+    def get_chunked_result(
+        self,
+        tsdb_function: Callable[..., Any],
+        model: TSDBModel,
+        group_ids: list[int],
+        organization_id: int,
+        start: datetime,
+        end: datetime,
+        environment_id: int,
+        referrer_suffix: str,
+        conditions: list[tuple[str, str, str | None]] | None = None,
+    ) -> dict[int, int]:
+        """Split *group_ids* into SNUBA_LIMIT-sized chunks, query each chunk, and
+        merge the per-group totals into one dict.
+        """
+        batch_totals: dict[int, int] = defaultdict(int)
+        # Use the first group id as a stable cache-jitter value for all chunks.
+        group_id = group_ids[0]
+        for group_chunk in chunked(group_ids, SNUBA_LIMIT):
+            result = self.get_snuba_query_result(
+                tsdb_function=tsdb_function,
+                model=model,
+                keys=[group_id for group_id in group_chunk],
+                group_id=group_id,
+                organization_id=organization_id,
+                start=start,
+                end=end,
+                environment_id=environment_id,
+                referrer_suffix=referrer_suffix,
+                conditions=conditions,
+            )
+            batch_totals.update(result)
+        return batch_totals
+
+    @staticmethod
+    def convert_rule_condition_to_snuba_condition(
+        condition: dict[str, Any]
+    ) -> tuple[str, str, str | None] | None:
+        """Translate a TaggedEventFilter rule condition into a Snuba
+        (lhs, operator, rhs) condition tuple.
+
+        Returns None for any other condition type. Substring matches are
+        expressed with SQL LIKE wildcards; for the "is"/"ns" (set / not set)
+        matches the rhs is None.
+
+        Raises ValueError on an unrecognized match type.
+        """
+        if condition["id"] != "sentry.rules.filters.tagged_event.TaggedEventFilter":
+            return None
+        lhs = f"tags[{condition['key']}]"
+        rhs = condition["value"]
+        match condition["match"]:
+            case "eq":
+                operator = Op.EQ
+            case "ne":
+                operator = Op.NEQ
+            case "sw":
+                operator = Op.LIKE
+                rhs = f"{rhs}%"
+            case "ew":
+                operator = Op.LIKE
+                rhs = f"%{rhs}"
+            case "co":
+                operator = Op.LIKE
+                rhs = f"%{rhs}%"
+            case "nc":
+                operator = Op.NOT_LIKE
+                rhs = f"%{rhs}%"
+            case "is":
+                operator = Op.IS_NOT_NULL
+                rhs = None
+            case "ns":
+                operator = Op.IS_NULL
+                rhs = None
+            case _:
+                raise ValueError(f"Unsupported match type: {condition['match']}")
+
+        return (lhs, operator.value, rhs)
+
+
 PERCENT_INTERVALS: dict[str, tuple[str, timedelta]] = {
     "1m": ("1 minute", timedelta(minutes=1)),
     "5m": ("5 minutes", timedelta(minutes=5)),

+ 15 - 4
src/sentry/rules/processing/delayed_processing.py

@@ -70,9 +70,12 @@ class UniqueConditionQuery(NamedTuple):
 class DataAndGroups(NamedTuple):
     data: EventFrequencyConditionData
     group_ids: set[int]
+    rule_id: int | None = None
 
     def __repr__(self):
-        return f"<DataAndGroups data: {self.data} group_ids: {self.group_ids}>"
+        return (
+            f"<DataAndGroups data: {self.data} group_ids: {self.group_ids} rule_id: {self.rule_id}>"
+        )
 
 
 def fetch_project(project_id: int) -> Project | None:
@@ -176,7 +179,7 @@ def get_condition_query_groups(
                     data_and_groups.group_ids.update(rules_to_groups[rule.id])
                 else:
                     condition_groups[condition_query] = DataAndGroups(
-                        condition_data, set(rules_to_groups[rule.id])
+                        condition_data, set(rules_to_groups[rule.id]), rule.id
                     )
     return condition_groups
 
@@ -305,7 +308,7 @@ def get_condition_group_results(
     current_time = datetime.now(tz=timezone.utc)
     project_id = project.id
 
-    for unique_condition, (condition_data, group_ids) in condition_groups.items():
+    for unique_condition, (condition_data, group_ids, rule_id) in condition_groups.items():
         cls_id = unique_condition.cls_id
         condition_cls = rules.get(cls_id)
         if condition_cls is None:
@@ -316,7 +319,15 @@ def get_condition_group_results(
             )
             continue
 
-        condition_inst = condition_cls(project=project, data=condition_data)  # type: ignore[arg-type]
+        if rule_id:
+            rule = Rule.objects.get(id=rule_id)
+        else:
+            rule = None
+
+        condition_inst = condition_cls(
+            project=project, data=condition_data, rule=rule  # type: ignore[arg-type]
+        )
+
         if not isinstance(condition_inst, BaseEventFrequencyCondition):
             logger.warning("Unregistered condition %r", cls_id, extra={"project_id": project_id})
             continue

+ 1 - 1
src/sentry/tsdb/base.py

@@ -567,7 +567,7 @@ class BaseTSDB(Service):
         jitter_value: int | None = None,
         tenant_ids: dict[str, int | str] | None = None,
         referrer_suffix: str | None = None,
-        conditions: list[dict[str, Any]] | None = None,
+        conditions: list[tuple[str, str, str]] | None = None,
     ) -> dict[int, Any]:
         """
         Count distinct items during a time range with conditions.

+ 1 - 1
src/sentry/tsdb/snuba.py

@@ -812,7 +812,7 @@ class SnubaTSDB(BaseTSDB):
         jitter_value: int | None = None,
         tenant_ids: dict[str, int | str] | None = None,
         referrer_suffix: str | None = None,
-        conditions: list[dict[str, Any]] | None = None,
+        conditions: list[tuple[str, str, str]] | None = None,
     ) -> dict[int, Any]:
         """
         Count distinct items during a time range with conditions.

+ 1 - 1
tests/sentry/api/endpoints/test_project_agnostic_rule_conditions.py

@@ -11,4 +11,4 @@ class ProjectAgnosticRuleConditionsTest(APITestCase):
         response = self.client.get(url, format="json")
 
         assert response.status_code == 200, response.content
-        assert len(response.data) == 12
+        assert len(response.data) == 13

+ 6 - 9
tests/sentry/api/endpoints/test_project_rules_configuration.py

@@ -4,7 +4,6 @@ from sentry.constants import TICKET_ACTIONS
 from sentry.integrations.github_enterprise.actions import GitHubEnterpriseCreateTicketAction
 from sentry.rules import MatchType
 from sentry.rules import rules as default_rules
-from sentry.rules.filters.issue_category import IssueCategoryFilter
 from sentry.rules.registry import RuleRegistry
 from sentry.testutils.cases import APITestCase
 
@@ -33,7 +32,7 @@ class ProjectRuleConfigurationTest(APITestCase):
 
         response = self.get_success_response(self.organization.slug, project1.slug)
         assert len(response.data["actions"]) == 12
-        assert len(response.data["conditions"]) == 9
+        assert len(response.data["conditions"]) == 10
         assert len(response.data["filters"]) == 9
 
     @property
@@ -148,7 +147,7 @@ class ProjectRuleConfigurationTest(APITestCase):
                 "service": {"type": "choice", "choices": [[sentry_app.slug, sentry_app.name]]}
             },
         } in response.data["actions"]
-        assert len(response.data["conditions"]) == 9
+        assert len(response.data["conditions"]) == 10
         assert len(response.data["filters"]) == 9
 
     @patch("sentry.sentry_apps.components.SentryAppComponentPreparer.run")
@@ -179,19 +178,17 @@ class ProjectRuleConfigurationTest(APITestCase):
             "formFields": settings_schema["settings"],
             "sentryAppInstallationUuid": str(install.uuid),
         } in response.data["actions"]
-        assert len(response.data["conditions"]) == 9
+        assert len(response.data["conditions"]) == 10
         assert len(response.data["filters"]) == 9
 
     def test_issue_type_and_category_filter_feature(self):
         response = self.get_success_response(self.organization.slug, self.project.slug)
         assert len(response.data["actions"]) == 12
-        assert len(response.data["conditions"]) == 9
-        assert len(response.data["filters"]) == 9
 
-        filter_ids = {f["id"] for f in response.data["filters"]}
-        assert IssueCategoryFilter.id in filter_ids
+        assert len(response.data["conditions"]) == 10
+        assert len(response.data["filters"]) == 9
+        assert len(response.data["conditions"]) == 10
 
-    def test_is_in_feature(self):
         response = self.get_success_response(self.organization.slug, self.project.slug)
         tagged_event_filter = next(
             (

+ 146 - 3
tests/sentry/rules/processing/test_delayed_processing.py

@@ -37,10 +37,11 @@ from sentry.rules.processing.delayed_processing import (
     process_delayed_alert_conditions,
     process_rulegroups_in_batches,
 )
-from sentry.rules.processing.processor import PROJECT_ID_BUFFER_LIST_KEY
+from sentry.rules.processing.processor import PROJECT_ID_BUFFER_LIST_KEY, RuleProcessor
 from sentry.testutils.cases import PerformanceIssueTestCase, RuleTestCase, TestCase
 from sentry.testutils.factories import EventType
 from sentry.testutils.helpers.datetime import before_now, freeze_time
+from sentry.testutils.helpers.features import with_feature
 from sentry.testutils.helpers.options import override_options
 from sentry.testutils.helpers.redis import mock_redis_buffer
 from sentry.utils import json
@@ -1110,6 +1111,148 @@ class ProcessDelayedAlertConditionsTest(CreateEventTestCase, PerformanceIssueTes
         assert (two_conditions_match_all_rule.id, group5.id) in rule_fire_histories
         self.assert_buffer_cleared(project_id=self.project.id)
 
+        assert (two_conditions_match_all_rule.id, group5.id) in rule_fire_histories
+        self.assert_buffer_cleared(project_id=self.project.id)
+
+    @with_feature("organizations:event-unique-user-frequency-condition-with-conditions")
+    def test_special_event_frequency_condition(self):
+        """Rule should NOT fire: only one of the three events carries region=EU,
+        so the EU-filtered unique-user count stays at or below the threshold (2)
+        and no RuleFireHistory row is created.
+        """
+        Rule.objects.all().delete()
+        event_frequency_special_condition = Rule.objects.create(
+            label="Event Frequency Special Condition",
+            project=self.project,
+            environment_id=self.environment.id,
+            data={
+                "filter_match": "all",
+                "action_match": "all",
+                "actions": [
+                    {"id": "sentry.rules.actions.notify_event.NotifyEventAction"},
+                    {
+                        "id": "sentry.rules.actions.notify_event_service.NotifyEventServiceAction",
+                        "service": "mail",
+                    },
+                ],
+                "conditions": [
+                    {
+                        "id": "sentry.rules.conditions.event_frequency.EventUniqueUserFrequencyConditionWithConditions",
+                        "value": 2,
+                        "comparisonType": "count",
+                        "interval": "1m",
+                    },
+                    {
+                        "match": "eq",
+                        "id": "sentry.rules.filters.tagged_event.TaggedEventFilter",
+                        "key": "region",
+                        "value": "EU",
+                    },
+                ],
+            },
+        )
+
+        # Two US-tagged events plus a single EU-tagged one in the same group.
+        self.create_event(
+            self.project.id, FROZEN_TIME, "group-1", self.environment.name, tags=[["region", "US"]]
+        )
+        self.create_event(
+            self.project.id, FROZEN_TIME, "group-1", self.environment.name, tags=[["region", "US"]]
+        )
+        evaluated_event = self.create_event(
+            self.project.id, FROZEN_TIME, "group-1", self.environment.name, tags=[["region", "EU"]]
+        )
+        assert evaluated_event.group
+
+        group1 = evaluated_event.group
+
+        project_ids = buffer.backend.get_sorted_set(
+            PROJECT_ID_BUFFER_LIST_KEY, 0, self.buffer_timestamp
+        )
+        rp = RuleProcessor(
+            evaluated_event.for_group(evaluated_event.group),
+            is_new=False,
+            is_regression=False,
+            is_new_group_environment=False,
+            has_reappeared=False,
+        )
+        rp.apply()
+
+        apply_delayed(project_ids[0][0])
+        rule_fire_histories = RuleFireHistory.objects.filter(
+            rule__in=[event_frequency_special_condition],
+            group__in=[group1],
+            event_id__in=[evaluated_event.event_id],
+            project=self.project,
+        ).values_list("rule", "group")
+        assert len(rule_fire_histories) == 0
+        self.assert_buffer_cleared(project_id=self.project.id)
+
+    @with_feature("organizations:event-unique-user-frequency-condition-with-conditions")
+    def test_special_event_frequency_condition_passes(self):
+        """Rule SHOULD fire: all three events carry region=EU, so the EU-filtered
+        unique-user count exceeds the threshold (2) and exactly one
+        RuleFireHistory row is created.
+        """
+        Rule.objects.all().delete()
+        event_frequency_special_condition = Rule.objects.create(
+            label="Event Frequency Special Condition",
+            project=self.project,
+            environment_id=self.environment.id,
+            data={
+                "filter_match": "all",
+                "action_match": "all",
+                "actions": [
+                    {"id": "sentry.rules.actions.notify_event.NotifyEventAction"},
+                    {
+                        "id": "sentry.rules.actions.notify_event_service.NotifyEventServiceAction",
+                        "service": "mail",
+                    },
+                ],
+                "conditions": [
+                    {
+                        "id": "sentry.rules.conditions.event_frequency.EventUniqueUserFrequencyConditionWithConditions",
+                        "value": 2,
+                        "comparisonType": "count",
+                        "interval": "1m",
+                    },
+                    {
+                        "match": "eq",
+                        "id": "sentry.rules.filters.tagged_event.TaggedEventFilter",
+                        "key": "region",
+                        "value": "EU",
+                    },
+                ],
+            },
+        )
+
+        # Three EU-tagged events in the same group.
+        self.create_event(
+            self.project.id, FROZEN_TIME, "group-1", self.environment.name, tags=[["region", "EU"]]
+        )
+        self.create_event(
+            self.project.id, FROZEN_TIME, "group-1", self.environment.name, tags=[["region", "EU"]]
+        )
+        evaluated_event = self.create_event(
+            self.project.id, FROZEN_TIME, "group-1", self.environment.name, tags=[["region", "EU"]]
+        )
+        assert evaluated_event.group
+
+        group1 = evaluated_event.group
+
+        project_ids = buffer.backend.get_sorted_set(
+            PROJECT_ID_BUFFER_LIST_KEY, 0, self.buffer_timestamp
+        )
+        rp = RuleProcessor(
+            evaluated_event.for_group(evaluated_event.group),
+            is_new=False,
+            is_regression=False,
+            is_new_group_environment=False,
+            has_reappeared=False,
+        )
+        rp.apply()
+
+        apply_delayed(project_ids[0][0])
+        rule_fire_histories = RuleFireHistory.objects.filter(
+            rule__in=[event_frequency_special_condition],
+            group__in=[group1],
+            event_id__in=[evaluated_event.event_id],
+            project=self.project,
+        ).values_list("rule", "group")
+        assert len(rule_fire_histories) == 1
+        assert (event_frequency_special_condition.id, group1.id) in rule_fire_histories
+        self.assert_buffer_cleared(project_id=self.project.id)
+
     def test_apply_delayed_shared_condition_diff_filter(self):
         self._push_base_events()
         project_three = self.create_project(organization=self.organization)
@@ -1421,10 +1564,10 @@ class DataAndGroupsTest(TestCase):
     """
 
     def test_repr(self):
-        condition = DataAndGroups(data=TEST_RULE_SLOW_CONDITION, group_ids={1, 2})
+        condition = DataAndGroups(data=TEST_RULE_SLOW_CONDITION, group_ids={1, 2}, rule_id=1)
         assert (
             repr(condition)
-            == "<DataAndGroups data: {'id': 'sentry.rules.conditions.event_frequency.EventFrequencyCondition', 'value': 1, 'interval': '1h'} group_ids: {1, 2}>"
+            == "<DataAndGroups data: {'id': 'sentry.rules.conditions.event_frequency.EventFrequencyCondition', 'value': 1, 'interval': '1h'} group_ids: {1, 2} rule_id: 1>"
         )
 
 

+ 316 - 0
tests/snuba/rules/conditions/test_event_frequency.py

@@ -6,6 +6,7 @@ from uuid import uuid4
 
 import pytest
 from django.utils import timezone
+from snuba_sdk import Op
 
 from sentry.issues.grouptype import PerformanceNPlusOneGroupType
 from sentry.models.group import Group
@@ -15,6 +16,7 @@ from sentry.rules.conditions.event_frequency import (
     EventFrequencyCondition,
     EventFrequencyPercentCondition,
     EventUniqueUserFrequencyCondition,
+    EventUniqueUserFrequencyConditionWithConditions,
 )
 from sentry.testutils.abstract import Abstract
 from sentry.testutils.cases import (
@@ -24,6 +26,7 @@ from sentry.testutils.cases import (
     SnubaTestCase,
 )
 from sentry.testutils.helpers.datetime import before_now, freeze_time, iso_format
+from sentry.testutils.helpers.features import apply_feature_flag_on_cls
 from sentry.testutils.skips import requires_snuba
 from sentry.utils.samples import load_data
 
@@ -529,6 +532,309 @@ class EventUniqueUserFrequencyConditionTestCase(StandardIntervalTestBase):
             )
 
 
+@apply_feature_flag_on_cls("organizations:event-unique-user-frequency-condition-with-conditions")
+class EventUniqueUserFrequencyConditionWithConditionsTestCase(StandardIntervalTestBase):
+    """Abstract test base for EventUniqueUserFrequencyConditionWithConditions.
+
+    Concrete subclasses mix in an event-creation helper (e.g. ErrorEventMixin)
+    to run these scenarios against a specific issue type.
+    """
+
+    __test__ = Abstract(__module__, __qualname__)
+
+    rule_cls = EventUniqueUserFrequencyConditionWithConditions
+
+    def increment(self, event, count, environment=None, timestamp=None):
+        """Add *count* events matching *event*'s fingerprint, each with a
+        freshly generated (unique) user id, so every event counts as a
+        distinct user.
+        """
+        timestamp = timestamp if timestamp else before_now(minutes=1)
+        data = {"fingerprint": event.data["fingerprint"]}
+        if environment:
+            data["environment"] = environment
+
+        for _ in range(count):
+            event_data = deepcopy(data)
+            event_data["user"] = {"id": uuid4().hex}
+            self.add_event(
+                data=event_data,
+                project_id=self.project.id,
+                timestamp=timestamp,
+            )
+
+    def test_comparison(self):
+        """Percent-comparison mode: passes at a 99% threshold, fails at 101%."""
+        # Test data is 4 events in the current period and 2 events in the comparison period, so
+        # a 100% increase.
+        event = self.add_event(
+            data={
+                "fingerprint": ["something_random"],
+                "user": {"id": uuid4().hex},
+            },
+            project_id=self.project.id,
+            timestamp=before_now(minutes=1),
+        )
+        self.increment(
+            event,
+            3,
+            timestamp=timezone.now() - timedelta(minutes=1),
+        )
+        self.increment(
+            event,
+            2,
+            timestamp=timezone.now() - timedelta(days=1, minutes=20),
+        )
+        data = {
+            "interval": "1h",
+            "value": 99,
+            "comparisonType": "percent",
+            "comparisonInterval": "1d",
+            "id": "EventFrequencyConditionWithConditions",
+        }
+
+        rule = self.get_rule(
+            data=data,
+            rule=Rule(
+                environment_id=None,
+                project_id=self.project.id,
+                data={
+                    "conditions": [data],
+                    "filter_match": "all",
+                },
+            ),
+        )
+        self.assertPasses(rule, event, is_new=False)
+
+        data = {
+            "interval": "1h",
+            "value": 101,
+            "comparisonType": "percent",
+            "comparisonInterval": "1d",
+            "id": "EventFrequencyConditionWithConditions",
+        }
+
+        rule = self.get_rule(
+            data=data,
+            rule=Rule(
+                environment_id=None,
+                project_id=self.project.id,
+                data={
+                    "conditions": [data],
+                    "filter_match": "all",
+                },
+            ),
+        )
+        self.assertDoesNotPass(rule, event, is_new=False)
+
+    def test_comparison_empty_comparison_period(self):
+        """Percent-comparison with an empty comparison period never fires."""
+        # Test data is 1 event in the current period and 0 events in the comparison period. This
+        # should always result in 0 and never fire.
+        event = self.add_event(
+            data={
+                "fingerprint": ["something_random"],
+                "user": {"id": uuid4().hex},
+            },
+            project_id=self.project.id,
+            timestamp=before_now(minutes=1),
+        )
+        data = {
+            "filter_match": "all",
+            "conditions": [
+                {
+                    "interval": "1h",
+                    "value": 0,
+                    "comparisonType": "percent",
+                    "comparisonInterval": "1d",
+                }
+            ],
+        }
+        rule = self.get_rule(
+            data=data, rule=Rule(environment_id=None, project_id=self.project.id, data=data)
+        )
+        self.assertDoesNotPass(rule, event, is_new=False)
+
+        data = {
+            "filter_match": "all",
+            "conditions": [
+                {
+                    "interval": "1h",
+                    "value": 100,
+                    "comparisonType": "percent",
+                    "comparisonInterval": "1d",
+                }
+            ],
+        }
+        rule = self.get_rule(
+            data=data, rule=Rule(environment_id=None, project_id=self.project.id, data=data)
+        )
+        self.assertDoesNotPass(rule, event, is_new=False)
+
+    def _run_test(self, minutes, data, passes, add_events=False):
+        """Override of the base helper: builds the condition with a backing Rule
+        (filter_match == "all") so the rule-aware query hooks can run; asserts
+        pass/fail for both an unscoped and an environment-scoped rule.
+        """
+        if not self.environment:
+            self.environment = self.create_environment(name="prod")
+        data["filter_match"] = "all"
+        data["conditions"] = data.get("conditions", [])
+        rule = self.get_rule(
+            data=data,
+            rule=Rule(environment_id=None, project_id=self.project.id, data=data),
+        )
+        environment_rule = self.get_rule(
+            data=data,
+            rule=Rule(
+                environment_id=self.environment.id,
+                project_id=self.project.id,
+                data=data,
+            ),
+        )
+
+        event = self.add_event(
+            data={
+                "fingerprint": ["something_random"],
+                "user": {"id": uuid4().hex},
+            },
+            project_id=self.project.id,
+            timestamp=before_now(minutes=minutes),
+        )
+        if add_events:
+            # Push the count past data["value"] both inside and outside the
+            # environment so both rule variants see enough events.
+            self.increment(
+                event,
+                data["value"] + 1,
+                environment=self.environment.name,
+                timestamp=timezone.now() - timedelta(minutes=minutes),
+            )
+            self.increment(
+                event,
+                data["value"] + 1,
+                timestamp=timezone.now() - timedelta(minutes=minutes),
+            )
+
+        if passes:
+            self.assertPasses(rule, event, is_new=False)
+            self.assertPasses(environment_rule, event, is_new=False)
+        else:
+            self.assertDoesNotPass(rule, event, is_new=False)
+            self.assertDoesNotPass(environment_rule, event, is_new=False)
+
+
+def test_convert_rule_condition_to_snuba_condition():
+    """Exercise every supported TaggedEventFilter match type of
+    convert_rule_condition_to_snuba_condition, plus the None fallback for
+    non-tag conditions and the ValueError on unknown match types.
+    """
+
+    # Test non-TaggedEventFilter condition
+    condition = {"id": "some.other.condition"}
+    assert (
+        EventUniqueUserFrequencyConditionWithConditions.convert_rule_condition_to_snuba_condition(
+            condition
+        )
+        is None
+    )
+
+    # Test TaggedEventFilter conditions
+    base_condition = {
+        "id": "sentry.rules.filters.tagged_event.TaggedEventFilter",
+        "key": "test_key",
+        "value": "test_value",
+    }
+
+    # Test equality
+    eq_condition = {**base_condition, "match": "eq"}
+    assert (
+        EventUniqueUserFrequencyConditionWithConditions.convert_rule_condition_to_snuba_condition(
+            eq_condition
+        )
+        == (
+            "tags[test_key]",
+            Op.EQ.value,
+            "test_value",
+        )
+    )
+
+    # Test inequality
+    ne_condition = {**base_condition, "match": "ne"}
+    assert (
+        EventUniqueUserFrequencyConditionWithConditions.convert_rule_condition_to_snuba_condition(
+            ne_condition
+        )
+        == (
+            "tags[test_key]",
+            Op.NEQ.value,
+            "test_value",
+        )
+    )
+
+    # Test starts with
+    sw_condition = {**base_condition, "match": "sw"}
+    assert (
+        EventUniqueUserFrequencyConditionWithConditions.convert_rule_condition_to_snuba_condition(
+            sw_condition
+        )
+        == (
+            "tags[test_key]",
+            Op.LIKE.value,
+            "test_value%",
+        )
+    )
+
+    # Test ends with
+    ew_condition = {**base_condition, "match": "ew"}
+    assert (
+        EventUniqueUserFrequencyConditionWithConditions.convert_rule_condition_to_snuba_condition(
+            ew_condition
+        )
+        == (
+            "tags[test_key]",
+            Op.LIKE.value,
+            "%test_value",
+        )
+    )
+
+    # Test contains
+    co_condition = {**base_condition, "match": "co"}
+    assert (
+        EventUniqueUserFrequencyConditionWithConditions.convert_rule_condition_to_snuba_condition(
+            co_condition
+        )
+        == (
+            "tags[test_key]",
+            Op.LIKE.value,
+            "%test_value%",
+        )
+    )
+
+    # Test not contains
+    nc_condition = {**base_condition, "match": "nc"}
+    assert (
+        EventUniqueUserFrequencyConditionWithConditions.convert_rule_condition_to_snuba_condition(
+            nc_condition
+        )
+        == (
+            "tags[test_key]",
+            Op.NOT_LIKE.value,
+            "%test_value%",
+        )
+    )
+
+    # Test is not null
+    is_condition = {**base_condition, "match": "is"}
+    assert (
+        EventUniqueUserFrequencyConditionWithConditions.convert_rule_condition_to_snuba_condition(
+            is_condition
+        )
+        == (
+            "tags[test_key]",
+            Op.IS_NOT_NULL.value,
+            None,
+        )
+    )
+
+    # Test is null
+    ns_condition = {**base_condition, "match": "ns"}
+    assert (
+        EventUniqueUserFrequencyConditionWithConditions.convert_rule_condition_to_snuba_condition(
+            ns_condition
+        )
+        == (
+            "tags[test_key]",
+            Op.IS_NULL.value,
+            None,
+        )
+    )
+
+    # Test unsupported match type
+    with pytest.raises(ValueError, match="Unsupported match type: unsupported"):
+        EventUniqueUserFrequencyConditionWithConditions.convert_rule_condition_to_snuba_condition(
+            {**base_condition, "match": "unsupported"}
+        )
+
+
 class EventFrequencyPercentConditionTestCase(BaseEventFrequencyPercentTest, RuleTestCase):
     __test__ = Abstract(__module__, __qualname__)
 
@@ -769,6 +1075,16 @@ class ErrorIssueUniqueUserFrequencyConditionTestCase(
     pass
 
 
+@freeze_time(
+    (timezone.now() - timedelta(days=2)).replace(hour=12, minute=40, second=0, microsecond=0)
+)
+class ErrorIssueUniqueUserFrequencyConditionWithConditionsTestCase(
+    ErrorEventMixin,
+    EventUniqueUserFrequencyConditionWithConditionsTestCase,
+):
+    pass
+
+
 @freeze_time(
     (timezone.now() - timedelta(days=2)).replace(hour=12, minute=40, second=0, microsecond=0)
 )

Some files were not shown because too many files changed in this diff