
feat(discover): Add top level transactions dataset (#71940)

Add a top-level API for transaction event queries
and timeseries queries.
Tests are _very_ similar to the `discover` tests,
except that we load transactions data.
Shruthi 8 months ago
Parent
Commit
3e4a85fde1
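
The new module is a thin veneer over `discover`; a minimal usage sketch of the new entry point (parameter values are illustrative, shaped like the tests further down):

from datetime import datetime, timedelta, timezone

from sentry.snuba import transactions

# Illustrative params; real ids come from Organization/Project models.
params = {
    "organization_id": 1,
    "project_id": [1],
    "start": datetime.now(timezone.utc) - timedelta(days=1),
    "end": datetime.now(timezone.utc),
}

result = transactions.query(
    selected_columns=["transaction", "count()"],
    query="event.type:transaction",
    params=params,
    referrer="example.transactions.query",
    use_aggregate_conditions=True,
)
rows = result["data"]  # list of dicts keyed by the public aliases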

+ 1 - 0
src/sentry/search/events/builder/discover.py

@@ -542,6 +542,7 @@ class BaseQueryBuilder:
             # Chained or statements become field:a OR (field:b OR (...))
             operator == Or
             and is_where_condition(lhs_where)
+            and rhs_where
             and isinstance(rhs_where[0], Or)
             # Even in a long chain the first condition would be the next field
             and isinstance(rhs_where[0].conditions[0], Condition)
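
The new `and rhs_where` guard matters because the very next clause indexes into the list; a minimal standalone sketch (not Sentry code) of the failure it prevents:

rhs_where = []  # empty condition list, as can happen for a bare OR chain

# Without the truthiness check, evaluating rhs_where[0] raises IndexError;
# with it, `and` short-circuits and the whole predicate is simply False.
ok = bool(rhs_where) and isinstance(rhs_where[0], tuple)
assert ok is False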

+ 15 - 0
src/sentry/search/events/datasets/discover.py

@@ -34,6 +34,7 @@ from sentry.search.events.constants import (
     DEVICE_CLASS_ALIAS,
     ERROR_HANDLED_ALIAS,
     ERROR_UNHANDLED_ALIAS,
+    EVENT_TYPE_ALIAS,
     FUNCTION_ALIASES,
     HTTP_STATUS_CODE_ALIAS,
     ISSUE_ALIAS,
@@ -93,6 +94,7 @@ from sentry.search.events.fields import (
 from sentry.search.events.filter import to_list
 from sentry.search.events.types import SelectType, WhereType
 from sentry.search.utils import DEVICE_CLASS
+from sentry.snuba.dataset import Dataset
 from sentry.snuba.referrer import Referrer
 from sentry.utils.numbers import format_grouped_length
 
@@ -132,6 +134,7 @@ class DiscoverDatasetConfig(DatasetConfig):
             SEMVER_BUILD_ALIAS: self._semver_build_filter_converter,
             TRACE_PARENT_SPAN_ALIAS: self._trace_parent_span_converter,
             "performance.issue_ids": self._performance_issue_ids_filter_converter,
+            EVENT_TYPE_ALIAS: self._event_type_filter_converter,
         }
 
     @property
@@ -2002,3 +2005,15 @@ class DiscoverDatasetConfig(DatasetConfig):
 
     def _key_transaction_filter_converter(self, search_filter: SearchFilter) -> WhereType | None:
         return filter_aliases.team_key_transaction_filter(self.builder, search_filter)
+
+    def _event_type_filter_converter(self, search_filter: SearchFilter) -> WhereType | None:
+        if self.builder.dataset == Dataset.Transactions:
+            if search_filter.operator in ["=", "IN"] and search_filter.value.value in [
+                "transaction",
+                ["transaction"],
+            ]:
+                return None
+            raise InvalidSearchQuery(
+                "Invalid value for event.type condition. Allowed value is transaction."
+            )
+        return self.builder.default_filter_converter(search_filter)
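
On the Transactions dataset the converter accepts only `event.type:transaction` (as an equality or a single-element IN) and drops the condition entirely, since the dataset contains nothing but transactions; any other value or operator raises. A standalone sketch of the accepted and rejected shapes (plain tuples stand in for SearchFilter here):

accepted = [("=", "transaction"), ("IN", ["transaction"])]
rejected = [("=", "error"), ("IN", ["transaction", "error"]), ("!=", "transaction")]

for operator, value in accepted:
    # the converter returns None for these: no WHERE clause is emitted
    assert operator in ["=", "IN"] and value in ["transaction", ["transaction"]]

for operator, value in rejected:
    # the converter raises InvalidSearchQuery for these
    assert not (operator in ["=", "IN"] and value in ["transaction", ["transaction"]])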

+ 222 - 85
src/sentry/snuba/discover.py

@@ -174,7 +174,7 @@ def transform_tips(tips):
     }
 
 
-def query(
+def _query(
     selected_columns,
     query,
     params,
@@ -199,43 +199,15 @@ def query(
     extra_columns=None,
     on_demand_metrics_enabled=False,
     on_demand_metrics_type=None,
+    dataset=Dataset.Discover,
 ) -> EventsResponse:
-    """
-    High-level API for doing arbitrary user queries against events.
-
-    This function operates on the Discover public event schema and
-    virtual fields/aggregate functions for selected columns and
-    conditions are supported through this function.
-
-    The resulting list will have all internal field names mapped
-    back into their public schema names.
-
-    selected_columns (Sequence[str]) List of public aliases to fetch.
-    query (str) Filter query string to create conditions from.
-    params (Dict[str, str]) Filtering parameters with start, end, project_id, environment
-    equations (Sequence[str]) List of equations to calculate for the query
-    orderby (None|str|Sequence[str]) The field to order results by.
-    offset (None|int) The record offset to read.
-    limit (int) The number of records to fetch.
-    referrer (str|None) A referrer string to help locate the origin of this query.
-    auto_fields (bool) Set to true to have project + eventid fields automatically added.
-    auto_aggregations (bool) Whether aggregates should be added automatically if they're used
-                    in conditions, and there's at least one aggregate already.
-    include_equation_fields (bool) Whether fields should be added automatically if they're used in
-                    equations
-    allow_metric_aggregates (bool) Ignored here, only used in metric enhanced performance
-    use_aggregate_conditions (bool) Set to true if aggregates conditions should be used at all.
-    conditions (Sequence[Condition]) List of conditions that are passed directly to snuba without
-                    any additional processing.
-    transform_alias_to_input_format (bool) Whether aggregate columns should be returned in the originally
-                                requested function format.
-    sample (float) The sample rate to run the query with
-    """
     if not selected_columns:
         raise InvalidSearchQuery("No columns selected")
 
+    assert dataset in [Dataset.Discover, Dataset.Transactions]
+
     builder = QueryBuilder(
-        Dataset.Discover,
+        dataset,
         params,
         snuba_params=snuba_params,
         query=query,
@@ -266,7 +238,7 @@ def query(
     return result
 
 
-def timeseries_query(
+def _timeseries_query(
     selected_columns: Sequence[str],
     query: str,
     params: ParamsType,
@@ -280,34 +252,13 @@ def timeseries_query(
     use_metrics_layer=False,
     on_demand_metrics_enabled=False,
     on_demand_metrics_type=None,
+    dataset=Dataset.Discover,
 ):
-    """
-    High-level API for doing arbitrary user timeseries queries against events.
-
-    This function operates on the public event schema and
-    virtual fields/aggregate functions for selected columns and
-    conditions are supported through this function.
-
-    This function is intended to only get timeseries based
-    results and thus requires the `rollup` parameter.
-
-    Returns a SnubaTSResult object that has been zerofilled in
-    case of gaps.
-
-    selected_columns (Sequence[str]) List of public aliases to fetch.
-    query (str) Filter query string to create conditions from.
-    params (Dict[str, str]) Filtering parameters with start, end, project_id, environment,
-    rollup (int) The bucket width in seconds
-    referrer (str|None) A referrer string to help locate the origin of this query.
-    comparison_delta: A timedelta used to convert this into a comparison query. We make a second
-    query time-shifted back by comparison_delta, and compare the results to get the % change for each
-    time bucket. Requires that we only pass
-    allow_metric_aggregates (bool) Ignored here, only used in metric enhanced performance
-    """
+    assert dataset in [Dataset.Discover, Dataset.Transactions]
     with sentry_sdk.start_span(op="discover.discover", description="timeseries.filter_transform"):
         equations, columns = categorize_columns(selected_columns)
         base_builder = TimeseriesQueryBuilder(
-            Dataset.Discover,
+            dataset,
             params,
             rollup,
             query=query,
@@ -326,7 +277,7 @@ def timeseries_query(
             comp_query_params["start"] -= comparison_delta
             comp_query_params["end"] -= comparison_delta
             comparison_builder = TimeseriesQueryBuilder(
-                Dataset.Discover,
+                dataset,
                 comp_query_params,
                 rollup,
                 query=query,
@@ -387,6 +338,148 @@ def timeseries_query(
     )
 
 
+def query(
+    selected_columns,
+    query,
+    params,
+    snuba_params=None,
+    equations=None,
+    orderby=None,
+    offset=None,
+    limit=50,
+    referrer=None,
+    auto_fields=False,
+    auto_aggregations=False,
+    include_equation_fields=False,
+    allow_metric_aggregates=False,
+    use_aggregate_conditions=False,
+    conditions=None,
+    functions_acl=None,
+    transform_alias_to_input_format=False,
+    sample=None,
+    has_metrics=False,
+    use_metrics_layer=False,
+    skip_tag_resolution=False,
+    extra_columns=None,
+    on_demand_metrics_enabled=False,
+    on_demand_metrics_type=None,
+) -> EventsResponse:
+    """
+    High-level API for doing arbitrary user queries against events.
+
+    This function operates on the Discover public event schema and
+    virtual fields/aggregate functions for selected columns and
+    conditions are supported through this function.
+
+    The resulting list will have all internal field names mapped
+    back into their public schema names.
+
+    selected_columns (Sequence[str]) List of public aliases to fetch.
+    query (str) Filter query string to create conditions from.
+    params (Dict[str, str]) Filtering parameters with start, end, project_id, environment
+    equations (Sequence[str]) List of equations to calculate for the query
+    orderby (None|str|Sequence[str]) The field to order results by.
+    offset (None|int) The record offset to read.
+    limit (int) The number of records to fetch.
+    referrer (str|None) A referrer string to help locate the origin of this query.
+    auto_fields (bool) Set to true to have project + eventid fields automatically added.
+    auto_aggregations (bool) Whether aggregates should be added automatically if they're used
+                    in conditions, and there's at least one aggregate already.
+    include_equation_fields (bool) Whether fields should be added automatically if they're used in
+                    equations
+    allow_metric_aggregates (bool) Ignored here, only used in metric enhanced performance
+    use_aggregate_conditions (bool) Set to true if aggregates conditions should be used at all.
+    conditions (Sequence[Condition]) List of conditions that are passed directly to snuba without
+                    any additional processing.
+    transform_alias_to_input_format (bool) Whether aggregate columns should be returned in the originally
+                                requested function format.
+    sample (float) The sample rate to run the query with
+    """
+    return _query(
+        selected_columns,
+        query,
+        params,
+        snuba_params=snuba_params,
+        equations=equations,
+        orderby=orderby,
+        offset=offset,
+        limit=limit,
+        referrer=referrer,
+        auto_fields=auto_fields,
+        auto_aggregations=auto_aggregations,
+        include_equation_fields=include_equation_fields,
+        allow_metric_aggregates=allow_metric_aggregates,
+        use_aggregate_conditions=use_aggregate_conditions,
+        conditions=conditions,
+        functions_acl=functions_acl,
+        transform_alias_to_input_format=transform_alias_to_input_format,
+        sample=sample,
+        has_metrics=has_metrics,
+        use_metrics_layer=use_metrics_layer,
+        skip_tag_resolution=skip_tag_resolution,
+        extra_columns=extra_columns,
+        on_demand_metrics_enabled=on_demand_metrics_enabled,
+        on_demand_metrics_type=on_demand_metrics_type,
+        dataset=Dataset.Discover,
+    )
+
+
+def timeseries_query(
+    selected_columns: Sequence[str],
+    query: str,
+    params: ParamsType,
+    rollup: int,
+    referrer: str | None = None,
+    zerofill_results: bool = True,
+    comparison_delta: timedelta | None = None,
+    functions_acl: list[str] | None = None,
+    allow_metric_aggregates=False,
+    has_metrics=False,
+    use_metrics_layer=False,
+    on_demand_metrics_enabled=False,
+    on_demand_metrics_type=None,
+):
+    """
+    High-level API for doing arbitrary user timeseries queries against events.
+
+    This function operates on the public event schema and
+    virtual fields/aggregate functions for selected columns and
+    conditions are supported through this function.
+
+    This function is intended to only get timeseries based
+    results and thus requires the `rollup` parameter.
+
+    Returns a SnubaTSResult object that has been zerofilled in
+    case of gaps.
+
+    selected_columns (Sequence[str]) List of public aliases to fetch.
+    query (str) Filter query string to create conditions from.
+    params (Dict[str, str]) Filtering parameters with start, end, project_id, environment,
+    rollup (int) The bucket width in seconds
+    referrer (str|None) A referrer string to help locate the origin of this query.
+    comparison_delta: A timedelta used to convert this into a comparison query. We make a second
+    query time-shifted back by comparison_delta, and compare the results to get the % change for each
+    time bucket. Requires that we only pass
+    allow_metric_aggregates (bool) Ignored here, only used in metric enhanced performance
+    """
+    return _timeseries_query(
+        selected_columns,
+        query,
+        params,
+        rollup,
+        referrer,
+        zerofill_results=zerofill_results,
+        allow_metric_aggregates=allow_metric_aggregates,
+        comparison_delta=comparison_delta,
+        functions_acl=functions_acl,
+        has_metrics=has_metrics,
+        use_metrics_layer=use_metrics_layer,
+        on_demand_metrics_enabled=on_demand_metrics_enabled,
+        on_demand_metrics_type=on_demand_metrics_type,
+        dataset=Dataset.Discover,
+    )
+
+
 def create_result_key(result_row, fields, issues) -> str:
     values = []
     for field in fields:
@@ -413,7 +506,7 @@ def create_result_key(result_row, fields, issues) -> str:
     return result
 
 
-def top_events_timeseries(
+def _top_events_timeseries(
     timeseries_columns,
     selected_columns,
     user_query,
@@ -431,32 +524,12 @@ def top_events_timeseries(
     functions_acl=None,
     on_demand_metrics_enabled: bool = False,
     on_demand_metrics_type=None,
+    dataset=Dataset.Discover,
 ):
-    """
-    High-level API for doing arbitrary user timeseries queries for a limited number of top events
-
-    Returns a dictionary of SnubaTSResult objects that have been zerofilled in
-    case of gaps. Each value of the dictionary should match the result of a timeseries query
-
-    timeseries_columns (Sequence[str]) List of public aliases to fetch for the timeseries query,
-                    usually matches the y-axis of the graph
-    selected_columns (Sequence[str]) List of public aliases to fetch for the events query,
-                    this is to determine what the top events are
-    user_query (str) Filter query string to create conditions from. needs to be user_query
-                    to not conflict with the function query
-    params (Dict[str, str]) Filtering parameters with start, end, project_id, environment,
-    orderby (Sequence[str]) The fields to order results by.
-    rollup (int) The bucket width in seconds
-    limit (int) The number of events to get timeseries for
-    organization (Organization) Used to map group ids to short ids
-    referrer (str|None) A referrer string to help locate the origin of this query.
-    top_events (dict|None) A dictionary with a 'data' key containing a list of dictionaries that
-                    represent the top events matching the query. Useful when you have found
-                    the top events earlier and want to save a query.
-    """
+    assert dataset in [Dataset.Discover, Dataset.Transactions]
     if top_events is None:
         with sentry_sdk.start_span(op="discover.discover", description="top_events.fetch_events"):
-            top_events = query(
+            top_events = _query(
                 selected_columns,
                 query=user_query,
                 params=params,
@@ -468,10 +541,11 @@ def top_events_timeseries(
                 use_aggregate_conditions=True,
                 include_equation_fields=True,
                 skip_tag_resolution=True,
+                dataset=dataset,
             )
 
     top_events_builder = TopEventsQueryBuilder(
-        Dataset.Discover,
+        dataset,
         params,
         rollup,
         top_events["data"],
@@ -487,7 +561,7 @@ def top_events_timeseries(
     )
     if len(top_events["data"]) == limit and include_other:
         other_events_builder = TopEventsQueryBuilder(
-            Dataset.Discover,
+            dataset,
             params,
             rollup,
             top_events["data"],
@@ -572,6 +646,69 @@ def top_events_timeseries(
     return results
 
 
+def top_events_timeseries(
+    timeseries_columns,
+    selected_columns,
+    user_query,
+    params,
+    orderby,
+    rollup,
+    limit,
+    organization,
+    equations=None,
+    referrer=None,
+    top_events=None,
+    allow_empty=True,
+    zerofill_results=True,
+    include_other=False,
+    functions_acl=None,
+    on_demand_metrics_enabled: bool = False,
+    on_demand_metrics_type=None,
+):
+    """
+    High-level API for doing arbitrary user timeseries queries for a limited number of top events
+
+    Returns a dictionary of SnubaTSResult objects that have been zerofilled in
+    case of gaps. Each value of the dictionary should match the result of a timeseries query
+
+    timeseries_columns (Sequence[str]) List of public aliases to fetch for the timeseries query,
+                    usually matches the y-axis of the graph
+    selected_columns (Sequence[str]) List of public aliases to fetch for the events query,
+                    this is to determine what the top events are
+    user_query (str) Filter query string to create conditions from. needs to be user_query
+                    to not conflict with the function query
+    params (Dict[str, str]) Filtering parameters with start, end, project_id, environment,
+    orderby (Sequence[str]) The fields to order results by.
+    rollup (int) The bucket width in seconds
+    limit (int) The number of events to get timeseries for
+    organization (Organization) Used to map group ids to short ids
+    referrer (str|None) A referrer string to help locate the origin of this query.
+    top_events (dict|None) A dictionary with a 'data' key containing a list of dictionaries that
+                    represent the top events matching the query. Useful when you have found
+                    the top events earlier and want to save a query.
+    """
+    return _top_events_timeseries(
+        timeseries_columns,
+        selected_columns,
+        user_query,
+        params,
+        orderby,
+        rollup,
+        limit,
+        organization,
+        equations=equations,
+        referrer=referrer,
+        top_events=top_events,
+        allow_empty=allow_empty,
+        zerofill_results=zerofill_results,
+        include_other=include_other,
+        functions_acl=functions_acl,
+        on_demand_metrics_enabled=on_demand_metrics_enabled,
+        on_demand_metrics_type=on_demand_metrics_type,
+        dataset=Dataset.Discover,
+    )
+
+
 def get_id(result):
     if result:
         return result[1]
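
The pattern repeated through this file: each public function keeps its docstring and signature for existing callers and delegates to a private `_`-prefixed implementation that additionally accepts `dataset`. A condensed sketch with hypothetical names:

from sentry.snuba.dataset import Dataset

def _do_query(columns, dataset=Dataset.Discover):
    # shared implementation; callers pin the dataset explicitly
    assert dataset in [Dataset.Discover, Dataset.Transactions]
    ...

def do_query(columns):
    # public Discover entry point, unchanged for existing callers
    return _do_query(columns, dataset=Dataset.Discover)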

+ 143 - 0
src/sentry/snuba/transactions.py

@@ -0,0 +1,143 @@
+import logging
+from collections.abc import Sequence
+from datetime import timedelta
+
+from sentry.search.events.types import ParamsType
+from sentry.snuba import discover
+from sentry.snuba.dataset import Dataset
+from sentry.utils.snuba import SnubaTSResult
+
+logger = logging.getLogger(__name__)
+
+
+def query(
+    selected_columns,
+    query,
+    params,
+    snuba_params=None,
+    equations=None,
+    orderby=None,
+    offset=None,
+    limit=50,
+    referrer=None,
+    auto_fields=False,
+    auto_aggregations=False,
+    include_equation_fields=False,
+    allow_metric_aggregates=False,
+    use_aggregate_conditions=False,
+    conditions=None,
+    functions_acl=None,
+    transform_alias_to_input_format=False,
+    sample=None,
+    has_metrics=False,
+    use_metrics_layer=False,
+    skip_tag_resolution=False,
+    extra_columns=None,
+    on_demand_metrics_enabled=False,
+    on_demand_metrics_type=None,
+) -> discover.EventsResponse:
+    return discover._query(
+        selected_columns,
+        query,
+        params,
+        snuba_params=snuba_params,
+        equations=equations,
+        orderby=orderby,
+        offset=offset,
+        limit=limit,
+        referrer=referrer,
+        auto_fields=auto_fields,
+        auto_aggregations=auto_aggregations,
+        include_equation_fields=include_equation_fields,
+        allow_metric_aggregates=allow_metric_aggregates,
+        use_aggregate_conditions=use_aggregate_conditions,
+        conditions=conditions,
+        functions_acl=functions_acl,
+        transform_alias_to_input_format=transform_alias_to_input_format,
+        sample=sample,
+        has_metrics=has_metrics,
+        use_metrics_layer=use_metrics_layer,
+        skip_tag_resolution=skip_tag_resolution,
+        extra_columns=extra_columns,
+        on_demand_metrics_enabled=on_demand_metrics_enabled,
+        on_demand_metrics_type=on_demand_metrics_type,
+        dataset=Dataset.Transactions,
+    )
+
+
+def timeseries_query(
+    selected_columns: Sequence[str],
+    query: str,
+    params: ParamsType,
+    rollup: int,
+    referrer: str | None = None,
+    zerofill_results: bool = True,
+    comparison_delta: timedelta | None = None,
+    functions_acl: list[str] | None = None,
+    allow_metric_aggregates=False,
+    has_metrics=False,
+    use_metrics_layer=False,
+    on_demand_metrics_enabled=False,
+    on_demand_metrics_type=None,
+) -> SnubaTSResult:
+    """
+    High-level API for doing arbitrary user timeseries queries against events.
+    this API should match that of sentry.snuba.discover.timeseries_query
+    """
+    return discover._timeseries_query(
+        selected_columns,
+        query,
+        params,
+        rollup,
+        referrer,
+        zerofill_results=zerofill_results,
+        allow_metric_aggregates=allow_metric_aggregates,
+        comparison_delta=comparison_delta,
+        functions_acl=functions_acl,
+        has_metrics=has_metrics,
+        use_metrics_layer=use_metrics_layer,
+        on_demand_metrics_enabled=on_demand_metrics_enabled,
+        on_demand_metrics_type=on_demand_metrics_type,
+        dataset=Dataset.Transactions,
+    )
+
+
+def top_events_timeseries(
+    timeseries_columns,
+    selected_columns,
+    user_query,
+    params,
+    orderby,
+    rollup,
+    limit,
+    organization,
+    equations=None,
+    referrer=None,
+    top_events=None,
+    allow_empty=True,
+    zerofill_results=True,
+    include_other=False,
+    functions_acl=None,
+    on_demand_metrics_enabled: bool = False,
+    on_demand_metrics_type=None,
+):
+    return discover._top_events_timeseries(
+        timeseries_columns,
+        selected_columns,
+        user_query,
+        params,
+        orderby,
+        rollup,
+        limit,
+        organization,
+        equations=equations,
+        referrer=referrer,
+        top_events=top_events,
+        allow_empty=allow_empty,
+        zerofill_results=zerofill_results,
+        include_other=include_other,
+        functions_acl=functions_acl,
+        on_demand_metrics_enabled=on_demand_metrics_enabled,
+        on_demand_metrics_type=on_demand_metrics_type,
+        dataset=Dataset.Transactions,
+    )
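
As with `query`, the wrappers here only pin `dataset=Dataset.Transactions`; a minimal usage sketch of the timeseries entry point (illustrative values):

from datetime import datetime, timedelta, timezone

from sentry.snuba import transactions

params = {
    "organization_id": 1,
    "project_id": [1],
    "start": datetime.now(timezone.utc) - timedelta(hours=6),
    "end": datetime.now(timezone.utc),
}

# rollup is the bucket width in seconds; 3600 buckets by hour
ts = transactions.timeseries_query(
    selected_columns=["count()"],
    query="",
    params=params,
    rollup=3600,
    referrer="example.transactions.timeseries",
)
# ts is a SnubaTSResult, zerofilled where buckets have no data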

+ 2 - 0
src/sentry/snuba/utils.py

@@ -10,6 +10,7 @@ from sentry.snuba import (
     profiles,
     spans_indexed,
     spans_metrics,
+    transactions,
 )
 
 # Doesn't map 1:1 with real datasets, but rather what we present to users
@@ -24,6 +25,7 @@ DATASET_OPTIONS = {
     "profileFunctions": functions,
     "spansIndexed": spans_indexed,
     "spansMetrics": spans_metrics,
+    "transactions": transactions,
 }
 DATASET_LABELS = {value: key for key, value in DATASET_OPTIONS.items()}
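
This mapping is what user-facing endpoints consult to resolve a dataset label, so the new entry is directly checkable:

from sentry.snuba import transactions
from sentry.snuba.utils import DATASET_LABELS, DATASET_OPTIONS

# "transactions" now resolves to the sentry.snuba.transactions module,
# and the reverse mapping recovers the label.
assert DATASET_OPTIONS["transactions"] is transactions
assert DATASET_LABELS[transactions] == "transactions"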
 

+ 1 - 1
tests/sentry/snuba/test_discover_query.py

@@ -29,7 +29,7 @@ from sentry.utils.samples import load_data
 ARRAY_COLUMNS = ["measurements", "span_op_breakdowns"]
 
 
-class QueryIntegrationTest(SnubaTestCase, TestCase):
+class DiscoverQueryIntegrationTest(SnubaTestCase, TestCase):
     def setUp(self):
         super().setUp()
         self.environment = self.create_environment(self.project, name="prod")

+ 1 - 1
tests/sentry/snuba/test_discover_timeseries_query.py

@@ -54,7 +54,7 @@ class TimeseriesBase(SnubaTestCase, TestCase):
         )
 
 
-class TimeseriesQueryTest(TimeseriesBase):
+class DiscoverTimeseriesQueryTest(TimeseriesBase):
     def test_invalid_field_in_function(self):
         with pytest.raises(InvalidSearchQuery):
             discover.timeseries_query(

+ 3113 - 0
tests/sentry/snuba/test_transactions.py

@@ -0,0 +1,3113 @@
+from __future__ import annotations
+
+from datetime import timedelta
+
+import pytest
+from django.utils import timezone
+
+from sentry.discover.arithmetic import ArithmeticValidationError
+from sentry.discover.models import TeamKeyTransaction
+from sentry.exceptions import InvalidSearchQuery
+from sentry.models.projectteam import ProjectTeam
+from sentry.models.releaseprojectenvironment import ReleaseStages
+from sentry.models.transaction_threshold import (
+    ProjectTransactionThreshold,
+    ProjectTransactionThresholdOverride,
+    TransactionMetric,
+)
+from sentry.search.events.constants import (
+    RELEASE_STAGE_ALIAS,
+    SEMVER_ALIAS,
+    SEMVER_BUILD_ALIAS,
+    SEMVER_PACKAGE_ALIAS,
+)
+from sentry.snuba import discover, transactions
+from sentry.testutils.cases import SnubaTestCase, TestCase
+from sentry.testutils.helpers.datetime import before_now, iso_format
+from sentry.utils.samples import load_data
+
+ARRAY_COLUMNS = ["measurements", "span_op_breakdowns"]
+
+
+class TransactionQueryIntegrationTest(SnubaTestCase, TestCase):
+    def setUp(self):
+        super().setUp()
+        self.environment = self.create_environment(self.project, name="prod")
+        self.release = self.create_release(self.project, version="first-release")
+        self.now = before_now()
+        self.one_min_ago = before_now(minutes=1)
+        self.two_min_ago = before_now(minutes=2)
+
+        self.event_time = self.one_min_ago
+        # error event
+        data = load_data("javascript")
+        data["timestamp"] = iso_format(before_now(minutes=10))
+        self.store_event(data=data, project_id=self.project.id)
+
+        # transaction event
+        data = load_data("transaction", timestamp=self.event_time)
+        data["transaction"] = "a" * 32
+        data["user"] = {"id": "99", "email": "bruce@example.com", "username": "brucew"}
+        data["release"] = "first-release"
+        data["environment"] = self.environment.name
+        data["tags"] = [["key1", "value1"]]
+        self.event = self.store_event(data=data, project_id=self.project.id)
+
+        self.params = {
+            "organization_id": self.organization.id,
+            "project_id": [self.project.id],
+            "start": before_now(days=1),
+            "end": self.now,
+        }
+
+    def test_transaction_query(self):
+        result = transactions.query(
+            selected_columns=["transaction"],
+            query="",
+            params=self.params,
+            referrer="test_transactions_query",
+        )
+        data = result["data"]
+        assert len(data) == 1
+        assert data[0] == {"transaction": "a" * 32}
+
+    def test_error_query(self):
+        with pytest.raises(InvalidSearchQuery):
+            transactions.query(
+                selected_columns=["id"],
+                query="event.type:error",
+                params=self.params,
+                referrer="test_transactions_query",
+            )
+
+    def test_any_function(self):
+        results = transactions.query(
+            selected_columns=["count()", "any(transaction)", "any(user.id)"],
+            query="event.type:transaction",
+            params={
+                "start": before_now(minutes=5),
+                "end": before_now(seconds=1),
+                "project_id": [self.project.id],
+            },
+            referrer="discover",
+            use_aggregate_conditions=True,
+        )
+
+        data = results["data"]
+        assert len(data) == 1
+        assert data[0]["any_transaction"] == "a" * 32
+        assert data[0]["any_user_id"] == "99"
+        assert data[0]["count"] == 1
+
+    def test_auto_fields_aggregates(self):
+        result = transactions.query(
+            selected_columns=["count_unique(user.email)"],
+            referrer="discover",
+            query="",
+            params=self.params,
+            auto_fields=True,
+        )
+        data = result["data"]
+        assert len(data) == 1
+        assert data[0]["count_unique_user_email"] == 1
+
+    def test_auto_fields_simple_fields(self):
+        result = transactions.query(
+            selected_columns=["user.email", "release"],
+            referrer="discover",
+            query="",
+            params=self.params,
+            auto_fields=True,
+        )
+        data = result["data"]
+        assert len(data) == 1
+        assert data[0]["id"] == self.event.event_id
+        assert data[0]["user.email"] == "bruce@example.com"
+        assert data[0]["release"] == "first-release"
+        assert data[0]["project.name"] == self.project.slug
+
+        assert len(result["meta"]["fields"]) == 4
+        assert result["meta"]["fields"] == {
+            "user.email": "string",
+            "release": "string",
+            "id": "string",
+            "project.name": "string",
+        }
+
+    def test_conditional_filter(self):
+        project2 = self.create_project(organization=self.organization)
+        project3 = self.create_project(organization=self.organization)
+
+        data = load_data("transaction", timestamp=before_now(seconds=3))
+        data["transaction"] = "b" * 32
+        self.event = self.store_event(data=data, project_id=project2.id)
+
+        data = load_data("transaction", timestamp=before_now(seconds=3))
+        data["transaction"] = "c" * 32
+        self.event = self.store_event(data=data, project_id=project3.id)
+
+        result = transactions.query(
+            selected_columns=["project", "transaction"],
+            query=f"project:{self.project.slug} OR project:{project2.slug}",
+            params={
+                "project_id": [self.project.id, project2.id],
+                "start": self.two_min_ago,
+                "end": self.now,
+            },
+            orderby="transaction",
+            referrer="discover",
+        )
+
+        data = result["data"]
+        assert len(data) == 2
+        assert data[0]["project"] == self.project.slug
+        assert data[1]["project"] == project2.slug
+
+    def test_nested_conditional_filter(self):
+        project2 = self.create_project(organization=self.organization)
+
+        data = load_data("transaction", timestamp=before_now(seconds=3))
+        data["release"] = "a" * 32
+        self.event = self.store_event(data=data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=before_now(seconds=3))
+        data["release"] = "b" * 32
+        self.event = self.store_event(data=data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=before_now(seconds=3))
+        data["release"] = "c" * 32
+        self.event = self.store_event(data=data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=before_now(seconds=3))
+        data["release"] = "a" * 32
+        self.event = self.store_event(data=data, project_id=project2.id)
+
+        result = transactions.query(
+            selected_columns=["release"],
+            query="(release:{} OR release:{}) AND project:{}".format(
+                "a" * 32, "b" * 32, self.project.slug
+            ),
+            params={
+                "project_id": [self.project.id, project2.id],
+                "start": self.two_min_ago,
+                "end": self.now,
+            },
+            orderby="release",
+            referrer="discover",
+        )
+
+        data = result["data"]
+        assert len(data) == 2
+        assert data[0]["release"] == "a" * 32
+        assert data[1]["release"] == "b" * 32
+
+    def test_environment_condition(self):
+        result = transactions.query(
+            selected_columns=["id", "message"],
+            query=f"environment:{self.create_environment(self.project).name}",
+            params=self.params,
+            referrer="discover",
+        )
+        assert len(result["data"]) == 0
+
+        result = transactions.query(
+            selected_columns=["id", "message"],
+            query=f"environment:{self.environment.name}",
+            params=self.params,
+            referrer="discover",
+        )
+        assert len(result["data"]) == 1
+        data = result["data"]
+        assert data[0]["id"] == self.event.event_id
+        assert data[0]["message"] == "a" * 32
+
+    def test_field_alias_with_component(self):
+        result = transactions.query(
+            selected_columns=["project.id", "user", "user.email"],
+            query="",
+            params=self.params,
+            referrer="discover",
+        )
+        data = result["data"]
+        assert len(data) == 1
+        assert data[0]["project.id"] == self.project.id
+        assert data[0]["user"] == "id:99"
+        assert data[0]["user.email"] == "bruce@example.com"
+
+        assert len(result["meta"]["fields"]) == 3
+        assert result["meta"]["fields"] == {
+            "project.id": "integer",
+            "user": "string",
+            "user.email": "string",
+        }
+
+    def test_field_aliasing_in_aggregate_functions_and_groupby(self):
+        result = transactions.query(
+            selected_columns=["project.id", "count_unique(user.email)"],
+            query="",
+            params=self.params,
+            auto_fields=True,
+            referrer="discover",
+        )
+        data = result["data"]
+        assert len(data) == 1
+        assert data[0]["project.id"] == self.project.id
+        assert data[0]["count_unique_user_email"] == 1
+
+    def test_field_aliasing_in_conditions(self):
+        result = transactions.query(
+            selected_columns=["project.id", "user.email"],
+            query="user.email:bruce@example.com",
+            params=self.params,
+            referrer="discover",
+            auto_fields=True,
+        )
+        data = result["data"]
+        assert len(data) == 1
+        assert data[0]["project.id"] == self.project.id
+        assert data[0]["user.email"] == "bruce@example.com"
+
+    def test_field_aliasing_in_selected_columns(self):
+        result = transactions.query(
+            selected_columns=["project.id", "user", "release", "timestamp.to_hour"],
+            query="",
+            params=self.params,
+            referrer="discover",
+        )
+        data = result["data"]
+        assert len(data) == 1
+        assert data[0]["project.id"] == self.project.id
+        assert data[0]["user"] == "id:99"
+        assert data[0]["release"] == "first-release"
+
+        event_hour = self.event_time.replace(minute=0, second=0)
+        assert data[0]["timestamp.to_hour"] == iso_format(event_hour) + "+00:00"
+
+        assert len(result["meta"]["fields"]) == 4
+        assert result["meta"]["fields"] == {
+            "project.id": "integer",
+            "user": "string",
+            "release": "string",
+            "timestamp.to_hour": "date",
+        }
+
+    def test_latest_release_condition(self):
+        result = transactions.query(
+            selected_columns=["id", "message"],
+            query="release:latest",
+            params=self.params,
+            referrer="discover",
+        )
+        assert len(result["data"]) == 1
+        data = result["data"]
+        assert data[0]["id"] == self.event.event_id
+        assert data[0]["message"] == self.event.transaction
+        assert "event_id" not in data[0]
+
+    def test_message_filter(self):
+        data = load_data("transaction", timestamp=before_now(seconds=3))
+        data["transaction"] = "oh yeah"
+        self.event = self.store_event(data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=before_now(seconds=3))
+        data["transaction"] = "oh no"
+        self.event = self.store_event(data, project_id=self.project.id)
+
+        tests: list[tuple[str, list[str]]] = [
+            ('message:"oh no"', ["oh no"]),
+            ('message:"oh yeah"', ["oh yeah"]),
+            ('message:""', []),
+            ("has:message", ["a" * 32, "oh no", "oh yeah"]),
+            ("!has:message", []),
+            ("message:oh*", ["oh no", "oh yeah"]),
+            ('message:"oh *"', ["oh no", "oh yeah"]),
+            ('message:["oh meh"]', []),
+            ('message:["oh yeah"]', ["oh yeah"]),
+            ('message:["oh yeah", "oh no"]', ["oh no", "oh yeah"]),
+        ]
+
+        for query, expected in tests:
+            result = transactions.query(
+                selected_columns=["message"],
+                query=query,
+                params=self.params,
+                orderby="message",
+                referrer="test_discover_query",
+            )
+
+            data = result["data"]
+            assert len(data) == len(expected)
+            assert [item["message"] for item in data] == expected
+
+    def test_release_condition(self):
+        result = transactions.query(
+            selected_columns=["id", "message"],
+            query=f"release:{self.create_release(self.project).version}",
+            params=self.params,
+            referrer="discover",
+        )
+        assert len(result["data"]) == 0
+
+        result = transactions.query(
+            selected_columns=["id", "message"],
+            query=f"release:{self.release.version}",
+            params=self.params,
+            referrer="discover",
+        )
+        assert len(result["data"]) == 1
+        data = result["data"]
+        assert data[0]["id"] == self.event.event_id
+        assert data[0]["message"] == self.event.transaction
+        assert "event_id" not in data[0]
+
+    def test_semver_condition(self):
+        release_1 = self.create_release(version="test@1.2.3")
+        release_2 = self.create_release(version="test@1.2.4")
+        release_3 = self.create_release(version="test@1.2.5")
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = release_1.version
+        release_1_e_1 = self.store_event(data, project_id=self.project.id).event_id
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = release_1.version
+        release_1_e_2 = self.store_event(data, project_id=self.project.id).event_id
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = release_2.version
+        release_2_e_1 = self.store_event(data, project_id=self.project.id).event_id
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = release_2.version
+        release_2_e_2 = self.store_event(data, project_id=self.project.id).event_id
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = release_3.version
+        release_3_e_1 = self.store_event(data, project_id=self.project.id).event_id
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = release_3.version
+        release_3_e_2 = self.store_event(data, project_id=self.project.id).event_id
+
+        result = transactions.query(
+            selected_columns=["id"],
+            query=f"{SEMVER_ALIAS}:>1.2.3",
+            params=self.params,
+            referrer="discover",
+        )
+        assert {r["id"] for r in result["data"]} == {
+            release_2_e_1,
+            release_2_e_2,
+            release_3_e_1,
+            release_3_e_2,
+        }
+        result = transactions.query(
+            selected_columns=["id"],
+            query=f"{SEMVER_ALIAS}:>=1.2.3",
+            params=self.params,
+            referrer="discover",
+        )
+        assert {r["id"] for r in result["data"]} == {
+            release_1_e_1,
+            release_1_e_2,
+            release_2_e_1,
+            release_2_e_2,
+            release_3_e_1,
+            release_3_e_2,
+        }
+        result = transactions.query(
+            selected_columns=["id"],
+            query=f"{SEMVER_ALIAS}:<1.2.4",
+            params=self.params,
+            referrer="discover",
+        )
+        assert {r["id"] for r in result["data"]} == {release_1_e_1, release_1_e_2}
+        result = transactions.query(
+            selected_columns=["id"],
+            query=f"!{SEMVER_ALIAS}:1.2.3",
+            params=self.params,
+            referrer="discover",
+        )
+        assert {r["id"] for r in result["data"]} == {
+            self.event.event_id,
+            release_2_e_1,
+            release_2_e_2,
+            release_3_e_1,
+            release_3_e_2,
+        }
+
+    def test_release_stage_condition(self):
+        replaced_release = self.create_release(
+            version="replaced_release",
+            environments=[self.environment],
+            adopted=timezone.now(),
+            unadopted=timezone.now(),
+        )
+        adopted_release = self.create_release(
+            version="adopted_release",
+            environments=[self.environment],
+            adopted=timezone.now(),
+        )
+        self.create_release(version="not_adopted_release", environments=[self.environment])
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = adopted_release.version
+        data["environment"] = self.environment.name
+        adopted_release_e_1 = self.store_event(data, project_id=self.project.id).event_id
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = adopted_release.version
+        data["environment"] = self.environment.name
+        adopted_release_e_2 = self.store_event(data, project_id=self.project.id).event_id
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = replaced_release.version
+        data["environment"] = self.environment.name
+        replaced_release_e_1 = self.store_event(data, project_id=self.project.id).event_id
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = replaced_release.version
+        data["environment"] = self.environment.name
+        replaced_release_e_2 = self.store_event(data, project_id=self.project.id).event_id
+
+        self.params["environment"] = [self.environment.name]
+        self.params["environment_objects"] = [self.environment]
+
+        result = transactions.query(
+            selected_columns=["id"],
+            query=f"{RELEASE_STAGE_ALIAS}:{ReleaseStages.ADOPTED.value}",
+            params=self.params,
+            referrer="discover",
+        )
+        assert {r["id"] for r in result["data"]} == {
+            adopted_release_e_1,
+            adopted_release_e_2,
+        }
+
+        result = transactions.query(
+            selected_columns=["id"],
+            query=f"!{RELEASE_STAGE_ALIAS}:{ReleaseStages.LOW_ADOPTION.value}",
+            params=self.params,
+            referrer="discover",
+        )
+        assert {r["id"] for r in result["data"]} == {
+            adopted_release_e_1,
+            adopted_release_e_2,
+            replaced_release_e_1,
+            replaced_release_e_2,
+        }
+        result = transactions.query(
+            selected_columns=["id"],
+            query=f"{RELEASE_STAGE_ALIAS}:[{ReleaseStages.ADOPTED.value}, {ReleaseStages.REPLACED.value}]",
+            params=self.params,
+            referrer="discover",
+        )
+        assert {r["id"] for r in result["data"]} == {
+            adopted_release_e_1,
+            adopted_release_e_2,
+            replaced_release_e_1,
+            replaced_release_e_2,
+        }
+
+    def test_semver_package_condition(self):
+        release_1 = self.create_release(version="test@1.2.3")
+        release_2 = self.create_release(version="test2@1.2.4")
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = release_1.version
+        data["environment"] = self.environment.name
+        release_1_e_1 = self.store_event(data, project_id=self.project.id).event_id
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = release_1.version
+        data["environment"] = self.environment.name
+        release_1_e_2 = self.store_event(data, project_id=self.project.id).event_id
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = release_2.version
+        data["environment"] = self.environment.name
+        release_2_e_1 = self.store_event(data, project_id=self.project.id).event_id
+
+        result = transactions.query(
+            selected_columns=["id"],
+            referrer="discover",
+            query=f"{SEMVER_PACKAGE_ALIAS}:test",
+            params=self.params,
+        )
+        assert {r["id"] for r in result["data"]} == {
+            release_1_e_1,
+            release_1_e_2,
+        }
+        result = transactions.query(
+            selected_columns=["id"],
+            query=f"{SEMVER_PACKAGE_ALIAS}:test2",
+            referrer="discover",
+            params=self.params,
+        )
+        assert {r["id"] for r in result["data"]} == {
+            release_2_e_1,
+        }
+
+    def test_semver_build_condition(self):
+        release_1 = self.create_release(version="test@1.2.3+123")
+        release_2 = self.create_release(version="test2@1.2.4+124")
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = release_1.version
+        data["environment"] = self.environment.name
+        release_1_e_1 = self.store_event(data, project_id=self.project.id).event_id
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = release_1.version
+        data["environment"] = self.environment.name
+        release_1_e_2 = self.store_event(data, project_id=self.project.id).event_id
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["release"] = release_2.version
+        data["environment"] = self.environment.name
+        release_2_e_1 = self.store_event(data, project_id=self.project.id).event_id
+
+        result = transactions.query(
+            selected_columns=["id"],
+            query=f"{SEMVER_BUILD_ALIAS}:123",
+            params=self.params,
+            referrer="discover",
+        )
+        assert {r["id"] for r in result["data"]} == {
+            release_1_e_1,
+            release_1_e_2,
+        }
+        result = transactions.query(
+            selected_columns=["id"],
+            query=f"{SEMVER_BUILD_ALIAS}:124",
+            params=self.params,
+            referrer="discover",
+        )
+        assert {r["id"] for r in result["data"]} == {
+            release_2_e_1,
+        }
+        result = transactions.query(
+            selected_columns=["id"],
+            query=f"{SEMVER_BUILD_ALIAS}:>=123",
+            params=self.params,
+            referrer="discover",
+        )
+        assert {r["id"] for r in result["data"]} == {release_1_e_1, release_1_e_2, release_2_e_1}
+
+    def test_message_orderby(self):
+        data = load_data("transaction", timestamp=before_now(seconds=3))
+        data["transaction"] = "oh yeah"
+        self.event = self.store_event(data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=before_now(seconds=3))
+        data["transaction"] = "oh no"
+        self.event = self.store_event(data, project_id=self.project.id)
+
+        tests = [
+            ("message", ["a" * 32, "oh no", "oh yeah"]),
+            (
+                "-message",
+                [
+                    "oh yeah",
+                    "oh no",
+                    "a" * 32,
+                ],
+            ),
+        ]
+
+        for orderby, expected in tests:
+            result = transactions.query(
+                selected_columns=["message"],
+                query="",
+                params=self.params,
+                orderby=orderby,
+                referrer="test_discover_query",
+            )
+
+            data = result["data"]
+            assert len(data) == 3
+            assert [item["message"] for item in data] == expected
+
+    def test_missing_project(self):
+        project_ids = []
+        other_project = None
+        for project_name in ["a" * 32, "z" * 32, "m" * 32]:
+            other_project = self.create_project(organization=self.organization, slug=project_name)
+            project_ids.append(other_project.id)
+            data = load_data("transaction", timestamp=before_now(seconds=3))
+            data["transaction"] = "ohh no"
+            self.event = self.store_event(data, project_id=other_project.id)
+
+        self.params["project_id"] = project_ids
+
+        # delete the last project so it's missing
+        if other_project is not None:
+            other_project.delete()
+
+        result = transactions.query(
+            selected_columns=["message", "project"],
+            query="",
+            params=self.params,
+            orderby="project",
+            referrer="test_discover_query",
+        )
+        data = result["data"]
+        assert len(data) == 2
+        assert [item["project"] for item in data] == ["a" * 32, "z" * 32]
+
+    def test_offsets(self):
+        data = load_data("transaction", timestamp=before_now(seconds=3))
+        data["transaction"] = "hello1"
+        self.event = self.store_event(data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=before_now(seconds=3))
+        data["transaction"] = "hello2"
+        self.event = self.store_event(data, project_id=self.project.id)
+
+        result = transactions.query(
+            selected_columns=["message"],
+            query="",
+            params=self.params,
+            orderby="message",
+            limit=1,
+            offset=2,
+            referrer="discover",
+        )
+
+        data = result["data"]
+        assert len(data) == 1
+        # because we're ordering by `message` and offsetting by 2, the message should be `hello2`
+        # order would be a * 32, hello1, hello2
+        assert data[0]["message"] == "hello2"
+
+    def test_orderby_field_alias(self):
+        events = (
+            ("a" * 32, "ok", False),
+            ("b" * 32, "already_exists", True),
+            ("c" * 32, "aborted", None),
+        )
+        for event in events:
+            data = load_data("transaction", timestamp=before_now(minutes=10))
+            data["event_id"] = event[0]
+            data["transaction"] = event[0]
+            data["contexts"]["trace"]["status"] = event[1]
+            self.store_event(data=data, project_id=self.project.id)
+
+        queries = [
+            (["transaction.status"], [0, 6, 10]),
+            ("transaction.status", [0, 6, 10]),
+            (["-transaction.status"], [10, 6, 0]),
+            ("-transaction.status", [10, 6, 0]),
+        ]
+
+        for orderby, expected in queries:
+            result = transactions.query(
+                selected_columns=["transaction", "transaction.status"],
+                query="",
+                orderby=orderby,
+                params={
+                    "organization_id": self.organization.id,
+                    "project_id": [self.project.id],
+                    "start": before_now(minutes=12),
+                    "end": before_now(minutes=8),
+                },
+                referrer="discover",
+            )
+
+            data = result["data"]
+            assert [x["transaction.status"] for x in data] == expected
+
+    def test_transaction_status(self):
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["transaction"] = "/test_transaction/success"
+        data["contexts"]["trace"]["status"] = "ok"
+        self.store_event(data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["transaction"] = "/test_transaction/aborted"
+        data["contexts"]["trace"]["status"] = "aborted"
+        self.store_event(data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["transaction"] = "/test_transaction/already_exists"
+        data["contexts"]["trace"]["status"] = "already_exists"
+        self.store_event(data, project_id=self.project.id)
+
+        result = transactions.query(
+            selected_columns=["transaction.status"],
+            query="",
+            params=self.params,
+            referrer="discover",
+        )
+        data = result["data"]
+        assert len(data) == 4
+        assert {
+            data[0]["transaction.status"],
+            data[1]["transaction.status"],
+            data[2]["transaction.status"],
+            data[3]["transaction.status"],
+        } == {0, 10, 6}
+
+    def test_project_in_condition_with_or(self):
+        project2 = self.create_project(organization=self.organization)
+        event_data = load_data("transaction", timestamp=before_now(seconds=3))
+        self.store_event(data=event_data, project_id=project2.id)
+        expected = sorted([self.project.slug])
+
+        result = transactions.query(
+            selected_columns=["project"],
+            query=f"project:{self.project.slug} or event.type:transaction",
+            params={
+                "organization_id": self.organization.id,
+                "project_id": [self.project.id, project2.id],
+                "start": self.two_min_ago,
+                "end": self.now,
+            },
+            orderby="project",
+            referrer="discover",
+        )
+        data = result["data"]
+        assert len(data) == len(expected)
+        assert [item["project"] for item in data] == expected
+
+    def test_project_mapping(self):
+        other_project = self.create_project(organization=self.organization)
+        self.params["project_id"] = [other_project.id]
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["transaction"] = "hello"
+        self.store_event(data, project_id=other_project.id)
+
+        result = transactions.query(
+            selected_columns=["project", "message"],
+            query="",
+            params=self.params,
+            orderby="project",
+            referrer="discover",
+        )
+
+        data = result["data"]
+        assert len(data) == 1
+        assert data[0]["project"] == other_project.slug
+
+    def test_sorting_and_reverse_sorting_project_name(self):
+        project_ids = []
+        for project_name in ["a" * 32, "z" * 32, "m" * 32]:
+            other_project = self.create_project(organization=self.organization, slug=project_name)
+            project_ids.append(other_project.id)
+            data = load_data("transaction", timestamp=before_now(minutes=1))
+            self.store_event(data, project_id=other_project.id)
+
+        self.params["project_id"] = project_ids
+
+        result = transactions.query(
+            selected_columns=["project", "message"],
+            query="",
+            params=self.params,
+            orderby="-project",
+            referrer="test_discover_query",
+        )
+        data = result["data"]
+        assert len(data) == 3
+        assert [item["project"] for item in data] == ["z" * 32, "m" * 32, "a" * 32]
+
+        result = transactions.query(
+            selected_columns=["project", "message"],
+            query="",
+            params=self.params,
+            orderby="project",
+            referrer="test_discover_query",
+        )
+        data = result["data"]
+        assert len(data) == 3
+        assert [item["project"] for item in data] == ["a" * 32, "m" * 32, "z" * 32]
+
+    def test_tags_colliding_with_fields(self):
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["tags"] = [["id", "new"]]
+        event = self.store_event(data, project_id=self.project.id)
+
+        tests = [
+            ("id", "", sorted([self.event.event_id, event.event_id])),
+            ("id", f"id:{event.event_id}", [event.event_id]),
+            ("tags[id]", "", ["", "new"]),
+            ("tags[id]", "tags[id]:new", ["new"]),
+        ]
+
+        for column, query, expected in tests:
+            result = transactions.query(
+                selected_columns=[column],
+                query=query,
+                params=self.params,
+                orderby=column,
+                referrer="test_discover_query",
+            )
+            data = result["data"]
+            assert len(data) == len(expected), (query, expected)
+            assert [item[column] for item in data] == expected
+
+    def test_tags_orderby(self):
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["tags"] = [["key1", "value2"]]
+        self.store_event(data, project_id=self.project.id)
+
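+        # setUp stores an event tagged key1:value1, so both values appear in the results.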
+        tests = [
+            ("key1", "key1", ["value1", "value2"]),
+            ("key1", "-key1", ["value2", "value1"]),
+            ("tags[key1]", "tags[key1]", ["value1", "value2"]),
+            ("tags[key1]", "-tags[key1]", ["value2", "value1"]),
+        ]
+
+        for column, orderby, expected in tests:
+            result = transactions.query(
+                selected_columns=[column],
+                query="",
+                params=self.params,
+                orderby=orderby,
+                referrer="test_discover_query",
+            )
+            data = result["data"]
+            assert len(data) == len(expected)
+            assert [item[column] for item in data] == expected
+
+    def test_tags_filter(self):
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["tags"] = [["key1", "value2"]]
+        self.store_event(data, project_id=self.project.id)
+
+        tests: list[tuple[str, str, list[str]]] = [
+            ("key1", "", ["value1", "value2"]),
+            ("key1", "has:key1", ["value1", "value2"]),
+            ("key1", "!has:key1", []),
+            ("key1", "key1:value1", ["value1"]),
+            ("key1", "key1:value2", ["value2"]),
+            ("key1", 'key1:""', []),
+            ("key1", "key1:value*", ["value1", "value2"]),
+            ("key1", 'key1:["value1"]', ["value1"]),
+            ("key1", 'key1:["value1", "value2"]', ["value1", "value2"]),
+            ("tags[key1]", "", ["value1", "value2"]),
+            # has does not work with tags[...] syntax
+            # ("tags[key1]", 'has:"tags[key1]"', ["value1", "value2"]),
+            # ("tags[key1]", '!has:"tags[key1]"', []),
+            ("tags[key1]", "tags[key1]:value1", ["value1"]),
+            ("tags[key1]", "tags[key1]:value2", ["value2"]),
+            ("tags[key1]", 'tags[key1]:""', []),
+            ("tags[key1]", "tags[key1]:value*", ["value1", "value2"]),
+            ("tags[key1]", 'tags[key1]:["value1"]', ["value1"]),
+            ("tags[key1]", 'tags[key1]:["value1", "value2"]', ["value1", "value2"]),
+        ]
+
+        for column, query, expected in tests:
+            result = transactions.query(
+                selected_columns=[column],
+                query=query,
+                params=self.params,
+                orderby=column,
+                referrer="test_discover_query",
+            )
+            data = result["data"]
+            assert len(data) == len(expected), (column, query, expected)
+            assert [item[column] for item in data] == expected
+
+    def test_team_key_transactions(self):
+        team1 = self.create_team(organization=self.organization, name="Team A")
+        self.project.add_team(team1)
+
+        team2 = self.create_team(organization=self.organization, name="Team B")
+        self.project.add_team(team2)
+
+        txns = ["/blah_transaction/"]
+        key_txns = [
+            (team1, "/foo_transaction/"),
+            (team2, "/zoo_transaction/"),
+        ]
+
+        for transaction in txns:
+            data = load_data(
+                "transaction",
+                timestamp=before_now(minutes=5),
+            )
+            data["transaction"] = transaction
+            self.store_event(data, project_id=self.project.id)
+
+        for team, transaction in key_txns:
+            data = load_data(
+                "transaction",
+                timestamp=before_now(minutes=5),
+            )
+            data["transaction"] = transaction
+            self.store_event(data, project_id=self.project.id)
+            TeamKeyTransaction.objects.create(
+                organization=self.organization,
+                transaction=transaction,
+                project_team=ProjectTeam.objects.get(project=self.project, team=team),
+            )
+
+        queries = [
+            ("", [("/blah_transaction/", 0), ("/foo_transaction/", 1), ("/zoo_transaction/", 1)]),
+            ("has:team_key_transaction", [("/foo_transaction/", 1), ("/zoo_transaction/", 1)]),
+            ("!has:team_key_transaction", [("/blah_transaction/", 0)]),
+            ("team_key_transaction:true", [("/foo_transaction/", 1), ("/zoo_transaction/", 1)]),
+            ("team_key_transaction:false", [("/blah_transaction/", 0)]),
+        ]
+
+        for query, expected_results in queries:
+            result = transactions.query(
+                selected_columns=["transaction", "team_key_transaction"],
+                query=query,
+                params={
+                    "start": before_now(minutes=10),
+                    "end": before_now(minutes=2),
+                    "project_id": [self.project.id],
+                    "organization_id": self.organization.id,
+                    "team_id": [team1.id, team2.id],
+                },
+                referrer="test_discover_query",
+            )
+
+            data = result["data"]
+            assert len(data) == len(expected_results)
+            assert [
+                (x["transaction"], x["team_key_transaction"])
+                for x in sorted(data, key=lambda k: k["transaction"])
+            ] == expected_results
+
+    def test_timestamp_rounding_fields(self):
+        result = transactions.query(
+            selected_columns=["timestamp.to_hour", "timestamp.to_day"],
+            query="",
+            params=self.params,
+            referrer="test_discover_query",
+        )
+        data = result["data"]
+        assert len(data) == 1
+
+        hour = self.event_time.replace(minute=0, second=0, microsecond=0)
+        day = hour.replace(hour=0)
+        assert [item["timestamp.to_hour"] for item in data] == [f"{iso_format(hour)}+00:00"]
+        assert [item["timestamp.to_day"] for item in data] == [f"{iso_format(day)}+00:00"]
+
+    def test_timestamp_rounding_filters(self):
+        one_day_ago = before_now(days=1)
+        two_day_ago = before_now(days=2)
+        three_day_ago = before_now(days=3)
+        self.params["start"] = three_day_ago
+
+        data = load_data("transaction", timestamp=two_day_ago)
+        self.store_event(data, project_id=self.project.id)
+
+        result = transactions.query(
+            selected_columns=["timestamp.to_hour", "timestamp.to_day"],
+            query=f"timestamp.to_hour:<{iso_format(one_day_ago)} timestamp.to_day:<{iso_format(one_day_ago)}",
+            params=self.params,
+            referrer="test_discover_query",
+        )
+        data = result["data"]
+        assert len(data) == 1
+
+        hour = two_day_ago.replace(minute=0, second=0, microsecond=0)
+        day = hour.replace(hour=0)
+        assert [item["timestamp.to_hour"] for item in data] == [f"{iso_format(hour)}+00:00"]
+        assert [item["timestamp.to_day"] for item in data] == [f"{iso_format(day)}+00:00"]
+
+    def test_user_display(self):
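+        # user.display coalesces email, username, id, then ip_address; the
+        # "bruce@example.com" row comes from the event stored in setUp.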
+        # `user.display` should give `username`
+        data = load_data("transaction", timestamp=self.event_time)
+        data["transaction"] = "a" * 32
+        data["user"] = {"username": "brucew", "id": "1234", "ip": "127.0.0.1"}
+        self.event = self.store_event(data=data, project_id=self.project.id)
+
+        # `user.display` should give `id`
+        data = load_data("transaction", timestamp=self.event_time)
+        data["transaction"] = "a" * 32
+        data["user"] = {"id": "1234", "ip": "127.0.0.1"}
+        self.event = self.store_event(data=data, project_id=self.project.id)
+
+        # `user.display` should give `ip`
+        data = load_data("transaction", timestamp=self.event_time)
+        data["transaction"] = "a" * 32
+        data["user"] = {"ip_address": "127.0.0.1"}
+        self.event = self.store_event(data=data, project_id=self.project.id)
+
+        result = transactions.query(
+            selected_columns=["user.display"],
+            query="",
+            params=self.params,
+            referrer="test_discover_query",
+        )
+        data = result["data"]
+        assert len(data) == 4
+        assert {item["user.display"] for item in data} == {
+            "bruce@example.com",
+            "brucew",
+            "1234",
+            "127.0.0.1",
+        }
+
+    def test_user_display_filter(self):
+        # `user.display` should give `username`
+        data = load_data("transaction", timestamp=self.event_time)
+        data["transaction"] = "a" * 32
+        data["user"] = {"username": "brucew", "ip": "127.0.0.1"}
+        self.event = self.store_event(data=data, project_id=self.project.id)
+
+        result = transactions.query(
+            selected_columns=["user.display"],
+            query="has:user.display user.display:bruce@example.com",
+            params=self.params,
+            referrer="test_discover_query",
+        )
+        data = result["data"]
+        assert len(data) == 1
+        assert [item["user.display"] for item in data] == ["bruce@example.com"]
+
+    def test_using_project_and_project_name(self):
+        project_ids = []
+        for project_name in ["a" * 32, "z" * 32, "m" * 32]:
+            other_project = self.create_project(organization=self.organization, slug=project_name)
+            project_ids.append(other_project.id)
+            data = load_data("transaction", timestamp=self.event_time)
+            self.store_event(data=data, project_id=other_project.id)
+
+        self.params["project_id"] = project_ids
+
+        result = transactions.query(
+            selected_columns=["project.name", "message", "project"],
+            query="",
+            params=self.params,
+            orderby="project.name",
+            referrer="test_discover_query",
+        )
+        data = result["data"]
+        assert len(data) == 3
+        assert [item["project.name"] for item in data] == [
+            "a" * 32,
+            "m" * 32,
+            "z" * 32,
+        ]
+
+    @pytest.mark.xfail(reason="Started failing on ClickHouse 21.8")
+    def test_snql_wip_project_threshold_config(self):
+        ProjectTransactionThreshold.objects.create(
+            project=self.project,
+            organization=self.project.organization,
+            threshold=100,
+            metric=TransactionMetric.DURATION.value,
+        )
+
+        project2 = self.create_project()
+        ProjectTransactionThreshold.objects.create(
+            project=project2,
+            organization=project2.organization,
+            threshold=600,
+            metric=TransactionMetric.LCP.value,
+        )
+
+        events = [
+            ("a" * 10, 300),
+            ("b" * 10, 300),
+            ("c" * 10, 3000),
+            ("d" * 10, 3000),
+        ]
+        for idx, event in enumerate(events):
+            data = load_data(
+                "transaction",
+                timestamp=before_now(minutes=(3 + idx)),
+                start_timestamp=before_now(minutes=(3 + idx), milliseconds=event[1]),
+            )
+            data["event_id"] = f"{idx}" * 32
+            data["transaction"] = event[0]
+            self.store_event(data, project_id=self.project.id)
+
+            if idx % 2:
+                ProjectTransactionThresholdOverride.objects.create(
+                    transaction=event[0],
+                    project=self.project,
+                    organization=self.organization,
+                    threshold=1000,
+                    metric=TransactionMetric.DURATION.value,
+                )
+
+        data = load_data(
+            "transaction", timestamp=before_now(minutes=3), start_timestamp=before_now(minutes=4)
+        )
+        data["transaction"] = "e" * 10
+        self.store_event(data, project_id=project2.id)
+
+        expected_transaction = ["a" * 10, "b" * 10, "c" * 10, "d" * 10, "e" * 10]
+        expected_project_threshold_config = [
+            ["duration", 100],
+            ["duration", 1000],
+            ["duration", 100],
+            ["duration", 1000],
+            ["lcp", 600],
+        ]
+
+        result = transactions.query(
+            selected_columns=["project", "transaction", "project_threshold_config"],
+            query="",
+            params={
+                "start": before_now(minutes=10),
+                "end": before_now(minutes=2),
+                "project_id": [self.project.id, project2.id],
+                "organization_id": self.organization.id,
+            },
+            referrer="test_discover_query",
+        )
+
+        assert len(result["data"]) == 5
+        sorted_data = sorted(result["data"], key=lambda k: k["transaction"])
+
+        assert [row["transaction"] for row in sorted_data] == expected_transaction
+        assert [row["project_threshold_config"][0] for row in sorted_data] == [
+            r[0] for r in expected_project_threshold_config
+        ]
+        assert [row["project_threshold_config"][1] for row in sorted_data] == [
+            r[1] for r in expected_project_threshold_config
+        ]
+
+        ProjectTransactionThreshold.objects.filter(
+            project=project2,
+            organization=project2.organization,
+        ).delete()
+
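+        # With project2's threshold deleted, the default config of ("duration", 300) applies.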
+        expected_transaction = ["e" * 10]
+        expected_project_threshold_config = [["duration", 300]]
+
+        result = transactions.query(
+            selected_columns=["project", "transaction", "project_threshold_config"],
+            query="",
+            params={
+                "start": before_now(minutes=10),
+                "end": before_now(minutes=2),
+                "project_id": [project2.id],
+                "organization_id": self.organization.id,
+            },
+            referrer="test_discover_query",
+        )
+
+        assert len(result["data"]) == 1
+        sorted_data = sorted(result["data"], key=lambda k: k["transaction"])
+
+        assert [row["transaction"] for row in sorted_data] == expected_transaction
+        assert [row["project_threshold_config"][0] for row in sorted_data] == [
+            r[0] for r in expected_project_threshold_config
+        ]
+        assert [row["project_threshold_config"][1] for row in sorted_data] == [
+            r[1] for r in expected_project_threshold_config
+        ]
+
+    def test_to_other_function(self):
+        project = self.create_project()
+
+        for i in range(3):
+            data = load_data("transaction", timestamp=before_now(minutes=5))
+            data["transaction"] = f"/to_other/{i}"
+            data["release"] = "aaaa"
+            self.store_event(data, project_id=project.id)
+
+        data = load_data("transaction", timestamp=before_now(minutes=5))
+        data["transaction"] = "/to_other/y"
+        data["release"] = "yyyy"
+        self.store_event(data, project_id=project.id)
+
+        data = load_data("transaction", timestamp=before_now(minutes=5))
+        data["transaction"] = "/to_other/z"
+        data["release"] = "zzzz"
+        self.store_event(data, project_id=project.id)
+
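+        # to_other() labels rows matching the release "aaaa" as "this" and the rest as
+        # "that"; the optional trailing args override those labels (other label first,
+        # then the match label).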
+        columns1 = ["transaction", 'to_other(release,"aaaa")']
+        columns2 = ["transaction", 'to_other(release,"aaaa",old,new)']
+
+        test_cases = [
+            (columns1, "", ["this", "this", "this", "that", "that"], "to_other_release__aaaa"),
+            (columns2, "", ["new", "new", "new", "old", "old"], "to_other_release__aaaa__old_new"),
+        ]
+
+        for cols, query, expected, alias in test_cases:
+            result = transactions.query(
+                selected_columns=cols,
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=10),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                referrer="test_discover_query",
+            )
+
+            data = result["data"]
+            assert len(data) == len(expected)
+            assert [x[alias] for x in data] == expected
+
+    def test_count_if_function(self):
+        for i in range(3):
+            data = load_data("transaction", timestamp=before_now(minutes=5))
+            data["release"] = "aaaa"
+            self.store_event(data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=before_now(minutes=5))
+        data["release"] = "bbbb"
+        self.store_event(data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=before_now(minutes=5))
+        data["release"] = "cccc"
+        self.store_event(data, project_id=self.project.id)
+
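+        # count_if string comparisons are lexicographic: three releases are "aaaa",
+        # so less-than "bbbb" matches 3 rows and lessOrEquals matches 4.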
+        columns1 = ["count()", "count_if(release,equals,aaaa)", "count_if(release,notEquals,aaaa)"]
+        columns2 = ["count()", "count_if(release,less,bbbb)", "count_if(release,lessOrEquals,bbbb)"]
+
+        test_cases = [
+            (
+                columns1,
+                "",
+                {
+                    "count": 5,
+                    "count_if_release_equals_aaaa": 3,
+                    "count_if_release_notEquals_aaaa": 2,
+                },
+            ),
+            (
+                columns2,
+                "",
+                {
+                    "count": 5,
+                    "count_if_release_less_bbbb": 3,
+                    "count_if_release_lessOrEquals_bbbb": 4,
+                },
+            ),
+        ]
+
+        for cols, query, expected in test_cases:
+            result = transactions.query(
+                selected_columns=cols,
+                query=query,
+                params={
+                    "start": before_now(minutes=10),
+                    "end": before_now(minutes=2),
+                    "project_id": [self.project.id],
+                },
+                referrer="test_discover_query",
+            )
+
+            data = result["data"]
+            assert len(data) == 1
+            assert data[0] == expected
+
+    def test_count_if_function_with_unicode(self):
+        unicode_phrase1 = "\u716e\u6211\u66f4\u591a\u7684\u98df\u7269\uff0c\u6211\u9913\u4e86"
+        unicode_phrase2 = "\u53cd\u6b63\u611b\u60c5\u4e0d\u5c31\u90a3\u6837"
+        for i in range(3):
+            data = load_data("transaction", timestamp=before_now(minutes=5))
+            data["release"] = unicode_phrase1
+            self.store_event(data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=before_now(minutes=5))
+        data["release"] = unicode_phrase2
+        self.store_event(data, project_id=self.project.id)
+
+        columns1 = [
+            "count()",
+            f"count_if(release,equals,{unicode_phrase1})",
+            f"count_if(release,notEquals,{unicode_phrase1})",
+        ]
+
+        test_cases = [
+            (
+                columns1,
+                "",
+                {
+                    "count": 4,
+                    "count_if_release_equals__u716e_u6211_u66f4_u591a_u7684_u98df_u7269_uff0c_u6211_u9913_u4e86": 3,
+                    "count_if_release_notEquals__u716e_u6211_u66f4_u591a_u7684_u98df_u7269_uff0c_u6211_u9913_u4e86": 1,
+                },
+            ),
+        ]
+
+        for cols, query, expected in test_cases:
+            result = transactions.query(
+                selected_columns=cols,
+                query=query,
+                params={
+                    "start": before_now(minutes=10),
+                    "end": before_now(minutes=2),
+                    "project_id": [self.project.id],
+                },
+                referrer="test_discover_query",
+            )
+
+            data = result["data"]
+            assert len(data) == 1
+            assert data[0] == expected
+
+    def test_failure_count_function(self):
+        project = self.create_project()
+
+        data = load_data("transaction", timestamp=before_now(minutes=5))
+        data["transaction"] = "/failure_count/success"
+        self.store_event(data, project_id=project.id)
+
+        data = load_data("transaction", timestamp=before_now(minutes=5))
+        data["transaction"] = "/failure_count/unknown"
+        data["contexts"]["trace"]["status"] = "unknown_error"
+        self.store_event(data, project_id=project.id)
+
+        for i in range(6):
+            data = load_data("transaction", timestamp=before_now(minutes=5))
+            data["transaction"] = f"/failure_count/{i}"
+            data["contexts"]["trace"]["status"] = "unauthenticated"
+            self.store_event(data, project_id=project.id)
+
+        data = load_data("transaction", timestamp=before_now(minutes=5))
+        data["transaction"] = "/failure_count/0"
+        data["contexts"]["trace"]["status"] = "unauthenticated"
+        self.store_event(data, project_id=project.id)
+
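+        # failure_count() excludes "ok", "cancelled" and "unknown" statuses, so only
+        # the unauthenticated events count as failures.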
+        queries = [
+            ("", 8, True),
+            ("failure_count():>0", 6, True),
+            ("failure_count():>0", 8, False),
+        ]
+
+        for query, expected_length, use_aggregate_conditions in queries:
+            result = transactions.query(
+                selected_columns=["transaction", "failure_count()"],
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=10),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                use_aggregate_conditions=use_aggregate_conditions,
+                referrer="discover",
+            )
+            data = result["data"]
+
+            assert len(data) == expected_length
+            assert data[0]["failure_count"] == 2
+            assert data[1]["failure_count"] == 1
+
+    def test_apdex_function(self):
+        project = self.create_project()
+
+        ProjectTransactionThreshold.objects.create(
+            project=project,
+            organization=project.organization,
+            threshold=400,
+            metric=TransactionMetric.DURATION.value,
+        )
+
+        ProjectTransactionThresholdOverride.objects.create(
+            project=project,
+            transaction="/apdex/ace",
+            organization=project.organization,
+            threshold=400,
+            metric=TransactionMetric.LCP.value,
+        )
+
+        project2 = self.create_project()
+
+        events = [
+            ("ace", 400),
+            ("ace", 400),
+            ("one", 400),
+            ("one", 400),
+            ("two", 3000),
+            ("two", 3000),
+            ("three", 300),
+            ("three", 3000),
+            ("zorp", 300),
+            ("zorp", 3000),
+        ]
+        for idx, event in enumerate(events):
+            data = load_data(
+                "transaction",
+                timestamp=before_now(minutes=(5 + idx)),
+                start_timestamp=before_now(minutes=(5 + idx), milliseconds=event[1]),
+            )
+            data["measurements"]["lcp"]["value"] = 3000
+            data["event_id"] = f"{idx}" * 32
+            data["transaction"] = f"/apdex/{event[0]}"
+            data["user"] = {"email": f"{idx}@example.com"}
+
+            if event[0] == "zorp":
+                self.store_event(data, project_id=project2.id)  # No custom thresholds for project2
+            else:
+                self.store_event(data, project_id=project.id)
+
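+        # apdex(T) = (satisfied + tolerable / 2) / count, where satisfied means the
+        # metric is <= T and tolerable means it is <= 4 * T.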
+        queries = [
+            ("", [0.5, 0.5, 0.25, 0.0, 0.25], ["apdex(100)"], "apdex_100"),
+            ("", [0.0, 1.0, 0.5, 0.0, 0.5], ["apdex()"], "apdex"),
+            ("apdex(100):<0.5", [0.25, 0.0, 0.25], ["apdex(100)"], "apdex_100"),
+            ("apdex():>0", [1.0, 0.5, 0.5], ["apdex()"], "apdex"),
+        ]
+
+        for query, expected_apdex, col, alias in queries:
+            result = transactions.query(
+                selected_columns=["transaction"] + col,
+                query=query,
+                orderby="transaction",
+                referrer="discover",
+                params={
+                    "start": before_now(minutes=30),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id, project2.id],
+                    "organization_id": self.organization.id,
+                },
+                use_aggregate_conditions=True,
+            )
+            data = result["data"]
+            assert len(data) == len(expected_apdex)
+            assert [
+                x[alias] for x in sorted(data, key=lambda k: k["transaction"])
+            ] == expected_apdex
+
+    def test_count_miserable_function(self):
+        project = self.create_project()
+
+        ProjectTransactionThreshold.objects.create(
+            project=project,
+            organization=project.organization,
+            threshold=400,
+            metric=TransactionMetric.DURATION.value,
+        )
+
+        ProjectTransactionThresholdOverride.objects.create(
+            project=project,
+            transaction="/count_miserable/ace",
+            organization=project.organization,
+            threshold=400,
+            metric=TransactionMetric.LCP.value,
+        )
+
+        project2 = self.create_project()
+
+        events = [
+            ("ace", 400),
+            ("ace", 400),
+            ("one", 400),
+            ("one", 400),
+            ("two", 3000),
+            ("two", 3000),
+            ("three", 300),
+            ("three", 3000),
+            ("zorp", 300),
+            ("zorp", 3000),
+        ]
+        for idx, event in enumerate(events):
+            data = load_data(
+                "transaction",
+                timestamp=before_now(minutes=(5 + idx)),
+                start_timestamp=before_now(minutes=(5 + idx), milliseconds=event[1]),
+            )
+            data["measurements"]["lcp"]["value"] = 3000
+            data["event_id"] = f"{idx}" * 32
+            data["transaction"] = f"/count_miserable/{event[0]}"
+            data["user"] = {"email": f"{idx}@example.com"}
+
+            if event[0] == "zorp":
+                self.store_event(data, project_id=project2.id)  # No custom thresholds for project2
+            else:
+                self.store_event(data, project_id=project.id)
+
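+        # count_miserable counts unique users whose metric exceeded 4x the threshold.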
+        queries = [
+            (
+                "",
+                [0, 0, 1, 2, 1],
+                ["count_miserable(user,100)"],
+                "count_miserable_user_100",
+            ),
+            ("", [2, 0, 1, 2, 1], ["count_miserable(user)"], "count_miserable_user"),
+            (
+                "count_miserable(user,100):<2",
+                [0, 0, 1, 1],
+                ["count_miserable(user,100)"],
+                "count_miserable_user_100",
+            ),
+            (
+                "count_miserable(user):>0",
+                [2, 1, 2, 1],
+                ["count_miserable(user)"],
+                "count_miserable_user",
+            ),
+        ]
+
+        for query, expected_count_miserable, col, alias in queries:
+            result = transactions.query(
+                selected_columns=["transaction"] + col,
+                query=query,
+                orderby="transaction",
+                referrer="discover",
+                params={
+                    "start": before_now(minutes=30),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id, project2.id],
+                    "organization_id": self.organization.id,
+                },
+                use_aggregate_conditions=True,
+            )
+
+            data = result["data"]
+            assert len(data) == len(expected_count_miserable)
+            assert [
+                x[alias] for x in sorted(data, key=lambda k: k["transaction"])
+            ] == expected_count_miserable
+
+    def test_user_misery_function(self):
+        project = self.create_project()
+
+        ProjectTransactionThreshold.objects.create(
+            project=project,
+            organization=project.organization,
+            threshold=400,
+            metric=TransactionMetric.DURATION.value,
+        )
+
+        ProjectTransactionThresholdOverride.objects.create(
+            project=project,
+            transaction="/user_misery/ace",
+            organization=project.organization,
+            threshold=400,
+            metric=TransactionMetric.LCP.value,
+        )
+
+        project2 = self.create_project()
+
+        events = [
+            ("ace", 400),
+            ("ace", 400),
+            ("one", 400),
+            ("one", 400),
+            ("two", 3000),
+            ("two", 3000),
+            ("three", 300),
+            ("three", 3000),
+            ("zorp", 300),
+            ("zorp", 3000),
+        ]
+        for idx, event in enumerate(events):
+            data = load_data(
+                "transaction",
+                timestamp=before_now(minutes=(5 + idx)),
+                start_timestamp=before_now(minutes=(5 + idx), milliseconds=event[1]),
+            )
+            data["measurements"]["lcp"]["value"] = 3000
+            data["event_id"] = f"{idx}" * 32
+            data["transaction"] = f"/user_misery/{event[0]}"
+            data["user"] = {"email": f"{idx}@example.com"}
+
+            if event[0] == "zorp":
+                self.store_event(data, project_id=project2.id)  # No custom thresholds for project2
+            else:
+                self.store_event(data, project_id=project.id)
+
+        queries = [
+            (
+                "",
+                [0.0492, 0.0492, 0.0575, 0.0659, 0.0575],
+                ["user_misery(100)"],
+                "user_misery_100",
+            ),
+            ("", [0.0659, 0.0492, 0.0575, 0.0659, 0.0575], ["user_misery()"], "user_misery"),
+            (
+                "user_misery(100):<0.06",
+                [0.0492, 0.0492, 0.0575, 0.0575],
+                ["user_misery(100)"],
+                "user_misery_100",
+            ),
+            (
+                "user_misery():>0.05",
+                [0.0659, 0.0575, 0.0659, 0.0575],
+                ["user_misery()"],
+                "user_misery",
+            ),
+        ]
+
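+        # user_misery applies beta smoothing: (miserable_users + 5.8875) / (unique_users + 117.75),
+        # e.g. 0 of 2 miserable users gives ~0.0492 and 2 of 2 gives ~0.0659.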
+        similar = lambda a, b: abs(a - b) < 0.001
+
+        for query, expected_user_misery, col, alias in queries:
+            result = transactions.query(
+                selected_columns=["transaction"] + col,
+                referrer="discover",
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=30),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id, project2.id],
+                    "organization_id": self.organization.id,
+                },
+                use_aggregate_conditions=True,
+            )
+
+            data = result["data"]
+            assert len(data) == len(expected_user_misery)
+            for i, misery in enumerate(sorted(data, key=lambda k: k["transaction"])):
+                assert similar(misery[alias], expected_user_misery[i])
+
+    def test_count(self):
+        project = self.create_project()
+
+        for i in range(6):
+            data = load_data("transaction", timestamp=before_now(minutes=5))
+            data["transaction"] = "/count/6"
+            self.store_event(data, project_id=project.id)
+        for i in range(8):
+            data = load_data("transaction", timestamp=before_now(minutes=5))
+            data["transaction"] = "/count/8"
+            self.store_event(data, project_id=project.id)
+
+        queries = [
+            ("", 2, (6, 8), True),
+            ("count():>6", 2, (6, 8), False),
+            ("count():>6", 1, (8,), True),
+        ]
+
+        for query, expected_length, expected_counts, use_aggregate_conditions in queries:
+            result = transactions.query(
+                selected_columns=["transaction", "count()"],
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=10),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                referrer="discover",
+                use_aggregate_conditions=use_aggregate_conditions,
+            )
+            data = result["data"]
+
+            assert len(data) == expected_length
+            for index, count in enumerate(data):
+                assert count["count"] == expected_counts[index]
+
+    def test_compare_numeric_aggregate_function(self):
+        project = self.create_project()
+
+        for i in range(6):
+            data = load_data(
+                "transaction",
+                timestamp=before_now(minutes=3),
+                start_timestamp=before_now(minutes=4 + i),
+            )
+            data["transaction"] = "/percentile"
+            self.store_event(data, project_id=project.id)
+
+        fields = [
+            (
+                [
+                    "transaction",
+                    "p50(measurements.lcp)",
+                    "compare_numeric_aggregate(p50_measurements_lcp,greater,2000)",
+                ],
+                "",
+            ),
+            (
+                [
+                    "transaction",
+                    "p50(measurements.lcp)",
+                    "compare_numeric_aggregate(p50_measurements_lcp,less,2000)",
+                ],
+                "",
+            ),
+        ]
+
+        expected_results = [
+            ("compare_numeric_aggregate_p50_measurements_lcp_greater_2000", 1),
+            ("compare_numeric_aggregate_p50_measurements_lcp_less_2000", 0),
+        ]
+
+        for i, test_case in enumerate(fields):
+            selected, query = test_case
+            result = transactions.query(
+                referrer="discover",
+                selected_columns=selected,
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=10),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                use_aggregate_conditions=True,
+            )
+            alias, expected_value = expected_results[i]
+            data = result["data"]
+
+            assert data[0][alias] == expected_value
+
+    def test_last_seen(self):
+        project = self.create_project()
+
+        expected_timestamp = before_now(minutes=3)
+        string_condition_timestamp = before_now(minutes=4).strftime("%Y-%m-%dT%H:%M:%S+00:00")
+
+        data = load_data("transaction", timestamp=expected_timestamp)
+        data["transaction"] = "/last_seen"
+        self.store_event(data, project_id=project.id)
+
+        for i in range(6):
+            data = load_data("transaction", timestamp=before_now(minutes=i + 4))
+            data["transaction"] = "/last_seen"
+            self.store_event(data, project_id=project.id)
+
+        queries = [
+            ("", 1, True),
+            (f"last_seen():>{string_condition_timestamp}", 1, True),
+            ("last_seen():>0", 1, False),
+        ]
+
+        for query, expected_length, use_aggregate_conditions in queries:
+            result = transactions.query(
+                selected_columns=["transaction", "last_seen()"],
+                query=query,
+                referrer="discover",
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=10),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                use_aggregate_conditions=use_aggregate_conditions,
+            )
+            data = result["data"]
+
+            assert len(data) == expected_length
+            assert data[0]["last_seen"] == expected_timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00")
+
+    def test_latest_event(self):
+        project = self.create_project()
+
+        expected_timestamp = before_now(minutes=3)
+        data = load_data("transaction", timestamp=expected_timestamp)
+        data["transaction"] = "/latest_event"
+        stored_event = self.store_event(data, project_id=project.id)
+
+        for i in range(6):
+            data = load_data("transaction", timestamp=before_now(minutes=i + 4))
+            data["transaction"] = "/latest_event"
+            self.store_event(data, project_id=project.id)
+
+        result = transactions.query(
+            selected_columns=["transaction", "latest_event()"],
+            query="",
+            orderby="transaction",
+            referrer="discover",
+            params={
+                "start": before_now(minutes=10),
+                "end": before_now(minutes=2),
+                "project_id": [project.id],
+            },
+            use_aggregate_conditions=False,
+        )
+        data = result["data"]
+
+        assert len(data) == 1
+        assert data[0]["latest_event"] == stored_event.event_id
+
+    def test_failure_rate(self):
+        project = self.create_project()
+
+        for i in range(6):
+            data = load_data("transaction", timestamp=before_now(minutes=5))
+            data["transaction"] = "/failure_rate/over"
+            data["contexts"]["trace"]["status"] = "unauthenticated"
+            self.store_event(data, project_id=project.id)
+        for i in range(4):
+            data = load_data("transaction", timestamp=before_now(minutes=5))
+            data["transaction"] = "/failure_rate/over"
+            self.store_event(data, project_id=project.id)
+        for i in range(7):
+            data = load_data("transaction", timestamp=before_now(minutes=5))
+            data["transaction"] = "/failure_rate/under"
+            self.store_event(data, project_id=project.id)
+        for i in range(3):
+            data = load_data("transaction", timestamp=before_now(minutes=5))
+            data["transaction"] = "/failure_rate/under"
+            data["contexts"]["trace"]["status"] = "unauthenticated"
+            self.store_event(data, project_id=project.id)
+
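+        # "over" fails 6 of 10 events (0.6) and "under" fails 3 of 10 (0.3).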
+        queries = [
+            ("", 2, True),
+            ("failure_rate():>0.5", 1, True),
+            ("failure_rate():>0.5", 2, False),
+        ]
+
+        for query, expected_length, use_aggregate_conditions in queries:
+            result = transactions.query(
+                selected_columns=["transaction", "failure_rate()"],
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=10),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                use_aggregate_conditions=use_aggregate_conditions,
+                referrer="discover",
+            )
+            data = result["data"]
+
+            assert len(data) == expected_length
+            assert data[0]["failure_rate"] == 0.6
+            if expected_length > 1:
+                assert data[1]["failure_rate"] == 0.3
+
+    def _create_percentile_events(self, project):
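+        # Stores six "/p50" transactions whose durations span 1-6 minutes
+        # (timestamp minus start_timestamp), shared by the percentile tests below.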
+        for i in range(6):
+            start = before_now(minutes=3)
+            end = start - timedelta(minutes=1 + i)
+            data = load_data(
+                "transaction",
+                timestamp=start,
+                start_timestamp=end,
+            )
+            data["transaction"] = "/p50"
+            self.store_event(data, project_id=project.id)
+
+    def test_percentile(self):
+        project = self.create_project()
+
+        self._create_percentile_events(project)
+
+        queries = [
+            ("", 1, True),
+            ("percentile(transaction.duration, 0.7):>0", 1, False),
+            ("percentile(transaction.duration, 0.7):>500000", 0, True),
+            ("percentile(transaction.duration, 0.7):>100000", 1, True),
+        ]
+
+        for query, expected_length, use_aggregate_conditions in queries:
+            result = transactions.query(
+                referrer="discover",
+                selected_columns=[
+                    "transaction",
+                    "percentile(transaction.duration, 0.7)",
+                    "percentile(transaction.duration, 0.5)",
+                ],
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=20),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                use_aggregate_conditions=use_aggregate_conditions,
+            )
+            data = result["data"]
+
+            assert len(data) == expected_length
+            if expected_length > 0:
+                assert data[0]["percentile_transaction_duration_0_7"] == 270000
+                assert data[0]["percentile_transaction_duration_0_5"] == 210000
+
+    def test_p50(self):
+        project = self.create_project()
+
+        self._create_percentile_events(project)
+
+        queries = [
+            ("", 1, True),
+            ("p50(transaction.duration):>0", 1, False),
+            ("p50(transaction.duration):>500000", 0, True),
+            ("p50(transaction.duration):>100000", 1, True),
+        ]
+
+        for query, expected_length, use_aggregate_conditions in queries:
+            result = transactions.query(
+                referrer="discover",
+                selected_columns=[
+                    "transaction",
+                    "p50(transaction.duration)",
+                ],
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=20),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                use_aggregate_conditions=use_aggregate_conditions,
+            )
+            data = result["data"]
+
+            assert len(data) == expected_length
+            if expected_length > 0:
+                assert data[0]["p50_transaction_duration"] == 210000
+
+    def test_p75(self):
+        project = self.create_project()
+
+        self._create_percentile_events(project)
+
+        queries = [
+            ("", 1, True),
+            ("p75(transaction.duration):>0", 1, False),
+            ("p75(transaction.duration):>500000", 0, True),
+            ("p75(transaction.duration):>100000", 1, True),
+        ]
+
+        for query, expected_length, use_aggregate_conditions in queries:
+            result = transactions.query(
+                selected_columns=[
+                    "transaction",
+                    "p75(transaction.duration)",
+                ],
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=20),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                use_aggregate_conditions=use_aggregate_conditions,
+                referrer="discover",
+            )
+            data = result["data"]
+
+            assert len(data) == expected_length
+            if expected_length > 0:
+                assert data[0]["p75_transaction_duration"] == 285000
+
+    def test_p95(self):
+        project = self.create_project()
+
+        self._create_percentile_events(project)
+
+        queries = [
+            ("", 1, True),
+            ("p95(transaction.duration):>0", 1, False),
+            ("p95(transaction.duration):>500000", 0, True),
+            ("p95(transaction.duration):>100000", 1, True),
+        ]
+
+        for query, expected_length, use_aggregate_conditions in queries:
+            result = transactions.query(
+                selected_columns=[
+                    "transaction",
+                    "p95(transaction.duration)",
+                ],
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=20),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                use_aggregate_conditions=use_aggregate_conditions,
+                referrer="discover",
+            )
+            data = result["data"]
+
+            assert len(data) == expected_length
+            if expected_length > 0:
+                assert data[0]["p95_transaction_duration"] == 345000
+
+    def test_p99(self):
+        project = self.create_project()
+
+        self._create_percentile_events(project)
+
+        queries = [
+            ("", 1, True),
+            ("p99(transaction.duration):>0", 1, False),
+            ("p99(transaction.duration):>500000", 0, True),
+            ("p99(transaction.duration):>100000", 1, True),
+        ]
+
+        for query, expected_length, use_aggregate_conditions in queries:
+            result = transactions.query(
+                selected_columns=[
+                    "transaction",
+                    "p99(transaction.duration)",
+                ],
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=20),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                use_aggregate_conditions=use_aggregate_conditions,
+                referrer="discover",
+            )
+            data = result["data"]
+
+            assert len(data) == expected_length
+            if expected_length > 0:
+                assert data[0]["p99_transaction_duration"] == 357000
+
+    def test_p100(self):
+        project = self.create_project()
+
+        self._create_percentile_events(project)
+
+        queries = [
+            ("", 1, True),
+            ("p100(transaction.duration):>0", 1, False),
+            ("p100(transaction.duration):>500000", 0, True),
+            ("p100(transaction.duration):>100000", 1, True),
+        ]
+
+        for query, expected_length, use_aggregate_conditions in queries:
+            result = transactions.query(
+                selected_columns=[
+                    "transaction",
+                    "p100(transaction.duration)",
+                ],
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=20),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                use_aggregate_conditions=use_aggregate_conditions,
+                referrer="discover",
+            )
+            data = result["data"]
+
+            assert len(data) == expected_length
+            if expected_length > 0:
+                assert data[0]["p100_transaction_duration"] == 360000
+
+    def test_p100_with_measurement(self):
+        project = self.create_project()
+
+        for i in range(6):
+            data = load_data(
+                "transaction",
+                timestamp=before_now(minutes=3),
+                start_timestamp=before_now(minutes=4 + i),
+            )
+            data["transaction"] = "/p100"
+            data["measurements"]["frames_total"] = {"value": 100 * i}
+            data["measurements"]["frames_slow"] = {"value": 50 * i}
+            self.store_event(data, project_id=project.id)
+
+        queries = [
+            ("", 1, True),
+            ("p100(measurements.frames_slow_rate):>0", 1, False),
+            ("p100(measurements.frames_slow_rate):>0.6", 0, True),
+            ("p100(measurements.frames_slow_rate):>0.4", 1, True),
+        ]
+
+        for query, expected_length, use_aggregate_conditions in queries:
+            result = transactions.query(
+                selected_columns=[
+                    "transaction",
+                    "p100(measurements.frames_slow_rate)",
+                ],
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=20),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                use_aggregate_conditions=use_aggregate_conditions,
+                referrer="discover",
+            )
+            data = result["data"]
+
+            assert len(data) == expected_length
+            if expected_length > 0:
+                assert data[0]["p100_measurements_frames_slow_rate"] == 0.5
+
+    def test_count_unique(self):
+        for idx in range(3):
+            data = load_data(
+                "transaction",
+                timestamp=before_now(minutes=3),
+            )
+            data["user"] = {"email": f"{idx}@example.com"}
+            data["tags"] = {"foo": "bar" if idx < 1 else "baz"}
+            self.store_event(data, project_id=self.project.id)
+
+        result = transactions.query(
+            selected_columns=["count_unique(user.display)", "count_unique(foo)"],
+            query="",
+            params={
+                "start": before_now(minutes=4),
+                "end": before_now(minutes=2),
+                "project_id": [self.project.id],
+            },
+            use_aggregate_conditions=True,
+            referrer="discover",
+        )
+        data = result["data"]
+
+        assert len(data) == 1
+        assert data[0]["count_unique_user_display"] == 3
+        assert data[0]["count_unique_foo"] == 2
+
+    def test_min_max(self):
+        """Testing both min and max since they're so similar"""
+        for idx in range(3):
+            start = before_now(minutes=3)
+            end = start - timedelta(minutes=1 + idx)
+            data = load_data(
+                "transaction",
+                timestamp=start,
+                start_timestamp=end,
+            )
+            self.store_event(data, project_id=self.project.id)
+
+        result = transactions.query(
+            selected_columns=[
+                "min(transaction.duration)",
+                "max(transaction.duration)",
+            ],
+            query="",
+            params={
+                "start": before_now(minutes=4),
+                "end": before_now(minutes=2),
+                "project_id": [self.project.id],
+            },
+            use_aggregate_conditions=True,
+            referrer="discover",
+        )
+        data = result["data"]
+
+        assert len(data) == 1
+        assert data[0]["min_transaction_duration"] == 60000
+        assert data[0]["max_transaction_duration"] == 180000
+
+    def test_stats_functions(self):
+        for idx in range(3):
+            start = before_now(minutes=3)
+            end = start - timedelta(minutes=1 + idx)
+            data = load_data(
+                "transaction",
+                timestamp=start,
+                start_timestamp=end,
+            )
+            self.store_event(data, project_id=self.project.id)
+
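+        # Durations are 1, 2 and 3 minutes (60000-180000ms), so the sample
+        # variance is 3.6e9 and the standard deviation is 60000.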
+        queries = [
+            ("var(transaction.duration)", "var_transaction_duration", 3600000000),
+            ("stddev(transaction.duration)", "stddev_transaction_duration", 60000),
+            # This is a nonsense cov&corr column, but gives us a consistent result for tests
+            (
+                "cov(transaction.duration,transaction.duration)",
+                "cov_transaction_duration_transaction_duration",
+                3600000000,
+            ),
+            (
+                "corr(transaction.duration,transaction.duration)",
+                "corr_transaction_duration_transaction_duration",
+                1,
+            ),
+        ]
+
+        for column, alias, expected in queries:
+            result = transactions.query(
+                selected_columns=[column],
+                query="",
+                params={
+                    "start": before_now(minutes=4),
+                    "end": before_now(minutes=2),
+                    "project_id": [self.project.id],
+                },
+                use_aggregate_conditions=True,
+                referrer="discover",
+            )
+            data = result["data"]
+
+            assert len(data) == 1, column
+            assert data[0][alias] == expected, column
+
+    def test_count_at_least(self):
+        end = before_now(minutes=3)
+        start_one_minute = end - timedelta(minutes=1)
+        start_two_minute = end - timedelta(minutes=2)
+        for idx in range(3):
+            data = load_data(
+                "transaction",
+                timestamp=end,
+                start_timestamp=start_one_minute if idx < 1 else start_two_minute,
+            )
+            self.store_event(data, project_id=self.project.id)
+
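+        # One 1-minute and two 2-minute transactions: all three are at least 60000ms,
+        # and two are at least 120000ms.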
+        result = transactions.query(
+            selected_columns=[
+                "count_at_least(transaction.duration,60000)",
+                "count_at_least(transaction.duration,120000)",
+            ],
+            query="",
+            params={
+                "start": before_now(minutes=4),
+                "end": before_now(minutes=2),
+                "project_id": [self.project.id],
+            },
+            use_aggregate_conditions=True,
+            referrer="discover",
+        )
+        data = result["data"]
+
+        assert len(data) == 1
+        assert data[0]["count_at_least_transaction_duration_60000"] == 3
+        assert data[0]["count_at_least_transaction_duration_120000"] == 2
+
+    def test_eps(self):
+        project = self.create_project()
+
+        for _ in range(6):
+            data = load_data(
+                "transaction",
+                timestamp=before_now(minutes=3),
+            )
+            data["transaction"] = "/eps"
+            self.store_event(data, project_id=project.id)
+
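+        # 6 events over the 2 minute query window: eps() = 6 / 120s = 0.05,
+        # eps(10) = 6 / 10s = 0.6 and eps(60) = 6 / 60s = 0.1; tps is an alias.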
+        queries = [
+            ("", 1, True),
+            ("eps():>1", 0, True),
+            ("eps():>1", 1, False),
+            ("eps(10):>0.5", 1, True),
+            ("tps():>1", 0, True),
+            ("tps():>1", 1, False),
+            ("tps(10):>0.5", 1, True),
+        ]
+
+        for query, expected_length, use_aggregate_conditions in queries:
+            result = transactions.query(
+                selected_columns=[
+                    "transaction",
+                    "eps()",
+                    "eps(10)",
+                    "eps(60)",
+                    "tps()",
+                    "tps(10)",
+                    "tps(60)",
+                ],
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=4),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                use_aggregate_conditions=use_aggregate_conditions,
+                referrer="discover",
+            )
+            data = result["data"]
+
+            assert len(data) == expected_length
+            if expected_length > 0:
+                assert data[0]["eps"] == 0.05
+                assert data[0]["eps_10"] == 0.6
+                assert data[0]["eps_60"] == 0.1
+                assert data[0]["tps"] == 0.05
+                assert data[0]["tps_10"] == 0.6
+                assert data[0]["tps_60"] == 0.1
+
+    def test_epm(self):
+        project = self.create_project()
+
+        for _ in range(6):
+            data = load_data(
+                "transaction",
+                timestamp=before_now(minutes=3),
+            )
+            data["transaction"] = "/epm"
+            self.store_event(data, project_id=project.id)
+
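+        # 6 events over the 2 minute query window: epm() = 6 / 2min = 3,
+        # epm(10) normalizes the 10s bucket to a per-minute rate (36) and epm(60) = 6 / 1min = 6;
+        # tpm is an alias.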
+        queries = [
+            ("", 1, True),
+            ("epm():>3", 0, True),
+            ("epm():>3", 1, False),
+            ("epm(10):>3", 1, True),
+            ("tpm():>3", 0, True),
+            ("tpm():>3", 1, False),
+            ("tpm(10):>3", 1, True),
+        ]
+
+        for query, expected_length, use_aggregate_conditions in queries:
+            result = transactions.query(
+                selected_columns=[
+                    "transaction",
+                    "epm()",
+                    "epm(10)",
+                    "epm(60)",
+                    "tpm()",
+                    "tpm(10)",
+                    "tpm(60)",
+                ],
+                query=query,
+                orderby="transaction",
+                params={
+                    "start": before_now(minutes=4),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                use_aggregate_conditions=use_aggregate_conditions,
+                referrer="discover",
+            )
+            data = result["data"]
+
+            assert len(data) == expected_length
+            if expected_length > 0:
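+                # 6 events over the 2 minute window: 6/2 = 3 for epm(),
+                # 6/(10/60) = 36 for epm(10) and 6/1 = 6 for epm(60).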
+                assert data[0]["epm"] == 3
+                assert data[0]["epm_10"] == 36.0
+                assert data[0]["epm_60"] == 6
+                assert data[0]["tpm"] == 3
+                assert data[0]["tpm_10"] == 36.0
+                assert data[0]["tpm_60"] == 6
+
+    def test_transaction_status_filter(self):
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["transaction"] = "/test_transaction/success"
+        data["contexts"]["trace"]["status"] = "ok"
+        self.store_event(data, project_id=self.project.id)
+        self.store_event(data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=before_now(minutes=1))
+        data["transaction"] = "/test_transaction/already_exists"
+        data["contexts"]["trace"]["status"] = "already_exists"
+        self.store_event(data, project_id=self.project.id)
+
+        def run_query(query, expected_statuses, message):
+            result = transactions.query(
+                selected_columns=["transaction.status"],
+                query=query,
+                params=self.params,
+                referrer="discover",
+            )
+            data = result["data"]
+            assert len(data) == len(expected_statuses), message
+            assert sorted(item["transaction.status"] for item in data) == sorted(
+                expected_statuses
+            ), message
+
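+        # transaction.status comes back as its numeric span status code
+        # ("ok" == 0, "already_exists" == 6); the extra "ok" row comes from
+        # the transaction stored in setUp.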
+        run_query("has:transaction.status transaction.status:ok", [0, 0, 0], "status 'ok'")
+        run_query(
+            "has:transaction.status transaction.status:[ok,already_exists]",
+            [0, 0, 0, 6],
+            "status 'ok' or 'already_exists'",
+        )
+        run_query("has:transaction.status !transaction.status:ok", [6], "status not 'ok'")
+        run_query(
+            "has:transaction.status !transaction.status:already_exists",
+            [0, 0, 0],
+            "status not 'already_exists'",
+        )
+        run_query(
+            "has:transaction.status !transaction.status:[ok,already_exists]",
+            [],
+            "status not 'ok' and not 'already_exists'",
+        )
+        run_query("!has:transaction.status", [], "status nonexistant")
+
+    def test_orderby_aggregate_function(self):
+        project = self.create_project()
+
+        data = load_data("transaction", timestamp=before_now(minutes=5))
+        data["transaction"] = "/failure_count/success"
+        self.store_event(data, project_id=project.id)
+
+        data = load_data("transaction", timestamp=before_now(minutes=5))
+        data["transaction"] = "/failure_count/unknown"
+        data["contexts"]["trace"]["status"] = "unknown_error"
+        self.store_event(data, project_id=project.id)
+
+        for i in range(6):
+            data = load_data("transaction", timestamp=before_now(minutes=5))
+            data["transaction"] = f"/failure_count/{i}"
+            data["contexts"]["trace"]["status"] = "unauthenticated"
+            self.store_event(data, project_id=project.id)
+
+        data = load_data("transaction", timestamp=before_now(minutes=5))
+        data["transaction"] = "/failure_count/0"
+        data["contexts"]["trace"]["status"] = "unauthenticated"
+        self.store_event(data, project_id=project.id)
+
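+        # failure_count() ignores the "ok", "cancelled" and "unknown" statuses, so
+        # the success and unknown_error events yield 0 while "/failure_count/0",
+        # stored twice with "unauthenticated", yields 2.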
+        orderbys = [
+            (["failure_count()"], [0, 0, 1, 1, 1, 1, 1, 2]),
+            ("failure_count()", [0, 0, 1, 1, 1, 1, 1, 2]),
+            (["-failure_count()"], [2, 1, 1, 1, 1, 1, 0, 0]),
+            ("-failure_count()", [2, 1, 1, 1, 1, 1, 0, 0]),
+            ("failure_count", [0, 0, 1, 1, 1, 1, 1, 2]),
+            ("-failure_count", [2, 1, 1, 1, 1, 1, 0, 0]),
+        ]
+
+        for orderby, expected in orderbys:
+            result = transactions.query(
+                selected_columns=["transaction", "failure_count()"],
+                query="",
+                orderby=orderby,
+                params={
+                    "start": before_now(minutes=10),
+                    "end": before_now(minutes=2),
+                    "project_id": [project.id],
+                },
+                referrer="discover",
+            )
+            data = result["data"]
+
+            assert [x["failure_count"] for x in data] == expected
+
+    @pytest.mark.skip("setting snuba config is too slow")
+    def test_spans_op_array_field(self):
+        trace_context = {
+            "parent_span_id": "8988cec7cc0779c1",
+            "type": "trace",
+            "op": "http.server",
+            "trace_id": "a7d67cf796774551a95be6543cacd459",
+            "span_id": "babaae0d4b7512d9",
+            "status": "ok",
+            "hash": "a" * 16,
+            "exclusive_time": 1.2345,
+        }
+        data = load_data(
+            "transaction", timestamp=before_now(minutes=10), trace_context=trace_context, spans=[]
+        )
+        self.store_event(data=data, project_id=self.project.id)
+
+        queries = [
+            ("has:spans_op", 1),
+            ("!has:spans_op", 0),
+        ]
+
+        for query, expected_len in queries:
+            result = transactions.query(
+                selected_columns=["spans_op"],
+                query=query,
+                params={
+                    "organization_id": self.organization.id,
+                    "project_id": [self.project.id],
+                    "start": before_now(minutes=12),
+                    "end": before_now(minutes=8),
+                },
+                referrer="discover",
+            )
+            data = result["data"]
+            assert len(data) == expected_len
+
+    def test_reflective_types(self):
+        results = transactions.query(
+            selected_columns=[
+                "p50(measurements.lcp)",
+                "p50(measurements.foo)",
+                "p50(spans.foo)",
+            ],
+            query="event.type:transaction",
+            params=self.params,
+            use_aggregate_conditions=True,
+            referrer="discover",
+        )
+
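+        # Known measurements resolve to their duration type, unknown measurement
+        # keys fall back to "number", and span op breakdowns are always durations.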
+        assert results["meta"]["fields"] == {
+            "p50_measurements_lcp": "duration",
+            "p50_measurements_foo": "number",
+            "p50_spans_foo": "duration",
+        }
+
+    def test_measurements(self):
+        event_data = load_data("transaction", timestamp=before_now(seconds=3))
+        self.store_event(data=event_data, project_id=self.project.id)
+
+        results = transactions.query(
+            selected_columns=[
+                "measurements.fp",
+                "measurements.fcp",
+                "measurements.lcp",
+                "measurements.fid",
+                "measurements.cls",
+                "measurements.does_not_exist",
+            ],
+            query="event.type:transaction !transaction:{}".format("a" * 32),
+            params=self.params,
+            referrer="discover",
+        )
+
+        data = results["data"]
+        assert len(data) == 1
+        assert data[0]["measurements.fp"] == event_data["measurements"]["fp"]["value"]
+        assert data[0]["measurements.fcp"] == event_data["measurements"]["fcp"]["value"]
+        assert data[0]["measurements.lcp"] == event_data["measurements"]["lcp"]["value"]
+        assert data[0]["measurements.fid"] == event_data["measurements"]["fid"]["value"]
+        assert data[0]["measurements.cls"] == event_data["measurements"]["cls"]["value"]
+        assert data[0]["measurements.does_not_exist"] is None
+
+    def test_conditions_with_special_columns(self):
+        for val in ["b", "c", "d"]:
+            data = load_data("transaction")
+            data["timestamp"] = iso_format(self.one_min_ago)
+            data["transaction"] = val * 32
+            data["logentry"] = {"formatted": val * 32}
+            data["tags"] = {"sub_customer.is-Enterprise-42": val * 32}
+            self.store_event(data=data, project_id=self.project.id)
+
+        result = transactions.query(
+            selected_columns=["title", "message"],
+            query="event.type:transaction (title:{} OR message:{})".format("b" * 32, "c" * 32),
+            params={
+                "project_id": [self.project.id],
+                "start": self.two_min_ago,
+                "end": self.now,
+            },
+            orderby="title",
+            referrer="discover",
+        )
+
+        data = result["data"]
+        assert len(data) == 2
+        assert data[0]["title"] == "b" * 32
+        assert data[1]["title"] == "c" * 32
+
+        result = transactions.query(
+            selected_columns=["title", "sub_customer.is-Enterprise-42"],
+            query="event.type:transaction (title:{} AND sub_customer.is-Enterprise-42:{})".format(
+                "b" * 32, "b" * 32
+            ),
+            params={
+                "project_id": [self.project.id],
+                "start": self.two_min_ago,
+                "end": self.now,
+            },
+            orderby="title",
+            referrer="discover",
+        )
+
+        data = result["data"]
+        assert len(data) == 1
+        assert data[0]["title"] == "b" * 32
+        assert data[0]["sub_customer.is-Enterprise-42"] == "b" * 32
+
+    def test_conditions_with_aggregates(self):
+        events = [("a", 2), ("b", 3), ("c", 4)]
+        for ev in events:
+            val = ev[0] * 32
+            for i in range(ev[1]):
+                data = load_data("transaction")
+                data["timestamp"] = iso_format(self.one_min_ago)
+                data["transaction"] = f"{val}-{i}"
+                data["logentry"] = {"formatted": val}
+                data["tags"] = {"trek": val}
+                self.store_event(data=data, project_id=self.project.id)
+
+        result = transactions.query(
+            selected_columns=["trek", "count()"],
+            query="event.type:transaction (trek:{} OR trek:{}) AND count():>2".format(
+                "a" * 32, "b" * 32
+            ),
+            params={
+                "project_id": [self.project.id],
+                "start": self.two_min_ago,
+                "end": self.now,
+            },
+            orderby="trek",
+            use_aggregate_conditions=True,
+            referrer="discover",
+        )
+
+        data = result["data"]
+        assert len(data) == 1
+        assert data[0]["trek"] == "b" * 32
+        assert data[0]["count"] == 3
+
+    def test_conditions_with_nested_aggregates(self):
+        events = [("a", 2), ("b", 3), ("c", 4)]
+        for ev in events:
+            val = ev[0] * 32
+            for i in range(ev[1]):
+                data = load_data("transaction")
+                data["timestamp"] = iso_format(self.one_min_ago)
+                data["transaction"] = f"{val}-{i}"
+                data["logentry"] = {"formatted": val}
+                data["tags"] = {"trek": val}
+                self.store_event(data=data, project_id=self.project.id)
+
+        result = transactions.query(
+            selected_columns=["trek", "count()"],
+            query="(event.type:transaction AND (trek:{} AND (transaction:*{}* AND count():>2)))".format(
+                "b" * 32, "b" * 32
+            ),
+            params={
+                "project_id": [self.project.id],
+                "start": self.two_min_ago,
+                "end": self.now,
+            },
+            orderby="trek",
+            use_aggregate_conditions=True,
+            referrer="discover",
+        )
+
+        data = result["data"]
+        assert len(data) == 1
+        assert data[0]["trek"] == "b" * 32
+        assert data[0]["count"] == 3
+
+        with pytest.raises(InvalidSearchQuery) as err:
+            transactions.query(
+                selected_columns=["trek", "transaction"],
+                query="(event.type:transaction AND (trek:{} AND (transaction:*{}* AND count():>2)))".format(
+                    "b" * 32, "b" * 32
+                ),
+                referrer="discover",
+                params={
+                    "project_id": [self.project.id],
+                    "start": self.two_min_ago,
+                    "end": self.now,
+                },
+                orderby="trek",
+                use_aggregate_conditions=True,
+            )
+        assert "used in a condition but is not a selected column" in str(err)
+
+    def test_conditions_with_timestamps(self):
+        events = [("b", 1), ("c", 2), ("d", 3)]
+        for t, ev in enumerate(events):
+            val = ev[0] * 32
+            for i in range(ev[1]):
+                data = load_data("transaction", timestamp=self.now - timedelta(seconds=3 * t + 1))
+                data["transaction"] = f"{val}"
+                self.store_event(data=data, project_id=self.project.id)
+
+        results = transactions.query(
+            selected_columns=["transaction", "count()"],
+            query="event.type:transaction AND (timestamp:<{} OR timestamp:>{})".format(
+                iso_format(self.now - timedelta(seconds=5)),
+                iso_format(self.now - timedelta(seconds=3)),
+            ),
+            params={
+                "project_id": [self.project.id],
+                "start": self.two_min_ago,
+                "end": self.now,
+            },
+            orderby="transaction",
+            use_aggregate_conditions=True,
+            referrer="discover",
+        )
+
+        data = results["data"]
+        assert len(data) == 3
+        assert data[0]["transaction"] == "a" * 32
+        assert data[0]["count"] == 1
+        assert data[1]["transaction"] == "b" * 32
+        assert data[1]["count"] == 1
+        assert data[2]["transaction"] == "d" * 32
+        assert data[2]["count"] == 3
+
+    def test_timestamp_rollup_filter(self):
+        event_hour = self.event_time.replace(minute=0, second=0)
+        result = transactions.query(
+            selected_columns=["project.id", "user", "release"],
+            query="timestamp.to_hour:" + iso_format(event_hour),
+            params=self.params,
+            referrer="discover",
+        )
+        data = result["data"]
+        assert len(data) == 1
+        assert data[0]["project.id"] == self.project.id
+        assert data[0]["user"] == "id:99"
+        assert data[0]["release"] == "first-release"
+
+        assert len(result["meta"]["fields"]) == 3
+        assert result["meta"]["fields"] == {
+            "project.id": "integer",
+            "user": "string",
+            "release": "string",
+        }
+
+    def test_count_with_or(self):
+        data = load_data("transaction", timestamp=before_now(seconds=3))
+        data["transaction"] = "a" * 32
+        self.store_event(data=data, project_id=self.project.id)
+
+        results = transactions.query(
+            selected_columns=["transaction", "count()"],
+            query="event.type:transaction AND (count():<1 OR count():>0)",
+            params=self.params,
+            orderby="transaction",
+            use_aggregate_conditions=True,
+            referrer="discover",
+        )
+
+        data = results["data"]
+        assert len(data) == 1
+        assert data[0]["transaction"] == "a" * 32
+        assert data[0]["count"] == 2
+
+    def test_array_join(self):
+        data = load_data("transaction", timestamp=before_now(seconds=90))
+        data["transaction"] = "foo"
+        data["measurements"] = {
+            "fp": {"value": 1000},
+            "fcp": {"value": 1000},
+            "lcp": {"value": 1000},
+        }
+        self.store_event(data=data, project_id=self.project.id)
+
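+        # array_join expands each measurements key into its own result row.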
+        results = transactions.query(
+            selected_columns=["array_join(measurements_key)"],
+            query="transaction:foo",
+            params=self.params,
+            functions_acl=["array_join"],
+            referrer="discover",
+        )
+        assert {"fcp", "fp", "lcp"} == {
+            row["array_join_measurements_key"] for row in results["data"]
+        }
+
+    def test_access_to_private_functions(self):
+        # using private functions directly without access should error
+        with pytest.raises(InvalidSearchQuery, match="array_join: no access to private function"):
+            transactions.query(
+                selected_columns=["array_join(tags.key)"],
+                query="",
+                params={
+                    "project_id": [self.project.id],
+                    "start": self.two_min_ago,
+                    "end": self.now,
+                },
+                referrer="discover",
+            )
+
+        # using private functions in an aggregation without access should error
+        with pytest.raises(InvalidSearchQuery, match="histogram: no access to private function"):
+            for array_column in ARRAY_COLUMNS:
+                transactions.query(
+                    selected_columns=[f"histogram({array_column}_value, 1,0,1)"],
+                    query=f"histogram({array_column}_value, 1,0,1):>0",
+                    params={
+                        "project_id": [self.project.id],
+                        "start": self.two_min_ago,
+                        "end": self.now,
+                    },
+                    use_aggregate_conditions=True,
+                    referrer="discover",
+                )
+
+        # using private functions in an aggregation without access should error
+        # with auto aggregation on
+        for array_column in ARRAY_COLUMNS:
+            with pytest.raises(
+                InvalidSearchQuery, match="histogram: no access to private function"
+            ):
+                transactions.query(
+                transactions.query(
+                    selected_columns=["count()"],
+                    query=f"histogram({array_column}_value, 1,0,1):>0",
+                    params={
+                        "project_id": [self.project.id],
+                        "start": self.two_min_ago,
+                        "end": self.now,
+                    },
+                    referrer="discover",
+                    auto_aggregations=True,
+                    use_aggregate_conditions=True,
+                )
+
+    def test_sum_array_combinator(self):
+        data = load_data("transaction", timestamp=before_now(seconds=3))
+        data["measurements"] = {
+            "fp": {"value": 1000},
+            "fcp": {"value": 1000},
+            "lcp": {"value": 1000},
+        }
+        self.store_event(data=data, project_id=self.project.id)
+
+        results = transactions.query(
+            selected_columns=["sumArray(measurements_value)"],
+            query="!transaction:{}".format("a" * 32),
+            params=self.params,
+            # make sure to opt in to gain access to the function
+            functions_acl=["sumArray"],
+            referrer="discover",
+            # -Array combinator is only supported in SnQL
+        )
+        assert results["data"][0]["sumArray_measurements_value"] == 3000.0
+
+    def test_span_op_breakdowns(self):
+        event_data = load_data("transaction", timestamp=before_now(seconds=3))
+        self.store_event(data=event_data, project_id=self.project.id)
+
+        results = transactions.query(
+            selected_columns=[
+                "spans.http",
+                "spans.db",
+                "spans.resource",
+                "spans.browser",
+                "spans.total.time",
+                "spans.does_not_exist",
+            ],
+            query="event.type:transaction !transaction:{}".format("a" * 32),
+            params=self.params,
+            referrer="discover",
+        )
+
+        data = results["data"]
+        assert len(data) == 1
+        span_ops = event_data["breakdowns"]["span_ops"]
+        assert data[0]["spans.http"] == span_ops["ops.http"]["value"]
+        assert data[0]["spans.db"] == span_ops["ops.db"]["value"]
+        assert data[0]["spans.resource"] == span_ops["ops.resource"]["value"]
+        assert data[0]["spans.browser"] == span_ops["ops.browser"]["value"]
+        assert data[0]["spans.total.time"] == span_ops["total.time"]["value"]
+        assert data[0]["spans.does_not_exist"] is None
+
+
+class TransactionsArithmeticTest(SnubaTestCase, TestCase):
+    def setUp(self):
+        super().setUp()
+
+        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
+        self.now = before_now()
+        event_data = load_data("transaction")
+        # Half of duration so we don't get weird rounding differences when comparing the results
+        event_data["breakdowns"]["span_ops"]["ops.http"]["value"] = 1500
+        event_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=30))
+        event_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=30, seconds=3))
+        self.store_event(data=event_data, project_id=self.project.id)
+        self.params = {"project_id": [self.project.id], "start": self.day_ago, "end": self.now}
+        self.query = "event.type:transaction"
+
+    def test_simple(self):
+        results = transactions.query(
+            selected_columns=[
+                "spans.http",
+                "transaction.duration",
+            ],
+            equations=["spans.http / transaction.duration"],
+            query=self.query,
+            params=self.params,
+            referrer="discover",
+        )
+        assert len(results["data"]) == 1
+        result = results["data"][0]
+        assert result["equation[0]"] == result["spans.http"] / result["transaction.duration"]
+
+    def test_multiple_equations(self):
+        results = transactions.query(
+            selected_columns=[
+                "spans.http",
+                "transaction.duration",
+            ],
+            equations=[
+                "spans.http / transaction.duration",
+                "transaction.duration / spans.http",
+                "1500 + transaction.duration",
+            ],
+            query=self.query,
+            params=self.params,
+            referrer="discover",
+        )
+        assert len(results["data"]) == 1
+        result = results["data"][0]
+        assert result["equation[0]"] == result["spans.http"] / result["transaction.duration"]
+        assert result["equation[1]"] == result["transaction.duration"] / result["spans.http"]
+        assert result["equation[2]"] == 1500 + result["transaction.duration"]
+
+    def test_invalid_field(self):
+        with pytest.raises(ArithmeticValidationError):
+            transactions.query(
+                selected_columns=[
+                    "spans.http",
+                    "transaction.status",
+                ],
+                # while transaction_status is a uint8, there's no reason we should allow arith on it
+                equations=["spans.http / transaction.status"],
+                query=self.query,
+                params=self.params,
+                referrer="discover",
+            )
+
+    def test_invalid_function(self):
+        with pytest.raises(ArithmeticValidationError):
+            transactions.query(
+                selected_columns=[
+                    "p50(transaction.duration)",
+                    "last_seen()",
+                ],
+                equations=["p50(transaction.duration) / last_seen()"],
+                query=self.query,
+                params=self.params,
+                referrer="discover",
+            )
+
+    def test_unselected_field(self):
+        with pytest.raises(InvalidSearchQuery):
+            transactions.query(
+                selected_columns=[
+                    "spans.http",
+                ],
+                equations=["spans.http / transaction.duration"],
+                query=self.query,
+                params=self.params,
+                referrer="discover",
+            )
+
+    def test_unselected_function(self):
+        with pytest.raises(InvalidSearchQuery):
+            transactions.query(
+                selected_columns=[
+                    "p50(transaction.duration)",
+                ],
+                equations=["p50(transaction.duration) / p100(transaction.duration)"],
+                query=self.query,
+                params=self.params,
+                referrer="discover",
+            )
+
+    def test_orderby_equation(self):
+        for i in range(1, 3):
+            event_data = load_data("transaction")
+            # Fractions of the 3000ms duration so we don't get weird rounding differences when comparing the results
+            event_data["breakdowns"]["span_ops"]["ops.http"]["value"] = 300 * i
+            event_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=30))
+            event_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=30, seconds=3))
+            self.store_event(data=event_data, project_id=self.project.id)
+        query_params = {
+            "selected_columns": [
+                "spans.http",
+                "transaction.duration",
+            ],
+            "equations": [
+                "spans.http / transaction.duration",
+                "transaction.duration / spans.http",
+                "1500 + transaction.duration",
+            ],
+            "orderby": ["equation[0]"],
+            "query": self.query,
+            "params": self.params,
+            "referrer": "discover",
+        }
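+        # spans.http is 300 and 600 for the events stored here and 1500 for the
+        # setUp event, so equation[0] evaluates to 0.1, 0.2 and 0.5 against the
+        # 3000ms duration.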
+        results = transactions.query(**query_params)
+        assert len(results["data"]) == 3
+        assert [result["equation[0]"] for result in results["data"]] == [0.1, 0.2, 0.5]
+
+        query_params["orderby"] = ["equation[1]"]
+        results = transactions.query(**query_params)
+        assert len(results["data"]) == 3
+        assert [result["equation[1]"] for result in results["data"]] == [2, 5, 10]
+
+        query_params["orderby"] = ["-equation[0]"]
+        results = transactions.query(**query_params)
+        assert len(results["data"]) == 3
+        assert [result["equation[0]"] for result in results["data"]] == [0.5, 0.2, 0.1]
+
+    def test_orderby_nonexistent_equation(self):
+        with pytest.raises(InvalidSearchQuery):
+            transactions.query(
+                selected_columns=[
+                    "spans.http",
+                    "transaction.duration",
+                ],
+                orderby=["equation[1]"],
+                query=self.query,
+                params=self.params,
+                referrer="discover",
+            )
+
+    def test_equation_without_field_or_function(self):
+        with pytest.raises(InvalidSearchQuery):
+            transactions.query(
+                selected_columns=[
+                    "spans.http",
+                    "transaction.duration",
+                ],
+                equations=[
+                    "5 + 5",
+                ],
+                query=self.query,
+                params=self.params,
+                referrer="discover",
+            )
+
+    def test_aggregate_equation(self):
+        results = transactions.query(
+            selected_columns=[
+                "p50(transaction.duration)",
+            ],
+            equations=["p50(transaction.duration) / 2"],
+            query=self.query,
+            params=self.params,
+            referrer="discover",
+        )
+        assert len(results["data"]) == 1
+        result = results["data"][0]
+        assert result["equation[0]"] == result["p50_transaction_duration"] / 2
+
+    def test_multiple_aggregate_equation(self):
+        results = transactions.query(
+            selected_columns=[
+                "p50(transaction.duration)",
+                "count()",
+            ],
+            equations=["p50(transaction.duration) + 2", "p50(transaction.duration) / count()"],
+            query=self.query,
+            params=self.params,
+            referrer="discover",
+        )
+        assert len(results["data"]) == 1
+        result = results["data"][0]
+        assert result["equation[0]"] == result["p50_transaction_duration"] + 2
+        assert result["equation[1]"] == result["p50_transaction_duration"] / result["count"]
+
+    def test_multiple_operators(self):
+        results = transactions.query(
+            selected_columns=[
+                "p50(transaction.duration)",
+                "p100(transaction.duration)",
+                "count()",
+            ],
+            equations=[
+                "p50(transaction.duration) / p100(transaction.duration) * 100",
+                "100 + count() * 5 - 3 / 5",
+                "count() + count() / count() * count() - count()",
+            ],
+            query=self.query,
+            params=self.params,
+            referrer="discover",
+        )
+        assert len(results["data"]) == 1
+        result = results["data"][0]
+        assert (
+            result["equation[0]"]
+            == result["p50_transaction_duration"] / result["p100_transaction_duration"] * 100
+        )
+        assert result["equation[1]"] == 100 + result["count"] * 5 - 3 / 5
+        assert (
+            result["equation[2]"]
+            == result["count"]
+            + result["count"] / result["count"] * result["count"]
+            - result["count"]
+        )
+
+    def test_nan_equation_results(self):
+        for i in range(1, 3):
+            event_data = load_data("transaction")
+            # Zero out the span op so the equations below divide by zero
+            event_data["breakdowns"]["span_ops"]["ops.http"]["value"] = 0
+            event_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=30))
+            event_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=30, seconds=3))
+            self.store_event(data=event_data, project_id=self.project.id)
+        query_params = {
+            "selected_columns": [
+                "spans.http",
+                "transaction.duration",
+            ],
+            "equations": [
+                "transaction.duration / spans.http",  # inf
+                "spans.http / spans.http",  # nan
+            ],
+            "orderby": ["equation[0]"],
+            "query": self.query,
+            "params": self.params,
+            "referrer": "discover",
+        }
+        results = transactions.query(**query_params)
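+        # Division by zero yields inf/nan, which come back as None and sort after
+        # real numbers regardless of direction.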
+        assert len(results["data"]) == 3
+        assert [result["equation[0]"] for result in results["data"]] == [2, None, None]
+
+        query_params["orderby"] = ["equation[1]"]
+        results = transactions.query(**query_params)
+        assert len(results["data"]) == 3
+        assert [result["equation[1]"] for result in results["data"]] == [1, None, None]
+
+        query_params["orderby"] = ["-equation[0]"]
+        results = transactions.query(**query_params)
+        assert len(results["data"]) == 3
+        assert [result["equation[0]"] for result in results["data"]] == [2, None, None]

+ 604 - 0
tests/sentry/snuba/test_transactions_timeseries_query.py

@@ -0,0 +1,604 @@
+from datetime import timedelta
+from unittest.mock import patch
+
+import pytest
+
+from sentry.exceptions import InvalidSearchQuery
+from sentry.models.transaction_threshold import ProjectTransactionThreshold, TransactionMetric
+from sentry.snuba import transactions
+from sentry.snuba.dataset import Dataset
+from sentry.testutils.cases import SnubaTestCase, TestCase
+from sentry.testutils.helpers.datetime import before_now, iso_format
+from sentry.utils.samples import load_data
+
+ARRAY_COLUMNS = ["measurements", "span_op_breakdowns"]
+
+
+class TimeseriesBase(SnubaTestCase, TestCase):
+    def setUp(self):
+        super().setUp()
+
+        self.one_min_ago = before_now(minutes=1)
+        self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
+
+        # transaction event
+        data = load_data("transaction", timestamp=self.day_ago + timedelta(hours=1))
+        data["event_id"] = "a" * 32
+        data["transaction"] = "very bad"
+        data["user"] = {"id": 1}
+        data["tags"] = {"important": "yes"}
+        self.store_event(data=data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=self.day_ago + timedelta(hours=1, minutes=1))
+        data["event_id"] = "b" * 32
+        data["transaction"] = "oh my"
+        data["user"] = {}
+        data["tags"] = {"important": "no"}
+        self.store_event(data=data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=self.day_ago + timedelta(hours=2, minutes=1))
+        data["event_id"] = "c" * 32
+        data["transaction"] = "very bad"
+        data["user"] = {}
+        data["tags"] = {"important": "yes"}
+        self.store_event(data=data, project_id=self.project.id)
+
+
+class TransactionsTimeseriesQueryTest(TimeseriesBase):
+    def test_invalid_field_in_function(self):
+        with pytest.raises(InvalidSearchQuery):
+            transactions.timeseries_query(
+                selected_columns=["min(transaction)"],
+                query="transaction:api.issue.delete",
+                referrer="test_discover_query",
+                params={"project_id": [self.project.id]},
+                rollup=1800,
+            )
+
+    def test_missing_start_and_end(self):
+        with pytest.raises(InvalidSearchQuery):
+            transactions.timeseries_query(
+                selected_columns=["count()"],
+                query="transaction:api.issue.delete",
+                referrer="test_discover_query",
+                params={"project_id": [self.project.id]},
+                rollup=1800,
+            )
+
+    def test_no_aggregations(self):
+        with pytest.raises(InvalidSearchQuery):
+            transactions.timeseries_query(
+                selected_columns=["transaction", "title"],
+                query="transaction:api.issue.delete",
+                referrer="test_discover_query",
+                params={
+                    "start": self.day_ago,
+                    "end": self.day_ago + timedelta(hours=2),
+                    "project_id": [self.project.id],
+                },
+                rollup=1800,
+            )
+
+    def test_field_alias(self):
+        result = transactions.timeseries_query(
+            selected_columns=["p95()"],
+            query="event.type:transaction transaction:api.issue.delete",
+            referrer="test_discover_query",
+            params={
+                "start": self.day_ago,
+                "end": self.day_ago + timedelta(hours=2),
+                "project_id": [self.project.id],
+            },
+            rollup=3600,
+        )
+        assert len(result.data["data"]) == 3
+
+    def test_failure_rate_field_alias(self):
+        result = transactions.timeseries_query(
+            selected_columns=["failure_rate()"],
+            query="event.type:transaction transaction:api.issue.delete",
+            referrer="test_discover_query",
+            params={
+                "start": self.day_ago,
+                "end": self.day_ago + timedelta(hours=2),
+                "project_id": [self.project.id],
+            },
+            rollup=3600,
+        )
+        assert len(result.data["data"]) == 3
+
+    def test_aggregate_function(self):
+        result = transactions.timeseries_query(
+            selected_columns=["count()"],
+            query="",
+            referrer="test_discover_query",
+            params={
+                "start": self.day_ago,
+                "end": self.day_ago + timedelta(hours=2),
+                "project_id": [self.project.id],
+            },
+            rollup=3600,
+        )
+        assert len(result.data["data"]) == 3
+        assert [2] == [val["count"] for val in result.data["data"] if "count" in val]
+
+        result = transactions.timeseries_query(
+            selected_columns=["count_unique(user)"],
+            query="",
+            referrer="test_discover_query",
+            params={
+                "start": self.day_ago,
+                "end": self.day_ago + timedelta(hours=2),
+                "project_id": [self.project.id],
+            },
+            rollup=3600,
+        )
+        assert len(result.data["data"]) == 3
+        keys = set()
+        for row in result.data["data"]:
+            keys.update(list(row.keys()))
+        assert "count_unique_user" in keys
+        assert "time" in keys
+
+    def test_comparison_aggregate_function_invalid(self):
+        with pytest.raises(
+            InvalidSearchQuery, match="Only one column can be selected for comparison queries"
+        ):
+            transactions.timeseries_query(
+                selected_columns=["count()", "count_unique(user)"],
+                query="",
+                referrer="test_discover_query",
+                params={
+                    "start": self.day_ago,
+                    "end": self.day_ago + timedelta(hours=2),
+                    "project_id": [self.project.id],
+                },
+                rollup=3600,
+                comparison_delta=timedelta(days=1),
+            )
+
+    def test_comparison_aggregate_function(self):
+        data = load_data("transaction", timestamp=self.day_ago + timedelta(hours=1))
+        data["user"] = {"id": 1}
+        self.store_event(data=data, project_id=self.project.id)
+
+        result = transactions.timeseries_query(
+            selected_columns=["count()"],
+            query="",
+            referrer="test_discover_query",
+            params={
+                "start": self.day_ago,
+                "end": self.day_ago + timedelta(hours=2),
+                "project_id": [self.project.id],
+            },
+            rollup=3600,
+            comparison_delta=timedelta(days=1),
+        )
+        assert len(result.data["data"]) == 3
+        # Comparison counts should all be 0, since there is no comparison period data at all.
+        assert [(0, 0), (3, 0), (0, 0)] == [
+            (val.get("count", 0), val.get("comparisonCount", 0)) for val in result.data["data"]
+        ]
+
+        data = load_data("transaction", timestamp=self.day_ago + timedelta(days=-1, hours=1))
+        data["user"] = {"id": 1}
+        self.store_event(data=data, project_id=self.project.id)
+
+        data = load_data(
+            "transaction", timestamp=self.day_ago + timedelta(days=-1, hours=1, minutes=2)
+        )
+        data["user"] = {"id": 2}
+        self.store_event(data=data, project_id=self.project.id)
+
+        data = load_data(
+            "transaction", timestamp=self.day_ago + timedelta(days=-1, hours=2, minutes=1)
+        )
+        data["user"] = {}
+        self.store_event(data=data, project_id=self.project.id)
+
+        result = transactions.timeseries_query(
+            selected_columns=["count()"],
+            query="",
+            referrer="test_discover_query",
+            params={
+                "start": self.day_ago,
+                "end": self.day_ago + timedelta(hours=2, minutes=1),
+                "project_id": [self.project.id],
+            },
+            rollup=3600,
+            comparison_delta=timedelta(days=1),
+        )
+        assert len(result.data["data"]) == 3
+        # In the second bucket we have 3 events in the current period and 2 in the
+        # comparison, i.e. a 50% increase
+        assert [(0, 0), (3, 2), (0, 0)] == [
+            (val.get("count", 0), val.get("comparisonCount", 0)) for val in result.data["data"]
+        ]
+
+        result = transactions.timeseries_query(
+            selected_columns=["count_unique(user)"],
+            query="",
+            params={
+                "start": self.day_ago,
+                "end": self.day_ago + timedelta(hours=2, minutes=2),
+                "project_id": [self.project.id],
+            },
+            rollup=3600,
+            referrer="test_discover_query",
+            comparison_delta=timedelta(days=1),
+        )
+        assert len(result.data["data"]) == 3
+        # In the second bucket we have 1 unique user in the current period and 2 in
+        # the comparison, i.e. a 50% decrease
+        assert [(0, 0), (1, 2), (0, 0)] == [
+            (val.get("count_unique_user", 0), val.get("comparisonCount", 0))
+            for val in result.data["data"]
+        ]
+
+    def test_count_miserable(self):
+        event_data = load_data("transaction")
+        event_data["transaction"] = "api/foo/"
+        # A tenth of the 3000ms duration so we don't get weird rounding differences when comparing the results
+        event_data["breakdowns"]["span_ops"]["ops.http"]["value"] = 300
+        event_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=30))
+        event_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=30, seconds=3))
+        self.store_event(data=event_data, project_id=self.project.id)
+        ProjectTransactionThreshold.objects.create(
+            project=self.project,
+            organization=self.project.organization,
+            threshold=100,
+            metric=TransactionMetric.DURATION.value,
+        )
+
+        project2 = self.create_project()
+        ProjectTransactionThreshold.objects.create(
+            project=project2,
+            organization=project2.organization,
+            threshold=100,
+            metric=TransactionMetric.DURATION.value,
+        )
+
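+        # The stored transaction runs for ~3s, far above the 100ms project
+        # threshold, so its user counts as miserable.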
+        result = transactions.timeseries_query(
+            selected_columns=["count_miserable(user)"],
+            referrer="test_discover_query",
+            query="transaction:api/foo/",
+            params={
+                "start": self.day_ago,
+                "end": self.day_ago + timedelta(hours=2),
+                "project_id": [self.project.id, project2.id],
+                "organization_id": self.organization.id,
+            },
+            rollup=3600,
+        )
+        assert len(result.data["data"]) == 3
+        assert [1] == [
+            val["count_miserable_user"]
+            for val in result.data["data"]
+            if "count_miserable_user" in val
+        ]
+
+    def test_count_miserable_with_arithmetic(self):
+        event_data = load_data("transaction")
+        event_data["transaction"] = "api/foo/"
+        # A tenth of the 3000ms duration so we don't get weird rounding differences when comparing the results
+        event_data["breakdowns"]["span_ops"]["ops.http"]["value"] = 300
+        event_data["start_timestamp"] = iso_format(self.day_ago + timedelta(minutes=30))
+        event_data["timestamp"] = iso_format(self.day_ago + timedelta(minutes=30, seconds=3))
+        self.store_event(data=event_data, project_id=self.project.id)
+        ProjectTransactionThreshold.objects.create(
+            project=self.project,
+            organization=self.project.organization,
+            threshold=100,
+            metric=TransactionMetric.DURATION.value,
+        )
+
+        project2 = self.create_project()
+        ProjectTransactionThreshold.objects.create(
+            project=project2,
+            organization=project2.organization,
+            threshold=100,
+            metric=TransactionMetric.DURATION.value,
+        )
+
+        result = transactions.timeseries_query(
+            selected_columns=["equation|count_miserable(user) - 100"],
+            referrer="test_discover_query",
+            query="transaction:api/foo/",
+            params={
+                "start": self.day_ago,
+                "end": self.day_ago + timedelta(hours=2),
+                "project_id": [self.project.id, project2.id],
+                "organization_id": self.organization.id,
+            },
+            rollup=3600,
+        )
+        assert len(result.data["data"]) == 3
+        assert [1 - 100] == [
+            val["equation[0]"] for val in result.data["data"] if "equation[0]" in val
+        ]
+
+    def test_equation_function(self):
+        result = transactions.timeseries_query(
+            selected_columns=["equation|count() / 100"],
+            query="",
+            referrer="test_discover_query",
+            params={
+                "start": self.day_ago,
+                "end": self.day_ago + timedelta(hours=2),
+                "project_id": [self.project.id],
+            },
+            rollup=3600,
+        )
+        assert len(result.data["data"]) == 3
+        assert [0.02] == [val["equation[0]"] for val in result.data["data"] if "equation[0]" in val]
+
+        result = transactions.timeseries_query(
+            selected_columns=["equation|count_unique(user) / 100"],
+            query="",
+            params={
+                "start": self.day_ago,
+                "end": self.day_ago + timedelta(hours=2),
+                "project_id": [self.project.id],
+            },
+            rollup=3600,
+            referrer="test_discover_query",
+        )
+        assert len(result.data["data"]) == 3
+        keys = set()
+        for row in result.data["data"]:
+            keys.update(list(row.keys()))
+        assert "equation[0]" in keys
+        assert "time" in keys
+
+    def test_zerofilling(self):
+        result = transactions.timeseries_query(
+            selected_columns=["count()"],
+            query="",
+            referrer="test_discover_query",
+            params={
+                "start": self.day_ago,
+                "end": self.day_ago + timedelta(hours=3),
+                "project_id": [self.project.id],
+            },
+            rollup=3600,
+        )
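+        # Zerofilled buckets omit the aggregate key, so only the two populated
+        # buckets contribute "count" values.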
+        assert len(result.data["data"]) == 4, "Should have empty results"
+        assert [2, 1] == [
+            val["count"] for val in result.data["data"] if "count" in val
+        ], result.data["data"]
+
+    def test_conditional_filter(self):
+        project2 = self.create_project(organization=self.organization)
+        project3 = self.create_project(organization=self.organization)
+
+        data = load_data("transaction", timestamp=self.one_min_ago)
+        self.store_event(data=data, project_id=project2.id)
+
+        data = load_data("transaction", timestamp=self.one_min_ago)
+        self.store_event(data=data, project_id=project3.id)
+
+        result = transactions.timeseries_query(
+            selected_columns=["count()"],
+            query=f"project:{self.project.slug} OR project:{project2.slug}",
+            params={
+                "start": before_now(minutes=5),
+                "end": before_now(seconds=1),
+                "project_id": [self.project.id, project2.id, project3.id],
+            },
+            rollup=3600,
+            referrer="test_discover_query",
+        )
+
+        data = result.data["data"]
+        assert len([d for d in data if "count" in d]) == 1
+        for d in data:
+            if "count" in d:
+                assert d["count"] == 1
+
+    def test_nested_conditional_filter(self):
+        project2 = self.create_project(organization=self.organization)
+
+        data = load_data("transaction", timestamp=self.one_min_ago)
+        data["release"] = "a" * 32
+        self.store_event(data=data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=self.one_min_ago)
+        data["release"] = "b" * 32
+        self.store_event(data=data, project_id=self.project.id)
+
+        data = load_data("transaction", timestamp=self.one_min_ago)
+        data["release"] = "c" * 32
+        self.store_event(data=data, project_id=project2.id)
+
+        data = load_data("transaction", timestamp=self.one_min_ago)
+        data["release"] = "a" * 32
+        self.store_event(data=data, project_id=project2.id)
+
+        result = transactions.timeseries_query(
+            selected_columns=["release", "count()"],
+            query="(release:{} OR release:{}) AND project:{}".format(
+                "a" * 32, "b" * 32, self.project.slug
+            ),
+            params={
+                "start": before_now(minutes=5),
+                "end": before_now(seconds=1),
+                "project_id": [self.project.id, project2.id],
+            },
+            rollup=3600,
+            referrer="test_discover_query",
+        )
+
+        data = result.data["data"]
+        data = result.data["data"]
+        assert len([d for d in data if "count" in d]) == 1
+        for d in data:
+            if "count" in d:
+                assert d["count"] == 2
+
+
+@pytest.mark.skip("These tests are specific to json which we no longer use")
+class TopEventsTimeseriesQueryTest(TimeseriesBase):
+    @patch("sentry.snuba.discover.raw_query")
+    def test_project_filter_adjusts_filter(self, mock_query):
+        """While the function is called with 2 project_ids, we should limit it down to the 1 in top_events"""
+        project2 = self.create_project(organization=self.organization)
+        top_events = {
+            "data": [
+                {
+                    "project": self.project.slug,
+                    "project.id": self.project.id,
+                }
+            ]
+        }
+        start = before_now(minutes=5)
+        end = before_now(seconds=1)
+        transactions.top_events_timeseries(
+            selected_columns=["project", "count()"],
+            params={
+                "start": start,
+                "end": end,
+                "project_id": [self.project.id, project2.id],
+            },
+            rollup=3600,
+            top_events=top_events,
+            timeseries_columns=["count()"],
+            user_query="",
+            orderby=["count()"],
+            limit=10000,
+            organization=self.organization,
+        )
+        mock_query.assert_called_with(
+            aggregations=[["count", None, "count"]],
+            conditions=[],
+            # Should be limited to the project in top_events
+            filter_keys={"project_id": [self.project.id]},
+            selected_columns=[
+                "project_id",
+                [
+                    "transform",
+                    [
+                        ["toString", ["project_id"]],
+                        ["array", [f"'{project.id}'" for project in [self.project, project2]]],
+                        ["array", [f"'{project.slug}'" for project in [self.project, project2]]],
+                        "''",
+                    ],
+                    "project",
+                ],
+            ],
+            start=start,
+            end=end,
+            rollup=3600,
+            orderby=["time", "project_id"],
+            groupby=["time", "project_id"],
+            dataset=Dataset.Discover,
+            limit=10000,
+            referrer=None,
+        )
+
+    @patch("sentry.snuba.discover.raw_query")
+    def test_timestamp_fields(self, mock_query):
+        timestamp1 = before_now(days=2, minutes=5)
+        timestamp2 = before_now(minutes=2)
+        top_events = {
+            "data": [
+                {
+                    "timestamp": iso_format(timestamp1),
+                    "timestamp.to_hour": iso_format(timestamp1.replace(minute=0, second=0)),
+                    "timestamp.to_day": iso_format(timestamp1.replace(hour=0, minute=0, second=0)),
+                },
+                {
+                    "timestamp": iso_format(timestamp2),
+                    "timestamp.to_hour": iso_format(timestamp2.replace(minute=0, second=0)),
+                    "timestamp.to_day": iso_format(timestamp2.replace(hour=0, minute=0, second=0)),
+                },
+            ]
+        }
+        start = before_now(days=3, minutes=10)
+        end = before_now(minutes=1)
+        transactions.top_events_timeseries(
+            selected_columns=["timestamp", "timestamp.to_day", "timestamp.to_hour", "count()"],
+            params={
+                "start": start,
+                "end": end,
+                "project_id": [self.project.id],
+            },
+            rollup=3600,
+            top_events=top_events,
+            timeseries_columns=["count()"],
+            user_query="",
+            orderby=["count()"],
+            limit=10000,
+            organization=self.organization,
+        )
+        to_hour = ["toStartOfHour", ["timestamp"], "timestamp.to_hour"]
+        to_day = ["toStartOfDay", ["timestamp"], "timestamp.to_day"]
+        mock_query.assert_called_with(
+            aggregations=[["count", None, "count"]],
+            conditions=[
+                # Each timestamp field should generate a nested condition.
+                # Within each, the conditions will be ORed together.
+                [
+                    ["timestamp", "=", iso_format(timestamp1)],
+                    ["timestamp", "=", iso_format(timestamp2)],
+                ],
+                [
+                    [
+                        to_day,
+                        "=",
+                        iso_format(timestamp1.replace(hour=0, minute=0, second=0)),
+                    ],
+                    [
+                        to_day,
+                        "=",
+                        iso_format(timestamp2.replace(hour=0, minute=0, second=0)),
+                    ],
+                ],
+                [
+                    [to_hour, "=", iso_format(timestamp1.replace(minute=0, second=0))],
+                    [to_hour, "=", iso_format(timestamp2.replace(minute=0, second=0))],
+                ],
+            ],
+            filter_keys={"project_id": [self.project.id]},
+            selected_columns=[
+                "timestamp",
+                to_day,
+                to_hour,
+            ],
+            start=start,
+            end=end,
+            rollup=3600,
+            orderby=["time", "timestamp", "timestamp.to_day", "timestamp.to_hour"],
+            groupby=["time", "timestamp", "timestamp.to_day", "timestamp.to_hour"],
+            dataset=Dataset.Discover,
+            limit=10000,
+            referrer=None,
+        )
+
+    @patch("sentry.snuba.discover.query")
+    def test_equation_fields_are_auto_added(self, mock_query):
+        start = before_now(minutes=5)
+        end = before_now(seconds=1)
+        transactions.top_events_timeseries(
+            selected_columns=["count()"],
+            equations=["equation|count_unique(user) * 2"],
+            params={"start": start, "end": end, "project_id": [self.project.id]},
+            rollup=3600,
+            timeseries_columns=[],
+            user_query="",
+            orderby=["equation[0]"],
+            limit=10000,
+            organization=self.organization,
+        )
+
+        mock_query.assert_called_with(
+            ["count()"],
+            query="",
+            params={"start": start, "end": end, "project_id": [self.project.id]},
+            equations=["equation|count_unique(user) * 2"],
+            orderby=["equation[0]"],
+            referrer=None,
+            limit=10000,
+            auto_aggregations=True,
+            use_aggregate_conditions=True,
+            include_equation_fields=True,
+        )

File diff suppressed because it is too large
+ 458 - 376
tests/snuba/api/endpoints/test_organization_events.py


Some files were not shown because too many files changed in this diff