Browse Source

feat(trends): Handle single transaction trends (#50808)

- add a new trend type, `any`. If `any` is passed in, the breakpoint
detector will return the last breakpoint found and label it as either an
improvement or a regression
- fix setting granularity in MetricsQueryBuilder
- if `withTimeseries` is passed to the API, return all stats data.
This will be used in the frontend for the single transaction summary, since
we'll need stats data in any case
Dameli Ushbayeva 1 year ago
parent
commit
40adf29b15

+ 19 - 13
src/sentry/api/endpoints/organization_events_trendsv2.py

@@ -27,7 +27,8 @@ logger = logging.getLogger(__name__)
 
 IMPROVED = "improved"
 REGRESSION = "regression"
-TREND_TYPES = [IMPROVED, REGRESSION]
+ANY = "any"
+TREND_TYPES = [IMPROVED, REGRESSION, ANY]
 
 TOP_EVENTS_LIMIT = 50
 EVENTS_PER_QUERY = 10
@@ -201,7 +202,9 @@ class OrganizationEventsNewTrendsStatsEndpoint(OrganizationEventsV2EndpointBase)
                 "trendFunction": None,
             }
 
-            trends_request["sort"] = request.GET.get("sort", "trend_percentage()")
+            trends_request["sort"] = (
+                "" if trend_type == ANY else request.GET.get("sort", "trend_percentage()")
+            )
             trends_request["trendFunction"] = trend_function
             trends_request["data"] = stats_data
 
@@ -217,17 +220,20 @@ class OrganizationEventsNewTrendsStatsEndpoint(OrganizationEventsV2EndpointBase)
 
         def get_stats_data_for_trending_events(results):
             trending_transaction_names_stats = {}
-            for t in results["data"]:
-                transaction_name = t["transaction"]
-                project = t["project"]
-                t_p_key = project + "," + transaction_name
-                if t_p_key in stats_data:
-                    trending_transaction_names_stats[t_p_key] = stats_data[t_p_key]
-                else:
-                    logger.warning(
-                        "trends.trends-request.timeseries.key-mismatch",
-                        extra={"result_key": t_p_key, "timeseries_keys": stats_data.keys()},
-                    )
+            if request.GET.get("withTimeseries", False):
+                trending_transaction_names_stats = stats_data
+            else:
+                for t in results["data"]:
+                    transaction_name = t["transaction"]
+                    project = t["project"]
+                    t_p_key = project + "," + transaction_name
+                    if t_p_key in stats_data:
+                        trending_transaction_names_stats[t_p_key] = stats_data[t_p_key]
+                    else:
+                        logger.warning(
+                            "trends.trends-request.timeseries.key-mismatch",
+                            extra={"result_key": t_p_key, "timeseries_keys": stats_data.keys()},
+                        )
 
             return {
                 "events": self.handle_results_with_meta(

+ 3 - 3
src/sentry/search/events/builder/metrics.py

@@ -68,6 +68,8 @@ class MetricsQueryBuilder(QueryBuilder):
         # always true if this is being called
         kwargs["has_metrics"] = True
         assert dataset is None or dataset in [Dataset.PerformanceMetrics, Dataset.Metrics]
+        if granularity is not None:
+            self._granularity = granularity
         super().__init__(
             # TODO: defaulting to Metrics for now so I don't have to update incidents tests. Should be
             # PerformanceMetrics
@@ -81,8 +83,6 @@ class MetricsQueryBuilder(QueryBuilder):
         if org_id is None or not isinstance(org_id, int):
             raise InvalidSearchQuery("Organization id required to create a metrics query")
         self.organization_id: int = org_id
-        if granularity is not None:
-            self._granularity = granularity
 
     def validate_aggregate_arguments(self) -> None:
         if not self.use_metrics_layer:
@@ -188,7 +188,7 @@ class MetricsQueryBuilder(QueryBuilder):
 
         In special cases granularity can be set manually bypassing the granularity calculation below.
         """
-        if hasattr(self, "_granularity"):
+        if hasattr(self, "_granularity") and getattr(self, "_granularity") is not None:
             return Granularity(self._granularity)
 
         if self.end is None or self.start is None: