@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 from dataclasses import replace
+from datetime import datetime
 from typing import Any, Mapping, Union
 
 from snuba_sdk import (
@@ -26,6 +27,10 @@ from sentry.utils.snuba import raw_snql_query
 FilterTypes = Union[Column, CurriedFunction, Condition, BooleanCondition]
 
 
+ALLOWED_GRANULARITIES = [10, 60, 3600, 86400]
+ALLOWED_GRANULARITIES = sorted(ALLOWED_GRANULARITIES)  # Ensure it's ordered
+
+
 def run_query(request: Request) -> Mapping[str, Any]:
     """
     Entrypoint for executing a metrics query in Snuba.
@@ -44,8 +49,6 @@ def run_query(request: Request) -> Mapping[str, Any]:
     assert len(metrics_query.scope.org_ids) == 1  # Initially only allow 1 org id
     organization_id = metrics_query.scope.org_ids[0]
     tenant_ids = request.tenant_ids or {"organization_id": organization_id}
-    if "use_case_id" not in tenant_ids and metrics_query.scope.use_case_id is not None:
-        tenant_ids["use_case_id"] = metrics_query.scope.use_case_id
     request.tenant_ids = tenant_ids
 
     # Process intervals
@@ -57,11 +60,19 @@ def run_query(request: Request) -> Mapping[str, Any]:
             metrics_query.start, metrics_query.end, metrics_query.rollup.interval
         )
         metrics_query = metrics_query.set_start(start).set_end(end)
+    if metrics_query.rollup.granularity is None:
+        granularity = _resolve_granularity(
+            metrics_query.start, metrics_query.end, metrics_query.rollup.interval
+        )
+        metrics_query = metrics_query.set_rollup(
+            replace(metrics_query.rollup, granularity=granularity)
+        )
 
     # Resolves MRI or public name in metrics_query
     try:
         resolved_metrics_query, mappings = _resolve_metrics_query(metrics_query)
         request.query = resolved_metrics_query
+        request.tenant_ids["use_case_id"] = resolved_metrics_query.scope.use_case_id
     except Exception as e:
         metrics.incr(
             "metrics_layer.query",
@@ -108,6 +119,18 @@ GENERIC_ENTITIES = {
 }
 
 
+def _resolve_use_case_id_str(metrics_query: MetricsQuery) -> str:
+    # Automatically resolve the use_case_id if it is not provided
+    # TODO: At the moment only a single Timeseries is allowed. In the future this will need to find
+    # all the Timeseries and ensure they all have the same use case.
+    mri = metrics_query.query.metric.mri
+    parsed_mri = parse_mri(mri)
+    if parsed_mri is None:
+        raise InvalidParams(f"'{mri}' is not a valid MRI")
+
+    return parsed_mri.namespace
+
+
 def _resolve_metrics_entity(mri: str) -> EntityKey:
     parsed_mri = parse_mri(mri)
     if parsed_mri is None:
@@ -119,6 +142,41 @@ def _resolve_metrics_entity(mri: str) -> EntityKey:
     return GENERIC_ENTITIES[parsed_mri.entity]
 
 
+def _resolve_granularity(start: datetime, end: datetime, interval: int | None) -> int:
+    """
+    Returns the granularity in seconds based on the start, end, and interval.
+    If the interval is set, then find the largest granularity that is smaller or equal to the interval.
+
+    If the interval is None, then it must be a totals query, which means this will use the biggest granularity
+    that matches the offset from the time range. This function does no automatic fitting of the time range to
+    a performant granularity.
+
+    E.g. if the time range is 7 days, but going from 3:01:53pm to 3:01:53pm, then it has to use the 10s
+    granularity, and the performance will suffer.
+    """
+    if interval is not None:
+        for granularity in ALLOWED_GRANULARITIES[::-1]:
+            if granularity <= interval:
+                return granularity
+
+        return ALLOWED_GRANULARITIES[0]  # Default to smallest granularity
+
+    found_granularities = []
+    for t in [start, end]:
+        rounded_to_day = t.replace(hour=0, minute=0, second=0, microsecond=0)
+        second_diff = int((t - rounded_to_day).total_seconds())
+
+        found = None
+        for granularity in ALLOWED_GRANULARITIES[::-1]:
+            if second_diff % granularity == 0:
+                found = granularity
+                break
+
+        found_granularities.append(found if found is not None else ALLOWED_GRANULARITIES[0])
+
+    return min(found_granularities)
+
+
 def _resolve_metrics_query(
     metrics_query: MetricsQuery,
 ) -> tuple[MetricsQuery, Mapping[str, str | int]]:
@@ -128,7 +186,6 @@ def _resolve_metrics_query(
     """
     assert metrics_query.query is not None
     metric = metrics_query.query.metric
-    scope = metrics_query.scope
     mappings: dict[str, str | int] = {}
     if not metric.public_name and metric.mri:
         public_name = get_public_name_from_mri(metric.mri)
@@ -143,8 +200,14 @@ def _resolve_metrics_query(
         )
         mappings[metric.public_name] = mri
 
-    org_id = scope.org_ids[0]
-    use_case_id = string_to_use_case_id(scope.use_case_id)
+    org_id = metrics_query.scope.org_ids[0]
+    use_case_id_str = _resolve_use_case_id_str(metrics_query)
+    if metrics_query.scope.use_case_id is None:
+        metrics_query = metrics_query.set_scope(
+            metrics_query.scope.set_use_case_id(use_case_id_str)
+        )
+
+    use_case_id = string_to_use_case_id(use_case_id_str)
     metric_id = resolve_weak(
         use_case_id, org_id, metrics_query.query.metric.mri
     )  # only support raw metrics for now
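
As a worked illustration of the totals-query branch added above (interval is None), the sketch below mirrors the selection logic of _resolve_granularity in standalone form rather than importing it, so the helper name pick_totals_granularity and the inlined constant are illustrative only. A day-aligned 7-day window can use the daily granularity, while the same range shifted to 3:01:53pm falls back to 10 seconds, exactly as the docstring warns.

from datetime import datetime

ALLOWED_GRANULARITIES = [10, 60, 3600, 86400]  # seconds, kept sorted ascending


def pick_totals_granularity(start: datetime, end: datetime) -> int:
    # For each boundary, take the largest granularity that evenly divides its
    # offset from midnight, then use the smaller of the two picks.
    picks = []
    for t in (start, end):
        midnight = t.replace(hour=0, minute=0, second=0, microsecond=0)
        offset = int((t - midnight).total_seconds())
        picks.append(
            next(
                (g for g in reversed(ALLOWED_GRANULARITIES) if offset % g == 0),
                ALLOWED_GRANULARITIES[0],
            )
        )
    return min(picks)


print(pick_totals_granularity(datetime(2023, 8, 1), datetime(2023, 8, 8)))  # 86400
print(
    pick_totals_granularity(
        datetime(2023, 8, 1, 15, 1, 53), datetime(2023, 8, 8, 15, 1, 53)
    )
)  # 10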
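
Similarly, the use-case inference added in _resolve_use_case_id_str comes straight from the MRI's namespace segment (MRIs have the shape "<type>:<namespace>/<name>@<unit>"). The sketch below approximates that step with a simplified regex; the pattern and the helper use_case_from_mri are illustrative stand-ins, not Sentry's actual parse_mri.

import re

MRI_RE = re.compile(r"^(?P<type>[a-z]):(?P<namespace>[a-z_]+)/(?P<name>[^@]+)@(?P<unit>.+)$")


def use_case_from_mri(mri: str) -> str:
    # Mirrors the diff's behavior: invalid MRIs are rejected, otherwise the
    # namespace becomes the use case id string.
    match = MRI_RE.match(mri)
    if match is None:
        raise ValueError(f"'{mri}' is not a valid MRI")
    return match.group("namespace")


print(use_case_from_mri("d:transactions/duration@millisecond"))  # transactions
print(use_case_from_mri("c:sessions/session@none"))  # sessions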