Browse Source

ref(snuba): python typings updated to 3.9+ standard (SNS-2588) (#64092)

This PR references
[SNS-2588](https://getsentry.atlassian.net/browse/SNS-2588)

I updated all type annotations to the Python 3.9+ standard (PEP 585), replacing
the deprecated generic aliases from the typing package (e.g. List, Dict, Tuple)
with their builtin equivalents (list, dict, tuple) in the following files:

https://github.com/getsentry/sentry/blob/master/src/sentry/utils/snuba.py
https://github.com/getsentry/sentry/blob/master/src/sentry/tsdb/snuba.py



[SNS-2588]:
https://getsentry.atlassian.net/browse/SNS-2588?atlOrigin=eyJpIjoiNWRkNTljNzYxNjVmNDY3MDlhMDU5Y2ZhYzA5YTRkZjUiLCJwIjoiZ2l0aHViLWNvbS1KU1cifQ
Kyle Mumma 1 year ago
parent
commit
00da8ce04a
2 changed files with 14 additions and 25 deletions
  1. 3 3
      src/sentry/tsdb/snuba.py
  2. 11 22
      src/sentry/utils/snuba.py

+ 3 - 3
src/sentry/tsdb/snuba.py

@@ -6,7 +6,7 @@ import itertools
 from collections.abc import Mapping, Set
 from copy import deepcopy
 from datetime import datetime
-from typing import Any, List, Optional, Sequence
+from typing import Any, Optional, Sequence
 
 from snuba_sdk import (
     Column,
@@ -378,7 +378,7 @@ class SnubaTSDB(BaseTSDB):
             model_aggregate = None
 
         aggregated_as = "aggregate"
-        aggregations: List[SelectableExpression] = [
+        aggregations: list[SelectableExpression] = [
             Function(
                 aggregation,
                 [Column(model_aggregate)] if model_aggregate else [],
@@ -395,7 +395,7 @@ class SnubaTSDB(BaseTSDB):
             limit = min(10000, int(len(keys) * ((end - start).total_seconds() / rollup)))
 
             # build up order by
-            orderby: List[OrderBy] = []
+            orderby: list[OrderBy] = []
             if group_on_time:
                 orderby.append(OrderBy(Column("time"), Direction.DESC))
             if group_on_model and model_group is not None:

+ 11 - 22
src/sentry/utils/snuba.py

@@ -11,18 +11,7 @@ from contextlib import contextmanager
 from copy import deepcopy
 from datetime import datetime, timedelta, timezone
 from hashlib import sha1
-from typing import (
-    Any,
-    Callable,
-    Dict,
-    List,
-    Mapping,
-    MutableMapping,
-    Optional,
-    Sequence,
-    Tuple,
-    Union,
-)
+from typing import Any, Callable, Mapping, MutableMapping, Optional, Sequence, Union
 from urllib.parse import urlparse
 
 import sentry_sdk
@@ -181,7 +170,7 @@ METRICS_COLUMN_MAP = {
 }
 
 
-DATASETS: Dict[Dataset, Dict[str, str]] = {
+DATASETS: dict[Dataset, dict[str, str]] = {
     Dataset.Events: SENTRY_SNUBA_MAP,
     Dataset.Transactions: TRANSACTIONS_SNUBA_MAP,
     Dataset.Discover: DISCOVER_COLUMN_MAP,
@@ -521,7 +510,7 @@ def get_organization_id_from_project_ids(project_ids: Sequence[int]) -> int:
     return organization_id
 
 
-def infer_project_ids_from_related_models(filter_keys: Mapping[str, Sequence[int]]) -> List[int]:
+def infer_project_ids_from_related_models(filter_keys: Mapping[str, Sequence[int]]) -> list[int]:
     ids = [set(get_related_project_ids(k, filter_keys[k])) for k in filter_keys]
     return list(set.union(*ids))
 
@@ -592,7 +581,7 @@ def _prepare_start_end(
     end: Optional[datetime],
     organization_id: int,
     group_ids: Optional[Sequence[int]],
-) -> Tuple[datetime, datetime]:
+) -> tuple[datetime, datetime]:
     if not start:
         start = datetime(2008, 5, 8)
     if not end:
@@ -796,8 +785,8 @@ def raw_query(
 
 SnubaQuery = Union[Request, MutableMapping[str, Any]]
 Translator = Callable[[Any], Any]
-SnubaQueryBody = Tuple[SnubaQuery, Translator, Translator]
-ResultSet = List[Mapping[str, Any]]  # TODO: Would be nice to make this a concrete structure
+SnubaQueryBody = tuple[SnubaQuery, Translator, Translator]
+ResultSet = list[Mapping[str, Any]]  # TODO: Would be nice to make this a concrete structure
 
 
 def raw_snql_query(
@@ -815,7 +804,7 @@ def raw_snql_query(
 
 
 def bulk_snql_query(
-    requests: List[Request],
+    requests: list[Request],
     referrer: Optional[str] = None,
     use_cache: bool = False,
 ) -> ResultSet:
@@ -826,7 +815,7 @@ def bulk_snql_query(
 
 
 def bulk_snuba_queries(
-    requests: List[Request],
+    requests: list[Request],
     referrer: Optional[str] = None,
     use_cache: bool = False,
     use_mql: bool = False,
@@ -895,7 +884,7 @@ def _apply_cache_and_build_results(
     if use_cache:
         cache_keys = [get_cache_key(query_params[0]) for _, query_params in query_param_list]
         cache_data = cache.get_many(cache_keys)
-        to_query: List[Tuple[int, SnubaQueryBody, Optional[str]]] = []
+        to_query: list[tuple[int, SnubaQueryBody, Optional[str]]] = []
         for (query_pos, query_params), cache_key in zip(query_param_list, cache_keys):
             cached_result = cache_data.get(cache_key)
             metric_tags = {"referrer": referrer} if referrer else None
@@ -1017,7 +1006,7 @@ def _bulk_snuba_query(
     return results
 
 
-RawResult = Tuple[urllib3.response.HTTPResponse, Callable[[Any], Any], Callable[[Any], Any]]
+RawResult = tuple[urllib3.response.HTTPResponse, Callable[[Any], Any], Callable[[Any], Any]]
 
 
 def _snql_query(
@@ -1062,7 +1051,7 @@ def _snuba_query(
         raise SnubaError(err)
 
 
-def _legacy_snql_query(params: Tuple[SnubaQuery, Hub, Mapping[str, str], str, bool]) -> RawResult:
+def _legacy_snql_query(params: tuple[SnubaQuery, Hub, Mapping[str, str], str, bool]) -> RawResult:
     # Convert the JSON query to SnQL and run it
     query_data, thread_hub, headers, parent_api, _ = params
     query_params, forward, reverse = query_data