
meta(linting): Fixing flake8-logging issues (partial) (#61222)

Fixing [flake8-logging](https://github.com/adamchainz/flake8-logging#rules) issues; split out from #60850 to make reviewing more manageable.
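
For readers unfamiliar with the lint, the commit applies two recurring fixes: deferring string interpolation to the logging call instead of pre-formatting with f-strings, and replacing `logger.error(..., exc_info=...)` inside `except` blocks with `logger.exception(...)`. A minimal sketch of both patterns (the `python_version` value is a hypothetical stand-in):

```python
import logging

logger = logging.getLogger(__name__)

python_version = (3, 11)  # hypothetical stand-in for illustration

# Lazy %-style arguments: the message is only rendered if a handler
# actually emits the record, and aggregators can group on the
# constant format string.
logger.warning("A Python version different than 3.8 is being used (%s)", python_version)

try:
    raise ValueError("boom")  # stand-in for the real failing call
except Exception:
    # logger.exception() logs at ERROR level and attaches the active
    # traceback automatically, making exc_info=True/exc redundant.
    logger.exception("Failed to read")
```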
Bartek Ogryczak · 1 year ago · commit 124c7e8182

+ 1 - 1
setup.py

@@ -11,7 +11,7 @@ if python_version != (3, 8):
     import logging
 
     logger = logging.getLogger()
-    logger.warning(f"A Python version different than 3.8 is being used ({python_version})")
+    logger.warning("A Python version different than 3.8 is being used (%s)", python_version)
 
 
 from distutils.command.build import build as BuildCommand

+ 2 - 2
src/sentry/api/endpoints/artifact_lookup.py

@@ -223,8 +223,8 @@ def try_resolve_release_dist(
             dist = Distribution.objects.get(release=release, name=dist_name)
     except (Release.DoesNotExist, Distribution.DoesNotExist):
         pass
-    except Exception as exc:
-        logger.error("Failed to read", exc_info=exc)
+    except Exception:
+        logger.exception("Failed to read")
 
     return release, dist
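
For context (not part of the diff): inside an exception handler, `logger.exception(msg)` is equivalent to `logger.error(msg, exc_info=True)`; both emit an ERROR record with the traceback pulled from `sys.exc_info()`, so dropping the keyword loses nothing:

```python
import logging

logger = logging.getLogger(__name__)

try:
    {}["missing"]
except KeyError:
    # These two calls produce the same record: ERROR level plus the
    # current traceback.
    logger.error("Failed to read", exc_info=True)
    logger.exception("Failed to read")
```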
 

+ 1 - 2
src/sentry/api/endpoints/group_details.py

@@ -332,9 +332,8 @@ class GroupDetailsEndpoint(GroupEndpoint, EnvironmentMixin):
             )
             return Response(serialized, status=response.status_code)
         except client.ApiError as e:
-            logging.error(
+            logging.exception(
                 "group_details:put client.ApiError",
-                exc_info=True,
             )
             return Response(e.body, status=e.status_code)
         except Exception:

+ 1 - 2
src/sentry/api/endpoints/group_event_details.py

@@ -144,9 +144,8 @@ class GroupEventDetailsEndpoint(GroupEndpoint):
                     except ValidationError:
                         return Response(status=400)
                     except Exception:
-                        logging.error(
+                        logging.exception(
                             "group_event_details:get_helpful",
-                            exc_info=True,
                         )
                         return Response(status=500)
             else:

+ 2 - 2
src/sentry/api/endpoints/project_release_files.py

@@ -86,8 +86,8 @@ class ReleaseFilesMixin:
             try:
                 # Only Read from artifact index if it has a positive artifact count
                 artifact_index = read_artifact_index(release, dist, artifact_count__gt=0)
-            except Exception as exc:
-                logger.error("Failed to read artifact index", exc_info=exc)
+            except Exception:
+                logger.exception("Failed to read artifact index")
                 artifact_index = None
 
             if artifact_index is not None:

+ 3 - 4
src/sentry/api/helpers/deprecation.py

@@ -54,20 +54,20 @@ def _should_be_blocked(deprecation_date: datetime, now: datetime, key: str):
         try:
             brownout_cron = options.get(cron_key)
         except UnknownOption:
-            logger.error(f"Unrecognized deprecation key {key}")
+            logger.exception("Unrecognized deprecation key %s", key)
             brownout_cron = options.get("api.deprecation.brownout-cron")
 
         try:
             brownout_duration = options.get(duration_key)
         except UnknownOption:
-            logger.error(f"Unrecognized deprecation duration {key}")
+            logger.exception("Unrecognized deprecation duration %s", key)
             brownout_duration = options.get("api.deprecation.brownout-duration")
 
         # Validate the formats, allow requests to pass through if validation failed
         try:
             brownout_duration = isodate.parse_duration(brownout_duration)
         except ISO8601Error:
-            logger.error("Invalid ISO8601 format for blackout duration")
+            logger.exception("Invalid ISO8601 format for blackout duration")
             return False
 
         if not croniter.is_valid(brownout_cron):
@@ -114,7 +114,6 @@ def deprecated(
     def decorator(func):
         @functools.wraps(func)
         def endpoint_method(self, request: Request, *args, **kwargs):
-
             # Don't do anything for deprecated endpoints on self hosted
             if is_self_hosted():
                 return func(self, request, *args, **kwargs)

+ 1 - 1
src/sentry/api/invite_helper.py

@@ -117,7 +117,7 @@ class ApiInviteHelper:
         )
         if invite_context is None:
             if logger:
-                logger.error("Invalid pending invite cookie", exc_info=True)
+                logger.exception("Invalid pending invite cookie")
             return None
 
         api_invite_helper = ApiInviteHelper(

+ 3 - 2
src/sentry/api/serializers/models/group.py

@@ -260,8 +260,9 @@ class GroupSerializerBase(Serializer, ABC):
         if len(organization_id_list) > 1:
             # this should never happen but if it does we should know about it
             logger.warning(
-                "Found multiple organizations for groups: %s, with orgs: %s"
-                % ([item.id for item in item_list], organization_id_list)
+                "Found multiple organizations for groups: %s, with orgs: %s",
+                [item.id for item in item_list],
+                organization_id_list,
             )
 
         # should only have 1 org at this point
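
The change above swaps eager `%`-operator formatting for lazy arguments. A small sketch of the difference, with hypothetical values:

```python
import logging

logger = logging.getLogger(__name__)

group_ids = [1, 2, 3]  # hypothetical values for illustration
org_ids = [10, 20]

# Eager: the string is fully built before logging checks whether
# WARNING is even enabled.
logger.warning("Found multiple organizations for groups: %s, with orgs: %s" % (group_ids, org_ids))

# Lazy: interpolation happens inside the logging machinery, only
# when a handler emits the record.
logger.warning("Found multiple organizations for groups: %s, with orgs: %s", group_ids, org_ids)
```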

+ 5 - 5
src/sentry/snuba/metrics/extraction.py

@@ -438,7 +438,7 @@ def _extract_aggregate_components(aggregate: str) -> Optional[Tuple[str, List[st
         function, _, args, _ = query_builder.parse_function(match)
         return function, args
     except InvalidSearchQuery:
-        logger.error(f"Failed to parse aggregate: {aggregate}", exc_info=True)
+        logger.exception("Failed to parse aggregate: %s", aggregate)
 
     return None
 
@@ -535,7 +535,7 @@ def _get_query_supported_by(query: Optional[str]) -> SupportedBy:
 
         return SupportedBy(standard_metrics=standard_metrics, on_demand_metrics=on_demand_metrics)
     except InvalidSearchQuery:
-        logger.error(f"Failed to parse search query: {query}", exc_info=True)
+        logger.exception("Failed to parse search query: %s", query)
         return SupportedBy.neither()
 
 
@@ -648,7 +648,7 @@ def to_standard_metrics_query(query: str) -> str:
     try:
         tokens = _parse_search_query(query)
     except InvalidSearchQuery:
-        logger.error(f"Failed to parse search query: {query}", exc_info=True)
+        logger.exception("Failed to parse search query: %s", query)
         raise
 
     cleaned_query = to_standard_metrics_tokens(tokens)
@@ -1144,9 +1144,9 @@ class OnDemandMetricSpec:
             # Third step is to generate the actual Relay rule that contains all rules nested. We assume that the query
             # being passed here, can be satisfied ONLY by on demand metrics.
             rule_condition = SearchQueryConverter(parsed_query.conditions).convert()
-        except Exception as exc:
+        except Exception:
             if not parsed_query.is_empty():
-                logger.error(f"Error while converting search query '{self.query}'", exc_info=exc)
+                logger.exception("Error while converting search query '%s'", self.query)
 
             return None
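
One hunk above pairs `logger.exception` with a bare `raise`, which records the traceback locally while still propagating the error to callers. A self-contained sketch of that pattern, using stand-in names:

```python
import logging

logger = logging.getLogger(__name__)

class InvalidSearchQuery(Exception):  # stand-in for the real class
    pass

def _parse_search_query(query: str) -> list:
    raise InvalidSearchQuery("unbalanced quotes")  # always fails, for demonstration

def to_standard_metrics_query(query: str) -> list:
    try:
        return _parse_search_query(query)
    except InvalidSearchQuery:
        # Record the traceback here, then re-raise so callers still
        # see the original exception.
        logger.exception("Failed to parse search query: %s", query)
        raise
```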
 

+ 2 - 2
src/sentry/snuba/query_subscriptions/consumer.py

@@ -124,13 +124,13 @@ def handle_message(
                         EntityKey(contents["entity"]),
                     )
                 else:
-                    logger.error(
+                    logger.exception(
                         "Topic not registered with QuerySubscriptionConsumer, can't remove "
                         "non-existent subscription from Snuba",
                         extra={"topic": topic, "subscription_id": contents["subscription_id"]},
                     )
             except InvalidMessageError as e:
-                logger.exception(e)
+                logger.exception(str(e))
             except Exception:
                 logger.exception("Failed to delete unused subscription from snuba.")
             return
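
The last hunk is worth a note: flake8-logging also flags passing an exception object as the log message, hence the `logger.exception(str(e))` fix. A sketch with a stand-in class:

```python
import logging

logger = logging.getLogger(__name__)

class InvalidMessageError(Exception):  # stand-in for the real class
    pass

try:
    raise InvalidMessageError("bad payload")
except InvalidMessageError as e:
    # str(e) keeps the message explicit; exception() still attaches
    # the traceback. A constant message would group better in log
    # aggregation, e.g.: logger.exception("Invalid message: %s", e)
    logger.exception(str(e))
```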

Some files were not shown because too many files changed in this diff