
ref: upgrade to black stable (#34177)

asottile-sentry · 2 years ago · commit cf018ef83b

+ 4 - 7
bin/typed_code.py

@@ -215,13 +215,10 @@ def print_results(
     teams: Set[str],
 ) -> None:
     """Pretty print the results."""
-    tuples = (
-        sorted(
-            ((team, get_result(covered_by_team, not_covered_by_team, team)) for team in teams),
-            key=lambda x: x[1],
-        )
-        + [(TOTALS_KEY, get_result(covered_by_team, not_covered_by_team, TOTALS_KEY))]
-    )
+    tuples = sorted(
+        ((team, get_result(covered_by_team, not_covered_by_team, team)) for team in teams),
+        key=lambda x: x[1],
+    ) + [(TOTALS_KEY, get_result(covered_by_team, not_covered_by_team, TOTALS_KEY))]
 
     bar = "=" * int(BAR_LENGTH / 2)
     print(f"{bar} Python coverage by team {bar}")  # NOQA S002
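
The hunk above is purely cosmetic: with the stable release, black lets the sorted(...) call on the right-hand side carry its own line breaks instead of wrapping the whole expression in an extra pair of parentheses, so the computed value is identical. A minimal, self-contained sketch of the resulting style, using hypothetical data in place of the team coverage results:

    # Hypothetical data standing in for the coverage results; the point is only
    # the formatting: no outer parentheses around the `sorted(...) + [...]` expression.
    scores = {"backend": 0.72, "frontend": 0.54}

    rows = sorted(
        ((team, pct) for team, pct in scores.items()),
        key=lambda pair: pair[1],
    ) + [("total", sum(scores.values()) / len(scores))]

    print(rows)  # sorted team rows first, then the ("total", ...) row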

+ 1 - 1
requirements-pre-commit.txt

@@ -1,5 +1,5 @@
 pre-commit==2.15.0
-black==21.9b0
+black==22.3.0
 flake8==3.9.2
 flake8-bugbear==21.4.3
 pyupgrade==2.29.0
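
The pin used by the pre-commit tooling moves from the 21.9b0 beta to the stable 22.3.0 release. If a project wants to fail fast when the locally installed formatter drifts from this pin, a small guard along these lines can be run in CI; this is an illustrative sketch, not something added by this commit:

    # Hypothetical check (not part of this commit): compare the installed black
    # version against the pin in requirements-pre-commit.txt.
    from importlib.metadata import version

    PINNED_BLACK = "22.3.0"

    def check_black_pin() -> None:
        installed = version("black")
        if installed != PINNED_BLACK:
            raise RuntimeError(f"expected black=={PINNED_BLACK}, found black=={installed}")

    if __name__ == "__main__":
        check_black_pin()
        print("black pin OK")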

+ 1 - 1
src/bitfield/models.py

@@ -133,7 +133,7 @@ class BitField(BigIntegerField):
             if isinstance(value, int) and value < 0:
                 new_value = 0
                 for bit_number, _ in enumerate(self.flags):
-                    new_value |= value & (2 ** bit_number)
+                    new_value |= value & (2**bit_number)
                 value = new_value
 
             value = BitHandler(value, self.flags, self.labels)
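
This is the first of several hunks that drop the spaces around the power operator: black 22 writes 2**bit_number when both operands are simple names or literals, and the same rule produces the 2**20, 2**30, and 2**31 edits further down. A small stand-alone sketch of the loop above, with hypothetical flags, to show the value is unchanged:

    # Hypothetical flags standing in for self.flags: keep only the bits that map
    # to a defined flag. 2**bit_number evaluates exactly like 2 ** bit_number.
    flags = ("read", "write", "admin")
    value = -1  # e.g. a negative value loaded from the database

    new_value = 0
    for bit_number, _ in enumerate(flags):
        new_value |= value & (2**bit_number)

    print(new_value)  # 7: only the three defined flag bits remain set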

+ 10 - 16
src/sentry/auth/password_validation.py

@@ -95,14 +95,11 @@ class MinimumLengthValidator:
             )
 
     def get_help_text(self):
-        return (
-            ungettext(
-                "Your password must contain at least %(min_length)d character.",
-                "Your password must contain at least %(min_length)d characters.",
-                self.min_length,
-            )
-            % {"min_length": self.min_length}
-        )
+        return ungettext(
+            "Your password must contain at least %(min_length)d character.",
+            "Your password must contain at least %(min_length)d characters.",
+            self.min_length,
+        ) % {"min_length": self.min_length}
 
 
 class MaximumLengthValidator:
@@ -126,14 +123,11 @@ class MaximumLengthValidator:
             )
 
     def get_help_text(self):
-        return (
-            ungettext(
-                "Your password must contain no more than %(max_length)d character.",
-                "Your password must contain no more than %(max_length)d characters.",
-                self.max_length,
-            )
-            % {"max_length": self.max_length}
-        )
+        return ungettext(
+            "Your password must contain no more than %(max_length)d character.",
+            "Your password must contain no more than %(max_length)d characters.",
+            self.max_length,
+        ) % {"max_length": self.max_length}
 
 
 class NumericPasswordValidator:
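
In both validators only the enclosing parentheses around the ungettext(...) % {...} expression disappear; the pluralization pattern itself is untouched. For reference, the same pattern written against the standard library's gettext (a sketch with an assumed min_length parameter, not Sentry code):

    # Sketch of the ngettext-then-%-format pattern using the stdlib fallback,
    # which returns the singular form for n == 1 and the plural form otherwise.
    from gettext import ngettext

    def minimum_length_help_text(min_length: int) -> str:
        return ngettext(
            "Your password must contain at least %(min_length)d character.",
            "Your password must contain at least %(min_length)d characters.",
            min_length,
        ) % {"min_length": min_length}

    print(minimum_length_help_text(1))   # ... at least 1 character.
    print(minimum_length_help_text(12))  # ... at least 12 characters.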

+ 1 - 1
src/sentry/conf/server.py

@@ -2480,7 +2480,7 @@ SENTRY_USE_UWSGI = True
 
 # When copying attachments for to-be-reprocessed events into processing store,
 # how large is an individual file chunk? Each chunk is stored as Redis key.
-SENTRY_REPROCESSING_ATTACHMENT_CHUNK_SIZE = 2 ** 20
+SENTRY_REPROCESSING_ATTACHMENT_CHUNK_SIZE = 2**20
 
 # Which cluster is used to store auxiliary data for reprocessing. Note that
 # this cluster is not used to store attachments etc, that still happens on

+ 1 - 1
src/sentry/data_export/tasks.py

@@ -290,7 +290,7 @@ def store_export_chunk_as_blob(data_export, bytes_written, fileobj, blob_size=DE
                 # there is a maximum file size allowed, so we need to make sure we don't exceed it
                 # NOTE: there seems to be issues with downloading files larger than 1 GB on slower
                 # networks, limit the export to 1 GB for now to improve reliability
-                if bytes_written + bytes_offset >= min(MAX_FILE_SIZE, 2 ** 30):
+                if bytes_written + bytes_offset >= min(MAX_FILE_SIZE, 2**30):
                     raise ExportDataFileTooBig()
     except ExportDataFileTooBig:
         return 0

+ 0 - 1
src/sentry/db/models/fields/bounded.py

@@ -80,7 +80,6 @@ if settings.SENTRY_USE_BIG_INTS:
                 assert value <= self.MAX_VALUE
             return cast(int, super().get_prep_value(value))
 
-
 else:
     # we want full on classes for these
     class BoundedBigIntegerField(BoundedIntegerField):  # type: ignore

+ 6 - 10
src/sentry/grouping/strategies/newstyle.py

@@ -391,16 +391,12 @@ def frame(
         # special case empty functions not to have a hint
         if not func:
             function_component.update(contributes=False)
-        elif (
-            func
-            in (
-                "?",
-                "<anonymous function>",
-                "<anonymous>",
-                "Anonymous function",
-            )
-            or func.endswith("/<")
-        ):
+        elif func in (
+            "?",
+            "<anonymous function>",
+            "<anonymous>",
+            "Anonymous function",
+        ) or func.endswith("/<"):
             function_component.update(contributes=False, hint="ignored unknown function name")
         if (func == "eval") or frame.abs_path in (
             "[native code]",

+ 1 - 1
src/sentry/models/file.py

@@ -38,7 +38,7 @@ UPLOAD_RETRY_TIME = getattr(settings, "SENTRY_UPLOAD_RETRY_TIME", 60)  # 1min
 DEFAULT_BLOB_SIZE = 1024 * 1024  # one mb
 CHUNK_STATE_HEADER = "__state"
 MULTI_BLOB_UPLOAD_CONCURRENCY = 8
-MAX_FILE_SIZE = 2 ** 31  # 2GB is the maximum offset supported by fileblob
+MAX_FILE_SIZE = 2**31  # 2GB is the maximum offset supported by fileblob
 
 
 class nooplogger:

+ 1 - 1
src/sentry/options/defaults.py

@@ -29,7 +29,7 @@ register("system.root-api-key", flags=FLAG_PRIORITIZE_DISK)
 register("system.logging-format", default=LoggingFormat.HUMAN, flags=FLAG_NOSTORE)
 # This is used for the chunk upload endpoint
 register("system.upload-url-prefix", flags=FLAG_PRIORITIZE_DISK)
-register("system.maximum-file-size", default=2 ** 31, flags=FLAG_PRIORITIZE_DISK)
+register("system.maximum-file-size", default=2**31, flags=FLAG_PRIORITIZE_DISK)
 
 # Redis
 register(

Some files were not shown because too many files changed in this diff