
ref(pyupgrade): f-strings complete (#23758)

josh · 4 years ago
commit fbf245e76c
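
The "pyupgrade" in the commit title refers to the pyupgrade tool, which rewrites older string formatting into f-strings when the code targets Python 3.6 or newer. The exact invocation is not recorded in this commit; something like pyupgrade --py36-plus run over the tracked .py files is an assumption. Where an f-string is not a clean fit (for example placeholders with conversion flags such as !r, or expressions spread over several lines), pyupgrade rewrites %-formatting to str.format instead, which appears to be why a few hunks below still use .format. A minimal before/after sketch, with placeholder names that are not taken from the Sentry code base:

    # Illustration of the rewrite applied throughout this commit.
    # BASE_URL, module and name are placeholders, not Sentry identifiers.
    BASE_URL = "https://sentry.example.invalid"
    module, name = "sentry.bgtasks", "clean_dirty_queues"

    # before pyupgrade
    url = "{}/oauth/token/".format(BASE_URL)
    label = "%s:%s" % (module, name)

    # after pyupgrade (targeting Python 3.6+)
    url = f"{BASE_URL}/oauth/token/"
    label = f"{module}:{name}"
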

+ 4 - 4
examples/oauth2_consumer_webserver/app.py

@@ -20,13 +20,13 @@ oauth = OAuth()
 sentry = oauth.remote_app(
     "sentry",
     base_url=BASE_URL,
-    authorize_url="{}/oauth/authorize/".format(BASE_URL),
+    authorize_url=f"{BASE_URL}/oauth/authorize/",
     request_token_url=None,
     request_token_params={
         "scope": "project:releases event:read org:read org:write",
         "response_type": "code",
     },
-    access_token_url="{}/oauth/token/".format(BASE_URL),
+    access_token_url=f"{BASE_URL}/oauth/token/",
     access_token_method="POST",
     access_token_params={"grant_type": "authorization_code"},
     consumer_key=CLIENT_ID,
@@ -44,8 +44,8 @@ def index():
 
     from urllib2 import Request, urlopen, URLError
 
-    headers = {"Authorization": "Bearer {}".format(access_token)}
-    req = Request("{}/api/0/organizations/".format(BASE_URL), None, headers)
+    headers = {"Authorization": f"Bearer {access_token}"}
+    req = Request(f"{BASE_URL}/api/0/organizations/", None, headers)
     try:
         res = urlopen(req)
     except URLError as e:

+ 3 - 7
setup.py

@@ -80,7 +80,7 @@ cmdclass = {
 
 
 def get_requirements(env):
-    with open("requirements-{}.txt".format(env)) as fp:
+    with open(f"requirements-{env}.txt") as fp:
         return [x.strip() for x in fp.read().split("\n") if not x.startswith("#")]
 
 
@@ -109,12 +109,8 @@ setup(
     cmdclass=cmdclass,
     license="BSL-1.1",
     include_package_data=True,
-    package_data={
-        "sentry": ["static/sentry/{}/**".format(d) for d in ("dist", "js", "images", "vendor")]
-    },
-    exclude_package_data={
-        "sentry": ["static/sentry/{}/**".format(d) for d in ("app", "fonts", "less")]
-    },
+    package_data={"sentry": [f"static/sentry/{d}/**" for d in ("dist", "js", "images", "vendor")]},
+    exclude_package_data={"sentry": [f"static/sentry/{d}/**" for d in ("app", "fonts", "less")]},
     entry_points={
         "console_scripts": ["sentry = sentry.runner:main"],
         "sentry.apps": [

+ 2 - 2
src/bitfield/types.py

@@ -135,9 +135,9 @@ class BitHandler:
         return cmp(self._value, other)
 
     def __repr__(self):
-        return "<%s: %s>" % (
+        return "<{}: {}>".format(
             self.__class__.__name__,
-            ", ".join("%s=%s" % (k, self.get_bit(n).is_set) for n, k in enumerate(self._keys)),
+            ", ".join("{}={}".format(k, self.get_bit(n).is_set) for n, k in enumerate(self._keys)),
         )
 
     def __str__(self):

+ 1 - 1
src/sentry/__init__.py

@@ -37,7 +37,7 @@ def get_revision():
 
 def get_version():
     if __build__:
-        return "%s.%s" % (__version__, __build__)
+        return f"{__version__}.{__build__}"
     return __version__
 
 

+ 1 - 1
src/sentry/bgtasks/api.py

@@ -27,7 +27,7 @@ class BgTask:
 
     @property
     def name(self):
-        return "%s:%s" % (self.callback.__module__, self.callback.__name__)
+        return f"{self.callback.__module__}:{self.callback.__name__}"
 
     def run(self):
         if self.running:

+ 1 - 1
src/sentry/buffer/base.py

@@ -10,7 +10,7 @@ from sentry.utils.services import Service
 class BufferMount(type):
     def __new__(cls, name, bases, attrs):
         new_cls = type.__new__(cls, name, bases, attrs)
-        new_cls.logger = logging.getLogger("sentry.buffer.%s" % (new_cls.__name__.lower(),))
+        new_cls.logger = logging.getLogger(f"sentry.buffer.{new_cls.__name__.lower()}")
         return new_cls
 
 

+ 5 - 5
src/sentry/buffer/redis.py

@@ -74,10 +74,10 @@ class RedisBuffer(Buffer):
         """
         Returns a Redis-compatible key for the model given filters.
         """
-        return "b:k:%s:%s" % (
+        return "b:k:{}:{}".format(
             model._meta,
             md5_text(
-                "&".join("%s=%s" % (k, self._coerce_val(v)) for k, v in sorted(filters.items()))
+                "&".join("{}={}".format(k, self._coerce_val(v)) for k, v in sorted(filters.items()))
             ).hexdigest(),
         )
 
@@ -103,7 +103,7 @@ class RedisBuffer(Buffer):
         return self._make_pending_key(crc32(key) % self.pending_partitions)
 
     def _make_lock_key(self, key):
-        return "l:%s" % (key,)
+        return f"l:{key}"
 
     def _dump_values(self, values):
         result = {}
@@ -142,7 +142,7 @@ class RedisBuffer(Buffer):
         elif type_ == "f":
             return float(value)
         else:
-            raise TypeError("invalid type: {}".format(type_))
+            raise TypeError(f"invalid type: {type_}")
 
     def incr(self, model, columns, filters, extra=None, signal_only=None):
         """
@@ -164,7 +164,7 @@ class RedisBuffer(Buffer):
         conn = self.cluster.get_local_client_for_key(key)
 
         pipe = conn.pipeline()
-        pipe.hsetnx(key, "m", "%s.%s" % (model.__module__, model.__name__))
+        pipe.hsetnx(key, "m", f"{model.__module__}.{model.__name__}")
         # TODO(dcramer): once this goes live in production, we can kill the pickle path
         # (this is to ensure a zero downtime deploy where we can transition event processing)
         pipe.hsetnx(key, "f", pickle.dumps(filters))

+ 1 - 1
src/sentry/cache/redis.py

@@ -20,7 +20,7 @@ class CommonRedisCache(BaseCache):
         key = self.make_key(key, version=version)
         v = json.dumps(value) if not raw else value
         if len(v) > self.max_size:
-            raise ValueTooLarge("Cache key too large: %r %r" % (key, len(v)))
+            raise ValueTooLarge("Cache key too large: {!r} {!r}".format(key, len(v)))
         if timeout:
             self.client.setex(key, int(timeout), v)
         else:

+ 1 - 1
src/sentry/conf/locale.py

@@ -8,7 +8,7 @@ from sentry.utils import json
 def dirname_to_local(dir_name):
     if "_" in dir_name:
         pre, post = dir_name.split("_", 1)
-        dir_name = "{}-{}".format(pre, post.lower())
+        dir_name = f"{pre}-{post.lower()}"
     return dir_name
 
 

+ 1 - 1
src/sentry/conf/server.py

@@ -610,7 +610,7 @@ CELERY_ROUTES = ("sentry.queue.routers.SplitQueueRouter",)
 def create_partitioned_queues(name):
     exchange = Exchange(name, type="direct")
     for num in range(1):
-        CELERY_QUEUES.append(Queue("{}-{}".format(name, num), exchange=exchange))
+        CELERY_QUEUES.append(Queue(f"{name}-{num}", exchange=exchange))
 
 
 create_partitioned_queues("counters")

Some files were not shown because too many files changed in this diff