Intermediate changes
commit_hash: f785655b6e4979e4b61af2cb8227296a279f7ab6

robot-piglet, 3 months ago
parent commit: 9dc6b2fddb

+ 6 - 3
contrib/python/httpcore/.dist-info/METADATA

@@ -1,13 +1,12 @@
 Metadata-Version: 2.3
 Name: httpcore
-Version: 1.0.6
+Version: 1.0.7
 Summary: A minimal low-level HTTP client.
 Project-URL: Documentation, https://www.encode.io/httpcore
 Project-URL: Homepage, https://www.encode.io/httpcore/
 Project-URL: Source, https://github.com/encode/httpcore
 Author-email: Tom Christie <tom@tomchristie.com>
-License-Expression: BSD-3-Clause
-License-File: LICENSE.md
+License: BSD-3-Clause
 Classifier: Development Status :: 3 - Alpha
 Classifier: Environment :: Web Environment
 Classifier: Framework :: AsyncIO
@@ -153,6 +152,10 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 
+## Version 1.0.7 (November 15th, 2024)
+
+- Support `proxy=…` configuration on `ConnectionPool()`. (#974)
+
 ## Version 1.0.6 (October 1st, 2024)
 
 - Relax `trio` dependency pinning. (#956)

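The changelog entry above introduces the `proxy=…` argument on `ConnectionPool()`. A minimal usage sketch follows; the `Proxy` constructor signature and the proxy address are assumptions, since the `_models.py` change that defines `Proxy` is not shown in this diff:

```python
# Hedged sketch of the new proxy support in httpcore 1.0.7.
# The Proxy(...) constructor and the proxy URL below are assumptions.
import httpcore

proxy = httpcore.Proxy("http://127.0.0.1:8080/")
with httpcore.ConnectionPool(proxy=proxy) as pool:
    response = pool.request("GET", "https://www.example.com/")
    print(response.status)
```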
+ 3 - 2
contrib/python/httpcore/httpcore/__init__.py

@@ -34,7 +34,7 @@ from ._exceptions import (
     WriteError,
     WriteTimeout,
 )
-from ._models import URL, Origin, Request, Response
+from ._models import URL, Origin, Proxy, Request, Response
 from ._ssl import default_ssl_context
 from ._sync import (
     ConnectionInterface,
@@ -79,6 +79,7 @@ __all__ = [
     "URL",
     "Request",
     "Response",
+    "Proxy",
     # async
     "AsyncHTTPConnection",
     "AsyncConnectionPool",
@@ -130,7 +131,7 @@ __all__ = [
     "WriteError",
 ]
 
-__version__ = "1.0.6"
+__version__ = "1.0.7"
 
 
 __locals = locals()

+ 14 - 12
contrib/python/httpcore/httpcore/_api.py

@@ -1,17 +1,19 @@
-from contextlib import contextmanager
-from typing import Iterator, Optional, Union
+from __future__ import annotations
+
+import contextlib
+import typing
 
 from ._models import URL, Extensions, HeaderTypes, Response
 from ._sync.connection_pool import ConnectionPool
 
 
 def request(
-    method: Union[bytes, str],
-    url: Union[URL, bytes, str],
+    method: bytes | str,
+    url: URL | bytes | str,
     *,
     headers: HeaderTypes = None,
-    content: Union[bytes, Iterator[bytes], None] = None,
-    extensions: Optional[Extensions] = None,
+    content: bytes | typing.Iterator[bytes] | None = None,
+    extensions: Extensions | None = None,
 ) -> Response:
     """
     Sends an HTTP request, returning the response.
@@ -45,15 +47,15 @@ def request(
         )
 
 
-@contextmanager
+@contextlib.contextmanager
 def stream(
-    method: Union[bytes, str],
-    url: Union[URL, bytes, str],
+    method: bytes | str,
+    url: URL | bytes | str,
     *,
     headers: HeaderTypes = None,
-    content: Union[bytes, Iterator[bytes], None] = None,
-    extensions: Optional[Extensions] = None,
-) -> Iterator[Response]:
+    content: bytes | typing.Iterator[bytes] | None = None,
+    extensions: Extensions | None = None,
+) -> typing.Iterator[Response]:
     """
     Sends an HTTP request, returning the response within a context manager.
 

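The hunks above only modernise the type annotations of the module-level `request()` and `stream()` helpers; their behaviour is unchanged. For reference, a short usage sketch (the target URL is a placeholder):

```python
import httpcore

# One-shot request: the body is read eagerly and exposed as response.content.
response = httpcore.request("GET", "https://www.example.com/")
print(response.status, len(response.content))

# Streaming request: the body is consumed inside the context manager.
with httpcore.stream("GET", "https://www.example.com/") as response:
    for chunk in response.iter_stream():
        print(f"downloaded {len(chunk)} bytes")
```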
+ 16 - 14
contrib/python/httpcore/httpcore/_async/connection.py

@@ -1,8 +1,10 @@
+from __future__ import annotations
+
 import itertools
 import logging
 import ssl
-from types import TracebackType
-from typing import Iterable, Iterator, Optional, Type
+import types
+import typing
 
 from .._backends.auto import AutoBackend
 from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream
@@ -20,7 +22,7 @@ RETRIES_BACKOFF_FACTOR = 0.5  # 0s, 0.5s, 1s, 2s, 4s, etc.
 logger = logging.getLogger("httpcore.connection")
 
 
-def exponential_backoff(factor: float) -> Iterator[float]:
+def exponential_backoff(factor: float) -> typing.Iterator[float]:
     """
     Generate a geometric sequence that has a ratio of 2 and starts with 0.
 
@@ -37,15 +39,15 @@ class AsyncHTTPConnection(AsyncConnectionInterface):
     def __init__(
         self,
         origin: Origin,
-        ssl_context: Optional[ssl.SSLContext] = None,
-        keepalive_expiry: Optional[float] = None,
+        ssl_context: ssl.SSLContext | None = None,
+        keepalive_expiry: float | None = None,
         http1: bool = True,
         http2: bool = False,
         retries: int = 0,
-        local_address: Optional[str] = None,
-        uds: Optional[str] = None,
-        network_backend: Optional[AsyncNetworkBackend] = None,
-        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+        local_address: str | None = None,
+        uds: str | None = None,
+        network_backend: AsyncNetworkBackend | None = None,
+        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
     ) -> None:
         self._origin = origin
         self._ssl_context = ssl_context
@@ -59,7 +61,7 @@ class AsyncHTTPConnection(AsyncConnectionInterface):
         self._network_backend: AsyncNetworkBackend = (
             AutoBackend() if network_backend is None else network_backend
         )
-        self._connection: Optional[AsyncConnectionInterface] = None
+        self._connection: AsyncConnectionInterface | None = None
         self._connect_failed: bool = False
         self._request_lock = AsyncLock()
         self._socket_options = socket_options
@@ -208,13 +210,13 @@ class AsyncHTTPConnection(AsyncConnectionInterface):
     # These context managers are not used in the standard flow, but are
     # useful for testing or working with connection instances directly.
 
-    async def __aenter__(self) -> "AsyncHTTPConnection":
+    async def __aenter__(self) -> AsyncHTTPConnection:
         return self
 
     async def __aexit__(
         self,
-        exc_type: Optional[Type[BaseException]] = None,
-        exc_value: Optional[BaseException] = None,
-        traceback: Optional[TracebackType] = None,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: types.TracebackType | None = None,
     ) -> None:
         await self.aclose()

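`exponential_backoff()` above drives the retry delays noted in the `RETRIES_BACKOFF_FACTOR` comment (0s, 0.5s, 1s, 2s, 4s, etc.). A standalone sketch that reproduces the documented sequence; the generator body itself is elided from this diff, so this mirrors the docstring rather than the exact implementation:

```python
import itertools
import typing


def exponential_backoff(factor: float) -> typing.Iterator[float]:
    # First retry happens immediately, then delays double each time.
    yield 0
    for n in itertools.count():
        yield factor * (2**n)


print(list(itertools.islice(exponential_backoff(0.5), 6)))
# [0, 0.5, 1.0, 2.0, 4.0, 8.0]
```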
+ 69 - 29
contrib/python/httpcore/httpcore/_async/connection_pool.py

@@ -1,12 +1,14 @@
+from __future__ import annotations
+
 import ssl
 import sys
-from types import TracebackType
-from typing import AsyncIterable, AsyncIterator, Iterable, List, Optional, Type
+import types
+import typing
 
 from .._backends.auto import AutoBackend
 from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend
 from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol
-from .._models import Origin, Request, Response
+from .._models import Origin, Proxy, Request, Response
 from .._synchronization import AsyncEvent, AsyncShieldCancellation, AsyncThreadLock
 from .connection import AsyncHTTPConnection
 from .interfaces import AsyncConnectionInterface, AsyncRequestInterface
@@ -15,12 +17,10 @@ from .interfaces import AsyncConnectionInterface, AsyncRequestInterface
 class AsyncPoolRequest:
     def __init__(self, request: Request) -> None:
         self.request = request
-        self.connection: Optional[AsyncConnectionInterface] = None
+        self.connection: AsyncConnectionInterface | None = None
         self._connection_acquired = AsyncEvent()
 
-    def assign_to_connection(
-        self, connection: Optional[AsyncConnectionInterface]
-    ) -> None:
+    def assign_to_connection(self, connection: AsyncConnectionInterface | None) -> None:
         self.connection = connection
         self._connection_acquired.set()
 
@@ -29,7 +29,7 @@ class AsyncPoolRequest:
         self._connection_acquired = AsyncEvent()
 
     async def wait_for_connection(
-        self, timeout: Optional[float] = None
+        self, timeout: float | None = None
     ) -> AsyncConnectionInterface:
         if self.connection is None:
             await self._connection_acquired.wait(timeout=timeout)
@@ -47,17 +47,18 @@ class AsyncConnectionPool(AsyncRequestInterface):
 
     def __init__(
         self,
-        ssl_context: Optional[ssl.SSLContext] = None,
-        max_connections: Optional[int] = 10,
-        max_keepalive_connections: Optional[int] = None,
-        keepalive_expiry: Optional[float] = None,
+        ssl_context: ssl.SSLContext | None = None,
+        proxy: Proxy | None = None,
+        max_connections: int | None = 10,
+        max_keepalive_connections: int | None = None,
+        keepalive_expiry: float | None = None,
         http1: bool = True,
         http2: bool = False,
         retries: int = 0,
-        local_address: Optional[str] = None,
-        uds: Optional[str] = None,
-        network_backend: Optional[AsyncNetworkBackend] = None,
-        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+        local_address: str | None = None,
+        uds: str | None = None,
+        network_backend: AsyncNetworkBackend | None = None,
+        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
     ) -> None:
         """
         A connection pool for making HTTP requests.
@@ -89,7 +90,7 @@ class AsyncConnectionPool(AsyncRequestInterface):
              in the TCP socket when the connection was established.
         """
         self._ssl_context = ssl_context
-
+        self._proxy = proxy
         self._max_connections = (
             sys.maxsize if max_connections is None else max_connections
         )
@@ -116,8 +117,8 @@ class AsyncConnectionPool(AsyncRequestInterface):
 
         # The mutable state on a connection pool is the queue of incoming requests,
         # and the set of connections that are servicing those requests.
-        self._connections: List[AsyncConnectionInterface] = []
-        self._requests: List[AsyncPoolRequest] = []
+        self._connections: list[AsyncConnectionInterface] = []
+        self._requests: list[AsyncPoolRequest] = []
 
         # We only mutate the state of the connection pool within an 'optional_thread_lock'
         # context. This holds a threading lock unless we're running in async mode,
@@ -125,6 +126,45 @@ class AsyncConnectionPool(AsyncRequestInterface):
         self._optional_thread_lock = AsyncThreadLock()
 
     def create_connection(self, origin: Origin) -> AsyncConnectionInterface:
+        if self._proxy is not None:
+            if self._proxy.url.scheme in (b"socks5", b"socks5h"):
+                from .socks_proxy import AsyncSocks5Connection
+
+                return AsyncSocks5Connection(
+                    proxy_origin=self._proxy.url.origin,
+                    proxy_auth=self._proxy.auth,
+                    remote_origin=origin,
+                    ssl_context=self._ssl_context,
+                    keepalive_expiry=self._keepalive_expiry,
+                    http1=self._http1,
+                    http2=self._http2,
+                    network_backend=self._network_backend,
+                )
+            elif origin.scheme == b"http":
+                from .http_proxy import AsyncForwardHTTPConnection
+
+                return AsyncForwardHTTPConnection(
+                    proxy_origin=self._proxy.url.origin,
+                    proxy_headers=self._proxy.headers,
+                    proxy_ssl_context=self._proxy.ssl_context,
+                    remote_origin=origin,
+                    keepalive_expiry=self._keepalive_expiry,
+                    network_backend=self._network_backend,
+                )
+            from .http_proxy import AsyncTunnelHTTPConnection
+
+            return AsyncTunnelHTTPConnection(
+                proxy_origin=self._proxy.url.origin,
+                proxy_headers=self._proxy.headers,
+                proxy_ssl_context=self._proxy.ssl_context,
+                remote_origin=origin,
+                ssl_context=self._ssl_context,
+                keepalive_expiry=self._keepalive_expiry,
+                http1=self._http1,
+                http2=self._http2,
+                network_backend=self._network_backend,
+            )
+
         return AsyncHTTPConnection(
             origin=origin,
             ssl_context=self._ssl_context,
@@ -139,7 +179,7 @@ class AsyncConnectionPool(AsyncRequestInterface):
         )
 
     @property
-    def connections(self) -> List[AsyncConnectionInterface]:
+    def connections(self) -> list[AsyncConnectionInterface]:
         """
         Return a list of the connections currently in the pool.
 
@@ -217,7 +257,7 @@ class AsyncConnectionPool(AsyncRequestInterface):
 
         # Return the response. Note that in this case we still have to manage
         # the point at which the response is closed.
-        assert isinstance(response.stream, AsyncIterable)
+        assert isinstance(response.stream, typing.AsyncIterable)
         return Response(
             status=response.status,
             headers=response.headers,
@@ -227,7 +267,7 @@ class AsyncConnectionPool(AsyncRequestInterface):
             extensions=response.extensions,
         )
 
-    def _assign_requests_to_connections(self) -> List[AsyncConnectionInterface]:
+    def _assign_requests_to_connections(self) -> list[AsyncConnectionInterface]:
         """
         Manage the state of the connection pool, assigning incoming
         requests to connections as available.
@@ -298,7 +338,7 @@ class AsyncConnectionPool(AsyncRequestInterface):
 
         return closing_connections
 
-    async def _close_connections(self, closing: List[AsyncConnectionInterface]) -> None:
+    async def _close_connections(self, closing: list[AsyncConnectionInterface]) -> None:
         # Close connections which have been removed from the pool.
         with AsyncShieldCancellation():
             for connection in closing:
@@ -312,14 +352,14 @@ class AsyncConnectionPool(AsyncRequestInterface):
             self._connections = []
         await self._close_connections(closing_connections)
 
-    async def __aenter__(self) -> "AsyncConnectionPool":
+    async def __aenter__(self) -> AsyncConnectionPool:
         return self
 
     async def __aexit__(
         self,
-        exc_type: Optional[Type[BaseException]] = None,
-        exc_value: Optional[BaseException] = None,
-        traceback: Optional[TracebackType] = None,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: types.TracebackType | None = None,
     ) -> None:
         await self.aclose()
 
@@ -349,7 +389,7 @@ class AsyncConnectionPool(AsyncRequestInterface):
 class PoolByteStream:
     def __init__(
         self,
-        stream: AsyncIterable[bytes],
+        stream: typing.AsyncIterable[bytes],
         pool_request: AsyncPoolRequest,
         pool: AsyncConnectionPool,
     ) -> None:
@@ -358,7 +398,7 @@ class PoolByteStream:
         self._pool = pool
         self._closed = False
 
-    async def __aiter__(self) -> AsyncIterator[bytes]:
+    async def __aiter__(self) -> typing.AsyncIterator[bytes]:
         try:
             async for part in self._stream:
                 yield part

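The new `create_connection()` branch above is where the `proxy=` setting takes effect: a `socks5`/`socks5h` proxy scheme selects `AsyncSocks5Connection`, a plain-HTTP target behind an HTTP proxy uses `AsyncForwardHTTPConnection` (forwarding), and any other target is tunnelled via `AsyncTunnelHTTPConnection` (CONNECT). A hedged async usage sketch, assuming the `Proxy` constructor and a local SOCKS5 proxy address:

```python
# The socks5 branch additionally requires the optional socksio dependency.
import asyncio

import httpcore


async def main() -> None:
    proxy = httpcore.Proxy("socks5://localhost:1080/")  # assumed constructor and address
    async with httpcore.AsyncConnectionPool(proxy=proxy) as pool:
        response = await pool.request("GET", "https://www.example.com/")
        print(response.status)


# asyncio.run(main())  # needs a SOCKS5 proxy listening on localhost:1080
```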
+ 26 - 33
contrib/python/httpcore/httpcore/_async/http11.py

@@ -1,18 +1,11 @@
+from __future__ import annotations
+
 import enum
 import logging
 import ssl
 import time
-from types import TracebackType
-from typing import (
-    Any,
-    AsyncIterable,
-    AsyncIterator,
-    List,
-    Optional,
-    Tuple,
-    Type,
-    Union,
-)
+import types
+import typing
 
 import h11
 
@@ -33,7 +26,7 @@ logger = logging.getLogger("httpcore.http11")
 
 
 # A subset of `h11.Event` types supported by `_send_event`
-H11SendEvent = Union[
+H11SendEvent = typing.Union[
     h11.Request,
     h11.Data,
     h11.EndOfMessage,
@@ -55,12 +48,12 @@ class AsyncHTTP11Connection(AsyncConnectionInterface):
         self,
         origin: Origin,
         stream: AsyncNetworkStream,
-        keepalive_expiry: Optional[float] = None,
+        keepalive_expiry: float | None = None,
     ) -> None:
         self._origin = origin
         self._network_stream = stream
-        self._keepalive_expiry: Optional[float] = keepalive_expiry
-        self._expire_at: Optional[float] = None
+        self._keepalive_expiry: float | None = keepalive_expiry
+        self._expire_at: float | None = None
         self._state = HTTPConnectionState.NEW
         self._state_lock = AsyncLock()
         self._request_count = 0
@@ -160,16 +153,14 @@ class AsyncHTTP11Connection(AsyncConnectionInterface):
         timeouts = request.extensions.get("timeout", {})
         timeout = timeouts.get("write", None)
 
-        assert isinstance(request.stream, AsyncIterable)
+        assert isinstance(request.stream, typing.AsyncIterable)
         async for chunk in request.stream:
             event = h11.Data(data=chunk)
             await self._send_event(event, timeout=timeout)
 
         await self._send_event(h11.EndOfMessage(), timeout=timeout)
 
-    async def _send_event(
-        self, event: h11.Event, timeout: Optional[float] = None
-    ) -> None:
+    async def _send_event(self, event: h11.Event, timeout: float | None = None) -> None:
         bytes_to_send = self._h11_state.send(event)
         if bytes_to_send is not None:
             await self._network_stream.write(bytes_to_send, timeout=timeout)
@@ -178,7 +169,7 @@ class AsyncHTTP11Connection(AsyncConnectionInterface):
 
     async def _receive_response_headers(
         self, request: Request
-    ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]], bytes]:
+    ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]:
         timeouts = request.extensions.get("timeout", {})
         timeout = timeouts.get("read", None)
 
@@ -202,7 +193,9 @@ class AsyncHTTP11Connection(AsyncConnectionInterface):
 
         return http_version, event.status_code, event.reason, headers, trailing_data
 
-    async def _receive_response_body(self, request: Request) -> AsyncIterator[bytes]:
+    async def _receive_response_body(
+        self, request: Request
+    ) -> typing.AsyncIterator[bytes]:
         timeouts = request.extensions.get("timeout", {})
         timeout = timeouts.get("read", None)
 
@@ -214,8 +207,8 @@ class AsyncHTTP11Connection(AsyncConnectionInterface):
                 break
 
     async def _receive_event(
-        self, timeout: Optional[float] = None
-    ) -> Union[h11.Event, Type[h11.PAUSED]]:
+        self, timeout: float | None = None
+    ) -> h11.Event | type[h11.PAUSED]:
         while True:
             with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
                 event = self._h11_state.next_event()
@@ -316,14 +309,14 @@ class AsyncHTTP11Connection(AsyncConnectionInterface):
     # These context managers are not used in the standard flow, but are
     # useful for testing or working with connection instances directly.
 
-    async def __aenter__(self) -> "AsyncHTTP11Connection":
+    async def __aenter__(self) -> AsyncHTTP11Connection:
         return self
 
     async def __aexit__(
         self,
-        exc_type: Optional[Type[BaseException]] = None,
-        exc_value: Optional[BaseException] = None,
-        traceback: Optional[TracebackType] = None,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: types.TracebackType | None = None,
     ) -> None:
         await self.aclose()
 
@@ -334,7 +327,7 @@ class HTTP11ConnectionByteStream:
         self._request = request
         self._closed = False
 
-    async def __aiter__(self) -> AsyncIterator[bytes]:
+    async def __aiter__(self) -> typing.AsyncIterator[bytes]:
         kwargs = {"request": self._request}
         try:
             async with Trace("receive_response_body", logger, self._request, kwargs):
@@ -360,7 +353,7 @@ class AsyncHTTP11UpgradeStream(AsyncNetworkStream):
         self._stream = stream
         self._leading_data = leading_data
 
-    async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes:
+    async def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
         if self._leading_data:
             buffer = self._leading_data[:max_bytes]
             self._leading_data = self._leading_data[max_bytes:]
@@ -368,7 +361,7 @@ class AsyncHTTP11UpgradeStream(AsyncNetworkStream):
         else:
             return await self._stream.read(max_bytes, timeout)
 
-    async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None:
+    async def write(self, buffer: bytes, timeout: float | None = None) -> None:
         await self._stream.write(buffer, timeout)
 
     async def aclose(self) -> None:
@@ -377,10 +370,10 @@ class AsyncHTTP11UpgradeStream(AsyncNetworkStream):
     async def start_tls(
         self,
         ssl_context: ssl.SSLContext,
-        server_hostname: Optional[str] = None,
-        timeout: Optional[float] = None,
+        server_hostname: str | None = None,
+        timeout: float | None = None,
     ) -> AsyncNetworkStream:
         return await self._stream.start_tls(ssl_context, server_hostname, timeout)
 
-    def get_extra_info(self, info: str) -> Any:
+    def get_extra_info(self, info: str) -> typing.Any:
         return self._stream.get_extra_info(info)

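One detail worth noting in the hunk above: the runtime-evaluated alias `H11SendEvent` keeps `typing.Union`, while the function annotations switch to the `X | Y` spelling. With `from __future__ import annotations`, only annotations are deferred (stored as strings), so `|` is safe there even on Python 3.8/3.9; an ordinary assignment is still evaluated eagerly and would fail before 3.10. A minimal illustration, not part of the diff:

```python
from __future__ import annotations

import typing

IntOrStr = typing.Union[int, str]  # evaluated now; `int | str` raises TypeError on 3.8/3.9


def parse(value: int | str | None = None) -> int | None:
    # This annotation is never evaluated at runtime, so `|` works on older Pythons.
    return None if value is None else int(value)


print(parse("42"))  # 42
```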
+ 22 - 28
contrib/python/httpcore/httpcore/_async/http2.py

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import enum
 import logging
 import time
@@ -45,14 +47,14 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
         self,
         origin: Origin,
         stream: AsyncNetworkStream,
-        keepalive_expiry: typing.Optional[float] = None,
+        keepalive_expiry: float | None = None,
     ):
         self._origin = origin
         self._network_stream = stream
-        self._keepalive_expiry: typing.Optional[float] = keepalive_expiry
+        self._keepalive_expiry: float | None = keepalive_expiry
         self._h2_state = h2.connection.H2Connection(config=self.CONFIG)
         self._state = HTTPConnectionState.IDLE
-        self._expire_at: typing.Optional[float] = None
+        self._expire_at: float | None = None
         self._request_count = 0
         self._init_lock = AsyncLock()
         self._state_lock = AsyncLock()
@@ -63,24 +65,20 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
         self._connection_error = False
 
         # Mapping from stream ID to response stream events.
-        self._events: typing.Dict[
+        self._events: dict[
             int,
-            typing.Union[
-                h2.events.ResponseReceived,
-                h2.events.DataReceived,
-                h2.events.StreamEnded,
-                h2.events.StreamReset,
-            ],
+            h2.events.ResponseReceived
+            | h2.events.DataReceived
+            | h2.events.StreamEnded
+            | h2.events.StreamReset,
         ] = {}
 
         # Connection terminated events are stored as state since
         # we need to handle them for all streams.
-        self._connection_terminated: typing.Optional[h2.events.ConnectionTerminated] = (
-            None
-        )
+        self._connection_terminated: h2.events.ConnectionTerminated | None = None
 
-        self._read_exception: typing.Optional[Exception] = None
-        self._write_exception: typing.Optional[Exception] = None
+        self._read_exception: Exception | None = None
+        self._write_exception: Exception | None = None
 
     async def handle_async_request(self, request: Request) -> Response:
         if not self.can_handle_request(request.url.origin):
@@ -284,7 +282,7 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
 
     async def _receive_response(
         self, request: Request, stream_id: int
-    ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]:
+    ) -> tuple[int, list[tuple[bytes, bytes]]]:
         """
         Return the response status code and headers for a given stream ID.
         """
@@ -321,9 +319,7 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
 
     async def _receive_stream_event(
         self, request: Request, stream_id: int
-    ) -> typing.Union[
-        h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded
-    ]:
+    ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded:
         """
         Return the next available event for a given stream ID.
 
@@ -337,7 +333,7 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
         return event
 
     async def _receive_events(
-        self, request: Request, stream_id: typing.Optional[int] = None
+        self, request: Request, stream_id: int | None = None
     ) -> None:
         """
         Read some data from the network until we see one or more events
@@ -425,9 +421,7 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
 
     # Wrappers around network read/write operations...
 
-    async def _read_incoming_data(
-        self, request: Request
-    ) -> typing.List[h2.events.Event]:
+    async def _read_incoming_data(self, request: Request) -> list[h2.events.Event]:
         timeouts = request.extensions.get("timeout", {})
         timeout = timeouts.get("read", None)
 
@@ -451,7 +445,7 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
             self._connection_error = True
             raise exc
 
-        events: typing.List[h2.events.Event] = self._h2_state.receive_data(data)
+        events: list[h2.events.Event] = self._h2_state.receive_data(data)
 
         return events
 
@@ -544,14 +538,14 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
     # These context managers are not used in the standard flow, but are
     # useful for testing or working with connection instances directly.
 
-    async def __aenter__(self) -> "AsyncHTTP2Connection":
+    async def __aenter__(self) -> AsyncHTTP2Connection:
         return self
 
     async def __aexit__(
         self,
-        exc_type: typing.Optional[typing.Type[BaseException]] = None,
-        exc_value: typing.Optional[BaseException] = None,
-        traceback: typing.Optional[types.TracebackType] = None,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: types.TracebackType | None = None,
     ) -> None:
         await self.aclose()
 

+ 36 - 37
contrib/python/httpcore/httpcore/_async/http_proxy.py

@@ -1,7 +1,9 @@
+from __future__ import annotations
+
+import base64
 import logging
 import ssl
-from base64 import b64encode
-from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union
+import typing
 
 from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend
 from .._exceptions import ProxyError
@@ -22,17 +24,18 @@ from .connection_pool import AsyncConnectionPool
 from .http11 import AsyncHTTP11Connection
 from .interfaces import AsyncConnectionInterface
 
-HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]]
-HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]]
+ByteOrStr = typing.Union[bytes, str]
+HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]]
+HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr]
 
 
 logger = logging.getLogger("httpcore.proxy")
 
 
 def merge_headers(
-    default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
-    override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
-) -> List[Tuple[bytes, bytes]]:
+    default_headers: typing.Sequence[tuple[bytes, bytes]] | None = None,
+    override_headers: typing.Sequence[tuple[bytes, bytes]] | None = None,
+) -> list[tuple[bytes, bytes]]:
     """
     Append default_headers and override_headers, de-duplicating if a key exists
     in both cases.
@@ -48,33 +51,28 @@ def merge_headers(
     return default_headers + override_headers
 
 
-def build_auth_header(username: bytes, password: bytes) -> bytes:
-    userpass = username + b":" + password
-    return b"Basic " + b64encode(userpass)
-
-
-class AsyncHTTPProxy(AsyncConnectionPool):
+class AsyncHTTPProxy(AsyncConnectionPool):  # pragma: nocover
     """
     A connection pool that sends requests via an HTTP proxy.
     """
 
     def __init__(
         self,
-        proxy_url: Union[URL, bytes, str],
-        proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None,
-        proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None,
-        ssl_context: Optional[ssl.SSLContext] = None,
-        proxy_ssl_context: Optional[ssl.SSLContext] = None,
-        max_connections: Optional[int] = 10,
-        max_keepalive_connections: Optional[int] = None,
-        keepalive_expiry: Optional[float] = None,
+        proxy_url: URL | bytes | str,
+        proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+        proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+        proxy_ssl_context: ssl.SSLContext | None = None,
+        max_connections: int | None = 10,
+        max_keepalive_connections: int | None = None,
+        keepalive_expiry: float | None = None,
         http1: bool = True,
         http2: bool = False,
         retries: int = 0,
-        local_address: Optional[str] = None,
-        uds: Optional[str] = None,
-        network_backend: Optional[AsyncNetworkBackend] = None,
-        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+        local_address: str | None = None,
+        uds: str | None = None,
+        network_backend: AsyncNetworkBackend | None = None,
+        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
     ) -> None:
         """
         A connection pool for making HTTP requests.
@@ -139,7 +137,8 @@ class AsyncHTTPProxy(AsyncConnectionPool):
         if proxy_auth is not None:
             username = enforce_bytes(proxy_auth[0], name="proxy_auth")
             password = enforce_bytes(proxy_auth[1], name="proxy_auth")
-            authorization = build_auth_header(username, password)
+            userpass = username + b":" + password
+            authorization = b"Basic " + base64.b64encode(userpass)
             self._proxy_headers = [
                 (b"Proxy-Authorization", authorization)
             ] + self._proxy_headers
@@ -172,11 +171,11 @@ class AsyncForwardHTTPConnection(AsyncConnectionInterface):
         self,
         proxy_origin: Origin,
         remote_origin: Origin,
-        proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None,
-        keepalive_expiry: Optional[float] = None,
-        network_backend: Optional[AsyncNetworkBackend] = None,
-        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
-        proxy_ssl_context: Optional[ssl.SSLContext] = None,
+        proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None,
+        keepalive_expiry: float | None = None,
+        network_backend: AsyncNetworkBackend | None = None,
+        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
+        proxy_ssl_context: ssl.SSLContext | None = None,
     ) -> None:
         self._connection = AsyncHTTPConnection(
             origin=proxy_origin,
@@ -236,14 +235,14 @@ class AsyncTunnelHTTPConnection(AsyncConnectionInterface):
         self,
         proxy_origin: Origin,
         remote_origin: Origin,
-        ssl_context: Optional[ssl.SSLContext] = None,
-        proxy_ssl_context: Optional[ssl.SSLContext] = None,
-        proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
-        keepalive_expiry: Optional[float] = None,
+        ssl_context: ssl.SSLContext | None = None,
+        proxy_ssl_context: ssl.SSLContext | None = None,
+        proxy_headers: typing.Sequence[tuple[bytes, bytes]] | None = None,
+        keepalive_expiry: float | None = None,
         http1: bool = True,
         http2: bool = False,
-        network_backend: Optional[AsyncNetworkBackend] = None,
-        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+        network_backend: AsyncNetworkBackend | None = None,
+        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
     ) -> None:
         self._connection: AsyncConnectionInterface = AsyncHTTPConnection(
             origin=proxy_origin,

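The hunk above inlines the former `build_auth_header()` helper into `AsyncHTTPProxy.__init__`. For clarity, a tiny sketch of the `Proxy-Authorization` value that code produces, using made-up credentials:

```python
import base64

username, password = b"user", b"s3cret"  # placeholder credentials
userpass = username + b":" + password
authorization = b"Basic " + base64.b64encode(userpass)
print(authorization)  # b'Basic dXNlcjpzM2NyZXQ='
```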
+ 14 - 12
contrib/python/httpcore/httpcore/_async/interfaces.py

@@ -1,5 +1,7 @@
-from contextlib import asynccontextmanager
-from typing import AsyncIterator, Optional, Union
+from __future__ import annotations
+
+import contextlib
+import typing
 
 from .._models import (
     URL,
@@ -18,12 +20,12 @@ from .._models import (
 class AsyncRequestInterface:
     async def request(
         self,
-        method: Union[bytes, str],
-        url: Union[URL, bytes, str],
+        method: bytes | str,
+        url: URL | bytes | str,
         *,
         headers: HeaderTypes = None,
-        content: Union[bytes, AsyncIterator[bytes], None] = None,
-        extensions: Optional[Extensions] = None,
+        content: bytes | typing.AsyncIterator[bytes] | None = None,
+        extensions: Extensions | None = None,
     ) -> Response:
         # Strict type checking on our parameters.
         method = enforce_bytes(method, name="method")
@@ -47,16 +49,16 @@ class AsyncRequestInterface:
             await response.aclose()
         return response
 
-    @asynccontextmanager
+    @contextlib.asynccontextmanager
     async def stream(
         self,
-        method: Union[bytes, str],
-        url: Union[URL, bytes, str],
+        method: bytes | str,
+        url: URL | bytes | str,
         *,
         headers: HeaderTypes = None,
-        content: Union[bytes, AsyncIterator[bytes], None] = None,
-        extensions: Optional[Extensions] = None,
-    ) -> AsyncIterator[Response]:
+        content: bytes | typing.AsyncIterator[bytes] | None = None,
+        extensions: Extensions | None = None,
+    ) -> typing.AsyncIterator[Response]:
         # Strict type checking on our parameters.
         method = enforce_bytes(method, name="method")
         url = enforce_url(url, name="url")

+ 30 - 31
contrib/python/httpcore/httpcore/_async/socks_proxy.py

@@ -1,8 +1,9 @@
+from __future__ import annotations
+
 import logging
 import ssl
-import typing
 
-from socksio import socks5
+import socksio
 
 from .._backends.auto import AutoBackend
 from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream
@@ -43,24 +44,24 @@ async def _init_socks5_connection(
     *,
     host: bytes,
     port: int,
-    auth: typing.Optional[typing.Tuple[bytes, bytes]] = None,
+    auth: tuple[bytes, bytes] | None = None,
 ) -> None:
-    conn = socks5.SOCKS5Connection()
+    conn = socksio.socks5.SOCKS5Connection()
 
     # Auth method request
     auth_method = (
-        socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED
+        socksio.socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED
         if auth is None
-        else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD
+        else socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD
     )
-    conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method]))
+    conn.send(socksio.socks5.SOCKS5AuthMethodsRequest([auth_method]))
     outgoing_bytes = conn.data_to_send()
     await stream.write(outgoing_bytes)
 
     # Auth method response
     incoming_bytes = await stream.read(max_bytes=4096)
     response = conn.receive_data(incoming_bytes)
-    assert isinstance(response, socks5.SOCKS5AuthReply)
+    assert isinstance(response, socksio.socks5.SOCKS5AuthReply)
     if response.method != auth_method:
         requested = AUTH_METHODS.get(auth_method, "UNKNOWN")
         responded = AUTH_METHODS.get(response.method, "UNKNOWN")
@@ -68,25 +69,25 @@ async def _init_socks5_connection(
             f"Requested {requested} from proxy server, but got {responded}."
         )
 
-    if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
+    if response.method == socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
         # Username/password request
         assert auth is not None
         username, password = auth
-        conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password))
+        conn.send(socksio.socks5.SOCKS5UsernamePasswordRequest(username, password))
         outgoing_bytes = conn.data_to_send()
         await stream.write(outgoing_bytes)
 
         # Username/password response
         incoming_bytes = await stream.read(max_bytes=4096)
         response = conn.receive_data(incoming_bytes)
-        assert isinstance(response, socks5.SOCKS5UsernamePasswordReply)
+        assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply)
         if not response.success:
             raise ProxyError("Invalid username/password")
 
     # Connect request
     conn.send(
-        socks5.SOCKS5CommandRequest.from_address(
-            socks5.SOCKS5Command.CONNECT, (host, port)
+        socksio.socks5.SOCKS5CommandRequest.from_address(
+            socksio.socks5.SOCKS5Command.CONNECT, (host, port)
         )
     )
     outgoing_bytes = conn.data_to_send()
@@ -95,31 +96,29 @@ async def _init_socks5_connection(
     # Connect response
     incoming_bytes = await stream.read(max_bytes=4096)
     response = conn.receive_data(incoming_bytes)
-    assert isinstance(response, socks5.SOCKS5Reply)
-    if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED:
+    assert isinstance(response, socksio.socks5.SOCKS5Reply)
+    if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED:
         reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN")
         raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
 
 
-class AsyncSOCKSProxy(AsyncConnectionPool):
+class AsyncSOCKSProxy(AsyncConnectionPool):  # pragma: nocover
     """
     A connection pool that sends requests via a SOCKS proxy.
     """
 
     def __init__(
         self,
-        proxy_url: typing.Union[URL, bytes, str],
-        proxy_auth: typing.Optional[
-            typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]]
-        ] = None,
-        ssl_context: typing.Optional[ssl.SSLContext] = None,
-        max_connections: typing.Optional[int] = 10,
-        max_keepalive_connections: typing.Optional[int] = None,
-        keepalive_expiry: typing.Optional[float] = None,
+        proxy_url: URL | bytes | str,
+        proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+        max_connections: int | None = 10,
+        max_keepalive_connections: int | None = None,
+        keepalive_expiry: float | None = None,
         http1: bool = True,
         http2: bool = False,
         retries: int = 0,
-        network_backend: typing.Optional[AsyncNetworkBackend] = None,
+        network_backend: AsyncNetworkBackend | None = None,
     ) -> None:
         """
         A connection pool for making HTTP requests.
@@ -167,7 +166,7 @@ class AsyncSOCKSProxy(AsyncConnectionPool):
             username, password = proxy_auth
             username_bytes = enforce_bytes(username, name="proxy_auth")
             password_bytes = enforce_bytes(password, name="proxy_auth")
-            self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = (
+            self._proxy_auth: tuple[bytes, bytes] | None = (
                 username_bytes,
                 password_bytes,
             )
@@ -192,12 +191,12 @@ class AsyncSocks5Connection(AsyncConnectionInterface):
         self,
         proxy_origin: Origin,
         remote_origin: Origin,
-        proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None,
-        ssl_context: typing.Optional[ssl.SSLContext] = None,
-        keepalive_expiry: typing.Optional[float] = None,
+        proxy_auth: tuple[bytes, bytes] | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+        keepalive_expiry: float | None = None,
         http1: bool = True,
         http2: bool = False,
-        network_backend: typing.Optional[AsyncNetworkBackend] = None,
+        network_backend: AsyncNetworkBackend | None = None,
     ) -> None:
         self._proxy_origin = proxy_origin
         self._remote_origin = remote_origin
@@ -211,7 +210,7 @@ class AsyncSocks5Connection(AsyncConnectionInterface):
             AutoBackend() if network_backend is None else network_backend
         )
         self._connect_lock = AsyncLock()
-        self._connection: typing.Optional[AsyncConnectionInterface] = None
+        self._connection: AsyncConnectionInterface | None = None
         self._connect_failed = False
 
     async def handle_async_request(self, request: Request) -> Response:

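The SOCKS5 handshake above (auth-method negotiation, optional username/password exchange, then a CONNECT request) is driven by the dedicated proxy pools, which predate the new `proxy=` argument and remain available. A hedged sync usage sketch; the proxy address is a placeholder and the optional socksio dependency (`pip install httpcore[socks]`) is assumed to be installed:

```python
import httpcore

proxy_pool = httpcore.SOCKSProxy(proxy_url="socks5://localhost:1080/")
response = proxy_pool.request("GET", "https://www.example.com/")
print(response.status)
```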
Some files were not shown because too many files changed in this diff