
[rh:curlcffi] Add support for `curl_cffi`

Authored by: coletdjnz, Grub4K, pukkandan, bashonly

Co-authored-by: Simon Sawicki <contact@grub4k.xyz>
Co-authored-by: pukkandan <pukkandan.ytdlp@gmail.com>
Co-authored-by: bashonly <bashonly@protonmail.com>
coletdjnz authored 1 year ago · commit 52f5be1f1e

+ 19 - 3
.github/workflows/build.yml

@@ -247,9 +247,25 @@ jobs:
         run: |
           brew install coreutils
           python3 devscripts/install_deps.py --user -o --include build
-          python3 devscripts/install_deps.py --print --include pyinstaller > requirements.txt
+          python3 devscripts/install_deps.py --print --include pyinstaller_macos > requirements.txt
           # We need to ignore wheels otherwise we break universal2 builds
           python3 -m pip install -U --user --no-binary :all: -r requirements.txt
+          # We need to fuse our own universal2 wheels for curl_cffi
+          python3 -m pip install -U --user delocate
+          mkdir curl_cffi_whls curl_cffi_universal2
+          python3 devscripts/install_deps.py --print -o --include curl_cffi > requirements.txt
+          for platform in "macosx_11_0_arm64" "macosx_11_0_x86_64"; do
+            python3 -m pip download \
+              --only-binary=:all: \
+              --platform "${platform}" \
+              --pre -d curl_cffi_whls \
+              -r requirements.txt
+          done
+          python3 -m delocate.cmd.delocate_fuse curl_cffi_whls/curl_cffi*.whl -w curl_cffi_universal2
+          python3 -m delocate.cmd.delocate_fuse curl_cffi_whls/cffi*.whl -w curl_cffi_universal2
+          cd curl_cffi_universal2
+          for wheel in *cffi*.whl; do mv -n -- "${wheel}" "${wheel/x86_64/universal2}"; done
+          python3 -m pip install -U --user *cffi*.whl
 
       - name: Prepare
         run: |
@@ -303,7 +319,7 @@ jobs:
         run: |
           brew install coreutils
           python3 devscripts/install_deps.py --user -o --include build
-          python3 devscripts/install_deps.py --user --include pyinstaller
+          python3 devscripts/install_deps.py --user --include pyinstaller_macos --include curl_cffi
 
       - name: Prepare
         run: |
@@ -345,7 +361,7 @@ jobs:
       - name: Install Requirements
         run: | # Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
           python devscripts/install_deps.py -o --include build
-          python devscripts/install_deps.py --include py2exe
+          python devscripts/install_deps.py --include py2exe --include curl_cffi
           python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-5.8.0-py3-none-any.whl"
 
       - name: Prepare
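For illustration, a minimal Python sketch of the retagging step in the workflow above: after `delocate_fuse` merges the per-architecture wheels, the fused files still carry the `x86_64` platform tag, so they are renamed to `universal2` before installation. Only the directory and filename pattern are taken from the workflow; everything else here is an assumption for the sketch.

```python
# Sketch of the `mv -n -- "${wheel}" "${wheel/x86_64/universal2}"` loop above,
# operating on the same curl_cffi_universal2 output directory used by the workflow.
import os

out_dir = 'curl_cffi_universal2'
for wheel in os.listdir(out_dir):
    if 'cffi' in wheel and 'x86_64' in wheel and wheel.endswith('.whl'):
        target = os.path.join(out_dir, wheel.replace('x86_64', 'universal2'))
        if not os.path.exists(target):  # mirror `mv -n`: never overwrite
            os.rename(os.path.join(out_dir, wheel), target)
```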

+ 1 - 1
.github/workflows/core.yml

@@ -53,7 +53,7 @@ jobs:
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install test requirements
-      run: python3 ./devscripts/install_deps.py --include dev
+      run: python3 ./devscripts/install_deps.py --include dev --include curl_cffi
     - name: Run tests
       continue-on-error: False
       run: |

+ 9 - 0
README.md

@@ -196,6 +196,15 @@ While all the other dependencies are optional, `ffmpeg` and `ffprobe` are highly
 * [**websockets**](https://github.com/aaugustin/websockets)\* - For downloading over websocket. Licensed under [BSD-3-Clause](https://github.com/aaugustin/websockets/blob/main/LICENSE)
 * [**requests**](https://github.com/psf/requests)\* - HTTP library. For HTTPS proxy and persistent connections support. Licensed under [Apache-2.0](https://github.com/psf/requests/blob/main/LICENSE)
 
+#### Impersonation
+
+The following provide support for impersonating browser requests. This may be required for some sites that employ TLS fingerprinting. 
+
+* [**curl_cffi**](https://github.com/yifeikong/curl_cffi) (recommended) - Python binding for [curl-impersonate](https://github.com/lwthiker/curl-impersonate). Provides impersonation targets for Chrome, Edge and Safari. Licensed under [MIT](https://github.com/yifeikong/curl_cffi/blob/main/LICENSE)
+  * Can be installed with the `curl_cffi` group, e.g. `pip install yt-dlp[default,curl_cffi]`
+  * Only included in `yt-dlp.exe`, `yt-dlp_macos` and `yt-dlp_macos_legacy` builds
+
+
 ### Metadata
 
 * [**mutagen**](https://github.com/quodlibet/mutagen)\* - For `--embed-thumbnail` in certain formats. Licensed under [GPLv2+](https://github.com/quodlibet/mutagen/blob/master/COPYING)
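The impersonation support described in the README section above is surfaced through yt-dlp's networking layer as an `impersonate` request extension (see the new tests in `test/test_networking.py` below). A minimal sketch using only names introduced by this commit; the URL is a placeholder:

```python
from yt_dlp.networking import Request
from yt_dlp.networking.impersonate import ImpersonateTarget

# Impersonation is requested per-request via the 'impersonate' extension.
# An ImpersonateTarget may be fully specified or partial (e.g. just the client name).
req = Request(
    'https://example.com/video',  # placeholder URL
    extensions={'impersonate': ImpersonateTarget('chrome', '110')},
)
```

Which targets are actually available depends on the handler; the tests below iterate `rh.supported_targets` to exercise every target a handler advertises.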

+ 2 - 0
pyproject.toml

@@ -53,6 +53,7 @@ dependencies = [
 
 [project.optional-dependencies]
 default = []
+curl_cffi = ["curl-cffi==0.5.10; implementation_name=='cpython'"]
 secretstorage = [
     "cffi",
     "secretstorage",
@@ -69,6 +70,7 @@ dev = [
     "pytest",
 ]
 pyinstaller = ["pyinstaller>=6.3"]
+pyinstaller_macos = ["pyinstaller==5.13.2"]  # needed for curl_cffi builds
 py2exe = ["py2exe>=0.12"]
 
 [project.urls]
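The `implementation_name=='cpython'` environment marker above means the `curl_cffi` extra only pulls in `curl-cffi` on CPython; on other implementations the group resolves to nothing. A sketch of the equivalent runtime condition:

```python
# The marker is evaluated by pip at install time; this is the runtime equivalent.
import sys

print(sys.implementation.name == 'cpython')  # True on CPython, False on e.g. PyPy
```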

+ 329 - 105
test/test_networking.py

@@ -30,7 +30,7 @@ from http.cookiejar import CookieJar
 from test.conftest import validate_and_send
 from test.helper import FakeYDL, http_server_port, verify_address_availability
 from yt_dlp.cookies import YoutubeDLCookieJar
-from yt_dlp.dependencies import brotli, requests, urllib3
+from yt_dlp.dependencies import brotli, curl_cffi, requests, urllib3
 from yt_dlp.networking import (
     HEADRequest,
     PUTRequest,
@@ -57,7 +57,7 @@ from yt_dlp.networking.impersonate import (
 )
 from yt_dlp.utils import YoutubeDLError
 from yt_dlp.utils._utils import _YDLLogger as FakeLogger
-from yt_dlp.utils.networking import HTTPHeaderDict
+from yt_dlp.utils.networking import HTTPHeaderDict, std_headers
 
 TEST_DIR = os.path.dirname(os.path.abspath(__file__))
 
@@ -79,6 +79,7 @@ def _build_proxy_handler(name):
 
 class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
     protocol_version = 'HTTP/1.1'
+    default_request_version = 'HTTP/1.1'
 
     def log_message(self, format, *args):
         pass
@@ -116,6 +117,8 @@ class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
     def _read_data(self):
         if 'Content-Length' in self.headers:
             return self.rfile.read(int(self.headers['Content-Length']))
+        else:
+            return b''
 
     def do_POST(self):
         data = self._read_data() + str(self.headers).encode()
@@ -199,7 +202,8 @@ class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
             self._headers()
         elif self.path.startswith('/308-to-headers'):
             self.send_response(308)
-            self.send_header('Location', '/headers')
+            # redirect to "localhost" for testing cookie redirection handling
+            self.send_header('Location', f'http://localhost:{self.connection.getsockname()[1]}/headers')
             self.send_header('Content-Length', '0')
             self.end_headers()
         elif self.path == '/trailing_garbage':
@@ -314,7 +318,7 @@ class TestRequestHandlerBase:
 
 
 class TestHTTPRequestHandler(TestRequestHandlerBase):
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_verify_cert(self, handler):
         with handler() as rh:
             with pytest.raises(CertificateVerifyError):
@@ -325,7 +329,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
             assert r.status == 200
             r.close()
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_ssl_error(self, handler):
         # HTTPS server with too old TLS version
         # XXX: is there a better way to test this than to create a new server?
@@ -339,11 +343,11 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
         https_server_thread.start()
 
         with handler(verify=False) as rh:
-            with pytest.raises(SSLError, match=r'ssl(?:v3|/tls) alert handshake failure') as exc_info:
+            with pytest.raises(SSLError, match=r'(?i)ssl(?:v3|/tls).alert.handshake.failure') as exc_info:
                 validate_and_send(rh, Request(f'https://127.0.0.1:{https_port}/headers'))
             assert not issubclass(exc_info.type, CertificateVerifyError)
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_percent_encode(self, handler):
         with handler() as rh:
             # Unicode characters should be encoded with uppercase percent-encoding
@@ -355,7 +359,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
             assert res.status == 200
             res.close()
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     @pytest.mark.parametrize('path', [
         '/a/b/./../../headers',
         '/redirect_dotsegments',
@@ -371,6 +375,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
             assert res.url == f'http://127.0.0.1:{self.http_port}/headers'
             res.close()
 
+    # Not supported by CurlCFFI (non-standard)
     @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
     def test_unicode_path_redirection(self, handler):
         with handler() as rh:
@@ -378,7 +383,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
             assert r.url == f'http://127.0.0.1:{self.http_port}/%E4%B8%AD%E6%96%87.html'
             r.close()
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_raise_http_error(self, handler):
         with handler() as rh:
             for bad_status in (400, 500, 599, 302):
@@ -388,7 +393,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
             # Should not raise an error
             validate_and_send(rh, Request('http://127.0.0.1:%d/gen_200' % self.http_port)).close()
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_response_url(self, handler):
         with handler() as rh:
             # Response url should be that of the last url in redirect chain
@@ -399,62 +404,50 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
             assert res2.url == f'http://127.0.0.1:{self.http_port}/gen_200'
             res2.close()
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
-    def test_redirect(self, handler):
+    # Covers some basic cases where we expect some level of consistency between request handlers
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
+    @pytest.mark.parametrize('redirect_status,method,expected', [
+        # A 303 must either use GET or HEAD for subsequent request
+        (303, 'POST', ('', 'GET', False)),
+        (303, 'HEAD', ('', 'HEAD', False)),
+
+        # 301 and 302 turn POST only into a GET
+        (301, 'POST', ('', 'GET', False)),
+        (301, 'HEAD', ('', 'HEAD', False)),
+        (302, 'POST', ('', 'GET', False)),
+        (302, 'HEAD', ('', 'HEAD', False)),
+
+        # 307 and 308 should not change method
+        (307, 'POST', ('testdata', 'POST', True)),
+        (308, 'POST', ('testdata', 'POST', True)),
+        (307, 'HEAD', ('', 'HEAD', False)),
+        (308, 'HEAD', ('', 'HEAD', False)),
+    ])
+    def test_redirect(self, handler, redirect_status, method, expected):
         with handler() as rh:
-            def do_req(redirect_status, method, assert_no_content=False):
-                data = b'testdata' if method in ('POST', 'PUT') else None
-                res = validate_and_send(
-                    rh, Request(f'http://127.0.0.1:{self.http_port}/redirect_{redirect_status}', method=method, data=data))
-
-                headers = b''
-                data_sent = b''
-                if data is not None:
-                    data_sent += res.read(len(data))
-                    if data_sent != data:
-                        headers += data_sent
-                        data_sent = b''
-
-                headers += res.read()
-
-                if assert_no_content or data is None:
-                    assert b'Content-Type' not in headers
-                    assert b'Content-Length' not in headers
-                else:
-                    assert b'Content-Type' in headers
-                    assert b'Content-Length' in headers
-
-                return data_sent.decode(), res.headers.get('method', '')
-
-            # A 303 must either use GET or HEAD for subsequent request
-            assert do_req(303, 'POST', True) == ('', 'GET')
-            assert do_req(303, 'HEAD') == ('', 'HEAD')
-
-            assert do_req(303, 'PUT', True) == ('', 'GET')
-
-            # 301 and 302 turn POST only into a GET
-            assert do_req(301, 'POST', True) == ('', 'GET')
-            assert do_req(301, 'HEAD') == ('', 'HEAD')
-            assert do_req(302, 'POST', True) == ('', 'GET')
-            assert do_req(302, 'HEAD') == ('', 'HEAD')
-
-            assert do_req(301, 'PUT') == ('testdata', 'PUT')
-            assert do_req(302, 'PUT') == ('testdata', 'PUT')
+            data = b'testdata' if method == 'POST' else None
+            headers = {}
+            if data is not None:
+                headers['Content-Type'] = 'application/test'
+            res = validate_and_send(
+                rh, Request(f'http://127.0.0.1:{self.http_port}/redirect_{redirect_status}', method=method, data=data,
+                            headers=headers))
 
-            # 307 and 308 should not change method
-            for m in ('POST', 'PUT'):
-                assert do_req(307, m) == ('testdata', m)
-                assert do_req(308, m) == ('testdata', m)
+            headers = b''
+            data_recv = b''
+            if data is not None:
+                data_recv += res.read(len(data))
+                if data_recv != data:
+                    headers += data_recv
+                    data_recv = b''
 
-            assert do_req(307, 'HEAD') == ('', 'HEAD')
-            assert do_req(308, 'HEAD') == ('', 'HEAD')
+            headers += res.read()
 
-            # These should not redirect and instead raise an HTTPError
-            for code in (300, 304, 305, 306):
-                with pytest.raises(HTTPError):
-                    do_req(code, 'GET')
+            assert expected[0] == data_recv.decode()
+            assert expected[1] == res.headers.get('method')
+            assert expected[2] == ('content-length' in headers.decode().lower())
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_request_cookie_header(self, handler):
         # We should accept a Cookie header being passed as in normal headers and handle it appropriately.
         with handler() as rh:
@@ -463,16 +456,17 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
                 rh, Request(
                     f'http://127.0.0.1:{self.http_port}/headers',
                     headers={'Cookie': 'test=test'})).read().decode()
-            assert 'Cookie: test=test' in res
+            assert 'cookie: test=test' in res.lower()
 
             # Specified Cookie header should be removed on any redirect
             res = validate_and_send(
                 rh, Request(
                     f'http://127.0.0.1:{self.http_port}/308-to-headers',
-                    headers={'Cookie': 'test=test'})).read().decode()
-            assert 'Cookie: test=test' not in res
+                    headers={'Cookie': 'test=test2'})).read().decode()
+            assert 'cookie: test=test2' not in res.lower()
 
         # Specified Cookie header should override global cookiejar for that request
+        # Whether cookies from the cookiejar are applied on the redirect is considered undefined for now
         cookiejar = YoutubeDLCookieJar()
         cookiejar.set_cookie(http.cookiejar.Cookie(
             version=0, name='test', value='ytdlp', port=None, port_specified=False,
@@ -482,23 +476,23 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
 
         with handler(cookiejar=cookiejar) as rh:
             data = validate_and_send(
-                rh, Request(f'http://127.0.0.1:{self.http_port}/headers', headers={'cookie': 'test=test'})).read()
-            assert b'Cookie: test=ytdlp' not in data
-            assert b'Cookie: test=test' in data
+                rh, Request(f'http://127.0.0.1:{self.http_port}/headers', headers={'cookie': 'test=test3'})).read()
+            assert b'cookie: test=ytdlp' not in data.lower()
+            assert b'cookie: test=test3' in data.lower()
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_redirect_loop(self, handler):
         with handler() as rh:
             with pytest.raises(HTTPError, match='redirect loop'):
                 validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/redirect_loop'))
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_incompleteread(self, handler):
         with handler(timeout=2) as rh:
-            with pytest.raises(IncompleteRead):
+            with pytest.raises(IncompleteRead, match='13 bytes read, 234221 more expected'):
                 validate_and_send(rh, Request('http://127.0.0.1:%d/incompleteread' % self.http_port)).read()
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_cookies(self, handler):
         cookiejar = YoutubeDLCookieJar()
         cookiejar.set_cookie(http.cookiejar.Cookie(
@@ -507,47 +501,66 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
 
         with handler(cookiejar=cookiejar) as rh:
             data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read()
-            assert b'Cookie: test=ytdlp' in data
+            assert b'cookie: test=ytdlp' in data.lower()
 
         # Per request
         with handler() as rh:
             data = validate_and_send(
                 rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions={'cookiejar': cookiejar})).read()
-            assert b'Cookie: test=ytdlp' in data
+            assert b'cookie: test=ytdlp' in data.lower()
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_headers(self, handler):
 
         with handler(headers=HTTPHeaderDict({'test1': 'test', 'test2': 'test2'})) as rh:
             # Global Headers
-            data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read()
-            assert b'Test1: test' in data
+            data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read().lower()
+            assert b'test1: test' in data
 
             # Per request headers, merged with global
             data = validate_and_send(rh, Request(
-                f'http://127.0.0.1:{self.http_port}/headers', headers={'test2': 'changed', 'test3': 'test3'})).read()
-            assert b'Test1: test' in data
-            assert b'Test2: changed' in data
-            assert b'Test2: test2' not in data
-            assert b'Test3: test3' in data
-
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
-    def test_timeout(self, handler):
+                f'http://127.0.0.1:{self.http_port}/headers', headers={'test2': 'changed', 'test3': 'test3'})).read().lower()
+            assert b'test1: test' in data
+            assert b'test2: changed' in data
+            assert b'test2: test2' not in data
+            assert b'test3: test3' in data
+
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
+    def test_read_timeout(self, handler):
         with handler() as rh:
             # Default timeout is 20 seconds, so this should go through
             validate_and_send(
-                rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_3'))
+                rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_1'))
 
-        with handler(timeout=0.5) as rh:
+        with handler(timeout=0.1) as rh:
             with pytest.raises(TransportError):
                 validate_and_send(
-                    rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_1'))
+                    rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_5'))
 
             # Per request timeout, should override handler timeout
             validate_and_send(
                 rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_1', extensions={'timeout': 4}))
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
+    def test_connect_timeout(self, handler):
+        # nothing should be listening on this port
+        connect_timeout_url = 'http://10.255.255.255'
+        with handler(timeout=0.01) as rh:
+            now = time.time()
+            with pytest.raises(TransportError):
+                validate_and_send(
+                    rh, Request(connect_timeout_url))
+            assert 0.01 <= time.time() - now < 20
+
+        with handler() as rh:
+            with pytest.raises(TransportError):
+                # Per request timeout, should override handler timeout
+                now = time.time()
+                validate_and_send(
+                    rh, Request(connect_timeout_url, extensions={'timeout': 0.01}))
+                assert 0.01 <= time.time() - now < 20
+
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_source_address(self, handler):
         source_address = f'127.0.0.{random.randint(5, 255)}'
         # on some systems these loopback addresses we need for testing may not be available
@@ -558,6 +571,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
                 rh, Request(f'http://127.0.0.1:{self.http_port}/source_address')).read().decode()
             assert source_address == data
 
+    # Not supported by CurlCFFI
     @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
     def test_gzip_trailing_garbage(self, handler):
         with handler() as rh:
@@ -575,7 +589,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
             assert res.headers.get('Content-Encoding') == 'br'
             assert res.read() == b'<html><video src="/vid.mp4" /></html>'
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_deflate(self, handler):
         with handler() as rh:
             res = validate_and_send(
@@ -585,7 +599,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
             assert res.headers.get('Content-Encoding') == 'deflate'
             assert res.read() == b'<html><video src="/vid.mp4" /></html>'
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_gzip(self, handler):
         with handler() as rh:
             res = validate_and_send(
@@ -595,7 +609,7 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
             assert res.headers.get('Content-Encoding') == 'gzip'
             assert res.read() == b'<html><video src="/vid.mp4" /></html>'
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_multiple_encodings(self, handler):
         with handler() as rh:
             for pair in ('gzip,deflate', 'deflate, gzip', 'gzip, gzip', 'deflate, deflate'):
@@ -606,17 +620,18 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
                 assert res.headers.get('Content-Encoding') == pair
                 assert res.read() == b'<html><video src="/vid.mp4" /></html>'
 
+    # Not supported by curl_cffi
     @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
     def test_unsupported_encoding(self, handler):
         with handler() as rh:
             res = validate_and_send(
                 rh, Request(
                     f'http://127.0.0.1:{self.http_port}/content-encoding',
-                    headers={'ytdl-encoding': 'unsupported'}))
+                    headers={'ytdl-encoding': 'unsupported', 'Accept-Encoding': '*'}))
             assert res.headers.get('Content-Encoding') == 'unsupported'
             assert res.read() == b'raw'
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_read(self, handler):
         with handler() as rh:
             res = validate_and_send(
@@ -624,9 +639,12 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
             assert res.readable()
             assert res.read(1) == b'H'
             assert res.read(3) == b'ost'
+            assert res.read().decode().endswith('\n\n')
+            assert res.read() == b''
 
 
 class TestHTTPProxy(TestRequestHandlerBase):
+    # Note: this only tests HTTP URLs over a non-CONNECT proxy
     @classmethod
     def setup_class(cls):
         super().setup_class()
@@ -646,7 +664,7 @@ class TestHTTPProxy(TestRequestHandlerBase):
         cls.geo_proxy_thread.daemon = True
         cls.geo_proxy_thread.start()
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_http_proxy(self, handler):
         http_proxy = f'http://127.0.0.1:{self.proxy_port}'
         geo_proxy = f'http://127.0.0.1:{self.geo_port}'
@@ -672,7 +690,7 @@ class TestHTTPProxy(TestRequestHandlerBase):
             assert res != f'normal: {real_url}'
             assert 'Accept' in res
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_noproxy(self, handler):
         with handler(proxies={'proxy': f'http://127.0.0.1:{self.proxy_port}'}) as rh:
             # NO_PROXY
@@ -682,7 +700,7 @@ class TestHTTPProxy(TestRequestHandlerBase):
                     'utf-8')
                 assert 'Accept' in nop_response
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_allproxy(self, handler):
         url = 'http://foo.com/bar'
         with handler() as rh:
@@ -690,7 +708,7 @@ class TestHTTPProxy(TestRequestHandlerBase):
                 'utf-8')
             assert response == f'normal: {url}'
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_http_proxy_with_idn(self, handler):
         with handler(proxies={
             'http': f'http://127.0.0.1:{self.proxy_port}',
@@ -702,7 +720,6 @@ class TestHTTPProxy(TestRequestHandlerBase):
 
 
 class TestClientCertificate:
-
     @classmethod
     def setup_class(cls):
         certfn = os.path.join(TEST_DIR, 'testcert.pem')
@@ -728,27 +745,27 @@ class TestClientCertificate:
         ) as rh:
             validate_and_send(rh, Request(f'https://127.0.0.1:{self.port}/video.html')).read().decode()
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_certificate_combined_nopass(self, handler):
         self._run_test(handler, client_cert={
             'client_certificate': os.path.join(self.certdir, 'clientwithkey.crt'),
         })
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_certificate_nocombined_nopass(self, handler):
         self._run_test(handler, client_cert={
             'client_certificate': os.path.join(self.certdir, 'client.crt'),
             'client_certificate_key': os.path.join(self.certdir, 'client.key'),
         })
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_certificate_combined_pass(self, handler):
         self._run_test(handler, client_cert={
             'client_certificate': os.path.join(self.certdir, 'clientwithencryptedkey.crt'),
             'client_certificate_password': 'foobar',
         })
 
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_certificate_nocombined_pass(self, handler):
         self._run_test(handler, client_cert={
             'client_certificate': os.path.join(self.certdir, 'client.crt'),
@@ -757,6 +774,18 @@ class TestClientCertificate:
         })
 
 
+@pytest.mark.parametrize('handler', ['CurlCFFI'], indirect=True)
+class TestHTTPImpersonateRequestHandler(TestRequestHandlerBase):
+    def test_supported_impersonate_targets(self, handler):
+        with handler(headers=std_headers) as rh:
+            # note: this assumes the impersonate request handler supports the impersonate extension
+            for target in rh.supported_targets:
+                res = validate_and_send(rh, Request(
+                    f'http://127.0.0.1:{self.http_port}/headers', extensions={'impersonate': target}))
+                assert res.status == 200
+                assert std_headers['user-agent'].lower() not in res.read().decode().lower()
+
+
 class TestRequestHandlerMisc:
     """Misc generic tests for request handlers, not related to request or validation testing"""
     @pytest.mark.parametrize('handler,logger_name', [
@@ -935,6 +964,172 @@ class TestRequestsRequestHandler(TestRequestHandlerBase):
         assert called
 
 
+@pytest.mark.parametrize('handler', ['CurlCFFI'], indirect=True)
+class TestCurlCFFIRequestHandler(TestRequestHandlerBase):
+
+    @pytest.mark.parametrize('params,extensions', [
+        ({}, {'impersonate': ImpersonateTarget('chrome')}),
+        ({'impersonate': ImpersonateTarget('chrome', '110')}, {}),
+        ({'impersonate': ImpersonateTarget('chrome', '99')}, {'impersonate': ImpersonateTarget('chrome', '110')}),
+    ])
+    def test_impersonate(self, handler, params, extensions):
+        with handler(headers=std_headers, **params) as rh:
+            res = validate_and_send(
+                rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions=extensions)).read().decode()
+            assert 'sec-ch-ua: "Chromium";v="110"' in res
+            # Check that the impersonated user agent is used instead of ours
+            assert 'User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36' in res
+
+    def test_headers(self, handler):
+        with handler(headers=std_headers) as rh:
+            # Ensure curl-impersonate overrides our standard headers (usually added by yt-dlp)
+            res = validate_and_send(
+                rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions={
+                    'impersonate': ImpersonateTarget('safari')}, headers={'x-custom': 'test', 'sec-fetch-mode': 'custom'})).read().decode().lower()
+
+            assert std_headers['user-agent'].lower() not in res
+            assert std_headers['accept-language'].lower() not in res
+            assert std_headers['sec-fetch-mode'].lower() not in res
+            # other than UA, custom headers that differ from std_headers should be kept
+            assert 'sec-fetch-mode: custom' in res
+            assert 'x-custom: test' in res
+            # but when not impersonating don't remove std_headers
+            res = validate_and_send(
+                rh, Request(f'http://127.0.0.1:{self.http_port}/headers', headers={'x-custom': 'test'})).read().decode().lower()
+            # std_headers should be present
+            for k, v in std_headers.items():
+                assert f'{k}: {v}'.lower() in res
+
+    @pytest.mark.parametrize('raised,expected,match', [
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.PARTIAL_FILE), IncompleteRead, None),
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.OPERATION_TIMEDOUT), TransportError, None),
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.RECV_ERROR), TransportError, None),
+    ])
+    def test_response_error_mapping(self, handler, monkeypatch, raised, expected, match):
+        import curl_cffi.requests
+
+        from yt_dlp.networking._curlcffi import CurlCFFIResponseAdapter
+        curl_res = curl_cffi.requests.Response()
+        res = CurlCFFIResponseAdapter(curl_res)
+
+        def mock_read(*args, **kwargs):
+            try:
+                raise raised()
+            except Exception as e:
+                e.response = curl_res
+                raise
+        monkeypatch.setattr(res.fp, 'read', mock_read)
+
+        with pytest.raises(expected, match=match) as exc_info:
+            res.read()
+
+        assert exc_info.type is expected
+
+    @pytest.mark.parametrize('raised,expected,match', [
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.OPERATION_TIMEDOUT), TransportError, None),
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.PEER_FAILED_VERIFICATION), CertificateVerifyError, None),
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.SSL_CONNECT_ERROR), SSLError, None),
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.TOO_MANY_REDIRECTS), HTTPError, None),
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.PROXY), ProxyError, None),
+    ])
+    def test_request_error_mapping(self, handler, monkeypatch, raised, expected, match):
+        import curl_cffi.requests
+        curl_res = curl_cffi.requests.Response()
+        curl_res.status_code = 301
+
+        with handler() as rh:
+            original_get_instance = rh._get_instance
+
+            def mock_get_instance(*args, **kwargs):
+                instance = original_get_instance(*args, **kwargs)
+
+                def request(*_, **__):
+                    try:
+                        raise raised()
+                    except Exception as e:
+                        e.response = curl_res
+                        raise
+                monkeypatch.setattr(instance, 'request', request)
+                return instance
+
+            monkeypatch.setattr(rh, '_get_instance', mock_get_instance)
+
+            with pytest.raises(expected) as exc_info:
+                rh.send(Request('http://fake'))
+
+            assert exc_info.type is expected
+
+    def test_response_reader(self, handler):
+        class FakeResponse:
+            def __init__(self, raise_error=False):
+                self.raise_error = raise_error
+                self.closed = False
+
+            def iter_content(self):
+                yield b'foo'
+                yield b'bar'
+                yield b'z'
+                if self.raise_error:
+                    raise Exception('test')
+
+            def close(self):
+                self.closed = True
+
+        from yt_dlp.networking._curlcffi import CurlCFFIResponseReader
+
+        res = CurlCFFIResponseReader(FakeResponse())
+        assert res.readable
+        assert res.bytes_read == 0
+        assert res.read(1) == b'f'
+        assert res.bytes_read == 3
+        assert res._buffer == b'oo'
+
+        assert res.read(2) == b'oo'
+        assert res.bytes_read == 3
+        assert res._buffer == b''
+
+        assert res.read(2) == b'ba'
+        assert res.bytes_read == 6
+        assert res._buffer == b'r'
+
+        assert res.read(3) == b'rz'
+        assert res.bytes_read == 7
+        assert res._buffer == b''
+        assert res.closed
+        assert res._response.closed
+
+        # should handle no size param
+        res2 = CurlCFFIResponseReader(FakeResponse())
+        assert res2.read() == b'foobarz'
+        assert res2.bytes_read == 7
+        assert res2._buffer == b''
+        assert res2.closed
+
+        # should close on an exception
+        res3 = CurlCFFIResponseReader(FakeResponse(raise_error=True))
+        with pytest.raises(Exception, match='test'):
+            res3.read()
+        assert res3._buffer == b''
+        assert res3.bytes_read == 7
+        assert res3.closed
+
+        # buffer should be cleared on close
+        res4 = CurlCFFIResponseReader(FakeResponse())
+        res4.read(2)
+        assert res4._buffer == b'o'
+        res4.close()
+        assert res4.closed
+        assert res4._buffer == b''
+
+
 def run_validation(handler, error, req, **handler_kwargs):
     with handler(**handler_kwargs) as rh:
         if error:
@@ -979,6 +1174,10 @@ class TestRequestHandlerValidation:
             ('ws', False, {}),
             ('wss', False, {}),
         ]),
+        ('CurlCFFI', [
+            ('http', False, {}),
+            ('https', False, {}),
+        ]),
         (NoCheckRH, [('http', False, {})]),
         (ValidationRH, [('http', UnsupportedRequest, {})])
     ]
@@ -1002,6 +1201,14 @@ class TestRequestHandlerValidation:
             ('socks5', False),
             ('socks5h', False),
         ]),
+        ('CurlCFFI', 'http', [
+            ('http', False),
+            ('https', False),
+            ('socks4', False),
+            ('socks4a', False),
+            ('socks5', False),
+            ('socks5h', False),
+        ]),
         (NoCheckRH, 'http', [('http', False)]),
         (HTTPSupportedRH, 'http', [('http', UnsupportedRequest)]),
         ('Websockets', 'ws', [('http', UnsupportedRequest)]),
@@ -1019,6 +1226,10 @@ class TestRequestHandlerValidation:
             ('all', False),
             ('unrelated', False),
         ]),
+        ('CurlCFFI', [
+            ('all', False),
+            ('unrelated', False),
+        ]),
         (NoCheckRH, [('all', False)]),
         (HTTPSupportedRH, [('all', UnsupportedRequest)]),
         (HTTPSupportedRH, [('no', UnsupportedRequest)]),
@@ -1040,6 +1251,19 @@ class TestRequestHandlerValidation:
             ({'timeout': 'notatimeout'}, AssertionError),
             ({'unsupported': 'value'}, UnsupportedRequest),
         ]),
+        ('CurlCFFI', 'http', [
+            ({'cookiejar': 'notacookiejar'}, AssertionError),
+            ({'cookiejar': YoutubeDLCookieJar()}, False),
+            ({'timeout': 1}, False),
+            ({'timeout': 'notatimeout'}, AssertionError),
+            ({'unsupported': 'value'}, UnsupportedRequest),
+            ({'impersonate': ImpersonateTarget('badtarget', None, None, None)}, UnsupportedRequest),
+            ({'impersonate': 123}, AssertionError),
+            ({'impersonate': ImpersonateTarget('chrome', None, None, None)}, False),
+            ({'impersonate': ImpersonateTarget(None, None, None, None)}, False),
+            ({'impersonate': ImpersonateTarget()}, False),
+            ({'impersonate': 'chrome'}, AssertionError)
+        ]),
         (NoCheckRH, 'http', [
             ({'cookiejar': 'notacookiejar'}, False),
             ({'somerandom': 'test'}, False),  # but any extension is allowed through
@@ -1059,7 +1283,7 @@ class TestRequestHandlerValidation:
     def test_url_scheme(self, handler, scheme, fail, handler_kwargs):
         run_validation(handler, fail, Request(f'{scheme}://'), **(handler_kwargs or {}))
 
-    @pytest.mark.parametrize('handler,fail', [('Urllib', False), ('Requests', False)], indirect=['handler'])
+    @pytest.mark.parametrize('handler,fail', [('Urllib', False), ('Requests', False), ('CurlCFFI', False)], indirect=['handler'])
     def test_no_proxy(self, handler, fail):
         run_validation(handler, fail, Request('http://', proxies={'no': '127.0.0.1,github.com'}))
         run_validation(handler, fail, Request('http://'), proxies={'no': '127.0.0.1,github.com'})
@@ -1082,13 +1306,13 @@ class TestRequestHandlerValidation:
         run_validation(handler, fail, Request(f'{req_scheme}://', proxies={req_scheme: f'{scheme}://example.com'}))
         run_validation(handler, fail, Request(f'{req_scheme}://'), proxies={req_scheme: f'{scheme}://example.com'})
 
-    @pytest.mark.parametrize('handler', ['Urllib', HTTPSupportedRH, 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', HTTPSupportedRH, 'Requests', 'CurlCFFI'], indirect=True)
     def test_empty_proxy(self, handler):
         run_validation(handler, False, Request('http://', proxies={'http': None}))
         run_validation(handler, False, Request('http://'), proxies={'http': None})
 
     @pytest.mark.parametrize('proxy_url', ['//example.com', 'example.com', '127.0.0.1', '/a/b/c'])
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_invalid_proxy_url(self, handler, proxy_url):
         run_validation(handler, UnsupportedRequest, Request('http://', proxies={'http': proxy_url}))
 

+ 14 - 19
test/test_socks.py

@@ -286,8 +286,14 @@ def ctx(request):
     return CTX_MAP[request.param]()
 
 
+@pytest.mark.parametrize(
+    'handler,ctx', [
+        ('Urllib', 'http'),
+        ('Requests', 'http'),
+        ('Websockets', 'ws'),
+        ('CurlCFFI', 'http')
+    ], indirect=True)
 class TestSocks4Proxy:
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks4_no_auth(self, handler, ctx):
         with handler() as rh:
             with ctx.socks_server(Socks4ProxyHandler) as server_address:
@@ -295,7 +301,6 @@ class TestSocks4Proxy:
                     rh, proxies={'all': f'socks4://{server_address}'})
                 assert response['version'] == 4
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks4_auth(self, handler, ctx):
         with handler() as rh:
             with ctx.socks_server(Socks4ProxyHandler, user_id='user') as server_address:
@@ -305,7 +310,6 @@ class TestSocks4Proxy:
                     rh, proxies={'all': f'socks4://user:@{server_address}'})
                 assert response['version'] == 4
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks4a_ipv4_target(self, handler, ctx):
         with ctx.socks_server(Socks4ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks4a://{server_address}'}) as rh:
@@ -313,7 +317,6 @@ class TestSocks4Proxy:
                 assert response['version'] == 4
                 assert (response['ipv4_address'] == '127.0.0.1') != (response['domain_address'] == '127.0.0.1')
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks4a_domain_target(self, handler, ctx):
         with ctx.socks_server(Socks4ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks4a://{server_address}'}) as rh:
@@ -322,7 +325,6 @@ class TestSocks4Proxy:
                 assert response['ipv4_address'] is None
                 assert response['domain_address'] == 'localhost'
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_ipv4_client_source_address(self, handler, ctx):
         with ctx.socks_server(Socks4ProxyHandler) as server_address:
             source_address = f'127.0.0.{random.randint(5, 255)}'
@@ -333,7 +335,6 @@ class TestSocks4Proxy:
                 assert response['client_address'][0] == source_address
                 assert response['version'] == 4
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     @pytest.mark.parametrize('reply_code', [
         Socks4CD.REQUEST_REJECTED_OR_FAILED,
         Socks4CD.REQUEST_REJECTED_CANNOT_CONNECT_TO_IDENTD,
@@ -345,7 +346,6 @@ class TestSocks4Proxy:
                 with pytest.raises(ProxyError):
                     ctx.socks_info_request(rh)
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_ipv6_socks4_proxy(self, handler, ctx):
         with ctx.socks_server(Socks4ProxyHandler, bind_ip='::1') as server_address:
             with handler(proxies={'all': f'socks4://{server_address}'}) as rh:
@@ -354,7 +354,6 @@ class TestSocks4Proxy:
                 assert response['ipv4_address'] == '127.0.0.1'
                 assert response['version'] == 4
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_timeout(self, handler, ctx):
         with ctx.socks_server(Socks4ProxyHandler, sleep=2) as server_address:
             with handler(proxies={'all': f'socks4://{server_address}'}, timeout=0.5) as rh:
@@ -362,9 +361,15 @@ class TestSocks4Proxy:
                     ctx.socks_info_request(rh)
 
 
+@pytest.mark.parametrize(
+    'handler,ctx', [
+        ('Urllib', 'http'),
+        ('Requests', 'http'),
+        ('Websockets', 'ws'),
+        ('CurlCFFI', 'http')
+    ], indirect=True)
 class TestSocks5Proxy:
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks5_no_auth(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
@@ -372,7 +377,6 @@ class TestSocks5Proxy:
                 assert response['auth_methods'] == [0x0]
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks5_user_pass(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler, auth=('test', 'testpass')) as server_address:
             with handler() as rh:
@@ -385,7 +389,6 @@ class TestSocks5Proxy:
                 assert response['auth_methods'] == [Socks5Auth.AUTH_NONE, Socks5Auth.AUTH_USER_PASS]
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks5_ipv4_target(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
@@ -393,7 +396,6 @@ class TestSocks5Proxy:
                 assert response['ipv4_address'] == '127.0.0.1'
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks5_domain_target(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
@@ -401,7 +403,6 @@ class TestSocks5Proxy:
                 assert (response['ipv4_address'] == '127.0.0.1') != (response['ipv6_address'] == '::1')
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks5h_domain_target(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5h://{server_address}'}) as rh:
@@ -410,7 +411,6 @@ class TestSocks5Proxy:
                 assert response['domain_address'] == 'localhost'
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks5h_ip_target(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5h://{server_address}'}) as rh:
@@ -419,7 +419,6 @@ class TestSocks5Proxy:
                 assert response['domain_address'] is None
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks5_ipv6_destination(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
@@ -427,7 +426,6 @@ class TestSocks5Proxy:
                 assert response['ipv6_address'] == '::1'
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_ipv6_socks5_proxy(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler, bind_ip='::1') as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
@@ -438,7 +436,6 @@ class TestSocks5Proxy:
 
     # XXX: is there any feasible way of testing IPv6 source addresses?
     # Same would go for non-proxy source_address test...
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_ipv4_client_source_address(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             source_address = f'127.0.0.{random.randint(5, 255)}'
@@ -448,7 +445,6 @@ class TestSocks5Proxy:
                 assert response['client_address'][0] == source_address
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     @pytest.mark.parametrize('reply_code', [
         Socks5Reply.GENERAL_FAILURE,
         Socks5Reply.CONNECTION_NOT_ALLOWED,
@@ -465,7 +461,6 @@ class TestSocks5Proxy:
                 with pytest.raises(ProxyError):
                     ctx.socks_info_request(rh)
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_timeout(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler, sleep=2) as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}, timeout=1) as rh:

+ 4 - 2
yt_dlp/__pyinstaller/hook-yt_dlp.py

@@ -1,6 +1,6 @@
 import sys
 
-from PyInstaller.utils.hooks import collect_submodules
+from PyInstaller.utils.hooks import collect_submodules, collect_data_files
 
 
 def pycryptodome_module():
@@ -25,10 +25,12 @@ def get_hidden_imports():
     for module in ('websockets', 'requests', 'urllib3'):
         yield from collect_submodules(module)
     # These are auto-detected, but explicitly add them just in case
-    yield from ('mutagen', 'brotli', 'certifi', 'secretstorage')
+    yield from ('mutagen', 'brotli', 'certifi', 'secretstorage', 'curl_cffi')
 
 
 hiddenimports = list(get_hidden_imports())
 print(f'Adding imports: {hiddenimports}')
 
 excludedimports = ['youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts', 'bundle']
+
+datas = collect_data_files('curl_cffi', includes=['cacert.pem'])

+ 4 - 0
yt_dlp/dependencies/__init__.py

@@ -74,6 +74,10 @@ else:
     if hasattr(xattr, 'set'):  # pyxattr
         xattr._yt_dlp__identifier = 'pyxattr'
 
+try:
+    import curl_cffi
+except ImportError:
+    curl_cffi = None
 
 from . import Cryptodome
 

+ 7 - 0
yt_dlp/networking/__init__.py

@@ -28,3 +28,10 @@ except ImportError:
     pass
 except Exception as e:
     warnings.warn(f'Failed to import "websockets" request handler: {e}' + bug_reports_message())
+
+try:
+    from . import _curlcffi  # noqa: F401
+except ImportError:
+    pass
+except Exception as e:
+    warnings.warn(f'Failed to import "curl_cffi" request handler: {e}' + bug_reports_message())

+ 221 - 0
yt_dlp/networking/_curlcffi.py

@@ -0,0 +1,221 @@
+from __future__ import annotations
+
+import io
+import math
+import urllib.parse
+
+from ._helper import InstanceStoreMixin, select_proxy
+from .common import (
+    Features,
+    Request,
+    Response,
+    register_preference,
+    register_rh,
+)
+from .exceptions import (
+    CertificateVerifyError,
+    HTTPError,
+    IncompleteRead,
+    ProxyError,
+    SSLError,
+    TransportError,
+)
+from .impersonate import ImpersonateRequestHandler, ImpersonateTarget
+from ..dependencies import curl_cffi
+from ..utils import int_or_none
+
+if curl_cffi is None:
+    raise ImportError('curl_cffi is not installed')
+
+curl_cffi_version = tuple(int_or_none(x, default=0) for x in curl_cffi.__version__.split('.'))
+
+if curl_cffi_version != (0, 5, 10):
+    curl_cffi._yt_dlp__version = f'{curl_cffi.__version__} (unsupported)'
+    raise ImportError('Only curl_cffi 0.5.10 is supported')
+
+import curl_cffi.requests
+from curl_cffi.const import CurlECode, CurlOpt
+
+
+class CurlCFFIResponseReader(io.IOBase):
+    def __init__(self, response: curl_cffi.requests.Response):
+        self._response = response
+        self._iterator = response.iter_content()
+        self._buffer = b''
+        self.bytes_read = 0
+
+    def readable(self):
+        return True
+
+    def read(self, size=None):
+        exception_raised = True
+        try:
+            while self._iterator and (size is None or len(self._buffer) < size):
+                chunk = next(self._iterator, None)
+                if chunk is None:
+                    self._iterator = None
+                    break
+                self._buffer += chunk
+                self.bytes_read += len(chunk)
+
+            if size is None:
+                size = len(self._buffer)
+            data = self._buffer[:size]
+            self._buffer = self._buffer[size:]
+
+            # "free" the curl instance if the response is fully read.
+            # curl_cffi doesn't do this automatically and only allows one open response per thread
+            if not self._iterator and not self._buffer:
+                self.close()
+            exception_raised = False
+            return data
+        finally:
+            if exception_raised:
+                self.close()
+
+    def close(self):
+        if not self.closed:
+            self._response.close()
+            self._buffer = b''
+        super().close()
+
+
+class CurlCFFIResponseAdapter(Response):
+    fp: CurlCFFIResponseReader
+
+    def __init__(self, response: curl_cffi.requests.Response):
+        super().__init__(
+            fp=CurlCFFIResponseReader(response),
+            headers=response.headers,
+            url=response.url,
+            status=response.status_code)
+
+    def read(self, amt=None):
+        try:
+            return self.fp.read(amt)
+        except curl_cffi.requests.errors.RequestsError as e:
+            if e.code == CurlECode.PARTIAL_FILE:
+                content_length = int_or_none(e.response.headers.get('Content-Length'))
+                raise IncompleteRead(
+                    partial=self.fp.bytes_read,
+                    expected=content_length - self.fp.bytes_read if content_length is not None else None,
+                    cause=e) from e
+            raise TransportError(cause=e) from e
+
+
+@register_rh
+class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin):
+    RH_NAME = 'curl_cffi'
+    _SUPPORTED_URL_SCHEMES = ('http', 'https')
+    _SUPPORTED_FEATURES = (Features.NO_PROXY, Features.ALL_PROXY)
+    _SUPPORTED_PROXY_SCHEMES = ('http', 'https', 'socks4', 'socks4a', 'socks5', 'socks5h')
+    _SUPPORTED_IMPERSONATE_TARGET_MAP = {
+        ImpersonateTarget('chrome', '110', 'windows', '10'): curl_cffi.requests.BrowserType.chrome110,
+        ImpersonateTarget('chrome', '107', 'windows', '10'): curl_cffi.requests.BrowserType.chrome107,
+        ImpersonateTarget('chrome', '104', 'windows', '10'): curl_cffi.requests.BrowserType.chrome104,
+        ImpersonateTarget('chrome', '101', 'windows', '10'): curl_cffi.requests.BrowserType.chrome101,
+        ImpersonateTarget('chrome', '100', 'windows', '10'): curl_cffi.requests.BrowserType.chrome100,
+        ImpersonateTarget('chrome', '99', 'windows', '10'): curl_cffi.requests.BrowserType.chrome99,
+        ImpersonateTarget('edge', '101', 'windows', '10'): curl_cffi.requests.BrowserType.edge101,
+        ImpersonateTarget('edge', '99', 'windows', '10'): curl_cffi.requests.BrowserType.edge99,
+        ImpersonateTarget('safari', '15.5', 'macos', '12'): curl_cffi.requests.BrowserType.safari15_5,
+        ImpersonateTarget('safari', '15.3', 'macos', '11'): curl_cffi.requests.BrowserType.safari15_3,
+        ImpersonateTarget('chrome', '99', 'android', '12'): curl_cffi.requests.BrowserType.chrome99_android,
+    }
+
+    def _create_instance(self, cookiejar=None):
+        return curl_cffi.requests.Session(cookies=cookiejar)
+
+    def _check_extensions(self, extensions):
+        super()._check_extensions(extensions)
+        extensions.pop('impersonate', None)
+        extensions.pop('cookiejar', None)
+        extensions.pop('timeout', None)
+
+    def _send(self, request: Request):
+        max_redirects_exceeded = False
+        session: curl_cffi.requests.Session = self._get_instance(
+            cookiejar=self._get_cookiejar(request) if 'cookie' not in request.headers else None)
+
+        if self.verbose:
+            session.curl.setopt(CurlOpt.VERBOSE, 1)
+
+        proxies = self._get_proxies(request)
+        if 'no' in proxies:
+            session.curl.setopt(CurlOpt.NOPROXY, proxies['no'])
+            proxies.pop('no', None)
+
+        # curl doesn't support per protocol proxies, so we select the one that matches the request protocol
+        proxy = select_proxy(request.url, proxies=proxies)
+        if proxy:
+            session.curl.setopt(CurlOpt.PROXY, proxy)
+            scheme = urllib.parse.urlparse(request.url).scheme.lower()
+            if scheme != 'http':
+                # Enable HTTP CONNECT for HTTPS urls.
+                # Don't use CONNECT for http for compatibility with urllib behaviour.
+                # See: https://curl.se/libcurl/c/CURLOPT_HTTPPROXYTUNNEL.html
+                session.curl.setopt(CurlOpt.HTTPPROXYTUNNEL, 1)
+
+        headers = self._get_impersonate_headers(request)
+
+        if self._client_cert:
+            session.curl.setopt(CurlOpt.SSLCERT, self._client_cert['client_certificate'])
+            client_certificate_key = self._client_cert.get('client_certificate_key')
+            client_certificate_password = self._client_cert.get('client_certificate_password')
+            if client_certificate_key:
+                session.curl.setopt(CurlOpt.SSLKEY, client_certificate_key)
+            if client_certificate_password:
+                session.curl.setopt(CurlOpt.KEYPASSWD, client_certificate_password)
+
+        timeout = self._calculate_timeout(request)
+
+        # set CURLOPT_LOW_SPEED_LIMIT and CURLOPT_LOW_SPEED_TIME to act as a read timeout. [1]
+        # curl_cffi does not currently do this. [2]
+        # Note: CURLOPT_LOW_SPEED_TIME is in seconds, so we need to round up to the nearest second. [3]
+        # [1] https://unix.stackexchange.com/a/305311
+        # [2] https://github.com/yifeikong/curl_cffi/issues/156
+        # [3] https://curl.se/libcurl/c/CURLOPT_LOW_SPEED_TIME.html
+        session.curl.setopt(CurlOpt.LOW_SPEED_LIMIT, 1)  # 1 byte per second
+        session.curl.setopt(CurlOpt.LOW_SPEED_TIME, math.ceil(timeout))
+
+        try:
+            curl_response = session.request(
+                method=request.method,
+                url=request.url,
+                headers=headers,
+                data=request.data,
+                verify=self.verify,
+                max_redirects=5,
+                timeout=timeout,
+                impersonate=self._SUPPORTED_IMPERSONATE_TARGET_MAP.get(
+                    self._get_request_target(request)),
+                interface=self.source_address,
+                stream=True
+            )
+        except curl_cffi.requests.errors.RequestsError as e:
+            if e.code == CurlECode.PEER_FAILED_VERIFICATION:
+                raise CertificateVerifyError(cause=e) from e
+
+            elif e.code == CurlECode.SSL_CONNECT_ERROR:
+                raise SSLError(cause=e) from e
+
+            elif e.code == CurlECode.TOO_MANY_REDIRECTS:
+                max_redirects_exceeded = True
+                curl_response = e.response
+
+            elif e.code == CurlECode.PROXY:
+                raise ProxyError(cause=e) from e
+            else:
+                raise TransportError(cause=e) from e
+
+        response = CurlCFFIResponseAdapter(curl_response)
+
+        if not 200 <= response.status < 300:
+            raise HTTPError(response, redirect_loop=max_redirects_exceeded)
+
+        return response
+
+
+@register_preference(CurlCFFIRH)
+def curl_cffi_preference(rh, request):
+    return -100
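A small sketch, assuming curl_cffi 0.5.10 is installed (the only version the import guard above accepts): enumerate the impersonation targets the handler exposes and the curl_cffi `BrowserType` each one maps to.

```python
from yt_dlp.networking._curlcffi import CurlCFFIRH

for target, browser_type in CurlCFFIRH._SUPPORTED_IMPERSONATE_TARGET_MAP.items():
    print(target, '->', browser_type)
```

The `curl_cffi_preference` of -100 registered above ranks this handler below the zero-preference default handlers, so it should not be selected ahead of them for ordinary requests.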

Some files were not shown because too many files changed in this diff