Mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2024-11-16 21:33:19 +00:00)

Merge branch 'yt-dlp:master' into rls/arm-ubuntu-bump

This commit is contained in commit bd43228f06

22  .github/workflows/build.yml (vendored)
@@ -249,9 +249,25 @@ jobs:
       run: |
         brew install coreutils
         python3 devscripts/install_deps.py --user -o --include build
-        python3 devscripts/install_deps.py --print --include pyinstaller > requirements.txt
+        python3 devscripts/install_deps.py --print --include pyinstaller_macos > requirements.txt
         # We need to ignore wheels otherwise we break universal2 builds
         python3 -m pip install -U --user --no-binary :all: -r requirements.txt
+        # We need to fuse our own universal2 wheels for curl_cffi
+        python3 -m pip install -U --user delocate
+        mkdir curl_cffi_whls curl_cffi_universal2
+        python3 devscripts/install_deps.py --print -o --include curl_cffi > requirements.txt
+        for platform in "macosx_11_0_arm64" "macosx_11_0_x86_64"; do
+          python3 -m pip download \
+            --only-binary=:all: \
+            --platform "${platform}" \
+            --pre -d curl_cffi_whls \
+            -r requirements.txt
+        done
+        python3 -m delocate.cmd.delocate_fuse curl_cffi_whls/curl_cffi*.whl -w curl_cffi_universal2
+        python3 -m delocate.cmd.delocate_fuse curl_cffi_whls/cffi*.whl -w curl_cffi_universal2
+        cd curl_cffi_universal2
+        for wheel in *cffi*.whl; do mv -n -- "${wheel}" "${wheel/x86_64/universal2}"; done
+        python3 -m pip install -U --user *cffi*.whl

     - name: Prepare
       run: |

@@ -305,7 +321,7 @@ jobs:
       run: |
         brew install coreutils
         python3 devscripts/install_deps.py --user -o --include build
-        python3 devscripts/install_deps.py --user --include pyinstaller
+        python3 devscripts/install_deps.py --user --include pyinstaller_macos --include curl_cffi

     - name: Prepare
       run: |

@@ -347,7 +363,7 @@ jobs:
     - name: Install Requirements
       run: | # Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
         python devscripts/install_deps.py -o --include build
-        python devscripts/install_deps.py --include py2exe
+        python devscripts/install_deps.py --include py2exe --include curl_cffi
         python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-5.8.0-py3-none-any.whl"

     - name: Prepare
2  .github/workflows/core.yml (vendored)

@@ -53,7 +53,7 @@ jobs:
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install test requirements
-      run: python3 ./devscripts/install_deps.py --include dev
+      run: python3 ./devscripts/install_deps.py --include dev --include curl_cffi
     - name: Run tests
       continue-on-error: False
       run: |
13  README.md

@@ -196,6 +196,15 @@ ### Networking
 * [**websockets**](https://github.com/aaugustin/websockets)\* - For downloading over websocket. Licensed under [BSD-3-Clause](https://github.com/aaugustin/websockets/blob/main/LICENSE)
 * [**requests**](https://github.com/psf/requests)\* - HTTP library. For HTTPS proxy and persistent connections support. Licensed under [Apache-2.0](https://github.com/psf/requests/blob/main/LICENSE)

+#### Impersonation
+
+The following provide support for impersonating browser requests. This may be required for some sites that employ TLS fingerprinting.
+
+* [**curl_cffi**](https://github.com/yifeikong/curl_cffi) (recommended) - Python binding for [curl-impersonate](https://github.com/lwthiker/curl-impersonate). Provides impersonation targets for Chrome, Edge and Safari. Licensed under [MIT](https://github.com/yifeikong/curl_cffi/blob/main/LICENSE)
+    * Can be installed with the `curl_cffi` group, e.g. `pip install yt-dlp[default,curl_cffi]`
+    * Only included in `yt-dlp.exe`, `yt-dlp_macos` and `yt-dlp_macos_legacy` builds
+
+
 ### Metadata

 * [**mutagen**](https://github.com/quodlibet/mutagen)\* - For `--embed-thumbnail` in certain formats. Licensed under [GPLv2+](https://github.com/quodlibet/mutagen/blob/master/COPYING)

@@ -389,6 +398,10 @@ ## Network Options:
                                      direct connection
     --socket-timeout SECONDS         Time to wait before giving up, in seconds
     --source-address IP              Client-side IP address to bind to
+    --impersonate CLIENT[:OS]        Client to impersonate for requests. E.g.
+                                     chrome, chrome-110, chrome:windows-10. Pass
+                                     --impersonate="" to impersonate any client.
+    --list-impersonate-targets       List available clients to impersonate.
     -4, --force-ipv4                 Make all connections via IPv4
     -6, --force-ipv6                 Make all connections via IPv6
     --enable-file-urls               Enable file:// URLs. This is disabled by
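A short usage sketch may help tie the two README additions above together (the `curl_cffi` install group and the new `--impersonate` option). This is a hedged illustration, not documented API: it assumes `pip install yt-dlp[default,curl_cffi]` has been run and relies on the `impersonate` option and `ImpersonateTarget.from_str()` helper that the test changes further down exercise; the URL is a placeholder.

```python
# Hedged sketch: programmatic equivalent of `yt-dlp --impersonate chrome URL`.
from yt_dlp import YoutubeDL
from yt_dlp.networking.impersonate import ImpersonateTarget

with YoutubeDL({'impersonate': ImpersonateTarget.from_str('chrome')}) as ydl:
    ydl.download(['https://example.com/some-video'])  # hypothetical URL
```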
pyproject.toml

@@ -53,6 +53,7 @@ dependencies = [

 [project.optional-dependencies]
 default = []
+curl_cffi = ["curl-cffi==0.5.10; implementation_name=='cpython'"]
 secretstorage = [
     "cffi",
     "secretstorage",

@@ -69,6 +70,7 @@ dev = [
     "pytest",
 ]
 pyinstaller = ["pyinstaller>=6.3"]
+pyinstaller_macos = ["pyinstaller==5.13.2"]  # needed for curl_cffi builds
 py2exe = ["py2exe>=0.12"]

 [project.urls]
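Since `curl_cffi` stays optional, code that depends on it has to probe for it at runtime. The test file below imports it from `yt_dlp.dependencies`; a minimal sketch of the same pattern, assuming (as with yt-dlp's other optional dependencies) that a missing module is exposed as `None`:

```python
# Hedged sketch: gate impersonation-dependent code on the optional dependency.
from yt_dlp.dependencies import curl_cffi

if curl_cffi is None:
    print('curl_cffi is not installed; impersonation targets are unavailable')
else:
    print('curl_cffi available; CurlCFFI request handler can be used')
```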
test/test_networking.py

@@ -27,9 +27,10 @@
 from email.message import Message
 from http.cookiejar import CookieJar

+from test.conftest import validate_and_send
 from test.helper import FakeYDL, http_server_port, verify_address_availability
 from yt_dlp.cookies import YoutubeDLCookieJar
-from yt_dlp.dependencies import brotli, requests, urllib3
+from yt_dlp.dependencies import brotli, curl_cffi, requests, urllib3
 from yt_dlp.networking import (
     HEADRequest,
     PUTRequest,

@@ -50,10 +51,13 @@
     TransportError,
     UnsupportedRequest,
 )
+from yt_dlp.networking.impersonate import (
+    ImpersonateRequestHandler,
+    ImpersonateTarget,
+)
 from yt_dlp.utils import YoutubeDLError
 from yt_dlp.utils._utils import _YDLLogger as FakeLogger
-from yt_dlp.utils.networking import HTTPHeaderDict
-
-from test.conftest import validate_and_send
+from yt_dlp.utils.networking import HTTPHeaderDict, std_headers

 TEST_DIR = os.path.dirname(os.path.abspath(__file__))


@@ -75,6 +79,7 @@ def do_GET(self):

 class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
     protocol_version = 'HTTP/1.1'
+    default_request_version = 'HTTP/1.1'

     def log_message(self, format, *args):
         pass

@@ -112,6 +117,8 @@ def _status(self, status):
     def _read_data(self):
         if 'Content-Length' in self.headers:
             return self.rfile.read(int(self.headers['Content-Length']))
+        else:
+            return b''

     def do_POST(self):
         data = self._read_data() + str(self.headers).encode()

@@ -195,7 +202,8 @@ def do_GET(self):
             self._headers()
         elif self.path.startswith('/308-to-headers'):
             self.send_response(308)
-            self.send_header('Location', '/headers')
+            # redirect to "localhost" for testing cookie redirection handling
+            self.send_header('Location', f'http://localhost:{self.connection.getsockname()[1]}/headers')
             self.send_header('Content-Length', '0')
             self.end_headers()
         elif self.path == '/trailing_garbage':
@@ -310,7 +318,7 @@ def setup_class(cls):


 class TestHTTPRequestHandler(TestRequestHandlerBase):
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_verify_cert(self, handler):
         with handler() as rh:
             with pytest.raises(CertificateVerifyError):

@@ -321,7 +329,7 @@ def test_verify_cert(self, handler):
             assert r.status == 200
             r.close()

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_ssl_error(self, handler):
         # HTTPS server with too old TLS version
         # XXX: is there a better way to test this than to create a new server?

@@ -335,11 +343,11 @@ def test_ssl_error(self, handler):
         https_server_thread.start()

         with handler(verify=False) as rh:
-            with pytest.raises(SSLError, match=r'ssl(?:v3|/tls) alert handshake failure') as exc_info:
+            with pytest.raises(SSLError, match=r'(?i)ssl(?:v3|/tls).alert.handshake.failure') as exc_info:
                 validate_and_send(rh, Request(f'https://127.0.0.1:{https_port}/headers'))
             assert not issubclass(exc_info.type, CertificateVerifyError)

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_percent_encode(self, handler):
         with handler() as rh:
             # Unicode characters should be encoded with uppercase percent-encoding

@@ -351,7 +359,7 @@ def test_percent_encode(self, handler):
             assert res.status == 200
             res.close()

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     @pytest.mark.parametrize('path', [
         '/a/b/./../../headers',
         '/redirect_dotsegments',

@@ -367,6 +375,7 @@ def test_remove_dot_segments(self, handler, path):
         assert res.url == f'http://127.0.0.1:{self.http_port}/headers'
         res.close()

+    # Not supported by CurlCFFI (non-standard)
     @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
     def test_unicode_path_redirection(self, handler):
         with handler() as rh:

@@ -374,7 +383,7 @@ def test_unicode_path_redirection(self, handler):
             assert r.url == f'http://127.0.0.1:{self.http_port}/%E4%B8%AD%E6%96%87.html'
             r.close()

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_raise_http_error(self, handler):
         with handler() as rh:
             for bad_status in (400, 500, 599, 302):

@@ -384,7 +393,7 @@ def test_raise_http_error(self, handler):
             # Should not raise an error
             validate_and_send(rh, Request('http://127.0.0.1:%d/gen_200' % self.http_port)).close()

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_response_url(self, handler):
         with handler() as rh:
             # Response url should be that of the last url in redirect chain
@@ -395,62 +404,50 @@ def test_response_url(self, handler):
             assert res2.url == f'http://127.0.0.1:{self.http_port}/gen_200'
             res2.close()

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
-    def test_redirect(self, handler):
+    # Covers some basic cases we expect some level of consistency between request handlers for
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
+    @pytest.mark.parametrize('redirect_status,method,expected', [
+        # A 303 must either use GET or HEAD for subsequent request
+        (303, 'POST', ('', 'GET', False)),
+        (303, 'HEAD', ('', 'HEAD', False)),
+
+        # 301 and 302 turn POST only into a GET
+        (301, 'POST', ('', 'GET', False)),
+        (301, 'HEAD', ('', 'HEAD', False)),
+        (302, 'POST', ('', 'GET', False)),
+        (302, 'HEAD', ('', 'HEAD', False)),
+
+        # 307 and 308 should not change method
+        (307, 'POST', ('testdata', 'POST', True)),
+        (308, 'POST', ('testdata', 'POST', True)),
+        (307, 'HEAD', ('', 'HEAD', False)),
+        (308, 'HEAD', ('', 'HEAD', False)),
+    ])
+    def test_redirect(self, handler, redirect_status, method, expected):
         with handler() as rh:
-            def do_req(redirect_status, method, assert_no_content=False):
-                data = b'testdata' if method in ('POST', 'PUT') else None
-                res = validate_and_send(
-                    rh, Request(f'http://127.0.0.1:{self.http_port}/redirect_{redirect_status}', method=method, data=data))
+            data = b'testdata' if method == 'POST' else None
+            headers = {}
+            if data is not None:
+                headers['Content-Type'] = 'application/test'
+            res = validate_and_send(
+                rh, Request(f'http://127.0.0.1:{self.http_port}/redirect_{redirect_status}', method=method, data=data,
+                            headers=headers))

-                headers = b''
-                data_sent = b''
-                if data is not None:
-                    data_sent += res.read(len(data))
-                    if data_sent != data:
-                        headers += data_sent
-                        data_sent = b''
+            headers = b''
+            data_recv = b''
+            if data is not None:
+                data_recv += res.read(len(data))
+                if data_recv != data:
+                    headers += data_recv
+                    data_recv = b''

-                headers += res.read()
+            headers += res.read()

-                if assert_no_content or data is None:
-                    assert b'Content-Type' not in headers
-                    assert b'Content-Length' not in headers
-                else:
-                    assert b'Content-Type' in headers
-                    assert b'Content-Length' in headers
+            assert expected[0] == data_recv.decode()
+            assert expected[1] == res.headers.get('method')
+            assert expected[2] == ('content-length' in headers.decode().lower())

-                return data_sent.decode(), res.headers.get('method', '')
-
-            # A 303 must either use GET or HEAD for subsequent request
-            assert do_req(303, 'POST', True) == ('', 'GET')
-            assert do_req(303, 'HEAD') == ('', 'HEAD')
-
-            assert do_req(303, 'PUT', True) == ('', 'GET')
-
-            # 301 and 302 turn POST only into a GET
-            assert do_req(301, 'POST', True) == ('', 'GET')
-            assert do_req(301, 'HEAD') == ('', 'HEAD')
-            assert do_req(302, 'POST', True) == ('', 'GET')
-            assert do_req(302, 'HEAD') == ('', 'HEAD')
-
-            assert do_req(301, 'PUT') == ('testdata', 'PUT')
-            assert do_req(302, 'PUT') == ('testdata', 'PUT')
-
-            # 307 and 308 should not change method
-            for m in ('POST', 'PUT'):
-                assert do_req(307, m) == ('testdata', m)
-                assert do_req(308, m) == ('testdata', m)
-
-            assert do_req(307, 'HEAD') == ('', 'HEAD')
-            assert do_req(308, 'HEAD') == ('', 'HEAD')
-
-            # These should not redirect and instead raise an HTTPError
-            for code in (300, 304, 305, 306):
-                with pytest.raises(HTTPError):
-                    do_req(code, 'GET')
-
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_request_cookie_header(self, handler):
         # We should accept a Cookie header being passed as in normal headers and handle it appropriately.
         with handler() as rh:
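The parametrized cases above pin down a small redirect policy. As an illustration only (not yt-dlp's implementation), the rule they encode is:

```python
# Sketch of the method rewriting the redirect_status/method/expected table asserts:
# 303 downgrades everything except HEAD to GET, 301/302 only rewrite POST,
# and 307/308 never change the method (so the request body is resent).
def redirected_method(status: int, method: str) -> str:
    if method == 'HEAD':
        return 'HEAD'
    if status == 303 or (status in (301, 302) and method == 'POST'):
        return 'GET'
    return method

assert redirected_method(303, 'POST') == 'GET'
assert redirected_method(301, 'HEAD') == 'HEAD'
assert redirected_method(307, 'POST') == 'POST'
```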
@@ -459,16 +456,17 @@ def test_request_cookie_header(self, handler):
                 rh, Request(
                     f'http://127.0.0.1:{self.http_port}/headers',
                     headers={'Cookie': 'test=test'})).read().decode()
-            assert 'Cookie: test=test' in res
+            assert 'cookie: test=test' in res.lower()

             # Specified Cookie header should be removed on any redirect
             res = validate_and_send(
                 rh, Request(
                     f'http://127.0.0.1:{self.http_port}/308-to-headers',
-                    headers={'Cookie': 'test=test'})).read().decode()
-            assert 'Cookie: test=test' not in res
+                    headers={'Cookie': 'test=test2'})).read().decode()
+            assert 'cookie: test=test2' not in res.lower()

         # Specified Cookie header should override global cookiejar for that request
+        # Whether cookies from the cookiejar is applied on the redirect is considered undefined for now
         cookiejar = YoutubeDLCookieJar()
         cookiejar.set_cookie(http.cookiejar.Cookie(
             version=0, name='test', value='ytdlp', port=None, port_specified=False,

@@ -478,23 +476,23 @@ def test_request_cookie_header(self, handler):

         with handler(cookiejar=cookiejar) as rh:
             data = validate_and_send(
-                rh, Request(f'http://127.0.0.1:{self.http_port}/headers', headers={'cookie': 'test=test'})).read()
-            assert b'Cookie: test=ytdlp' not in data
-            assert b'Cookie: test=test' in data
+                rh, Request(f'http://127.0.0.1:{self.http_port}/headers', headers={'cookie': 'test=test3'})).read()
+            assert b'cookie: test=ytdlp' not in data.lower()
+            assert b'cookie: test=test3' in data.lower()

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_redirect_loop(self, handler):
         with handler() as rh:
             with pytest.raises(HTTPError, match='redirect loop'):
                 validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/redirect_loop'))

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_incompleteread(self, handler):
         with handler(timeout=2) as rh:
-            with pytest.raises(IncompleteRead):
+            with pytest.raises(IncompleteRead, match='13 bytes read, 234221 more expected'):
                 validate_and_send(rh, Request('http://127.0.0.1:%d/incompleteread' % self.http_port)).read()

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_cookies(self, handler):
         cookiejar = YoutubeDLCookieJar()
         cookiejar.set_cookie(http.cookiejar.Cookie(
@@ -503,47 +501,66 @@ def test_cookies(self, handler):

         with handler(cookiejar=cookiejar) as rh:
             data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read()
-            assert b'Cookie: test=ytdlp' in data
+            assert b'cookie: test=ytdlp' in data.lower()

         # Per request
         with handler() as rh:
             data = validate_and_send(
                 rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions={'cookiejar': cookiejar})).read()
-            assert b'Cookie: test=ytdlp' in data
+            assert b'cookie: test=ytdlp' in data.lower()

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_headers(self, handler):

         with handler(headers=HTTPHeaderDict({'test1': 'test', 'test2': 'test2'})) as rh:
             # Global Headers
-            data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read()
-            assert b'Test1: test' in data
+            data = validate_and_send(rh, Request(f'http://127.0.0.1:{self.http_port}/headers')).read().lower()
+            assert b'test1: test' in data

             # Per request headers, merged with global
             data = validate_and_send(rh, Request(
-                f'http://127.0.0.1:{self.http_port}/headers', headers={'test2': 'changed', 'test3': 'test3'})).read()
-            assert b'Test1: test' in data
-            assert b'Test2: changed' in data
-            assert b'Test2: test2' not in data
-            assert b'Test3: test3' in data
+                f'http://127.0.0.1:{self.http_port}/headers', headers={'test2': 'changed', 'test3': 'test3'})).read().lower()
+            assert b'test1: test' in data
+            assert b'test2: changed' in data
+            assert b'test2: test2' not in data
+            assert b'test3: test3' in data

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
-    def test_timeout(self, handler):
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
+    def test_read_timeout(self, handler):
         with handler() as rh:
             # Default timeout is 20 seconds, so this should go through
             validate_and_send(
-                rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_3'))
+                rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_1'))

-        with handler(timeout=0.5) as rh:
+        with handler(timeout=0.1) as rh:
             with pytest.raises(TransportError):
                 validate_and_send(
-                    rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_1'))
+                    rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_5'))

             # Per request timeout, should override handler timeout
             validate_and_send(
                 rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_1', extensions={'timeout': 4}))

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
+    def test_connect_timeout(self, handler):
+        # nothing should be listening on this port
+        connect_timeout_url = 'http://10.255.255.255'
+        with handler(timeout=0.01) as rh:
+            now = time.time()
+            with pytest.raises(TransportError):
+                validate_and_send(
+                    rh, Request(connect_timeout_url))
+            assert 0.01 <= time.time() - now < 20
+
+        with handler() as rh:
+            with pytest.raises(TransportError):
+                # Per request timeout, should override handler timeout
+                now = time.time()
+                validate_and_send(
+                    rh, Request(connect_timeout_url, extensions={'timeout': 0.01}))
+            assert 0.01 <= time.time() - now < 20
+
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_source_address(self, handler):
         source_address = f'127.0.0.{random.randint(5, 255)}'
         # on some systems these loopback addresses we need for testing may not be available
@@ -554,6 +571,7 @@ def test_source_address(self, handler):
                 rh, Request(f'http://127.0.0.1:{self.http_port}/source_address')).read().decode()
             assert source_address == data

+    # Not supported by CurlCFFI
     @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
     def test_gzip_trailing_garbage(self, handler):
         with handler() as rh:

@@ -571,7 +589,7 @@ def test_brotli(self, handler):
             assert res.headers.get('Content-Encoding') == 'br'
             assert res.read() == b'<html><video src="/vid.mp4" /></html>'

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_deflate(self, handler):
         with handler() as rh:
             res = validate_and_send(

@@ -581,7 +599,7 @@ def test_deflate(self, handler):
             assert res.headers.get('Content-Encoding') == 'deflate'
             assert res.read() == b'<html><video src="/vid.mp4" /></html>'

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_gzip(self, handler):
         with handler() as rh:
             res = validate_and_send(

@@ -591,7 +609,7 @@ def test_gzip(self, handler):
             assert res.headers.get('Content-Encoding') == 'gzip'
             assert res.read() == b'<html><video src="/vid.mp4" /></html>'

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_multiple_encodings(self, handler):
         with handler() as rh:
             for pair in ('gzip,deflate', 'deflate, gzip', 'gzip, gzip', 'deflate, deflate'):

@@ -602,17 +620,18 @@ def test_multiple_encodings(self, handler):
                 assert res.headers.get('Content-Encoding') == pair
                 assert res.read() == b'<html><video src="/vid.mp4" /></html>'

+    # Not supported by curl_cffi
     @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
     def test_unsupported_encoding(self, handler):
         with handler() as rh:
             res = validate_and_send(
                 rh, Request(
                     f'http://127.0.0.1:{self.http_port}/content-encoding',
-                    headers={'ytdl-encoding': 'unsupported'}))
+                    headers={'ytdl-encoding': 'unsupported', 'Accept-Encoding': '*'}))
             assert res.headers.get('Content-Encoding') == 'unsupported'
             assert res.read() == b'raw'

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_read(self, handler):
         with handler() as rh:
             res = validate_and_send(

@@ -620,9 +639,12 @@ def test_read(self, handler):
             assert res.readable()
             assert res.read(1) == b'H'
             assert res.read(3) == b'ost'
+            assert res.read().decode().endswith('\n\n')
             assert res.read() == b''


 class TestHTTPProxy(TestRequestHandlerBase):
+    # Note: this only tests http urls over non-CONNECT proxy
     @classmethod
     def setup_class(cls):
         super().setup_class()
@@ -642,7 +664,7 @@ def setup_class(cls):
         cls.geo_proxy_thread.daemon = True
         cls.geo_proxy_thread.start()

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_http_proxy(self, handler):
         http_proxy = f'http://127.0.0.1:{self.proxy_port}'
         geo_proxy = f'http://127.0.0.1:{self.geo_port}'

@@ -668,7 +690,7 @@ def test_http_proxy(self, handler):
             assert res != f'normal: {real_url}'
             assert 'Accept' in res

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_noproxy(self, handler):
         with handler(proxies={'proxy': f'http://127.0.0.1:{self.proxy_port}'}) as rh:
             # NO_PROXY

@@ -678,7 +700,7 @@ def test_noproxy(self, handler):
                 'utf-8')
             assert 'Accept' in nop_response

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_allproxy(self, handler):
         url = 'http://foo.com/bar'
         with handler() as rh:

@@ -686,7 +708,7 @@ def test_allproxy(self, handler):
                 'utf-8')
             assert response == f'normal: {url}'

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_http_proxy_with_idn(self, handler):
         with handler(proxies={
             'http': f'http://127.0.0.1:{self.proxy_port}',

@@ -698,7 +720,6 @@ def test_http_proxy_with_idn(self, handler):


 class TestClientCertificate:
-
     @classmethod
     def setup_class(cls):
         certfn = os.path.join(TEST_DIR, 'testcert.pem')

@@ -724,27 +745,27 @@ def _run_test(self, handler, **handler_kwargs):
         ) as rh:
             validate_and_send(rh, Request(f'https://127.0.0.1:{self.port}/video.html')).read().decode()

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_certificate_combined_nopass(self, handler):
         self._run_test(handler, client_cert={
             'client_certificate': os.path.join(self.certdir, 'clientwithkey.crt'),
         })

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
    def test_certificate_nocombined_nopass(self, handler):
         self._run_test(handler, client_cert={
             'client_certificate': os.path.join(self.certdir, 'client.crt'),
             'client_certificate_key': os.path.join(self.certdir, 'client.key'),
         })

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_certificate_combined_pass(self, handler):
         self._run_test(handler, client_cert={
             'client_certificate': os.path.join(self.certdir, 'clientwithencryptedkey.crt'),
             'client_certificate_password': 'foobar',
         })

-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_certificate_nocombined_pass(self, handler):
         self._run_test(handler, client_cert={
             'client_certificate': os.path.join(self.certdir, 'client.crt'),

@@ -753,6 +774,18 @@ def test_certificate_nocombined_pass(self, handler):
         })


+@pytest.mark.parametrize('handler', ['CurlCFFI'], indirect=True)
+class TestHTTPImpersonateRequestHandler(TestRequestHandlerBase):
+    def test_supported_impersonate_targets(self, handler):
+        with handler(headers=std_headers) as rh:
+            # note: this assumes the impersonate request handler supports the impersonate extension
+            for target in rh.supported_targets:
+                res = validate_and_send(rh, Request(
+                    f'http://127.0.0.1:{self.http_port}/headers', extensions={'impersonate': target}))
+                assert res.status == 200
+                assert std_headers['user-agent'].lower() not in res.read().decode().lower()
+
+
 class TestRequestHandlerMisc:
     """Misc generic tests for request handlers, not related to request or validation testing"""
     @pytest.mark.parametrize('handler,logger_name', [
@@ -931,6 +964,172 @@ def mock_close(*args, **kwargs):
         assert called


+@pytest.mark.parametrize('handler', ['CurlCFFI'], indirect=True)
+class TestCurlCFFIRequestHandler(TestRequestHandlerBase):
+
+    @pytest.mark.parametrize('params,extensions', [
+        ({}, {'impersonate': ImpersonateTarget('chrome')}),
+        ({'impersonate': ImpersonateTarget('chrome', '110')}, {}),
+        ({'impersonate': ImpersonateTarget('chrome', '99')}, {'impersonate': ImpersonateTarget('chrome', '110')}),
+    ])
+    def test_impersonate(self, handler, params, extensions):
+        with handler(headers=std_headers, **params) as rh:
+            res = validate_and_send(
+                rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions=extensions)).read().decode()
+            assert 'sec-ch-ua: "Chromium";v="110"' in res
+            # Check that user agent is added over ours
+            assert 'User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36' in res
+
+    def test_headers(self, handler):
+        with handler(headers=std_headers) as rh:
+            # Ensure curl-impersonate overrides our standard headers (usually added
+            res = validate_and_send(
+                rh, Request(f'http://127.0.0.1:{self.http_port}/headers', extensions={
+                    'impersonate': ImpersonateTarget('safari')}, headers={'x-custom': 'test', 'sec-fetch-mode': 'custom'})).read().decode().lower()
+
+            assert std_headers['user-agent'].lower() not in res
+            assert std_headers['accept-language'].lower() not in res
+            assert std_headers['sec-fetch-mode'].lower() not in res
+            # other than UA, custom headers that differ from std_headers should be kept
+            assert 'sec-fetch-mode: custom' in res
+            assert 'x-custom: test' in res
+            # but when not impersonating don't remove std_headers
+            res = validate_and_send(
+                rh, Request(f'http://127.0.0.1:{self.http_port}/headers', headers={'x-custom': 'test'})).read().decode().lower()
+            # std_headers should be present
+            for k, v in std_headers.items():
+                assert f'{k}: {v}'.lower() in res
+
+    @pytest.mark.parametrize('raised,expected,match', [
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.PARTIAL_FILE), IncompleteRead, None),
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.OPERATION_TIMEDOUT), TransportError, None),
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.RECV_ERROR), TransportError, None),
+    ])
+    def test_response_error_mapping(self, handler, monkeypatch, raised, expected, match):
+        import curl_cffi.requests
+
+        from yt_dlp.networking._curlcffi import CurlCFFIResponseAdapter
+        curl_res = curl_cffi.requests.Response()
+        res = CurlCFFIResponseAdapter(curl_res)
+
+        def mock_read(*args, **kwargs):
+            try:
+                raise raised()
+            except Exception as e:
+                e.response = curl_res
+                raise
+        monkeypatch.setattr(res.fp, 'read', mock_read)
+
+        with pytest.raises(expected, match=match) as exc_info:
+            res.read()
+
+        assert exc_info.type is expected
+
+    @pytest.mark.parametrize('raised,expected,match', [
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.OPERATION_TIMEDOUT), TransportError, None),
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.PEER_FAILED_VERIFICATION), CertificateVerifyError, None),
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.SSL_CONNECT_ERROR), SSLError, None),
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.TOO_MANY_REDIRECTS), HTTPError, None),
+        (lambda: curl_cffi.requests.errors.RequestsError(
+            '', code=curl_cffi.const.CurlECode.PROXY), ProxyError, None),
+    ])
+    def test_request_error_mapping(self, handler, monkeypatch, raised, expected, match):
+        import curl_cffi.requests
+        curl_res = curl_cffi.requests.Response()
+        curl_res.status_code = 301
+
+        with handler() as rh:
+            original_get_instance = rh._get_instance
+
+            def mock_get_instance(*args, **kwargs):
+                instance = original_get_instance(*args, **kwargs)
+
+                def request(*_, **__):
+                    try:
+                        raise raised()
+                    except Exception as e:
+                        e.response = curl_res
+                        raise
+                monkeypatch.setattr(instance, 'request', request)
+                return instance
+
+            monkeypatch.setattr(rh, '_get_instance', mock_get_instance)
+
+            with pytest.raises(expected) as exc_info:
+                rh.send(Request('http://fake'))
+
+            assert exc_info.type is expected
+
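For readability, the curl error codes fed through the adapter in the two parametrized blocks above can be summarized as one mapping. This is a hedged sketch of the assumed shape; the real translation lives in `yt_dlp.networking._curlcffi`, and only the pairs asserted above are listed:

```python
# Assumed shape of the CurlECode -> yt-dlp exception translation exercised above.
from curl_cffi.const import CurlECode
from yt_dlp.networking.exceptions import (
    CertificateVerifyError, HTTPError, IncompleteRead, ProxyError, SSLError, TransportError)

ASSUMED_ERROR_MAP = {
    CurlECode.PARTIAL_FILE: IncompleteRead,                       # body ended early while reading
    CurlECode.OPERATION_TIMEDOUT: TransportError,                 # read/connect timeout
    CurlECode.RECV_ERROR: TransportError,                         # connection broken mid-transfer
    CurlECode.PEER_FAILED_VERIFICATION: CertificateVerifyError,
    CurlECode.SSL_CONNECT_ERROR: SSLError,
    CurlECode.TOO_MANY_REDIRECTS: HTTPError,                      # surfaced as a redirect-loop error
    CurlECode.PROXY: ProxyError,
}
```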
+    def test_response_reader(self, handler):
+        class FakeResponse:
+            def __init__(self, raise_error=False):
+                self.raise_error = raise_error
+                self.closed = False
+
+            def iter_content(self):
+                yield b'foo'
+                yield b'bar'
+                yield b'z'
+                if self.raise_error:
+                    raise Exception('test')
+
+            def close(self):
+                self.closed = True
+
+        from yt_dlp.networking._curlcffi import CurlCFFIResponseReader
+
+        res = CurlCFFIResponseReader(FakeResponse())
+        assert res.readable
+        assert res.bytes_read == 0
+        assert res.read(1) == b'f'
+        assert res.bytes_read == 3
+        assert res._buffer == b'oo'
+
+        assert res.read(2) == b'oo'
+        assert res.bytes_read == 3
+        assert res._buffer == b''
+
+        assert res.read(2) == b'ba'
+        assert res.bytes_read == 6
+        assert res._buffer == b'r'
+
+        assert res.read(3) == b'rz'
+        assert res.bytes_read == 7
+        assert res._buffer == b''
+        assert res.closed
+        assert res._response.closed
+
+        # should handle no size param
+        res2 = CurlCFFIResponseReader(FakeResponse())
+        assert res2.read() == b'foobarz'
+        assert res2.bytes_read == 7
+        assert res2._buffer == b''
+        assert res2.closed
+
+        # should close on an exception
+        res3 = CurlCFFIResponseReader(FakeResponse(raise_error=True))
+        with pytest.raises(Exception, match='test'):
+            res3.read()
+        assert res3._buffer == b''
+        assert res3.bytes_read == 7
+        assert res3.closed
+
+        # buffer should be cleared on close
+        res4 = CurlCFFIResponseReader(FakeResponse())
+        res4.read(2)
+        assert res4._buffer == b'o'
+        res4.close()
+        assert res4.closed
+        assert res4._buffer == b''
+
+
 def run_validation(handler, error, req, **handler_kwargs):
     with handler(**handler_kwargs) as rh:
         if error:
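The behaviour `test_response_reader` pins down above (chunk-wise pulls from `iter_content()`, a small internal buffer, `bytes_read` accounting, and closing on EOF or error) can be illustrated with a simplified stand-alone reader. This is a sketch of the semantics the assertions describe, not the real `CurlCFFIResponseReader`:

```python
# Simplified sketch (assumption, not the real adapter) of the buffering semantics asserted above.
class SimplifiedResponseReader:
    def __init__(self, response):
        self._response = response           # object exposing iter_content() and close()
        self._iterator = response.iter_content()
        self._buffer = b''
        self.bytes_read = 0                 # counts bytes pulled from the iterator, not bytes returned
        self.closed = False

    def readable(self):
        return True

    def read(self, size=None):
        exhausted = False
        try:
            # Pull whole chunks until at least `size` bytes are buffered (or everything, if size is None)
            while size is None or len(self._buffer) < size:
                try:
                    chunk = next(self._iterator)
                except StopIteration:
                    exhausted = True
                    break
                self.bytes_read += len(chunk)
                self._buffer += chunk
        except Exception:
            self.close()                    # close (dropping the buffer) on a mid-stream error
            raise
        if size is None:
            data, self._buffer = self._buffer, b''
        else:
            data, self._buffer = self._buffer[:size], self._buffer[size:]
        if exhausted:
            self.close()                    # EOF: close the underlying response
        return data

    def close(self):
        self._buffer = b''                  # buffer is cleared on close, as res4 asserts
        self.closed = True
        self._response.close()


class _FakeResponse:
    def __init__(self):
        self.closed = False

    def iter_content(self):
        yield b'foo'
        yield b'bar'
        yield b'z'

    def close(self):
        self.closed = True


reader = SimplifiedResponseReader(_FakeResponse())
assert reader.read(1) == b'f' and reader.bytes_read == 3   # whole chunk pulled, b'oo' stays buffered
assert reader.read() == b'oobarz' and reader.closed        # remainder drained, then closed
```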
@@ -975,6 +1174,10 @@ class HTTPSupportedRH(ValidationRH):
         ('ws', False, {}),
         ('wss', False, {}),
     ]),
+    ('CurlCFFI', [
+        ('http', False, {}),
+        ('https', False, {}),
+    ]),
     (NoCheckRH, [('http', False, {})]),
     (ValidationRH, [('http', UnsupportedRequest, {})])
 ]

@@ -998,6 +1201,14 @@ class HTTPSupportedRH(ValidationRH):
         ('socks5', False),
         ('socks5h', False),
     ]),
+    ('CurlCFFI', 'http', [
+        ('http', False),
+        ('https', False),
+        ('socks4', False),
+        ('socks4a', False),
+        ('socks5', False),
+        ('socks5h', False),
+    ]),
     (NoCheckRH, 'http', [('http', False)]),
     (HTTPSupportedRH, 'http', [('http', UnsupportedRequest)]),
     ('Websockets', 'ws', [('http', UnsupportedRequest)]),

@@ -1015,6 +1226,10 @@ class HTTPSupportedRH(ValidationRH):
         ('all', False),
         ('unrelated', False),
     ]),
+    ('CurlCFFI', [
+        ('all', False),
+        ('unrelated', False),
+    ]),
     (NoCheckRH, [('all', False)]),
     (HTTPSupportedRH, [('all', UnsupportedRequest)]),
     (HTTPSupportedRH, [('no', UnsupportedRequest)]),

@@ -1036,6 +1251,19 @@ class HTTPSupportedRH(ValidationRH):
         ({'timeout': 'notatimeout'}, AssertionError),
         ({'unsupported': 'value'}, UnsupportedRequest),
     ]),
+    ('CurlCFFI', 'http', [
+        ({'cookiejar': 'notacookiejar'}, AssertionError),
+        ({'cookiejar': YoutubeDLCookieJar()}, False),
+        ({'timeout': 1}, False),
+        ({'timeout': 'notatimeout'}, AssertionError),
+        ({'unsupported': 'value'}, UnsupportedRequest),
+        ({'impersonate': ImpersonateTarget('badtarget', None, None, None)}, UnsupportedRequest),
+        ({'impersonate': 123}, AssertionError),
+        ({'impersonate': ImpersonateTarget('chrome', None, None, None)}, False),
+        ({'impersonate': ImpersonateTarget(None, None, None, None)}, False),
+        ({'impersonate': ImpersonateTarget()}, False),
+        ({'impersonate': 'chrome'}, AssertionError)
+    ]),
     (NoCheckRH, 'http', [
         ({'cookiejar': 'notacookiejar'}, False),
         ({'somerandom': 'test'}, False), # but any extension is allowed through

@@ -1055,7 +1283,7 @@ class HTTPSupportedRH(ValidationRH):
     def test_url_scheme(self, handler, scheme, fail, handler_kwargs):
         run_validation(handler, fail, Request(f'{scheme}://'), **(handler_kwargs or {}))

-    @pytest.mark.parametrize('handler,fail', [('Urllib', False), ('Requests', False)], indirect=['handler'])
+    @pytest.mark.parametrize('handler,fail', [('Urllib', False), ('Requests', False), ('CurlCFFI', False)], indirect=['handler'])
     def test_no_proxy(self, handler, fail):
         run_validation(handler, fail, Request('http://', proxies={'no': '127.0.0.1,github.com'}))
         run_validation(handler, fail, Request('http://'), proxies={'no': '127.0.0.1,github.com'})

@@ -1078,13 +1306,13 @@ def test_proxy_scheme(self, handler, req_scheme, scheme, fail):
         run_validation(handler, fail, Request(f'{req_scheme}://', proxies={req_scheme: f'{scheme}://example.com'}))
         run_validation(handler, fail, Request(f'{req_scheme}://'), proxies={req_scheme: f'{scheme}://example.com'})

-    @pytest.mark.parametrize('handler', ['Urllib', HTTPSupportedRH, 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', HTTPSupportedRH, 'Requests', 'CurlCFFI'], indirect=True)
     def test_empty_proxy(self, handler):
         run_validation(handler, False, Request('http://', proxies={'http': None}))
         run_validation(handler, False, Request('http://'), proxies={'http': None})

     @pytest.mark.parametrize('proxy_url', ['//example.com', 'example.com', '127.0.0.1', '/a/b/c'])
-    @pytest.mark.parametrize('handler', ['Urllib', 'Requests'], indirect=True)
+    @pytest.mark.parametrize('handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
     def test_invalid_proxy_url(self, handler, proxy_url):
         run_validation(handler, UnsupportedRequest, Request('http://', proxies={'http': proxy_url}))


@@ -1113,6 +1341,10 @@ def __init__(self, request):

 class FakeRH(RequestHandler):

+    def __init__(self, *args, **params):
+        self.params = params
+        super().__init__(*args, **params)
+
     def _validate(self, request):
         return
@@ -1271,15 +1503,10 @@ def test_compat_opener(self):
         ('', {'all': '__noproxy__'}),
         (None, {'http': 'http://127.0.0.1:8081', 'https': 'http://127.0.0.1:8081'}) # env, set https
     ])
-    def test_proxy(self, proxy, expected):
-        old_http_proxy = os.environ.get('HTTP_PROXY')
-        try:
-            os.environ['HTTP_PROXY'] = 'http://127.0.0.1:8081' # ensure that provided proxies override env
-            with FakeYDL({'proxy': proxy}) as ydl:
-                assert ydl.proxies == expected
-        finally:
-            if old_http_proxy:
-                os.environ['HTTP_PROXY'] = old_http_proxy
+    def test_proxy(self, proxy, expected, monkeypatch):
+        monkeypatch.setenv('HTTP_PROXY', 'http://127.0.0.1:8081')
+        with FakeYDL({'proxy': proxy}) as ydl:
+            assert ydl.proxies == expected

     def test_compat_request(self):
         with FakeRHYDL() as ydl:
@@ -1331,6 +1558,95 @@ def test_legacy_server_connect_error(self):
             with pytest.raises(SSLError, match='testerror'):
                 ydl.urlopen('ssl://testerror')

+    def test_unsupported_impersonate_target(self):
+        class FakeImpersonationRHYDL(FakeYDL):
+            def __init__(self, *args, **kwargs):
+                class HTTPRH(RequestHandler):
+                    def _send(self, request: Request):
+                        pass
+                    _SUPPORTED_URL_SCHEMES = ('http',)
+                    _SUPPORTED_PROXY_SCHEMES = None
+
+                super().__init__(*args, **kwargs)
+                self._request_director = self.build_request_director([HTTPRH])
+
+        with FakeImpersonationRHYDL() as ydl:
+            with pytest.raises(
+                RequestError,
+                match=r'Impersonate target "test" is not available'
+            ):
+                ydl.urlopen(Request('http://', extensions={'impersonate': ImpersonateTarget('test', None, None, None)}))
+
+    def test_unsupported_impersonate_extension(self):
+        class FakeHTTPRHYDL(FakeYDL):
+            def __init__(self, *args, **kwargs):
+                class IRH(ImpersonateRequestHandler):
+                    def _send(self, request: Request):
+                        pass
+
+                    _SUPPORTED_URL_SCHEMES = ('http',)
+                    _SUPPORTED_IMPERSONATE_TARGET_MAP = {ImpersonateTarget('abc',): 'test'}
+                    _SUPPORTED_PROXY_SCHEMES = None
+
+                super().__init__(*args, **kwargs)
+                self._request_director = self.build_request_director([IRH])
+
+        with FakeHTTPRHYDL() as ydl:
+            with pytest.raises(
+                RequestError,
+                match=r'Impersonate target "test" is not available'
+            ):
+                ydl.urlopen(Request('http://', extensions={'impersonate': ImpersonateTarget('test', None, None, None)}))
+
+    def test_raise_impersonate_error(self):
+        with pytest.raises(
+            YoutubeDLError,
+            match=r'Impersonate target "test" is not available'
+        ):
+            FakeYDL({'impersonate': ImpersonateTarget('test', None, None, None)})
+
+    def test_pass_impersonate_param(self, monkeypatch):
+
+        class IRH(ImpersonateRequestHandler):
+            def _send(self, request: Request):
+                pass
+
+            _SUPPORTED_URL_SCHEMES = ('http',)
+            _SUPPORTED_IMPERSONATE_TARGET_MAP = {ImpersonateTarget('abc'): 'test'}
+
+        # Bypass the check on initialize
+        brh = FakeYDL.build_request_director
+        monkeypatch.setattr(FakeYDL, 'build_request_director', lambda cls, handlers, preferences=None: brh(cls, handlers=[IRH]))
+
+        with FakeYDL({
+            'impersonate': ImpersonateTarget('abc', None, None, None)
+        }) as ydl:
+            rh = self.build_handler(ydl, IRH)
+            assert rh.impersonate == ImpersonateTarget('abc', None, None, None)
+
+    def test_get_impersonate_targets(self):
+        handlers = []
+        for target_client in ('abc', 'xyz', 'asd'):
+            class TestRH(ImpersonateRequestHandler):
+                def _send(self, request: Request):
+                    pass
+                _SUPPORTED_URL_SCHEMES = ('http',)
+                _SUPPORTED_IMPERSONATE_TARGET_MAP = {ImpersonateTarget(target_client,): 'test'}
+                RH_KEY = target_client
+                RH_NAME = target_client
+            handlers.append(TestRH)
+
+        with FakeYDL() as ydl:
+            ydl._request_director = ydl.build_request_director(handlers)
+            assert set(ydl._get_available_impersonate_targets()) == {
+                (ImpersonateTarget('xyz'), 'xyz'),
+                (ImpersonateTarget('abc'), 'abc'),
+                (ImpersonateTarget('asd'), 'asd')
+            }
+            assert ydl._impersonate_target_available(ImpersonateTarget('abc'))
+            assert ydl._impersonate_target_available(ImpersonateTarget())
+            assert not ydl._impersonate_target_available(ImpersonateTarget('zxy'))
+
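`_get_available_impersonate_targets()` as exercised above yields `(target, handler_name)` pairs, which is what `--list-impersonate-targets` prints. A hedged sketch of calling it directly (it is a private helper, so the exact surface is an assumption):

```python
# Hedged sketch: enumerate impersonation targets the way the test above does.
from yt_dlp import YoutubeDL

with YoutubeDL() as ydl:
    for target, handler_name in ydl._get_available_impersonate_targets():
        print(f'{target}  (provided by {handler_name})')
```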
     @pytest.mark.parametrize('proxy_key,proxy_url,expected', [
         ('http', '__noproxy__', None),
         ('no', '127.0.0.1,foo.bar', '127.0.0.1,foo.bar'),

@@ -1341,23 +1657,17 @@ def test_legacy_server_connect_error(self):
         ('http', 'socks4://example.com', 'socks4://example.com'),
         ('unrelated', '/bad/proxy', '/bad/proxy'), # clean_proxies should ignore bad proxies
     ])
-    def test_clean_proxy(self, proxy_key, proxy_url, expected):
+    def test_clean_proxy(self, proxy_key, proxy_url, expected, monkeypatch):
         # proxies should be cleaned in urlopen()
         with FakeRHYDL() as ydl:
             req = ydl.urlopen(Request('test://', proxies={proxy_key: proxy_url})).request
             assert req.proxies[proxy_key] == expected

         # and should also be cleaned when building the handler
-        env_key = f'{proxy_key.upper()}_PROXY'
-        old_env_proxy = os.environ.get(env_key)
-        try:
-            os.environ[env_key] = proxy_url # ensure that provided proxies override env
-            with FakeYDL() as ydl:
-                rh = self.build_handler(ydl)
-                assert rh.proxies[proxy_key] == expected
-        finally:
-            if old_env_proxy:
-                os.environ[env_key] = old_env_proxy
+        monkeypatch.setenv(f'{proxy_key.upper()}_PROXY', proxy_url)
+        with FakeYDL() as ydl:
+            rh = self.build_handler(ydl)
+            assert rh.proxies[proxy_key] == expected

     def test_clean_proxy_header(self):
         with FakeRHYDL() as ydl:
@@ -1629,3 +1939,71 @@ def test_compat(self):
         assert res.geturl() == res.url
         assert res.info() is res.headers
         assert res.getheader('test') == res.get_header('test')
+
+
+class TestImpersonateTarget:
+    @pytest.mark.parametrize('target_str,expected', [
+        ('abc', ImpersonateTarget('abc', None, None, None)),
+        ('abc-120_esr', ImpersonateTarget('abc', '120_esr', None, None)),
+        ('abc-120:xyz', ImpersonateTarget('abc', '120', 'xyz', None)),
+        ('abc-120:xyz-5.6', ImpersonateTarget('abc', '120', 'xyz', '5.6')),
+        ('abc:xyz', ImpersonateTarget('abc', None, 'xyz', None)),
+        ('abc:', ImpersonateTarget('abc', None, None, None)),
+        ('abc-120:', ImpersonateTarget('abc', '120', None, None)),
+        (':xyz', ImpersonateTarget(None, None, 'xyz', None)),
+        (':xyz-6.5', ImpersonateTarget(None, None, 'xyz', '6.5')),
+        (':', ImpersonateTarget(None, None, None, None)),
+        ('', ImpersonateTarget(None, None, None, None)),
+    ])
+    def test_target_from_str(self, target_str, expected):
+        assert ImpersonateTarget.from_str(target_str) == expected
+
+    @pytest.mark.parametrize('target_str', [
+        '-120', ':-12.0', '-12:-12', '-:-',
+        '::', 'a-c-d:', 'a-c-d:e-f-g', 'a:b:'
+    ])
+    def test_target_from_invalid_str(self, target_str):
+        with pytest.raises(ValueError):
+            ImpersonateTarget.from_str(target_str)
+
+    @pytest.mark.parametrize('target,expected', [
+        (ImpersonateTarget('abc', None, None, None), 'abc'),
+        (ImpersonateTarget('abc', '120', None, None), 'abc-120'),
+        (ImpersonateTarget('abc', '120', 'xyz', None), 'abc-120:xyz'),
+        (ImpersonateTarget('abc', '120', 'xyz', '5'), 'abc-120:xyz-5'),
+        (ImpersonateTarget('abc', None, 'xyz', None), 'abc:xyz'),
+        (ImpersonateTarget('abc', '120', None, None), 'abc-120'),
+        (ImpersonateTarget('abc', '120', 'xyz', None), 'abc-120:xyz'),
+        (ImpersonateTarget('abc', None, 'xyz'), 'abc:xyz'),
+        (ImpersonateTarget(None, None, 'xyz', '6.5'), ':xyz-6.5'),
+        (ImpersonateTarget('abc', ), 'abc'),
+        (ImpersonateTarget(None, None, None, None), ''),
+    ])
+    def test_str(self, target, expected):
+        assert str(target) == expected
+
+    @pytest.mark.parametrize('args', [
+        ('abc', None, None, '5'),
+        ('abc', '120', None, '5'),
+        (None, '120', None, None),
+        (None, '120', None, '5'),
+        (None, None, None, '5'),
+        (None, '120', 'xyz', '5'),
+    ])
+    def test_invalid_impersonate_target(self, args):
+        with pytest.raises(ValueError):
+            ImpersonateTarget(*args)
+
+    @pytest.mark.parametrize('target1,target2,is_in,is_eq', [
+        (ImpersonateTarget('abc', None, None, None), ImpersonateTarget('abc', None, None, None), True, True),
+        (ImpersonateTarget('abc', None, None, None), ImpersonateTarget('abc', '120', None, None), True, False),
+        (ImpersonateTarget('abc', None, 'xyz', 'test'), ImpersonateTarget('abc', '120', 'xyz', None), True, False),
+        (ImpersonateTarget('abc', '121', 'xyz', 'test'), ImpersonateTarget('abc', '120', 'xyz', 'test'), False, False),
+        (ImpersonateTarget('abc'), ImpersonateTarget('abc', '120', 'xyz', 'test'), True, False),
+        (ImpersonateTarget('abc', '120', 'xyz', 'test'), ImpersonateTarget('abc'), True, False),
+        (ImpersonateTarget(), ImpersonateTarget('abc', '120', 'xyz'), True, False),
+        (ImpersonateTarget(), ImpersonateTarget(), True, True),
+    ])
+    def test_impersonate_target_in(self, target1, target2, is_in, is_eq):
+        assert (target1 in target2) is is_in
+        assert (target1 == target2) is is_eq
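Reading the TestImpersonateTarget cases above, the target string format is `CLIENT[-VERSION][:OS[-VERSION]]` with every component optional. A quick illustration built only from values the tests assert:

```python
from yt_dlp.networking.impersonate import ImpersonateTarget

# CLIENT[-VERSION][:OS[-VERSION]] round-trips through from_str()/str()
target = ImpersonateTarget.from_str('abc-120:xyz-5.6')
assert target == ImpersonateTarget('abc', '120', 'xyz', '5.6')
assert str(target) == 'abc-120:xyz-5.6'

# A broader target "contains" a more specific one (and vice versa), per test_impersonate_target_in
assert ImpersonateTarget('abc') in ImpersonateTarget('abc', '120', 'xyz')
assert ImpersonateTarget('abc', '120', 'xyz') in ImpersonateTarget('abc')
```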
test/test_socks.py

@@ -286,8 +286,14 @@ def ctx(request):
     return CTX_MAP[request.param]()


+@pytest.mark.parametrize(
+    'handler,ctx', [
+        ('Urllib', 'http'),
+        ('Requests', 'http'),
+        ('Websockets', 'ws'),
+        ('CurlCFFI', 'http')
+    ], indirect=True)
 class TestSocks4Proxy:
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks4_no_auth(self, handler, ctx):
         with handler() as rh:
             with ctx.socks_server(Socks4ProxyHandler) as server_address:

@@ -295,7 +301,6 @@ def test_socks4_no_auth(self, handler, ctx):
                     rh, proxies={'all': f'socks4://{server_address}'})
             assert response['version'] == 4

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks4_auth(self, handler, ctx):
         with handler() as rh:
             with ctx.socks_server(Socks4ProxyHandler, user_id='user') as server_address:

@@ -305,7 +310,6 @@ def test_socks4_auth(self, handler, ctx):
                     rh, proxies={'all': f'socks4://user:@{server_address}'})
             assert response['version'] == 4

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks4a_ipv4_target(self, handler, ctx):
         with ctx.socks_server(Socks4ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks4a://{server_address}'}) as rh:

@@ -313,7 +317,6 @@ def test_socks4a_ipv4_target(self, handler, ctx):
                 assert response['version'] == 4
                 assert (response['ipv4_address'] == '127.0.0.1') != (response['domain_address'] == '127.0.0.1')

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks4a_domain_target(self, handler, ctx):
         with ctx.socks_server(Socks4ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks4a://{server_address}'}) as rh:

@@ -322,7 +325,6 @@ def test_socks4a_domain_target(self, handler, ctx):
                 assert response['ipv4_address'] is None
                 assert response['domain_address'] == 'localhost'

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_ipv4_client_source_address(self, handler, ctx):
         with ctx.socks_server(Socks4ProxyHandler) as server_address:
             source_address = f'127.0.0.{random.randint(5, 255)}'

@@ -333,7 +335,6 @@ def test_ipv4_client_source_address(self, handler, ctx):
             assert response['client_address'][0] == source_address
             assert response['version'] == 4

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     @pytest.mark.parametrize('reply_code', [
         Socks4CD.REQUEST_REJECTED_OR_FAILED,
         Socks4CD.REQUEST_REJECTED_CANNOT_CONNECT_TO_IDENTD,

@@ -345,7 +346,6 @@ def test_socks4_errors(self, handler, ctx, reply_code):
                 with pytest.raises(ProxyError):
                     ctx.socks_info_request(rh)

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_ipv6_socks4_proxy(self, handler, ctx):
         with ctx.socks_server(Socks4ProxyHandler, bind_ip='::1') as server_address:
             with handler(proxies={'all': f'socks4://{server_address}'}) as rh:

@@ -354,7 +354,6 @@ def test_ipv6_socks4_proxy(self, handler, ctx):
                 assert response['ipv4_address'] == '127.0.0.1'
                 assert response['version'] == 4

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_timeout(self, handler, ctx):
         with ctx.socks_server(Socks4ProxyHandler, sleep=2) as server_address:
             with handler(proxies={'all': f'socks4://{server_address}'}, timeout=0.5) as rh:

@@ -362,9 +361,15 @@ def test_timeout(self, handler, ctx):
                     ctx.socks_info_request(rh)


+@pytest.mark.parametrize(
+    'handler,ctx', [
+        ('Urllib', 'http'),
+        ('Requests', 'http'),
+        ('Websockets', 'ws'),
+        ('CurlCFFI', 'http')
+    ], indirect=True)
 class TestSocks5Proxy:

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks5_no_auth(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}) as rh:

@@ -372,7 +377,6 @@ def test_socks5_no_auth(self, handler, ctx):
                 assert response['auth_methods'] == [0x0]
                 assert response['version'] == 5

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks5_user_pass(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler, auth=('test', 'testpass')) as server_address:
             with handler() as rh:

@@ -385,7 +389,6 @@ def test_socks5_user_pass(self, handler, ctx):
             assert response['auth_methods'] == [Socks5Auth.AUTH_NONE, Socks5Auth.AUTH_USER_PASS]
             assert response['version'] == 5

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks5_ipv4_target(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}) as rh:

@@ -393,7 +396,6 @@ def test_socks5_ipv4_target(self, handler, ctx):
                 assert response['ipv4_address'] == '127.0.0.1'
                 assert response['version'] == 5

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks5_domain_target(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}) as rh:

@@ -401,7 +403,6 @@ def test_socks5_domain_target(self, handler, ctx):
                 assert (response['ipv4_address'] == '127.0.0.1') != (response['ipv6_address'] == '::1')
                 assert response['version'] == 5

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks5h_domain_target(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5h://{server_address}'}) as rh:

@@ -410,7 +411,6 @@ def test_socks5h_domain_target(self, handler, ctx):
                 assert response['domain_address'] == 'localhost'
                 assert response['version'] == 5

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks5h_ip_target(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5h://{server_address}'}) as rh:

@@ -419,7 +419,6 @@ def test_socks5h_ip_target(self, handler, ctx):
                 assert response['domain_address'] is None
                 assert response['version'] == 5

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_socks5_ipv6_destination(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}) as rh:

@@ -427,7 +426,6 @@ def test_socks5_ipv6_destination(self, handler, ctx):
                 assert response['ipv6_address'] == '::1'
                 assert response['version'] == 5

-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_ipv6_socks5_proxy(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler, bind_ip='::1') as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}) as rh:

@@ -438,7 +436,6 @@ def test_ipv6_socks5_proxy(self, handler, ctx):

     # XXX: is there any feasible way of testing IPv6 source addresses?
     # Same would go for non-proxy source_address test...
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
     def test_ipv4_client_source_address(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             source_address = f'127.0.0.{random.randint(5, 255)}'

@@ -448,7 +445,6 @@ def test_ipv4_client_source_address(self, handler, ctx):
|
|||
assert response['client_address'][0] == source_address
|
||||
assert response['version'] == 5
|
||||
|
||||
@pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Requests', 'http'), ('Websockets', 'ws')], indirect=True)
|
||||
@pytest.mark.parametrize('reply_code', [
|
||||
Socks5Reply.GENERAL_FAILURE,
|
||||
Socks5Reply.CONNECTION_NOT_ALLOWED,
|
||||
|
@ -465,7 +461,6 @@ def test_socks5_errors(self, handler, ctx, reply_code):
|
|||
with pytest.raises(ProxyError):
|
||||
ctx.socks_info_request(rh)
|
||||
|
||||
@pytest.mark.parametrize('handler,ctx', [('Urllib', 'http'), ('Websockets', 'ws')], indirect=True)
|
||||
def test_timeout(self, handler, ctx):
|
||||
with ctx.socks_server(Socks5ProxyHandler, sleep=2) as server_address:
|
||||
with handler(proxies={'all': f'socks5://{server_address}'}, timeout=1) as rh:
|
||||
|
|
|
@ -32,8 +32,6 @@
|
|||
)
|
||||
from yt_dlp.utils.networking import HTTPHeaderDict
|
||||
|
||||
from test.conftest import validate_and_send
|
||||
|
||||
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
|
@ -66,7 +64,9 @@ def process_request(self, request):
|
|||
|
||||
def create_websocket_server(**ws_kwargs):
|
||||
import websockets.sync.server
|
||||
wsd = websockets.sync.server.serve(websocket_handler, '127.0.0.1', 0, process_request=process_request, **ws_kwargs)
|
||||
wsd = websockets.sync.server.serve(
|
||||
websocket_handler, '127.0.0.1', 0,
|
||||
process_request=process_request, open_timeout=2, **ws_kwargs)
|
||||
ws_port = wsd.socket.getsockname()[1]
|
||||
ws_server_thread = threading.Thread(target=wsd.serve_forever)
|
||||
ws_server_thread.daemon = True
|
||||
|
@@ -100,6 +100,19 @@ def create_mtls_wss_websocket_server():
    return create_websocket_server(ssl_context=sslctx)


def ws_validate_and_send(rh, req):
    rh.validate(req)
    max_tries = 3
    for i in range(max_tries):
        try:
            return rh.send(req)
        except TransportError as e:
            if i < (max_tries - 1) and 'connection closed during handshake' in str(e):
                # websockets server sometimes hangs on new connections
                continue
            raise
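A rough usage sketch for the retry helper above (names are taken from this test module; `rh` and `base_url` stand in for the handler fixture and the test-server URL used by the tests below):

```python
from yt_dlp.networking import Request

def fetch_ws_echo(rh, base_url):
    # ws_validate_and_send() retries the opening handshake up to three times
    # when the test server drops it, then behaves like validate_and_send().
    ws = ws_validate_and_send(rh, Request(base_url))
    try:
        ws.send('foo')
        return ws.recv()  # the test server echoes the payload back
    finally:
        ws.close()
```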
|
||||
|
||||
|
||||
@pytest.mark.skipif(not websockets, reason='websockets must be installed to test websocket request handlers')
|
||||
class TestWebsSocketRequestHandlerConformance:
|
||||
@classmethod
|
||||
|
@ -119,7 +132,7 @@ def setup_class(cls):
|
|||
@pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
|
||||
def test_basic_websockets(self, handler):
|
||||
with handler() as rh:
|
||||
ws = validate_and_send(rh, Request(self.ws_base_url))
|
||||
ws = ws_validate_and_send(rh, Request(self.ws_base_url))
|
||||
assert 'upgrade' in ws.headers
|
||||
assert ws.status == 101
|
||||
ws.send('foo')
|
||||
|
@ -131,7 +144,7 @@ def test_basic_websockets(self, handler):
|
|||
@pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
|
||||
def test_send_types(self, handler, msg, opcode):
|
||||
with handler() as rh:
|
||||
ws = validate_and_send(rh, Request(self.ws_base_url))
|
||||
ws = ws_validate_and_send(rh, Request(self.ws_base_url))
|
||||
ws.send(msg)
|
||||
assert int(ws.recv()) == opcode
|
||||
ws.close()
|
||||
|
@ -140,10 +153,10 @@ def test_send_types(self, handler, msg, opcode):
|
|||
def test_verify_cert(self, handler):
|
||||
with handler() as rh:
|
||||
with pytest.raises(CertificateVerifyError):
|
||||
validate_and_send(rh, Request(self.wss_base_url))
|
||||
ws_validate_and_send(rh, Request(self.wss_base_url))
|
||||
|
||||
with handler(verify=False) as rh:
|
||||
ws = validate_and_send(rh, Request(self.wss_base_url))
|
||||
ws = ws_validate_and_send(rh, Request(self.wss_base_url))
|
||||
assert ws.status == 101
|
||||
ws.close()
|
||||
|
||||
|
@ -151,7 +164,7 @@ def test_verify_cert(self, handler):
|
|||
def test_ssl_error(self, handler):
|
||||
with handler(verify=False) as rh:
|
||||
with pytest.raises(SSLError, match=r'ssl(?:v3|/tls) alert handshake failure') as exc_info:
|
||||
validate_and_send(rh, Request(self.bad_wss_host))
|
||||
ws_validate_and_send(rh, Request(self.bad_wss_host))
|
||||
assert not issubclass(exc_info.type, CertificateVerifyError)
|
||||
|
||||
@pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
|
||||
|
@ -163,7 +176,7 @@ def test_ssl_error(self, handler):
|
|||
])
|
||||
def test_percent_encode(self, handler, path, expected):
|
||||
with handler() as rh:
|
||||
ws = validate_and_send(rh, Request(f'{self.ws_base_url}{path}'))
|
||||
ws = ws_validate_and_send(rh, Request(f'{self.ws_base_url}{path}'))
|
||||
ws.send('path')
|
||||
assert ws.recv() == expected
|
||||
assert ws.status == 101
|
||||
|
@ -174,7 +187,7 @@ def test_remove_dot_segments(self, handler):
|
|||
with handler() as rh:
|
||||
# This isn't a comprehensive test,
|
||||
# but it should be enough to check whether the handler is removing dot segments
|
||||
ws = validate_and_send(rh, Request(f'{self.ws_base_url}/a/b/./../../test'))
|
||||
ws = ws_validate_and_send(rh, Request(f'{self.ws_base_url}/a/b/./../../test'))
|
||||
assert ws.status == 101
|
||||
ws.send('path')
|
||||
assert ws.recv() == '/test'
|
||||
|
@ -187,7 +200,7 @@ def test_remove_dot_segments(self, handler):
|
|||
def test_raise_http_error(self, handler, status):
|
||||
with handler() as rh:
|
||||
with pytest.raises(HTTPError) as exc_info:
|
||||
validate_and_send(rh, Request(f'{self.ws_base_url}/gen_{status}'))
|
||||
ws_validate_and_send(rh, Request(f'{self.ws_base_url}/gen_{status}'))
|
||||
assert exc_info.value.status == status
|
||||
|
||||
@pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
|
||||
|
@ -198,7 +211,7 @@ def test_raise_http_error(self, handler, status):
|
|||
def test_timeout(self, handler, params, extensions):
|
||||
with handler(**params) as rh:
|
||||
with pytest.raises(TransportError):
|
||||
validate_and_send(rh, Request(self.ws_base_url, extensions=extensions))
|
||||
ws_validate_and_send(rh, Request(self.ws_base_url, extensions=extensions))
|
||||
|
||||
@pytest.mark.parametrize('handler', ['Websockets'], indirect=True)
|
||||
def test_cookies(self, handler):
|
||||
|
@ -210,18 +223,18 @@ def test_cookies(self, handler):
|
|||
comment_url=None, rest={}))
|
||||
|
||||
with handler(cookiejar=cookiejar) as rh:
|
||||
ws = validate_and_send(rh, Request(self.ws_base_url))
|
||||
ws = ws_validate_and_send(rh, Request(self.ws_base_url))
|
||||
ws.send('headers')
|
||||
assert json.loads(ws.recv())['cookie'] == 'test=ytdlp'
|
||||
ws.close()
|
||||
|
||||
with handler() as rh:
|
||||
ws = validate_and_send(rh, Request(self.ws_base_url))
|
||||
ws = ws_validate_and_send(rh, Request(self.ws_base_url))
|
||||
ws.send('headers')
|
||||
assert 'cookie' not in json.loads(ws.recv())
|
||||
ws.close()
|
||||
|
||||
ws = validate_and_send(rh, Request(self.ws_base_url, extensions={'cookiejar': cookiejar}))
|
||||
ws = ws_validate_and_send(rh, Request(self.ws_base_url, extensions={'cookiejar': cookiejar}))
|
||||
ws.send('headers')
|
||||
assert json.loads(ws.recv())['cookie'] == 'test=ytdlp'
|
||||
ws.close()
|
||||
|
@ -231,7 +244,7 @@ def test_source_address(self, handler):
|
|||
source_address = f'127.0.0.{random.randint(5, 255)}'
|
||||
verify_address_availability(source_address)
|
||||
with handler(source_address=source_address) as rh:
|
||||
ws = validate_and_send(rh, Request(self.ws_base_url))
|
||||
ws = ws_validate_and_send(rh, Request(self.ws_base_url))
|
||||
ws.send('source_address')
|
||||
assert source_address == ws.recv()
|
||||
ws.close()
|
||||
|
@ -240,7 +253,7 @@ def test_source_address(self, handler):
|
|||
def test_response_url(self, handler):
|
||||
with handler() as rh:
|
||||
url = f'{self.ws_base_url}/something'
|
||||
ws = validate_and_send(rh, Request(url))
|
||||
ws = ws_validate_and_send(rh, Request(url))
|
||||
assert ws.url == url
|
||||
ws.close()
|
||||
|
||||
|
@ -248,14 +261,14 @@ def test_response_url(self, handler):
|
|||
def test_request_headers(self, handler):
|
||||
with handler(headers=HTTPHeaderDict({'test1': 'test', 'test2': 'test2'})) as rh:
|
||||
# Global Headers
|
||||
ws = validate_and_send(rh, Request(self.ws_base_url))
|
||||
ws = ws_validate_and_send(rh, Request(self.ws_base_url))
|
||||
ws.send('headers')
|
||||
headers = HTTPHeaderDict(json.loads(ws.recv()))
|
||||
assert headers['test1'] == 'test'
|
||||
ws.close()
|
||||
|
||||
# Per request headers, merged with global
|
||||
ws = validate_and_send(rh, Request(
|
||||
ws = ws_validate_and_send(rh, Request(
|
||||
self.ws_base_url, headers={'test2': 'changed', 'test3': 'test3'}))
|
||||
ws.send('headers')
|
||||
headers = HTTPHeaderDict(json.loads(ws.recv()))
|
||||
|
@ -288,7 +301,7 @@ def test_mtls(self, handler, client_cert):
|
|||
verify=False,
|
||||
client_cert=client_cert
|
||||
) as rh:
|
||||
validate_and_send(rh, Request(self.mtls_wss_base_url)).close()
|
||||
ws_validate_and_send(rh, Request(self.mtls_wss_base_url)).close()
|
||||
|
||||
|
||||
def create_fake_ws_connection(raised):
|
||||
|
|
|
@ -42,6 +42,7 @@
|
|||
SSLError,
|
||||
network_exceptions,
|
||||
)
|
||||
from .networking.impersonate import ImpersonateRequestHandler
|
||||
from .plugins import directories as plugin_directories
|
||||
from .postprocessor import _PLUGIN_CLASSES as plugin_pps
|
||||
from .postprocessor import (
|
||||
|
@ -99,6 +100,7 @@
|
|||
SameFileError,
|
||||
UnavailableVideoError,
|
||||
UserNotLive,
|
||||
YoutubeDLError,
|
||||
age_restricted,
|
||||
args_to_str,
|
||||
bug_reports_message,
|
||||
|
@ -402,6 +404,8 @@ class YoutubeDL:
|
|||
- "detect_or_warn": check whether we can do anything
|
||||
about it, warn otherwise (default)
|
||||
source_address: Client-side IP address to bind to.
|
||||
impersonate: Client to impersonate for requests.
|
||||
An ImpersonateTarget (from yt_dlp.networking.impersonate)
|
||||
sleep_interval_requests: Number of seconds to sleep between requests
|
||||
during extraction
|
||||
sleep_interval: Number of seconds to sleep before each download when
|
||||
|
@@ -713,6 +717,13 @@ def check_deprecated(param, option, suggestion):
        for msg in self.params.get('_deprecation_warnings', []):
            self.deprecated_feature(msg)

        if impersonate_target := self.params.get('impersonate'):
            if not self._impersonate_target_available(impersonate_target):
                raise YoutubeDLError(
                    f'Impersonate target "{impersonate_target}" is not available. '
                    f'Use --list-impersonate-targets to see available targets. '
                    f'You may be missing dependencies required to support this target.')

        if 'list-formats' in self.params['compat_opts']:
            self.params['listformats_table'] = False
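A minimal sketch of driving the new `impersonate` parameter programmatically (assumes the curl_cffi handler is installed; the target and URL here are illustrative, not part of the diff):

```python
from yt_dlp import YoutubeDL
from yt_dlp.networking.impersonate import ImpersonateTarget

# The check above raises YoutubeDLError at construction time if no installed
# request handler can satisfy the requested target.
params = {'impersonate': ImpersonateTarget('chrome', None, 'windows')}
with YoutubeDL(params) as ydl:
    ydl.download(['https://example.com/video'])
```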
|
||||
|
||||
|
@@ -4077,6 +4088,22 @@ def _opener(self):
        handler = self._request_director.handlers['Urllib']
        return handler._get_instance(cookiejar=self.cookiejar, proxies=self.proxies)

    def _get_available_impersonate_targets(self):
        # todo(future): make available as public API
        return [
            (target, rh.RH_NAME)
            for rh in self._request_director.handlers.values()
            if isinstance(rh, ImpersonateRequestHandler)
            for target in rh.supported_targets
        ]

    def _impersonate_target_available(self, target):
        # todo(future): make available as public API
        return any(
            rh.is_supported_target(target)
            for rh in self._request_director.handlers.values()
            if isinstance(rh, ImpersonateRequestHandler))
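For orientation, a sketch of how these helpers behave (both are still private, per the todo notes above):

```python
from yt_dlp import YoutubeDL
from yt_dlp.networking.impersonate import ImpersonateTarget

with YoutubeDL() as ydl:
    # (ImpersonateTarget, handler name) pairs from every impersonation-capable handler
    for target, handler_name in ydl._get_available_impersonate_targets():
        print(f'{target} via {handler_name}')
    # A partial target matches any more specific supported target
    print(ydl._impersonate_target_available(ImpersonateTarget('chrome')))
```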
|
||||
|
||||
def urlopen(self, req):
|
||||
""" Start an HTTP download """
|
||||
if isinstance(req, str):
|
||||
|
@ -4108,9 +4135,13 @@ def urlopen(self, req):
|
|||
raise RequestError(
|
||||
'file:// URLs are disabled by default in yt-dlp for security reasons. '
|
||||
'Use --enable-file-urls to enable at your own risk.', cause=ue) from ue
|
||||
if 'unsupported proxy type: "https"' in ue.msg.lower():
|
||||
if (
|
||||
'unsupported proxy type: "https"' in ue.msg.lower()
|
||||
and 'requests' not in self._request_director.handlers
|
||||
and 'curl_cffi' not in self._request_director.handlers
|
||||
):
|
||||
raise RequestError(
|
||||
'To use an HTTPS proxy for this request, one of the following dependencies needs to be installed: requests')
|
||||
'To use an HTTPS proxy for this request, one of the following dependencies needs to be installed: requests, curl_cffi')
|
||||
|
||||
elif (
|
||||
re.match(r'unsupported url scheme: "wss?"', ue.msg.lower())
|
||||
|
@ -4120,6 +4151,13 @@ def urlopen(self, req):
|
|||
'This request requires WebSocket support. '
|
||||
'Ensure one of the following dependencies are installed: websockets',
|
||||
cause=ue) from ue
|
||||
|
||||
elif re.match(r'unsupported (?:extensions: impersonate|impersonate target)', ue.msg.lower()):
|
||||
raise RequestError(
|
||||
f'Impersonate target "{req.extensions["impersonate"]}" is not available.'
|
||||
f' See --list-impersonate-targets for available targets.'
|
||||
f' This request requires browser impersonation, however you may be missing dependencies'
|
||||
f' required to support this target.')
|
||||
raise
|
||||
except SSLError as e:
|
||||
if 'UNSAFE_LEGACY_RENEGOTIATION_DISABLED' in str(e):
|
||||
|
@ -4152,6 +4190,7 @@ def build_request_director(self, handlers, preferences=None):
|
|||
'timeout': 'socket_timeout',
|
||||
'legacy_ssl_support': 'legacyserverconnect',
|
||||
'enable_file_urls': 'enable_file_urls',
|
||||
'impersonate': 'impersonate',
|
||||
'client_cert': {
|
||||
'client_certificate': 'client_certificate',
|
||||
'client_certificate_key': 'client_certificate_key',
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
from .downloader.external import get_external_downloader
|
||||
from .extractor import list_extractor_classes
|
||||
from .extractor.adobepass import MSO_INFO
|
||||
from .networking.impersonate import ImpersonateTarget
|
||||
from .options import parseOpts
|
||||
from .postprocessor import (
|
||||
FFmpegExtractAudioPP,
|
||||
|
@ -48,6 +49,7 @@
|
|||
float_or_none,
|
||||
format_field,
|
||||
int_or_none,
|
||||
join_nonempty,
|
||||
match_filter_func,
|
||||
parse_bytes,
|
||||
parse_duration,
|
||||
|
@ -388,6 +390,9 @@ def parse_chapters(name, value, advanced=False):
|
|||
f'Supported keyrings are: {", ".join(sorted(SUPPORTED_KEYRINGS))}')
|
||||
opts.cookiesfrombrowser = (browser_name, profile, keyring, container)
|
||||
|
||||
if opts.impersonate is not None:
|
||||
opts.impersonate = ImpersonateTarget.from_str(opts.impersonate.lower())
|
||||
|
||||
# MetadataParser
|
||||
def metadataparser_actions(f):
|
||||
if isinstance(f, str):
|
||||
|
@ -911,6 +916,7 @@ def parse_options(argv=None):
|
|||
'postprocessors': postprocessors,
|
||||
'fixup': opts.fixup,
|
||||
'source_address': opts.source_address,
|
||||
'impersonate': opts.impersonate,
|
||||
'call_home': opts.call_home,
|
||||
'sleep_interval_requests': opts.sleep_interval_requests,
|
||||
'sleep_interval': opts.sleep_interval,
|
||||
|
@@ -980,6 +986,41 @@ def _real_main(argv=None):
            traceback.print_exc()
            ydl._download_retcode = 100

        if opts.list_impersonate_targets:

            known_targets = [
                # List of simplified targets we know are supported,
                # to help users know what dependencies may be required.
                (ImpersonateTarget('chrome'), 'curl_cffi'),
                (ImpersonateTarget('edge'), 'curl_cffi'),
                (ImpersonateTarget('safari'), 'curl_cffi'),
            ]

            available_targets = ydl._get_available_impersonate_targets()

            def make_row(target, handler):
                return [
                    join_nonempty(target.client.title(), target.version, delim='-') or '-',
                    join_nonempty((target.os or "").title(), target.os_version, delim='-') or '-',
                    handler,
                ]

            rows = [make_row(target, handler) for target, handler in available_targets]

            for known_target, known_handler in known_targets:
                if not any(
                    known_target in target and handler == known_handler
                    for target, handler in available_targets
                ):
                    rows.append([
                        ydl._format_out(text, ydl.Styles.SUPPRESS)
                        for text in make_row(known_target, f'{known_handler} (not available)')
                    ])

            ydl.to_screen('[info] Available impersonate targets')
            ydl.to_stdout(render_table(['Client', 'OS', 'Source'], rows, extra_gap=2, delim='-'))
            return
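With curl_cffi installed, the rendered table should come out roughly like the following (rows taken from the target map added in yt_dlp/networking/_curlcffi.py below; exact column widths and ordering depend on render_table):

```
[info] Available impersonate targets
Client       OS          Source
---------------------------------
Chrome-110   Windows-10  curl_cffi
Edge-101     Windows-10  curl_cffi
Safari-15.5  Macos-12    curl_cffi
Chrome-99    Android-12  curl_cffi
```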
|
||||
|
||||
if not actual_use:
|
||||
if pre_process:
|
||||
return ydl._download_retcode
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import sys
|
||||
|
||||
from PyInstaller.utils.hooks import collect_submodules
|
||||
from PyInstaller.utils.hooks import collect_submodules, collect_data_files
|
||||
|
||||
|
||||
def pycryptodome_module():
|
||||
|
@ -25,10 +25,12 @@ def get_hidden_imports():
|
|||
for module in ('websockets', 'requests', 'urllib3'):
|
||||
yield from collect_submodules(module)
|
||||
# These are auto-detected, but explicitly add them just in case
|
||||
yield from ('mutagen', 'brotli', 'certifi', 'secretstorage')
|
||||
yield from ('mutagen', 'brotli', 'certifi', 'secretstorage', 'curl_cffi')
|
||||
|
||||
|
||||
hiddenimports = list(get_hidden_imports())
|
||||
print(f'Adding imports: {hiddenimports}')
|
||||
|
||||
excludedimports = ['youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts', 'bundle']
|
||||
|
||||
datas = collect_data_files('curl_cffi', includes=['cacert.pem'])
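collect_data_files() returns (source, destination) tuples, so this line is what makes curl_cffi's bundled CA file end up inside the frozen build. A quick local sanity check (hypothetical snippet, not part of the hook):

```python
from PyInstaller.utils.hooks import collect_data_files

# Expected to yield something like [('/.../site-packages/curl_cffi/cacert.pem', 'curl_cffi')]
print(collect_data_files('curl_cffi', includes=['cacert.pem']))
```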
|
||||
|
|
|
@@ -74,6 +74,10 @@
    if hasattr(xattr, 'set'):  # pyxattr
        xattr._yt_dlp__identifier = 'pyxattr'

try:
    import curl_cffi
except ImportError:
    curl_cffi = None

from . import Cryptodome
|
||||
|
||||
|
|
|
@ -76,6 +76,23 @@ class ImgurIE(ImgurBaseIE):
|
|||
'thumbnail': 'https://i.imgur.com/jxBXAMCh.jpg',
|
||||
'dislike_count': int,
|
||||
},
|
||||
}, {
|
||||
# needs Accept header, ref: https://github.com/yt-dlp/yt-dlp/issues/9458
|
||||
'url': 'https://imgur.com/zV03bd5',
|
||||
'md5': '59df97884e8ba76143ff6b640a0e2904',
|
||||
'info_dict': {
|
||||
'id': 'zV03bd5',
|
||||
'ext': 'mp4',
|
||||
'title': 'Ive - Liz',
|
||||
'timestamp': 1710491255,
|
||||
'upload_date': '20240315',
|
||||
'like_count': int,
|
||||
'dislike_count': int,
|
||||
'duration': 56.92,
|
||||
'comment_count': int,
|
||||
'release_timestamp': 1710491255,
|
||||
'release_date': '20240315',
|
||||
},
|
||||
}]
|
||||
|
||||
def _real_extract(self, url):
|
||||
|
@@ -192,6 +209,7 @@ def og_get_size(media_type):
            'id': video_id,
            'formats': formats,
            'thumbnail': url_or_none(search('thumbnailUrl')),
            'http_headers': {'Accept': '*/*'},
        }
|
||||
|
||||
|
||||
|
|
|
@@ -28,3 +28,10 @@
    pass
except Exception as e:
    warnings.warn(f'Failed to import "websockets" request handler: {e}' + bug_reports_message())

try:
    from . import _curlcffi  # noqa: F401
except ImportError:
    pass
except Exception as e:
    warnings.warn(f'Failed to import "curl_cffi" request handler: {e}' + bug_reports_message())
|
||||
|
|
221
yt_dlp/networking/_curlcffi.py
Normal file
|
@ -0,0 +1,221 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import math
|
||||
import urllib.parse
|
||||
|
||||
from ._helper import InstanceStoreMixin, select_proxy
|
||||
from .common import (
|
||||
Features,
|
||||
Request,
|
||||
Response,
|
||||
register_preference,
|
||||
register_rh,
|
||||
)
|
||||
from .exceptions import (
|
||||
CertificateVerifyError,
|
||||
HTTPError,
|
||||
IncompleteRead,
|
||||
ProxyError,
|
||||
SSLError,
|
||||
TransportError,
|
||||
)
|
||||
from .impersonate import ImpersonateRequestHandler, ImpersonateTarget
|
||||
from ..dependencies import curl_cffi
|
||||
from ..utils import int_or_none
|
||||
|
||||
if curl_cffi is None:
    raise ImportError('curl_cffi is not installed')

curl_cffi_version = tuple(int_or_none(x, default=0) for x in curl_cffi.__version__.split('.'))

if curl_cffi_version != (0, 5, 10):
    curl_cffi._yt_dlp__version = f'{curl_cffi.__version__} (unsupported)'
    raise ImportError('Only curl_cffi 0.5.10 is supported')

import curl_cffi.requests
from curl_cffi.const import CurlECode, CurlOpt
|
||||
|
||||
|
||||
class CurlCFFIResponseReader(io.IOBase):
|
||||
def __init__(self, response: curl_cffi.requests.Response):
|
||||
self._response = response
|
||||
self._iterator = response.iter_content()
|
||||
self._buffer = b''
|
||||
self.bytes_read = 0
|
||||
|
||||
def readable(self):
|
||||
return True
|
||||
|
||||
def read(self, size=None):
|
||||
exception_raised = True
|
||||
try:
|
||||
while self._iterator and (size is None or len(self._buffer) < size):
|
||||
chunk = next(self._iterator, None)
|
||||
if chunk is None:
|
||||
self._iterator = None
|
||||
break
|
||||
self._buffer += chunk
|
||||
self.bytes_read += len(chunk)
|
||||
|
||||
if size is None:
|
||||
size = len(self._buffer)
|
||||
data = self._buffer[:size]
|
||||
self._buffer = self._buffer[size:]
|
||||
|
||||
# "free" the curl instance if the response is fully read.
|
||||
# curl_cffi doesn't do this automatically and only allows one open response per thread
|
||||
if not self._iterator and not self._buffer:
|
||||
self.close()
|
||||
exception_raised = False
|
||||
return data
|
||||
finally:
|
||||
if exception_raised:
|
||||
self.close()
|
||||
|
||||
def close(self):
|
||||
if not self.closed:
|
||||
self._response.close()
|
||||
self._buffer = b''
|
||||
super().close()
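A sketch of the reader's contract (assuming `resp` is a streamed curl_cffi.requests.Response); the point of the bookkeeping above is that the curl handle is released as soon as the body is fully drained or an error occurs:

```python
reader = CurlCFFIResponseReader(resp)
chunks = []
while True:
    data = reader.read(64 * 1024)  # buffers resp.iter_content() internally
    if not data:
        break
    chunks.append(data)
body = b''.join(chunks)
assert reader.closed  # closed automatically once iterator and buffer are exhausted
```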
|
||||
|
||||
|
||||
class CurlCFFIResponseAdapter(Response):
|
||||
fp: CurlCFFIResponseReader
|
||||
|
||||
def __init__(self, response: curl_cffi.requests.Response):
|
||||
super().__init__(
|
||||
fp=CurlCFFIResponseReader(response),
|
||||
headers=response.headers,
|
||||
url=response.url,
|
||||
status=response.status_code)
|
||||
|
||||
def read(self, amt=None):
|
||||
try:
|
||||
return self.fp.read(amt)
|
||||
except curl_cffi.requests.errors.RequestsError as e:
|
||||
if e.code == CurlECode.PARTIAL_FILE:
|
||||
content_length = int_or_none(e.response.headers.get('Content-Length'))
|
||||
raise IncompleteRead(
|
||||
partial=self.fp.bytes_read,
|
||||
expected=content_length - self.fp.bytes_read if content_length is not None else None,
|
||||
cause=e) from e
|
||||
raise TransportError(cause=e) from e
|
||||
|
||||
|
||||
@register_rh
|
||||
class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin):
|
||||
RH_NAME = 'curl_cffi'
|
||||
_SUPPORTED_URL_SCHEMES = ('http', 'https')
|
||||
_SUPPORTED_FEATURES = (Features.NO_PROXY, Features.ALL_PROXY)
|
||||
_SUPPORTED_PROXY_SCHEMES = ('http', 'https', 'socks4', 'socks4a', 'socks5', 'socks5h')
|
||||
_SUPPORTED_IMPERSONATE_TARGET_MAP = {
|
||||
ImpersonateTarget('chrome', '110', 'windows', '10'): curl_cffi.requests.BrowserType.chrome110,
|
||||
ImpersonateTarget('chrome', '107', 'windows', '10'): curl_cffi.requests.BrowserType.chrome107,
|
||||
ImpersonateTarget('chrome', '104', 'windows', '10'): curl_cffi.requests.BrowserType.chrome104,
|
||||
ImpersonateTarget('chrome', '101', 'windows', '10'): curl_cffi.requests.BrowserType.chrome101,
|
||||
ImpersonateTarget('chrome', '100', 'windows', '10'): curl_cffi.requests.BrowserType.chrome100,
|
||||
ImpersonateTarget('chrome', '99', 'windows', '10'): curl_cffi.requests.BrowserType.chrome99,
|
||||
ImpersonateTarget('edge', '101', 'windows', '10'): curl_cffi.requests.BrowserType.edge101,
|
||||
ImpersonateTarget('edge', '99', 'windows', '10'): curl_cffi.requests.BrowserType.edge99,
|
||||
ImpersonateTarget('safari', '15.5', 'macos', '12'): curl_cffi.requests.BrowserType.safari15_5,
|
||||
ImpersonateTarget('safari', '15.3', 'macos', '11'): curl_cffi.requests.BrowserType.safari15_3,
|
||||
ImpersonateTarget('chrome', '99', 'android', '12'): curl_cffi.requests.BrowserType.chrome99_android,
|
||||
}
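The keys above are fully specified targets; a request's (possibly partial) target is matched against them with ImpersonateTarget.__contains__ via _resolve_target(). A sketch of what that implies (assuming curl_cffi 0.5.10 is installed):

```python
from yt_dlp.networking._curlcffi import CurlCFFIRH
from yt_dlp.networking.impersonate import ImpersonateTarget

partial = ImpersonateTarget.from_str('chrome:windows')
# The first supported target that matches wins, here chrome-110:windows-10,
# because None fields on the requested target match anything.
assert any(partial in supported for supported in CurlCFFIRH.supported_targets)
```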
|
||||
|
||||
def _create_instance(self, cookiejar=None):
|
||||
return curl_cffi.requests.Session(cookies=cookiejar)
|
||||
|
||||
def _check_extensions(self, extensions):
|
||||
super()._check_extensions(extensions)
|
||||
extensions.pop('impersonate', None)
|
||||
extensions.pop('cookiejar', None)
|
||||
extensions.pop('timeout', None)
|
||||
|
||||
def _send(self, request: Request):
|
||||
max_redirects_exceeded = False
|
||||
session: curl_cffi.requests.Session = self._get_instance(
|
||||
cookiejar=self._get_cookiejar(request) if 'cookie' not in request.headers else None)
|
||||
|
||||
if self.verbose:
|
||||
session.curl.setopt(CurlOpt.VERBOSE, 1)
|
||||
|
||||
proxies = self._get_proxies(request)
|
||||
if 'no' in proxies:
|
||||
session.curl.setopt(CurlOpt.NOPROXY, proxies['no'])
|
||||
proxies.pop('no', None)
|
||||
|
||||
# curl doesn't support per protocol proxies, so we select the one that matches the request protocol
|
||||
proxy = select_proxy(request.url, proxies=proxies)
|
||||
if proxy:
|
||||
session.curl.setopt(CurlOpt.PROXY, proxy)
|
||||
scheme = urllib.parse.urlparse(request.url).scheme.lower()
|
||||
if scheme != 'http':
|
||||
# Enable HTTP CONNECT for HTTPS urls.
|
||||
# Don't use CONNECT for http for compatibility with urllib behaviour.
|
||||
# See: https://curl.se/libcurl/c/CURLOPT_HTTPPROXYTUNNEL.html
|
||||
session.curl.setopt(CurlOpt.HTTPPROXYTUNNEL, 1)
|
||||
|
||||
headers = self._get_impersonate_headers(request)
|
||||
|
||||
if self._client_cert:
|
||||
session.curl.setopt(CurlOpt.SSLCERT, self._client_cert['client_certificate'])
|
||||
client_certificate_key = self._client_cert.get('client_certificate_key')
|
||||
client_certificate_password = self._client_cert.get('client_certificate_password')
|
||||
if client_certificate_key:
|
||||
session.curl.setopt(CurlOpt.SSLKEY, client_certificate_key)
|
||||
if client_certificate_password:
|
||||
session.curl.setopt(CurlOpt.KEYPASSWD, client_certificate_password)
|
||||
|
||||
timeout = self._calculate_timeout(request)
|
||||
|
||||
# set CURLOPT_LOW_SPEED_LIMIT and CURLOPT_LOW_SPEED_TIME to act as a read timeout. [1]
|
||||
# curl_cffi does not currently do this. [2]
|
||||
# Note: CURLOPT_LOW_SPEED_TIME is in seconds, so we need to round up to the nearest second. [3]
|
||||
# [1] https://unix.stackexchange.com/a/305311
|
||||
# [2] https://github.com/yifeikong/curl_cffi/issues/156
|
||||
# [3] https://curl.se/libcurl/c/CURLOPT_LOW_SPEED_TIME.html
|
||||
session.curl.setopt(CurlOpt.LOW_SPEED_LIMIT, 1) # 1 byte per second
|
||||
session.curl.setopt(CurlOpt.LOW_SPEED_TIME, math.ceil(timeout))
|
||||
|
||||
try:
|
||||
curl_response = session.request(
|
||||
method=request.method,
|
||||
url=request.url,
|
||||
headers=headers,
|
||||
data=request.data,
|
||||
verify=self.verify,
|
||||
max_redirects=5,
|
||||
timeout=timeout,
|
||||
impersonate=self._SUPPORTED_IMPERSONATE_TARGET_MAP.get(
|
||||
self._get_request_target(request)),
|
||||
interface=self.source_address,
|
||||
stream=True
|
||||
)
|
||||
except curl_cffi.requests.errors.RequestsError as e:
|
||||
if e.code == CurlECode.PEER_FAILED_VERIFICATION:
|
||||
raise CertificateVerifyError(cause=e) from e
|
||||
|
||||
elif e.code == CurlECode.SSL_CONNECT_ERROR:
|
||||
raise SSLError(cause=e) from e
|
||||
|
||||
elif e.code == CurlECode.TOO_MANY_REDIRECTS:
|
||||
max_redirects_exceeded = True
|
||||
curl_response = e.response
|
||||
|
||||
elif e.code == CurlECode.PROXY:
|
||||
raise ProxyError(cause=e) from e
|
||||
else:
|
||||
raise TransportError(cause=e) from e
|
||||
|
||||
response = CurlCFFIResponseAdapter(curl_response)
|
||||
|
||||
if not 200 <= response.status < 300:
|
||||
raise HTTPError(response, redirect_loop=max_redirects_exceeded)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
@register_preference(CurlCFFIRH)
|
||||
def curl_cffi_preference(rh, request):
|
||||
return -100
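Read together with impersonate_preference in yt_dlp/networking/impersonate.py below (+1000 whenever an impersonate target is requested), the intent appears to be that curl_cffi ranks behind the other HTTP handlers for ordinary requests (-100) but moves to the front of the director's ordering as soon as impersonation is asked for.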
|
|
@ -307,8 +307,7 @@ def _send(self, request):
|
|||
|
||||
max_redirects_exceeded = False
|
||||
|
||||
session = self._get_instance(
|
||||
cookiejar=request.extensions.get('cookiejar') or self.cookiejar)
|
||||
session = self._get_instance(cookiejar=self._get_cookiejar(request))
|
||||
|
||||
try:
|
||||
requests_res = session.request(
|
||||
|
@ -316,8 +315,8 @@ def _send(self, request):
|
|||
url=request.url,
|
||||
data=request.data,
|
||||
headers=headers,
|
||||
timeout=float(request.extensions.get('timeout') or self.timeout),
|
||||
proxies=request.proxies or self.proxies,
|
||||
timeout=self._calculate_timeout(request),
|
||||
proxies=self._get_proxies(request),
|
||||
allow_redirects=True,
|
||||
stream=True
|
||||
)
|
||||
|
|
|
@ -389,11 +389,11 @@ def _send(self, request):
|
|||
)
|
||||
|
||||
opener = self._get_instance(
|
||||
proxies=request.proxies or self.proxies,
|
||||
cookiejar=request.extensions.get('cookiejar') or self.cookiejar
|
||||
proxies=self._get_proxies(request),
|
||||
cookiejar=self._get_cookiejar(request)
|
||||
)
|
||||
try:
|
||||
res = opener.open(urllib_req, timeout=float(request.extensions.get('timeout') or self.timeout))
|
||||
res = opener.open(urllib_req, timeout=self._calculate_timeout(request))
|
||||
except urllib.error.HTTPError as e:
|
||||
if isinstance(e.fp, (http.client.HTTPResponse, urllib.response.addinfourl)):
|
||||
# Prevent file object from being closed when urllib.error.HTTPError is destroyed.
|
||||
|
|
|
@ -112,10 +112,10 @@ def close(self):
|
|||
logging.getLogger(name).removeHandler(handler)
|
||||
|
||||
def _send(self, request):
|
||||
timeout = float(request.extensions.get('timeout') or self.timeout)
|
||||
timeout = self._calculate_timeout(request)
|
||||
headers = self._merge_headers(request.headers)
|
||||
if 'cookie' not in headers:
|
||||
cookiejar = request.extensions.get('cookiejar') or self.cookiejar
|
||||
cookiejar = self._get_cookiejar(request)
|
||||
cookie_header = cookiejar.get_cookie_header(request.url)
|
||||
if cookie_header:
|
||||
headers['cookie'] = cookie_header
|
||||
|
@ -125,7 +125,7 @@ def _send(self, request):
|
|||
'source_address': (self.source_address, 0) if self.source_address else None,
|
||||
'timeout': timeout
|
||||
}
|
||||
proxy = select_proxy(request.url, request.proxies or self.proxies or {})
|
||||
proxy = select_proxy(request.url, self._get_proxies(request))
|
||||
try:
|
||||
if proxy:
|
||||
socks_proxy_options = make_socks_proxy_opts(proxy)
|
||||
|
|
|
@@ -256,6 +256,15 @@ def _make_sslcontext(self):
    def _merge_headers(self, request_headers):
        return HTTPHeaderDict(self.headers, request_headers)

    def _calculate_timeout(self, request):
        return float(request.extensions.get('timeout') or self.timeout)

    def _get_cookiejar(self, request):
        return request.extensions.get('cookiejar') or self.cookiejar

    def _get_proxies(self, request):
        return (request.proxies or self.proxies).copy()
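These helpers centralise the "per-request extension overrides the handler default" pattern that the _requests, _urllib, _websockets and _curlcffi handlers in this commit all switch to. Roughly (values illustrative):

```python
from yt_dlp.networking import Request

req = Request('https://example.com', extensions={'timeout': 5})
# For a handler constructed with timeout=20 and proxies={'all': 'socks5://127.0.0.1:1080'}:
#   rh._calculate_timeout(req) -> 5.0 (falls back to 20.0 without the extension)
#   rh._get_proxies(req)       -> a copy of the mapping, so callers may mutate it
#                                 (the curl_cffi handler pops the 'no' key, for example)
```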
|
||||
|
||||
def _check_url_scheme(self, request: Request):
|
||||
scheme = urllib.parse.urlparse(request.url).scheme.lower()
|
||||
if self._SUPPORTED_URL_SCHEMES is not None and scheme not in self._SUPPORTED_URL_SCHEMES:
|
||||
|
@ -491,7 +500,7 @@ class Response(io.IOBase):
|
|||
|
||||
def __init__(
|
||||
self,
|
||||
fp: typing.IO,
|
||||
fp: io.IOBase,
|
||||
url: str,
|
||||
headers: Mapping[str, str],
|
||||
status: int = 200,
|
||||
|
|
141
yt_dlp/networking/impersonate.py
Normal file
|
@ -0,0 +1,141 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from abc import ABC
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from .common import RequestHandler, register_preference
|
||||
from .exceptions import UnsupportedRequest
|
||||
from ..compat.types import NoneType
|
||||
from ..utils import classproperty, join_nonempty
|
||||
from ..utils.networking import std_headers
|
||||
|
||||
|
||||
@dataclass(order=True, frozen=True)
class ImpersonateTarget:
    """
    A target for browser impersonation.

    Parameters:
        @param client: the client to impersonate
        @param version: the client version to impersonate
        @param os: the client OS to impersonate
        @param os_version: the client OS version to impersonate

    Note: None is used to indicate to match any.

    """
    client: str | None = None
    version: str | None = None
    os: str | None = None
    os_version: str | None = None

    def __post_init__(self):
        if self.version and not self.client:
            raise ValueError('client is required if version is set')
        if self.os_version and not self.os:
            raise ValueError('os is required if os_version is set')

    def __contains__(self, target: ImpersonateTarget):
        if not isinstance(target, ImpersonateTarget):
            return False
        return (
            (self.client is None or target.client is None or self.client == target.client)
            and (self.version is None or target.version is None or self.version == target.version)
            and (self.os is None or target.os is None or self.os == target.os)
            and (self.os_version is None or target.os_version is None or self.os_version == target.os_version)
        )

    def __str__(self):
        return f'{join_nonempty(self.client, self.version)}:{join_nonempty(self.os, self.os_version)}'.rstrip(':')

    @classmethod
    def from_str(cls, target: str):
        mobj = re.fullmatch(r'(?:(?P<client>[^:-]+)(?:-(?P<version>[^:-]+))?)?(?::(?:(?P<os>[^:-]+)(?:-(?P<os_version>[^:-]+))?)?)?', target)
        if not mobj:
            raise ValueError(f'Invalid impersonate target "{target}"')
        return cls(**mobj.groupdict())
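To make the string syntax and the wildcard matching concrete (a sketch based on the regex and __contains__ above):

```python
from yt_dlp.networking.impersonate import ImpersonateTarget

# CLIENT[-VERSION][:OS[-OS_VERSION]] -- every component is optional
assert ImpersonateTarget.from_str('chrome-110:windows-10') == ImpersonateTarget('chrome', '110', 'windows', '10')
assert ImpersonateTarget.from_str('safari:macos') == ImpersonateTarget('safari', None, 'macos')
assert ImpersonateTarget.from_str('') == ImpersonateTarget()  # matches any client

# None fields act as wildcards on either side of the comparison
assert ImpersonateTarget('chrome', '110', 'windows', '10') in ImpersonateTarget('chrome')
assert ImpersonateTarget('chrome') in ImpersonateTarget('chrome', '110', 'windows', '10')
assert ImpersonateTarget('edge') not in ImpersonateTarget('chrome', '110')
```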
|
||||
|
||||
|
||||
class ImpersonateRequestHandler(RequestHandler, ABC):
|
||||
"""
|
||||
Base class for request handlers that support browser impersonation.
|
||||
|
||||
This provides a method for checking the validity of the impersonate extension,
|
||||
which can be used in _check_extensions.
|
||||
|
||||
Impersonate targets consist of a client, version, os and os_ver.
|
||||
See the ImpersonateTarget class for more details.
|
||||
|
||||
The following may be defined:
|
||||
- `_SUPPORTED_IMPERSONATE_TARGET_MAP`: a dict mapping supported targets to custom object.
|
||||
Any Request with an impersonate target not in this list will raise an UnsupportedRequest.
|
||||
Set to None to disable this check.
|
||||
Note: Entries are in order of preference
|
||||
|
||||
Parameters:
|
||||
@param impersonate: the default impersonate target to use for requests.
|
||||
Set to None to disable impersonation.
|
||||
"""
|
||||
_SUPPORTED_IMPERSONATE_TARGET_MAP: dict[ImpersonateTarget, Any] = {}
|
||||
|
||||
def __init__(self, *, impersonate: ImpersonateTarget = None, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.impersonate = impersonate
|
||||
|
||||
def _check_impersonate_target(self, target: ImpersonateTarget):
|
||||
assert isinstance(target, (ImpersonateTarget, NoneType))
|
||||
if target is None or not self.supported_targets:
|
||||
return
|
||||
if not self.is_supported_target(target):
|
||||
raise UnsupportedRequest(f'Unsupported impersonate target: {target}')
|
||||
|
||||
def _check_extensions(self, extensions):
|
||||
super()._check_extensions(extensions)
|
||||
if 'impersonate' in extensions:
|
||||
self._check_impersonate_target(extensions.get('impersonate'))
|
||||
|
||||
def _validate(self, request):
|
||||
super()._validate(request)
|
||||
self._check_impersonate_target(self.impersonate)
|
||||
|
||||
def _resolve_target(self, target: ImpersonateTarget | None):
|
||||
"""Resolve a target to a supported target."""
|
||||
if target is None:
|
||||
return
|
||||
for supported_target in self.supported_targets:
|
||||
if target in supported_target:
|
||||
if self.verbose:
|
||||
self._logger.stdout(
|
||||
f'{self.RH_NAME}: resolved impersonate target {target} to {supported_target}')
|
||||
return supported_target
|
||||
|
||||
@classproperty
|
||||
def supported_targets(self) -> tuple[ImpersonateTarget, ...]:
|
||||
return tuple(self._SUPPORTED_IMPERSONATE_TARGET_MAP.keys())
|
||||
|
||||
def is_supported_target(self, target: ImpersonateTarget):
|
||||
assert isinstance(target, ImpersonateTarget)
|
||||
return self._resolve_target(target) is not None
|
||||
|
||||
def _get_request_target(self, request):
|
||||
"""Get the requested target for the request"""
|
||||
return self._resolve_target(request.extensions.get('impersonate') or self.impersonate)
|
||||
|
||||
def _get_impersonate_headers(self, request):
|
||||
headers = self._merge_headers(request.headers)
|
||||
if self._get_request_target(request) is not None:
|
||||
# remove all headers present in std_headers
|
||||
# todo: change this to not depend on std_headers
|
||||
for k, v in std_headers.items():
|
||||
if headers.get(k) == v:
|
||||
headers.pop(k)
|
||||
return headers
|
||||
|
||||
|
||||
@register_preference(ImpersonateRequestHandler)
|
||||
def impersonate_preference(rh, request):
|
||||
if request.extensions.get('impersonate') or rh.impersonate:
|
||||
return 1000
|
||||
return 0
|
|
@ -515,6 +515,18 @@ def _alias_callback(option, opt_str, value, parser, opts, nargs):
|
|||
metavar='IP', dest='source_address', default=None,
|
||||
help='Client-side IP address to bind to',
|
||||
)
|
||||
network.add_option(
|
||||
'--impersonate',
|
||||
metavar='CLIENT[:OS]', dest='impersonate', default=None,
|
||||
help=(
|
||||
'Client to impersonate for requests. E.g. chrome, chrome-110, chrome:windows-10. '
|
||||
'Pass --impersonate="" to impersonate any client.'),
|
||||
)
|
||||
network.add_option(
|
||||
'--list-impersonate-targets',
|
||||
dest='list_impersonate_targets', default=False, action='store_true',
|
||||
help='List available clients to impersonate.',
|
||||
)
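The CLIENT[:OS] string given here is lower-cased and parsed with ImpersonateTarget.from_str() in yt_dlp/__init__.py above, so for example --impersonate chrome-110:windows-10, --impersonate safari:macos and --impersonate "" (any client) are all accepted; a target that no installed handler can provide is rejected with the YoutubeDLError added in YoutubeDL.__init__.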
|
||||
network.add_option(
|
||||
'-4', '--force-ipv4',
|
||||
action='store_const', const='0.0.0.0', dest='source_address',
|