Compare commits


No commits in common. "5e1200ac69b1bccb129099c37820f0ed0a2f538a" and "77fd3567f459087d60acaf35df1a4bbbcacc1af4" have entirely different histories.

2 changed files with 27 additions and 16 deletions


@@ -525,10 +525,10 @@ class TestHTTPRequestHandler(TestRequestHandlerBase):
             validate_and_send(
                 rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_1'))
 
-        with handler(timeout=0.1) as rh:
+        with handler(timeout=0.01) as rh:
             with pytest.raises(TransportError):
                 validate_and_send(
-                    rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_5'))
+                    rh, Request(f'http://127.0.0.1:{self.http_port}/timeout_1'))
 
             # Per request timeout, should override handler timeout
             validate_and_send(
@@ -1372,10 +1372,15 @@ class TestYoutubeDLNetworking:
         ('', {'all': '__noproxy__'}),
         (None, {'http': 'http://127.0.0.1:8081', 'https': 'http://127.0.0.1:8081'})  # env, set https
     ])
-    def test_proxy(self, proxy, expected, monkeypatch):
-        monkeypatch.setenv('HTTP_PROXY', 'http://127.0.0.1:8081')
-        with FakeYDL({'proxy': proxy}) as ydl:
-            assert ydl.proxies == expected
+    def test_proxy(self, proxy, expected):
+        old_http_proxy = os.environ.get('HTTP_PROXY')
+        try:
+            os.environ['HTTP_PROXY'] = 'http://127.0.0.1:8081'  # ensure that provided proxies override env
+            with FakeYDL({'proxy': proxy}) as ydl:
+                assert ydl.proxies == expected
+        finally:
+            if old_http_proxy:
+                os.environ['HTTP_PROXY'] = old_http_proxy
 
     def test_compat_request(self):
         with FakeRHYDL() as ydl:
@@ -1503,17 +1508,23 @@ class TestYoutubeDLNetworking:
         ('http', 'socks4://example.com', 'socks4://example.com'),
         ('unrelated', '/bad/proxy', '/bad/proxy'),  # clean_proxies should ignore bad proxies
     ])
-    def test_clean_proxy(self, proxy_key, proxy_url, expected, monkeypatch):
+    def test_clean_proxy(self, proxy_key, proxy_url, expected):
         # proxies should be cleaned in urlopen()
         with FakeRHYDL() as ydl:
             req = ydl.urlopen(Request('test://', proxies={proxy_key: proxy_url})).request
             assert req.proxies[proxy_key] == expected
 
         # and should also be cleaned when building the handler
-        monkeypatch.setenv(f'{proxy_key.upper()}_PROXY', proxy_url)
-        with FakeYDL() as ydl:
-            rh = self.build_handler(ydl)
-            assert rh.proxies[proxy_key] == expected
+        env_key = f'{proxy_key.upper()}_PROXY'
+        old_env_proxy = os.environ.get(env_key)
+        try:
+            os.environ[env_key] = proxy_url  # ensure that provided proxies override env
+            with FakeYDL() as ydl:
+                rh = self.build_handler(ydl)
+                assert rh.proxies[proxy_key] == expected
+        finally:
+            if old_env_proxy:
+                os.environ[env_key] = old_env_proxy
 
     def test_clean_proxy_header(self):
         with FakeRHYDL() as ydl:
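
Both test changes above follow the same save/set/restore pattern for environment variables. For reference only (this is not part of the diff, and the helper name temp_env is made up), that pattern can be wrapped in a small context manager; unlike the try/finally blocks above, it also removes the variable again when it was previously unset:

import contextlib
import os


@contextlib.contextmanager
def temp_env(key, value):
    # Temporarily set an environment variable, restoring or removing it afterwards
    old = os.environ.get(key)
    os.environ[key] = value
    try:
        yield
    finally:
        if old is None:
            os.environ.pop(key, None)  # was unset before, so unset it again
        else:
            os.environ[key] = old

With such a helper, test_proxy could simply wrap its body in with temp_env('HTTP_PROXY', 'http://127.0.0.1:8081'):, which is roughly what pytest's monkeypatch.setenv did in the removed lines.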


@@ -138,13 +138,13 @@ class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin):
 
         proxies = self._get_proxies(request)
         if 'no' in proxies:
-            session.curl.setopt(CurlOpt.NOPROXY, proxies['no'])
+            session.curl.setopt(CurlOpt.NOPROXY, proxies['no'].encode())
             proxies.pop('no', None)
 
         # curl doesn't support per protocol proxies, so we select the one that matches the request protocol
         proxy = select_proxy(request.url, proxies=proxies)
         if proxy:
-            session.curl.setopt(CurlOpt.PROXY, proxy)
+            session.curl.setopt(CurlOpt.PROXY, proxy.encode())
             scheme = urllib.parse.urlparse(request.url).scheme.lower()
             if scheme != 'http':
                 # Enable HTTP CONNECT for HTTPS urls.
@@ -155,13 +155,13 @@ class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin):
 
         headers = self._get_impersonate_headers(request)
 
         if self._client_cert:
-            session.curl.setopt(CurlOpt.SSLCERT, self._client_cert['client_certificate'])
+            session.curl.setopt(CurlOpt.SSLCERT, self._client_cert['client_certificate'].encode())
             client_certificate_key = self._client_cert.get('client_certificate_key')
             client_certificate_password = self._client_cert.get('client_certificate_password')
             if client_certificate_key:
-                session.curl.setopt(CurlOpt.SSLKEY, client_certificate_key)
+                session.curl.setopt(CurlOpt.SSLKEY, client_certificate_key.encode())
             if client_certificate_password:
-                session.curl.setopt(CurlOpt.KEYPASSWD, client_certificate_password)
+                session.curl.setopt(CurlOpt.KEYPASSWD, client_certificate_password.encode())
 
         timeout = self._calculate_timeout(request)
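
The CurlCFFIRH hunks above change every string option passed to session.curl.setopt() to bytes. A rough illustration of the same pattern against curl_cffi's low-level Curl API (a sketch only: the proxy and no-proxy values are placeholders, and it assumes the curl_cffi version in use expects bytes for string options, which is why the diff adds .encode()):

from curl_cffi import Curl, CurlOpt

curl = Curl()
# String options are handed to libcurl as bytes, mirroring the .encode() calls in the diff
curl.setopt(CurlOpt.PROXY, 'http://127.0.0.1:8081'.encode())  # placeholder proxy URL
curl.setopt(CurlOpt.NOPROXY, 'localhost,127.0.0.1'.encode())  # placeholder no-proxy list
curl.setopt(CurlOpt.HTTPPROXYTUNNEL, 1)  # integer options are passed as-is
curl.close()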