mirror of https://github.com/yt-dlp/yt-dlp.git
synced 2024-11-24 16:21:24 +01:00
Compare commits
2 Commits
e3bd4a1327...c57f34ec5f
Author | SHA1 | Date
---|---|---
 | c57f34ec5f |
 | 3b79c01491 |
@@ -26,6 +26,7 @@ import zlib
 from email.message import Message
 from http.cookiejar import CookieJar
 
+from test.conftest import validate_and_send
 from test.helper import FakeYDL, http_server_port
 from yt_dlp.cookies import YoutubeDLCookieJar
 from yt_dlp.dependencies import brotli, requests, urllib3
@@ -52,8 +53,6 @@ from yt_dlp.networking.exceptions import (
 from yt_dlp.utils._utils import _YDLLogger as FakeLogger
 from yt_dlp.utils.networking import HTTPHeaderDict, std_headers
 
-from test.conftest import validate_and_send
-
 TEST_DIR = os.path.dirname(os.path.abspath(__file__))
 
 
@@ -1,5 +1,6 @@
 import io
 import math
+import urllib.parse
 
 from ._helper import InstanceStoreMixin, select_proxy
 from .common import (
@@ -124,14 +125,13 @@ class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin):
 
     def _send(self, request: Request):
         max_redirects_exceeded = False
-        cookiejar = request.extensions.get('cookiejar') or self.cookiejar
         session: curl_cffi.requests.Session = self._get_instance(
-            cookiejar=cookiejar if 'cookie' not in request.headers else None)
+            cookiejar=self._get_cookiejar(request) if 'cookie' not in request.headers else None)
 
         if self.verbose:
             session.curl.setopt(CurlOpt.VERBOSE, 1)
 
-        proxies = (request.proxies or self.proxies).copy()
+        proxies = self._get_proxies(request)
         if 'no' in proxies:
             session.curl.setopt(CurlOpt.NOPROXY, proxies['no'].encode())
             proxies.pop('no', None)
@@ -140,8 +140,11 @@ class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin):
         proxy = select_proxy(request.url, proxies=proxies)
         if proxy:
             session.curl.setopt(CurlOpt.PROXY, proxy.encode())
-            if proxy.startswith('https'):
-                # enable HTTP CONNECT for https urls
+            scheme = urllib.parse.urlparse(request.url).scheme.lower()
+            if scheme != 'http':
+                # Enable HTTP CONNECT for HTTPS urls.
+                # Don't use CONNECT for http for compatibility with urllib behaviour.
+                # See: https://curl.se/libcurl/c/CURLOPT_HTTPPROXYTUNNEL.html
                 session.curl.setopt(CurlOpt.HTTPPROXYTUNNEL, 1)
 
         headers = self._get_impersonate_headers(request)
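The hunk above changes when the curl_cffi handler enables proxy tunnelling: instead of keying off the proxy URL, it now keys off the request URL's scheme, so anything other than plain http goes through HTTP CONNECT. A minimal standalone sketch of that check (plain Python; the example URLs are illustrative, not from the diff):

```python
# Sketch of the scheme check introduced above: non-http request URLs get
# CURLOPT_HTTPPROXYTUNNEL, plain http does not (for parity with urllib).
import urllib.parse

for url in ('http://example.com/page', 'https://example.com/page'):
    scheme = urllib.parse.urlparse(url).scheme.lower()
    tunnel = scheme != 'http'
    print(f'{url} -> HTTPPROXYTUNNEL={int(tunnel)}')
```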
@@ -155,7 +158,7 @@ class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin):
         if client_certificate_password:
             session.curl.setopt(CurlOpt.KEYPASSWD, client_certificate_password.encode())
 
-        timeout = float(request.extensions.get('timeout') or self.timeout)
+        timeout = self._calculate_timeout(request)
 
         # set CURLOPT_LOW_SPEED_LIMIT and CURLOPT_LOW_SPEED_TIME to act as a read timeout. [1]
         # curl_cffi does not currently do this. [2]
@@ -297,8 +297,7 @@ class RequestsRH(RequestHandler, InstanceStoreMixin):
 
         max_redirects_exceeded = False
 
-        session = self._get_instance(
-            cookiejar=request.extensions.get('cookiejar') or self.cookiejar)
+        session = self._get_instance(cookiejar=self._get_cookiejar(request))
 
         try:
             requests_res = session.request(
@@ -306,8 +305,8 @@ class RequestsRH(RequestHandler, InstanceStoreMixin):
                 url=request.url,
                 data=request.data,
                 headers=headers,
-                timeout=float(request.extensions.get('timeout') or self.timeout),
-                proxies=request.proxies or self.proxies,
+                timeout=self._calculate_timeout(request),
+                proxies=self._get_proxies(request),
                 allow_redirects=True,
                 stream=True
             )
@@ -389,11 +389,11 @@ class UrllibRH(RequestHandler, InstanceStoreMixin):
         )
 
         opener = self._get_instance(
-            proxies=request.proxies or self.proxies,
-            cookiejar=request.extensions.get('cookiejar') or self.cookiejar
+            proxies=self._get_proxies(request),
+            cookiejar=self._get_cookiejar(request)
         )
         try:
-            res = opener.open(urllib_req, timeout=float(request.extensions.get('timeout') or self.timeout))
+            res = opener.open(urllib_req, timeout=self._calculate_timeout(request))
         except urllib.error.HTTPError as e:
             if isinstance(e.fp, (http.client.HTTPResponse, urllib.response.addinfourl)):
                 # Prevent file object from being closed when urllib.error.HTTPError is destroyed.
@@ -5,20 +5,26 @@ import logging
 import ssl
 import sys
 
-from ._helper import create_connection, select_proxy, make_socks_proxy_opts, create_socks_proxy_socket
-from .common import Response, register_rh, Features
+from ._helper import (
+    create_connection,
+    create_socks_proxy_socket,
+    make_socks_proxy_opts,
+    select_proxy,
+)
+from .common import Features, Response, register_rh
 from .exceptions import (
     CertificateVerifyError,
     HTTPError,
+    ProxyError,
     RequestError,
     SSLError,
-    TransportError, ProxyError,
+    TransportError,
 )
 from .websocket import WebSocketRequestHandler, WebSocketResponse
 from ..compat import functools
 from ..dependencies import websockets
-from ..utils import int_or_none
 from ..socks import ProxyError as SocksProxyError
+from ..utils import int_or_none
 
 if not websockets:
     raise ImportError('websockets is not installed')
@@ -98,10 +104,10 @@ class WebsocketsRH(WebSocketRequestHandler):
         extensions.pop('cookiejar', None)
 
     def _send(self, request):
-        timeout = float(request.extensions.get('timeout') or self.timeout)
+        timeout = self._calculate_timeout(request)
         headers = self._merge_headers(request.headers)
         if 'cookie' not in headers:
-            cookiejar = request.extensions.get('cookiejar') or self.cookiejar
+            cookiejar = self._get_cookiejar(request)
             cookie_header = cookiejar.get_cookie_header(request.url)
             if cookie_header:
                 headers['cookie'] = cookie_header
@@ -111,7 +117,7 @@ class WebsocketsRH(WebSocketRequestHandler):
             'source_address': (self.source_address, 0) if self.source_address else None,
             'timeout': timeout
         }
-        proxy = select_proxy(request.url, request.proxies or self.proxies or {})
+        proxy = select_proxy(request.url, self._get_proxies(request))
         try:
             if proxy:
                 socks_proxy_options = make_socks_proxy_opts(proxy)
@@ -272,6 +272,15 @@ class RequestHandler(abc.ABC):
     def _merge_headers(self, request_headers):
         return HTTPHeaderDict(self.headers, request_headers)
 
+    def _calculate_timeout(self, request):
+        return float(request.extensions.get('timeout') or self.timeout)
+
+    def _get_cookiejar(self, request):
+        return request.extensions.get('cookiejar') or self.cookiejar
+
+    def _get_proxies(self, request):
+        return (request.proxies or self.proxies).copy()
+
     def _check_url_scheme(self, request: Request):
         scheme = urllib.parse.urlparse(request.url).scheme.lower()
         if self._SUPPORTED_URL_SCHEMES is not None and scheme not in self._SUPPORTED_URL_SCHEMES:
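The three helpers added to `RequestHandler` above centralise the per-request fallbacks (value from `request.extensions`/`request.proxies` first, handler-level default second) that the curl_cffi, requests, urllib, and websockets handlers previously duplicated inline. A self-contained sketch of that fallback behaviour, using a stand-in object in place of a real `Request` (class and variable names here are illustrative only, not yt-dlp API):

```python
# Standalone sketch, not yt-dlp code: reproduces the fallback logic of the
# _calculate_timeout/_get_cookiejar/_get_proxies helpers added above.
from types import SimpleNamespace


class HandlerSketch:
    def __init__(self, timeout=20.0, cookiejar=None, proxies=None):
        self.timeout = timeout      # handler-level defaults
        self.cookiejar = cookiejar
        self.proxies = proxies or {}

    def _calculate_timeout(self, request):
        return float(request.extensions.get('timeout') or self.timeout)

    def _get_cookiejar(self, request):
        return request.extensions.get('cookiejar') or self.cookiejar

    def _get_proxies(self, request):
        return (request.proxies or self.proxies).copy()


handler = HandlerSketch(timeout=20.0, proxies={'https': 'http://proxy:3128'})
request = SimpleNamespace(extensions={'timeout': 5}, proxies=None)

# Per-request values win; otherwise the handler defaults apply.
assert handler._calculate_timeout(request) == 5.0
assert handler._get_proxies(request) == {'https': 'http://proxy:3128'}
```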
@@ -1,20 +1,18 @@
 from __future__ import annotations
 
 from abc import ABC
-from typing import Optional, Any
+from typing import Any, Optional, Tuple
 
 from .common import RequestHandler, register_preference
 from .exceptions import UnsupportedRequest
 from ..compat.types import NoneType
 from ..utils.networking import std_headers
 
-ImpersonateTarget = tuple[Optional[str], Optional[str], Optional[str], Optional[str]]
+ImpersonateTarget = Tuple[str, Optional[str], Optional[str], Optional[str]]
 
 
 def parse_impersonate_target(target: str) -> ImpersonateTarget:
     client = version = os = os_vers = None
-    if not target:
-        return client, version, os, os_vers
     parts = target.split(':')
     if len(parts):
         client = parts[0]
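The impersonate hunk above only shows the start of `parse_impersonate_target`; the `ImpersonateTarget` alias is a 4-tuple of client, version, os, and os version. The sketch below mirrors that shape and guesses at how the remaining colon-separated parts map to the other fields, which is an assumption since that code is not part of this diff:

```python
# Standalone sketch; only the first-field handling is taken from the diff,
# the rest of the parsing is an assumed continuation for illustration.
from typing import Optional, Tuple

ImpersonateTarget = Tuple[str, Optional[str], Optional[str], Optional[str]]


def parse_impersonate_target_sketch(target: str) -> ImpersonateTarget:
    client = version = os = os_vers = None
    parts = target.split(':')
    if len(parts):
        client = parts[0]
    if len(parts) > 1:
        version = parts[1] or None
    if len(parts) > 2:
        os = parts[2] or None
    if len(parts) > 3:
        os_vers = parts[3] or None
    return client, version, os, os_vers


assert parse_impersonate_target_sketch('chrome:110:windows:10') == ('chrome', '110', 'windows', '10')
assert parse_impersonate_target_sketch('chrome') == ('chrome', None, None, None)
```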
@@ -2,7 +2,7 @@ from __future__ import annotations
 
 import abc
 
-from .common import Response, RequestHandler
+from .common import RequestHandler, Response
 
 
 class WebSocketResponse(Response):