Mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2024-11-24 08:11:31 +01:00)

Compare commits: c57f34ec5f ... e3bd4a1327

No commits in common. "c57f34ec5f4cd5583da11e9dc03783f94cd9885e" and "e3bd4a1327e61fed4bacb2fcdc2bfa51b91343f7" have entirely different histories.
@@ -26,7 +26,6 @@ import zlib
 from email.message import Message
 from http.cookiejar import CookieJar
 
-from test.conftest import validate_and_send
 from test.helper import FakeYDL, http_server_port
 from yt_dlp.cookies import YoutubeDLCookieJar
 from yt_dlp.dependencies import brotli, requests, urllib3
@@ -53,6 +52,8 @@ from yt_dlp.networking.exceptions import (
 from yt_dlp.utils._utils import _YDLLogger as FakeLogger
 from yt_dlp.utils.networking import HTTPHeaderDict, std_headers
 
+from test.conftest import validate_and_send
+
 TEST_DIR = os.path.dirname(os.path.abspath(__file__))
 
 
@@ -1,6 +1,5 @@
 import io
 import math
-import urllib.parse
 
 from ._helper import InstanceStoreMixin, select_proxy
 from .common import (
@@ -125,13 +124,14 @@ class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin):
 
     def _send(self, request: Request):
         max_redirects_exceeded = False
+        cookiejar = request.extensions.get('cookiejar') or self.cookiejar
         session: curl_cffi.requests.Session = self._get_instance(
-            cookiejar=self._get_cookiejar(request) if 'cookie' not in request.headers else None)
+            cookiejar=cookiejar if 'cookie' not in request.headers else None)
 
         if self.verbose:
             session.curl.setopt(CurlOpt.VERBOSE, 1)
 
-        proxies = self._get_proxies(request)
+        proxies = (request.proxies or self.proxies).copy()
         if 'no' in proxies:
             session.curl.setopt(CurlOpt.NOPROXY, proxies['no'].encode())
             proxies.pop('no', None)
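Note on the `cookiejar=... if 'cookie' not in request.headers else None` argument above: both sides attach a cookie jar to the session only when the caller has not supplied a Cookie header of their own, so an explicit header is sent untouched. A minimal illustrative sketch of that condition; the plain dict and object below are stand-ins for the real header and cookiejar types:

request_headers = {'cookie': 'session=abc123'}   # caller-supplied header (example value)
handler_cookiejar = object()                      # stand-in for a YoutubeDLCookieJar

# same condition as in the hunk: only use the jar when no explicit header exists
cookiejar_for_session = handler_cookiejar if 'cookie' not in request_headers else None
print(cookiejar_for_session)  # None -> the explicit 'cookie' header is sent as-is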
@@ -140,11 +140,8 @@ class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin):
         proxy = select_proxy(request.url, proxies=proxies)
         if proxy:
             session.curl.setopt(CurlOpt.PROXY, proxy.encode())
-            scheme = urllib.parse.urlparse(request.url).scheme.lower()
-            if scheme != 'http':
-                # Enable HTTP CONNECT for HTTPS urls.
-                # Don't use CONNECT for http for compatibility with urllib behaviour.
-                # See: https://curl.se/libcurl/c/CURLOPT_HTTPPROXYTUNNEL.html
+            if proxy.startswith('https'):
+                # enable HTTP CONNECT for https urls
                 session.curl.setopt(CurlOpt.HTTPPROXYTUNNEL, 1)
 
         headers = self._get_impersonate_headers(request)
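The two sides of this hunk decide differently when to enable CURLOPT_HTTPPROXYTUNNEL: one keys off the scheme of the request URL (tunnel for anything that is not plain http), the other off the proxy URL itself (tunnel only for https proxies). A small sketch of the two checks with illustrative values, not taken from the handler:

import urllib.parse

request_url = 'https://example.com/video'   # example request
proxy = 'http://127.0.0.1:8080'             # example proxy

# request-scheme based check (left-hand side of the hunk)
tunnel_by_request_scheme = urllib.parse.urlparse(request_url).scheme.lower() != 'http'

# proxy-scheme based check (right-hand side of the hunk)
tunnel_by_proxy_scheme = proxy.startswith('https')

print(tunnel_by_request_scheme, tunnel_by_proxy_scheme)  # True False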
@@ -158,7 +155,7 @@ class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin):
         if client_certificate_password:
             session.curl.setopt(CurlOpt.KEYPASSWD, client_certificate_password.encode())
 
-        timeout = self._calculate_timeout(request)
+        timeout = float(request.extensions.get('timeout') or self.timeout)
 
         # set CURLOPT_LOW_SPEED_LIMIT and CURLOPT_LOW_SPEED_TIME to act as a read timeout. [1]
         # curl_cffi does not currently do this. [2]
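The trailing comment refers to emulating a read timeout with libcurl's low-speed options: abort the transfer when fewer than a minimum number of bytes per second arrives for a given time. A hedged sketch of what that could look like with curl_cffi; the option names are real libcurl options, but the session setup and values here are illustrative, not the handler's actual code:

import math

from curl_cffi import requests as curl_requests
from curl_cffi.const import CurlOpt

session = curl_requests.Session()
timeout = 20.0  # seconds; stands in for the resolved per-request timeout

# abort if fewer than 1 byte/second is received for `timeout` seconds,
# which behaves like a read timeout for an otherwise stalled transfer
session.curl.setopt(CurlOpt.LOW_SPEED_LIMIT, 1)
session.curl.setopt(CurlOpt.LOW_SPEED_TIME, math.ceil(timeout))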
@@ -297,7 +297,8 @@ class RequestsRH(RequestHandler, InstanceStoreMixin):
 
         max_redirects_exceeded = False
 
-        session = self._get_instance(cookiejar=self._get_cookiejar(request))
+        session = self._get_instance(
+            cookiejar=request.extensions.get('cookiejar') or self.cookiejar)
 
         try:
             requests_res = session.request(
@@ -305,8 +306,8 @@ class RequestsRH(RequestHandler, InstanceStoreMixin):
                 url=request.url,
                 data=request.data,
                 headers=headers,
-                timeout=self._calculate_timeout(request),
-                proxies=self._get_proxies(request),
+                timeout=float(request.extensions.get('timeout') or self.timeout),
+                proxies=request.proxies or self.proxies,
                 allow_redirects=True,
                 stream=True
             )
@@ -389,11 +389,11 @@ class UrllibRH(RequestHandler, InstanceStoreMixin):
         )
 
         opener = self._get_instance(
-            proxies=self._get_proxies(request),
-            cookiejar=self._get_cookiejar(request)
+            proxies=request.proxies or self.proxies,
+            cookiejar=request.extensions.get('cookiejar') or self.cookiejar
         )
         try:
-            res = opener.open(urllib_req, timeout=self._calculate_timeout(request))
+            res = opener.open(urllib_req, timeout=float(request.extensions.get('timeout') or self.timeout))
         except urllib.error.HTTPError as e:
             if isinstance(e.fp, (http.client.HTTPResponse, urllib.response.addinfourl)):
                 # Prevent file object from being closed when urllib.error.HTTPError is destroyed.
@@ -5,26 +5,20 @@ import logging
 import ssl
 import sys
 
-from ._helper import (
-    create_connection,
-    create_socks_proxy_socket,
-    make_socks_proxy_opts,
-    select_proxy,
-)
-from .common import Features, Response, register_rh
+from ._helper import create_connection, select_proxy, make_socks_proxy_opts, create_socks_proxy_socket
+from .common import Response, register_rh, Features
 from .exceptions import (
     CertificateVerifyError,
     HTTPError,
-    ProxyError,
     RequestError,
     SSLError,
-    TransportError,
+    TransportError, ProxyError,
 )
 from .websocket import WebSocketRequestHandler, WebSocketResponse
 from ..compat import functools
 from ..dependencies import websockets
-from ..socks import ProxyError as SocksProxyError
 from ..utils import int_or_none
+from ..socks import ProxyError as SocksProxyError
 
 if not websockets:
     raise ImportError('websockets is not installed')
@@ -104,10 +98,10 @@ class WebsocketsRH(WebSocketRequestHandler):
         extensions.pop('cookiejar', None)
 
     def _send(self, request):
-        timeout = self._calculate_timeout(request)
+        timeout = float(request.extensions.get('timeout') or self.timeout)
         headers = self._merge_headers(request.headers)
         if 'cookie' not in headers:
-            cookiejar = self._get_cookiejar(request)
+            cookiejar = request.extensions.get('cookiejar') or self.cookiejar
             cookie_header = cookiejar.get_cookie_header(request.url)
             if cookie_header:
                 headers['cookie'] = cookie_header
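For context on the `get_cookie_header` call above: YoutubeDLCookieJar can render the cookies matching a URL into a single Cookie header value, which the handler only injects when the caller has not already set one. A hedged, self-contained sketch with an illustrative cookie; the stdlib Cookie constructor arguments are spelled out only to build a throwaway jar and are not taken from the repo:

from http.cookiejar import Cookie

from yt_dlp.cookies import YoutubeDLCookieJar

cookiejar = YoutubeDLCookieJar()
cookiejar.set_cookie(Cookie(
    0, 'session', 'abc123', None, False, 'example.com', True, False,
    '/', True, False, None, False, None, None, {}))

headers = {}
if 'cookie' not in headers:
    cookie_header = cookiejar.get_cookie_header('https://example.com/ws')
    if cookie_header:
        headers['cookie'] = cookie_header

print(headers)  # expected: {'cookie': 'session=abc123'}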
@@ -117,7 +111,7 @@ class WebsocketsRH(WebSocketRequestHandler):
             'source_address': (self.source_address, 0) if self.source_address else None,
             'timeout': timeout
         }
-        proxy = select_proxy(request.url, self._get_proxies(request))
+        proxy = select_proxy(request.url, request.proxies or self.proxies or {})
         try:
             if proxy:
                 socks_proxy_options = make_socks_proxy_opts(proxy)
@@ -272,15 +272,6 @@ class RequestHandler(abc.ABC):
     def _merge_headers(self, request_headers):
         return HTTPHeaderDict(self.headers, request_headers)
 
-    def _calculate_timeout(self, request):
-        return float(request.extensions.get('timeout') or self.timeout)
-
-    def _get_cookiejar(self, request):
-        return request.extensions.get('cookiejar') or self.cookiejar
-
-    def _get_proxies(self, request):
-        return (request.proxies or self.proxies).copy()
-
    def _check_url_scheme(self, request: Request):
         scheme = urllib.parse.urlparse(request.url).scheme.lower()
         if self._SUPPORTED_URL_SCHEMES is not None and scheme not in self._SUPPORTED_URL_SCHEMES:
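This hunk is the core of the comparison: one side defines `_calculate_timeout`, `_get_cookiejar` and `_get_proxies` on the base RequestHandler and each handler's `_send` calls them, while the other side repeats the "per-request value, else handler default" expression inline in every handler (as seen in the hunks above). A minimal, self-contained sketch of that resolution pattern; the Request and Handler classes below are simplified stand-ins, not the repo's classes:

class Request:
    def __init__(self, url, proxies=None, extensions=None):
        self.url = url
        self.proxies = proxies              # per-request proxy mapping, optional
        self.extensions = extensions or {}  # per-request overrides (timeout, cookiejar, ...)

class Handler:
    def __init__(self, timeout=20.0, proxies=None, cookiejar=None):
        self.timeout = timeout              # handler-level defaults
        self.proxies = proxies or {}
        self.cookiejar = cookiejar

    # helpers mirroring the ones added/removed in this hunk
    def _calculate_timeout(self, request):
        return float(request.extensions.get('timeout') or self.timeout)

    def _get_cookiejar(self, request):
        return request.extensions.get('cookiejar') or self.cookiejar

    def _get_proxies(self, request):
        return (request.proxies or self.proxies).copy()

handler = Handler(timeout=20.0, proxies={'all': 'http://127.0.0.1:8080'})
request = Request('https://example.com', extensions={'timeout': 5})
print(handler._calculate_timeout(request))  # 5.0 -> the per-request value wins
print(handler._get_proxies(request))        # {'all': 'http://127.0.0.1:8080'} -> handler default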
@@ -1,18 +1,20 @@
 from __future__ import annotations
 
 from abc import ABC
-from typing import Any, Optional, Tuple
+from typing import Optional, Any
 
 from .common import RequestHandler, register_preference
 from .exceptions import UnsupportedRequest
 from ..compat.types import NoneType
 from ..utils.networking import std_headers
 
-ImpersonateTarget = Tuple[str, Optional[str], Optional[str], Optional[str]]
+
+ImpersonateTarget = tuple[Optional[str], Optional[str], Optional[str], Optional[str]]
 
 
 def parse_impersonate_target(target: str) -> ImpersonateTarget:
     client = version = os = os_vers = None
     if not target:
         return client, version, os, os_vers
     parts = target.split(':')
     if len(parts):
         client = parts[0]
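The hunk cuts off inside `parse_impersonate_target`, so only the first `client` field is visible. A hedged sketch of how a complete parse of a `client:version:os:os_version` string could behave, following the tuple layout and the `split(':')` shown above; the handling of the remaining parts is an assumption, not the repo's code:

from typing import Optional, Tuple

ImpersonateTarget = Tuple[Optional[str], Optional[str], Optional[str], Optional[str]]

def parse_impersonate_target(target: str) -> ImpersonateTarget:
    # (client, version, os, os_version), each part optional
    client = version = os = os_vers = None
    if not target:
        return client, version, os, os_vers
    parts = target.split(':')
    if len(parts):
        client = parts[0]
    if len(parts) > 1:
        version = parts[1]
    if len(parts) > 2:
        os = parts[2]
    if len(parts) > 3:
        os_vers = parts[3]
    return client, version, os, os_vers

print(parse_impersonate_target('chrome:110:windows:10'))  # ('chrome', '110', 'windows', '10')
print(parse_impersonate_target('chrome'))                 # ('chrome', None, None, None)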
@@ -2,7 +2,7 @@ from __future__ import annotations
 
 import abc
 
-from .common import RequestHandler, Response
+from .common import Response, RequestHandler
 
 
 class WebSocketResponse(Response):