Compare commits


4 Commits

Author     SHA1        Message                                       Date
pukkandan  f660e2f461  lint                                          2023-11-29 05:45:06 +05:30
pukkandan  5c59dca0d5  socket.error is OSError                       2023-11-29 05:45:03 +05:30
pukkandan  c56f5eea4c  Closes #8417                                  2023-11-29 05:45:00 +05:30
pukkandan  8e6eb3380f  [ie/youtube] Skip incomplete initial data     2023-11-29 04:10:17 +05:30

Commit message body for 8e6eb3380f ([ie/youtube] Skip incomplete initial data):

    I no longer have an example where this is applicable, but
    this is supposed to be the expected behavior anyway

    Ref: https://discord.com/channels/807245652072857610/808027148308840478/1137755274992685107
15 changed files with 36 additions and 25 deletions

View File

@@ -9,11 +9,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 import re
 
-from devscripts.utils import (
-    get_filename_args,
-    read_file,
-    write_file,
-)
+from devscripts.utils import get_filename_args, read_file, write_file
 
 
 VERBOSE_TMPL = '''
   - type: checkboxes

View File

@@ -9,7 +9,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 from test.helper import FakeYDL, report_warning
-from yt_dlp.update import Updater, UpdateInfo
+from yt_dlp.update import UpdateInfo, Updater
 
 
 TEST_API_DATA = {
     'yt-dlp/yt-dlp/latest': {

View File

@@ -60,7 +60,13 @@ from .postprocessor import (
     get_postprocessor,
 )
 from .postprocessor.ffmpeg import resolve_mapping as resolve_recode_mapping
-from .update import REPOSITORY, _get_system_deprecation, _make_label, current_git_head, detect_variant
+from .update import (
+    REPOSITORY,
+    _get_system_deprecation,
+    _make_label,
+    current_git_head,
+    detect_variant,
+)
 from .utils import (
     DEFAULT_OUTTMPL,
     IDENTITY,

View File

@@ -152,7 +152,7 @@ class BanByeChannelIE(BanByeBaseIE):
                 'sort': 'new',
                 'limit': self._PAGE_SIZE,
                 'offset': page_num * self._PAGE_SIZE,
-            }, note=f'Downloading page {page_num+1}')
+            }, note=f'Downloading page {page_num + 1}')
             return [
                 self.url_result(f"{self._VIDEO_BASE}/{video['_id']}", BanByeIE)
                 for video in data['items']

View File

@@ -35,8 +35,8 @@ from ..utils import (
     unified_timestamp,
     unsmuggle_url,
     update_url_query,
-    urlhandle_detect_ext,
     url_or_none,
+    urlhandle_detect_ext,
     urljoin,
     variadic,
     xpath_attr,

View File

@@ -536,7 +536,7 @@ class PanoptoListIE(PanoptoBaseIE):
         }
         response = self._call_api(
-            base_url, '/Services/Data.svc/GetSessions', f'{display_id} page {page+1}',
+            base_url, '/Services/Data.svc/GetSessions', f'{display_id} page {page + 1}',
             data={'queryParameters': params}, fatal=False)
 
         for result in get_first(response, 'Results', default=[]):

View File

@@ -70,7 +70,7 @@ class WordpressPlaylistEmbedIE(InfoExtractor):
                 'height': int_or_none(traverse_obj(track, ('dimensions', 'original', 'height'))),
                 'width': int_or_none(traverse_obj(track, ('dimensions', 'original', 'width'))),
             } for track in traverse_obj(playlist_json, ('tracks', ...), expected_type=dict)]
-            yield self.playlist_result(entries, self._generic_id(url) + f'-wp-playlist-{i+1}', 'Wordpress Playlist')
+            yield self.playlist_result(entries, self._generic_id(url) + f'-wp-playlist-{i + 1}', 'Wordpress Playlist')
 
 
 class WordpressMiniAudioPlayerEmbedIE(InfoExtractor):

View File

@@ -5298,6 +5298,7 @@ class YoutubeTabBaseInfoExtractor(YoutubeBaseInfoExtractor):
             # See: https://github.com/yt-dlp/yt-dlp/issues/116
             if not traverse_obj(data, 'contents', 'currentVideoEndpoint', 'onResponseReceivedActions'):
                 retry.error = ExtractorError('Incomplete yt initial data received')
+                data = None
                 continue
 
         return webpage, data
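The added `data = None` matters because the retry loop falls through to `return webpage, data`: without the reset, a payload that failed the completeness check on the final attempt would still be handed back to the caller. A minimal, self-contained sketch of that pattern (the names `fetch_with_retries`/`is_complete` are illustrative, not yt-dlp's actual RetryManager API):

    # Illustrative sketch only; not yt-dlp code.
    def fetch_with_retries(download, is_complete, attempts=3):
        data = None
        for attempt in range(attempts):
            data = download()
            if is_complete(data):
                return data
            data = None  # reset, so an incomplete payload can never leak out of the loop
        return data  # None tells the caller that no usable data was received

    # A downloader that only returns complete data on its third try:
    responses = iter([{}, {'partial': True}, {'contents': ['ok']}])
    print(fetch_with_retries(lambda: next(responses), lambda d: 'contents' in d))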

View File

@@ -28,4 +28,3 @@
     pass
 except Exception as e:
     warnings.warn(f'Failed to import "websockets" request handler: {e}' + bug_reports_message())
-
View File

@@ -219,7 +219,7 @@ def _socket_connect(ip_addr, timeout, source_address):
             sock.bind(source_address)
         sock.connect(sa)
         return sock
-    except socket.error:
+    except OSError:
         sock.close()
         raise
@@ -237,7 +237,7 @@ def create_socks_proxy_socket(dest_addr, proxy_args, proxy_ip_addr, timeout, sou
             sock.bind(source_address)
         sock.connect(dest_addr)
         return sock
-    except socket.error:
+    except OSError:
         sock.close()
         raise
@@ -255,7 +255,7 @@ def create_connection(
     host, port = address
     ip_addrs = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
     if not ip_addrs:
-        raise socket.error('getaddrinfo returns an empty list')
+        raise OSError('getaddrinfo returns an empty list')
     if source_address is not None:
         af = socket.AF_INET if ':' not in source_address[0] else socket.AF_INET6
         ip_addrs = [addr for addr in ip_addrs if addr[0] == af]
@@ -272,7 +272,7 @@ def create_connection(
             # https://bugs.python.org/issue36820
             err = None
             return sock
-        except socket.error as e:
+        except OSError as e:
             err = e
 
     try:
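The `socket.error` → `OSError` swap is safe because `socket.error` has been a plain alias of `OSError` since Python 3.3 (PEP 3151), so nothing that used to be caught can slip through. A quick standalone check:

    import socket

    # socket.error is literally the same class as OSError on Python 3.3+
    assert socket.error is OSError
    # Concrete network failures (ConnectionRefusedError, TimeoutError, ...) subclass OSError too
    assert issubclass(ConnectionRefusedError, OSError)

    try:
        raise socket.error('simulated socket failure')
    except OSError as e:
        print(f'caught as OSError: {e}')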

View File

@@ -188,6 +188,7 @@ class RequestsSession(requests.sessions.Session):
     """
     Ensure unified redirect method handling with our urllib redirect handler.
     """
+
     def rebuild_method(self, prepared_request, response):
         new_method = get_redirect_method(prepared_request.method, response.status_code)
@@ -218,6 +219,7 @@ class Urllib3LoggingFilter(logging.Filter):
 class Urllib3LoggingHandler(logging.Handler):
     """Redirect urllib3 logs to our logger"""
+
     def __init__(self, logger, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self._logger = logger
@@ -367,7 +369,7 @@ class SocksHTTPConnection(urllib3.connection.HTTPConnection):
                 self, f'Connection to {self.host} timed out. (connect timeout={self.timeout})') from e
         except SocksProxyError as e:
             raise urllib3.exceptions.ProxyError(str(e), e) from e
-        except (OSError, socket.error) as e:
+        except OSError as e:
             raise urllib3.exceptions.NewConnectionError(
                 self, f'Failed to establish a new connection: {e}') from e

View File

@@ -5,20 +5,26 @@ import logging
 import ssl
 import sys
 
-from ._helper import create_connection, select_proxy, make_socks_proxy_opts, create_socks_proxy_socket
-from .common import Response, register_rh, Features
+from ._helper import (
+    create_connection,
+    create_socks_proxy_socket,
+    make_socks_proxy_opts,
+    select_proxy,
+)
+from .common import Features, Response, register_rh
 from .exceptions import (
     CertificateVerifyError,
     HTTPError,
+    ProxyError,
     RequestError,
     SSLError,
-    TransportError, ProxyError,
+    TransportError,
 )
 from .websocket import WebSocketRequestHandler, WebSocketResponse
 from ..compat import functools
 from ..dependencies import websockets
-from ..utils import int_or_none
 from ..socks import ProxyError as SocksProxyError
+from ..utils import int_or_none
 
 if not websockets:
     raise ImportError('websockets is not installed')

View File

@@ -2,7 +2,7 @@ from __future__ import annotations
 
 import abc
 
-from .common import Response, RequestHandler
+from .common import RequestHandler, Response
 
 
 class WebSocketResponse(Response):

View File

@@ -49,7 +49,7 @@ class Socks5AddressType:
     ATYP_IPV6 = 0x04
 
 
-class ProxyError(socket.error):
+class ProxyError(OSError):
     ERR_SUCCESS = 0x00
 
     def __init__(self, code=None, msg=None):

View File

@@ -558,7 +558,7 @@ class LenientJSONDecoder(json.JSONDecoder):
                     s = self._close_object(e)
                     if s is not None:
                         continue
-                raise type(e)(f'{e.msg} in {s[e.pos-10:e.pos+10]!r}', s, e.pos)
+                raise type(e)(f'{e.msg} in {s[e.pos - 10:e.pos + 10]!r}', s, e.pos)
         assert False, 'Too many attempts to decode JSON'
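The touched line in `LenientJSONDecoder` only changes spacing, but the technique it uses is worth a note: the error message quotes a roughly 10-character window around the failing position. A standalone sketch of the same idea (`decode_with_context` is a made-up name for this example):

    import json

    def decode_with_context(s):
        try:
            return json.loads(s)
        except json.JSONDecodeError as e:
            # Same idea as the hunk above: quote the text surrounding the error position
            raise type(e)(f'{e.msg} in {s[e.pos - 10:e.pos + 10]!r}', s, e.pos)

    try:
        decode_with_context('{"title": "demo", "id": }')
    except json.JSONDecodeError as e:
        print(e.msg)  # e.g. Expecting value in 'o", "id": }'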
@@ -1885,6 +1885,7 @@ def setproctitle(title):
     buf = ctypes.create_string_buffer(len(title_bytes))
     buf.value = title_bytes
     try:
+        # PR_SET_NAME = 15 Ref: /usr/include/linux/prctl.h
        libc.prctl(15, buf, 0, 0, 0)
     except AttributeError:
         return  # Strange libc, just skip this
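The new comment pins down the magic number passed to `prctl`: 15 is `PR_SET_NAME` from `/usr/include/linux/prctl.h`, which renames the calling thread (the kernel truncates the name to 15 bytes plus a NUL). A hedged, Linux-only sketch of the same call outside yt-dlp (`set_process_name` is an illustrative name, not the project's helper):

    import ctypes

    def set_process_name(name):
        try:
            libc = ctypes.CDLL('libc.so.6', use_errno=True)
        except OSError:
            return False  # not glibc / not Linux
        PR_SET_NAME = 15  # from /usr/include/linux/prctl.h
        buf = ctypes.create_string_buffer(name.encode())
        return libc.prctl(PR_SET_NAME, buf, 0, 0, 0) == 0

    print('renamed:', set_process_name('demo-proc'))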
@@ -5072,7 +5073,7 @@ def truncate_string(s, left, right=0):
     assert left > 3 and right >= 0
     if s is None or len(s) <= left + right:
         return s
-    return f'{s[:left-3]}...{s[-right:] if right else ""}'
+    return f'{s[:left - 3]}...{s[-right:] if right else ""}'
 
 
 def orderedSet_from_options(options, alias_dict, *, use_regex=False, start=None):
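For reference, here is how `truncate_string` behaves with the (purely cosmetic) spacing fix applied; the function body is copied from the hunk above so the snippet runs on its own:

    def truncate_string(s, left, right=0):
        assert left > 3 and right >= 0
        if s is None or len(s) <= left + right:
            return s
        return f'{s[:left - 3]}...{s[-right:] if right else ""}'

    print(truncate_string('abcdefghijklmnop', 10))    # abcdefg...
    print(truncate_string('abcdefghijklmnop', 8, 4))  # abcde...mnop
    print(truncate_string('short', 10))               # short (returned unchanged)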