Mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2024-11-26 01:01:25 +01:00)

Compare commits: 532dce4e1b ... 9062818568 (13 commits)

SHA1 of the compared commits:
9062818568
f2a4983df7
bacc31b05a
8a450b3aeb
4f7c6e90c8
4458983842
07d3a44dae
de9b89b71f
1b1a31cf6e
cb02b582a8
313b607af4
fe01d245b6
bcec568ea7
@@ -120,19 +120,22 @@ class TestFFmpegFD(unittest.TestCase):
 downloader._call_downloader('test', {**TEST_INFO, 'ext': 'mp4'})
 self.assertEqual(self._args, [
 'ffmpeg', '-y', '-hide_banner', '-i', 'http://www.example.com/',
-'-c', 'copy', '-f', 'mp4', 'file:test'])
+'-c', 'copy', '-f', 'mp4', 'file:test', '-progress', 'pipe:1'])

 # Test cookies arg is added
 ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
 downloader._call_downloader('test', {**TEST_INFO, 'ext': 'mp4'})
 self.assertEqual(self._args, [
-'ffmpeg', '-y', '-hide_banner', '-cookies', 'test=ytdlp; path=/; domain=.example.com;\r\n',
-'-i', 'http://www.example.com/', '-c', 'copy', '-f', 'mp4', 'file:test'])
+'ffmpeg', '-y', '-hide_banner', '-cookies',
+'test=ytdlp; path=/; domain=.example.com;\r\n', '-i',
+'http://www.example.com/', '-c', 'copy', '-f', 'mp4',
+'file:test', '-progress', 'pipe:1'])

 # Test with non-url input (ffmpeg reads from stdin '-' for websockets)
 downloader._call_downloader('test', {'url': 'x', 'ext': 'mp4'})
 self.assertEqual(self._args, [
-'ffmpeg', '-y', '-hide_banner', '-i', 'x', '-c', 'copy', '-f', 'mp4', 'file:test'])
+'ffmpeg', '-y', '-hide_banner', '-i', 'x', '-c', 'copy', '-f',
+'mp4', 'file:test', '-progress', 'pipe:1'])


 if __name__ == '__main__':
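For context, the updated assertions encode the intent of the patch: FFmpegFD now appends '-progress pipe:1' after the output argument so ffmpeg reports machine-readable progress on stdout. A minimal hand-run sketch of such an invocation (illustrative only, not part of the test suite; the input URL and output name are placeholders):

import subprocess

args = [
    'ffmpeg', '-y', '-hide_banner',
    '-i', 'http://www.example.com/',           # placeholder input
    '-c', 'copy', '-f', 'mp4', 'file:test',    # placeholder output
    '-progress', 'pipe:1',                     # progress key=value blocks go to stdout
]
proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, text=True)
for line in proc.stdout:                       # e.g. 'out_time_us=4000000', 'progress=continue'
    print(line.rstrip())
proc.wait()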
@@ -16,6 +16,7 @@ from ..minicurses import (
 from ..utils import (
 IDENTITY,
 NO_DEFAULT,
+FormatProgressInfos,
 LockingUnsupportedError,
 Namespace,
 RetryManager,

@@ -26,11 +27,9 @@ from ..utils import (
 format_bytes,
 join_nonempty,
 parse_bytes,
-remove_start,
 sanitize_open,
 shell_quote,
 timeconvert,
-timetuple_from_msec,
 try_call,
 )

@@ -120,56 +119,6 @@ class FileDownloader:
 def FD_NAME(cls):
 return re.sub(r'(?<=[a-z])(?=[A-Z])', '_', cls.__name__[:-2]).lower()

-@staticmethod
-def format_seconds(seconds):
-if seconds is None:
-return ' Unknown'
-time = timetuple_from_msec(seconds * 1000)
-if time.hours > 99:
-return '--:--:--'
-return '%02d:%02d:%02d' % time[:-1]
-
-@classmethod
-def format_eta(cls, seconds):
-return f'{remove_start(cls.format_seconds(seconds), "00:"):>8s}'
-
-@staticmethod
-def calc_percent(byte_counter, data_len):
-if data_len is None:
-return None
-return float(byte_counter) / float(data_len) * 100.0
-
-@staticmethod
-def format_percent(percent):
-return ' N/A%' if percent is None else f'{percent:>5.1f}%'
-
-@classmethod
-def calc_eta(cls, start_or_rate, now_or_remaining, total=NO_DEFAULT, current=NO_DEFAULT):
-if total is NO_DEFAULT:
-rate, remaining = start_or_rate, now_or_remaining
-if None in (rate, remaining):
-return None
-return int(float(remaining) / rate)
-
-start, now = start_or_rate, now_or_remaining
-if total is None:
-return None
-if now is None:
-now = time.time()
-rate = cls.calc_speed(start, now, current)
-return rate and int((float(total) - float(current)) / rate)
-
-@staticmethod
-def calc_speed(start, now, bytes):
-dif = now - start
-if bytes == 0 or dif < 0.001: # One millisecond
-return None
-return float(bytes) / dif
-
-@staticmethod
-def format_speed(speed):
-return ' Unknown B/s' if speed is None else f'{format_bytes(speed):>10s}/s'
-
 @staticmethod
 def format_retries(retries):
 return 'inf' if retries == float('inf') else int(retries)

@@ -348,18 +297,16 @@ class FileDownloader:
 return tmpl
 return default

-_format_bytes = lambda k: f'{format_bytes(s.get(k)):>10s}'
-
 if s['status'] == 'finished':
 if self.params.get('noprogress'):
 self.to_screen('[download] Download completed')
 speed = try_call(lambda: s['total_bytes'] / s['elapsed'])
 s.update({
 'speed': speed,
-'_speed_str': self.format_speed(speed).strip(),
-'_total_bytes_str': _format_bytes('total_bytes'),
-'_elapsed_str': self.format_seconds(s.get('elapsed')),
-'_percent_str': self.format_percent(100),
+'_speed_str': FormatProgressInfos.format_speed(speed).strip(),
+'_total_bytes_str': format_bytes(s.get('total_bytes')),
+'_elapsed_str': FormatProgressInfos.format_seconds(s.get('elapsed')),
+'_percent_str': FormatProgressInfos.format_percent(100),
 })
 self._report_progress_status(s, join_nonempty(
 '100%%',

@@ -378,16 +325,16 @@ class FileDownloader:
 self._progress_delta_time += update_delta

 s.update({
-'_eta_str': self.format_eta(s.get('eta')).strip(),
-'_speed_str': self.format_speed(s.get('speed')),
-'_percent_str': self.format_percent(try_call(
+'_eta_str': FormatProgressInfos.format_eta(s.get('eta')),
+'_speed_str': FormatProgressInfos.format_speed(s.get('speed')),
+'_percent_str': FormatProgressInfos.format_percent(try_call(
 lambda: 100 * s['downloaded_bytes'] / s['total_bytes'],
 lambda: 100 * s['downloaded_bytes'] / s['total_bytes_estimate'],
 lambda: s['downloaded_bytes'] == 0 and 0)),
-'_total_bytes_str': _format_bytes('total_bytes'),
-'_total_bytes_estimate_str': _format_bytes('total_bytes_estimate'),
-'_downloaded_bytes_str': _format_bytes('downloaded_bytes'),
-'_elapsed_str': self.format_seconds(s.get('elapsed')),
+'_total_bytes_str': format_bytes(s.get('total_bytes')),
+'_total_bytes_estimate_str': format_bytes(s.get('total_bytes_estimate')),
+'_downloaded_bytes_str': format_bytes(s.get('downloaded_bytes')),
+'_elapsed_str': FormatProgressInfos.format_seconds(s.get('elapsed')),
 })

 msg_template = with_fields(
@@ -4,14 +4,17 @@ import json
 import os
 import re
 import subprocess
-import sys
 import tempfile
 import time
 import uuid

 from .fragment import FragmentFD
 from ..networking import Request
-from ..postprocessor.ffmpeg import EXT_TO_OUT_FORMATS, FFmpegPostProcessor
+from ..postprocessor.ffmpeg import (
+EXT_TO_OUT_FORMATS,
+FFmpegPostProcessor,
+FFmpegProgressTracker,
+)
 from ..utils import (
 Popen,
 RetryManager,

@@ -621,26 +624,23 @@ class FFmpegFD(ExternalFD):

 args = [encodeArgument(opt) for opt in args]
 args.append(encodeFilename(ffpp._ffmpeg_filename_argument(tmpfilename), True))
+args += ['-progress', 'pipe:1']
 self._debug_cmd(args)

 piped = any(fmt['url'] in ('-', 'pipe:') for fmt in selected_formats)
-with Popen(args, stdin=subprocess.PIPE, env=env) as proc:
-if piped:
-self.on_process_started(proc, proc.stdin)
-try:
-retval = proc.wait()
-except BaseException as e:
-# subprocces.run would send the SIGKILL signal to ffmpeg and the
-# mp4 file couldn't be played, but if we ask ffmpeg to quit it
-# produces a file that is playable (this is mostly useful for live
-# streams). Note that Windows is not affected and produces playable
-# files (see https://github.com/ytdl-org/youtube-dl/issues/8300).
-if isinstance(e, KeyboardInterrupt) and sys.platform != 'win32' and not piped:
-proc.communicate_or_kill(b'q')
-else:
-proc.kill(timeout=None)
-raise
-return retval
+self._debug_cmd(args)
+ffmpeg_progress_tracker = FFmpegProgressTracker(info_dict, args, self._ffmpeg_hook)
+proc = ffmpeg_progress_tracker.ffmpeg_proc
+if piped:
+self.on_process_started(proc, proc.stdin)
+_, _, return_code = ffmpeg_progress_tracker.run_ffmpeg_subprocess()
+return return_code
+
+def _ffmpeg_hook(self, status, info_dict):
+status['downloaded_bytes'] = status.get('outputted', 0)
+if status.get('status') == 'ffmpeg_running':
+status['status'] = 'downloading'
+self._hook_progress(status, info_dict)


 class AVconvFD(FFmpegFD):
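A small sketch (hypothetical status values) of what the new _ffmpeg_hook above does with one update coming from FFmpegProgressTracker before handing it to the regular progress-hook machinery:

status = {'status': 'ffmpeg_running', 'outputted': 1_250_000, 'elapsed': 2.5}

# mirrors _ffmpeg_hook: map tracker fields onto downloader progress fields
status['downloaded_bytes'] = status.get('outputted', 0)
if status.get('status') == 'ffmpeg_running':
    status['status'] = 'downloading'

# status is then passed to self._hook_progress(status, info_dict), the same path
# every other downloader uses to drive the progress display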
@@ -11,6 +11,7 @@ from ..networking.exceptions import (
 )
 from ..utils import (
 ContentTooShortError,
+FormatProgressInfos,
 RetryManager,
 ThrottledDownload,
 XAttrMetadataError,

@@ -298,11 +299,11 @@ class HttpFD(FileDownloader):
 before = after

 # Progress message
-speed = self.calc_speed(start, now, byte_counter - ctx.resume_len)
+speed = FormatProgressInfos.calc_speed(start, now, byte_counter - ctx.resume_len)
 if ctx.data_len is None:
 eta = None
 else:
-eta = self.calc_eta(start, time.time(), ctx.data_len - ctx.resume_len, byte_counter - ctx.resume_len)
+eta = FormatProgressInfos.calc_eta(start, time.time(), ctx.data_len - ctx.resume_len, byte_counter - ctx.resume_len)

 self._hook_progress({
 'status': 'downloading',
@@ -5,6 +5,7 @@ import time

 from .common import FileDownloader
 from ..utils import (
+FormatProgressInfos,
 Popen,
 check_executable,
 encodeArgument,

@@ -50,8 +51,8 @@ class RtmpFD(FileDownloader):
 resume_percent = percent
 resume_downloaded_data_len = downloaded_data_len
 time_now = time.time()
-eta = self.calc_eta(start, time_now, 100 - resume_percent, percent - resume_percent)
-speed = self.calc_speed(start, time_now, downloaded_data_len - resume_downloaded_data_len)
+eta = FormatProgressInfos.calc_eta(start, time_now, 100 - resume_percent, percent - resume_percent)
+speed = FormatProgressInfos.calc_speed(start, time_now, downloaded_data_len - resume_downloaded_data_len)
 data_len = None
 if percent > 0:
 data_len = int(downloaded_data_len * 100 / percent)
@@ -205,6 +205,26 @@ class ArchiveOrgIE(InfoExtractor):
 },
 },
 ],
+}, {
+# The reviewbody is None for one of the reviews; just need to extract data without crashing
+'url': 'https://archive.org/details/gd95-04-02.sbd.11622.sbeok.shnf/gd95-04-02d1t04.shn',
+'info_dict': {
+'id': 'gd95-04-02.sbd.11622.sbeok.shnf/gd95-04-02d1t04.shn',
+'ext': 'mp3',
+'title': 'Stuck Inside of Mobile with the Memphis Blues Again',
+'creators': ['Grateful Dead'],
+'duration': 338.31,
+'track': 'Stuck Inside of Mobile with the Memphis Blues Again',
+'description': 'md5:764348a470b986f1217ffd38d6ac7b72',
+'display_id': 'gd95-04-02d1t04.shn',
+'location': 'Pyramid Arena',
+'uploader': 'jon@archive.org',
+'album': '1995-04-02 - Pyramid Arena',
+'upload_date': '20040519',
+'track_number': 4,
+'release_date': '19950402',
+'timestamp': 1084927901,
+},
 }]

 @staticmethod

@@ -335,7 +355,7 @@ class ArchiveOrgIE(InfoExtractor):
 info['comments'].append({
 'id': review.get('review_id'),
 'author': review.get('reviewer'),
-'text': str_or_none(review.get('reviewtitle'), '') + '\n\n' + review.get('reviewbody'),
+'text': join_nonempty('reviewtitle', 'reviewbody', from_dict=review, delim='\n\n'),
 'timestamp': unified_timestamp(review.get('createdate')),
 'parent': 'root'})

@@ -563,13 +563,13 @@ class FacebookIE(InfoExtractor):
 return extract_video_data(try_get(
 js_data, lambda x: x['jsmods']['instances'], list) or [])

-def extract_dash_manifest(video, formats):
+def extract_dash_manifest(vid_data, formats, mpd_url=None):
 dash_manifest = traverse_obj(
-video, 'dash_manifest', 'playlist', 'dash_manifest_xml_string', expected_type=str)
+vid_data, 'dash_manifest', 'playlist', 'dash_manifest_xml_string', 'manifest_xml', expected_type=str)
 if dash_manifest:
 formats.extend(self._parse_mpd_formats(
 compat_etree_fromstring(urllib.parse.unquote_plus(dash_manifest)),
-mpd_url=url_or_none(video.get('dash_manifest_url'))))
+mpd_url=url_or_none(video.get('dash_manifest_url')) or mpd_url))

 def process_formats(info):
 # Downloads with browser's User-Agent are rate limited. Working around

@@ -619,9 +619,12 @@ class FacebookIE(InfoExtractor):
 video = video['creation_story']
 video['owner'] = traverse_obj(video, ('short_form_video_context', 'video_owner'))
 video.update(reel_info)
-fmt_data = traverse_obj(video, ('videoDeliveryLegacyFields', {dict})) or video
+
 formats = []
 q = qualities(['sd', 'hd'])
+
+# Legacy formats extraction
+fmt_data = traverse_obj(video, ('videoDeliveryLegacyFields', {dict})) or video
 for key, format_id in (('playable_url', 'sd'), ('playable_url_quality_hd', 'hd'),
 ('playable_url_dash', ''), ('browser_native_hd_url', 'hd'),
 ('browser_native_sd_url', 'sd')):

@@ -629,7 +632,7 @@ class FacebookIE(InfoExtractor):
 if not playable_url:
 continue
 if determine_ext(playable_url) == 'mpd':
-formats.extend(self._extract_mpd_formats(playable_url, video_id))
+formats.extend(self._extract_mpd_formats(playable_url, video_id, fatal=False))
 else:
 formats.append({
 'format_id': format_id,

@@ -638,6 +641,28 @@ class FacebookIE(InfoExtractor):
 'url': playable_url,
 })
 extract_dash_manifest(fmt_data, formats)
+
+# New videoDeliveryResponse formats extraction
+fmt_data = traverse_obj(video, ('videoDeliveryResponseFragment', 'videoDeliveryResponseResult'))
+mpd_urls = traverse_obj(fmt_data, ('dash_manifest_urls', ..., 'manifest_url', {url_or_none}))
+dash_manifests = traverse_obj(fmt_data, ('dash_manifests', lambda _, v: v['manifest_xml']))
+for idx, dash_manifest in enumerate(dash_manifests):
+extract_dash_manifest(dash_manifest, formats, mpd_url=traverse_obj(mpd_urls, idx))
+if not dash_manifests:
+# Only extract from MPD URLs if the manifests are not already provided
+for mpd_url in mpd_urls:
+formats.extend(self._extract_mpd_formats(mpd_url, video_id, fatal=False))
+for prog_fmt in traverse_obj(fmt_data, ('progressive_urls', lambda _, v: v['progressive_url'])):
+format_id = traverse_obj(prog_fmt, ('metadata', 'quality', {str.lower}))
+formats.append({
+'format_id': format_id,
+# sd, hd formats w/o resolution info should be deprioritized below DASH
+'quality': q(format_id) - 3,
+'url': prog_fmt['progressive_url'],
+})
+for m3u8_url in traverse_obj(fmt_data, ('hls_playlist_urls', ..., 'hls_playlist_url', {url_or_none})):
+formats.extend(self._extract_m3u8_formats(m3u8_url, video_id, 'mp4', fatal=False, m3u8_id='hls'))
+
 if not formats:
 # Do not append false positive entry w/o any formats
 return
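For orientation, a hypothetical videoDeliveryResponseResult fragment shaped to match the traversal paths used above (field names are inferred from the code, not from a captured response), together with a plain-Python equivalent of the traverse_obj calls:

fmt_data = {
    'dash_manifest_urls': [{'manifest_url': 'https://example.com/video.mpd'}],
    'dash_manifests': [{'manifest_xml': '<MPD>...</MPD>'}],
    'progressive_urls': [{'progressive_url': 'https://example.com/video_sd.mp4',
                          'metadata': {'quality': 'SD'}}],
    'hls_playlist_urls': [{'hls_playlist_url': 'https://example.com/video.m3u8'}],
}

mpd_urls = [d['manifest_url'] for d in fmt_data['dash_manifest_urls']]
dash_manifests = [d for d in fmt_data['dash_manifests'] if d.get('manifest_xml')]
# Only when dash_manifests is empty would the MPD URLs themselves be fetched.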
@@ -1,15 +1,27 @@
 import functools
 import json
 import os
+import sys

+from ..minicurses import (
+BreaklineStatusPrinter,
+MultilineLogger,
+MultilinePrinter,
+QuietMultilinePrinter,
+)
 from ..networking import Request
 from ..networking.exceptions import HTTPError, network_exceptions
 from ..utils import (
+FormatProgressInfos,
+Namespace,
 PostProcessingError,
 RetryManager,
 _configuration_args,
 deprecation_warning,
 encodeFilename,
+format_bytes,
+join_nonempty,
+try_call,
 )


@@ -56,7 +68,9 @@ class PostProcessor(metaclass=PostProcessorMetaClass):
 self._progress_hooks = []
 self.add_progress_hook(self.report_progress)
 self.set_downloader(downloader)
+self._out_files = self.set_out_files()
 self.PP_NAME = self.pp_key()
+self._prepare_multiline_status()

 @classmethod
 def pp_key(cls):

@@ -102,6 +116,11 @@ class PostProcessor(metaclass=PostProcessorMetaClass):
 return self._downloader.params.get(name, default, *args, **kwargs)
 return default

+def set_out_files(self):
+if not self._downloader:
+return None
+return getattr(self._downloader, '_out_files', None) or self._downloader.ydl._out_files
+
 def set_downloader(self, downloader):
 """Sets the downloader for this PP."""
 self._downloader = downloader

@@ -173,25 +192,116 @@ class PostProcessor(metaclass=PostProcessorMetaClass):
 # See YoutubeDl.py (search for postprocessor_hooks) for a description of this interface
 self._progress_hooks.append(ph)

-def report_progress(self, s):
-s['_default_template'] = '%(postprocessor)s %(status)s' % s  # noqa: UP031
-if not self._downloader:
-return
+def report_destination(self, filename):
+"""Report destination filename."""
+self.to_screen('[processing] Destination: ' + filename)
+
+def _prepare_multiline_status(self, lines=1):
+if self._downloader:
+if self._downloader.params.get('noprogress'):
+self._multiline = QuietMultilinePrinter()
+elif self._downloader.params.get('logger'):
+self._multiline = MultilineLogger(self._downloader.params['logger'], lines)
+elif self._downloader.params.get('progress_with_newline'):
+self._multiline = BreaklineStatusPrinter(self._downloader._out_files.out, lines)
+elif hasattr(self._downloader, '_out_files'):
+self._multiline = MultilinePrinter(self._downloader._out_files.out, lines, not self._downloader.params.get('quiet'))
+else:
+self._multiline = MultilinePrinter(sys.stdout, lines, not self._downloader.params.get('quiet'))
+self._multiline.allow_colors = self._multiline._HAVE_FULLCAP and not self._downloader.params.get('no_color')
+else:
+self._multiline = MultilinePrinter(sys.stdout, lines, True)
+self._multiline.allow_colors = self._multiline._HAVE_FULLCAP
+
+def _finish_multiline_status(self):
+self._multiline.end()
+
+ProgressStyles = Namespace(
+processed_bytes='light blue',
+percent='light blue',
+eta='yellow',
+speed='green',
+elapsed='bold white',
+total_bytes='',
+total_bytes_estimate='',
+)
+
+def _report_progress_status(self, s, default_template):
+for name, style in self.ProgressStyles.items_:
+name = f'_{name}_str'
+if name not in s:
+continue
+s[name] = self._format_progress(s[name], style)
+s['_default_template'] = default_template % s

 progress_dict = s.copy()
 progress_dict.pop('info_dict')
 progress_dict = {'info': s['info_dict'], 'progress': progress_dict}

-progress_template = self.get_param('progress_template', {})
-tmpl = progress_template.get('postprocess')
-if tmpl:
-self._downloader.to_screen(
-self._downloader.evaluate_outtmpl(tmpl, progress_dict), quiet=False)
-
+progress_template = self._downloader.params.get('progress_template', {})
+self._multiline.print_at_line(self._downloader.evaluate_outtmpl(
+progress_template.get('process') or '[processing] %(progress._default_template)s',
+progress_dict), s.get('progress_idx') or 0)
 self._downloader.to_console_title(self._downloader.evaluate_outtmpl(
-progress_template.get('postprocess-title') or 'yt-dlp %(progress._default_template)s',
+progress_template.get('download-title') or 'yt-dlp %(progress._default_template)s',
 progress_dict))

+def _format_progress(self, *args, **kwargs):
+return self._downloader._format_text(
+self._multiline.stream, self._multiline.allow_colors, *args, **kwargs)
+
+def report_progress(self, s):
+def with_fields(*tups, default=''):
+for *fields, tmpl in tups:
+if all(s.get(f) is not None for f in fields):
+return tmpl
+return default
+
+if not self._downloader:
+return
+
+if s['status'] == 'finished':
+if self._downloader.params.get('noprogress'):
+self.to_screen('[processing] Download completed')
+speed = try_call(lambda: s['total_bytes'] / s['elapsed'])
+s.update({
+'speed': speed,
+'_speed_str': FormatProgressInfos.format_speed(speed).strip(),
+'_total_bytes_str': format_bytes(s.get('total_bytes')),
+'_elapsed_str': FormatProgressInfos.format_seconds(s.get('elapsed')),
+'_percent_str': FormatProgressInfos.format_percent(100),
+})
+self._report_progress_status(s, join_nonempty(
+'100%%',
+with_fields(('total_bytes', 'of %(_total_bytes_str)s')),
+with_fields(('elapsed', 'in %(_elapsed_str)s')),
+with_fields(('speed', 'at %(_speed_str)s')),
+delim=' '))
+
+if s['status'] != 'processing':
+return
+
+s.update({
+'_eta_str': FormatProgressInfos.format_eta(s.get('eta')),
+'_speed_str': FormatProgressInfos.format_speed(s.get('speed')),
+'_percent_str': FormatProgressInfos.format_percent(try_call(
+lambda: 100 * s['processed_bytes'] / s['total_bytes'],
+lambda: 100 * s['processed_bytes'] / s['total_bytes_estimate'],
+lambda: s['processed_bytes'] == 0 and 0)),
+'_total_bytes_str': format_bytes(s.get('total_bytes')),
+'_total_bytes_estimate_str': format_bytes(s.get('total_bytes_estimate')),
+'_processed_bytes_str': format_bytes(s.get('processed_bytes')),
+'_elapsed_str': FormatProgressInfos.format_seconds(s.get('elapsed')),
+})
+
+msg_template = with_fields(
+('total_bytes', '%(_percent_str)s of %(_total_bytes_str)s at %(_speed_str)s ETA %(_eta_str)s'),
+('processed_bytes', 'elapsed', '%(_processed_bytes_str)s at %(_speed_str)s (%(_elapsed_str)s)'),
+('processed_bytes', '%(_processed_bytes_str)s at %(_speed_str)s'),
+default='%(_percent_str)s at %(_speed_str)s ETA %(_eta_str)s')
+
+self._report_progress_status(s, msg_template)
+
 def _retry_download(self, err, count, retries):
 # While this is not an extractor, it behaves similar to one and
 # so obey extractor_retries and "--retry-sleep extractor"
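A quick illustration (hypothetical status dict) of how the with_fields helper defined in the new report_progress selects a message template; the first tuple whose fields are all present wins:

s = {'processed_bytes': 3_000_000, 'elapsed': 4.2, 'total_bytes': None}

def with_fields(*tups, default=''):
    for *fields, tmpl in tups:
        if all(s.get(f) is not None for f in fields):
            return tmpl
    return default

msg_template = with_fields(
    ('total_bytes', '%(_percent_str)s of %(_total_bytes_str)s at %(_speed_str)s ETA %(_eta_str)s'),
    ('processed_bytes', 'elapsed', '%(_processed_bytes_str)s at %(_speed_str)s (%(_elapsed_str)s)'),
    ('processed_bytes', '%(_processed_bytes_str)s at %(_speed_str)s'),
    default='%(_percent_str)s at %(_speed_str)s ETA %(_eta_str)s')
# total_bytes is None, so the second tuple matches:
# msg_template == '%(_processed_bytes_str)s at %(_speed_str)s (%(_elapsed_str)s)'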
@@ -6,7 +6,10 @@ import json
 import os
 import re
 import subprocess
+import sys
 import time
+from queue import Queue
+from threading import Thread

 from .common import PostProcessor
 from ..compat import imghdr

@@ -24,6 +27,7 @@ from ..utils import (
 encodeFilename,
 filter_dict,
 float_or_none,
+int_or_none,
 is_outdated_version,
 orderedSet,
 prepend_extension,

@@ -203,7 +207,7 @@ class FFmpegPostProcessor(PostProcessor):

 @property
 def available(self):
-return self.basename is not None
+return self._ffmpeg_location.get() or self.basename is not None

 @property
 def executable(self):

@@ -327,11 +331,9 @@ class FFmpegPostProcessor(PostProcessor):
 return abs(d1 - d2) > tolerance

 def run_ffmpeg_multiple_files(self, input_paths, out_path, opts, **kwargs):
-return self.real_run_ffmpeg(
-[(path, []) for path in input_paths],
-[(out_path, opts)], **kwargs)
+return self.real_run_ffmpeg([(path, []) for path in input_paths], [(out_path, opts)], **kwargs)

-def real_run_ffmpeg(self, input_path_opts, output_path_opts, *, expected_retcodes=(0,)):
+def real_run_ffmpeg(self, input_path_opts, output_path_opts, *, expected_retcodes=(0,), info_dict=None):
 self.check_version()

 oldest_mtime = min(

@@ -351,19 +353,20 @@ class FFmpegPostProcessor(PostProcessor):
 args += self._configuration_args(self.basename, keys)
 if name == 'i':
 args.append('-i')
-return (
-[encodeArgument(arg) for arg in args]
-+ [encodeFilename(self._ffmpeg_filename_argument(file), True)])
+return [encodeArgument(arg) for arg in args] + [encodeFilename(self._ffmpeg_filename_argument(file), True)]

 for arg_type, path_opts in (('i', input_path_opts), ('o', output_path_opts)):
 cmd += itertools.chain.from_iterable(
 make_args(path, list(opts), arg_type, i + 1)
 for i, (path, opts) in enumerate(path_opts) if path)

+cmd += ['-progress', 'pipe:1']
 self.write_debug(f'ffmpeg command line: {shell_quote(cmd)}')
-_, stderr, returncode = Popen.run(
-cmd, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
-if returncode not in variadic(expected_retcodes):
+
+ffmpeg_progress_tracker = FFmpegProgressTracker(info_dict, cmd, self._ffmpeg_hook, self._downloader)
+_, stderr, return_code = ffmpeg_progress_tracker.run_ffmpeg_subprocess()
+if return_code not in variadic(expected_retcodes):
+stderr = stderr.strip()
 self.write_debug(stderr)
 raise FFmpegPostProcessorError(stderr.strip().splitlines()[-1])
 for out_path, _ in output_path_opts:

@@ -371,7 +374,7 @@ class FFmpegPostProcessor(PostProcessor):
 self.try_utime(out_path, oldest_mtime, oldest_mtime)
 return stderr

-def run_ffmpeg(self, path, out_path, opts, **kwargs):
+def run_ffmpeg(self, path, out_path, opts, informations=None, **kwargs):
 return self.run_ffmpeg_multiple_files([path], out_path, opts, **kwargs)

 @staticmethod

@@ -436,6 +439,12 @@ class FFmpegPostProcessor(PostProcessor):
 if directive in opts:
 yield f'{directive} {opts[directive]}\n'

+def _ffmpeg_hook(self, status, info_dict):
+status['processed_bytes'] = status.get('outputted', 0)
+if status.get('status') == 'ffmpeg_running':
+status['status'] = 'processing'
+self._hook_progress(status, info_dict)
+

 class FFmpegExtractAudioPP(FFmpegPostProcessor):
 COMMON_AUDIO_EXTS = (*MEDIA_EXTENSIONS.common_audio, 'wma')

@@ -470,14 +479,14 @@ class FFmpegExtractAudioPP(FFmpegPostProcessor):
 return ['-vbr', f'{int(q)}']
 return ['-q:a', f'{q}']

-def run_ffmpeg(self, path, out_path, codec, more_opts):
+def run_ffmpeg(self, path, out_path, codec, more_opts, informations=None):
 if codec is None:
 acodec_opts = []
 else:
 acodec_opts = ['-acodec', codec]
 opts = ['-vn', *acodec_opts, *more_opts]
 try:
-FFmpegPostProcessor.run_ffmpeg(self, path, out_path, opts)
+FFmpegPostProcessor.run_ffmpeg(self, path, out_path, opts, informations)
 except FFmpegPostProcessorError as err:
 raise PostProcessingError(f'audio conversion failed: {err.msg}')

@@ -528,7 +537,7 @@ class FFmpegExtractAudioPP(FFmpegPostProcessor):
 return [], information

 self.to_screen(f'Destination: {new_path}')
-self.run_ffmpeg(path, temp_path, acodec, more_opts)
+self.run_ffmpeg(path, temp_path, acodec, more_opts, information)

 os.replace(path, orig_path)
 os.replace(temp_path, new_path)

@@ -571,7 +580,7 @@ class FFmpegVideoConvertorPP(FFmpegPostProcessor):

 outpath = replace_extension(filename, target_ext, source_ext)
 self.to_screen(f'{self._ACTION.title()} video from {source_ext} to {target_ext}; Destination: {outpath}')
-self.run_ffmpeg(filename, outpath, self._options(target_ext))
+self.run_ffmpeg(filename, outpath, self._options(target_ext), info)

 info['filepath'] = outpath
 info['format'] = info['ext'] = target_ext

@@ -837,7 +846,7 @@ class FFmpegMergerPP(FFmpegPostProcessor):
 if fmt.get('vcodec') != 'none':
 args.extend(['-map', f'{i}:v:0'])
 self.to_screen(f'Merging formats into "{filename}"')
-self.run_ffmpeg_multiple_files(info['__files_to_merge'], temp_filename, args)
+self.run_ffmpeg_multiple_files(info['__files_to_merge'], temp_filename, args, info_dict=info)
 os.rename(encodeFilename(temp_filename), encodeFilename(filename))
 return info['__files_to_merge'], info

@@ -1007,7 +1016,7 @@ class FFmpegSubtitlesConvertorPP(FFmpegPostProcessor):
 else:
 sub_filenames.append(srt_file)

-self.run_ffmpeg(old_file, new_file, ['-f', new_format])
+self.run_ffmpeg(old_file, new_file, ['-f', new_format], info)

 with open(new_file, encoding='utf-8') as f:
 subs[lang] = {

@@ -1190,3 +1199,236 @@ class FFmpegConcatPP(FFmpegPostProcessor):
 'ext': ie_copy['ext'],
 }]
 return files_to_delete, info
+
+
+class FFmpegProgressTracker:
+def __init__(self, info_dict, ffmpeg_args, hook_progress, ydl=None):
+self.ydl = ydl
+self._info_dict = info_dict
+self._ffmpeg_args = ffmpeg_args
+self._hook_progress = hook_progress
+self._stdout_queue, self._stderr_queue = Queue(), Queue()
+self._streams, self._stderr_buffer, self._stdout_buffer = ['', ''], '', ''
+self._progress_pattern = re.compile(r'''(?x)
+(?:
+frame=\s*(?P<frame>\S+)\n
+fps=\s*(?P<fps>\S+)\n
+stream_\d+_\d+_q=\s*(?P<stream_d_d_q>\S+)\n
+)?
+bitrate=\s*(?P<bitrate>\S+)\n
+total_size=\s*(?P<total_size>\S+)\n
+out_time_us=\s*(?P<out_time_us>\S+)\n
+out_time_ms=\s*(?P<out_time_ms>\S+)\n
+out_time=\s*(?P<out_time>\S+)\n
+dup_frames=\s*(?P<dup_frames>\S+)\n
+drop_frames=\s*(?P<drop_frames>\S+)\n
+speed=\s*(?P<speed>\S+)\n
+progress=\s*(?P<progress>\S+)
+''')
+
+if self.ydl:
+self.ydl.write_debug(f'ffmpeg command line: {shell_quote(self._ffmpeg_args)}')
+self.ffmpeg_proc = Popen(self._ffmpeg_args, universal_newlines=True,
+encoding='utf8', stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+self._start_time = time.time()
+
+def trigger_progress_hook(self, dct):
+self._status.update(dct)
+self._hook_progress(self._status, self._info_dict)
+
+def run_ffmpeg_subprocess(self):
+if self._info_dict and self.ydl:
+return self._track_ffmpeg_progress()
+return self._run_ffmpeg_without_progress_tracking()
+
+def _run_ffmpeg_without_progress_tracking(self):
+"""Simply run ffmpeg and only care about the last stderr, stdout and the retcode"""
+stdout, stderr = self.ffmpeg_proc.communicate_or_kill()
+retcode = self.ffmpeg_proc.returncode
+return stdout, stderr, retcode
+
+def _track_ffmpeg_progress(self):
+""" Track ffmpeg progress in a non blocking way using queues"""
+self._start_time = time.time()
+# args needed to track ffmpeg progress from stdout
+self._duration_to_track, self._total_duration = self._compute_duration_to_track()
+self._total_filesize = self._compute_total_filesize(self._duration_to_track, self._total_duration)
+self._status = {
+'filename': self._ffmpeg_args[-3].split(':')[-1],
+'status': 'ffmpeg_running',
+'total_bytes': self._total_filesize,
+'elapsed': 0,
+'outputted': 0,
+}
+
+out_listener = Thread(
+target=self._enqueue_lines,
+args=(self.ffmpeg_proc.stdout, self._stdout_queue),
+daemon=True,
+)
+err_listener = Thread(
+target=self._enqueue_lines,
+args=(self.ffmpeg_proc.stderr, self._stderr_queue),
+daemon=True,
+)
+out_listener.start()
+err_listener.start()
+
+retcode = self._wait_for_ffmpeg()
+
+self._status.update({
+'status': 'finished',
+'outputted': self._total_filesize,
+})
+time.sleep(.5)  # Needed if ffmpeg didn't release the file in time for yt-dlp to change its name
+return self._streams[0], self._streams[1], retcode
+
+@staticmethod
+def _enqueue_lines(out, queue):
+for line in iter(out.readline, ''):
+queue.put(line.rstrip())
+out.close()
+
+def _save_stream(self, lines, to_stderr=False):
+if not lines:
+return
+self._streams[to_stderr] += lines
+
+self.ydl.to_screen('\r', skip_eol=True)
+for msg in lines.splitlines():
+if msg.strip():
+self.ydl.write_debug(f'ffmpeg: {msg}')
+
+def _handle_lines(self):
+if not self._stdout_queue.empty():
+stdout_line = self._stdout_queue.get_nowait()
+self._stdout_buffer += stdout_line + '\n'
+self._parse_ffmpeg_output()
+
+if not self._stderr_queue.empty():
+stderr_line = self._stderr_queue.get_nowait()
+self._stderr_buffer += stderr_line
+
+def _wait_for_ffmpeg(self):
+retcode = self.ffmpeg_proc.poll()
+while retcode is None:
+time.sleep(.01)
+self._handle_lines()
+self._status.update({
+'elapsed': time.time() - self._start_time,
+})
+self._hook_progress(self._status, self._info_dict)
+retcode = self.ffmpeg_proc.poll()
+return retcode
+
+def _parse_ffmpeg_output(self):
+ffmpeg_prog_infos = re.match(self._progress_pattern, self._stdout_buffer)
+if not ffmpeg_prog_infos:
+return
+eta_seconds = self._compute_eta(ffmpeg_prog_infos, self._duration_to_track)
+bitrate_int = self._compute_bitrate(ffmpeg_prog_infos.group('bitrate'))
+# Not using ffmpeg 'total_size' value as it's imprecise and gives progress percentage over 100
+out_time_second = int_or_none(ffmpeg_prog_infos.group('out_time_us')) // 1_000_000
+try:
+outputted_bytes_int = int_or_none(out_time_second / self._duration_to_track * self._total_filesize)
+except ZeroDivisionError:
+outputted_bytes_int = 0
+self._status.update({
+'outputted': outputted_bytes_int,
+'speed': bitrate_int,
+'eta': eta_seconds,
+})
+self._hook_progress(self._status, self._info_dict)
+self._stderr_buffer = re.sub(r'=\s+', '=', self._stderr_buffer)
+print(self._stdout_buffer, file=sys.stdout, end='')
+print(self._stderr_buffer, file=sys.stderr)
+self._stdout_buffer = ''
+self._stderr_buffer = ''
+
+def _compute_total_filesize(self, duration_to_track, total_duration):
+if not total_duration:
+return 0
+filesize = self._info_dict.get('filesize')
+if not filesize:
+filesize = self._info_dict.get('filesize_approx', 0)
+return filesize * duration_to_track // total_duration
+
+def _compute_duration_to_track(self):
+duration = self._info_dict.get('duration')
+if not duration:
+return 0, 0
+
+start_time, end_time = 0, duration
+for i, arg in enumerate(self._ffmpeg_args[:-1]):
+next_arg_is_a_timestamp = re.match(r'(?P<at>(-ss|-sseof|-to))', arg)
+this_arg_is_a_timestamp = re.match(r'(?P<at>(-ss|-sseof|-to))=(?P<timestamp>\d+)', arg)
+if not (next_arg_is_a_timestamp or this_arg_is_a_timestamp):
+continue
+elif next_arg_is_a_timestamp:
+timestamp_seconds = self.ffmpeg_time_string_to_seconds(self._ffmpeg_args[i + 1])
+else:
+timestamp_seconds = self.ffmpeg_time_string_to_seconds(this_arg_is_a_timestamp.group('timestamp'))
+if next_arg_is_a_timestamp.group('at') == '-ss':
+start_time = timestamp_seconds
+elif next_arg_is_a_timestamp.group('at') == '-sseof':
+start_time = end_time - timestamp_seconds
+elif next_arg_is_a_timestamp.group('at') == '-to':
+end_time = timestamp_seconds
+
+duration_to_track = end_time - start_time
+if duration_to_track >= 0:
+return duration_to_track, duration
+return 0, duration
+
+@staticmethod
+def _compute_eta(ffmpeg_prog_infos, duration_to_track):
+try:
+speed = float_or_none(ffmpeg_prog_infos.group('speed')[:-1])
+out_time_second = int_or_none(ffmpeg_prog_infos.group('out_time_us')) // 1_000_000
+eta_seconds = (duration_to_track - out_time_second) // speed
+except (TypeError, ZeroDivisionError):
+eta_seconds = 0
+return eta_seconds
+
+@staticmethod
+def ffmpeg_time_string_to_seconds(time_string):
+ffmpeg_time_seconds = 0
+hms_parsed = re.match(r'((?P<Hour>\d+):)?((?P<Minute>\d+):)?(?P<Second>\d+)(\.(?P<float>\d+))?', time_string)
+smu_parse = re.match(r'(?P<Time>\d+)(?P<Unit>[mu]?s)', time_string)
+if hms_parsed:
+if hms_parsed.group('Hour'):
+ffmpeg_time_seconds += 3600 * int_or_none(hms_parsed.group('Hour'))
+if hms_parsed.group('Minute'):
+ffmpeg_time_seconds += 60 * int_or_none(hms_parsed.group('Minute'))
+ffmpeg_time_seconds += int_or_none(hms_parsed.group('Second'))
+if hms_parsed.group('float'):
+float_part = hms_parsed.group('float')
+ffmpeg_time_seconds += int_or_none(float_part) / (10 ** len(float_part))
+elif smu_parse:
+ffmpeg_time_seconds = int_or_none(smu_parse.group('Time'))
+prefix_and_unit = smu_parse.group('Unit')
+if prefix_and_unit == 'ms':
+ffmpeg_time_seconds /= 1_000
+elif prefix_and_unit == 'us':
+ffmpeg_time_seconds /= 1_000_000
+return ffmpeg_time_seconds
+
+@staticmethod
+def _compute_bitrate(bitrate):
+bitrate_str = re.match(r'(?P<Integer>\d+)(\.(?P<float>\d+))?(?P<Prefix>[gmk])?bits/s', bitrate)
+try:
+no_prefix_bitrate = int_or_none(bitrate_str.group('Integer'))
+if bitrate_str.group('float'):
+float_part = bitrate_str.group('float')
+no_prefix_bitrate += int_or_none(float_part) / (10 ** len(float_part))
+if bitrate_str.group('Prefix'):
+unit_prefix = bitrate_str.group('Prefix')
+if unit_prefix == 'g':
+no_prefix_bitrate *= 1_000_000_000
+elif unit_prefix == 'm':
+no_prefix_bitrate *= 1_000_000
+elif unit_prefix == 'k':
+no_prefix_bitrate *= 1_000
+except (TypeError, AttributeError):
+return 0
+return no_prefix_bitrate
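For reference, a hypothetical '-progress pipe:1' block of the shape that _progress_pattern above is written to match (values are illustrative, not from a real run), and the kind of figure the tracker derives from one such block:

sample = (
    'frame=120\n'
    'fps=30.00\n'
    'stream_0_0_q=28.0\n'
    'bitrate= 911.2kbits/s\n'
    'total_size=455680\n'
    'out_time_us=4000000\n'
    'out_time_ms=4000000\n'
    'out_time=00:00:04.000000\n'
    'dup_frames=0\n'
    'drop_frames=0\n'
    'speed=1.99x\n'
    'progress=continue'
)

fields = dict(line.split('=', 1) for line in sample.splitlines())
out_time_seconds = int(fields['out_time_us']) // 1_000_000   # 4 seconds encoded so far
# The tracker then scales out_time_seconds / duration_to_track by the expected
# filesize to estimate the 'outputted' byte count reported to progress hooks.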
@@ -5676,3 +5676,51 @@ class _YDLLogger:
 def stderr(self, message):
 if self._ydl:
 self._ydl.to_stderr(message)
+
+
+class FormatProgressInfos:
+@staticmethod
+def format_seconds(seconds):
+if seconds is None:
+return ' Unknown'
+time = timetuple_from_msec(seconds * 1000)
+if time.hours > 99:
+return '--:--:--'
+if not time.hours:
+return '%02d:%02d' % time[1:-1]
+return '%02d:%02d:%02d' % time[:-1]
+
+format_eta = format_seconds
+
+@staticmethod
+def calc_percent(byte_counter, data_len):
+if data_len is None:
+return None
+return float(byte_counter) / float(data_len) * 100.0
+
+@staticmethod
+def format_percent(percent):
+return ' N/A%' if percent is None else f'{percent:>5.1f}%'
+
+@staticmethod
+def calc_eta(start, now, total, current):
+if total is None:
+return None
+if now is None:
+now = time.time()
+dif = now - start
+if current == 0 or dif < 0.001: # One millisecond
+return None
+rate = float(current) / dif
+return int((float(total) - float(current)) / rate)
+
+@staticmethod
+def calc_speed(start, now, bytes):
+dif = now - start
+if bytes == 0 or dif < 0.001: # One millisecond
+return None
+return float(bytes) / dif
+
+@staticmethod
+def format_speed(speed):
+return ' Unknown B/s' if speed is None else f'{format_bytes(speed):>10s}/s'
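A few worked numbers for the pure helpers above, as a sanity check (the arithmetic follows the code as shown; the snippet is standalone and does not import yt-dlp):

start, now = 0.0, 10.0
current, total = 50_000, 200_000        # bytes downloaded so far / bytes expected

rate = current / (now - start)          # 5000.0 B/s, what calc_speed returns
eta = int((total - current) / rate)     # 30 seconds, what calc_eta returns
percent = current / total * 100.0       # 25.0, what calc_percent returns

# Formatting helpers on such values:
#   format_percent(25.0)  -> ' 25.0%'
#   format_seconds(75)    -> '01:15'   (the hours field is omitted when zero)
#   format_seconds(None)  -> ' Unknown'
#   format_speed(None)    -> ' Unknown B/s'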