mirror of https://github.com/yt-dlp/yt-dlp.git
synced 2024-11-26 17:21:23 +01:00
Compare commits
14 Commits
3cba46ad24
...
987b76650e
| SHA1 |
|---|
| 987b76650e |
| b83ca24eb7 |
| 240a7d43c8 |
| f13df591d4 |
| 8a450b3aeb |
| 4f7c6e90c8 |
| 4458983842 |
| 07d3a44dae |
| de9b89b71f |
| 1b1a31cf6e |
| cb02b582a8 |
| 313b607af4 |
| fe01d245b6 |
| bcec568ea7 |
3 .github/workflows/build.yml vendored

```diff
@@ -504,7 +504,8 @@ jobs:
       - windows32
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/download-artifact@v4
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
         with:
           path: artifact
           pattern: build-bin-*
```
17 .github/workflows/release-master.yml vendored

```diff
@@ -28,3 +28,20 @@ jobs:
       actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.MASTER_PYPI_PROJECT != ''
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true
```
17 .github/workflows/release-nightly.yml vendored

```diff
@@ -41,3 +41,20 @@ jobs:
       actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.NIGHTLY_PYPI_PROJECT != ''
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true
```
19 .github/workflows/release.yml vendored

```diff
@@ -2,10 +2,6 @@ name: Release
 on:
   workflow_call:
     inputs:
-      prerelease:
-        required: false
-        default: true
-        type: boolean
       source:
         required: false
         default: ''
@@ -18,6 +14,10 @@ on:
         required: false
         default: ''
         type: string
+      prerelease:
+        required: false
+        default: true
+        type: boolean
   workflow_dispatch:
     inputs:
       source:
@@ -278,11 +278,20 @@ jobs:
           make clean-cache
           python -m build --no-isolation .

+      - name: Upload artifacts
+        if: github.event_name != 'workflow_dispatch'
+        uses: actions/upload-artifact@v4
+        with:
+          name: build-pypi
+          path: |
+            dist/*
+          compression-level: 0
+
       - name: Publish to PyPI
+        if: github.event_name == 'workflow_dispatch'
         uses: pypa/gh-action-pypi-publish@release/v1
         with:
           verbose: true
-          attestations: false # Currently doesn't work w/ reusable workflows (breaks nightly)

   publish:
     needs: [prepare, build]
```
```diff
@@ -52,7 +52,7 @@ default = [
     "pycryptodomex",
     "requests>=2.32.2,<3",
     "urllib3>=1.26.17,<3",
-    "websockets>=13.0",
+    "websockets>=13.0,<14",
 ]
 curl-cffi = [
     "curl-cffi==0.5.10; os_name=='nt' and implementation_name=='cpython'",
```
```diff
@@ -120,19 +120,22 @@ class TestFFmpegFD(unittest.TestCase):
         downloader._call_downloader('test', {**TEST_INFO, 'ext': 'mp4'})
         self.assertEqual(self._args, [
             'ffmpeg', '-y', '-hide_banner', '-i', 'http://www.example.com/',
-            '-c', 'copy', '-f', 'mp4', 'file:test'])
+            '-c', 'copy', '-f', 'mp4', 'file:test', '-progress', 'pipe:1'])

         # Test cookies arg is added
         ydl.cookiejar.set_cookie(http.cookiejar.Cookie(**TEST_COOKIE))
         downloader._call_downloader('test', {**TEST_INFO, 'ext': 'mp4'})
         self.assertEqual(self._args, [
-            'ffmpeg', '-y', '-hide_banner', '-cookies', 'test=ytdlp; path=/; domain=.example.com;\r\n',
-            '-i', 'http://www.example.com/', '-c', 'copy', '-f', 'mp4', 'file:test'])
+            'ffmpeg', '-y', '-hide_banner', '-cookies',
+            'test=ytdlp; path=/; domain=.example.com;\r\n', '-i',
+            'http://www.example.com/', '-c', 'copy', '-f', 'mp4',
+            'file:test', '-progress', 'pipe:1'])

         # Test with non-url input (ffmpeg reads from stdin '-' for websockets)
         downloader._call_downloader('test', {'url': 'x', 'ext': 'mp4'})
         self.assertEqual(self._args, [
-            'ffmpeg', '-y', '-hide_banner', '-i', 'x', '-c', 'copy', '-f', 'mp4', 'file:test'])
+            'ffmpeg', '-y', '-hide_banner', '-i', 'x', '-c', 'copy', '-f',
+            'mp4', 'file:test', '-progress', 'pipe:1'])


 if __name__ == '__main__':
```
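
These expectations change because FFmpegFD now appends `-progress pipe:1`, which makes ffmpeg write machine-readable key=value progress blocks to its stdout. A minimal standalone sketch (not part of the branch) of what such a block looks like and how it can be split into fields; the values are invented, the field names are ffmpeg's, and the frame/fps fields only appear for video streams, which is why the tracker's regex later in this comparison marks them optional:

```python
# One status block as ffmpeg prints it on stdout under `-progress pipe:1`.
# Field names are ffmpeg's; the values below are invented for illustration.
SAMPLE_BLOCK = (
    'bitrate= 160.0kbits/s\n'
    'total_size=1048576\n'
    'out_time_us=52428800\n'
    'out_time_ms=52428800\n'
    'out_time=00:00:52.428800\n'
    'dup_frames=0\n'
    'drop_frames=0\n'
    'speed=1.01x\n'
    'progress=continue\n'  # becomes `progress=end` on the final block
)


def parse_progress_block(block):
    """Split one key=value progress block into a dict."""
    fields = {}
    for line in block.splitlines():
        if '=' in line:
            key, value = line.split('=', 1)
            fields[key.strip()] = value.strip()
    return fields


fields = parse_progress_block(SAMPLE_BLOCK)
print(fields['out_time'], fields['speed'])  # 00:00:52.428800 1.01x
```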
```diff
@@ -24,7 +24,7 @@ try:
     from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5  # noqa: F401
     from Crypto.Hash import CMAC, SHA1  # noqa: F401
     from Crypto.PublicKey import RSA  # noqa: F401
-except ImportError:
+except (ImportError, OSError):
    __version__ = f'broken {__version__}'.strip()

```
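
The broader except clause accounts for the fact that importing a compiled extension can also fail with `OSError` (for example when a bundled shared library is present but cannot be loaded), not only with `ImportError`. A generic sketch of the same guarded-import pattern, using a hypothetical optional dependency:

```python
try:
    import some_optional_native_module  # hypothetical optional dependency
except (ImportError, OSError) as error:
    # ImportError: the package is not installed at all.
    # OSError: the package is installed but its native library failed to
    # load (missing or incompatible .so/.dll), which import can also raise.
    some_optional_native_module = None
    print(f'optional dependency unavailable: {error}')
```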
```diff
@@ -16,6 +16,7 @@ from ..minicurses import (
 from ..utils import (
     IDENTITY,
     NO_DEFAULT,
+    FormatProgressInfos,
     LockingUnsupportedError,
     Namespace,
     RetryManager,
@@ -26,11 +27,9 @@ from ..utils import (
     format_bytes,
     join_nonempty,
     parse_bytes,
-    remove_start,
     sanitize_open,
     shell_quote,
     timeconvert,
-    timetuple_from_msec,
     try_call,
 )

@@ -120,56 +119,6 @@ class FileDownloader:
     def FD_NAME(cls):
         return re.sub(r'(?<=[a-z])(?=[A-Z])', '_', cls.__name__[:-2]).lower()

-    @staticmethod
-    def format_seconds(seconds):
-        if seconds is None:
-            return ' Unknown'
-        time = timetuple_from_msec(seconds * 1000)
-        if time.hours > 99:
-            return '--:--:--'
-        return '%02d:%02d:%02d' % time[:-1]
-
-    @classmethod
-    def format_eta(cls, seconds):
-        return f'{remove_start(cls.format_seconds(seconds), "00:"):>8s}'
-
-    @staticmethod
-    def calc_percent(byte_counter, data_len):
-        if data_len is None:
-            return None
-        return float(byte_counter) / float(data_len) * 100.0
-
-    @staticmethod
-    def format_percent(percent):
-        return ' N/A%' if percent is None else f'{percent:>5.1f}%'
-
-    @classmethod
-    def calc_eta(cls, start_or_rate, now_or_remaining, total=NO_DEFAULT, current=NO_DEFAULT):
-        if total is NO_DEFAULT:
-            rate, remaining = start_or_rate, now_or_remaining
-            if None in (rate, remaining):
-                return None
-            return int(float(remaining) / rate)
-
-        start, now = start_or_rate, now_or_remaining
-        if total is None:
-            return None
-        if now is None:
-            now = time.time()
-        rate = cls.calc_speed(start, now, current)
-        return rate and int((float(total) - float(current)) / rate)
-
-    @staticmethod
-    def calc_speed(start, now, bytes):
-        dif = now - start
-        if bytes == 0 or dif < 0.001:  # One millisecond
-            return None
-        return float(bytes) / dif
-
-    @staticmethod
-    def format_speed(speed):
-        return ' Unknown B/s' if speed is None else f'{format_bytes(speed):>10s}/s'
-
     @staticmethod
     def format_retries(retries):
         return 'inf' if retries == float('inf') else int(retries)
@@ -348,18 +297,16 @@ class FileDownloader:
                     return tmpl
             return default

-        _format_bytes = lambda k: f'{format_bytes(s.get(k)):>10s}'
-
         if s['status'] == 'finished':
             if self.params.get('noprogress'):
                 self.to_screen('[download] Download completed')
             speed = try_call(lambda: s['total_bytes'] / s['elapsed'])
             s.update({
                 'speed': speed,
-                '_speed_str': self.format_speed(speed).strip(),
-                '_total_bytes_str': _format_bytes('total_bytes'),
-                '_elapsed_str': self.format_seconds(s.get('elapsed')),
-                '_percent_str': self.format_percent(100),
+                '_speed_str': FormatProgressInfos.format_speed(speed).strip(),
+                '_total_bytes_str': format_bytes(s.get('total_bytes')),
+                '_elapsed_str': FormatProgressInfos.format_seconds(s.get('elapsed')),
+                '_percent_str': FormatProgressInfos.format_percent(100),
             })
             self._report_progress_status(s, join_nonempty(
                 '100%%',
@@ -378,16 +325,16 @@ class FileDownloader:
             self._progress_delta_time += update_delta

         s.update({
-            '_eta_str': self.format_eta(s.get('eta')).strip(),
-            '_speed_str': self.format_speed(s.get('speed')),
-            '_percent_str': self.format_percent(try_call(
+            '_eta_str': FormatProgressInfos.format_eta(s.get('eta')),
+            '_speed_str': FormatProgressInfos.format_speed(s.get('speed')),
+            '_percent_str': FormatProgressInfos.format_percent(try_call(
                 lambda: 100 * s['downloaded_bytes'] / s['total_bytes'],
                 lambda: 100 * s['downloaded_bytes'] / s['total_bytes_estimate'],
                 lambda: s['downloaded_bytes'] == 0 and 0)),
-            '_total_bytes_str': _format_bytes('total_bytes'),
-            '_total_bytes_estimate_str': _format_bytes('total_bytes_estimate'),
-            '_downloaded_bytes_str': _format_bytes('downloaded_bytes'),
-            '_elapsed_str': self.format_seconds(s.get('elapsed')),
+            '_total_bytes_str': format_bytes(s.get('total_bytes')),
+            '_total_bytes_estimate_str': format_bytes(s.get('total_bytes_estimate')),
+            '_downloaded_bytes_str': format_bytes(s.get('downloaded_bytes')),
+            '_elapsed_str': FormatProgressInfos.format_seconds(s.get('elapsed')),
         })

         msg_template = with_fields(
```
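
The helpers dropped from FileDownloader are not gone: they reappear as the FormatProgressInfos class added to the utils module in the final hunk of this comparison, so the strings built in report_progress keep their shape. A small standalone illustration of those string formats; the real helpers use yt_dlp.utils.format_bytes, for which a stand-in is used here since only the padding behaviour matters:

```python
# Re-statement of two helpers as they appear later in this comparison
# (yt_dlp.utils.FormatProgressInfos), only to show the resulting strings.
def format_percent(percent):
    return ' N/A%' if percent is None else f'{percent:>5.1f}%'


def format_speed(speed, format_bytes=lambda n: f'{n / 1024:.2f}KiB'):
    # Stand-in for yt_dlp.utils.format_bytes, for illustration only.
    return ' Unknown B/s' if speed is None else f'{format_bytes(speed):>10s}/s'


print(repr(format_percent(None)))    # ' N/A%'
print(repr(format_percent(42.123)))  # ' 42.1%'
print(repr(format_speed(None)))      # ' Unknown B/s'
print(repr(format_speed(1536)))      # '   1.50KiB/s'
```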
```diff
@@ -4,14 +4,17 @@ import json
 import os
 import re
 import subprocess
-import sys
 import tempfile
 import time
 import uuid

 from .fragment import FragmentFD
 from ..networking import Request
-from ..postprocessor.ffmpeg import EXT_TO_OUT_FORMATS, FFmpegPostProcessor
+from ..postprocessor.ffmpeg import (
+    EXT_TO_OUT_FORMATS,
+    FFmpegPostProcessor,
+    FFmpegProgressTracker,
+)
 from ..utils import (
     Popen,
     RetryManager,
@@ -621,26 +624,23 @@ class FFmpegFD(ExternalFD):

         args = [encodeArgument(opt) for opt in args]
         args.append(encodeFilename(ffpp._ffmpeg_filename_argument(tmpfilename), True))
+        args += ['-progress', 'pipe:1']
         self._debug_cmd(args)

         piped = any(fmt['url'] in ('-', 'pipe:') for fmt in selected_formats)
-        with Popen(args, stdin=subprocess.PIPE, env=env) as proc:
-            if piped:
-                self.on_process_started(proc, proc.stdin)
-            try:
-                retval = proc.wait()
-            except BaseException as e:
-                # subprocces.run would send the SIGKILL signal to ffmpeg and the
-                # mp4 file couldn't be played, but if we ask ffmpeg to quit it
-                # produces a file that is playable (this is mostly useful for live
-                # streams). Note that Windows is not affected and produces playable
-                # files (see https://github.com/ytdl-org/youtube-dl/issues/8300).
-                if isinstance(e, KeyboardInterrupt) and sys.platform != 'win32' and not piped:
-                    proc.communicate_or_kill(b'q')
-                else:
-                    proc.kill(timeout=None)
-                raise
-            return retval
+        self._debug_cmd(args)
+        ffmpeg_progress_tracker = FFmpegProgressTracker(info_dict, args, self._ffmpeg_hook)
+        proc = ffmpeg_progress_tracker.ffmpeg_proc
+        if piped:
+            self.on_process_started(proc, proc.stdin)
+        _, _, return_code = ffmpeg_progress_tracker.run_ffmpeg_subprocess()
+        return return_code
+
+    def _ffmpeg_hook(self, status, info_dict):
+        status['downloaded_bytes'] = status.get('outputted', 0)
+        if status.get('status') == 'ffmpeg_running':
+            status['status'] = 'downloading'
+        self._hook_progress(status, info_dict)


 class AVconvFD(FFmpegFD):
```
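
Since `_ffmpeg_hook` copies the tracker's `outputted` counter into `downloaded_bytes` and relabels `ffmpeg_running` as `downloading`, ffmpeg-driven downloads now report through the same progress-hook interface as the native downloaders. A hedged sketch of observing this from embedding code via yt-dlp's existing `progress_hooks` option; the URL and options are placeholders, and an HLS or live URL that selects the ffmpeg downloader would be needed to exercise FFmpegFD specifically:

```python
import yt_dlp


def show_progress(status):
    # Called by yt-dlp's downloaders; with this change FFmpegFD reports here too.
    if status['status'] == 'downloading':
        print(status.get('downloaded_bytes'), status.get('speed'), status.get('eta'))
    elif status['status'] == 'finished':
        print('done:', status.get('filename'))


ydl_opts = {
    'progress_hooks': [show_progress],
}

with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    ydl.download(['https://example.com/stream'])  # placeholder URL
```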
```diff
@@ -11,6 +11,7 @@ from ..networking.exceptions import (
 )
 from ..utils import (
     ContentTooShortError,
+    FormatProgressInfos,
     RetryManager,
     ThrottledDownload,
     XAttrMetadataError,
@@ -298,11 +299,11 @@ class HttpFD(FileDownloader):
                 before = after

             # Progress message
-            speed = self.calc_speed(start, now, byte_counter - ctx.resume_len)
+            speed = FormatProgressInfos.calc_speed(start, now, byte_counter - ctx.resume_len)
             if ctx.data_len is None:
                 eta = None
             else:
-                eta = self.calc_eta(start, time.time(), ctx.data_len - ctx.resume_len, byte_counter - ctx.resume_len)
+                eta = FormatProgressInfos.calc_eta(start, time.time(), ctx.data_len - ctx.resume_len, byte_counter - ctx.resume_len)

             self._hook_progress({
                 'status': 'downloading',
```
```diff
@@ -5,6 +5,7 @@ import time

 from .common import FileDownloader
 from ..utils import (
+    FormatProgressInfos,
     Popen,
     check_executable,
     encodeArgument,
@@ -50,8 +51,8 @@ class RtmpFD(FileDownloader):
                     resume_percent = percent
                     resume_downloaded_data_len = downloaded_data_len
                 time_now = time.time()
-                eta = self.calc_eta(start, time_now, 100 - resume_percent, percent - resume_percent)
-                speed = self.calc_speed(start, time_now, downloaded_data_len - resume_downloaded_data_len)
+                eta = FormatProgressInfos.calc_eta(start, time_now, 100 - resume_percent, percent - resume_percent)
+                speed = FormatProgressInfos.calc_speed(start, time_now, downloaded_data_len - resume_downloaded_data_len)
                 data_len = None
                 if percent > 0:
                     data_len = int(downloaded_data_len * 100 / percent)
```
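
RtmpFD feeds percent points rather than byte counts into these helpers; the arithmetic is unit-agnostic, so the result is still seconds. A worked example of the percent-based call, using the calc_eta definition added at the end of this comparison (numbers invented):

```python
# start = 0 s, time_now = 10 s
# total   = 100 - resume_percent     = 80 percent points still to cover
# current = percent - resume_percent = 20 percent points already covered
# rate = current / (time_now - start) = 20 / 10 = 2 points per second
# eta  = (total - current) / rate     = (80 - 20) / 2 = 30 seconds
print(int((80 - 20) / (20 / 10)))  # 30
```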
```diff
@@ -1,15 +1,27 @@
 import functools
 import json
 import os
+import sys

+from ..minicurses import (
+    BreaklineStatusPrinter,
+    MultilineLogger,
+    MultilinePrinter,
+    QuietMultilinePrinter,
+)
 from ..networking import Request
 from ..networking.exceptions import HTTPError, network_exceptions
 from ..utils import (
+    FormatProgressInfos,
+    Namespace,
     PostProcessingError,
     RetryManager,
     _configuration_args,
     deprecation_warning,
     encodeFilename,
+    format_bytes,
+    join_nonempty,
+    try_call,
 )

@@ -56,7 +68,9 @@ class PostProcessor(metaclass=PostProcessorMetaClass):
         self._progress_hooks = []
         self.add_progress_hook(self.report_progress)
         self.set_downloader(downloader)
+        self._out_files = self.set_out_files()
         self.PP_NAME = self.pp_key()
+        self._prepare_multiline_status()

     @classmethod
     def pp_key(cls):
@@ -102,6 +116,11 @@ class PostProcessor(metaclass=PostProcessorMetaClass):
             return self._downloader.params.get(name, default, *args, **kwargs)
         return default

+    def set_out_files(self):
+        if not self._downloader:
+            return None
+        return getattr(self._downloader, '_out_files', None) or self._downloader.ydl._out_files
+
     def set_downloader(self, downloader):
         """Sets the downloader for this PP."""
         self._downloader = downloader
@@ -173,25 +192,116 @@ class PostProcessor(metaclass=PostProcessorMetaClass):
         # See YoutubeDl.py (search for postprocessor_hooks) for a description of this interface
         self._progress_hooks.append(ph)

-    def report_progress(self, s):
-        s['_default_template'] = '%(postprocessor)s %(status)s' % s  # noqa: UP031
-        if not self._downloader:
-            return
+    def report_destination(self, filename):
+        """Report destination filename."""
+        self.to_screen('[processing] Destination: ' + filename)
+
+    def _prepare_multiline_status(self, lines=1):
+        if self._downloader:
+            if self._downloader.params.get('noprogress'):
+                self._multiline = QuietMultilinePrinter()
+            elif self._downloader.params.get('logger'):
+                self._multiline = MultilineLogger(self._downloader.params['logger'], lines)
+            elif self._downloader.params.get('progress_with_newline'):
+                self._multiline = BreaklineStatusPrinter(self._downloader._out_files.out, lines)
+            elif hasattr(self._downloader, '_out_files'):
+                self._multiline = MultilinePrinter(self._downloader._out_files.out, lines, not self._downloader.params.get('quiet'))
+            else:
+                self._multiline = MultilinePrinter(sys.stdout, lines, not self._downloader.params.get('quiet'))
+            self._multiline.allow_colors = self._multiline._HAVE_FULLCAP and not self._downloader.params.get('no_color')
+        else:
+            self._multiline = MultilinePrinter(sys.stdout, lines, True)
+            self._multiline.allow_colors = self._multiline._HAVE_FULLCAP
+
+    def _finish_multiline_status(self):
+        self._multiline.end()
+
+    ProgressStyles = Namespace(
+        processed_bytes='light blue',
+        percent='light blue',
+        eta='yellow',
+        speed='green',
+        elapsed='bold white',
+        total_bytes='',
+        total_bytes_estimate='',
+    )
+
+    def _report_progress_status(self, s, default_template):
+        for name, style in self.ProgressStyles.items_:
+            name = f'_{name}_str'
+            if name not in s:
+                continue
+            s[name] = self._format_progress(s[name], style)
+        s['_default_template'] = default_template % s

         progress_dict = s.copy()
         progress_dict.pop('info_dict')
         progress_dict = {'info': s['info_dict'], 'progress': progress_dict}

-        progress_template = self.get_param('progress_template', {})
-        tmpl = progress_template.get('postprocess')
-        if tmpl:
-            self._downloader.to_screen(
-                self._downloader.evaluate_outtmpl(tmpl, progress_dict), quiet=False)
+        progress_template = self._downloader.params.get('progress_template', {})
+        self._multiline.print_at_line(self._downloader.evaluate_outtmpl(
+            progress_template.get('process') or '[processing] %(progress._default_template)s',
+            progress_dict), s.get('progress_idx') or 0)

         self._downloader.to_console_title(self._downloader.evaluate_outtmpl(
-            progress_template.get('postprocess-title') or 'yt-dlp %(progress._default_template)s',
+            progress_template.get('download-title') or 'yt-dlp %(progress._default_template)s',
             progress_dict))

+    def _format_progress(self, *args, **kwargs):
+        return self._downloader._format_text(
+            self._multiline.stream, self._multiline.allow_colors, *args, **kwargs)
+
+    def report_progress(self, s):
+        def with_fields(*tups, default=''):
+            for *fields, tmpl in tups:
+                if all(s.get(f) is not None for f in fields):
+                    return tmpl
+            return default
+
+        if not self._downloader:
+            return
+
+        if s['status'] == 'finished':
+            if self._downloader.params.get('noprogress'):
+                self.to_screen('[processing] Download completed')
+            speed = try_call(lambda: s['total_bytes'] / s['elapsed'])
+            s.update({
+                'speed': speed,
+                '_speed_str': FormatProgressInfos.format_speed(speed).strip(),
+                '_total_bytes_str': format_bytes(s.get('total_bytes')),
+                '_elapsed_str': FormatProgressInfos.format_seconds(s.get('elapsed')),
+                '_percent_str': FormatProgressInfos.format_percent(100),
+            })
+            self._report_progress_status(s, join_nonempty(
+                '100%%',
+                with_fields(('total_bytes', 'of %(_total_bytes_str)s')),
+                with_fields(('elapsed', 'in %(_elapsed_str)s')),
+                with_fields(('speed', 'at %(_speed_str)s')),
+                delim=' '))
+
+        if s['status'] != 'processing':
+            return
+
+        s.update({
+            '_eta_str': FormatProgressInfos.format_eta(s.get('eta')),
+            '_speed_str': FormatProgressInfos.format_speed(s.get('speed')),
+            '_percent_str': FormatProgressInfos.format_percent(try_call(
+                lambda: 100 * s['processed_bytes'] / s['total_bytes'],
+                lambda: 100 * s['processed_bytes'] / s['total_bytes_estimate'],
+                lambda: s['processed_bytes'] == 0 and 0)),
+            '_total_bytes_str': format_bytes(s.get('total_bytes')),
+            '_total_bytes_estimate_str': format_bytes(s.get('total_bytes_estimate')),
+            '_processed_bytes_str': format_bytes(s.get('processed_bytes')),
+            '_elapsed_str': FormatProgressInfos.format_seconds(s.get('elapsed')),
+        })
+
+        msg_template = with_fields(
+            ('total_bytes', '%(_percent_str)s of %(_total_bytes_str)s at %(_speed_str)s ETA %(_eta_str)s'),
+            ('processed_bytes', 'elapsed', '%(_processed_bytes_str)s at %(_speed_str)s (%(_elapsed_str)s)'),
+            ('processed_bytes', '%(_processed_bytes_str)s at %(_speed_str)s'),
+            default='%(_percent_str)s at %(_speed_str)s ETA %(_eta_str)s')
+
+        self._report_progress_status(s, msg_template)
+
     def _retry_download(self, err, count, retries):
         # While this is not an extractor, it behaves similar to one and
         # so obey extractor_retries and "--retry-sleep extractor"
```
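
With this rewrite, post-processors emit a `processing` status carrying `processed_bytes`, `speed` and `eta`, rendered on a MultilinePrinter rather than printed once per template. A hedged sketch of consuming those events through yt-dlp's existing `postprocessor_hooks` option; note that `started` and `finished` are the statuses emitted by released yt-dlp, `processing` is introduced by this branch, and the URL is a placeholder:

```python
import yt_dlp


def on_postprocess(status):
    if status['status'] == 'processing':  # status added by this branch
        print(status.get('postprocessor'), status.get('processed_bytes'), status.get('eta'))
    elif status['status'] == 'finished':
        print(status.get('postprocessor'), 'finished')


ydl_opts = {
    'postprocessor_hooks': [on_postprocess],
    'postprocessors': [{'key': 'FFmpegVideoConvertor', 'preferedformat': 'mp4'}],
}

with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    ydl.download(['https://example.com/video'])  # placeholder URL
```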
```diff
@@ -6,7 +6,10 @@ import json
 import os
 import re
 import subprocess
+import sys
 import time
+from queue import Queue
+from threading import Thread

 from .common import PostProcessor
 from ..compat import imghdr
@@ -24,6 +27,7 @@ from ..utils import (
     encodeFilename,
     filter_dict,
     float_or_none,
+    int_or_none,
     is_outdated_version,
     orderedSet,
     prepend_extension,
@@ -203,7 +207,7 @@ class FFmpegPostProcessor(PostProcessor):

     @property
     def available(self):
-        return self.basename is not None
+        return self._ffmpeg_location.get() or self.basename is not None

     @property
     def executable(self):
@@ -327,11 +331,9 @@ class FFmpegPostProcessor(PostProcessor):
         return abs(d1 - d2) > tolerance

     def run_ffmpeg_multiple_files(self, input_paths, out_path, opts, **kwargs):
-        return self.real_run_ffmpeg(
-            [(path, []) for path in input_paths],
-            [(out_path, opts)], **kwargs)
+        return self.real_run_ffmpeg([(path, []) for path in input_paths], [(out_path, opts)], **kwargs)

-    def real_run_ffmpeg(self, input_path_opts, output_path_opts, *, expected_retcodes=(0,)):
+    def real_run_ffmpeg(self, input_path_opts, output_path_opts, *, expected_retcodes=(0,), info_dict=None):
         self.check_version()

         oldest_mtime = min(
@@ -351,19 +353,20 @@ class FFmpegPostProcessor(PostProcessor):
             args += self._configuration_args(self.basename, keys)
             if name == 'i':
                 args.append('-i')
-            return (
-                [encodeArgument(arg) for arg in args]
-                + [encodeFilename(self._ffmpeg_filename_argument(file), True)])
+            return [encodeArgument(arg) for arg in args] + [encodeFilename(self._ffmpeg_filename_argument(file), True)]

         for arg_type, path_opts in (('i', input_path_opts), ('o', output_path_opts)):
             cmd += itertools.chain.from_iterable(
                 make_args(path, list(opts), arg_type, i + 1)
                 for i, (path, opts) in enumerate(path_opts) if path)

+        cmd += ['-progress', 'pipe:1']
         self.write_debug(f'ffmpeg command line: {shell_quote(cmd)}')
-        _, stderr, returncode = Popen.run(
-            cmd, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
-        if returncode not in variadic(expected_retcodes):
+        ffmpeg_progress_tracker = FFmpegProgressTracker(info_dict, cmd, self._ffmpeg_hook, self._downloader)
+        _, stderr, return_code = ffmpeg_progress_tracker.run_ffmpeg_subprocess()
+        if return_code not in variadic(expected_retcodes):
+            stderr = stderr.strip()
             self.write_debug(stderr)
             raise FFmpegPostProcessorError(stderr.strip().splitlines()[-1])
         for out_path, _ in output_path_opts:
@@ -371,7 +374,7 @@ class FFmpegPostProcessor(PostProcessor):
             self.try_utime(out_path, oldest_mtime, oldest_mtime)
         return stderr

-    def run_ffmpeg(self, path, out_path, opts, **kwargs):
+    def run_ffmpeg(self, path, out_path, opts, informations=None, **kwargs):
         return self.run_ffmpeg_multiple_files([path], out_path, opts, **kwargs)

     @staticmethod
@@ -436,6 +439,12 @@ class FFmpegPostProcessor(PostProcessor):
                 if directive in opts:
                     yield f'{directive} {opts[directive]}\n'

+    def _ffmpeg_hook(self, status, info_dict):
+        status['processed_bytes'] = status.get('outputted', 0)
+        if status.get('status') == 'ffmpeg_running':
+            status['status'] = 'processing'
+        self._hook_progress(status, info_dict)
+

 class FFmpegExtractAudioPP(FFmpegPostProcessor):
     COMMON_AUDIO_EXTS = (*MEDIA_EXTENSIONS.common_audio, 'wma')
@@ -470,14 +479,14 @@ class FFmpegExtractAudioPP(FFmpegPostProcessor):
             return ['-vbr', f'{int(q)}']
         return ['-q:a', f'{q}']

-    def run_ffmpeg(self, path, out_path, codec, more_opts):
+    def run_ffmpeg(self, path, out_path, codec, more_opts, informations=None):
         if codec is None:
             acodec_opts = []
         else:
             acodec_opts = ['-acodec', codec]
         opts = ['-vn', *acodec_opts, *more_opts]
         try:
-            FFmpegPostProcessor.run_ffmpeg(self, path, out_path, opts)
+            FFmpegPostProcessor.run_ffmpeg(self, path, out_path, opts, informations)
         except FFmpegPostProcessorError as err:
             raise PostProcessingError(f'audio conversion failed: {err.msg}')

@@ -528,7 +537,7 @@ class FFmpegExtractAudioPP(FFmpegPostProcessor):
             return [], information

         self.to_screen(f'Destination: {new_path}')
-        self.run_ffmpeg(path, temp_path, acodec, more_opts)
+        self.run_ffmpeg(path, temp_path, acodec, more_opts, information)

         os.replace(path, orig_path)
         os.replace(temp_path, new_path)
@@ -571,7 +580,7 @@ class FFmpegVideoConvertorPP(FFmpegPostProcessor):

         outpath = replace_extension(filename, target_ext, source_ext)
         self.to_screen(f'{self._ACTION.title()} video from {source_ext} to {target_ext}; Destination: {outpath}')
-        self.run_ffmpeg(filename, outpath, self._options(target_ext))
+        self.run_ffmpeg(filename, outpath, self._options(target_ext), info)

         info['filepath'] = outpath
         info['format'] = info['ext'] = target_ext
@@ -837,7 +846,7 @@ class FFmpegMergerPP(FFmpegPostProcessor):
             if fmt.get('vcodec') != 'none':
                 args.extend(['-map', f'{i}:v:0'])
         self.to_screen(f'Merging formats into "{filename}"')
-        self.run_ffmpeg_multiple_files(info['__files_to_merge'], temp_filename, args)
+        self.run_ffmpeg_multiple_files(info['__files_to_merge'], temp_filename, args, info_dict=info)
         os.rename(encodeFilename(temp_filename), encodeFilename(filename))
         return info['__files_to_merge'], info

@@ -1007,7 +1016,7 @@ class FFmpegSubtitlesConvertorPP(FFmpegPostProcessor):
             else:
                 sub_filenames.append(srt_file)

-            self.run_ffmpeg(old_file, new_file, ['-f', new_format])
+            self.run_ffmpeg(old_file, new_file, ['-f', new_format], info)

             with open(new_file, encoding='utf-8') as f:
                 subs[lang] = {
@@ -1190,3 +1199,236 @@ class FFmpegConcatPP(FFmpegPostProcessor):
                 'ext': ie_copy['ext'],
             }]
         return files_to_delete, info
+
+
+class FFmpegProgressTracker:
+    def __init__(self, info_dict, ffmpeg_args, hook_progress, ydl=None):
+        self.ydl = ydl
+        self._info_dict = info_dict
+        self._ffmpeg_args = ffmpeg_args
+        self._hook_progress = hook_progress
+        self._stdout_queue, self._stderr_queue = Queue(), Queue()
+        self._streams, self._stderr_buffer, self._stdout_buffer = ['', ''], '', ''
+        self._progress_pattern = re.compile(r'''(?x)
+            (?:
+                frame=\s*(?P<frame>\S+)\n
+                fps=\s*(?P<fps>\S+)\n
+                stream_\d+_\d+_q=\s*(?P<stream_d_d_q>\S+)\n
+            )?
+            bitrate=\s*(?P<bitrate>\S+)\n
+            total_size=\s*(?P<total_size>\S+)\n
+            out_time_us=\s*(?P<out_time_us>\S+)\n
+            out_time_ms=\s*(?P<out_time_ms>\S+)\n
+            out_time=\s*(?P<out_time>\S+)\n
+            dup_frames=\s*(?P<dup_frames>\S+)\n
+            drop_frames=\s*(?P<drop_frames>\S+)\n
+            speed=\s*(?P<speed>\S+)\n
+            progress=\s*(?P<progress>\S+)
+        ''')
+
+        if self.ydl:
+            self.ydl.write_debug(f'ffmpeg command line: {shell_quote(self._ffmpeg_args)}')
+        self.ffmpeg_proc = Popen(self._ffmpeg_args, universal_newlines=True,
+                                 encoding='utf8', stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        self._start_time = time.time()
+
+    def trigger_progress_hook(self, dct):
+        self._status.update(dct)
+        self._hook_progress(self._status, self._info_dict)
+
+    def run_ffmpeg_subprocess(self):
+        if self._info_dict and self.ydl:
+            return self._track_ffmpeg_progress()
+        return self._run_ffmpeg_without_progress_tracking()
+
+    def _run_ffmpeg_without_progress_tracking(self):
+        """Simply run ffmpeg and only care about the last stderr, stdout and the retcode"""
+        stdout, stderr = self.ffmpeg_proc.communicate_or_kill()
+        retcode = self.ffmpeg_proc.returncode
+        return stdout, stderr, retcode
+
+    def _track_ffmpeg_progress(self):
+        """ Track ffmpeg progress in a non blocking way using queues"""
+        self._start_time = time.time()
+        # args needed to track ffmpeg progress from stdout
+        self._duration_to_track, self._total_duration = self._compute_duration_to_track()
+        self._total_filesize = self._compute_total_filesize(self._duration_to_track, self._total_duration)
+        self._status = {
+            'filename': self._ffmpeg_args[-3].split(':')[-1],
+            'status': 'ffmpeg_running',
+            'total_bytes': self._total_filesize,
+            'elapsed': 0,
+            'outputted': 0,
+        }
+
+        out_listener = Thread(
+            target=self._enqueue_lines,
+            args=(self.ffmpeg_proc.stdout, self._stdout_queue),
+            daemon=True,
+        )
+        err_listener = Thread(
+            target=self._enqueue_lines,
+            args=(self.ffmpeg_proc.stderr, self._stderr_queue),
+            daemon=True,
+        )
+        out_listener.start()
+        err_listener.start()
+
+        retcode = self._wait_for_ffmpeg()
+
+        self._status.update({
+            'status': 'finished',
+            'outputted': self._total_filesize,
+        })
+        time.sleep(.5)  # Needed if ffmpeg didn't release the file in time for yt-dlp to change its name
+        return self._streams[0], self._streams[1], retcode
+
+    @staticmethod
+    def _enqueue_lines(out, queue):
+        for line in iter(out.readline, ''):
+            queue.put(line.rstrip())
+        out.close()
+
+    def _save_stream(self, lines, to_stderr=False):
+        if not lines:
+            return
+        self._streams[to_stderr] += lines
+
+        self.ydl.to_screen('\r', skip_eol=True)
+        for msg in lines.splitlines():
+            if msg.strip():
+                self.ydl.write_debug(f'ffmpeg: {msg}')
+
+    def _handle_lines(self):
+        if not self._stdout_queue.empty():
+            stdout_line = self._stdout_queue.get_nowait()
+            self._stdout_buffer += stdout_line + '\n'
+            self._parse_ffmpeg_output()
+
+        if not self._stderr_queue.empty():
+            stderr_line = self._stderr_queue.get_nowait()
+            self._stderr_buffer += stderr_line
+
+    def _wait_for_ffmpeg(self):
+        retcode = self.ffmpeg_proc.poll()
+        while retcode is None:
+            time.sleep(.01)
+            self._handle_lines()
+            self._status.update({
+                'elapsed': time.time() - self._start_time,
+            })
+            self._hook_progress(self._status, self._info_dict)
+            retcode = self.ffmpeg_proc.poll()
+        return retcode
+
+    def _parse_ffmpeg_output(self):
+        ffmpeg_prog_infos = re.match(self._progress_pattern, self._stdout_buffer)
+        if not ffmpeg_prog_infos:
+            return
+        eta_seconds = self._compute_eta(ffmpeg_prog_infos, self._duration_to_track)
+        bitrate_int = self._compute_bitrate(ffmpeg_prog_infos.group('bitrate'))
+        # Not using ffmpeg 'total_size' value as it's imprecise and gives progress percentage over 100
+        out_time_second = int_or_none(ffmpeg_prog_infos.group('out_time_us')) // 1_000_000
+        try:
+            outputted_bytes_int = int_or_none(out_time_second / self._duration_to_track * self._total_filesize)
+        except ZeroDivisionError:
+            outputted_bytes_int = 0
+        self._status.update({
+            'outputted': outputted_bytes_int,
+            'speed': bitrate_int,
+            'eta': eta_seconds,
+        })
+        self._hook_progress(self._status, self._info_dict)
+        self._stderr_buffer = re.sub(r'=\s+', '=', self._stderr_buffer)
+        print(self._stdout_buffer, file=sys.stdout, end='')
+        print(self._stderr_buffer, file=sys.stderr)
+        self._stdout_buffer = ''
+        self._stderr_buffer = ''
+
+    def _compute_total_filesize(self, duration_to_track, total_duration):
+        if not total_duration:
+            return 0
+        filesize = self._info_dict.get('filesize')
+        if not filesize:
+            filesize = self._info_dict.get('filesize_approx', 0)
+        return filesize * duration_to_track // total_duration
+
+    def _compute_duration_to_track(self):
+        duration = self._info_dict.get('duration')
+        if not duration:
+            return 0, 0
+
+        start_time, end_time = 0, duration
+        for i, arg in enumerate(self._ffmpeg_args[:-1]):
+            next_arg_is_a_timestamp = re.match(r'(?P<at>(-ss|-sseof|-to))', arg)
+            this_arg_is_a_timestamp = re.match(r'(?P<at>(-ss|-sseof|-to))=(?P<timestamp>\d+)', arg)
+            if not (next_arg_is_a_timestamp or this_arg_is_a_timestamp):
+                continue
+            elif next_arg_is_a_timestamp:
+                timestamp_seconds = self.ffmpeg_time_string_to_seconds(self._ffmpeg_args[i + 1])
+            else:
+                timestamp_seconds = self.ffmpeg_time_string_to_seconds(this_arg_is_a_timestamp.group('timestamp'))
+            if next_arg_is_a_timestamp.group('at') == '-ss':
+                start_time = timestamp_seconds
+            elif next_arg_is_a_timestamp.group('at') == '-sseof':
+                start_time = end_time - timestamp_seconds
+            elif next_arg_is_a_timestamp.group('at') == '-to':
+                end_time = timestamp_seconds
+
+        duration_to_track = end_time - start_time
+        if duration_to_track >= 0:
+            return duration_to_track, duration
+        return 0, duration
+
+    @staticmethod
+    def _compute_eta(ffmpeg_prog_infos, duration_to_track):
+        try:
+            speed = float_or_none(ffmpeg_prog_infos.group('speed')[:-1])
+            out_time_second = int_or_none(ffmpeg_prog_infos.group('out_time_us')) // 1_000_000
+            eta_seconds = (duration_to_track - out_time_second) // speed
+        except (TypeError, ZeroDivisionError):
+            eta_seconds = 0
+        return eta_seconds
+
+    @staticmethod
+    def ffmpeg_time_string_to_seconds(time_string):
+        ffmpeg_time_seconds = 0
+        hms_parsed = re.match(r'((?P<Hour>\d+):)?((?P<Minute>\d+):)?(?P<Second>\d+)(\.(?P<float>\d+))?', time_string)
+        smu_parse = re.match(r'(?P<Time>\d+)(?P<Unit>[mu]?s)', time_string)
+        if hms_parsed:
+            if hms_parsed.group('Hour'):
+                ffmpeg_time_seconds += 3600 * int_or_none(hms_parsed.group('Hour'))
+            if hms_parsed.group('Minute'):
+                ffmpeg_time_seconds += 60 * int_or_none(hms_parsed.group('Minute'))
+            ffmpeg_time_seconds += int_or_none(hms_parsed.group('Second'))
+            if hms_parsed.group('float'):
+                float_part = hms_parsed.group('float')
+                ffmpeg_time_seconds += int_or_none(float_part) / (10 ** len(float_part))
+        elif smu_parse:
+            ffmpeg_time_seconds = int_or_none(smu_parse.group('Time'))
+            prefix_and_unit = smu_parse.group('Unit')
+            if prefix_and_unit == 'ms':
+                ffmpeg_time_seconds /= 1_000
+            elif prefix_and_unit == 'us':
+                ffmpeg_time_seconds /= 1_000_000
+        return ffmpeg_time_seconds
+
+    @staticmethod
+    def _compute_bitrate(bitrate):
+        bitrate_str = re.match(r'(?P<Integer>\d+)(\.(?P<float>\d+))?(?P<Prefix>[gmk])?bits/s', bitrate)
+        try:
+            no_prefix_bitrate = int_or_none(bitrate_str.group('Integer'))
+            if bitrate_str.group('float'):
+                float_part = bitrate_str.group('float')
+                no_prefix_bitrate += int_or_none(float_part) / (10 ** len(float_part))
+            if bitrate_str.group('Prefix'):
+                unit_prefix = bitrate_str.group('Prefix')
+                if unit_prefix == 'g':
+                    no_prefix_bitrate *= 1_000_000_000
+                elif unit_prefix == 'm':
+                    no_prefix_bitrate *= 1_000_000
+                elif unit_prefix == 'k':
+                    no_prefix_bitrate *= 1_000
+        except (TypeError, AttributeError):
+            return 0
+        return no_prefix_bitrate
```
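
A few worked examples of the conversions FFmpegProgressTracker performs, with invented inputs; the arithmetic mirrors `ffmpeg_time_string_to_seconds`, `_compute_bitrate` and the `outputted` estimate in `_parse_ffmpeg_output`:

```python
# ffmpeg_time_string_to_seconds('01:02:03.5'):
#   3600 * 1 + 60 * 2 + 3 + 5 / 10 = 3723.5 seconds
print(3600 * 1 + 60 * 2 + 3 + 5 / 10)

# _compute_bitrate('128kbits/s'): 128 * 1_000 = 128000 bits/s
print(128 * 1_000)

# outputted estimate: out_time_us=52_000_000 (52 s of output written so far),
# duration_to_track=104 s, total_filesize=10 MiB -> roughly half the bytes.
print(int(52_000_000 // 1_000_000 / 104 * 10 * 1024 * 1024))  # 5242880
```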
```diff
@@ -5676,3 +5676,51 @@ class _YDLLogger:
     def stderr(self, message):
         if self._ydl:
             self._ydl.to_stderr(message)
+
+
+class FormatProgressInfos:
+    @staticmethod
+    def format_seconds(seconds):
+        if seconds is None:
+            return ' Unknown'
+        time = timetuple_from_msec(seconds * 1000)
+        if time.hours > 99:
+            return '--:--:--'
+        if not time.hours:
+            return '%02d:%02d' % time[1:-1]
+        return '%02d:%02d:%02d' % time[:-1]
+
+    format_eta = format_seconds
+
+    @staticmethod
+    def calc_percent(byte_counter, data_len):
+        if data_len is None:
+            return None
+        return float(byte_counter) / float(data_len) * 100.0
+
+    @staticmethod
+    def format_percent(percent):
+        return ' N/A%' if percent is None else f'{percent:>5.1f}%'
+
+    @staticmethod
+    def calc_eta(start, now, total, current):
+        if total is None:
+            return None
+        if now is None:
+            now = time.time()
+        dif = now - start
+        if current == 0 or dif < 0.001:  # One millisecond
+            return None
+        rate = float(current) / dif
+        return int((float(total) - float(current)) / rate)
+
+    @staticmethod
+    def calc_speed(start, now, bytes):
+        dif = now - start
+        if bytes == 0 or dif < 0.001:  # One millisecond
+            return None
+        return float(bytes) / dif
+
+    @staticmethod
+    def format_speed(speed):
+        return ' Unknown B/s' if speed is None else f'{format_bytes(speed):>10s}/s'
```
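
This class centralizes the speed and ETA arithmetic that the downloaders previously carried as FileDownloader methods; HttpFD, for instance, now calls calc_speed and calc_eta once per progress tick. A standalone re-statement of those two methods with an invented example showing how they compose (`bytes` renamed to avoid shadowing the builtin):

```python
import time


# Mirrors FormatProgressInfos.calc_speed / calc_eta from the hunk above.
def calc_speed(start, now, num_bytes):
    dif = now - start
    if num_bytes == 0 or dif < 0.001:  # One millisecond
        return None
    return float(num_bytes) / dif


def calc_eta(start, now, total, current):
    if total is None:
        return None
    if now is None:
        now = time.time()
    dif = now - start
    if current == 0 or dif < 0.001:  # One millisecond
        return None
    rate = float(current) / dif
    return int((float(total) - float(current)) / rate)


# Invented example: 4 MiB of a 10 MiB file fetched in 8 seconds.
start, now = 100.0, 108.0
downloaded, total = 4 * 1024 * 1024, 10 * 1024 * 1024
print(calc_speed(start, now, downloaded))       # 524288.0 bytes/s (512 KiB/s)
print(calc_eta(start, now, total, downloaded))  # 12 seconds remaining
```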