Mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2024-11-26 17:21:23 +01:00)

Compare commits: c9dae2e0f0...50e9b8d24d (9 commits)

- 50e9b8d24d
- b83ca24eb7
- 240a7d43c8
- f13df591d4
- fb127cccd2
- 50306a11eb
- deab7eb786
- a70f889927
- a746212d7b
.github/workflows/build.yml (3 changed lines):

```diff
@@ -504,7 +504,8 @@ jobs:
       - windows32
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/download-artifact@v4
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
         with:
           path: artifact
           pattern: build-bin-*
```
.github/workflows/release-master.yml (17 changed lines):

```diff
@@ -28,3 +28,20 @@ jobs:
       actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.MASTER_PYPI_PROJECT != ''
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true
```
.github/workflows/release-nightly.yml (17 changed lines):

```diff
@@ -41,3 +41,20 @@ jobs:
       actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.NIGHTLY_PYPI_PROJECT != ''
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true
```
.github/workflows/release.yml (19 changed lines):

```diff
@@ -2,10 +2,6 @@ name: Release
 on:
   workflow_call:
     inputs:
-      prerelease:
-        required: false
-        default: true
-        type: boolean
       source:
         required: false
         default: ''
@@ -18,6 +14,10 @@ on:
         required: false
         default: ''
         type: string
+      prerelease:
+        required: false
+        default: true
+        type: boolean
   workflow_dispatch:
     inputs:
       source:
@@ -278,11 +278,20 @@ jobs:
           make clean-cache
           python -m build --no-isolation .
 
+      - name: Upload artifacts
+        if: github.event_name != 'workflow_dispatch'
+        uses: actions/upload-artifact@v4
+        with:
+          name: build-pypi
+          path: |
+            dist/*
+          compression-level: 0
+
       - name: Publish to PyPI
+        if: github.event_name == 'workflow_dispatch'
        uses: pypa/gh-action-pypi-publish@release/v1
         with:
           verbose: true
-          attestations: false # Currently doesn't work w/ reusable workflows (breaks nightly)
 
   publish:
     needs: [prepare, build]
```
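Read together with the publish_pypi jobs added to release-master.yml and release-nightly.yml above, the flow is: when release.yml is invoked via workflow_call, the build step uploads dist/* as the build-pypi artifact and skips publishing; the calling workflow downloads that artifact and publishes it through trusted publishing, gated on its MASTER_PYPI_PROJECT or NIGHTLY_PYPI_PROJECT variable. Direct publishing from release.yml itself now only happens on workflow_dispatch. Below is a hypothetical pre-publish sanity check, not part of these workflows, assuming the artifact has already been downloaded into dist/:

```python
from pathlib import Path

# Hypothetical pre-publish sanity check (not part of the workflows above):
# confirm the downloaded build-pypi artifact actually contains distributions.
dist = Path('dist')
wheels = sorted(dist.glob('*.whl'))
sdists = sorted(dist.glob('*.tar.gz'))

if not wheels and not sdists:
    raise SystemExit('dist/ contains no wheels or sdists; nothing to publish')

print(f'ready to publish: {len(wheels)} wheel(s) and {len(sdists)} sdist(s)')
```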
```diff
@@ -52,7 +52,7 @@ default = [
     "pycryptodomex",
     "requests>=2.32.2,<3",
     "urllib3>=1.26.17,<3",
-    "websockets>=13.0",
+    "websockets>=13.0,<14",
 ]
 curl-cffi = [
     "curl-cffi==0.5.10; os_name=='nt' and implementation_name=='cpython'",
```
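The change above adds an upper bound so installs stay on the websockets 13.x series. For illustration, the same constraint syntax can be checked against the installed version at runtime; a minimal sketch assuming the third-party packaging library is available (the satisfies helper is hypothetical, not part of yt-dlp):

```python
from importlib.metadata import PackageNotFoundError, version

from packaging.specifiers import SpecifierSet  # third-party, assumed installed


def satisfies(package: str, constraint: str) -> bool:
    """Return True if the installed version of `package` matches `constraint`."""
    try:
        return version(package) in SpecifierSet(constraint)
    except PackageNotFoundError:
        return False


# Mirrors the pinned range from the dependency group above
print(satisfies('websockets', '>=13.0,<14'))
```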
```diff
@@ -24,7 +24,7 @@ try:
         from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5  # noqa: F401
         from Crypto.Hash import CMAC, SHA1  # noqa: F401
         from Crypto.PublicKey import RSA  # noqa: F401
-except ImportError:
+except (ImportError, OSError):
     __version__ = f'broken {__version__}'.strip()
 
 
```
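Broadening the except clause covers the case where pycryptodome/pycryptodomex is installed but broken: loading its native libraries can fail with OSError (for example a missing or mismatched DLL) rather than ImportError, and the version is then marked as broken instead of crashing at import time. A minimal standalone sketch of the same guard pattern (the AES = None fallback is illustrative, not what this module does):

```python
# Guard an optional dependency against both failure modes: the module being
# absent (ImportError) and its native libraries failing to load (OSError).
try:
    from Crypto.Cipher import AES  # optional dependency
except (ImportError, OSError):
    AES = None  # callers must check for None before using AES

if AES is None:
    print('AES support unavailable; continuing without it')
```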
```diff
@@ -1,4 +1,5 @@
 from .common import InfoExtractor
+from ..compat import compat_urllib_parse_urlparse
 from ..utils import (
     ExtractorError,
     clean_html,
@@ -276,8 +277,16 @@ class VidioLiveIE(VidioBaseIE):
                     display_id, note='Downloading HLS token JSON', data=b'')
                 formats.extend(self._extract_m3u8_formats(
                     sources['source'] + '?' + token_json.get('token', ''), display_id, 'mp4', 'm3u8_native'))
-            if str_or_none(sources.get('source_dash')):
-                pass
+            if str_or_none(sources.get('source_dash')):  # TODO: Find live example with source_dash
+                parsed_base_dash = compat_urllib_parse_urlparse(sources['source_dash'])
+                token_json = self._download_json(
+                    'https://www.vidio.com/live/%s/tokens?type=dash' % video_id,
+                    display_id, note='Downloading DASH token JSON', data=b'')
+                parsed_tokenized_dash = parsed_base_dash._replace(path=token_json.get('token', '')
+                    + (parsed_base_dash.path if parsed_base_dash.path[0] == '/'
+                       else '/' + parsed_base_dash.path))
+                formats.extend(self._extract_mpd_formats(
+                    parsed_tokenized_dash.geturl(), display_id, 'dash'))
         else:
             if stream_meta.get('stream_token_url'):
                 token_json = self._download_json(
@@ -287,7 +296,15 @@ class VidioLiveIE(VidioBaseIE):
                     stream_meta['stream_token_url'] + '?' + token_json.get('token', ''),
                     display_id, 'mp4', 'm3u8_native'))
             if stream_meta.get('stream_dash_url'):
-                pass
+                parsed_base_dash = compat_urllib_parse_urlparse(stream_meta['stream_dash_url'])
+                token_json = self._download_json(
+                    'https://www.vidio.com/live/%s/tokens?type=dash' % video_id,
+                    display_id, note='Downloading DASH token JSON', data=b'')
+                parsed_tokenized_dash = parsed_base_dash._replace(path=token_json.get('token', '')
+                    + (parsed_base_dash.path if parsed_base_dash.path[0] == '/'
+                       else '/' + parsed_base_dash.path))
+                formats.extend(self._extract_mpd_formats(
+                    parsed_tokenized_dash.geturl(), display_id, 'dash'))
             if stream_meta.get('stream_url'):
                 formats.extend(self._extract_m3u8_formats(
                     stream_meta['stream_url'], display_id, 'mp4', 'm3u8_native'))
```
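The new DASH branches fetch a token from the tokens?type=dash endpoint and splice it into the manifest URL: the token becomes the leading path segment while scheme, host and query are preserved. A standalone sketch of that URL rewrite using only the standard library (the URL and token below are invented for illustration):

```python
from urllib.parse import urlparse

# Invented example values, for illustration only
base = urlparse('https://example.vidio.com/live/stream.mpd?foo=bar')
token = 'exp=123~hmac=abc'  # stand-in for token_json.get('token', '')

# Ensure the original path keeps its leading slash, then prepend the token,
# mirroring the parsed_base_dash._replace(...) expression in the diff
path = base.path if base.path.startswith('/') else '/' + base.path
tokenized = base._replace(path=token + path)

print(tokenized.geturl())
# -> https://example.vidio.com/exp=123~hmac=abc/live/stream.mpd?foo=bar
```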