mirror of https://github.com/yt-dlp/yt-dlp.git
synced 2024-11-26 17:21:23 +01:00

Compare commits: 67086aed63 ... e395ce4091

6 commits:

- e395ce4091
- b83ca24eb7
- 240a7d43c8
- f13df591d4
- 0588bd7c82
- 205826121d
3 changes: .github/workflows/build.yml (vendored)

@@ -504,7 +504,8 @@ jobs:
       - windows32
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/download-artifact@v4
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
         with:
           path: artifact
           pattern: build-bin-*
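For context (not part of the commit): the `pattern: build-bin-*` input makes actions/download-artifact@v4 fetch only the matching build artifacts. A rough Python sketch of that glob-style filtering, with hypothetical artifact names:

```python
# Illustration only: glob filtering similar to the `pattern` input of
# actions/download-artifact@v4. The artifact names here are hypothetical.
import fnmatch

artifact_names = ['build-bin-linux', 'build-bin-windows32', 'build-pypi']
selected = [name for name in artifact_names if fnmatch.fnmatch(name, 'build-bin-*')]
print(selected)  # ['build-bin-linux', 'build-bin-windows32']
```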
17 changes: .github/workflows/release-master.yml (vendored)

@@ -28,3 +28,20 @@ jobs:
       actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.MASTER_PYPI_PROJECT != ''
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true
17 changes: .github/workflows/release-nightly.yml (vendored)

@@ -41,3 +41,20 @@ jobs:
       actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.NIGHTLY_PYPI_PROJECT != ''
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true
19 changes: .github/workflows/release.yml (vendored)

@@ -2,10 +2,6 @@ name: Release
 on:
   workflow_call:
     inputs:
-      prerelease:
-        required: false
-        default: true
-        type: boolean
       source:
         required: false
         default: ''
@@ -18,6 +14,10 @@ on:
         required: false
         default: ''
         type: string
+      prerelease:
+        required: false
+        default: true
+        type: boolean
   workflow_dispatch:
     inputs:
       source:
@@ -278,11 +278,20 @@ jobs:
           make clean-cache
           python -m build --no-isolation .

+      - name: Upload artifacts
+        if: github.event_name != 'workflow_dispatch'
+        uses: actions/upload-artifact@v4
+        with:
+          name: build-pypi
+          path: |
+            dist/*
+          compression-level: 0
+
       - name: Publish to PyPI
+        if: github.event_name == 'workflow_dispatch'
         uses: pypa/gh-action-pypi-publish@release/v1
         with:
           verbose: true
-          attestations: false # Currently doesn't work w/ reusable workflows (breaks nightly)

   publish:
     needs: [prepare, build]
pyproject.toml

@@ -52,7 +52,7 @@ default = [
     "pycryptodomex",
     "requests>=2.32.2,<3",
     "urllib3>=1.26.17,<3",
-    "websockets>=13.0",
+    "websockets>=13.0,<14",
 ]
 curl-cffi = [
     "curl-cffi==0.5.10; os_name=='nt' and implementation_name=='cpython'",
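To see what the tightened `websockets` pin accepts, here is an illustrative check (not part of the commit) using the `packaging` library:

```python
# Illustrative only: evaluate the new version specifier from pyproject.toml,
# assuming the `packaging` library is installed.
from packaging.specifiers import SpecifierSet

spec = SpecifierSet('>=13.0,<14')
print('13.1' in spec)  # True: still allowed
print('14.0' in spec)  # False: excluded by the new upper bound
```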
yt_dlp/dependencies/Cryptodome.py

@@ -24,7 +24,7 @@ try:
         from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5  # noqa: F401
         from Crypto.Hash import CMAC, SHA1  # noqa: F401
         from Crypto.PublicKey import RSA  # noqa: F401
-except ImportError:
+except (ImportError, OSError):
     __version__ = f'broken {__version__}'.strip()

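The widened except clause follows the usual guarded-import pattern: a dependency whose compiled extension is broken can fail with OSError at import time (for example, a missing or corrupt shared library), which plain `except ImportError` would not catch. A minimal standalone sketch of the pattern, not yt-dlp's actual code:

```python
# Standalone sketch of the guarded-import pattern (not yt-dlp's actual code).
def probe_optional_dependency(module_name):
    try:
        module = __import__(module_name)
    except (ImportError, OSError) as error:  # broken native extensions can raise OSError
        return f'broken ({error.__class__.__name__})'
    return getattr(module, '__version__', 'unknown')

print(probe_optional_dependency('Cryptodome'))
```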
yt_dlp/extractor/_extractors.py

@@ -506,6 +506,7 @@ from .dfb import DFBIE
 from .dhm import DHMIE
 from .digitalconcerthall import DigitalConcertHallIE
 from .digiteka import DigitekaIE
+from .digiview import DigiviewIE
 from .discogs import DiscogsReleasePlaylistIE
 from .disney import DisneyIE
 from .dispeak import DigitallySpeakingIE
113 changes: yt_dlp/extractor/digiview.py (new file)

@@ -0,0 +1,113 @@
+import urllib.parse
+
+from yt_dlp.utils import int_or_none
+
+from ..networking import Request
+
+from .youtube import YoutubeIE
+
+
+class DigiviewIE(YoutubeIE):
+    IE_DESC = 'Digiview'
+    IE_NAME = 'digiview'
+    _VALID_URL = r'https?://(?:www\.)?ladigitale\.dev/digiview/#/v/(?P<id>[0-9a-f]+)'
+    _TESTS = [
+        {
+            # normal video
+            'url': 'https://ladigitale.dev/digiview/#/v/663e17b35e979',
+            'md5': 'acdf2c99c1e4d67664c9fbc5695986a9',
+            'info_dict': {
+                'id': 'BaW_jenozKc',
+                'ext': 'mp4',
+                'title': 'youtube-dl test video "\'/\\ä↭𝕐',
+                'channel': 'Philipp Hagemeister',
+                'channel_id': 'UCLqxVugv74EIW3VWh2NOa3Q',
+                'channel_url': r're:https?://(?:www\.)?youtube\.com/channel/UCLqxVugv74EIW3VWh2NOa3Q',
+                'upload_date': '20121002',
+                'description': 'md5:8fb536f4877b8a7455c2ec23794dbc22',
+                'categories': ['Science & Technology'],
+                'tags': ['youtube-dl'],
+                'duration': 10,
+                'view_count': int,
+                'like_count': int,
+                'availability': 'public',
+                'playable_in_embed': True,
+                'thumbnail': 'https://i.ytimg.com/vi/BaW_jenozKc/maxresdefault.jpg',
+                'live_status': 'not_live',
+                'age_limit': 0,
+                'comment_count': int,
+                'channel_follower_count': int,
+                'uploader': 'Philipp Hagemeister',
+                'uploader_url': 'https://www.youtube.com/@PhilippHagemeister',
+                'uploader_id': '@PhilippHagemeister',
+                'heatmap': 'count:100',
+            }
+        },
+        {
+            # cut video
+            'url': 'https://ladigitale.dev/digiview/#/v/663e17f2f3f18',
+            'md5': 'acdf2c99c1e4d67664c9fbc5695986a9',
+            'info_dict': {
+                'id': 'BaW_jenozKc',
+                'ext': 'mp4',
+                'title': 'youtube-dl test video "\'/\\ä↭𝕐',
+                'channel': 'Philipp Hagemeister',
+                'channel_id': 'UCLqxVugv74EIW3VWh2NOa3Q',
+                'channel_url': r're:https?://(?:www\.)?youtube\.com/channel/UCLqxVugv74EIW3VWh2NOa3Q',
+                'upload_date': '20121002',
+                'description': 'md5:8fb536f4877b8a7455c2ec23794dbc22',
+                'categories': ['Science & Technology'],
+                'tags': ['youtube-dl'],
+                'duration': 3,
+                'view_count': int,
+                'like_count': int,
+                'availability': 'public',
+                'playable_in_embed': True,
+                'thumbnail': 'https://i.ytimg.com/vi/BaW_jenozKc/maxresdefault.jpg',
+                'live_status': 'not_live',
+                'age_limit': 0,
+                'comment_count': int,
+                'channel_follower_count': int,
+                'uploader': 'Philipp Hagemeister',
+                'uploader_url': 'https://www.youtube.com/@PhilippHagemeister',
+                'uploader_id': '@PhilippHagemeister',
+                'heatmap': 'count:100',
+            }
+        },
+    ]
+
+    def _real_extract(self, url):
+        video_id = self._match_id(url)
+        webpage_data = self._download_json(
+            Request(
+                'https://ladigitale.dev/digiview/inc/recuperer_video.php',
+                data=urllib.parse.urlencode({'id': video_id}).encode(),
+                method='POST',
+            ),
+            video_id,
+        )
+
+        youtube_ie = YoutubeIE()
+        youtube_ie.set_downloader(self._downloader)
+        info = youtube_ie._real_extract(webpage_data['videoId'])
+
+        # replace the YouTube metadata by the Digiview one
+        info['title'] = webpage_data.get('titre') or info['title']
+        info['description'] = webpage_data.get('description') or info['description']
+
+        ffmpeg_args = []
+
+        start_time = int_or_none(webpage_data.get('debut'))
+        if start_time is not None and start_time != 0:
+            ffmpeg_args.extend(['-ss', str(start_time)])
+
+        end_time = int_or_none(webpage_data.get('fin'))
+        if end_time is not None and end_time != info['duration']:
+            ffmpeg_args.extend(['-t', str(end_time - (start_time or 0))])
+
+        if ffmpeg_args and self._downloader:
+            # cut the video if specified in the Digiview webpage
+            ppargs = self._downloader.params.get("postprocessor_args")
+            ppargs.setdefault("merger", []).extend(ffmpeg_args)
+
+        return info
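A usage sketch: with a yt-dlp build that includes this extractor, a Digiview link can be resolved through the normal Python API. The URL below is one of the extractor's own test URLs; the options are illustrative.

```python
# Usage sketch; assumes a yt-dlp build that includes DigiviewIE.
import yt_dlp

url = 'https://ladigitale.dev/digiview/#/v/663e17b35e979'
with yt_dlp.YoutubeDL({'skip_download': True}) as ydl:
    info = ydl.extract_info(url, download=False)
    print(info['id'], info['title'], info.get('duration'))
```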