Compare commits

...

6 Commits

Author SHA1 Message Date

Laurent FAVOLE
e395ce4091
Merge 0588bd7c82 into b83ca24eb7
2024-11-10 14:42:04 +01:00

sepro
b83ca24eb7
[core] Catch broken Cryptodome installations (#11486)
Authored by: seproDev
2024-11-10 00:53:49 +01:00

bashonly
240a7d43c8
[build] Pin websockets version to >=13.0,<14 (#11488)
websockets 14.0 causes CI test failures (a lot more of them)
Authored by: bashonly
2024-11-09 23:46:47 +00:00

bashonly
f13df591d4
[build] Enable attestations for trusted publishing (#11420)
Reverts 428ffb75aa
Authored by: bashonly
2024-11-09 23:26:02 +00:00

bashonly
0588bd7c82
Merge branch 'master' into digiview-extractor
2024-05-30 22:36:11 -05:00

Laurent FAVOLE
205826121d
Add Digiview extractor
2024-05-10 16:01:57 +02:00
8 changed files with 166 additions and 8 deletions

View File

@@ -504,7 +504,8 @@ jobs:
       - windows32
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/download-artifact@v4
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
         with:
           path: artifact
           pattern: build-bin-*

View File

@@ -28,3 +28,20 @@ jobs:
       actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.MASTER_PYPI_PROJECT != ''
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true

View File

@@ -41,3 +41,20 @@ jobs:
       actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.NIGHTLY_PYPI_PROJECT != ''
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true

View File

@@ -2,10 +2,6 @@ name: Release
 on:
   workflow_call:
     inputs:
-      prerelease:
-        required: false
-        default: true
-        type: boolean
       source:
         required: false
         default: ''
@@ -18,6 +14,10 @@ on:
         required: false
         default: ''
         type: string
+      prerelease:
+        required: false
+        default: true
+        type: boolean
   workflow_dispatch:
     inputs:
       source:
@@ -278,11 +278,20 @@ jobs:
           make clean-cache
           python -m build --no-isolation .
       - name: Upload artifacts
+        if: github.event_name != 'workflow_dispatch'
         uses: actions/upload-artifact@v4
         with:
           name: build-pypi
           path: |
             dist/*
           compression-level: 0
+
+      - name: Publish to PyPI
+        if: github.event_name == 'workflow_dispatch'
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true
+          attestations: false # Currently doesn't work w/ reusable workflows (breaks nightly)
+
   publish:
     needs: [prepare, build]

View File

@@ -52,7 +52,7 @@ default = [
     "pycryptodomex",
     "requests>=2.32.2,<3",
     "urllib3>=1.26.17,<3",
-    "websockets>=13.0",
+    "websockets>=13.0,<14",
 ]
 curl-cffi = [
     "curl-cffi==0.5.10; os_name=='nt' and implementation_name=='cpython'",

View File

@@ -24,7 +24,7 @@ try:
         from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5  # noqa: F401
         from Crypto.Hash import CMAC, SHA1  # noqa: F401
         from Crypto.PublicKey import RSA  # noqa: F401
-except ImportError:
+except (ImportError, OSError):
     __version__ = f'broken {__version__}'.strip()

View File

@@ -506,6 +506,7 @@ from .dfb import DFBIE
 from .dhm import DHMIE
 from .digitalconcerthall import DigitalConcertHallIE
 from .digiteka import DigitekaIE
+from .digiview import DigiviewIE
 from .discogs import DiscogsReleasePlaylistIE
 from .disney import DisneyIE
 from .dispeak import DigitallySpeakingIE

View File

@@ -0,0 +1,113 @@
import urllib.parse

from yt_dlp.utils import int_or_none

from ..networking import Request
from .youtube import YoutubeIE


class DigiviewIE(YoutubeIE):
    IE_DESC = 'Digiview'
    IE_NAME = 'digiview'
    _VALID_URL = r'https?://(?:www\.)?ladigitale\.dev/digiview/#/v/(?P<id>[0-9a-f]+)'
    _TESTS = [
        {
            # normal video
            'url': 'https://ladigitale.dev/digiview/#/v/663e17b35e979',
            'md5': 'acdf2c99c1e4d67664c9fbc5695986a9',
            'info_dict': {
                'id': 'BaW_jenozKc',
                'ext': 'mp4',
                'title': 'youtube-dl test video "\'/\\ä↭𝕐',
                'channel': 'Philipp Hagemeister',
                'channel_id': 'UCLqxVugv74EIW3VWh2NOa3Q',
                'channel_url': r're:https?://(?:www\.)?youtube\.com/channel/UCLqxVugv74EIW3VWh2NOa3Q',
                'upload_date': '20121002',
                'description': 'md5:8fb536f4877b8a7455c2ec23794dbc22',
                'categories': ['Science & Technology'],
                'tags': ['youtube-dl'],
                'duration': 10,
                'view_count': int,
                'like_count': int,
                'availability': 'public',
                'playable_in_embed': True,
                'thumbnail': 'https://i.ytimg.com/vi/BaW_jenozKc/maxresdefault.jpg',
                'live_status': 'not_live',
                'age_limit': 0,
                'comment_count': int,
                'channel_follower_count': int,
                'uploader': 'Philipp Hagemeister',
                'uploader_url': 'https://www.youtube.com/@PhilippHagemeister',
                'uploader_id': '@PhilippHagemeister',
                'heatmap': 'count:100',
            },
        },
        {
            # cut video
            'url': 'https://ladigitale.dev/digiview/#/v/663e17f2f3f18',
            'md5': 'acdf2c99c1e4d67664c9fbc5695986a9',
            'info_dict': {
                'id': 'BaW_jenozKc',
                'ext': 'mp4',
                'title': 'youtube-dl test video "\'/\\ä↭𝕐',
                'channel': 'Philipp Hagemeister',
                'channel_id': 'UCLqxVugv74EIW3VWh2NOa3Q',
                'channel_url': r're:https?://(?:www\.)?youtube\.com/channel/UCLqxVugv74EIW3VWh2NOa3Q',
                'upload_date': '20121002',
                'description': 'md5:8fb536f4877b8a7455c2ec23794dbc22',
                'categories': ['Science & Technology'],
                'tags': ['youtube-dl'],
                'duration': 3,
                'view_count': int,
                'like_count': int,
                'availability': 'public',
                'playable_in_embed': True,
                'thumbnail': 'https://i.ytimg.com/vi/BaW_jenozKc/maxresdefault.jpg',
                'live_status': 'not_live',
                'age_limit': 0,
                'comment_count': int,
                'channel_follower_count': int,
                'uploader': 'Philipp Hagemeister',
                'uploader_url': 'https://www.youtube.com/@PhilippHagemeister',
                'uploader_id': '@PhilippHagemeister',
                'heatmap': 'count:100',
            },
        },
    ]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage_data = self._download_json(
            Request(
                'https://ladigitale.dev/digiview/inc/recuperer_video.php',
                data=urllib.parse.urlencode({'id': video_id}).encode(),
                method='POST',
            ),
            video_id,
        )

        # delegate the actual extraction to YoutubeIE, using the YouTube video ID
        # returned by the Digiview API
        youtube_ie = YoutubeIE()
        youtube_ie.set_downloader(self._downloader)
        info = youtube_ie._real_extract(webpage_data['videoId'])

        # replace the YouTube metadata with the Digiview one
        info['title'] = webpage_data.get('titre') or info['title']
        info['description'] = webpage_data.get('description') or info['description']

        ffmpeg_args = []
        start_time = int_or_none(webpage_data.get('debut'))
        if start_time is not None and start_time != 0:
            ffmpeg_args.extend(['-ss', str(start_time)])
        end_time = int_or_none(webpage_data.get('fin'))
        if end_time is not None and end_time != info['duration']:
            ffmpeg_args.extend(['-t', str(end_time - (start_time or 0))])

        if ffmpeg_args and self._downloader:
            # cut the video if specified in the Digiview webpage; use setdefault
            # so this does not crash when no postprocessor args were configured
            ppargs = self._downloader.params.setdefault('postprocessor_args', {})
            ppargs.setdefault('merger', []).extend(ffmpeg_args)

        return info
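
For reference, a minimal usage sketch (not part of this diff) of how the new extractor might be exercised through yt-dlp's Python API. The URL is the "cut video" test URL from above; the empty 'postprocessor_args' dict and the printed fields are assumptions for illustration only.

# Hypothetical usage sketch, assuming a yt-dlp build with this PR applied is importable.
import yt_dlp

opts = {'postprocessor_args': {}}
with yt_dlp.YoutubeDL(opts) as ydl:
    # DigiviewIE resolves the Digiview ID, delegates to the YouTube extractor,
    # and records '-ss'/'-t' merger arguments when the Digiview page defines a cut
    info = ydl.extract_info('https://ladigitale.dev/digiview/#/v/663e17f2f3f18', download=False)
    print(info.get('title'), info.get('duration'))
    print(ydl.params.get('postprocessor_args'))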