Compare commits

...

8 Commits

Author SHA1 Message Date
marieell
8f61d47c2b
Merge ddc7e9a1bb into b83ca24eb7 2024-11-10 09:19:38 +05:30
sepro
b83ca24eb7
[core] Catch broken Cryptodome installations (#11486)
Authored by: seproDev
2024-11-10 00:53:49 +01:00
bashonly
240a7d43c8
[build] Pin websockets version to >=13.0,<14 (#11488)
websockets 14.0 causes CI test failures (a lot more of them)

Authored by: bashonly
2024-11-09 23:46:47 +00:00
bashonly
f13df591d4
[build] Enable attestations for trusted publishing (#11420)
Reverts 428ffb75aa

Authored by: bashonly
2024-11-09 23:26:02 +00:00
marieell
ddc7e9a1bb
[ie/ARD] Use traverse_obj
Co-authored-by: Simon Sawicki <accounts@grub4k.xyz>
2024-08-28 22:58:11 +02:00
marieell
a434c7b7e2
[ie/ARD] Review feedback
Co-authored-by: Simon Sawicki <accounts@grub4k.xyz>
2024-08-14 23:13:21 +02:00
marieell
75ff02cf4e [ie/ARD] Review feedback
Co-authored-by: Simon Sawicki <accounts@grub4k.xyz>
2024-08-13 00:16:30 +02:00
marieell
4e17e8eec2 [ie/ARD] Add Audiothek (#5605) 2024-08-06 00:35:14 +02:00
8 changed files with 171 additions and 8 deletions

View File

@ -504,7 +504,8 @@ jobs:
- windows32 - windows32
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/download-artifact@v4 - name: Download artifacts
uses: actions/download-artifact@v4
with: with:
path: artifact path: artifact
pattern: build-bin-* pattern: build-bin-*

View File

@ -28,3 +28,20 @@ jobs:
actions: write # For cleaning up cache actions: write # For cleaning up cache
id-token: write # mandatory for trusted publishing id-token: write # mandatory for trusted publishing
secrets: inherit secrets: inherit
publish_pypi:
needs: [release]
if: vars.MASTER_PYPI_PROJECT != ''
runs-on: ubuntu-latest
permissions:
id-token: write # mandatory for trusted publishing
steps:
- name: Download artifacts
uses: actions/download-artifact@v4
with:
path: dist
name: build-pypi
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
verbose: true

View File

@ -41,3 +41,20 @@ jobs:
actions: write # For cleaning up cache actions: write # For cleaning up cache
id-token: write # mandatory for trusted publishing id-token: write # mandatory for trusted publishing
secrets: inherit secrets: inherit
publish_pypi:
needs: [release]
if: vars.NIGHTLY_PYPI_PROJECT != ''
runs-on: ubuntu-latest
permissions:
id-token: write # mandatory for trusted publishing
steps:
- name: Download artifacts
uses: actions/download-artifact@v4
with:
path: dist
name: build-pypi
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
verbose: true

View File

@ -2,10 +2,6 @@ name: Release
on: on:
workflow_call: workflow_call:
inputs: inputs:
prerelease:
required: false
default: true
type: boolean
source: source:
required: false required: false
default: '' default: ''
@ -18,6 +14,10 @@ on:
required: false required: false
default: '' default: ''
type: string type: string
prerelease:
required: false
default: true
type: boolean
workflow_dispatch: workflow_dispatch:
inputs: inputs:
source: source:
@ -278,11 +278,20 @@ jobs:
make clean-cache make clean-cache
python -m build --no-isolation . python -m build --no-isolation .
- name: Upload artifacts
if: github.event_name != 'workflow_dispatch'
uses: actions/upload-artifact@v4
with:
name: build-pypi
path: |
dist/*
compression-level: 0
- name: Publish to PyPI - name: Publish to PyPI
if: github.event_name == 'workflow_dispatch'
uses: pypa/gh-action-pypi-publish@release/v1 uses: pypa/gh-action-pypi-publish@release/v1
with: with:
verbose: true verbose: true
attestations: false # Currently doesn't work w/ reusable workflows (breaks nightly)
publish: publish:
needs: [prepare, build] needs: [prepare, build]

View File

@ -52,7 +52,7 @@ default = [
"pycryptodomex", "pycryptodomex",
"requests>=2.32.2,<3", "requests>=2.32.2,<3",
"urllib3>=1.26.17,<3", "urllib3>=1.26.17,<3",
"websockets>=13.0", "websockets>=13.0,<14",
] ]
curl-cffi = [ curl-cffi = [
"curl-cffi==0.5.10; os_name=='nt' and implementation_name=='cpython'", "curl-cffi==0.5.10; os_name=='nt' and implementation_name=='cpython'",

View File

@ -24,7 +24,7 @@ try:
from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5 # noqa: F401 from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5 # noqa: F401
from Crypto.Hash import CMAC, SHA1 # noqa: F401 from Crypto.Hash import CMAC, SHA1 # noqa: F401
from Crypto.PublicKey import RSA # noqa: F401 from Crypto.PublicKey import RSA # noqa: F401
except ImportError: except (ImportError, OSError):
__version__ = f'broken {__version__}'.strip() __version__ = f'broken {__version__}'.strip()

View File

@ -149,6 +149,7 @@ from .archiveorg import (
from .arcpublishing import ArcPublishingIE from .arcpublishing import ArcPublishingIE
from .ard import ( from .ard import (
ARDIE, ARDIE,
ARDAudiothekIE,
ARDBetaMediathekIE, ARDBetaMediathekIE,
ARDMediathekCollectionIE, ARDMediathekCollectionIE,
) )

View File

@ -1,4 +1,5 @@
import functools import functools
import json
import re import re
from .common import InfoExtractor from .common import InfoExtractor
@ -601,3 +602,120 @@ class ARDMediathekCollectionIE(InfoExtractor):
return self.playlist_result( return self.playlist_result(
OnDemandPagedList(fetch_page, self._PAGE_SIZE), full_id, display_id=display_id, OnDemandPagedList(fetch_page, self._PAGE_SIZE), full_id, display_id=display_id,
title=page_data.get('title'), description=page_data.get('synopsis')) title=page_data.get('title'), description=page_data.get('synopsis'))
class ARDAudiothekIE(InfoExtractor):
    """Extract audio episodes and playlists from ardaudiothek.de.

    Episode/player/live URLs yield a single audio entry; ``sendung``/
    ``serie``/``sammlung`` URLs yield a playlist whose entries point back
    at this extractor. All metadata comes from the ARD Audiothek GraphQL
    API (see _GRAPHQL_ENDPOINT).
    """
    IE_NAME = 'ARD:audiothek'
    _VALID_URL = r'''(?x)https://
        (?:www\.)?ardaudiothek\.de/
        (?:player|live|episode|(?P<playlist>sendung|serie|sammlung))/
        (?P<display_id>(?(playlist)[^?#]+?|[^?#]+))/
        (?P<id>[a-zA-Z0-9]+)
        (?(playlist)/(?P<season>\d+)?/?(?:[?#]|$))'''
    _TESTS = [{
        'url': 'https://www.ardaudiothek.de/sendung/1live-caiman-club/53375276/',
        'info_dict': {
            'id': '53375276',
            'title': '1LIVE Caiman Club',
            'description': 'md5:003cff043a41b14cf045b960b89aaa86',
        },
        'playlist_mincount': 22,
    }, {
        'url': 'https://www.ardaudiothek.de/episode/1live-caiman-club/caiman-club-s04e04-cash-out/1live/13556081/',
        'info_dict': {
            'id': '13556081',
            'ext': 'mp3',
            'upload_date': '20240717',
            'duration': 3339,
            'title': 'CAIMAN CLUB (S04E04): Cash Out',
            'thumbnail': 'https://api.ardmediathek.de/image-service/images/urn:ard:image:d5014b612429c396',
            'description': 'md5:8decf7974ed1cbf5a9d2c537940e1c4b',
            'display_id': '1live-caiman-club/caiman-club-s04e04-cash-out/1live',
            'timestamp': 1721181641,
            'series': '1LIVE Caiman Club',
        },
    }]
    # GraphQL selection for a show (playlist) lookup; %s is the show ID
    _QUERY_PLAYLIST = '''\
show(id: "%s") {
    title
    description
    items {
        nodes {
            url
            episodeNumber
            grouping
            isPublished
        }
    }
}'''
    # GraphQL selection for a single episode; %s is the item ID
    _QUERY_ITEM = '''\
item(id: "%s") {
    audioList {
        href
        distributionType
    }
    show {
        title
    }
    image {
        url
    }
    synopsis
    title
    duration
    startDate
}'''
    _GRAPHQL_ENDPOINT = 'https://api.ardaudiothek.de/graphql'

    def _graphql_query(self, display_id, query):
        """POST *query* (wrapped in an anonymous operation) and return the 'data' payload."""
        return self._download_json(
            self._GRAPHQL_ENDPOINT,
            display_id,
            data=json.dumps({'query': '{' + query + '}'}).encode(),
            headers={
                'Content-Type': 'application/json',
            },
        )['data']

    def _real_extract(self, url):
        # NOTE: `season` is captured by _VALID_URL but not yet used for filtering
        video_id, display_id, playlist_type, season_number = self._match_valid_url(url).group(
            'id', 'display_id', 'playlist', 'season')
        if re.match('^[/-]*$', display_id):
            # Degenerate slug (only slashes/dashes) — fall back to the numeric ID
            display_id = video_id

        if playlist_type:
            # FIX: was `self.graphql_query` (undefined) — the method is `_graphql_query`
            playlist_info = self._graphql_query(
                display_id, self._QUERY_PLAYLIST % video_id)['show']
            entries = [
                self.url_result(episode['url'], ie=ARDAudiothekIE.ie_key())
                for episode in playlist_info['items']['nodes']
                if episode['isPublished']]
            return self.playlist_result(entries, video_id, playlist_title=display_id, **traverse_obj(playlist_info, {
                'title': ('title', {str}),
                'description': ('description', {str}),
            }))

        # FIX: fetch the item response once; the original referenced an undefined
        # name `item` for the metadata and misspelled `self.graphql_query` for formats
        item = self._graphql_query(display_id, self._QUERY_ITEM % video_id)['item']
        return {
            'id': video_id,
            'display_id': display_id,
            'formats': traverse_obj(item, (
                'audioList', lambda _, v: url_or_none(v['href']), {
                    'url': 'href',
                    'format_id': ('distributionType', {str}),
                })),
            **traverse_obj(item, {
                'description': ('synopsis', {str}),
                'duration': ('duration', {int_or_none}),
                'series': ('show', 'title'),
                'thumbnail': ('image', 'url', {url_or_none}),
                'timestamp': ('startDate', {parse_iso8601}),
                'title': ('title', {str}),
            }),
        }