Compare commits


6 Commits

Author    SHA1        Message / Date

Mozi      d906c44fdb  Merge 322acaef8a into b83ca24eb7
                      2024-11-10 09:19:38 +05:30

sepro     b83ca24eb7  [core] Catch broken Cryptodome installations (#11486)
                      Authored by: seproDev
                      2024-11-10 00:53:49 +01:00

bashonly  240a7d43c8  [build] Pin websockets version to >=13.0,<14 (#11488)
                      websockets 14.0 causes CI test failures (a lot more of them)
                      Authored by: bashonly
                      2024-11-09 23:46:47 +00:00

bashonly  f13df591d4  [build] Enable attestations for trusted publishing (#11420)
                      Reverts 428ffb75aa
                      Authored by: bashonly
                      2024-11-09 23:26:02 +00:00

Mozi      322acaef8a  fix code style in _extractors.py
                      Co-authored-by: N/Ame <173015200+grqz@users.noreply.github.com>
                      2024-08-26 14:25:25 +00:00

Mozi      905672d378  [ie/aparat:playlist] Add extractor
                      2024-08-25 17:40:28 +00:00
8 changed files with 125 additions and 9 deletions

View File

@@ -504,7 +504,8 @@ jobs:
       - windows32
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/download-artifact@v4
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
         with:
           path: artifact
           pattern: build-bin-*

View File

@@ -28,3 +28,20 @@ jobs:
       actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.MASTER_PYPI_PROJECT != ''
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true

View File

@@ -41,3 +41,20 @@ jobs:
       actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.NIGHTLY_PYPI_PROJECT != ''
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true

View File

@@ -2,10 +2,6 @@ name: Release
 on:
   workflow_call:
     inputs:
-      prerelease:
-        required: false
-        default: true
-        type: boolean
       source:
         required: false
         default: ''
@@ -18,6 +14,10 @@ on:
         required: false
         default: ''
         type: string
+      prerelease:
+        required: false
+        default: true
+        type: boolean
   workflow_dispatch:
     inputs:
       source:
@@ -278,11 +278,20 @@ jobs:
           make clean-cache
           python -m build --no-isolation .
       - name: Upload artifacts
+        if: github.event_name != 'workflow_dispatch'
         uses: actions/upload-artifact@v4
         with:
          name: build-pypi
          path: |
            dist/*
          compression-level: 0
+
+      - name: Publish to PyPI
+        if: github.event_name == 'workflow_dispatch'
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true
+          attestations: false # Currently doesn't work w/ reusable workflows (breaks nightly)
+
   publish:
     needs: [prepare, build]

View File

@@ -52,7 +52,7 @@ default = [
     "pycryptodomex",
     "requests>=2.32.2,<3",
     "urllib3>=1.26.17,<3",
-    "websockets>=13.0",
+    "websockets>=13.0,<14",
 ]
 curl-cffi = [
     "curl-cffi==0.5.10; os_name=='nt' and implementation_name=='cpython'",

View File

@@ -24,7 +24,7 @@ try:
         from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5  # noqa: F401
         from Crypto.Hash import CMAC, SHA1  # noqa: F401
         from Crypto.PublicKey import RSA  # noqa: F401
-except ImportError:
+except (ImportError, OSError):
     __version__ = f'broken {__version__}'.strip()

View File

@@ -135,7 +135,10 @@ from .antenna import (
 from .anvato import AnvatoIE
 from .aol import AolIE
 from .apa import APAIE
-from .aparat import AparatIE
+from .aparat import (
+    AparatIE,
+    AparatPlaylistIE,
+)
 from .appleconnect import AppleConnectIE
 from .applepodcasts import ApplePodcastsIE
 from .appletrailers import (

View File

@@ -1,9 +1,12 @@
+import urllib.parse
+
 from .common import InfoExtractor
 from ..utils import (
     get_element_by_id,
     int_or_none,
     merge_dicts,
     mimetype2ext,
+    traverse_obj,
     url_or_none,
 )
@@ -86,3 +89,69 @@ class AparatIE(InfoExtractor):
             'duration': int_or_none(options.get('duration')),
             'formats': formats,
         })
+
+
+class AparatPlaylistIE(InfoExtractor):
+    _VALID_URL = r'https?://(?:www\.)?aparat\.com/playlist/(?P<id>\d+)'
+    _TESTS = [{
+        'url': 'https://www.aparat.com/playlist/1001307',
+        'info_dict': {
+            'id': '1001307',
+            'title': 'مبانی یادگیری عمیق',
+            'description': '',
+            'thumbnails': 'count:2',
+            'channel': 'mrmohammadi_iust',
+            'channel_id': '6463423',
+            'channel_url': 'https://www.aparat.com/mrmohammadi_iust',
+            'channel_follower_count': int,
+        },
+        'playlist_mincount': 1,
+        'params': {
+            'skip_download': True,
+        },
+    }, {
+        'url': 'https://www.aparat.com/playlist/1234567',
+        'info_dict': {
+            'id': '1234567',
+            'title': 'ساخت اکانت',
+            'description': '',
+            'thumbnails': 'count:0',
+            'channel': 'reza.shadow',
+            'channel_id': '8159952',
+            'channel_url': 'https://www.aparat.com/reza.shadow',
+            'channel_follower_count': int,
+        },
+        'playlist_count': 0,
+        'params': {
+            'skip_download': True,
+        },
+    }, {
+        'url': 'https://www.aparat.com/playlist/1256882',
+        'only_matching': True,
+    }]
+
+    def _real_extract(self, url):
+        playlist_id = self._match_id(url)
+
+        info = self._download_json(
+            f'https://www.aparat.com/api/fa/v1/video/playlist/one/playlist_id/{playlist_id}', playlist_id)
+
+        info_dict = traverse_obj(info, ('data', 'attributes', {
+            'playlist_title': ('title'),
+            'description': ('description'),
+        }), default={})
+        info_dict.update(thumbnails=traverse_obj([
+            traverse_obj(info, ('data', 'attributes', {'url': ('big_poster', {url_or_none})})),
+            traverse_obj(info, ('data', 'attributes', {'url': ('small_poster', {url_or_none})})),
+        ], (...), default=[]))
+        info_dict.update(**traverse_obj(info, ('included', lambda _, v: v['type'] == 'channel', 'attributes', {
+            'channel': ('username'),
+            'channel_id': ('id'),
+            'channel_url': ('link', {lambda x: urllib.parse.urljoin(url, x)}),  # starts with a slash
+            'channel_follower_count': ('follower_cnt', {int_or_none}),
+        }), get_all=False))
+
+        return self.playlist_result(traverse_obj(info, (
+            'included', lambda _, v: v['type'] == 'Video', 'attributes', 'uid',
+            {lambda uid: self.url_result(f'https://www.aparat.com/v/{uid}?playlist={playlist_id}')},
+        ), default=[]), playlist_id, **info_dict)
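
Note (not part of the diff): a minimal sketch of how the new AparatPlaylistIE could be exercised through yt-dlp's public Python API, assuming an install built from this branch; the playlist URL is borrowed from the extractor's test data above.

import yt_dlp

# Metadata-only run; keep playlist entries as flat URL stubs instead of
# re-extracting every video in the playlist.
opts = {
    'skip_download': True,
    'extract_flat': 'in_playlist',
}
with yt_dlp.YoutubeDL(opts) as ydl:
    info = ydl.extract_info('https://www.aparat.com/playlist/1001307', download=False)
    print(info['id'], info.get('title'))
    for entry in info.get('entries') or []:
        print(entry.get('url'))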