Compare commits

...

13 Commits

Author SHA1 Message Date
N/A
0e9f6587c5
Merge 13ed574168 into b83ca24eb7 2024-11-10 14:42:01 +01:00
sepro
b83ca24eb7
[core] Catch broken Cryptodome installations (#11486)
Authored by: seproDev
2024-11-10 00:53:49 +01:00
bashonly
240a7d43c8
[build] Pin websockets version to >=13.0,<14 (#11488)
websockets 14.0 causes CI test failures (a lot more of them)

Authored by: bashonly
2024-11-09 23:46:47 +00:00
bashonly
f13df591d4
[build] Enable attestations for trusted publishing (#11420)
Reverts 428ffb75aa

Authored by: bashonly
2024-11-09 23:26:02 +00:00
grqx_wsl
13ed574168 [BiliBiliBangumiIE] support play_info extraction from webpage
- i.e. extracts premium formats with logged-in cookies; haven't tested with format `12240` yet.
  * test url: https://www.bilibili.com/bangumi/play/ep829434, cookies: logged-in, non-premium (see the usage sketch after the commit list)
2024-08-20 00:46:21 +12:00
grqx_wsl
79bb63957d Merge remote-tracking branch 'upstream' into biliTryLook 2024-08-17 10:58:16 +12:00
grqx_wsl
d5dbdbccd3 _download_playinfo: more understandable note 2024-07-27 23:10:53 +12:00
grqx_wsl
b2965fa3b2 [BiliBiliBangumiIE] support format 12240 (format name 智能修复, "Smart Repair"; premium only)
[cleanup] code formatting
2024-07-27 22:51:15 +12:00
grqx_wsl
510e29a42c add support for _get_interactive_entries 2024-07-27 22:09:44 +12:00
grqx_wsl
90f4203632 keep the original play_info traversal 2024-07-26 10:46:41 +12:00
grqx_wsl
b01183f904 pops param try_look when logged in. 2024-07-26 10:04:18 +12:00
grqx_wsl
29a5968278 - Applied try_look to festival videos
- Removed redundant calls to `_download_playinfo`
2024-07-26 03:07:32 +12:00
grqx_wsl
e187799c58 patch from https://github.com/yt-dlp/yt-dlp/issues/10554#issuecomment-2250014807
modified:   yt_dlp/extractor/bilibili.py
2024-07-26 02:36:04 +12:00
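
For reference, a minimal sketch (not part of any commit above) of how the branch's BiliBiliBangumi changes could be exercised through yt-dlp's Python API, using the test URL quoted in commit 13ed574168; the cookie file path and options are illustrative only.

```python
# Minimal sketch, not part of this PR: exercising the BiliBiliBangumi changes via
# yt-dlp's Python API with the test URL from commit 13ed574168. The cookie file
# path is a placeholder for logged-in, non-premium cookies.
from yt_dlp import YoutubeDL

TEST_URL = 'https://www.bilibili.com/bangumi/play/ep829434'

opts = {
    'cookiefile': 'cookies.txt',  # placeholder path to exported bilibili cookies
    'listformats': True,          # list formats only, e.g. to look for quality 12240
}

with YoutubeDL(opts) as ydl:
    ydl.extract_info(TEST_URL, download=False)
```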
7 changed files with 77 additions and 22 deletions

View File

@@ -504,7 +504,8 @@ jobs:
- windows32
runs-on: ubuntu-latest
steps:
- uses: actions/download-artifact@v4
- name: Download artifacts
uses: actions/download-artifact@v4
with:
path: artifact
pattern: build-bin-*

View File

@@ -28,3 +28,20 @@ jobs:
actions: write # For cleaning up cache
id-token: write # mandatory for trusted publishing
secrets: inherit
publish_pypi:
needs: [release]
if: vars.MASTER_PYPI_PROJECT != ''
runs-on: ubuntu-latest
permissions:
id-token: write # mandatory for trusted publishing
steps:
- name: Download artifacts
uses: actions/download-artifact@v4
with:
path: dist
name: build-pypi
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
verbose: true

View File

@@ -41,3 +41,20 @@ jobs:
actions: write # For cleaning up cache
id-token: write # mandatory for trusted publishing
secrets: inherit
publish_pypi:
needs: [release]
if: vars.NIGHTLY_PYPI_PROJECT != ''
runs-on: ubuntu-latest
permissions:
id-token: write # mandatory for trusted publishing
steps:
- name: Download artifacts
uses: actions/download-artifact@v4
with:
path: dist
name: build-pypi
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
verbose: true

View File

@@ -2,10 +2,6 @@ name: Release
on:
workflow_call:
inputs:
prerelease:
required: false
default: true
type: boolean
source:
required: false
default: ''
@@ -18,6 +14,10 @@ on:
required: false
default: ''
type: string
prerelease:
required: false
default: true
type: boolean
workflow_dispatch:
inputs:
source:
@@ -278,11 +278,20 @@ jobs:
make clean-cache
python -m build --no-isolation .
- name: Upload artifacts
if: github.event_name != 'workflow_dispatch'
uses: actions/upload-artifact@v4
with:
name: build-pypi
path: |
dist/*
compression-level: 0
- name: Publish to PyPI
if: github.event_name == 'workflow_dispatch'
uses: pypa/gh-action-pypi-publish@release/v1
with:
verbose: true
attestations: false # Currently doesn't work w/ reusable workflows (breaks nightly)
publish:
needs: [prepare, build]

View File

@@ -52,7 +52,7 @@ default = [
"pycryptodomex",
"requests>=2.32.2,<3",
"urllib3>=1.26.17,<3",
"websockets>=13.0",
"websockets>=13.0,<14",
]
curl-cffi = [
"curl-cffi==0.5.10; os_name=='nt' and implementation_name=='cpython'",

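As a quick illustration of what the new pin means at runtime, a small sketch (not part of this diff) that checks an installed websockets version against the `>=13.0,<14` range; it assumes the third-party `packaging` library is available.

```python
# Illustrative only, not part of this diff: check an installed websockets version
# against the new ">=13.0,<14" pin from pyproject.toml.
from importlib.metadata import version

from packaging.specifiers import SpecifierSet

pinned = SpecifierSet('>=13.0,<14')
installed = version('websockets')  # e.g. '13.1'

# '14.0' would fall outside the range, matching the CI breakage noted in the commit
print(installed, 'satisfies pin' if installed in pinned else 'violates pin')
```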
View File

@@ -24,7 +24,7 @@ try:
from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5 # noqa: F401
from Crypto.Hash import CMAC, SHA1 # noqa: F401
from Crypto.PublicKey import RSA # noqa: F401
except ImportError:
except (ImportError, OSError):
__version__ = f'broken {__version__}'.strip()
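The hunk above widens the guard around the optional Cryptodome import: a broken native installation can raise OSError rather than ImportError at import time, so both are now treated as "dependency unavailable". A standalone sketch of the same pattern, with a placeholder module name:

```python
# Standalone sketch of the guard pattern this hunk widens; the module name is a
# placeholder, not a real dependency. A broken native extension can raise OSError
# (e.g. a corrupt or ABI-mismatched .so/.pyd) instead of ImportError.
try:
    import some_optional_native_module  # placeholder optional dependency
except (ImportError, OSError):
    some_optional_native_module = None

if some_optional_native_module is None:
    print('optional dependency missing or broken; continuing without it')
```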

View File

@@ -165,14 +165,18 @@ class BilibiliBaseIE(InfoExtractor):
params['w_rid'] = hashlib.md5(f'{query}{self._get_wbi_key(video_id)}'.encode()).hexdigest()
return params
def _download_playinfo(self, bvid, cid, headers=None, qn=None):
params = {'bvid': bvid, 'cid': cid, 'fnval': 4048}
if qn:
params['qn'] = qn
def _download_playinfo(self, bvid, cid, headers=None, **kwargs):
params = {'bvid': bvid, 'cid': cid, 'fnval': 4048, **kwargs}
if self.is_logged_in:
params.pop('try_look', None)
if kwargs.get('qn'):
note = f'Downloading video format {kwargs["qn"]} for cid {cid}'
else:
note = f'Downloading video formats for cid {cid}'
return self._download_json(
'https://api.bilibili.com/x/player/wbi/playurl', bvid,
query=self._sign_wbi(params, bvid), headers=headers,
note=f'Downloading video formats for cid {cid} {qn or ""}')['data']
query=self._sign_wbi(params, bvid), headers=headers, note=note)['data']
def json2srt(self, json_data):
srt_data = ''
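The new `_download_playinfo` signature above forwards arbitrary query parameters and drops `try_look` for logged-in sessions. A standalone sketch of just that parameter and note handling, outside the extractor (the bvid/cid values are placeholders):

```python
# Standalone sketch (no InfoExtractor machinery) of the parameter and note handling
# introduced by the new _download_playinfo signature; bvid/cid are placeholders.
def build_playinfo_request(bvid, cid, is_logged_in, **kwargs):
    params = {'bvid': bvid, 'cid': cid, 'fnval': 4048, **kwargs}
    if is_logged_in:
        params.pop('try_look', None)  # try_look only matters for anonymous requests
    if kwargs.get('qn'):
        note = f'Downloading video format {kwargs["qn"]} for cid {cid}'
    else:
        note = f'Downloading video formats for cid {cid}'
    return params, note

print(build_playinfo_request('BV1placeholder', 1176840, is_logged_in=False, try_look=1))
print(build_playinfo_request('BV1placeholder', 1176840, is_logged_in=True, try_look=1, qn=125))
```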
@@ -286,7 +290,7 @@ class BilibiliBaseIE(InfoExtractor):
('data', 'interaction', 'graph_version', {int_or_none}))
cid_edges = self._get_divisions(video_id, graph_version, {1: {'cid': cid}}, 1)
for cid, edges in cid_edges.items():
play_info = self._download_playinfo(video_id, cid, headers=headers)
play_info = self._download_playinfo(video_id, cid, headers=headers, try_look=1)
yield {
**metainfo,
'id': f'{video_id}_{cid}',
@@ -688,11 +692,12 @@ class BiliBiliIE(BilibiliBaseIE):
aid = video_data.get('aid')
old_video_id = format_field(aid, None, f'%s_part{part_id or 1}')
cid = traverse_obj(video_data, ('pages', part_id - 1, 'cid')) if part_id else video_data.get('cid')
if is_festival or not self.is_logged_in:
query = {'try_look': 1} if not self.is_logged_in else {}
play_info = self._download_playinfo(video_id, cid, headers=headers, **query)
festival_info = {}
if is_festival:
play_info = self._download_playinfo(video_id, cid, headers=headers)
festival_info = traverse_obj(initial_state, {
'uploader': ('videoInfo', 'upName'),
'uploader_id': ('videoInfo', 'upMid', {str_or_none}),
@@ -730,7 +735,7 @@ class BiliBiliIE(BilibiliBaseIE):
else:
formats = self.extract_formats(play_info)
if not traverse_obj(play_info, ('dash')):
if not play_info.get('dash'):
# we only have legacy formats and need additional work
has_qn = lambda x: x in traverse_obj(formats, (..., 'quality'))
for qn in traverse_obj(play_info, ('accept_quality', lambda _, v: not has_qn(v), {int})):
@@ -860,10 +865,16 @@ class BiliBiliBangumiIE(BilibiliBaseIE):
self.raise_login_required('This video is for premium members only')
headers['Referer'] = url
play_info = self._search_json(
r'playurlSSRData\s*?=\s*?', webpage, 'embedded page info', episode_id,
end_pattern='\n', default=None)
if not play_info:
play_info = self._download_json(
'https://api.bilibili.com/pgc/player/web/v2/playurl', episode_id,
'Extracting episode', query={'fnval': '4048', 'ep_id': episode_id},
'Extracting episode', query={'fnval': 12240, 'ep_id': episode_id},
headers=headers)
premium_only = play_info.get('code') == -10403
play_info = traverse_obj(play_info, ('result', 'video_info', {dict})) or {}
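
The hunk above prefers the `playurlSSRData` JSON embedded in the episode page and only falls back to the pgc playurl API (now requested with `fnval=12240`) when it is missing. A rough standard-library-only sketch of that fallback order (regex and error handling simplified, not the extractor's actual helpers):

```python
# Rough sketch of the fallback order in this hunk: use the playurlSSRData JSON
# embedded in the page when present, otherwise query the pgc playurl API with
# fnval=12240. Simplified compared to the extractor's _search_json/_download_json.
import json
import re
from urllib.parse import urlencode
from urllib.request import Request, urlopen

def get_play_info(webpage_html, episode_id, headers):
    match = re.search(r'playurlSSRData\s*=\s*(.+)', webpage_html)
    if match:
        try:
            return json.loads(match.group(1).strip().rstrip(';'))
        except ValueError:
            pass  # embedded data unusable; fall back to the API
    query = urlencode({'fnval': 12240, 'ep_id': episode_id})
    request = Request(
        f'https://api.bilibili.com/pgc/player/web/v2/playurl?{query}', headers=headers)
    with urlopen(request) as response:
        return json.load(response)
```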