Compare commits

...

7 Commits

Author     SHA1         Message                                                        Date

Synarp     5d6d87ce82   Merge 440238ebc9 into b83ca24eb7                               2024-11-10 09:19:44 +05:30

sepro      b83ca24eb7   [core] Catch broken Cryptodome installations (#11486)         2024-11-10 00:53:49 +01:00
                        Authored by: seproDev

bashonly   240a7d43c8   [build] Pin websockets version to >=13.0,<14 (#11488)         2024-11-09 23:46:47 +00:00
                        websockets 14.0 causes CI test failures (a lot more of them)
                        Authored by: bashonly

bashonly   f13df591d4   [build] Enable attestations for trusted publishing (#11420)   2024-11-09 23:26:02 +00:00
                        Reverts 428ffb75aa
                        Authored by: bashonly

Synarp     440238ebc9   fix displayed page number                                      2024-06-12 02:45:36 +02:00

Synarp     5ea1d902d3   fix style issues                                               2024-06-12 02:34:23 +02:00

Synarp     47fb5ba647   workaround for the 1000 item API-limit                         2024-06-12 01:46:57 +02:00
7 changed files with 91 additions and 13 deletions

.github/workflows/build.yml

@@ -504,7 +504,8 @@ jobs:
       - windows32
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/download-artifact@v4
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
         with:
           path: artifact
           pattern: build-bin-*

.github/workflows/release-master.yml

@@ -28,3 +28,20 @@ jobs:
       actions: write  # For cleaning up cache
       id-token: write  # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.MASTER_PYPI_PROJECT != ''
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write  # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true

.github/workflows/release-nightly.yml

@@ -41,3 +41,20 @@ jobs:
       actions: write  # For cleaning up cache
       id-token: write  # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.NIGHTLY_PYPI_PROJECT != ''
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write  # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true

.github/workflows/release.yml

@@ -2,10 +2,6 @@ name: Release
 on:
   workflow_call:
     inputs:
-      prerelease:
-        required: false
-        default: true
-        type: boolean
       source:
         required: false
         default: ''
@@ -18,6 +14,10 @@ on:
         required: false
         default: ''
         type: string
+      prerelease:
+        required: false
+        default: true
+        type: boolean
   workflow_dispatch:
     inputs:
       source:
@@ -278,11 +278,20 @@ jobs:
           make clean-cache
           python -m build --no-isolation .
       - name: Upload artifacts
+        if: github.event_name != 'workflow_dispatch'
         uses: actions/upload-artifact@v4
         with:
           name: build-pypi
           path: |
             dist/*
           compression-level: 0
+
+      - name: Publish to PyPI
+        if: github.event_name == 'workflow_dispatch'
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true
+          attestations: false  # Currently doesn't work w/ reusable workflows (breaks nightly)
+
   publish:
     needs: [prepare, build]

pyproject.toml

@@ -52,7 +52,7 @@ default = [
     "pycryptodomex",
     "requests>=2.32.2,<3",
     "urllib3>=1.26.17,<3",
-    "websockets>=13.0",
+    "websockets>=13.0,<14",
 ]
 curl-cffi = [
     "curl-cffi==0.5.10; os_name=='nt' and implementation_name=='cpython'",

yt_dlp/dependencies/Cryptodome.py

@@ -24,7 +24,7 @@ try:
         from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5  # noqa: F401
         from Crypto.Hash import CMAC, SHA1  # noqa: F401
         from Crypto.PublicKey import RSA  # noqa: F401
-except ImportError:
+except (ImportError, OSError):
     __version__ = f'broken {__version__}'.strip()
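
The widened except clause matters because a Cryptodome installation whose compiled extension is present but unloadable (for example, a missing or ABI-incompatible shared library) fails at import time with OSError rather than ImportError, so the old clause let such installations crash instead of being reported as broken. A standalone sketch of the guarded-import pattern; the helper name and return values are illustrative, not yt-dlp's actual module:

# Sketch of the guarded optional-import pattern the diff above extends.
def probe_cryptodome():
    """Return the Cryptodome version, 'broken' if unloadable, or None."""
    try:
        from Crypto import __version__
        from Crypto.Cipher import AES  # noqa: F401  # loads the C extension
    except ImportError:
        return None  # not installed at all
    except OSError:
        # Installed, but the compiled extension failed to load.
        return 'broken'
    return __version__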

yt_dlp/extractor/lbry.py

@@ -78,7 +78,7 @@ class LBRYBaseIE(InfoExtractor):
         return info

-    def _fetch_page(self, display_id, url, params, page):
+    def _fetch_page(self, display_id, url, params, metapage_nr, page):
         page += 1
         page_params = {
             'no_totals': True,
@@ -86,8 +86,12 @@ class LBRYBaseIE(InfoExtractor):
             'page_size': self._PAGE_SIZE,
             **params,
         }
+        if metapage_nr == 0:
+            resource = f'page {page}'
+        else:
+            resource = f'page {metapage_nr+1}_{page}'
         result = self._call_api_proxy(
-            'claim_search', display_id, page_params, f'page {page}')
+            'claim_search', display_id, page_params, resource)
         for item in traverse_obj(result, ('items', lambda _, v: v['name'] and v['claim_id'])):
             yield {
                 **self._parse_stream(item, url),
@@ -96,6 +100,32 @@
                 'url': self._permanent_url(url, item['name'], item['claim_id']),
             }

+    def _metapage_entries(self, display_id, url, params):
+        if 'release_time' in params:
+            raise ExtractorError('release_time isn\'t allowed because _metapage_entries needs to specify it.')
+        if not ('order_by' in params and params['order_by'] == ['release_time']):
+            raise ExtractorError('videos must be sorted by release_time for _metapage_entries to work.')
+
+        last_metapage = []
+        metapage = OnDemandPagedList(
+            functools.partial(self._fetch_page, display_id, url, params, 0),
+            self._PAGE_SIZE).getslice()
+        metapage_nr = 1
+
+        while len(metapage) > 0:
+            yield from metapage
+
+            next_metapage_params = {
+                **params,
+                'release_time': '<={}'.format(metapage[-1]['release_timestamp']),
+            }
+            last_metapage = metapage
+            metapage = OnDemandPagedList(
+                functools.partial(self._fetch_page, display_id, url, next_metapage_params, metapage_nr),
+                self._PAGE_SIZE).getslice()
+            metapage = [x for x in metapage if x not in last_metapage]
+            metapage_nr += 1
+
     def _playlist_entries(self, url, display_id, claim_param, metadata):
         qs = parse_qs(url)
         content = qs.get('content', [None])[0]
@@ -123,9 +153,13 @@
             languages.append('none')
         params['any_languages'] = languages

-        entries = OnDemandPagedList(
-            functools.partial(self._fetch_page, display_id, url, params),
-            self._PAGE_SIZE)
+        if qs.get('order', ['new'])[0] == 'new':
+            entries = self._metapage_entries(display_id, url, params)
+        else:
+            self.report_warning('Extraction is limited to 1000 videos when not sorting by newest.')
+            entries = OnDemandPagedList(
+                functools.partial(self._fetch_page, display_id, url, params, 0),
+                self._PAGE_SIZE)

         return self.playlist_result(
             entries, display_id, **traverse_obj(metadata, ('value', {
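
Taken together, Synarp's three commits work around claim_search serving at most 1000 results per query: _metapage_entries drains one 1000-item "metapage" with ordinary pagination, then reissues the query with release_time capped at the last item's timestamp, filtering out items already seen so that timestamp ties across the boundary are not duplicated (hence the requirement that results be ordered by release_time). Below is a self-contained sketch of that windowing idea against a stubbed API; the stub, names, and sizes are illustrative rather than LBRY's real interface:

# Sketch of windowed pagination past a server-side result cap.
# CATALOG is newest-first; groups of 3 items share a release_time (ties).
PAGE_SIZE = 50
API_LIMIT = 1000  # the service exposes at most this many results per query

CATALOG = [{'id': n, 'release_time': 10_000 - n // 3} for n in range(2500)]

def claim_search(max_release_time, page, page_size):
    # Stub API: only the first API_LIMIT matches are reachable by paging.
    matches = [x for x in CATALOG if x['release_time'] <= max_release_time]
    window = matches[:API_LIMIT]
    return window[(page - 1) * page_size:page * page_size]

def metapage_entries():
    previous_window, max_rt = [], 10_000  # start with no upper bound
    while True:
        # Drain one capped window with ordinary page-number pagination.
        window = []
        for page in range(1, API_LIMIT // PAGE_SIZE + 1):
            batch = claim_search(max_rt, page, PAGE_SIZE)
            window.extend(batch)
            if len(batch) < PAGE_SIZE:
                break
        # Items tied on release_time straddle the window boundary; drop
        # the ones already yielded from the previous window.
        window = [x for x in window if x not in previous_window]
        if not window:
            return
        yield from window
        # Next query: everything released at or before the last item seen.
        max_rt = window[-1]['release_time']
        previous_window = window

assert [x['id'] for x in metapage_entries()] == list(range(2500))

As with the extractor change, the previous-window filter only handles ties that straddle a single boundary; if more items than a full window ever shared one release_time, this approach would stop early and miss the remainder.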