Compare commits

...

25 Commits

Author SHA1 Message Date
HobbyistDev
b2e7bc9b59
Merge dfc4769853 into b83ca24eb7 2024-11-10 14:42:04 +01:00
sepro
b83ca24eb7
[core] Catch broken Cryptodome installations (#11486)
Authored by: seproDev
2024-11-10 00:53:49 +01:00
bashonly
240a7d43c8
[build] Pin websockets version to >=13.0,<14 (#11488)
websockets 14.0 causes CI test failures (a lot more of them)

Authored by: bashonly
2024-11-09 23:46:47 +00:00
bashonly
f13df591d4
[build] Enable attestations for trusted publishing (#11420)
Reverts 428ffb75aa

Authored by: bashonly
2024-11-09 23:26:02 +00:00
HobbyistDev
dfc4769853 Fix formatting issue 2024-08-02 18:30:11 +09:00
HobbyistDev
694de75a67
Merge branch 'yt-dlp:master' into viu-indonesia-fix-6482-partial 2024-08-02 18:21:30 +09:00
bashonly
b688846068
Merge branch 'master' into viu-indonesia-fix-6482-partial 2024-06-17 10:24:04 -05:00
HobbyistDev
f92dedc92a handle _download_json error correctly
This commit will fall back to webpage extraction if the API URL is not found in a particular region
2024-04-11 07:13:34 +08:00
HobbyistDev
9a6fc7f863
Merge branch 'yt-dlp:master' into viu-indonesia-fix-6482-partial 2024-04-10 07:36:16 +08:00
HobbyistDev
1c04f8d345 merge the ID extraction and the old way extraction
This commit is untested outside `ID` region
2024-04-10 07:34:46 +08:00
HobbyistDev
7733909f74
Merge branch 'yt-dlp:master' into viu-indonesia-fix-6482-partial 2024-04-06 12:03:09 +08:00
HobbyistDev
3f7d3d20bd delete single use variable current_product_subtitle_info 2024-03-01 13:24:37 +08:00
HobbyistDev
e9e2fe84a2 use regex rather than full string match in thumbnail test 2024-03-01 13:19:20 +08:00
HobbyistDev
9dcf080549 update comment 2024-02-26 20:19:46 +08:00
HobbyistDev
b13592aafc Merge branch 'viu-indonesia-fix-6482-partial' of https://github.com/HobbyistDev/yt-dlp into viu-indonesia-fix-6482-partial 2024-02-26 20:15:20 +08:00
HobbyistDev
66d3eb246a
Merge branch 'yt-dlp:master' into viu-indonesia-fix-6482-partial 2024-02-26 20:15:03 +08:00
HobbyistDev
dbb084c001 add comment about possible to bypass geo-blocking 2024-02-26 19:36:26 +08:00
HobbyistDev
1ec2c788bb add note to skip as geo-restricted to Indonesia 2024-02-24 15:56:55 +08:00
HobbyistDev
13879e2cef remove duplicated same extraction process to thumbnails 2024-02-24 15:49:02 +08:00
HobbyistDev
c7a528c198 remove trailing spaces 2024-02-21 18:41:12 +08:00
HobbyistDev
7b9f56ade5 reformat product_detail_json 2024-02-21 18:39:21 +08:00
HobbyistDev
c588ffdede inline remove unnecessary variable stream_urls 2024-02-14 21:25:01 +08:00
HobbyistDev
827821d6ca
Merge branch 'yt-dlp:master' into viu-indonesia-fix-6482-partial 2024-02-14 21:21:46 +08:00
HobbyistDev
1ba1fb1327 [viu:ott] Fix indonesian (and probably malaysian) viu extraction 2024-02-13 20:37:26 +08:00
HobbyistDev
74dc1bf198 [ie/viu] Fix _VALID_URL in ViuOTTIE 2024-02-05 13:29:21 +09:00
7 changed files with 100 additions and 18 deletions

View File

@ -504,7 +504,8 @@ jobs:
- windows32 - windows32
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/download-artifact@v4 - name: Download artifacts
uses: actions/download-artifact@v4
with: with:
path: artifact path: artifact
pattern: build-bin-* pattern: build-bin-*

View File

@ -28,3 +28,20 @@ jobs:
actions: write # For cleaning up cache actions: write # For cleaning up cache
id-token: write # mandatory for trusted publishing id-token: write # mandatory for trusted publishing
secrets: inherit secrets: inherit
publish_pypi:
needs: [release]
if: vars.MASTER_PYPI_PROJECT != ''
runs-on: ubuntu-latest
permissions:
id-token: write # mandatory for trusted publishing
steps:
- name: Download artifacts
uses: actions/download-artifact@v4
with:
path: dist
name: build-pypi
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
verbose: true

View File

@ -41,3 +41,20 @@ jobs:
actions: write # For cleaning up cache actions: write # For cleaning up cache
id-token: write # mandatory for trusted publishing id-token: write # mandatory for trusted publishing
secrets: inherit secrets: inherit
publish_pypi:
needs: [release]
if: vars.NIGHTLY_PYPI_PROJECT != ''
runs-on: ubuntu-latest
permissions:
id-token: write # mandatory for trusted publishing
steps:
- name: Download artifacts
uses: actions/download-artifact@v4
with:
path: dist
name: build-pypi
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
verbose: true

View File

@ -2,10 +2,6 @@ name: Release
on: on:
workflow_call: workflow_call:
inputs: inputs:
prerelease:
required: false
default: true
type: boolean
source: source:
required: false required: false
default: '' default: ''
@ -18,6 +14,10 @@ on:
required: false required: false
default: '' default: ''
type: string type: string
prerelease:
required: false
default: true
type: boolean
workflow_dispatch: workflow_dispatch:
inputs: inputs:
source: source:
@ -278,11 +278,20 @@ jobs:
make clean-cache make clean-cache
python -m build --no-isolation . python -m build --no-isolation .
- name: Upload artifacts
if: github.event_name != 'workflow_dispatch'
uses: actions/upload-artifact@v4
with:
name: build-pypi
path: |
dist/*
compression-level: 0
- name: Publish to PyPI - name: Publish to PyPI
if: github.event_name == 'workflow_dispatch'
uses: pypa/gh-action-pypi-publish@release/v1 uses: pypa/gh-action-pypi-publish@release/v1
with: with:
verbose: true verbose: true
attestations: false # Currently doesn't work w/ reusable workflows (breaks nightly)
publish: publish:
needs: [prepare, build] needs: [prepare, build]

View File

@ -52,7 +52,7 @@ default = [
"pycryptodomex", "pycryptodomex",
"requests>=2.32.2,<3", "requests>=2.32.2,<3",
"urllib3>=1.26.17,<3", "urllib3>=1.26.17,<3",
"websockets>=13.0", "websockets>=13.0,<14",
] ]
curl-cffi = [ curl-cffi = [
"curl-cffi==0.5.10; os_name=='nt' and implementation_name=='cpython'", "curl-cffi==0.5.10; os_name=='nt' and implementation_name=='cpython'",

View File

@ -24,7 +24,7 @@ try:
from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5 # noqa: F401 from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5 # noqa: F401
from Crypto.Hash import CMAC, SHA1 # noqa: F401 from Crypto.Hash import CMAC, SHA1 # noqa: F401
from Crypto.PublicKey import RSA # noqa: F401 from Crypto.PublicKey import RSA # noqa: F401
except ImportError: except (ImportError, OSError):
__version__ = f'broken {__version__}'.strip() __version__ = f'broken {__version__}'.strip()

View File

@ -5,9 +5,11 @@ import urllib.parse
import uuid import uuid
from .common import InfoExtractor from .common import InfoExtractor
from ..networking.exceptions import HTTPError
from ..utils import ( from ..utils import (
ExtractorError, ExtractorError,
int_or_none, int_or_none,
merge_dicts,
remove_end, remove_end,
smuggle_url, smuggle_url,
strip_or_none, strip_or_none,
@ -150,7 +152,7 @@ class ViuPlaylistIE(ViuBaseIE):
class ViuOTTIE(InfoExtractor): class ViuOTTIE(InfoExtractor):
IE_NAME = 'viu:ott' IE_NAME = 'viu:ott'
_NETRC_MACHINE = 'viu' _NETRC_MACHINE = 'viu'
_VALID_URL = r'https?://(?:www\.)?viu\.com/ott/(?P<country_code>[a-z]{2})/(?P<lang_code>[a-z]{2}-[a-z]{2})/vod/(?P<id>\d+)' _VALID_URL = r'https?://(?:www\.)?viu\.com/ott/(?P<country_code>[a-z]{2})/(?P<lang_code>[a-z]{2}(?:-[a-z]{2})?)/vod/(?P<id>\d+)'
_TESTS = [{ _TESTS = [{
'url': 'http://www.viu.com/ott/sg/en-us/vod/3421/The%20Prime%20Minister%20and%20I', 'url': 'http://www.viu.com/ott/sg/en-us/vod/3421/The%20Prime%20Minister%20and%20I',
'info_dict': { 'info_dict': {
@ -195,6 +197,19 @@ class ViuOTTIE(InfoExtractor):
'noplaylist': False, 'noplaylist': False,
}, },
'skip': 'Geo-restricted to Hong Kong', 'skip': 'Geo-restricted to Hong Kong',
}, {
'url': 'https://www.viu.com/ott/id/id/vod/2221644/Detective-Conan',
'info_dict': {
'id': '2221644',
'ext': 'mp4',
'description': 'md5:b199bcdb07b1e01a03529f155349ddd5',
'duration': 1425,
'series': 'Detective Conan',
'title': 'Detective Conan - Episode 1150',
'episode': 'Detective Conan - Episode 1150',
'episode_number': 1150,
'thumbnail': r're:https?://prod-images\.viu\.com/clip_asset_v6/\d+/\d+/[a-f0-9]+',
},
}] }]
_AREA_ID = { _AREA_ID = {
@ -270,27 +285,43 @@ class ViuOTTIE(InfoExtractor):
url, idata = unsmuggle_url(url, {}) url, idata = unsmuggle_url(url, {})
country_code, lang_code, video_id = self._match_valid_url(url).groups() country_code, lang_code, video_id = self._match_valid_url(url).groups()
webpage = self._download_webpage(url, video_id, fatal=False)
json_ld = self._search_json_ld(webpage, video_id, fatal=False)
next_js_data = (self._search_nextjs_data(webpage, video_id, fatal=False) or {}).get('props')
runtime_info = traverse_obj(next_js_data, ('initialState', 'app', 'runtimeInfo'))
query = { query = {
'r': 'vod/ajax-detail', 'r': 'vod/ajax-detail',
'platform_flag_label': 'web', 'platform_flag_label': 'web',
'product_id': video_id, 'product_id': video_id,
} }
area_id = self._AREA_ID.get(country_code.upper()) area_id = self._AREA_ID.get(country_code.upper()) or runtime_info.get('areaId')
if area_id: if area_id:
query['area_id'] = area_id query['area_id'] = area_id
try:
product_data = self._download_json( product_data = self._download_json(
f'http://www.viu.com/ott/{country_code}/index.php', video_id, f'http://www.viu.com/ott/{country_code}/index.php', video_id,
'Downloading video info', query=query)['data'] 'Downloading video info', query=query)['data']
# The `fatal` in `_download_json` didn't prevent json error
# FIXME: probably the error still too broad
except ExtractorError as e:
if not isinstance(e.cause, (json.JSONDecodeError, HTTPError)):
raise
# NOTE: some geo-blocked like https://www.viu.com/ott/sg/en/vod/108599/The-Beauty-Inside actually can bypassed
# on other region (like in ID)
product_data = traverse_obj(
next_js_data, ('pageProps', 'fallback', lambda k, v: v if re.match(r'@"PRODUCT_DETAIL"[^:]+', k) else None),
get_all=False)['data']
video_data = product_data.get('current_product') video_data = product_data.get('current_product')
if not video_data: if not video_data:
self.raise_geo_restricted() self.raise_geo_restricted()
series_id = video_data.get('series_id') series_id = video_data.get('series_id') or traverse_obj(product_data, ('series', 'series_id'))
if self._yes_playlist(series_id, video_id, idata): if self._yes_playlist(series_id, video_id, idata):
series = product_data.get('series') or {} series = product_data.get('series') or traverse_obj(product_data, ('series', 'name')) or {}
product = series.get('product') product = series.get('product')
if product: if product:
entries = [] entries = []
@ -308,7 +339,9 @@ class ViuOTTIE(InfoExtractor):
duration_limit = False duration_limit = False
query = { query = {
'ccs_product_id': video_data['ccs_product_id'], 'ccs_product_id': video_data['ccs_product_id'],
'language_flag_id': self._LANGUAGE_FLAG.get(lang_code.lower()) or '3', 'language_flag_id': self._LANGUAGE_FLAG.get(lang_code.lower()) or runtime_info.get('languageFlagId') or '3',
'platform_flag_label': 'web',
'countryCode': country_code.upper(),
} }
def download_playback(): def download_playback():
@ -384,7 +417,7 @@ class ViuOTTIE(InfoExtractor):
}) })
title = strip_or_none(video_data.get('synopsis')) title = strip_or_none(video_data.get('synopsis'))
return { return merge_dicts({
'id': video_id, 'id': video_id,
'title': title, 'title': title,
'description': video_data.get('description'), 'description': video_data.get('description'),
@ -395,7 +428,12 @@ class ViuOTTIE(InfoExtractor):
'thumbnail': url_or_none(video_data.get('cover_image_url')), 'thumbnail': url_or_none(video_data.get('cover_image_url')),
'formats': formats, 'formats': formats,
'subtitles': subtitles, 'subtitles': subtitles,
} }, traverse_obj(json_ld, {
'thumbnails': 'thumbnails',
'title': 'title',
'episode': 'episode',
'episode_number': 'episode_number',
}))
class ViuOTTIndonesiaBaseIE(InfoExtractor): class ViuOTTIndonesiaBaseIE(InfoExtractor):