Mirror of https://github.com/yt-dlp/yt-dlp.git
Synced 2024-11-07 07:51:26 +01:00

Compare commits: 9b22165ca3 ... 70e5415ce7
27 commits, newest first:

70e5415ce7
9d4e996b8a
87264d4fda
a00af29853
0b6ad22e6a
5438593a35
9970d74c83
20314dd46f
1d03633c5a
2cb3226ca8
8afd9468b0
ef12dbdcd3
46acc418a5
6ba3085616
f6e97090d2
2863fcf2b6
c76c96677f
15b252dfd2
312a2d1e8b
54579be436
05adfd883a
3ff494f6f4
9b5bedf13a
cb480e390d
25a4bd345a
3906de0755
7d337ca977
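To inspect the same range locally, a minimal sketch (this assumes a clone of the mirror, and that plain two-dot range semantics match what the compare view shows; the endpoint SHAs are the ones listed above):

```python
import subprocess

# List the commits between the two compare endpoints, newest first.
log = subprocess.run(
    ['git', 'log', '--oneline', '9b22165ca3..70e5415ce7'],
    capture_output=True, text=True, check=True,
)
print(log.stdout)
```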
.github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml (vendored)

@@ -12,7 +12,7 @@ body:
     options:
       - label: I'm reporting that yt-dlp is broken on a **supported** site
         required: true
-      - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+      - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
         required: true
       - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
         required: true
.github/ISSUE_TEMPLATE_tmpl/2_site_support_request.yml (vendored)

@@ -12,7 +12,7 @@ body:
     options:
       - label: I'm reporting a new site support request
         required: true
-      - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+      - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
         required: true
       - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
         required: true
.github/ISSUE_TEMPLATE_tmpl/3_site_feature_request.yml (vendored)

@@ -12,7 +12,7 @@ body:
     options:
       - label: I'm requesting a site-specific feature
         required: true
-      - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+      - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
         required: true
       - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
         required: true
.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml (vendored, 2 changes)

@@ -12,7 +12,7 @@ body:
     options:
       - label: I'm reporting a bug unrelated to a specific site
         required: true
-      - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+      - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
         required: true
       - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
         required: true
.github/ISSUE_TEMPLATE_tmpl/5_feature_request.yml (vendored)

@@ -14,7 +14,7 @@ body:
         required: true
       - label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
         required: true
-      - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+      - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
         required: true
       - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
         required: true
.github/ISSUE_TEMPLATE_tmpl/6_question.yml (vendored, 2 changes)

@@ -20,7 +20,7 @@ body:
         required: true
       - label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
         required: true
-      - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+      - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
         required: true
       - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
         required: true
.github/workflows/build.yml (vendored, 69 changes)

@@ -30,6 +30,10 @@ on:
       meta_files:
         default: true
        type: boolean
+      origin:
+        required: false
+        default: ''
+        type: string
    secrets:
      GPG_SIGNING_KEY:
        required: false

@@ -37,11 +41,13 @@ on:
  workflow_dispatch:
    inputs:
      version:
-        description: Version tag (YYYY.MM.DD[.REV])
+        description: |
+          VERSION: yyyy.mm.dd[.rev] or rev
        required: true
        type: string
      channel:
-        description: Update channel (stable/nightly/...)
+        description: |
+          SOURCE of this build's updates: stable/nightly/master/<repo>
        required: true
        default: stable
        type: string

@@ -73,16 +79,34 @@ on:
        description: SHA2-256SUMS, SHA2-512SUMS, _update_spec
        default: true
        type: boolean
+      origin:
+        description: .
+        required: false
+        default: ''
+        type: choice
+        options:
+        - ''

 permissions:
   contents: read

 jobs:
+  process:
+    runs-on: ubuntu-latest
+    outputs:
+      origin: ${{ steps.process_origin.outputs.origin }}
+    steps:
+      - name: Process origin
+        id: process_origin
+        run: |
+          echo "origin=${{ inputs.origin || github.repository }}" >> "$GITHUB_OUTPUT"
+
   unix:
+    needs: process
     if: inputs.unix
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - uses: actions/setup-python@v4
         with:
           python-version: "3.10"

@@ -96,22 +120,21 @@ jobs:
           auto-activate-base: false
      - name: Install Requirements
        run: |
-          sudo apt-get -y install zip pandoc man sed
-          python -m pip install -U pip setuptools wheel
-          python -m pip install -U Pyinstaller -r requirements.txt
+          sudo apt -y install zip pandoc man sed
          reqs=$(mktemp)
-          cat > $reqs << EOF
+          cat > "$reqs" << EOF
          python=3.10.*
          pyinstaller
+          cffi
          brotli-python
+          secretstorage
          EOF
-          sed '/^brotli.*/d' requirements.txt >> $reqs
-          mamba create -n build --file $reqs
+          sed -E '/^(brotli|secretstorage).*/d' requirements.txt >> "$reqs"
+          mamba create -n build --file "$reqs"

      - name: Prepare
        run: |
-          python devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
+          python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
          python devscripts/make_lazy_extractors.py
      - name: Build Unix platform-independent binary
        run: |

@@ -150,6 +173,7 @@ jobs:
            yt-dlp_linux.zip

  linux_arm:
+    needs: process
    if: inputs.linux_arm
    permissions:
      contents: read

@@ -162,7 +186,7 @@ jobs:
          - aarch64

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
        with:
          path: ./repo
      - name: Virtualized Install, Prepare & Build

@@ -185,7 +209,7 @@ jobs:
        run: |
          cd repo
          python3.8 -m pip install -U Pyinstaller -r requirements.txt  # Cached version may be out of date
-          python3.8 devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
+          python3.8 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
          python3.8 devscripts/make_lazy_extractors.py
          python3.8 pyinst.py

@@ -206,11 +230,12 @@ jobs:
            repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}

  macos:
+    needs: process
    if: inputs.macos
    runs-on: macos-11

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      # NB: Building universal2 does not work with python from actions/setup-python
      - name: Install Requirements
        run: |

@@ -221,7 +246,7 @@ jobs:

      - name: Prepare
        run: |
-          python3 devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
+          python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
          python3 devscripts/make_lazy_extractors.py
      - name: Build
        run: |

@@ -247,11 +272,12 @@ jobs:
            dist/yt-dlp_macos.zip

  macos_legacy:
+    needs: process
    if: inputs.macos_legacy
    runs-on: macos-latest

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Install Python
        # We need the official Python, because the GA ones only support newer macOS versions
        env:

@@ -272,7 +298,7 @@ jobs:

      - name: Prepare
        run: |
-          python3 devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
+          python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
          python3 devscripts/make_lazy_extractors.py
      - name: Build
        run: |

@@ -296,11 +322,12 @@ jobs:
            dist/yt-dlp_macos_legacy

  windows:
+    needs: process
    if: inputs.windows
    runs-on: windows-latest

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with: # 3.8 is used for Win7 support
          python-version: "3.8"

@@ -311,7 +338,7 @@ jobs:

      - name: Prepare
        run: |
-          python devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
+          python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
          python devscripts/make_lazy_extractors.py
      - name: Build
        run: |

@@ -343,11 +370,12 @@ jobs:
            dist/yt-dlp_win.zip

  windows32:
+    needs: process
    if: inputs.windows32
    runs-on: windows-latest

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with: # 3.7 is used for Vista support. See https://github.com/yt-dlp/yt-dlp/issues/390
          python-version: "3.7"

@@ -359,7 +387,7 @@ jobs:

      - name: Prepare
        run: |
-          python devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
+          python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
          python devscripts/make_lazy_extractors.py
      - name: Build
        run: |

@@ -387,6 +415,7 @@ jobs:
  meta_files:
    if: inputs.meta_files && always() && !cancelled()
    needs:
+      - process
      - unix
      - linux_arm
      - macos
.github/workflows/codeql.yml (vendored, 2 changes)

@@ -29,7 +29,7 @@ jobs:

    steps:
      - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
.github/workflows/core.yml (vendored, 2 changes)

@@ -27,7 +27,7 @@ jobs:
            python-version: pypy-3.9
            run-tests-ext: bat
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
.github/workflows/download.yml (vendored, 4 changes)

@@ -9,7 +9,7 @@ jobs:
    if: "contains(github.event.head_commit.message, 'ci run dl')"
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v4
        with:

@@ -39,7 +39,7 @@ jobs:
            python-version: pypy-3.9
            run-tests-ext: bat
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
.github/workflows/publish.yml (vendored, deleted; 97 lines removed)

@@ -1,97 +0,0 @@
name: Publish
on:
  workflow_call:
    inputs:
      channel:
        default: stable
        required: true
        type: string
      version:
        required: true
        type: string
      target_commitish:
        required: true
        type: string
      prerelease:
        default: false
        required: true
        type: boolean
    secrets:
      ARCHIVE_REPO_TOKEN:
        required: false

permissions:
  contents: write

jobs:
  publish:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - uses: actions/download-artifact@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"

      - name: Generate release notes
        run: |
          printf '%s' \
            '[![Installation](https://img.shields.io/badge/-Which%20file%20should%20I%20download%3F-white.svg?style=for-the-badge)]' \
            '(https://github.com/yt-dlp/yt-dlp#installation "Installation instructions") ' \
            '[![Documentation](https://img.shields.io/badge/-Docs-brightgreen.svg?style=for-the-badge&logo=GitBook&labelColor=555555)]' \
            '(https://github.com/yt-dlp/yt-dlp/tree/2023.03.04#readme "Documentation") ' \
            '[![Donate](https://img.shields.io/badge/_-Donate-red.svg?logo=githubsponsors&labelColor=555555&style=for-the-badge)]' \
            '(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
            '[![Discord](https://img.shields.io/discord/807245652072857610?color=blue&labelColor=555555&label=&logo=discord&style=for-the-badge)]' \
            '(https://discord.gg/H5MNcFW63r "Discord") ' \
            ${{ inputs.channel != 'nightly' && '"[![Nightly](https://img.shields.io/badge/Get%20nightly%20builds-purple.svg?style=for-the-badge)]" \
            "(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\")"' || '' }} \
            > ./RELEASE_NOTES
          printf '\n\n' >> ./RELEASE_NOTES
          cat >> ./RELEASE_NOTES << EOF
          #### A description of the various files are in the [README](https://github.com/yt-dlp/yt-dlp#release-files)
          ---
          $(python ./devscripts/make_changelog.py -vv --collapsible)
          EOF
          printf '%s\n\n' '**This is an automated nightly pre-release build**' >> ./NIGHTLY_NOTES
          cat ./RELEASE_NOTES >> ./NIGHTLY_NOTES
          printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ inputs.target_commitish }}' >> ./ARCHIVE_NOTES
          cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES

      - name: Archive nightly release
        env:
          GH_TOKEN: ${{ secrets.ARCHIVE_REPO_TOKEN }}
          GH_REPO: ${{ vars.ARCHIVE_REPO }}
        if: |
          inputs.channel == 'nightly' && env.GH_TOKEN != '' && env.GH_REPO != ''
        run: |
          gh release create \
            --notes-file ARCHIVE_NOTES \
            --title "yt-dlp nightly ${{ inputs.version }}" \
            ${{ inputs.version }} \
            artifact/*

      - name: Prune old nightly release
        if: inputs.channel == 'nightly' && !vars.ARCHIVE_REPO
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          gh release delete --yes --cleanup-tag "nightly" || true
          git tag --delete "nightly" || true
          sleep 5  # Enough time to cover deletion race condition

      - name: Publish release${{ inputs.channel == 'nightly' && ' (nightly)' || '' }}
        env:
          GH_TOKEN: ${{ github.token }}
        if: (inputs.channel == 'nightly' && !vars.ARCHIVE_REPO) || inputs.channel != 'nightly'
        run: |
          gh release create \
            --notes-file ${{ inputs.channel == 'nightly' && 'NIGHTLY_NOTES' || 'RELEASE_NOTES' }} \
            --target ${{ inputs.target_commitish }} \
            --title "yt-dlp ${{ inputs.channel == 'nightly' && 'nightly ' || '' }}${{ inputs.version }}" \
            ${{ inputs.prerelease && '--prerelease' || '' }} \
            ${{ inputs.channel == 'nightly' && '"nightly"' || inputs.version }} \
            artifact/*
.github/workflows/quick-test.yml (vendored, 4 changes)

@@ -9,7 +9,7 @@ jobs:
    if: "!contains(github.event.head_commit.message, 'ci skip all')"
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Set up Python 3.11
        uses: actions/setup-python@v4
        with:

@@ -25,7 +25,7 @@ jobs:
    if: "!contains(github.event.head_commit.message, 'ci skip all')"
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
      - name: Install flake8
        run: pip install flake8
.github/workflows/release-master.yml (vendored, new file; 28 lines added)

@@ -0,0 +1,28 @@
name: Release (master)
on:
  push:
    branches:
      - master
    paths:
      - "yt_dlp/**.py"
      - "!yt_dlp/version.py"
      - "setup.py"
      - "pyinst.py"
concurrency:
  group: release-master
  cancel-in-progress: true
permissions:
  contents: read

jobs:
  release:
    if: vars.BUILD_MASTER != ''
    uses: ./.github/workflows/release.yml
    with:
      prerelease: true
      source: master
    permissions:
      contents: write
      packages: write
      id-token: write # mandatory for trusted publishing
    secrets: inherit
.github/workflows/release-nightly.yml (vendored, 57 changes)

@@ -1,52 +1,35 @@
 name: Release (nightly)
 on:
-  push:
-    branches:
-      - master
-    paths:
-      - "yt_dlp/**.py"
-      - "!yt_dlp/version.py"
-concurrency:
-  group: release-nightly
-  cancel-in-progress: true
+  schedule:
+    - cron: '23 23 * * *'
 permissions:
   contents: read

 jobs:
-  prepare:
+  check_nightly:
     if: vars.BUILD_NIGHTLY != ''
     runs-on: ubuntu-latest
     outputs:
-      version: ${{ steps.get_version.outputs.version }}
+      commit: ${{ steps.check_for_new_commits.outputs.commit }}
     steps:
-      - uses: actions/checkout@v3
-      - name: Get version
-        id: get_version
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - name: Check for new commits
+        id: check_for_new_commits
         run: |
-          python devscripts/update-version.py "$(date -u +"%H%M%S")" | grep -Po "version=\d+(\.\d+){3}" >> "$GITHUB_OUTPUT"
+          relevant_files=("yt_dlp/*.py" ':!yt_dlp/version.py' "setup.py" "pyinst.py")
+          echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"

-  build:
-    needs: prepare
-    uses: ./.github/workflows/build.yml
+  release:
+    needs: [check_nightly]
+    if: ${{ needs.check_nightly.outputs.commit }}
+    uses: ./.github/workflows/release.yml
     with:
-      version: ${{ needs.prepare.outputs.version }}
-      channel: nightly
+      prerelease: true
+      source: nightly
     permissions:
-      contents: read
-      packages: write # For package cache
-    secrets:
-      GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
-
-  publish:
-    needs: [prepare, build]
-    uses: ./.github/workflows/publish.yml
-    secrets:
-      ARCHIVE_REPO_TOKEN: ${{ secrets.ARCHIVE_REPO_TOKEN }}
-    permissions:
-      contents: write
-    with:
-      channel: nightly
-      prerelease: true
-      version: ${{ needs.prepare.outputs.version }}
-      target_commitish: ${{ github.sha }}
+      contents: write
+      packages: write
+      id-token: write # mandatory for trusted publishing
+    secrets: inherit
.github/workflows/release.yml (vendored, 358 changes)

@@ -1,14 +1,45 @@
 name: Release
 on:
+  workflow_call:
+    inputs:
+      prerelease:
+        required: false
+        default: true
+        type: boolean
+      source:
+        required: false
+        default: ''
+        type: string
+      target:
+        required: false
+        default: ''
+        type: string
+      version:
+        required: false
+        default: ''
+        type: string
   workflow_dispatch:
     inputs:
-      version:
-        description: Version tag (YYYY.MM.DD[.REV])
+      source:
+        description: |
+          SOURCE of this release's updates:
+          channel, repo, tag, or channel/repo@tag
+          (default: <current_repo>)
         required: false
         default: ''
         type: string
-      channel:
-        description: Update channel (stable/nightly/...)
+      target:
+        description: |
+          TARGET to publish this release to:
+          channel, tag, or channel@tag
+          (default: <source> if writable else <current_repo>[@source_tag])
         required: false
         default: ''
         type: string
+      version:
+        description: |
+          VERSION: yyyy.mm.dd[.rev] or rev
+          (default: auto-generated)
+        required: false
+        default: ''
+        type: string

@@ -26,12 +57,18 @@ jobs:
       contents: write
     runs-on: ubuntu-latest
     outputs:
-      channel: ${{ steps.set_channel.outputs.channel }}
-      version: ${{ steps.update_version.outputs.version }}
+      channel: ${{ steps.setup_variables.outputs.channel }}
+      version: ${{ steps.setup_variables.outputs.version }}
+      target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+      target_repo_token: ${{ steps.setup_variables.outputs.target_repo_token }}
+      target_tag: ${{ steps.setup_variables.outputs.target_tag }}
+      pypi_project: ${{ steps.setup_variables.outputs.pypi_project }}
+      pypi_suffix: ${{ steps.setup_variables.outputs.pypi_suffix }}
+      pypi_token: ${{ steps.setup_variables.outputs.pypi_token }}
       head_sha: ${{ steps.get_target.outputs.head_sha }}

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0

@@ -39,25 +76,133 @@ jobs:
         with:
           python-version: "3.10"

-      - name: Set channel
-        id: set_channel
+      - name: Process inputs
+        id: process_inputs
         run: |
-          CHANNEL="${{ github.repository == 'yt-dlp/yt-dlp' && 'stable' || github.repository }}"
-          echo "channel=${{ inputs.channel || '$CHANNEL' }}" > "$GITHUB_OUTPUT"
+          cat << EOF
+          ::group::Inputs
+          prerelease=${{ inputs.prerelease }}
+          source=${{ inputs.source }}
+          target=${{ inputs.target }}
+          version=${{ inputs.version }}
+          ::endgroup::
+          EOF
+          IFS='@' read -r source_repo source_tag <<<"${{ inputs.source }}"
+          IFS='@' read -r target_repo target_tag <<<"${{ inputs.target }}"
+          cat << EOF >> "$GITHUB_OUTPUT"
+          source_repo=${source_repo}
+          source_tag=${source_tag}
+          target_repo=${target_repo}
+          target_tag=${target_tag}
+          EOF

-      - name: Update version
-        id: update_version
+      - name: Setup variables
+        id: setup_variables
+        env:
+          source_repo: ${{ steps.process_inputs.outputs.source_repo }}
+          source_tag: ${{ steps.process_inputs.outputs.source_tag }}
+          target_repo: ${{ steps.process_inputs.outputs.target_repo }}
+          target_tag: ${{ steps.process_inputs.outputs.target_tag }}
         run: |
-          REVISION="${{ vars.PUSH_VERSION_COMMIT == '' && '$(date -u +"%H%M%S")' || '' }}"
-          REVISION="${{ inputs.prerelease && '$(date -u +"%H%M%S")' || '$REVISION' }}"
-          python devscripts/update-version.py ${{ inputs.version || '$REVISION' }} | \
-            grep -Po "version=\d+\.\d+\.\d+(\.\d+)?" >> "$GITHUB_OUTPUT"
+          # unholy bash monstrosity (sincere apologies)
+          fallback_token () {
+            if ${{ !secrets.ARCHIVE_REPO_TOKEN }}; then
+              echo "::error::Repository access secret ${target_repo_token^^} not found"
+              exit 1
+            fi
+            target_repo_token=ARCHIVE_REPO_TOKEN
+            return 0
+          }
+
+          source_is_channel=0
+          [[ "${source_repo}" == 'stable' ]] && source_repo='yt-dlp/yt-dlp'
+          if [[ -z "${source_repo}" ]]; then
+            source_repo='${{ github.repository }}'
+          elif [[ '${{ vars[format('{0}_archive_repo', env.source_repo)] }}' ]]; then
+            source_is_channel=1
+            source_channel='${{ vars[format('{0}_archive_repo', env.source_repo)] }}'
+          elif [[ -z "${source_tag}" && "${source_repo}" != */* ]]; then
+            source_tag="${source_repo}"
+            source_repo='${{ github.repository }}'
+          fi
+          resolved_source="${source_repo}"
+          if [[ "${source_tag}" ]]; then
+            resolved_source="${resolved_source}@${source_tag}"
+          elif [[ "${source_repo}" == 'yt-dlp/yt-dlp' ]]; then
+            resolved_source='stable'
+          fi
+
+          revision="${{ (inputs.prerelease || !vars.PUSH_VERSION_COMMIT) && '$(date -u +"%H%M%S")' || '' }}"
+          version="$(
+            python devscripts/update-version.py \
+            -c "${resolved_source}" -r "${{ github.repository }}" ${{ inputs.version || '$revision' }} | \
+            grep -Po "version=\K\d+\.\d+\.\d+(\.\d+)?")"
+
+          if [[ "${target_repo}" ]]; then
+            if [[ -z "${target_tag}" ]]; then
+              if [[ '${{ vars[format('{0}_archive_repo', env.target_repo)] }}' ]]; then
+                target_tag="${source_tag:-${version}}"
+              else
+                target_tag="${target_repo}"
+                target_repo='${{ github.repository }}'
+              fi
+            fi
+            if [[ "${target_repo}" != '${{ github.repository }}' ]]; then
+              target_repo='${{ vars[format('{0}_archive_repo', env.target_repo)] }}'
+              target_repo_token='${{ env.target_repo }}_archive_repo_token'
+              ${{ !!secrets[format('{0}_archive_repo_token', env.target_repo)] }} || fallback_token
+              pypi_project='${{ vars[format('{0}_pypi_project', env.target_repo)] }}'
+              pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.target_repo)] }}'
+              ${{ !secrets[format('{0}_pypi_token', env.target_repo)] }} || pypi_token='${{ env.target_repo }}_pypi_token'
+            fi
+          else
+            target_tag="${source_tag:-${version}}"
+            if ((source_is_channel)); then
+              target_repo="${source_channel}"
+              target_repo_token='${{ env.source_repo }}_archive_repo_token'
+              ${{ !!secrets[format('{0}_archive_repo_token', env.source_repo)] }} || fallback_token
+              pypi_project='${{ vars[format('{0}_pypi_project', env.source_repo)] }}'
+              pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.source_repo)] }}'
+              ${{ !secrets[format('{0}_pypi_token', env.source_repo)] }} || pypi_token='${{ env.source_repo }}_pypi_token'
+            else
+              target_repo='${{ github.repository }}'
+            fi
+          fi
+
+          if [[ "${target_repo}" == '${{ github.repository }}' ]] && ${{ !inputs.prerelease }}; then
+            pypi_project='${{ vars.PYPI_PROJECT }}'
+          fi
+          if [[ -z "${pypi_token}" && "${pypi_project}" ]]; then
+            if ${{ !secrets.PYPI_TOKEN }}; then
+              pypi_token=OIDC
+            else
+              pypi_token=PYPI_TOKEN
+            fi
+          fi
+
+          echo "::group::Output variables"
+          cat << EOF | tee -a "$GITHUB_OUTPUT"
+          channel=${resolved_source}
+          version=${version}
+          target_repo=${target_repo}
+          target_repo_token=${target_repo_token}
+          target_tag=${target_tag}
+          pypi_project=${pypi_project}
+          pypi_suffix=${pypi_suffix}
+          pypi_token=${pypi_token}
+          EOF
+          echo "::endgroup::"

       - name: Update documentation
+        env:
+          version: ${{ steps.setup_variables.outputs.version }}
+          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+        if: |
+          !inputs.prerelease && env.target_repo == github.repository
         run: |
           make doc
           sed '/### /Q' Changelog.md >> ./CHANGELOG
-          echo '### ${{ steps.update_version.outputs.version }}' >> ./CHANGELOG
+          echo '### ${{ env.version }}' >> ./CHANGELOG
           python ./devscripts/make_changelog.py -vv -c >> ./CHANGELOG
           echo >> ./CHANGELOG
           grep -Poz '(?s)### \d+\.\d+\.\d+.+' 'Changelog.md' | head -n -1 >> ./CHANGELOG

@@ -65,12 +210,16 @@ jobs:

       - name: Push to release
         id: push_release
-        if: ${{ !inputs.prerelease }}
+        env:
+          version: ${{ steps.setup_variables.outputs.version }}
+          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+        if: |
+          !inputs.prerelease && env.target_repo == github.repository
         run: |
           git config --global user.name github-actions
-          git config --global user.email github-actions@example.com
+          git config --global user.email github-actions@github.com
           git add -u
-          git commit -m "Release ${{ steps.update_version.outputs.version }}" \
+          git commit -m "Release ${{ env.version }}" \
             -m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all :ci run dl"
           git push origin --force ${{ github.event.ref }}:release

@@ -80,7 +229,10 @@ jobs:
           echo "head_sha=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT"

       - name: Update master
-        if: vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease
+        env:
+          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+        if: |
+          vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease && env.target_repo == github.repository
         run: git push origin ${{ github.event.ref }}

   build:

@@ -89,75 +241,159 @@ jobs:
     with:
       version: ${{ needs.prepare.outputs.version }}
       channel: ${{ needs.prepare.outputs.channel }}
+      origin: ${{ needs.prepare.outputs.target_repo }}
     permissions:
       contents: read
       packages: write # For package cache
     secrets:
       GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}

-  publish_pypi_homebrew:
+  publish_pypi:
     needs: [prepare, build]
+    if: ${{ needs.prepare.outputs.pypi_project }}
     runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - uses: actions/setup-python@v4
         with:
           python-version: "3.10"

       - name: Install Requirements
         run: |
-          sudo apt-get -y install pandoc man
+          sudo apt -y install pandoc man
           python -m pip install -U pip setuptools wheel twine
           python -m pip install -U -r requirements.txt

       - name: Prepare
-        run: |
-          python devscripts/update-version.py ${{ needs.prepare.outputs.version }}
-          python devscripts/make_lazy_extractors.py
-
-      - name: Build and publish on PyPI
         env:
-          TWINE_USERNAME: __token__
-          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
-        if: env.TWINE_PASSWORD != '' && !inputs.prerelease
+          version: ${{ needs.prepare.outputs.version }}
+          suffix: ${{ needs.prepare.outputs.pypi_suffix }}
+          channel: ${{ needs.prepare.outputs.channel }}
+          target_repo: ${{ needs.prepare.outputs.target_repo }}
+          pypi_project: ${{ needs.prepare.outputs.pypi_project }}
+        run: |
+          python devscripts/update-version.py -c "${{ env.channel }}" -r "${{ env.target_repo }}" -s "${{ env.suffix }}" "${{ env.version }}"
+          python devscripts/make_lazy_extractors.py
+          sed -i -E "s/(name=')[^']+(', # package name)/\1${{ env.pypi_project }}\2/" setup.py
+
+      - name: Build
         run: |
           rm -rf dist/*
           make pypi-files
           python devscripts/set-variant.py pip -M "You installed yt-dlp with pip or using the wheel from PyPi; Use that to update"
           python setup.py sdist bdist_wheel
+
+      - name: Publish to PyPI via token
+        env:
+          TWINE_USERNAME: __token__
+          TWINE_PASSWORD: ${{ secrets[needs.prepare.outputs.pypi_token] }}
+        if: |
+          needs.prepare.outputs.pypi_token != 'OIDC' && env.TWINE_PASSWORD
+        run: |
           twine upload dist/*

-      - name: Checkout Homebrew repository
-        env:
-          BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
-          PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
-        if: env.BREW_TOKEN != '' && env.PYPI_TOKEN != '' && !inputs.prerelease
-        uses: actions/checkout@v3
-        with:
-          repository: yt-dlp/homebrew-taps
-          path: taps
-          ssh-key: ${{ secrets.BREW_TOKEN }}
-
-      - name: Update Homebrew Formulae
-        env:
-          BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
-          PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
-        if: env.BREW_TOKEN != '' && env.PYPI_TOKEN != '' && !inputs.prerelease
-        run: |
-          python devscripts/update-formulae.py taps/Formula/yt-dlp.rb "${{ needs.prepare.outputs.version }}"
-          git -C taps/ config user.name github-actions
-          git -C taps/ config user.email github-actions@example.com
-          git -C taps/ commit -am 'yt-dlp: ${{ needs.prepare.outputs.version }}'
-          git -C taps/ push
+      - name: Publish to PyPI via trusted publishing
+        if: |
+          needs.prepare.outputs.pypi_token == 'OIDC'
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true

   publish:
     needs: [prepare, build]
-    uses: ./.github/workflows/publish.yml
     permissions:
       contents: write
-    with:
-      channel: ${{ needs.prepare.outputs.channel }}
-      prerelease: ${{ inputs.prerelease }}
-      version: ${{ needs.prepare.outputs.version }}
-      target_commitish: ${{ needs.prepare.outputs.head_sha }}
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - uses: actions/download-artifact@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+
+      - name: Generate release notes
+        env:
+          head_sha: ${{ needs.prepare.outputs.head_sha }}
+          target_repo: ${{ needs.prepare.outputs.target_repo }}
+          target_tag: ${{ needs.prepare.outputs.target_tag }}
+        run: |
+          printf '%s' \
+            '[![Installation](https://img.shields.io/badge/-Which%20file%20should%20I%20download%3F-white.svg?style=for-the-badge)]' \
+            '(https://github.com/${{ github.repository }}#installation "Installation instructions") ' \
+            '[![Documentation](https://img.shields.io/badge/-Docs-brightgreen.svg?style=for-the-badge&logo=GitBook&labelColor=555555)]' \
+            '(https://github.com/${{ github.repository }}' \
+            '${{ env.target_repo == github.repository && format('/tree/{0}', env.target_tag) || '' }}#readme "Documentation") ' \
+            '[![Donate](https://img.shields.io/badge/_-Donate-red.svg?logo=githubsponsors&labelColor=555555&style=for-the-badge)]' \
+            '(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
+            '[![Discord](https://img.shields.io/discord/807245652072857610?color=blue&labelColor=555555&label=&logo=discord&style=for-the-badge)]' \
+            '(https://discord.gg/H5MNcFW63r "Discord") ' \
+            ${{ env.target_repo == 'yt-dlp/yt-dlp' && '\
+            "[![Nightly](https://img.shields.io/badge/Get%20nightly%20builds-purple.svg?style=for-the-badge)]" \
+            "(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\") " \
+            "[![Master](https://img.shields.io/badge/Get%20master%20builds-lightblue.svg?style=for-the-badge)]" \
+            "(https://github.com/yt-dlp/yt-dlp-master-builds/releases/latest \"Master builds\")"' || '' }} > ./RELEASE_NOTES
+          printf '\n\n' >> ./RELEASE_NOTES
+          cat >> ./RELEASE_NOTES << EOF
+          #### A description of the various files are in the [README](https://github.com/${{ github.repository }}#release-files)
+          ---
+          $(python ./devscripts/make_changelog.py -vv --collapsible)
+          EOF
+          printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES
+          cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
+          printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ env.head_sha }}' >> ./ARCHIVE_NOTES
+          cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES
+
+      - name: Publish to archive repo
+        env:
+          GH_TOKEN: ${{ secrets[needs.prepare.outputs.target_repo_token] }}
+          GH_REPO: ${{ needs.prepare.outputs.target_repo }}
+          version: ${{ needs.prepare.outputs.version }}
+          channel: ${{ needs.prepare.outputs.channel }}
+        if: |
+          inputs.prerelease && env.GH_TOKEN != '' && env.GH_REPO != '' && env.GH_REPO != github.repository
+        run: |
+          title="${{ startswith(env.GH_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}${{ env.channel }}"
+          gh release create \
+            --notes-file ARCHIVE_NOTES \
+            --title "${title} ${{ env.version }}" \
+            ${{ env.version }} \
+            artifact/*
+
+      - name: Prune old release
+        env:
+          GH_TOKEN: ${{ github.token }}
+          version: ${{ needs.prepare.outputs.version }}
+          target_repo: ${{ needs.prepare.outputs.target_repo }}
+          target_tag: ${{ needs.prepare.outputs.target_tag }}
+        if: |
+          env.target_repo == github.repository && env.target_tag != env.version
+        run: |
+          gh release delete --yes --cleanup-tag "${{ env.target_tag }}" || true
+          git tag --delete "${{ env.target_tag }}" || true
+          sleep 5 # Enough time to cover deletion race condition
+
+      - name: Publish release
+        env:
+          GH_TOKEN: ${{ github.token }}
+          version: ${{ needs.prepare.outputs.version }}
+          target_repo: ${{ needs.prepare.outputs.target_repo }}
+          target_tag: ${{ needs.prepare.outputs.target_tag }}
+          head_sha: ${{ needs.prepare.outputs.head_sha }}
+        if: |
+          env.target_repo == github.repository
+        run: |
+          title="${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}"
+          title+="${{ env.target_tag != env.version && format('{0} ', env.target_tag) || '' }}"
+          gh release create \
+            --notes-file ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }} \
+            --target ${{ env.head_sha }} \
+            --title "${title}${{ env.version }}" \
+            ${{ inputs.prerelease && '--prerelease' || '' }} \
+            ${{ env.target_tag }} \
+            artifact/*
README.md (27 changes)

@@ -121,7 +121,7 @@ yt-dlp is a [youtube-dl](https://github.com/ytdl-org/youtube-dl) fork based on t

 * **Self updater**: The releases can be updated using `yt-dlp -U`, and downgraded using `--update-to` if required

-* **Nightly builds**: [Automated nightly builds](#update-channels) can be used with `--update-to nightly`
+* **Automated builds**: [Nightly/master builds](#update-channels) can be used with `--update-to nightly` and `--update-to master`

 See [changelog](Changelog.md) or [commits](https://github.com/yt-dlp/yt-dlp/commits) for the full list of changes

@@ -193,9 +193,11 @@ For other third-party package managers, see [the wiki](https://github.com/yt-dlp

 <a id="update-channels"/>

-There are currently two release channels for binaries, `stable` and `nightly`.
-`stable` is the default channel, and many of its changes have been tested by users of the nightly channel.
-The `nightly` channel has releases built after each push to the master branch, and will have the most recent fixes and additions, but also have more risk of regressions. They are available in [their own repo](https://github.com/yt-dlp/yt-dlp-nightly-builds/releases).
+There are currently three release channels for binaries: `stable`, `nightly` and `master`.
+
+* `stable` is the default channel, and many of its changes have been tested by users of the `nightly` and `master` channels.
+* The `nightly` channel has releases scheduled to build every day around midnight UTC, for a snapshot of the project's new patches and changes. This is the **recommended channel for regular users** of yt-dlp. The `nightly` releases are available from [yt-dlp/yt-dlp-nightly-builds](https://github.com/yt-dlp/yt-dlp-nightly-builds/releases) or as development releases of the `yt-dlp` PyPI package (which can be installed with pip's `--pre` flag).
+* The `master` channel features releases that are built after each push to the master branch, and these will have the very latest fixes and additions, but may also be more prone to regressions. They are available from [yt-dlp/yt-dlp-master-builds](https://github.com/yt-dlp/yt-dlp-master-builds/releases).

 When using `--update`/`-U`, a release binary will only update to its current channel.
 `--update-to CHANNEL` can be used to switch to a different channel when a newer version is available. `--update-to [CHANNEL@]TAG` can also be used to upgrade or downgrade to specific tags from a channel.

@@ -203,10 +205,19 @@ When using `--update`/`-U`, a release binary will only update to its current cha
 You may also use `--update-to <repository>` (`<owner>/<repository>`) to update to a channel on a completely different repository. Be careful with what repository you are updating to though, there is no verification done for binaries from different repositories.

 Example usage:
-* `yt-dlp --update-to nightly` change to `nightly` channel and update to its latest release
-* `yt-dlp --update-to stable@2023.02.17` upgrade/downgrade to release to `stable` channel tag `2023.02.17`
-* `yt-dlp --update-to 2023.01.06` upgrade/downgrade to tag `2023.01.06` if it exists on the current channel
-* `yt-dlp --update-to example/yt-dlp@2023.03.01` upgrade/downgrade to the release from the `example/yt-dlp` repository, tag `2023.03.01`
+* `yt-dlp --update-to master` switch to the `master` channel and update to its latest release
+* `yt-dlp --update-to stable@2023.07.06` upgrade/downgrade to release to `stable` channel tag `2023.07.06`
+* `yt-dlp --update-to 2023.10.07` upgrade/downgrade to tag `2023.10.07` if it exists on the current channel
+* `yt-dlp --update-to example/yt-dlp@2023.09.24` upgrade/downgrade to the release from the `example/yt-dlp` repository, tag `2023.09.24`
+
+**Important**: Any user experiencing an issue with the `stable` release should install or update to the `nightly` release before submitting a bug report:
+```
+# To update to nightly from stable executable/binary:
+yt-dlp --update-to nightly
+
+# To install nightly with pip:
+python -m pip install -U --pre yt-dlp
+```

 <!-- MANPAGE: BEGIN EXCLUDED SECTION -->
 ## RELEASE FILES
devscripts/changelog_override.json

@@ -104,5 +104,10 @@
         "when": "8a8b54523addf46dfd50ef599761a81bc22362e6",
         "short": "[rh:requests] Add handler for `requests` HTTP library (#3668)\n\n\tAdds support for HTTPS proxies and persistent connections (keep-alive)",
         "authors": ["bashonly", "coletdjnz", "Grub4K"]
-    }
+    },
+    {
+        "action": "add",
+        "when": "1d03633c5a1621b9f3a756f0a4f9dc61fab3aeaa",
+        "short": "[priority] **The release channels have been adjusted!**\n\t* [`master`](https://github.com/yt-dlp/yt-dlp-master-builds) builds are made after each push, containing the latest fixes (but also possibly bugs). This was previously the `nightly` channel.\n\t* [`nightly`](https://github.com/yt-dlp/yt-dlp-nightly-builds) builds are now made once a day, if there were any changes."
+    }
 ]
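The override entries are plain JSON keyed on a commit SHA; devscripts/make_changelog.py is their real consumer. A minimal sketch of reading them (the field names come from the hunk above; the loop itself is illustrative):

```python
import json

with open('devscripts/changelog_override.json') as f:
    overrides = json.load(f)

for entry in overrides:
    # every entry carries an "action"; "when" pins it to a commit SHA
    print(entry['action'], entry.get('when', ''), entry.get('short', '')[:60])
```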
devscripts/make_issue_template.py

@@ -12,7 +12,6 @@ import re
 from devscripts.utils import (
     get_filename_args,
     read_file,
-    read_version,
     write_file,
 )

@@ -35,19 +34,18 @@ VERBOSE_TMPL = '''
       description: |
         It should start like this:
       placeholder: |
-        [debug] Command-line config: ['-vU', 'test:youtube']
-        [debug] Portable config "yt-dlp.conf": ['-i']
+        [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version %(version)s [9d339c4] (win32_exe)
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp [b634ba742] (win_exe)
         [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] Checking exe version: ffmpeg -bsfs
-        [debug] Checking exe version: ffprobe -bsfs
         [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
         [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
         [debug] Proxy map: {}
-        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
-        Latest version: %(version)s, Current version: %(version)s
-        yt-dlp is up to date (%(version)s)
+        [debug] Request Handlers: urllib, requests
+        [debug] Loaded 1893 extractors
+        [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp-nightly-builds/releases/latest
+        yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
+        [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
       render: shell
     validations:

@@ -66,7 +64,7 @@ NO_SKIP = '''


 def main():
-    fields = {'version': read_version(), 'no_skip': NO_SKIP}
+    fields = {'no_skip': NO_SKIP}
     fields['verbose'] = VERBOSE_TMPL % fields
     fields['verbose_optional'] = re.sub(r'(\n\s+validations:)?\n\s+required: true', '', fields['verbose'])
devscripts/update-formulae.py (deleted; 39 lines removed)

@@ -1,39 +0,0 @@
#!/usr/bin/env python3

"""
Usage: python3 ./devscripts/update-formulae.py <path-to-formulae-rb> <version>
version can be either 0-aligned (yt-dlp version) or normalized (PyPi version)
"""

# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import json
import re
import urllib.request

from devscripts.utils import read_file, write_file

filename, version = sys.argv[1:]

normalized_version = '.'.join(str(int(x)) for x in version.split('.'))

pypi_release = json.loads(urllib.request.urlopen(
    'https://pypi.org/pypi/yt-dlp/%s/json' % normalized_version
).read().decode())

tarball_file = next(x for x in pypi_release['urls'] if x['filename'].endswith('.tar.gz'))

sha256sum = tarball_file['digests']['sha256']
url = tarball_file['url']

formulae_text = read_file(filename)

formulae_text = re.sub(r'sha256 "[0-9a-f]*?"', 'sha256 "%s"' % sha256sum, formulae_text, count=1)
formulae_text = re.sub(r'url "[^"]*?"', 'url "%s"' % url, formulae_text, count=1)

write_file(filename, formulae_text)
devscripts/update-version.py

@@ -20,7 +20,7 @@ def get_new_version(version, revision):
     version = datetime.now(timezone.utc).strftime('%Y.%m.%d')

     if revision:
-        assert revision.isdigit(), 'Revision must be a number'
+        assert revision.isdecimal(), 'Revision must be a number'
     else:
         old_version = read_version().split('.')
         if version.split('.') == old_version[:3]:

@@ -46,6 +46,10 @@ VARIANT = None
 UPDATE_HINT = None

 CHANNEL = {channel!r}
+
+ORIGIN = {origin!r}
+
+_pkg_version = {package_version!r}
 '''

 if __name__ == '__main__':

@@ -53,6 +57,12 @@ if __name__ == '__main__':
     parser.add_argument(
         '-c', '--channel', default='stable',
         help='Select update channel (default: %(default)s)')
+    parser.add_argument(
+        '-r', '--origin', default='local',
+        help='Select origin/repository (default: %(default)s)')
+    parser.add_argument(
+        '-s', '--suffix', default='',
+        help='Add an alphanumeric suffix to the package version, e.g. "dev"')
     parser.add_argument(
         '-o', '--output', default='yt_dlp/version.py',
         help='The output file to write to (default: %(default)s)')

@@ -66,6 +76,7 @@ if __name__ == '__main__':
         args.version if args.version and '.' in args.version
         else get_new_version(None, args.version))
     write_file(args.output, VERSION_TEMPLATE.format(
-        version=version, git_head=git_head, channel=args.channel))
+        version=version, git_head=git_head, channel=args.channel, origin=args.origin,
+        package_version=f'{version}{args.suffix}'))

     print(f'version={version} ({args.channel}), head={git_head}')
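With the new `-r`/`-s` flags, the template renders a version.py along these lines; this is only a partial sketch with illustrative values, not output taken from the diff:

```python
# yt_dlp/version.py as rendered by VERSION_TEMPLATE above (values made up)
__version__ = '2023.10.13'

CHANNEL = 'stable'

ORIGIN = 'yt-dlp/yt-dlp'

_pkg_version = '2023.10.13'  # with -s dev this would be '2023.10.13dev'
```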
devscripts/utils.py

@@ -13,10 +13,11 @@ def write_file(fname, content, mode='w'):
         return f.write(content)


-def read_version(fname='yt_dlp/version.py'):
+def read_version(fname='yt_dlp/version.py', varname='__version__'):
     """Get the version without importing the package"""
-    exec(compile(read_file(fname), fname, 'exec'))
-    return locals()['__version__']
+    items = {}
+    exec(compile(read_file(fname), fname, 'exec'), items)
+    return items[varname]


 def get_filename_args(has_infile=False, default_outfile=None):
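A short usage sketch of the extended helper, run from the repo root; the second call mirrors what setup.py does after this change:

```python
from devscripts.utils import read_version

version = read_version()                            # __version__, e.g. '2023.10.13'
pkg_version = read_version(varname='_pkg_version')  # PyPI package version, may carry a suffix
```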
requirements.txt

@@ -1,8 +1,9 @@
 mutagen
 pycryptodomex
 websockets
-brotli; platform_python_implementation=='CPython'
-brotlicffi; platform_python_implementation!='CPython'
+brotli; implementation_name=='cpython'
+brotlicffi; implementation_name!='cpython'
 certifi
 requests>=2.31.0,<3
-urllib3>=1.26.17,<3
+urllib3>=1.26.17,<3
+secretstorage; sys_platform=='linux' and (implementation_name!='pypy' or implementation_version>='7.3.10')
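The marker swap matters because `implementation_name` is the PEP 508 spelling of `sys.implementation.name` (lowercase `cpython`/`pypy`), while `platform_python_implementation` maps to `platform.python_implementation()` (`CPython`/`PyPy`). A small sketch of what the new markers see at runtime:

```python
import platform
import sys

# the values the two marker variables evaluate against
print(sys.implementation.name)            # 'cpython' on CPython, 'pypy' on PyPy
print(platform.python_implementation())   # 'CPython' on CPython, 'PyPy' on PyPy

# mirror of the new brotli markers
use_binary_brotli = sys.implementation.name == 'cpython'
print('brotli' if use_binary_brotli else 'brotlicffi')
```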
setup.py (4 changes)

@@ -18,7 +18,7 @@ except ImportError:

 from devscripts.utils import read_file, read_version

-VERSION = read_version()
+VERSION = read_version(varname='_pkg_version')

 DESCRIPTION = 'A youtube-dl fork with additional features and patches'

@@ -142,7 +142,7 @@ def main():
     params = build_params()

     setup(
-        name='yt-dlp',
+        name='yt-dlp', # package name (do not change/remove comment)
         version=VERSION,
         maintainer='pukkandan',
         maintainer_email='pukkandan.ytdlp@gmail.com',
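The comment added to `name=` is load-bearing: the sed command in release.yml (shown earlier) uses it as the anchor when rebranding the package for an archive PyPI project. A Python equivalent of that substitution, using a made-up replacement name purely for illustration:

```python
import re

line = "name='yt-dlp', # package name (do not change/remove comment)"
# same pattern as the sed in release.yml; 'yt-dlp-dev' is a hypothetical project name
print(re.sub(r"(name=')[^']+(', # package name)", r"\1yt-dlp-dev\2", line))
```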
test/test_update.py (new file; 199 lines added)

@@ -0,0 +1,199 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys
import unittest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


from test.helper import FakeYDL, report_warning
from yt_dlp.update import Updater, UpdateInfo

TEST_API_DATA = {
    'yt-dlp/yt-dlp/latest': {
        'tag_name': '2023.12.31',
        'target_commitish': 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',
        'name': 'yt-dlp 2023.12.31',
        'body': 'BODY',
    },
    'yt-dlp/yt-dlp-nightly-builds/latest': {
        'tag_name': '2023.12.31.123456',
        'target_commitish': 'master',
        'name': 'yt-dlp nightly 2023.12.31.123456',
        'body': 'Generated from: https://github.com/yt-dlp/yt-dlp/commit/cccccccccccccccccccccccccccccccccccccccc',
    },
    'yt-dlp/yt-dlp-master-builds/latest': {
        'tag_name': '2023.12.31.987654',
        'target_commitish': 'master',
        'name': 'yt-dlp master 2023.12.31.987654',
        'body': 'Generated from: https://github.com/yt-dlp/yt-dlp/commit/dddddddddddddddddddddddddddddddddddddddd',
    },
    'yt-dlp/yt-dlp/tags/testing': {
        'tag_name': 'testing',
        'target_commitish': '9999999999999999999999999999999999999999',
        'name': 'testing',
        'body': 'BODY',
    },
    'fork/yt-dlp/latest': {
        'tag_name': '2050.12.31',
        'target_commitish': 'eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee',
        'name': '2050.12.31',
        'body': 'BODY',
    },
    'fork/yt-dlp/tags/pr0000': {
        'tag_name': 'pr0000',
        'target_commitish': 'ffffffffffffffffffffffffffffffffffffffff',
        'name': 'pr1234 2023.11.11.000000',
        'body': 'BODY',
    },
    'fork/yt-dlp/tags/pr1234': {
        'tag_name': 'pr1234',
        'target_commitish': '0000000000000000000000000000000000000000',
        'name': 'pr1234 2023.12.31.555555',
        'body': 'BODY',
    },
    'fork/yt-dlp/tags/pr9999': {
        'tag_name': 'pr9999',
        'target_commitish': '1111111111111111111111111111111111111111',
        'name': 'pr9999',
        'body': 'BODY',
    },
    'fork/yt-dlp-satellite/tags/pr987': {
        'tag_name': 'pr987',
        'target_commitish': 'master',
        'name': 'pr987',
        'body': 'Generated from: https://github.com/yt-dlp/yt-dlp/commit/2222222222222222222222222222222222222222',
    },
}

TEST_LOCKFILE_V1 = '''# This file is used for regulating self-update
lock 2022.08.18.36 .+ Python 3.6
lock 2023.11.13 .+ Python 3.7
'''

TEST_LOCKFILE_V2 = '''# This file is used for regulating self-update
lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3.6
lockV2 yt-dlp/yt-dlp 2023.11.13 .+ Python 3.7
'''

TEST_LOCKFILE_V1_V2 = '''# This file is used for regulating self-update
lock 2022.08.18.36 .+ Python 3.6
lock 2023.11.13 .+ Python 3.7
lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3.6
lockV2 yt-dlp/yt-dlp 2023.11.13 .+ Python 3.7
lockV2 fork/yt-dlp pr0000 .+ Python 3.6
lockV2 fork/yt-dlp pr1234 .+ Python 3.7
lockV2 fork/yt-dlp pr9999 .+ Python 3.11
'''


class FakeUpdater(Updater):
    current_version = '2022.01.01'
    current_commit = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'

    _channel = 'stable'
    _origin = 'yt-dlp/yt-dlp'

    def _download_update_spec(self, *args, **kwargs):
        return TEST_LOCKFILE_V1_V2

    def _call_api(self, tag):
        tag = f'tags/{tag}' if tag != 'latest' else tag
        return TEST_API_DATA[f'{self.requested_repo}/{tag}']

    def _report_error(self, msg, *args, **kwargs):
        report_warning(msg)


class TestUpdate(unittest.TestCase):
    maxDiff = None

    def test_update_spec(self):
        ydl = FakeYDL()
        updater = FakeUpdater(ydl, 'stable@latest')

        def test(lockfile, identifier, input_tag, expect_tag, exact=False, repo='yt-dlp/yt-dlp'):
            updater._identifier = identifier
            updater._exact = exact
            updater.requested_repo = repo
            result = updater._process_update_spec(lockfile, input_tag)
            self.assertEqual(
                result, expect_tag,
                f'{identifier!r} requesting {repo}@{input_tag} (exact={exact}) '
                f'returned {result!r} instead of {expect_tag!r}')

        test(TEST_LOCKFILE_V1, 'zip Python 3.11.0', '2023.11.13', '2023.11.13')
        test(TEST_LOCKFILE_V1, 'zip stable Python 3.11.0', '2023.11.13', '2023.11.13', exact=True)
        test(TEST_LOCKFILE_V1, 'zip Python 3.6.0', '2023.11.13', '2022.08.18.36')
        test(TEST_LOCKFILE_V1, 'zip stable Python 3.6.0', '2023.11.13', None, exact=True)
        test(TEST_LOCKFILE_V1, 'zip Python 3.7.0', '2023.11.13', '2023.11.13')
        test(TEST_LOCKFILE_V1, 'zip stable Python 3.7.1', '2023.11.13', '2023.11.13')
        test(TEST_LOCKFILE_V1, 'zip Python 3.7.1', '2023.12.31', '2023.11.13')
        test(TEST_LOCKFILE_V1, 'zip stable Python 3.7.1', '2023.12.31', '2023.11.13')

        test(TEST_LOCKFILE_V2, 'zip Python 3.11.1', '2023.11.13', '2023.11.13')
        test(TEST_LOCKFILE_V2, 'zip stable Python 3.11.1', '2023.12.31', '2023.12.31')
        test(TEST_LOCKFILE_V2, 'zip Python 3.6.1', '2023.11.13', '2022.08.18.36')
        test(TEST_LOCKFILE_V2, 'zip stable Python 3.7.2', '2023.11.13', '2023.11.13')
        test(TEST_LOCKFILE_V2, 'zip Python 3.7.2', '2023.12.31', '2023.11.13')

        test(TEST_LOCKFILE_V1_V2, 'zip Python 3.11.2', '2023.11.13', '2023.11.13')
        test(TEST_LOCKFILE_V1_V2, 'zip stable Python 3.11.2', '2023.12.31', '2023.12.31')
        test(TEST_LOCKFILE_V1_V2, 'zip Python 3.6.2', '2023.11.13', '2022.08.18.36')
        test(TEST_LOCKFILE_V1_V2, 'zip stable Python 3.7.3', '2023.11.13', '2023.11.13')
        test(TEST_LOCKFILE_V1_V2, 'zip Python 3.7.3', '2023.12.31', '2023.11.13')
        test(TEST_LOCKFILE_V1_V2, 'zip Python 3.6.3', 'pr0000', None, repo='fork/yt-dlp')
        test(TEST_LOCKFILE_V1_V2, 'zip stable Python 3.7.4', 'pr0000', 'pr0000', repo='fork/yt-dlp')
        test(TEST_LOCKFILE_V1_V2, 'zip Python 3.6.4', 'pr0000', None, repo='fork/yt-dlp')
        test(TEST_LOCKFILE_V1_V2, 'zip Python 3.7.4', 'pr1234', None, repo='fork/yt-dlp')
        test(TEST_LOCKFILE_V1_V2, 'zip stable Python 3.8.1', 'pr1234', 'pr1234', repo='fork/yt-dlp')
        test(TEST_LOCKFILE_V1_V2, 'zip Python 3.7.5', 'pr1234', None, repo='fork/yt-dlp')
        test(TEST_LOCKFILE_V1_V2, 'zip Python 3.11.3', 'pr9999', None, repo='fork/yt-dlp')
        test(TEST_LOCKFILE_V1_V2, 'zip stable Python 3.12.0', 'pr9999', 'pr9999', repo='fork/yt-dlp')
        test(TEST_LOCKFILE_V1_V2, 'zip Python 3.11.4', 'pr9999', None, repo='fork/yt-dlp')

    def test_query_update(self):
        ydl = FakeYDL()

        def test(target, expected, current_version=None, current_commit=None, identifier=None):
            updater = FakeUpdater(ydl, target)
            if current_version:
                updater.current_version = current_version
            if current_commit:
                updater.current_commit = current_commit
            updater._identifier = identifier or 'zip'
            update_info = updater.query_update(_output=True)
            self.assertDictEqual(
                update_info.__dict__ if update_info else {}, expected.__dict__ if expected else {})

        test('yt-dlp/yt-dlp@latest', UpdateInfo(
            '2023.12.31', version='2023.12.31', requested_version='2023.12.31', commit='b' * 40))
        test('yt-dlp/yt-dlp-nightly-builds@latest', UpdateInfo(
            '2023.12.31.123456', version='2023.12.31.123456', requested_version='2023.12.31.123456', commit='c' * 40))
        test('yt-dlp/yt-dlp-master-builds@latest', UpdateInfo(
            '2023.12.31.987654', version='2023.12.31.987654', requested_version='2023.12.31.987654', commit='d' * 40))
        test('fork/yt-dlp@latest', UpdateInfo(
            '2050.12.31', version='2050.12.31', requested_version='2050.12.31', commit='e' * 40))
        test('fork/yt-dlp@pr0000', UpdateInfo(
            'pr0000', version='2023.11.11.000000', requested_version='2023.11.11.000000', commit='f' * 40))
        test('fork/yt-dlp@pr1234', UpdateInfo(
            'pr1234', version='2023.12.31.555555', requested_version='2023.12.31.555555', commit='0' * 40))
        test('fork/yt-dlp@pr9999', UpdateInfo(
            'pr9999', version=None, requested_version=None, commit='1' * 40))
        test('fork/yt-dlp-satellite@pr987', UpdateInfo(
            'pr987', version=None, requested_version=None, commit='2' * 40))
        test('yt-dlp/yt-dlp', None, current_version='2024.01.01')
|
||||
test('stable', UpdateInfo(
|
||||
'2023.12.31', version='2023.12.31', requested_version='2023.12.31', commit='b' * 40))
|
||||
test('nightly', UpdateInfo(
|
||||
'2023.12.31.123456', version='2023.12.31.123456', requested_version='2023.12.31.123456', commit='c' * 40))
|
||||
test('master', UpdateInfo(
|
||||
'2023.12.31.987654', version='2023.12.31.987654', requested_version='2023.12.31.987654', commit='d' * 40))
|
||||
test('testing', None, current_commit='9' * 40)
|
||||
test('testing', UpdateInfo('testing', commit='9' * 40))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
|
@@ -1,30 +0,0 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys
import unittest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import json

from yt_dlp.update import rsa_verify


class TestUpdate(unittest.TestCase):
    def test_rsa_verify(self):
        UPDATES_RSA_KEY = (0x9d60ee4d8f805312fdb15a62f87b95bd66177b91df176765d13514a0f1754bcd2057295c5b6f1d35daa6742c3ffc9a82d3e118861c207995a8031e151d863c9927e304576bc80692bc8e094896fcf11b66f3e29e04e3a71e9a11558558acea1840aec37fc396fb6b65dc81a1c4144e03bd1c011de62e3f1357b327d08426fe93, 65537)
        with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'versions.json'), 'rb') as f:
            versions_info = f.read().decode()
        versions_info = json.loads(versions_info)
        signature = versions_info['signature']
        del versions_info['signature']
        self.assertTrue(rsa_verify(
            json.dumps(versions_info, sort_keys=True).encode(),
            signature, UPDATES_RSA_KEY))


if __name__ == '__main__':
    unittest.main()
@@ -1,34 +0,0 @@
{
    "latest": "2013.01.06",
    "signature": "72158cdba391628569ffdbea259afbcf279bbe3d8aeb7492690735dc1cfa6afa754f55c61196f3871d429599ab22f2667f1fec98865527b32632e7f4b3675a7ef0f0fbe084d359256ae4bba68f0d33854e531a70754712f244be71d4b92e664302aa99653ee4df19800d955b6c4149cd2b3f24288d6e4b40b16126e01f4c8ce6",
    "versions": {
        "2013.01.02": {
            "bin": [
                "http://youtube-dl.org/downloads/2013.01.02/youtube-dl",
                "f5b502f8aaa77675c4884938b1e4871ebca2611813a0c0e74f60c0fbd6dcca6b"
            ],
            "exe": [
                "http://youtube-dl.org/downloads/2013.01.02/youtube-dl.exe",
                "75fa89d2ce297d102ff27675aa9d92545bbc91013f52ec52868c069f4f9f0422"
            ],
            "tar": [
                "http://youtube-dl.org/downloads/2013.01.02/youtube-dl-2013.01.02.tar.gz",
                "6a66d022ac8e1c13da284036288a133ec8dba003b7bd3a5179d0c0daca8c8196"
            ]
        },
        "2013.01.06": {
            "bin": [
                "http://youtube-dl.org/downloads/2013.01.06/youtube-dl",
                "64b6ed8865735c6302e836d4d832577321b4519aa02640dc508580c1ee824049"
            ],
            "exe": [
                "http://youtube-dl.org/downloads/2013.01.06/youtube-dl.exe",
                "58609baf91e4389d36e3ba586e21dab882daaaee537e4448b1265392ae86ff84"
            ],
            "tar": [
                "http://youtube-dl.org/downloads/2013.01.06/youtube-dl-2013.01.06.tar.gz",
                "fe77ab20a95d980ed17a659aa67e371fdd4d656d19c4c7950e7b720b0c2f1a86"
            ]
        }
    }
}
@@ -60,7 +60,7 @@ from .postprocessor import (
    get_postprocessor,
)
from .postprocessor.ffmpeg import resolve_mapping as resolve_recode_mapping
from .update import REPOSITORY, _get_system_deprecation, current_git_head, detect_variant
from .update import REPOSITORY, _get_system_deprecation, _make_label, current_git_head, detect_variant
from .utils import (
    DEFAULT_OUTTMPL,
    IDENTITY,
@@ -158,7 +158,7 @@ from .utils.networking import (
    clean_proxies,
    std_headers,
)
from .version import CHANNEL, RELEASE_GIT_HEAD, VARIANT, __version__
from .version import CHANNEL, ORIGIN, RELEASE_GIT_HEAD, VARIANT, __version__

if compat_os_name == 'nt':
    import ctypes
@@ -3544,7 +3544,7 @@ class YoutubeDL:
            'version': __version__,
            'current_git_head': current_git_head(),
            'release_git_head': RELEASE_GIT_HEAD,
            'repository': REPOSITORY,
            'repository': ORIGIN,
        })

        if remove_private_keys:
@@ -3927,8 +3927,8 @@ class YoutubeDL:
            source += '*'
        klass = type(self)
        write_debug(join_nonempty(
            f'{"yt-dlp" if REPOSITORY == "yt-dlp/yt-dlp" else REPOSITORY} version',
            f'{CHANNEL}@{__version__}',
            f'{REPOSITORY.rpartition("/")[2]} version',
            _make_label(ORIGIN, CHANNEL.partition('@')[2] or __version__, __version__),
            f'[{RELEASE_GIT_HEAD[:9]}]' if RELEASE_GIT_HEAD else '',
            '' if source == 'unknown' else f'({source})',
            '' if _IN_CLI else 'API' if klass == YoutubeDL else f'API:{self.__module__}.{klass.__qualname__}',
@@ -25,7 +25,7 @@ def get_hidden_imports():
    for module in ('websockets', 'requests', 'urllib3'):
        yield from collect_submodules(module)
    # These are auto-detected, but explicitly add them just in case
    yield from ('mutagen', 'brotli', 'certifi')
    yield from ('mutagen', 'brotli', 'certifi', 'secretstorage')


hiddenimports = list(get_hidden_imports())
@@ -953,6 +953,7 @@ from .lastfm import (
    LastFMPlaylistIE,
    LastFMUserIE,
)
from .laxarxames import LaXarxaMesIE
from .lbry import (
    LBRYIE,
    LBRYChannelIE,
@@ -1387,7 +1388,10 @@ from .oftv import (
from .oktoberfesttv import OktoberfestTVIE
from .olympics import OlympicsReplayIE
from .on24 import On24IE
from .ondemandkorea import OnDemandKoreaIE
from .ondemandkorea import (
    OnDemandKoreaIE,
    OnDemandKoreaProgramIE,
)
from .onefootball import OneFootballIE
from .onenewsnz import OneNewsNZIE
from .oneplace import OnePlacePodcastIE
@@ -1416,6 +1420,7 @@ from .orf import (
    ORFTVthekIE,
    ORFFM4StoryIE,
    ORFRadioIE,
    ORFPodcastIE,
    ORFIPTVIE,
)
from .outsidetv import OutsideTVIE
@@ -1578,6 +1583,10 @@ from .radiocanada import (
    RadioCanadaIE,
    RadioCanadaAudioVideoIE,
)
from .radiocomercial import (
    RadioComercialIE,
    RadioComercialPlaylistIE,
)
from .radiode import RadioDeIE
from .radiojavan import RadioJavanIE
from .radiobremen import RadioBremenIE
@@ -1758,6 +1767,11 @@ from .samplefocus import SampleFocusIE
from .sapo import SapoIE
from .savefrom import SaveFromIE
from .sbs import SBSIE
from .sbscokr import (
    SBSCoKrIE,
    SBSCoKrAllvodProgramIE,
    SBSCoKrProgramsVodIE,
)
from .screen9 import Screen9IE
from .screencast import ScreencastIE
from .screencastify import ScreencastifyIE
@@ -1902,6 +1916,8 @@ from .srmediathek import SRMediathekIE
from .stacommu import (
    StacommuLiveIE,
    StacommuVODIE,
    TheaterComplexTownVODIE,
    TheaterComplexTownPPVIE,
)
from .stanfordoc import StanfordOpenClassroomIE
from .startv import StarTVIE
@@ -2014,7 +2030,6 @@ from .thestar import TheStarIE
from .thesun import TheSunIE
from .theweatherchannel import TheWeatherChannelIE
from .thisamericanlife import ThisAmericanLifeIE
from .thisav import ThisAVIE
from .thisoldhouse import ThisOldHouseIE
from .thisvid import (
    ThisVidIE,
@@ -21,10 +21,10 @@ class BrilliantpalaBaseIE(InfoExtractor):

    def _get_logged_in_username(self, url, video_id):
        webpage, urlh = self._download_webpage_handle(url, video_id)
        if self._LOGIN_API == urlh.url:
        if urlh.url.startswith(self._LOGIN_API):
            self.raise_login_required()
        return self._html_search_regex(
            r'"username"\s*:\s*"(?P<username>[^"]+)"', webpage, 'stream page info', 'username')
            r'"username"\s*:\s*"(?P<username>[^"]+)"', webpage, 'logged-in username')

    def _perform_login(self, username, password):
        login_form = self._hidden_inputs(self._download_webpage(
yt_dlp/extractor/laxarxames.py (new file, 73 lines)
@@ -0,0 +1,73 @@
import json

from .brightcove import BrightcoveNewIE
from .common import InfoExtractor
from ..utils import ExtractorError
from ..utils.traversal import traverse_obj


class LaXarxaMesIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?laxarxames\.cat/(?:[^/?#]+/)*?(player|movie-details)/(?P<id>\d+)'
    _NETRC_MACHINE = 'laxarxames'
    _TOKEN = None
    _TESTS = [{
        'url': 'https://www.laxarxames.cat/player/3459421',
        'md5': '0966f46c34275934c19af78f3df6e2bc',
        'info_dict': {
            'id': '6339612436112',
            'ext': 'mp4',
            'title': 'Resum | UA Horta — UD Viladecans',
            'timestamp': 1697905186,
            'thumbnail': r're:https?://.*\.jpg',
            'description': '',
            'upload_date': '20231021',
            'duration': 129.44,
            'tags': ['ott', 'esports', '23-24', ' futbol', ' futbol-partits', 'elit', 'resum'],
            'uploader_id': '5779379807001',
        },
        'skip': 'Requires login',
    }]

    def _perform_login(self, username, password):
        if self._TOKEN:
            return

        login = self._download_json(
            'https://api.laxarxames.cat/Authorization/SignIn', None, note='Logging in', headers={
                'X-Tenantorigin': 'https://laxarxames.cat',
                'Content-Type': 'application/json',
            }, data=json.dumps({
                'Username': username,
                'Password': password,
                'Device': {
                    'PlatformCode': 'WEB',
                    'Name': 'Mac OS ()',
                },
            }).encode(), expected_status=401)

        self._TOKEN = traverse_obj(login, ('AuthorizationToken', 'Token', {str}))
        if not self._TOKEN:
            raise ExtractorError('Login failed', expected=True)

    def _real_extract(self, url):
        video_id = self._match_id(url)
        if not self._TOKEN:
            self.raise_login_required()

        media_play_info = self._download_json(
            'https://api.laxarxames.cat/Media/GetMediaPlayInfo', video_id,
            data=json.dumps({
                'MediaId': int(video_id),
                'StreamType': 'MAIN'
            }).encode(), headers={
                'Authorization': f'Bearer {self._TOKEN}',
                'X-Tenantorigin': 'https://laxarxames.cat',
                'Content-Type': 'application/json',
            })

        if not traverse_obj(media_play_info, ('ContentUrl', {str})):
            self.raise_no_formats('No video found', expected=True)

        return self.url_result(
            f'https://players.brightcove.net/5779379807001/default_default/index.html?videoId={media_play_info["ContentUrl"]}',
            BrightcoveNewIE, video_id, media_play_info.get('Title'))
@@ -142,6 +142,9 @@ class NetEaseMusicIE(NetEaseMusicBaseIE):
            'subtitles': {'lyrics': [{'ext': 'lrc'}]},
            "duration": 256,
            'thumbnail': r're:^http.*\.jpg',
            'album': '偶像练习生 表演曲目合集',
            'average_rating': int,
            'album_artist': '偶像练习生',
        },
    }, {
        'note': 'No lyrics.',
@@ -155,6 +158,9 @@ class NetEaseMusicIE(NetEaseMusicBaseIE):
            'timestamp': 1202745600,
            'duration': 263,
            'thumbnail': r're:^http.*\.jpg',
            'album': 'Piano Solos Vol. 2',
            'album_artist': 'Dustin O\'Halloran',
            'average_rating': int,
        },
    }, {
        'url': 'https://y.music.163.com/m/song?app_version=8.8.45&id=95670&uct2=sKnvS4+0YStsWkqsPhFijw%3D%3D&dlt=0846',
@@ -171,6 +177,9 @@ class NetEaseMusicIE(NetEaseMusicBaseIE):
            'duration': 268,
            'alt_title': '伴唱:现代人乐队 合唱:总政歌舞团',
            'thumbnail': r're:^http.*\.jpg',
            'average_rating': int,
            'album': '红色摇滚',
            'album_artist': '侯牧人',
        },
    }, {
        'url': 'http://music.163.com/#/song?id=32102397',
@@ -186,6 +195,9 @@ class NetEaseMusicIE(NetEaseMusicBaseIE):
            'subtitles': {'lyrics': [{'ext': 'lrc'}]},
            'duration': 199,
            'thumbnail': r're:^http.*\.jpg',
            'album': 'Bad Blood',
            'average_rating': int,
            'album_artist': 'Taylor Swift',
        },
        'skip': 'Blocked outside Mainland China',
    }, {
@@ -203,6 +215,9 @@ class NetEaseMusicIE(NetEaseMusicBaseIE):
            'duration': 229,
            'alt_title': '说出愿望吧(Genie)',
            'thumbnail': r're:^http.*\.jpg',
            'average_rating': int,
            'album': 'Oh!',
            'album_artist': '少女时代',
        },
        'skip': 'Blocked outside Mainland China',
    }]
@@ -253,12 +268,15 @@ class NetEaseMusicIE(NetEaseMusicBaseIE):
            'formats': formats,
            'alt_title': '/'.join(traverse_obj(info, (('transNames', 'alias'), ...))) or None,
            'creator': ' / '.join(traverse_obj(info, ('artists', ..., 'name'))) or None,
            'album_artist': ' / '.join(traverse_obj(info, ('album', 'artists', ..., 'name'))) or None,
            **lyric_data,
            **traverse_obj(info, {
                'title': ('name', {str}),
                'timestamp': ('album', 'publishTime', {self.kilo_or_none}),
                'thumbnail': ('album', 'picUrl', {url_or_none}),
                'duration': ('duration', {self.kilo_or_none}),
                'album': ('album', 'name', {str}),
                'average_rating': ('score', {int_or_none}),
            }),
        }
@@ -3,6 +3,8 @@ import re
from .common import InfoExtractor
from ..utils import (
    ExtractorError,
    clean_html,
    get_element_by_class,
    int_or_none,
    join_nonempty,
    parse_duration,
@@ -45,25 +47,36 @@ class NhkBaseIE(InfoExtractor):
        self.cache.store('nhk', 'api_info', api_info)
        return api_info

    def _extract_formats_and_subtitles(self, vod_id):
    def _extract_stream_info(self, vod_id):
        for refresh in (False, True):
            api_info = self._get_api_info(refresh)
            if not api_info:
                continue

            api_url = api_info.pop('url')
            stream_url = traverse_obj(
            meta = traverse_obj(
                self._download_json(
                    api_url, vod_id, 'Downloading stream url info', fatal=False, query={
                        **api_info,
                        'type': 'json',
                        'optional_id': vod_id,
                        'active_flg': 1,
                    }),
                ('meta', 0, 'movie_url', ('mb_auto', 'auto_sp', 'auto_pc'), {url_or_none}), get_all=False)
            if stream_url:
                return self._extract_m3u8_formats_and_subtitles(stream_url, vod_id)
                    }), ('meta', 0))
            stream_url = traverse_obj(
                meta, ('movie_url', ('mb_auto', 'auto_sp', 'auto_pc'), {url_or_none}), get_all=False)

            if stream_url:
                formats, subtitles = self._extract_m3u8_formats_and_subtitles(stream_url, vod_id)
                return {
                    **traverse_obj(meta, {
                        'duration': ('duration', {int_or_none}),
                        'timestamp': ('publication_date', {unified_timestamp}),
                        'release_timestamp': ('insert_date', {unified_timestamp}),
                        'modified_timestamp': ('update_date', {unified_timestamp}),
                    }),
                    'formats': formats,
                    'subtitles': subtitles,
                }
        raise ExtractorError('Unable to extract stream url')

    def _extract_episode_info(self, url, episode=None):
@@ -77,11 +90,11 @@ class NhkBaseIE(InfoExtractor):
        if fetch_episode:
            episode = self._call_api(
                episode_id, lang, is_video, True, episode_id[:4] == '9999')[0]
        title = episode.get('sub_title_clean') or episode['sub_title']

        def get_clean_field(key):
            return episode.get(key + '_clean') or episode.get(key)
            return clean_html(episode.get(key + '_clean') or episode.get(key))

        title = get_clean_field('sub_title')
        series = get_clean_field('title')

        thumbnails = []
@@ -96,22 +109,30 @@ class NhkBaseIE(InfoExtractor):
                'url': 'https://www3.nhk.or.jp' + img_path,
            })

        episode_name = title
        if series and title:
            title = f'{series} - {title}'
        elif series and not title:
            title = series
            series = None
            episode_name = None
        else:  # title, no series
            episode_name = None

        info = {
            'id': episode_id + '-' + lang,
            'title': '%s - %s' % (series, title) if series and title else title,
            'title': title,
            'description': get_clean_field('description'),
            'thumbnails': thumbnails,
            'series': series,
            'episode': title,
            'episode': episode_name,
        }

        if is_video:
            vod_id = episode['vod_id']
            formats, subs = self._extract_formats_and_subtitles(vod_id)

            info.update({
                **self._extract_stream_info(vod_id),
                'id': vod_id,
                'formats': formats,
                'subtitles': subs,
            })

        else:
@@ -148,6 +169,14 @@ class NhkVodIE(NhkBaseIE):
            'thumbnail': 'md5:51bcef4a21936e7fea1ff4e06353f463',
            'episode': 'The Tohoku Shinkansen: Full Speed Ahead',
            'series': 'Japan Railway Journal',
            'modified_timestamp': 1694243656,
            'timestamp': 1681428600,
            'release_timestamp': 1693883728,
            'duration': 1679,
            'upload_date': '20230413',
            'modified_date': '20230909',
            'release_date': '20230905',

        },
    }, {
        # video clip
@@ -161,6 +190,13 @@ class NhkVodIE(NhkBaseIE):
            'thumbnail': 'md5:d6a4d9b6e9be90aaadda0bcce89631ed',
            'series': 'Dining with the Chef',
            'episode': 'Chef Saito\'s Family recipe: MENCHI-KATSU',
            'duration': 148,
            'upload_date': '20190816',
            'release_date': '20230902',
            'release_timestamp': 1693619292,
            'modified_timestamp': 1694168033,
            'modified_date': '20230908',
            'timestamp': 1565997540,
        },
    }, {
        # radio
@@ -170,7 +206,7 @@ class NhkVodIE(NhkBaseIE):
            'ext': 'm4a',
            'title': 'Living in Japan - Tips for Travelers to Japan / Ramen Vending Machines',
            'series': 'Living in Japan',
            'description': 'md5:850611969932874b4a3309e0cae06c2f',
            'description': 'md5:0a0e2077d8f07a03071e990a6f51bfab',
            'thumbnail': 'md5:960622fb6e06054a4a1a0c97ea752545',
            'episode': 'Tips for Travelers to Japan / Ramen Vending Machines'
        },
@@ -212,6 +248,23 @@ class NhkVodIE(NhkBaseIE):
            'description': 'md5:9c1d6cbeadb827b955b20e99ab920ff0',
        },
        'skip': 'expires 2023-10-15',
    }, {
        # a one-off (single-episode series). title from the api is just '<p></p>'
        'url': 'https://www3.nhk.or.jp/nhkworld/en/ondemand/video/3004952/',
        'info_dict': {
            'id': 'nw_vod_v_en_3004_952_20230723091000_01_1690074552',
            'ext': 'mp4',
            'title': 'Barakan Discovers AMAMI OSHIMA: Isson\'s Treasure Island',
            'description': 'md5:5db620c46a0698451cc59add8816b797',
            'thumbnail': 'md5:67d9ff28009ba379bfa85ad1aaa0e2bd',
            'release_date': '20230905',
            'timestamp': 1690103400,
            'duration': 2939,
            'release_timestamp': 1693898699,
            'modified_timestamp': 1698057495,
            'modified_date': '20231023',
            'upload_date': '20230723',
        },
    }]

    def _real_extract(self, url):
@@ -226,13 +279,15 @@ class NhkVodProgramIE(NhkBaseIE):
        'info_dict': {
            'id': 'sumo',
            'title': 'GRAND SUMO Highlights',
            'description': 'md5:fc20d02dc6ce85e4b72e0273aa52fdbf',
        },
        'playlist_mincount': 12,
        'playlist_mincount': 0,
    }, {
        'url': 'https://www3.nhk.or.jp/nhkworld/en/ondemand/program/video/japanrailway',
        'info_dict': {
            'id': 'japanrailway',
            'title': 'Japan Railway Journal',
            'description': 'md5:ea39d93af7d05835baadf10d1aae0e3f',
        },
        'playlist_mincount': 12,
    }, {
@@ -241,6 +296,7 @@ class NhkVodProgramIE(NhkBaseIE):
        'info_dict': {
            'id': 'japanrailway',
            'title': 'Japan Railway Journal',
            'description': 'md5:ea39d93af7d05835baadf10d1aae0e3f',
        },
        'playlist_mincount': 5,
    }, {
@@ -265,11 +321,11 @@ class NhkVodProgramIE(NhkBaseIE):
            entries.append(self._extract_episode_info(
                urljoin(url, episode_path), episode))

        program_title = None
        if entries:
            program_title = entries[0].get('series')
        html = self._download_webpage(url, program_id)
        program_title = clean_html(get_element_by_class('p-programDetail__title', html))
        program_description = clean_html(get_element_by_class('p-programDetail__text', html))

        return self.playlist_result(entries, program_id, program_title)
        return self.playlist_result(entries, program_id, program_title, program_description)


class NhkForSchoolBangumiIE(InfoExtractor):
@@ -421,6 +477,7 @@ class NhkRadiruIE(InfoExtractor):
        'skip': 'Episode expired on 2023-04-16',
        'info_dict': {
            'channel': 'NHK-FM',
            'uploader': 'NHK-FM',
            'description': 'md5:94b08bdeadde81a97df4ec882acce3e9',
            'ext': 'm4a',
            'id': '0449_01_3853544',
@@ -441,6 +498,7 @@ class NhkRadiruIE(InfoExtractor):
            'title': 'ベストオブクラシック',
            'description': '世界中の上質な演奏会をじっくり堪能する本格派クラシック番組。',
            'channel': 'NHK-FM',
            'uploader': 'NHK-FM',
            'thumbnail': 'https://www.nhk.or.jp/prog/img/458/g458.jpg',
        },
        'playlist_mincount': 3,
@@ -454,6 +512,7 @@ class NhkRadiruIE(InfoExtractor):
            'title': '有島武郎「一房のぶどう」',
            'description': '朗読:川野一宇(ラジオ深夜便アンカー)\r\n\r\n(2016年12月8日放送「ラジオ深夜便『アンカー朗読シリーズ』」より)',
            'channel': 'NHKラジオ第1、NHK-FM',
            'uploader': 'NHKラジオ第1、NHK-FM',
            'timestamp': 1635757200,
            'thumbnail': 'https://www.nhk.or.jp/radioondemand/json/F300/img/corner/box_109_thumbnail.jpg',
            'release_date': '20161207',
@@ -469,6 +528,7 @@ class NhkRadiruIE(InfoExtractor):
            'id': 'F261_01_3855109',
            'ext': 'm4a',
            'channel': 'NHKラジオ第1',
            'uploader': 'NHKラジオ第1',
            'timestamp': 1681635900,
            'release_date': '20230416',
            'series': 'NHKラジオニュース',
@@ -513,6 +573,7 @@ class NhkRadiruIE(InfoExtractor):
        series_meta = traverse_obj(meta, {
            'title': 'program_name',
            'channel': 'media_name',
            'uploader': 'media_name',
            'thumbnail': (('thumbnail_c', 'thumbnail_p'), {url_or_none}),
        }, get_all=False)

@@ -541,6 +602,7 @@ class NhkRadioNewsPageIE(InfoExtractor):
            'thumbnail': 'https://www.nhk.or.jp/radioondemand/json/F261/img/RADIONEWS_640.jpg',
            'description': 'md5:bf2c5b397e44bc7eb26de98d8f15d79d',
            'channel': 'NHKラジオ第1',
            'uploader': 'NHKラジオ第1',
            'title': 'NHKラジオニュース',
        }
    }]
@@ -13,7 +13,7 @@ from ..utils import (


class NovaEmbedIE(InfoExtractor):
    _VALID_URL = r'https?://media\.cms\.nova\.cz/embed/(?P<id>[^/?#&]+)'
    _VALID_URL = r'https?://media(?:tn)?\.cms\.nova\.cz/embed/(?P<id>[^/?#&]+)'
    _TESTS = [{
        'url': 'https://media.cms.nova.cz/embed/8o0n0r?autoplay=1',
        'info_dict': {
@@ -37,6 +37,16 @@ class NovaEmbedIE(InfoExtractor):
            'duration': 114,
        },
        'params': {'skip_download': 'm3u8'},
    }, {
        'url': 'https://mediatn.cms.nova.cz/embed/EU5ELEsmOHt?autoplay=1',
        'info_dict': {
            'id': 'EU5ELEsmOHt',
            'ext': 'mp4',
            'title': 'Haptické křeslo, bionická ruka nebo roboti. Reportérka se podívala na Týden inovací',
            'thumbnail': r're:^https?://.*\.jpg',
            'duration': 1780,
        },
        'params': {'skip_download': 'm3u8'},
    }]

    def _real_extract(self, url):
@@ -1,21 +1,21 @@
import re

from .common import InfoExtractor
from ..compat import compat_urlparse
from ..utils import (
    int_or_none,
    js_to_json,
    parse_duration,
    url_or_none,
)
from ..utils.traversal import traverse_obj


class NTVDeIE(InfoExtractor):
    IE_NAME = 'n-tv.de'
    _VALID_URL = r'https?://(?:www\.)?n-tv\.de/mediathek/videos/[^/?#]+/[^/?#]+-article(?P<id>.+)\.html'
    _VALID_URL = r'https?://(?:www\.)?n-tv\.de/mediathek/(?:videos|magazine)/[^/?#]+/[^/?#]+-article(?P<id>[^/?#]+)\.html'

    _TESTS = [{
        'url': 'http://www.n-tv.de/mediathek/videos/panorama/Schnee-und-Glaette-fuehren-zu-zahlreichen-Unfaellen-und-Staus-article14438086.html',
        'md5': '6ef2514d4b1e8e03ca24b49e2f167153',
        'md5': '6bcf2a6638cb83f45d5561659a1cb498',
        'info_dict': {
            'id': '14438086',
            'ext': 'mp4',
@@ -23,51 +23,61 @@ class NTVDeIE(InfoExtractor):
            'title': 'Schnee und Glätte führen zu zahlreichen Unfällen und Staus',
            'alt_title': 'Winterchaos auf deutschen Straßen',
            'description': 'Schnee und Glätte sorgen deutschlandweit für einen chaotischen Start in die Woche: Auf den Straßen kommt es zu kilometerlangen Staus und Dutzenden Glätteunfällen. In Düsseldorf und München wirbelt der Schnee zudem den Flugplan durcheinander. Dutzende Flüge landen zu spät, einige fallen ganz aus.',
            'duration': 4020,
            'duration': 67,
            'timestamp': 1422892797,
            'upload_date': '20150202',
        },
    }, {
        'url': 'https://www.n-tv.de/mediathek/magazine/auslandsreport/Juedische-Siedler-wollten-Rache-die-wollten-nur-toeten-article24523089.html',
        'md5': 'c5c6014c014ccc3359470e1d34472bfd',
        'info_dict': {
            'id': '24523089',
            'ext': 'mp4',
            'thumbnail': r're:^https?://.*\.jpg$',
            'title': 'Jüdische Siedler "wollten Rache, die wollten nur töten"',
            'alt_title': 'Israelische Gewalt fern von Gaza',
            'description': 'Vier Tage nach dem Massaker der Hamas greifen jüdische Siedler das Haus einer palästinensischen Familie im Westjordanland an. Die Überlebenden berichten, sie waren unbewaffnet, die Angreifer seien nur auf "Rache und Töten" aus gewesen. Als die Toten beerdigt werden sollen, eröffnen die Siedler erneut das Feuer.',
            'duration': 326,
            'timestamp': 1699688294,
            'upload_date': '20231111',
        },
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        info = self._parse_json(self._search_regex(
            r'(?s)ntv\.pageInfo\.article\s*=\s*(\{.*?\});', webpage, 'info'),
            video_id, transform_source=js_to_json)
        timestamp = int_or_none(info.get('publishedDateAsUnixTimeStamp'))
        vdata = self._parse_json(self._search_regex(
            r'(?s)\$\(\s*"\#player"\s*\)\s*\.data\(\s*"player",\s*(\{.*?\})\);',
            webpage, 'player data'), video_id,
            transform_source=lambda s: js_to_json(re.sub(r'advertising:\s*{[^}]+},', '', s)))
        duration = parse_duration(vdata.get('duration'))
        info = self._search_json(
            r'article:', webpage, 'info', video_id, transform_source=js_to_json)

        vdata = self._search_json(
            r'\$\(\s*"#playerwrapper"\s*\)\s*\.data\(\s*"player",',
            webpage, 'player data', video_id,
            transform_source=lambda s: js_to_json(re.sub(r'ivw:[^},]+', '', s)))['setup']['source']

        formats = []
        if vdata.get('video'):
        if vdata.get('progressive'):
            formats.append({
                'format_id': 'flash',
                'url': 'rtmp://fms.n-tv.de/%s' % vdata['video'],
                'format_id': 'http',
                'url': vdata['progressive'],
            })
        if vdata.get('videoMp4'):
            formats.append({
                'format_id': 'mobile',
                'url': compat_urlparse.urljoin('http://video.n-tv.de', vdata['videoMp4']),
                'tbr': 400,  # estimation
            })
        if vdata.get('videoM3u8'):
            m3u8_url = compat_urlparse.urljoin('http://video.n-tv.de', vdata['videoM3u8'])
        if vdata.get('hls'):
            formats.extend(self._extract_m3u8_formats(
                m3u8_url, video_id, ext='mp4', entry_protocol='m3u8_native',
                quality=1, m3u8_id='hls', fatal=False))
                vdata['hls'], video_id, 'mp4', m3u8_id='hls', fatal=False))
        if vdata.get('dash'):
            formats.extend(self._extract_mpd_formats(vdata['dash'], video_id, fatal=False, mpd_id='dash'))

        return {
            'id': video_id,
            'title': info['headline'],
            'description': info.get('intro'),
            'alt_title': info.get('kicker'),
            'timestamp': timestamp,
            'thumbnail': vdata.get('html5VideoPoster'),
            'duration': duration,
            **traverse_obj(info, {
                'title': 'headline',
                'description': 'intro',
                'alt_title': 'kicker',
                'timestamp': ('publishedDateAsUnixTimeStamp', {int_or_none}),
            }),
            **traverse_obj(vdata, {
                'thumbnail': ('poster', {url_or_none}),
                'duration': ('length', {int_or_none}),
            }),
            'formats': formats,
        }
@@ -1,87 +1,167 @@
import functools
import re
import uuid

from .common import InfoExtractor
from ..networking import HEADRequest
from ..utils import (
    ExtractorError,
    js_to_json,
    OnDemandPagedList,
    float_or_none,
    int_or_none,
    join_nonempty,
    parse_age_limit,
    parse_qs,
    unified_strdate,
    url_or_none,
)
from ..utils.traversal import traverse_obj


class OnDemandKoreaIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?ondemandkorea\.com/(?P<id>[^/]+)\.html'
    _VALID_URL = r'https?://(?:www\.)?ondemandkorea\.com/(?:en/)?player/vod/[a-z0-9-]+\?(?:[^#]+&)?contentId=(?P<id>\d+)'
    _GEO_COUNTRIES = ['US', 'CA']

    _TESTS = [{
        'url': 'https://www.ondemandkorea.com/ask-us-anything-e351.html',
        'url': 'https://www.ondemandkorea.com/player/vod/ask-us-anything?contentId=686471',
        'md5': 'e2ff77255d989e3135bde0c5889fbce8',
        'info_dict': {
            'id': 'ask-us-anything-e351',
            'id': '686471',
            'ext': 'mp4',
            'title': 'Ask Us Anything : Jung Sung-ho, Park Seul-gi, Kim Bo-min, Yang Seung-won - 09/24/2022',
            'description': 'A talk show/game show with a school theme where celebrity guests appear as “transfer students.”',
            'thumbnail': r're:^https?://.*\.jpg$',
            'title': 'Ask Us Anything: Jung Sung-ho, Park Seul-gi, Kim Bo-min, Yang Seung-won',
            'thumbnail': r're:^https?://.*\.(jpg|jpeg|png)',
            'duration': 5486.955,
            'release_date': '20220924',
            'series': 'Ask Us Anything',
            'series_id': 11790,
            'episode_number': 351,
            'episode': 'Jung Sung-ho, Park Seul-gi, Kim Bo-min, Yang Seung-won',
        },
        'params': {
            'skip_download': 'm3u8 download'
        }
    }, {
        'url': 'https://www.ondemandkorea.com/work-later-drink-now-e1.html',
        'url': 'https://www.ondemandkorea.com/player/vod/breakup-probation-a-week?contentId=1595796',
        'md5': '57266c720006962be7ff415b24775caa',
        'info_dict': {
            'id': 'work-later-drink-now-e1',
            'id': '1595796',
            'ext': 'mp4',
            'title': 'Work Later, Drink Now : E01',
            'description': 'Work Later, Drink First follows three women who find solace in a glass of liquor at the end of the day. So-hee, who gets comfort from a cup of soju af',
            'thumbnail': r're:^https?://.*\.png$',
            'subtitles': {
                'English': 'mincount:1',
            },
            'title': 'Breakup Probation, A Week: E08',
            'thumbnail': r're:^https?://.*\.(jpg|jpeg|png)',
            'duration': 1586.0,
            'release_date': '20231001',
            'series': 'Breakup Probation, A Week',
            'series_id': 22912,
            'episode_number': 8,
            'episode': 'E08',
        },
        'params': {
            'skip_download': 'm3u8 download'
        }
    }, {
        'url': 'https://www.ondemandkorea.com/player/vod/the-outlaws?contentId=369531',
        'md5': 'fa5523b87aa1f6d74fc622a97f2b47cd',
        'info_dict': {
            'id': '369531',
            'ext': 'mp4',
            'release_date': '20220519',
            'duration': 7267.0,
            'title': 'The Outlaws: Main Movie',
            'thumbnail': r're:^https?://.*\.(jpg|jpeg|png)',
            'age_limit': 18,
        },
    }, {
        'url': 'https://www.ondemandkorea.com/en/player/vod/capture-the-moment-how-is-that-possible?contentId=1605006',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id, fatal=False)

        if not webpage:
            # Page sometimes returns captcha page with HTTP 403
            raise ExtractorError(
                'Unable to access page. You may have been blocked.',
                expected=True)
        data = self._download_json(
            f'https://odkmedia.io/odx/api/v3/playback/{video_id}/', video_id, fatal=False,
            headers={'service-name': 'odk'}, query={'did': str(uuid.uuid4())}, expected_status=(403, 404))
        if not traverse_obj(data, ('result', {dict})):
            msg = traverse_obj(data, ('messages', '__default'), 'title', expected_type=str)
            raise ExtractorError(msg or 'Got empty response from playback API', expected=True)

        if 'msg_block_01.png' in webpage:
            self.raise_geo_restricted(
                msg='This content is not available in your region',
                countries=self._GEO_COUNTRIES)
        data = data['result']

        if 'This video is only available to ODK PLUS members.' in webpage:
            raise ExtractorError(
                'This video is only available to ODK PLUS members.',
                expected=True)
        def try_geo_bypass(url):
            return traverse_obj(url, ({parse_qs}, 'stream_url', 0, {url_or_none})) or url

        if 'ODK PREMIUM Members Only' in webpage:
            raise ExtractorError(
                'This video is only available to ODK PREMIUM members.',
                expected=True)
        def try_upgrade_quality(url):
            mod_url = re.sub(r'_720(p?)\.m3u8', r'_1080\1.m3u8', url)
            return mod_url if mod_url != url and self._request_webpage(
                HEADRequest(mod_url), video_id, note='Checking for higher quality format',
                errnote='No higher quality format found', fatal=False) else url

        title = self._search_regex(
            r'class=["\']episode_title["\'][^>]*>([^<]+)',
            webpage, 'episode_title', fatal=False) or self._og_search_title(webpage)
        formats = []
        for m3u8_url in traverse_obj(data, (('sources', 'manifest'), ..., 'url', {url_or_none}, {try_geo_bypass})):
            formats.extend(self._extract_m3u8_formats(try_upgrade_quality(m3u8_url), video_id, fatal=False))

        jw_config = self._parse_json(
            self._search_regex((
                r'(?P<options>{\s*[\'"]tracks[\'"].*?})[)\];]+$',
                r'playlist\s*=\s*\[(?P<options>.+)];?$',
                r'odkPlayer\.init.*?(?P<options>{[^;]+}).*?;',
            ), webpage, 'jw config', flags=re.MULTILINE | re.DOTALL, group='options'),
            video_id, transform_source=js_to_json)
        info = self._parse_jwplayer_data(
            jw_config, video_id, require_title=False, m3u8_id='hls',
            base_url=url)
        subtitles = {}
        for track in traverse_obj(data, ('text_tracks', lambda _, v: url_or_none(v['url']))):
            subtitles.setdefault(track.get('language', 'und'), []).append({
                'url': track['url'],
                'ext': track.get('codec'),
                'name': track.get('label'),
            })

        info.update({
            'title': title,
            'description': self._og_search_description(webpage),
            'thumbnail': self._og_search_thumbnail(webpage)
        })
        return info
        def if_series(key=None):
            return lambda obj: obj[key] if key and obj['kind'] == 'series' else None

        return {
            'id': video_id,
            'title': join_nonempty(
                ('episode', 'program', 'title'),
                ('episode', 'title'), from_dict=data, delim=': '),
            **traverse_obj(data, {
                'thumbnail': ('episode', 'images', 'thumbnail', {url_or_none}),
                'release_date': ('episode', 'release_date', {lambda x: x.replace('-', '')}, {unified_strdate}),
                'duration': ('duration', {functools.partial(float_or_none, scale=1000)}),
                'age_limit': ('age_rating', 'name', {lambda x: x.replace('R', '')}, {parse_age_limit}),
                'series': ('episode', {if_series(key='program')}, 'title'),
                'series_id': ('episode', {if_series(key='program')}, 'id'),
                'episode': ('episode', {if_series(key='title')}),
                'episode_number': ('episode', {if_series(key='number')}, {int_or_none}),
            }, get_all=False),
            'formats': formats,
            'subtitles': subtitles,
        }


class OnDemandKoreaProgramIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?ondemandkorea\.com/(?:en/)?player/vod/(?P<id>[a-z0-9-]+)(?:$|#)'
    _GEO_COUNTRIES = ['US', 'CA']

    _TESTS = [{
        'url': 'https://www.ondemandkorea.com/player/vod/uskn-news',
        'info_dict': {
            'id': 'uskn-news',
        },
        'playlist_mincount': 755,
    }, {
        'url': 'https://www.ondemandkorea.com/en/player/vod/the-land',
        'info_dict': {
            'id': 'the-land',
        },
        'playlist_count': 52,
    }]

    _PAGE_SIZE = 100

    def _fetch_page(self, display_id, page):
        page += 1
        page_data = self._download_json(
            f'https://odkmedia.io/odx/api/v3/program/{display_id}/episodes/', display_id,
            headers={'service-name': 'odk'}, query={
                'page': page,
                'page_size': self._PAGE_SIZE,
            }, note=f'Downloading page {page}', expected_status=404)
        for episode in traverse_obj(page_data, ('result', 'results', ...)):
            yield self.url_result(
                f'https://www.ondemandkorea.com/player/vod/{display_id}?contentId={episode["id"]}',
                ie=OnDemandKoreaIE, video_title=episode.get('title'))

    def _real_extract(self, url):
        display_id = self._match_id(url)

        entries = OnDemandPagedList(functools.partial(
            self._fetch_page, display_id), self._PAGE_SIZE)

        return self.playlist_result(entries, display_id)
@@ -4,15 +4,16 @@ import re
from .common import InfoExtractor
from ..networking import HEADRequest
from ..utils import (
    InAdvancePagedList,
    clean_html,
    determine_ext,
    float_or_none,
    InAdvancePagedList,
    int_or_none,
    join_nonempty,
    make_archive_id,
    mimetype2ext,
    orderedSet,
    remove_end,
    make_archive_id,
    smuggle_url,
    strip_jsonp,
    try_call,
@@ -21,6 +22,7 @@ from ..utils import (
    unsmuggle_url,
    url_or_none,
)
from ..utils.traversal import traverse_obj


class ORFTVthekIE(InfoExtractor):
@@ -334,6 +336,45 @@ class ORFRadioIE(InfoExtractor):
        self._entries(data, station or station2), show_id, data.get('title'), clean_html(data.get('subtitle')))


class ORFPodcastIE(InfoExtractor):
    IE_NAME = 'orf:podcast'
    _STATION_RE = '|'.join(map(re.escape, (
        'bgl', 'fm4', 'ktn', 'noe', 'oe1', 'oe3',
        'ooe', 'sbg', 'stm', 'tir', 'tv', 'vbg', 'wie')))
    _VALID_URL = rf'https?://sound\.orf\.at/podcast/(?P<station>{_STATION_RE})/(?P<show>[\w-]+)/(?P<id>[\w-]+)'
    _TESTS = [{
        'url': 'https://sound.orf.at/podcast/oe3/fruehstueck-bei-mir/nicolas-stockhammer-15102023',
        'md5': '526a5700e03d271a1505386a8721ab9b',
        'info_dict': {
            'id': 'nicolas-stockhammer-15102023',
            'ext': 'mp3',
            'title': 'Nicolas Stockhammer (15.10.2023)',
            'duration': 3396.0,
            'series': 'Frühstück bei mir',
        },
        'skip': 'ORF podcasts are only available for a limited time'
    }]

    def _real_extract(self, url):
        station, show, show_id = self._match_valid_url(url).group('station', 'show', 'id')
        data = self._download_json(
            f'https://audioapi.orf.at/radiothek/api/2.0/podcast/{station}/{show}/{show_id}', show_id)

        return {
            'id': show_id,
            'ext': 'mp3',
            'vcodec': 'none',
            **traverse_obj(data, ('payload', {
                'url': ('enclosures', 0, 'url'),
                'ext': ('enclosures', 0, 'type', {mimetype2ext}),
                'title': 'title',
                'description': ('description', {clean_html}),
                'duration': ('duration', {functools.partial(float_or_none, scale=1000)}),
                'series': ('podcast', 'title'),
            })),
        }


class ORFIPTVIE(InfoExtractor):
    IE_NAME = 'orf:iptv'
    IE_DESC = 'iptv.ORF.at'
@@ -4,6 +4,7 @@ from ..utils import (
    parse_iso8601,
    unescapeHTML,
)
from ..utils.traversal import traverse_obj


class PeriscopeBaseIE(InfoExtractor):
@@ -20,22 +21,25 @@ class PeriscopeBaseIE(InfoExtractor):
        title = broadcast.get('status') or 'Periscope Broadcast'
        uploader = broadcast.get('user_display_name') or broadcast.get('username')
        title = '%s - %s' % (uploader, title) if uploader else title
        is_live = broadcast.get('state').lower() == 'running'

        thumbnails = [{
            'url': broadcast[image],
        } for image in ('image_url', 'image_url_small') if broadcast.get(image)]
        } for image in ('image_url', 'image_url_medium', 'image_url_small') if broadcast.get(image)]

        return {
            'id': broadcast.get('id') or video_id,
            'title': title,
            'timestamp': parse_iso8601(broadcast.get('created_at')),
            'timestamp': parse_iso8601(broadcast.get('created_at')) or int_or_none(
                broadcast.get('created_at_ms'), scale=1000),
            'release_timestamp': int_or_none(broadcast.get('scheduled_start_ms'), scale=1000),
            'uploader': uploader,
            'uploader_id': broadcast.get('user_id') or broadcast.get('username'),
            'thumbnails': thumbnails,
            'view_count': int_or_none(broadcast.get('total_watched')),
            'tags': broadcast.get('tags'),
            'is_live': is_live,
            'live_status': {
                'running': 'is_live',
                'not_started': 'is_upcoming',
            }.get(traverse_obj(broadcast, ('state', {str.lower}))) or 'was_live'
        }

    @staticmethod
yt_dlp/extractor/radiocomercial.py (new file, 150 lines)
@@ -0,0 +1,150 @@
import itertools

from .common import InfoExtractor
from ..networking.exceptions import HTTPError
from ..utils import (
    ExtractorError,
    extract_attributes,
    get_element_by_class,
    get_element_html_by_class,
    get_element_text_and_html_by_tag,
    get_elements_html_by_class,
    int_or_none,
    join_nonempty,
    try_call,
    unified_strdate,
    update_url,
    urljoin
)
from ..utils.traversal import traverse_obj


class RadioComercialIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?radiocomercial\.pt/podcasts/[^/?#]+/t?(?P<season>\d+)/(?P<id>[\w-]+)'
    _TESTS = [{
        'url': 'https://radiocomercial.pt/podcasts/o-homem-que-mordeu-o-cao/t6/taylor-swift-entranhando-se-que-nem-uma-espada-no-ventre-dos-fas#page-content-wrapper',
        'md5': '5f4fe8e485b29d2e8fd495605bc2c7e4',
        'info_dict': {
            'id': 'taylor-swift-entranhando-se-que-nem-uma-espada-no-ventre-dos-fas',
            'ext': 'mp3',
            'title': 'Taylor Swift entranhando-se que nem uma espada no ventre dos fãs.',
            'release_date': '20231025',
            'thumbnail': r're:https://radiocomercial.pt/upload/[^.]+.jpg',
            'season': 6
        }
    }, {
        'url': 'https://radiocomercial.pt/podcasts/convenca-me-num-minuto/t3/convenca-me-num-minuto-que-os-lobisomens-existem',
        'md5': '47e96c273aef96a8eb160cd6cf46d782',
        'info_dict': {
            'id': 'convenca-me-num-minuto-que-os-lobisomens-existem',
            'ext': 'mp3',
            'title': 'Convença-me num minuto que os lobisomens existem',
            'release_date': '20231026',
            'thumbnail': r're:https://radiocomercial.pt/upload/[^.]+.jpg',
            'season': 3
        }
    }, {
        'url': 'https://radiocomercial.pt/podcasts/inacreditavel-by-ines-castel-branco/t2/o-desastre-de-aviao',
        'md5': '69be64255420fec23b7259955d771e54',
        'info_dict': {
            'id': 'o-desastre-de-aviao',
            'ext': 'mp3',
            'title': 'O desastre de avião',
            'description': 'md5:8a82beeb372641614772baab7246245f',
            'release_date': '20231101',
            'thumbnail': r're:https://radiocomercial.pt/upload/[^.]+.jpg',
            'season': 2
        },
        'params': {
            # inconsistent md5
            'skip_download': True,
        },
    }, {
        'url': 'https://radiocomercial.pt/podcasts/tnt-todos-no-top/2023/t-n-t-29-de-outubro',
        'md5': '91d32d4d4b1407272068b102730fc9fa',
        'info_dict': {
            'id': 't-n-t-29-de-outubro',
            'ext': 'mp3',
            'title': 'T.N.T 29 de outubro',
            'release_date': '20231029',
            'thumbnail': r're:https://radiocomercial.pt/upload/[^.]+.jpg',
            'season': 2023
        }
    }]

    def _real_extract(self, url):
        video_id, season = self._match_valid_url(url).group('id', 'season')
        webpage = self._download_webpage(url, video_id)
        return {
            'id': video_id,
            'title': self._html_extract_title(webpage),
            'description': self._og_search_description(webpage, default=None),
            'release_date': unified_strdate(get_element_by_class(
                'date', get_element_html_by_class('descriptions', webpage) or '')),
            'thumbnail': self._og_search_thumbnail(webpage),
            'season': int_or_none(season),
            'url': extract_attributes(get_element_html_by_class('audiofile', webpage) or '').get('href'),
        }


class RadioComercialPlaylistIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?radiocomercial\.pt/podcasts/(?P<id>[\w-]+)(?:/t?(?P<season>\d+))?/?(?:$|[?#])'
    _TESTS = [{
        'url': 'https://radiocomercial.pt/podcasts/convenca-me-num-minuto/t3',
        'info_dict': {
            'id': 'convenca-me-num-minuto_t3',
            'title': 'Convença-me num Minuto - Temporada 3',
        },
        'playlist_mincount': 32
    }, {
        'url': 'https://radiocomercial.pt/podcasts/o-homem-que-mordeu-o-cao',
        'info_dict': {
            'id': 'o-homem-que-mordeu-o-cao',
            'title': 'O Homem Que Mordeu o Cão',
        },
        'playlist_mincount': 19
    }, {
        'url': 'https://radiocomercial.pt/podcasts/as-minhas-coisas-favoritas',
        'info_dict': {
            'id': 'as-minhas-coisas-favoritas',
            'title': 'As Minhas Coisas Favoritas',
        },
        'playlist_mincount': 131
    }, {
        'url': 'https://radiocomercial.pt/podcasts/tnt-todos-no-top/t2023',
        'info_dict': {
            'id': 'tnt-todos-no-top_t2023',
            'title': 'TNT - Todos No Top - Temporada 2023',
        },
        'playlist_mincount': 39
    }]

    def _entries(self, url, playlist_id):
        for page in itertools.count(1):
            try:
                webpage = self._download_webpage(
                    f'{url}/{page}', playlist_id, f'Downloading page {page}')
            except ExtractorError as e:
                if isinstance(e.cause, HTTPError) and e.cause.status == 404:
                    break
                raise

            episodes = get_elements_html_by_class('tm-ouvir-podcast', webpage)
            if not episodes:
                break
            for url_path in traverse_obj(episodes, (..., {extract_attributes}, 'href')):
                episode_url = urljoin(url, url_path)
                if RadioComercialIE.suitable(episode_url):
                    yield episode_url

    def _real_extract(self, url):
        podcast, season = self._match_valid_url(url).group('id', 'season')
        playlist_id = join_nonempty(podcast, season, delim='_t')
        url = update_url(url, query=None, fragment=None)
        webpage = self._download_webpage(url, playlist_id)

        name = try_call(lambda: get_element_text_and_html_by_tag('h1', webpage)[0])
        title = name if name == season else join_nonempty(name, season, delim=' - Temporada ')

        return self.playlist_from_matches(
            self._entries(url, playlist_id), playlist_id, title, ie=RadioComercialIE)
200
yt_dlp/extractor/sbscokr.py
Normal file
200
yt_dlp/extractor/sbscokr.py
Normal file
|
@ -0,0 +1,200 @@
|
|||
from .common import InfoExtractor
|
||||
from ..utils import (
|
||||
clean_html,
|
||||
int_or_none,
|
||||
parse_iso8601,
|
||||
parse_resolution,
|
||||
url_or_none,
|
||||
)
|
||||
from ..utils.traversal import traverse_obj
|
||||
|
||||
|
||||
class SBSCoKrIE(InfoExtractor):
|
||||
IE_NAME = 'sbs.co.kr'
|
||||
_VALID_URL = [r'https?://allvod\.sbs\.co\.kr/allvod/vod(?:Package)?EndPage\.do\?(?:[^#]+&)?mdaId=(?P<id>\d+)',
|
||||
r'https?://programs\.sbs\.co\.kr/(?:enter|drama|culture|sports|plus|mtv|kth)/[a-z0-9]+/(?:vod|clip|movie)/\d+/(?P<id>(?:OC)?\d+)']
|
||||
|
||||
_TESTS = [{
|
||||
'url': 'https://programs.sbs.co.kr/enter/dongsang2/clip/52007/OC467706746?div=main_pop_clip',
|
||||
'md5': 'c3f6d45e1fb5682039d94cda23c36f19',
|
||||
'info_dict': {
|
||||
'id': 'OC467706746',
|
||||
'ext': 'mp4',
|
||||
'title': '‘아슬아슬’ 박군♥한영의 새 집 인테리어 대첩♨',
|
||||
'description': 'md5:6a71eb1979ee4a94ea380310068ccab4',
|
||||
'thumbnail': 'https://img2.sbs.co.kr/ops_clip_img/2023/10/10/34c4c0f9-a9a5-4ff6-a92e-9bb4b5f6fa65915w1280.jpg',
|
||||
'release_timestamp': 1696889400,
|
||||
'release_date': '20231009',
|
||||
            'view_count': int,
            'like_count': int,
            'duration': 238,
            'age_limit': 15,
            'series': '동상이몽2_너는 내 운명',
            'episode': '레이디제인, ‘혼전임신설’ ‘3개월’ 앞당긴 결혼식 비하인드 스토리 최초 공개!',
            'episode_number': 311,
        },
    }, {
        'url': 'https://allvod.sbs.co.kr/allvod/vodPackageEndPage.do?mdaId=22000489324&combiId=PA000000284&packageType=A&isFreeYN=',
        'md5': 'bf46b2e89fda7ae7de01f5743cef7236',
        'info_dict': {
            'id': '22000489324',
            'ext': 'mp4',
            'title': '[다시보기] 트롤리 15회',
            'description': 'md5:0e55d74bef1ac55c61ae90c73ac485f4',
            'thumbnail': 'https://img2.sbs.co.kr/img/sbs_cms/WE/2023/02/14/arC1676333794938-1280-720.jpg',
            'release_timestamp': 1676325600,
            'release_date': '20230213',
            'view_count': int,
            'like_count': int,
            'duration': 5931,
            'age_limit': 15,
            'series': '트롤리',
            'episode': '이거 다 거짓말이야',
            'episode_number': 15,
        },
    }, {
        'url': 'https://programs.sbs.co.kr/enter/fourman/vod/69625/22000508948',
        'md5': '41e8ae4cc6c8424f4e4d76661a4becbf',
        'info_dict': {
            'id': '22000508948',
            'ext': 'mp4',
            'title': '[다시보기] 신발 벗고 돌싱포맨 104회',
            'description': 'md5:c6a247383c4dd661e4b956bf4d3b586e',
            'thumbnail': 'https://img2.sbs.co.kr/img/sbs_cms/WE/2023/08/30/2vb1693355446261-1280-720.jpg',
            'release_timestamp': 1693342800,
            'release_date': '20230829',
            'view_count': int,
            'like_count': int,
            'duration': 7036,
            'age_limit': 15,
            'series': '신발 벗고 돌싱포맨',
            'episode': '돌싱포맨 저격수들 등장!',
            'episode_number': 104,
        },
    }]

    def _call_api(self, video_id, rscuse=''):
        return self._download_json(
            f'https://api.play.sbs.co.kr/1.0/sbs_vodall/{video_id}', video_id,
            note=f'Downloading m3u8 information {rscuse}',
            query={
                'platform': 'pcweb',
                'protocol': 'download',
                'absolute_show': 'Y',
                'service': 'program',
                'ssl': 'Y',
                'rscuse': rscuse,
            })

    def _real_extract(self, url):
        video_id = self._match_id(url)

        details = self._call_api(video_id)
        source = traverse_obj(details, ('vod', 'source', 'mediasource', {dict})) or {}

        formats = []
        for stream in traverse_obj(details, (
                'vod', 'source', 'mediasourcelist', lambda _, v: v['mediaurl'] or v['mediarscuse']
        ), default=[source]):
            if not stream.get('mediaurl'):
                new_source = traverse_obj(
                    self._call_api(video_id, rscuse=stream['mediarscuse']),
                    ('vod', 'source', 'mediasource', {dict})) or {}
                if new_source.get('mediarscuse') == source.get('mediarscuse') or not new_source.get('mediaurl'):
                    continue
                stream = new_source
            formats.append({
                'url': stream['mediaurl'],
                'format_id': stream.get('mediarscuse'),
                'format_note': stream.get('medianame'),
                **parse_resolution(stream.get('quality')),
                'preference': int_or_none(stream.get('mediarscuse'))
            })

        caption_url = traverse_obj(details, ('vod', 'source', 'subtitle', {url_or_none}))

        return {
            'id': video_id,
            **traverse_obj(details, ('vod', {
                'title': ('info', 'title'),
                'duration': ('info', 'duration', {int_or_none}),
                'view_count': ('info', 'viewcount', {int_or_none}),
                'like_count': ('info', 'likecount', {int_or_none}),
                'description': ('info', 'synopsis', {clean_html}),
                'episode': ('info', 'content', ('contenttitle', 'title')),
                'episode_number': ('info', 'content', 'number', {int_or_none}),
                'series': ('info', 'program', 'programtitle'),
                'age_limit': ('info', 'targetage', {int_or_none}),
                'release_timestamp': ('info', 'broaddate', {parse_iso8601}),
                'thumbnail': ('source', 'thumbnail', 'origin', {url_or_none}),
            }), get_all=False),
            'formats': formats,
            'subtitles': {'ko': [{'url': caption_url}]} if caption_url else None,
        }


class SBSCoKrAllvodProgramIE(InfoExtractor):
    IE_NAME = 'sbs.co.kr:allvod_program'
    _VALID_URL = r'https?://allvod\.sbs\.co\.kr/allvod/vod(?:Free)?ProgramDetail\.do\?(?:[^#]+&)?pgmId=(?P<id>P?\d+)'

    _TESTS = [{
        'url': 'https://allvod.sbs.co.kr/allvod/vodFreeProgramDetail.do?type=legend&pgmId=22000010159&listOrder=vodCntAsc',
        'info_dict': {
            '_type': 'playlist',
            'id': '22000010159',
        },
        'playlist_count': 18,
    }, {
        'url': 'https://allvod.sbs.co.kr/allvod/vodProgramDetail.do?pgmId=P460810577',
        'info_dict': {
            '_type': 'playlist',
            'id': 'P460810577',
        },
        'playlist_count': 13,
    }]

    def _real_extract(self, url):
        program_id = self._match_id(url)

        details = self._download_json(
            'https://allvod.sbs.co.kr/allvod/vodProgramDetail/vodProgramDetailAjax.do',
            program_id, note='Downloading program details',
            query={
                'pgmId': program_id,
                'currentCount': '10000',
            })

        return self.playlist_result(
            [self.url_result(f'https://allvod.sbs.co.kr/allvod/vodEndPage.do?mdaId={video_id}', SBSCoKrIE)
             for video_id in traverse_obj(details, ('list', ..., 'mdaId'))], program_id)


class SBSCoKrProgramsVodIE(InfoExtractor):
    IE_NAME = 'sbs.co.kr:programs_vod'
    _VALID_URL = r'https?://programs\.sbs\.co\.kr/(?:enter|drama|culture|sports|plus|mtv)/(?P<id>[a-z0-9]+)/vods'

    _TESTS = [{
        'url': 'https://programs.sbs.co.kr/culture/morningwide/vods/65007',
        'info_dict': {
            '_type': 'playlist',
            'id': '00000210215',
        },
        'playlist_mincount': 9782,
    }, {
        'url': 'https://programs.sbs.co.kr/enter/dongsang2/vods/52006',
        'info_dict': {
            '_type': 'playlist',
            'id': '22000010476',
        },
        'playlist_mincount': 312,
    }]

    def _real_extract(self, url):
        program_slug = self._match_id(url)

        program_id = self._download_json(
            f'https://static.apis.sbs.co.kr/program-api/1.0/menu/{program_slug}', program_slug,
            note='Downloading program menu data')['program']['programid']

        return self.url_result(
            f'https://allvod.sbs.co.kr/allvod/vodProgramDetail.do?pgmId={program_id}', SBSCoKrAllvodProgramIE)
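The `traverse_obj` paths above do most of the metadata work here; a minimal sketch of their semantics against made-up API data (not a real SBS response):

    from yt_dlp.utils import int_or_none, traverse_obj

    details = {'vod': {'info': {'viewcount': '42', 'content': {'title': 'Episode title'}}}}  # hypothetical
    # `{int_or_none}` applies the function to the matched value
    assert traverse_obj(details, ('vod', 'info', 'viewcount', {int_or_none})) == 42
    # a tuple of keys branches into alternatives; with get_all=False the first match wins
    assert traverse_obj(details, ('vod', 'info', 'content', ('contenttitle', 'title')), get_all=False) == 'Episode title'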
@@ -38,9 +38,48 @@ class StacommuBaseIE(WrestleUniverseBaseIE):
            return None
        return traverse_obj(encryption_data, {'key': ('key', {decrypt}), 'iv': ('iv', {decrypt})})

    def _extract_vod(self, url):
        video_id = self._match_id(url)
        video_info = self._download_metadata(
            url, video_id, 'ja', ('dehydratedState', 'queries', 0, 'state', 'data'))
        hls_info, decrypt = self._call_encrypted_api(
            video_id, ':watch', 'stream information', data={'method': 1})

        return {
            'id': video_id,
            'formats': self._get_formats(hls_info, ('protocolHls', 'url', {url_or_none}), video_id),
            'hls_aes': self._extract_hls_key(hls_info, 'protocolHls', decrypt),
            **traverse_obj(video_info, {
                'title': ('displayName', {str}),
                'description': ('description', {str}),
                'timestamp': ('watchStartTime', {int_or_none}),
                'thumbnail': ('keyVisualUrl', {url_or_none}),
                'cast': ('casts', ..., 'displayName', {str}),
                'duration': ('duration', {int}),
            }),
        }

    def _extract_ppv(self, url):
        video_id = self._match_id(url)
        video_info = self._call_api(video_id, msg='video information', query={'al': 'ja'}, auth=False)
        hls_info, decrypt = self._call_encrypted_api(
            video_id, ':watchArchive', 'stream information', data={'method': 1})

        return {
            'id': video_id,
            'formats': self._get_formats(hls_info, ('hls', 'urls', ..., {url_or_none}), video_id),
            'hls_aes': self._extract_hls_key(hls_info, 'hls', decrypt),
            **traverse_obj(video_info, {
                'title': ('displayName', {str}),
                'timestamp': ('startTime', {int_or_none}),
                'thumbnail': ('keyVisualUrl', {url_or_none}),
                'duration': ('duration', {int_or_none}),
            }),
        }


class StacommuVODIE(StacommuBaseIE):
    _VALID_URL = r'https?://www\.stacommu\.jp/videos/episodes/(?P<id>[\da-zA-Z]+)'
    _VALID_URL = r'https?://www\.stacommu\.jp/(?:en/)?videos/episodes/(?P<id>[\da-zA-Z]+)'
    _TESTS = [{
        # not encrypted
        'url': 'https://www.stacommu.jp/videos/episodes/aXcVKjHyAENEjard61soZZ',

@@ -79,34 +118,19 @@ class StacommuVODIE(StacommuBaseIE):
        'params': {
            'skip_download': 'm3u8',
        },
    }, {
        'url': 'https://www.stacommu.jp/en/videos/episodes/aXcVKjHyAENEjard61soZZ',
        'only_matching': True,
    }]

    _API_PATH = 'videoEpisodes'

    def _real_extract(self, url):
        video_id = self._match_id(url)
        video_info = self._download_metadata(
            url, video_id, 'ja', ('dehydratedState', 'queries', 0, 'state', 'data'))
        hls_info, decrypt = self._call_encrypted_api(
            video_id, ':watch', 'stream information', data={'method': 1})

        return {
            'id': video_id,
            'formats': self._get_formats(hls_info, ('protocolHls', 'url', {url_or_none}), video_id),
            'hls_aes': self._extract_hls_key(hls_info, 'protocolHls', decrypt),
            **traverse_obj(video_info, {
                'title': ('displayName', {str}),
                'description': ('description', {str}),
                'timestamp': ('watchStartTime', {int_or_none}),
                'thumbnail': ('keyVisualUrl', {url_or_none}),
                'cast': ('casts', ..., 'displayName', {str}),
                'duration': ('duration', {int}),
            }),
        }
        return self._extract_vod(url)


class StacommuLiveIE(StacommuBaseIE):
    _VALID_URL = r'https?://www\.stacommu\.jp/live/(?P<id>[\da-zA-Z]+)'
    _VALID_URL = r'https?://www\.stacommu\.jp/(?:en/)?live/(?P<id>[\da-zA-Z]+)'
    _TESTS = [{
        'url': 'https://www.stacommu.jp/live/d2FJ3zLnndegZJCAEzGM3m',
        'info_dict': {

@@ -125,24 +149,83 @@ class StacommuLiveIE(StacommuBaseIE):
        'params': {
            'skip_download': 'm3u8',
        },
    }, {
        'url': 'https://www.stacommu.jp/en/live/d2FJ3zLnndegZJCAEzGM3m',
        'only_matching': True,
    }]

    _API_PATH = 'events'

    def _real_extract(self, url):
        video_id = self._match_id(url)
        video_info = self._call_api(video_id, msg='video information', query={'al': 'ja'}, auth=False)
        hls_info, decrypt = self._call_encrypted_api(
            video_id, ':watchArchive', 'stream information', data={'method': 1})
        return self._extract_ppv(url)

        return {
            'id': video_id,
            'formats': self._get_formats(hls_info, ('hls', 'urls', ..., {url_or_none}), video_id),
            'hls_aes': self._extract_hls_key(hls_info, 'hls', decrypt),
            **traverse_obj(video_info, {
                'title': ('displayName', {str}),
                'timestamp': ('startTime', {int_or_none}),
                'thumbnail': ('keyVisualUrl', {url_or_none}),
                'duration': ('duration', {int_or_none}),
            }),
        }


class TheaterComplexTownBaseIE(StacommuBaseIE):
    _NETRC_MACHINE = 'theatercomplextown'
    _API_HOST = 'api.theater-complex.town'
    _LOGIN_QUERY = {'key': 'AIzaSyAgNCqToaIz4a062EeIrkhI_xetVfAOrfc'}
    _LOGIN_HEADERS = {
        'Accept': '*/*',
        'Content-Type': 'application/json',
        'X-Client-Version': 'Chrome/JsCore/9.23.0/FirebaseCore-web',
        'Referer': 'https://www.theater-complex.town/',
        'Origin': 'https://www.theater-complex.town',
    }


class TheaterComplexTownVODIE(TheaterComplexTownBaseIE):
    _VALID_URL = r'https?://(?:www\.)?theater-complex\.town/(?:en/)?videos/episodes/(?P<id>\w+)'
    IE_NAME = 'theatercomplextown:vod'
    _TESTS = [{
        'url': 'https://www.theater-complex.town/videos/episodes/hoxqidYNoAn7bP92DN6p78',
        'info_dict': {
            'id': 'hoxqidYNoAn7bP92DN6p78',
            'ext': 'mp4',
            'title': '演劇ドラフトグランプリ2023 劇団『恋のぼり』〜劇団名決定秘話ラジオ',
            'description': 'md5:a7e2e9cf570379ea67fb630f345ff65d',
            'cast': ['玉城 裕規', '石川 凌雅'],
            'thumbnail': 'https://image.theater-complex.town/5URnXX6KCeDysuFrPkP38o/5URnXX6KCeDysuFrPkP38o',
            'upload_date': '20231103',
            'timestamp': 1699016400,
            'duration': 868,
        },
        'params': {
            'skip_download': 'm3u8',
        },
    }, {
        'url': 'https://www.theater-complex.town/en/videos/episodes/6QT7XYwM9dJz5Gf9VB6K5y',
        'only_matching': True,
    }]

    _API_PATH = 'videoEpisodes'

    def _real_extract(self, url):
        return self._extract_vod(url)


class TheaterComplexTownPPVIE(TheaterComplexTownBaseIE):
    _VALID_URL = r'https?://(?:www\.)?theater-complex\.town/(?:en/)?ppv/(?P<id>\w+)'
    IE_NAME = 'theatercomplextown:ppv'
    _TESTS = [{
        'url': 'https://www.theater-complex.town/ppv/wytW3X7khrjJBUpKuV3jen',
        'info_dict': {
            'id': 'wytW3X7khrjJBUpKuV3jen',
            'ext': 'mp4',
            'title': 'BREAK FREE STARS 11月5日(日)12:30千秋楽公演',
            'thumbnail': 'https://image.theater-complex.town/5GWEB31JcTUfjtgdeV5t6o/5GWEB31JcTUfjtgdeV5t6o',
            'upload_date': '20231105',
            'timestamp': 1699155000,
            'duration': 8378,
        },
        'params': {
            'skip_download': 'm3u8',
        },
    }, {
        'url': 'https://www.theater-complex.town/en/ppv/wytW3X7khrjJBUpKuV3jen',
        'only_matching': True,
    }]

    _API_PATH = 'events'

    def _real_extract(self, url):
        return self._extract_ppv(url)
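With `_extract_vod`/`_extract_ppv` hoisted into the shared base class, a new front-end for the same backend reduces to a URL pattern plus an API path. A hypothetical sketch (site name and URL scheme are made up):

    class ExampleTownVODIE(TheaterComplexTownBaseIE):  # hypothetical extractor
        _VALID_URL = r'https?://(?:www\.)?example-town\.jp/(?:en/)?videos/episodes/(?P<id>\w+)'  # assumed URL scheme
        _API_PATH = 'videoEpisodes'

        def _real_extract(self, url):
            return self._extract_vod(url)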
@@ -1,66 +0,0 @@
from .common import InfoExtractor
from ..utils import remove_end


class ThisAVIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?thisav\.com/video/(?P<id>[0-9]+)/.*'
    _TESTS = [{
        # jwplayer
        'url': 'http://www.thisav.com/video/47734/%98%26sup1%3B%83%9E%83%82---just-fit.html',
        'md5': '0480f1ef3932d901f0e0e719f188f19b',
        'info_dict': {
            'id': '47734',
            'ext': 'flv',
            'title': '高樹マリア - Just fit',
            'uploader': 'dj7970',
            'uploader_id': 'dj7970'
        }
    }, {
        # html5 media
        'url': 'http://www.thisav.com/video/242352/nerdy-18yo-big-ass-tattoos-and-glasses.html',
        'md5': 'ba90c076bd0f80203679e5b60bf523ee',
        'info_dict': {
            'id': '242352',
            'ext': 'mp4',
            'title': 'Nerdy 18yo Big Ass Tattoos and Glasses',
            'uploader': 'cybersluts',
            'uploader_id': 'cybersluts',
        },
    }]

    def _real_extract(self, url):
        mobj = self._match_valid_url(url)

        video_id = mobj.group('id')
        webpage = self._download_webpage(url, video_id)
        title = remove_end(self._html_extract_title(webpage), ' - 視頻 - ThisAV.com-世界第一中文成人娛樂網站')
        video_url = self._html_search_regex(
            r"addVariable\('file','([^']+)'\);", webpage, 'video url', default=None)
        if video_url:
            info_dict = {
                'formats': [{
                    'url': video_url,
                }],
            }
        else:
            entries = self._parse_html5_media_entries(url, webpage, video_id)
            if entries:
                info_dict = entries[0]
            else:
                info_dict = self._extract_jwplayer_data(
                    webpage, video_id, require_title=False)
        uploader = self._html_search_regex(
            r': <a href="http://www\.thisav\.com/user/[0-9]+/(?:[^"]+)">([^<]+)</a>',
            webpage, 'uploader name', fatal=False)
        uploader_id = self._html_search_regex(
            r': <a href="http://www\.thisav\.com/user/[0-9]+/([^"]+)">(?:[^<]+)</a>',
            webpage, 'uploader id', fatal=False)

        info_dict.update({
            'id': video_id,
            'uploader': uploader,
            'uploader_id': uploader_id,
            'title': title,
        })

        return info_dict
@@ -1,11 +1,23 @@
import json

from .common import InfoExtractor
from .zype import ZypeIE
from ..networking import HEADRequest
from ..networking.exceptions import HTTPError
from ..utils import (
    ExtractorError,
    filter_dict,
    parse_qs,
    try_call,
    urlencode_postdata,
)


class ThisOldHouseIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?thisoldhouse\.com/(?:watch|how-to|tv-episode|(?:[^/]+/)?\d+)/(?P<id>[^/?#]+)'
    _NETRC_MACHINE = 'thisoldhouse'
    _VALID_URL = r'https?://(?:www\.)?thisoldhouse\.com/(?:watch|how-to|tv-episode|(?:[^/?#]+/)?\d+)/(?P<id>[^/?#]+)'
    _TESTS = [{
        'url': 'https://www.thisoldhouse.com/how-to/how-to-build-storage-bench',
        'url': 'https://www.thisoldhouse.com/furniture/21017078/how-to-build-a-storage-bench',
        'info_dict': {
            'id': '5dcdddf673c3f956ef5db202',
            'ext': 'mp4',

@@ -23,13 +35,16 @@ class ThisOldHouseIE(InfoExtractor):
            'skip_download': True,
        },
    }, {
        # Page no longer has video
        'url': 'https://www.thisoldhouse.com/watch/arlington-arts-crafts-arts-and-crafts-class-begins',
        'only_matching': True,
    }, {
        # 404 Not Found
        'url': 'https://www.thisoldhouse.com/tv-episode/ask-toh-shelf-rough-electric',
        'only_matching': True,
    }, {
        'url': 'https://www.thisoldhouse.com/furniture/21017078/how-to-build-a-storage-bench',
        # 404 Not Found
        'url': 'https://www.thisoldhouse.com/how-to/how-to-build-storage-bench',
        'only_matching': True,
    }, {
        'url': 'https://www.thisoldhouse.com/21113884/s41-e13-paradise-lost',

@@ -39,17 +54,51 @@ class ThisOldHouseIE(InfoExtractor):
        'url': 'https://www.thisoldhouse.com/21083431/seaside-transformation-the-westerly-project',
        'only_matching': True,
    }]
    _ZYPE_TMPL = 'https://player.zype.com/embed/%s.html?api_key=hsOk_yMSPYNrT22e9pu8hihLXjaZf0JW5jsOWv4ZqyHJFvkJn6rtToHl09tbbsbe'

    _LOGIN_URL = 'https://login.thisoldhouse.com/usernamepassword/login'

    def _perform_login(self, username, password):
        self._request_webpage(
            HEADRequest('https://www.thisoldhouse.com/insider'), None, 'Requesting session cookies')
        urlh = self._request_webpage(
            'https://www.thisoldhouse.com/wp-login.php', None, 'Requesting login info',
            errnote='Unable to login', query={'redirect_to': 'https://www.thisoldhouse.com/insider'})

        try:
            auth_form = self._download_webpage(
                self._LOGIN_URL, None, 'Submitting credentials', headers={
                    'Content-Type': 'application/json',
                    'Referer': urlh.url,
                }, data=json.dumps(filter_dict({
                    **{('client_id' if k == 'client' else k): v[0] for k, v in parse_qs(urlh.url).items()},
                    'tenant': 'thisoldhouse',
                    'username': username,
                    'password': password,
                    'popup_options': {},
                    'sso': True,
                    '_csrf': try_call(lambda: self._get_cookies(self._LOGIN_URL)['_csrf'].value),
                    '_intstate': 'deprecated',
                }), separators=(',', ':')).encode())
        except ExtractorError as e:
            if isinstance(e.cause, HTTPError) and e.cause.status == 401:
                raise ExtractorError('Invalid username or password', expected=True)
            raise

        self._request_webpage(
            'https://login.thisoldhouse.com/login/callback', None, 'Completing login',
            data=urlencode_postdata(self._hidden_inputs(auth_form)))

    def _real_extract(self, url):
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)
        if 'To Unlock This content' in webpage:
            self.raise_login_required(method='cookies')
        video_url = self._search_regex(
            self.raise_login_required(
                'This video is only available for subscribers. '
                'Note that --cookies-from-browser may not work due to this site using session cookies')

        video_url, video_id = self._search_regex(
            r'<iframe[^>]+src=[\'"]((?:https?:)?//(?:www\.)?thisoldhouse\.(?:chorus\.build|com)/videos/zype/([0-9a-f]{24})[^\'"]*)[\'"]',
            webpage, 'video url')
        if 'subscription_required=true' in video_url or 'c-entry-group-labels__image' in webpage:
            return self.url_result(self._request_webpage(HEADRequest(video_url), display_id).url, 'Zype', display_id)
        video_id = self._search_regex(r'(?:https?:)?//(?:www\.)?thisoldhouse\.(?:chorus\.build|com)/videos/zype/([0-9a-f]{24})', video_url, 'video id')
        return self.url_result(self._ZYPE_TMPL % video_id, 'Zype', video_id)
            webpage, 'video url', group=(1, 2))
        video_url = self._request_webpage(HEADRequest(video_url), video_id, 'Resolving Zype URL').url

        return self.url_result(video_url, ZypeIE, video_id)
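The JSON body posted to the login endpoint above is mostly echoed back from the `wp-login.php` redirect URL; a rough sketch of that assembly with a made-up redirect URL (only `client` is renamed, and `filter_dict` drops unset keys):

    from yt_dlp.utils import filter_dict, parse_qs

    redirect_url = 'https://login.thisoldhouse.com/authorize?client=abc123&state=xyz'  # hypothetical
    payload = filter_dict({
        **{('client_id' if k == 'client' else k): v[0] for k, v in parse_qs(redirect_url).items()},
        'tenant': 'thisoldhouse',
        '_csrf': None,  # dropped by filter_dict when the cookie is absent
    })
    assert payload == {'client_id': 'abc123', 'state': 'xyz', 'tenant': 'thisoldhouse'}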
@@ -1563,7 +1563,7 @@ class TwitterBroadcastIE(TwitterBaseIE, PeriscopeBaseIE):
    IE_NAME = 'twitter:broadcast'
    _VALID_URL = TwitterBaseIE._BASE_REGEX + r'i/broadcasts/(?P<id>[0-9a-zA-Z]{13})'

    _TEST = {
    _TESTS = [{
        # untitled Periscope video
        'url': 'https://twitter.com/i/broadcasts/1yNGaQLWpejGj',
        'info_dict': {

@@ -1571,11 +1571,42 @@ class TwitterBroadcastIE(TwitterBaseIE, PeriscopeBaseIE):
            'ext': 'mp4',
            'title': 'Andrea May Sahouri - Periscope Broadcast',
            'uploader': 'Andrea May Sahouri',
            'uploader_id': '1PXEdBZWpGwKe',
            'uploader_id': 'andreamsahouri',
            'uploader_url': 'https://twitter.com/andreamsahouri',
            'timestamp': 1590973638,
            'upload_date': '20200601',
            'thumbnail': r're:^https?://[^?#]+\.jpg\?token=',
            'view_count': int,
        },
    }
    }, {
        'url': 'https://twitter.com/i/broadcasts/1ZkKzeyrPbaxv',
        'info_dict': {
            'id': '1ZkKzeyrPbaxv',
            'ext': 'mp4',
            'title': 'Starship | SN10 | High-Altitude Flight Test',
            'uploader': 'SpaceX',
            'uploader_id': 'SpaceX',
            'uploader_url': 'https://twitter.com/SpaceX',
            'timestamp': 1614812942,
            'upload_date': '20210303',
            'thumbnail': r're:^https?://[^?#]+\.jpg\?token=',
            'view_count': int,
        },
    }, {
        'url': 'https://twitter.com/i/broadcasts/1OyKAVQrgzwGb',
        'info_dict': {
            'id': '1OyKAVQrgzwGb',
            'ext': 'mp4',
            'title': 'Starship Flight Test',
            'uploader': 'SpaceX',
            'uploader_id': 'SpaceX',
            'uploader_url': 'https://twitter.com/SpaceX',
            'timestamp': 1681993964,
            'upload_date': '20230420',
            'thumbnail': r're:^https?://[^?#]+\.jpg\?token=',
            'view_count': int,
        },
    }]

    def _real_extract(self, url):
        broadcast_id = self._match_id(url)

@@ -1585,6 +1616,12 @@ class TwitterBroadcastIE(TwitterBaseIE, PeriscopeBaseIE):
        if not broadcast:
            raise ExtractorError('Broadcast no longer exists', expected=True)
        info = self._parse_broadcast_data(broadcast, broadcast_id)
        info['title'] = broadcast.get('status') or info.get('title')
        info['uploader_id'] = broadcast.get('twitter_username') or info.get('uploader_id')
        info['uploader_url'] = format_field(broadcast, 'twitter_username', 'https://twitter.com/%s', default=None)
        if info['live_status'] == 'is_upcoming':
            return info

        media_key = broadcast['media_key']
        source = self._call_api(
            f'live_video_stream/status/{media_key}', media_key)['source']
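The new metadata fields lean on `format_field` so that broadcasts without a Twitter handle simply omit `uploader_url`; a quick sketch with trimmed-down data:

    from yt_dlp.utils import format_field

    broadcast = {'twitter_username': 'SpaceX'}  # illustrative fragment of the API response
    assert format_field(broadcast, 'twitter_username', 'https://twitter.com/%s', default=None) == 'https://twitter.com/SpaceX'
    assert format_field({}, 'twitter_username', 'https://twitter.com/%s', default=None) is None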
@@ -164,11 +164,15 @@ class KnownPiracyIE(UnsupportedInfoExtractor):
        r'viewsb\.com',
        r'filemoon\.sx',
        r'hentai\.animestigma\.com',
        r'thisav\.com',
    )

    _TESTS = [{
        'url': 'http://dood.to/e/5s1wmbdacezb',
        'only_matching': True,
    }, {
        'url': 'https://thisav.com/en/terms',
        'only_matching': True,
    }]

    def _real_extract(self, url):
@@ -1,3 +1,4 @@
import json
import random
import itertools
import urllib.parse

@@ -18,24 +19,33 @@ from ..utils import (


class WeiboBaseIE(InfoExtractor):
    def _update_visitor_cookies(self, video_id):
    def _update_visitor_cookies(self, visitor_url, video_id):
        headers = {'Referer': visitor_url}
        chrome_ver = self._search_regex(
            r'Chrome/(\d+)', self.get_param('http_headers')['User-Agent'], 'user agent version', default='90')
        visitor_data = self._download_json(
            'https://passport.weibo.com/visitor/genvisitor', video_id,
            note='Generating first-visit guest request',
            transform_source=strip_jsonp,
            headers=headers, transform_source=strip_jsonp,
            data=urlencode_postdata({
                'cb': 'gen_callback',
                'fp': '{"os":"2","browser":"Gecko57,0,0,0","fonts":"undefined","screenInfo":"1440*900*24","plugins":""}',
            }))
                'fp': json.dumps({
                    'os': '1',
                    'browser': f'Chrome{chrome_ver},0,0,0',
                    'fonts': 'undefined',
                    'screenInfo': '1920*1080*24',
                    'plugins': ''
                }, separators=(',', ':'))}))['data']

        self._download_webpage(
            'https://passport.weibo.com/visitor/visitor', video_id,
            note='Running first-visit callback to get guest cookies',
            query={
            headers=headers, query={
                'a': 'incarnate',
                't': visitor_data['data']['tid'],
                'w': 2,
                'c': '%03d' % visitor_data['data']['confidence'],
                't': visitor_data['tid'],
                'w': 3 if visitor_data.get('new_tid') else 2,
                'c': f'{visitor_data.get("confidence", 100):03d}',
                'gc': '',
                'cb': 'cross_domain',
                'from': 'weibo',
                '_rand': random.random(),

@@ -44,7 +54,7 @@ class WeiboBaseIE(InfoExtractor):
    def _weibo_download_json(self, url, video_id, *args, fatal=True, note='Downloading JSON metadata', **kwargs):
        webpage, urlh = self._download_webpage_handle(url, video_id, *args, fatal=fatal, note=note, **kwargs)
        if urllib.parse.urlparse(urlh.url).netloc == 'passport.weibo.com':
            self._update_visitor_cookies(video_id)
            self._update_visitor_cookies(urlh.url, video_id)
            webpage = self._download_webpage(url, video_id, *args, fatal=fatal, note=note, **kwargs)
        return self._parse_json(webpage, video_id, fatal=fatal)
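The reworked visitor handshake is two requests: `genvisitor` returns a `tid`/`confidence` pair for a browser fingerprint, and `visitor?a=incarnate` exchanges it for guest cookies. A compressed sketch of the fingerprint now sent (values mirror the code above; which fields Weibo actually checks is undocumented):

    import json

    chrome_ver = '118'  # parsed from the configured User-Agent, '90' as fallback
    fp = json.dumps({
        'os': '1',
        'browser': f'Chrome{chrome_ver},0,0,0',
        'fonts': 'undefined',
        'screenInfo': '1920*1080*24',
        'plugins': '',
    }, separators=(',', ':'))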
@@ -2,10 +2,12 @@ from .common import InfoExtractor
from ..utils import (
    ExtractorError,
    int_or_none,
    str_or_none,
    js_to_json,
    parse_filesize,
    parse_resolution,
    str_or_none,
    traverse_obj,
    url_basename,
    urlencode_postdata,
    urljoin,
)

@@ -41,6 +43,18 @@ class ZoomIE(InfoExtractor):
            'ext': 'mp4',
            'title': 'Timea Andrea Lelik\'s Personal Meeting Room',
        },
        'skip': 'This recording has expired',
    }, {
        # view_with_share URL
        'url': 'https://cityofdetroit.zoom.us/rec/share/VjE-5kW3xmgbEYqR5KzRgZ1OFZvtMtiXk5HyRJo5kK4m5PYE6RF4rF_oiiO_9qaM.UTAg1MI7JSnF3ZjX',
        'md5': 'bdc7867a5934c151957fb81321b3c024',
        'info_dict': {
            'id': 'VjE-5kW3xmgbEYqR5KzRgZ1OFZvtMtiXk5HyRJo5kK4m5PYE6RF4rF_oiiO_9qaM.UTAg1MI7JSnF3ZjX',
            'ext': 'mp4',
            'title': 'February 2022 Detroit Revenue Estimating Conference',
            'duration': 7299,
            'formats': 'mincount:3',
        },
    }]

    def _get_page_data(self, webpage, video_id):

@@ -72,6 +86,7 @@ class ZoomIE(InfoExtractor):

    def _real_extract(self, url):
        base_url, url_type, video_id = self._match_valid_url(url).group('base_url', 'type', 'id')
        query = {}

        if url_type == 'share':
            webpage = self._get_real_webpage(url, base_url, video_id, 'share')

@@ -80,6 +95,7 @@ class ZoomIE(InfoExtractor):
                f'{base_url}nws/recording/1.0/play/share-info/{meeting_id}',
                video_id, note='Downloading share info JSON')['result']['redirectUrl']
            url = urljoin(base_url, redirect_path)
            query['continueMode'] = 'true'

        webpage = self._get_real_webpage(url, base_url, video_id, 'play')
        file_id = self._get_page_data(webpage, video_id)['fileId']

@@ -88,7 +104,7 @@ class ZoomIE(InfoExtractor):
            raise ExtractorError('Unable to extract file ID')

        data = self._download_json(
            f'{base_url}nws/recording/1.0/play/info/{file_id}', video_id,
            f'{base_url}nws/recording/1.0/play/info/{file_id}', video_id, query=query,
            note='Downloading play info JSON')['result']

        subtitles = {}

@@ -104,10 +120,10 @@ class ZoomIE(InfoExtractor):
        if data.get('viewMp4Url'):
            formats.append({
                'format_note': 'Camera stream',
                'url': str_or_none(data.get('viewMp4Url')),
                'url': data['viewMp4Url'],
                'width': int_or_none(traverse_obj(data, ('viewResolvtions', 0))),
                'height': int_or_none(traverse_obj(data, ('viewResolvtions', 1))),
                'format_id': str_or_none(traverse_obj(data, ('recording', 'id'))),
                'format_id': 'view',
                'ext': 'mp4',
                'filesize_approx': parse_filesize(str_or_none(traverse_obj(data, ('recording', 'fileSizeInMB')))),
                'preference': 0

@@ -116,14 +132,26 @@ class ZoomIE(InfoExtractor):
        if data.get('shareMp4Url'):
            formats.append({
                'format_note': 'Screen share stream',
                'url': str_or_none(data.get('shareMp4Url')),
                'url': data['shareMp4Url'],
                'width': int_or_none(traverse_obj(data, ('shareResolvtions', 0))),
                'height': int_or_none(traverse_obj(data, ('shareResolvtions', 1))),
                'format_id': str_or_none(traverse_obj(data, ('shareVideo', 'id'))),
                'format_id': 'share',
                'ext': 'mp4',
                'preference': -1
            })

        view_with_share_url = data.get('viewMp4WithshareUrl')
        if view_with_share_url:
            formats.append({
                **parse_resolution(self._search_regex(
                    r'_(\d+x\d+)\.mp4', url_basename(view_with_share_url), 'resolution', default=None)),
                'format_note': 'Screen share with camera',
                'url': view_with_share_url,
                'format_id': 'view_with_share',
                'ext': 'mp4',
                'preference': 1
            })

        return {
            'id': video_id,
            'title': str_or_none(traverse_obj(data, ('meet', 'topic'))),
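For the combined camera-plus-share stream, the only resolution hint is the file name, hence the `parse_resolution` call on its basename; e.g. (URL shape assumed):

    from yt_dlp.utils import parse_resolution, url_basename

    view_with_share_url = 'https://ssrweb.zoom.us/replay/GMT20220207_view_with_share_1280x720.mp4'  # hypothetical
    assert parse_resolution(url_basename(view_with_share_url)) == {'width': 1280, 'height': 720}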
@@ -255,7 +255,7 @@ class RequestsRH(RequestHandler, InstanceStoreMixin):
        handler.setFormatter(logging.Formatter('requests: %(message)s'))
        handler.addFilter(Urllib3LoggingFilter())
        logger.addHandler(handler)
        # Use ERROR to suppress pool reuse warning (could filter instead?)
        # TODO: Use a logger filter to suppress pool reuse warning instead
        logger.setLevel(logging.ERROR)

        if self.verbose:

yt_dlp/update.py
@@ -1,3 +1,5 @@
from __future__ import annotations

import atexit
import contextlib
import hashlib

@@ -7,6 +9,7 @@ import platform
import re
import subprocess
import sys
from dataclasses import dataclass
from zipimport import zipimporter

from .compat import functools  # isort: split

@@ -14,24 +17,35 @@ from .compat import compat_realpath, compat_shlex_quote
from .networking import Request
from .networking.exceptions import HTTPError, network_exceptions
from .utils import (
    NO_DEFAULT,
    Popen,
    cached_method,
    deprecation_warning,
    format_field,
    remove_end,
    remove_start,
    shell_quote,
    system_identifier,
    version_tuple,
)
from .version import CHANNEL, UPDATE_HINT, VARIANT, __version__
from .version import (
    CHANNEL,
    ORIGIN,
    RELEASE_GIT_HEAD,
    UPDATE_HINT,
    VARIANT,
    __version__,
)

UPDATE_SOURCES = {
    'stable': 'yt-dlp/yt-dlp',
    'nightly': 'yt-dlp/yt-dlp-nightly-builds',
    'master': 'yt-dlp/yt-dlp-master-builds',
}
REPOSITORY = UPDATE_SOURCES['stable']
_INVERSE_UPDATE_SOURCES = {value: key for key, value in UPDATE_SOURCES.items()}

_VERSION_RE = re.compile(r'(\d+\.)*\d+')
_HASH_PATTERN = r'[\da-f]{40}'
_COMMIT_RE = re.compile(rf'Generated from: https://(?:[^/?#]+/){{3}}commit/(?P<hash>{_HASH_PATTERN})')

API_BASE_URL = 'https://api.github.com/repos'

@@ -112,6 +126,10 @@ def is_non_updateable():
        detect_variant(), _NON_UPDATEABLE_REASONS['unknown' if VARIANT else 'other'])


def _get_binary_name():
    return format_field(_FILE_SUFFIXES, detect_variant(), template='yt-dlp%s', ignore=None, default=None)


def _get_system_deprecation():
    MIN_SUPPORTED, MIN_RECOMMENDED = (3, 7), (3, 8)

@@ -146,73 +164,117 @@ def _sha256_file(path):
    return h.hexdigest()


def _make_label(origin, tag, version=None):
    if '/' in origin:
        channel = _INVERSE_UPDATE_SOURCES.get(origin, origin)
    else:
        channel = origin
    label = f'{channel}@{tag}'
    if version and version != tag:
        label += f' build {version}'
    if channel != origin:
        label += f' from {origin}'
    return label

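# Illustrative labels (assumed inputs/outputs, mirroring the logic above):
#   _make_label('stable', '2023.10.13')                -> 'stable@2023.10.13'
#   _make_label('yt-dlp/yt-dlp', '2023.10.13')         -> 'stable@2023.10.13 from yt-dlp/yt-dlp'
#   _make_label('example/fork', 'mytag', '2023.10.13') -> 'example/fork@mytag build 2023.10.13'
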
@dataclass
class UpdateInfo:
    """
    Update target information

    Can be created by `query_update()` or manually.

    Attributes:
        tag                 The release tag that will be updated to. If from query_update,
                            the value is after API resolution and update spec processing.
                            The only property that is required.
        version             The actual numeric version (if available) of the binary to be updated to,
                            after API resolution and update spec processing. (default: None)
        requested_version   Numeric version of the binary being requested (if available),
                            after API resolution only. (default: None)
        commit              Commit hash (if available) of the binary to be updated to,
                            after API resolution and update spec processing. (default: None)
                            This value will only match the RELEASE_GIT_HEAD of prerelease builds.
        binary_name         Filename of the binary to be updated to. (default: current binary name)
        checksum            Expected checksum (if available) of the binary to be
                            updated to. (default: None)
    """
    tag: str
    version: str | None = None
    requested_version: str | None = None
    commit: str | None = None

    binary_name: str | None = _get_binary_name()
    checksum: str | None = None

    _has_update = True

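# A hedged usage sketch (not part of the diff): update() below also accepts a
# hand-built UpdateInfo, e.g. for a release hosted elsewhere. Only `tag` is
# required; without `checksum`, verification is skipped and automatic restart
# into the new binary is blocked.
#
#     Updater(ydl).update(UpdateInfo('2023.10.13', version='2023.10.13'))
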
class Updater:
    _exact = True
    # XXX: use class variables to simplify testing
    _channel = CHANNEL
    _origin = ORIGIN

    def __init__(self, ydl, target=None):
    def __init__(self, ydl, target: str | None = None):
        self.ydl = ydl
        # For backwards compat, target needs to be treated as if it could be None
        self.requested_channel, sep, self.requested_tag = (target or self._channel).rpartition('@')
        # Check if requested_tag is actually the requested repo/channel
        if not sep and ('/' in self.requested_tag or self.requested_tag in UPDATE_SOURCES):
            self.requested_channel = self.requested_tag
            self.requested_tag: str = None  # type: ignore (we set it later)
        elif not self.requested_channel:
            # User did not specify a channel, so we are requesting the default channel
            self.requested_channel = self._channel.partition('@')[0]

        self.target_channel, sep, self.target_tag = (target or CHANNEL).rpartition('@')
        # stable => stable@latest
        if not sep and ('/' in self.target_tag or self.target_tag in UPDATE_SOURCES):
            self.target_channel = self.target_tag
            self.target_tag = None
        elif not self.target_channel:
            self.target_channel = CHANNEL.partition('@')[0]

        if not self.target_tag:
            self.target_tag = 'latest'
        # --update should not be treated as an exact tag request even if CHANNEL has a @tag
        self._exact = bool(target) and target != self._channel
        if not self.requested_tag:
            # User did not specify a tag, so we request 'latest' and track that no exact tag was passed
            self.requested_tag = 'latest'
            self._exact = False
        elif self.target_tag != 'latest':
            self.target_tag = f'tags/{self.target_tag}'

        if '/' in self.target_channel:
            self._target_repo = self.target_channel
            if self.target_channel not in (CHANNEL, *UPDATE_SOURCES.values()):
        if '/' in self.requested_channel:
            # requested_channel is actually a repository
            self.requested_repo = self.requested_channel
            if not self.requested_repo.startswith('yt-dlp/') and self.requested_repo != self._origin:
                self.ydl.report_warning(
                    f'You are switching to an {self.ydl._format_err("unofficial", "red")} executable '
                    f'from {self.ydl._format_err(self._target_repo, self.ydl.Styles.EMPHASIS)}. '
                    f'from {self.ydl._format_err(self.requested_repo, self.ydl.Styles.EMPHASIS)}. '
                    f'Run {self.ydl._format_err("at your own risk", "light red")}')
                self._block_restart('Automatically restarting into custom builds is disabled for security reasons')
        else:
            self._target_repo = UPDATE_SOURCES.get(self.target_channel)
            if not self._target_repo:
            # Check if requested_channel resolves to a known repository or else raise
            self.requested_repo = UPDATE_SOURCES.get(self.requested_channel)
            if not self.requested_repo:
                self._report_error(
                    f'Invalid update channel {self.target_channel!r} requested. '
                    f'Invalid update channel {self.requested_channel!r} requested. '
                    f'Valid channels are {", ".join(UPDATE_SOURCES)}', True)

    def _version_compare(self, a, b, channel=CHANNEL):
        if self._exact and channel != self.target_channel:
            return False
        self._identifier = f'{detect_variant()} {system_identifier()}'

        if _VERSION_RE.fullmatch(f'{a}.{b}'):
            a, b = version_tuple(a), version_tuple(b)
            return a == b if self._exact else a >= b
        return a == b
    @property
    def current_version(self):
        """Current version"""
        return __version__

    @functools.cached_property
    def _tag(self):
        if self._version_compare(self.current_version, self.latest_version):
            return self.target_tag
    @property
    def current_commit(self):
        """Current commit hash"""
        return RELEASE_GIT_HEAD

        identifier = f'{detect_variant()} {self.target_channel} {system_identifier()}'
        for line in self._download('_update_spec', 'latest').decode().splitlines():
            if not line.startswith('lock '):
                continue
            _, tag, pattern = line.split(' ', 2)
            if re.match(pattern, identifier):
                if not self._exact:
                    return f'tags/{tag}'
                elif self.target_tag == 'latest' or not self._version_compare(
                        tag, self.target_tag[5:], channel=self.target_channel):
                    self._report_error(
                        f'yt-dlp cannot be updated above {tag} since you are on an older Python version', True)
                    return f'tags/{self.current_version}'
        return self.target_tag
    def _download_asset(self, name, tag=None):
        if not tag:
            tag = self.requested_tag

    @cached_method
    def _get_version_info(self, tag):
        url = f'{API_BASE_URL}/{self._target_repo}/releases/{tag}'
        path = 'latest/download' if tag == 'latest' else f'download/{tag}'
        url = f'https://github.com/{self.requested_repo}/releases/{path}/{name}'
        self.ydl.write_debug(f'Downloading {name} from {url}')
        return self.ydl.urlopen(url).read()

    def _call_api(self, tag):
        tag = f'tags/{tag}' if tag != 'latest' else tag
        url = f'{API_BASE_URL}/{self.requested_repo}/releases/{tag}'
        self.ydl.write_debug(f'Fetching release info: {url}')
        return json.loads(self.ydl.urlopen(Request(url, headers={
            'Accept': 'application/vnd.github+json',

@@ -220,105 +282,175 @@ class Updater:
            'X-GitHub-Api-Version': '2022-11-28',
        })).read().decode())

    @property
    def current_version(self):
        """Current version"""
        return __version__
    def _get_version_info(self, tag: str) -> tuple[str | None, str | None]:
        if _VERSION_RE.fullmatch(tag):
            return tag, None

    @staticmethod
    def _label(channel, tag):
        """Label for a given channel and tag"""
        return f'{channel}@{remove_start(tag, "tags/")}'
        api_info = self._call_api(tag)

    def _get_actual_tag(self, tag):
        if tag.startswith('tags/'):
            return tag[5:]
        return self._get_version_info(tag)['tag_name']
        if tag == 'latest':
            requested_version = api_info['tag_name']
        else:
            match = re.search(rf'\s+(?P<version>{_VERSION_RE.pattern})$', api_info.get('name', ''))
            requested_version = match.group('version') if match else None

    @property
    def new_version(self):
        """Version of the latest release we can update to"""
        return self._get_actual_tag(self._tag)
        if re.fullmatch(_HASH_PATTERN, api_info.get('target_commitish', '')):
            target_commitish = api_info['target_commitish']
        else:
            match = _COMMIT_RE.match(api_info.get('body', ''))
            target_commitish = match.group('hash') if match else None

    @property
    def latest_version(self):
        """Version of the target release"""
        return self._get_actual_tag(self.target_tag)
        if not (requested_version or target_commitish):
            self._report_error('One of either version or commit hash must be available on the release', expected=True)

    @property
    def has_update(self):
        """Whether there is an update available"""
        return not self._version_compare(self.current_version, self.new_version)
        return requested_version, target_commitish

    @functools.cached_property
    def filename(self):
        """Filename of the executable"""
        return compat_realpath(_get_variant_and_executable_path()[1])
    def _download_update_spec(self, source_tags):
        for tag in source_tags:
            try:
                return self._download_asset('_update_spec', tag=tag).decode()
            except network_exceptions as error:
                if isinstance(error, HTTPError) and error.status == 404:
                    continue
                self._report_network_error(f'fetch update spec: {error}')

    def _download(self, name, tag):
        slug = 'latest/download' if tag == 'latest' else f'download/{tag[5:]}'
        url = f'https://github.com/{self._target_repo}/releases/{slug}/{name}'
        self.ydl.write_debug(f'Downloading {name} from {url}')
        return self.ydl.urlopen(url).read()

    @functools.cached_property
    def release_name(self):
        """The release filename"""
        return f'yt-dlp{_FILE_SUFFIXES[detect_variant()]}'

    @functools.cached_property
    def release_hash(self):
        """Hash of the latest release"""
        hash_data = dict(ln.split()[::-1] for ln in self._download('SHA2-256SUMS', self._tag).decode().splitlines())
        return hash_data[self.release_name]

    def _report_error(self, msg, expected=False):
        self.ydl.report_error(msg, tb=False if expected else None)
        self.ydl._download_retcode = 100

    def _report_permission_error(self, file):
        self._report_error(f'Unable to write to {file}; Try running as administrator', True)

    def _report_network_error(self, action, delim=';'):
        self._report_error(
            f'Unable to {action}{delim} visit '
            f'https://github.com/{self._target_repo}/releases/{self.target_tag.replace("tags/", "tag/")}', True)
                f'The requested tag {self.requested_tag} does not exist for {self.requested_repo}', True)
            return None

    def _process_update_spec(self, lockfile: str, resolved_tag: str):
        lines = lockfile.splitlines()
        is_version2 = any(line.startswith('lockV2 ') for line in lines)

        for line in lines:
            if is_version2:
                if not line.startswith(f'lockV2 {self.requested_repo} '):
                    continue
                _, _, tag, pattern = line.split(' ', 3)
            else:
                if not line.startswith('lock '):
                    continue
                _, tag, pattern = line.split(' ', 2)

            if re.match(pattern, self._identifier):
                if _VERSION_RE.fullmatch(tag):
                    if not self._exact:
                        return tag
                    elif self._version_compare(tag, resolved_tag):
                        return resolved_tag
                elif tag != resolved_tag:
                    continue

                self._report_error(
                    f'yt-dlp cannot be updated to {resolved_tag} since you are on an older Python version', True)
                return None

        return resolved_tag

    def _version_compare(self, a: str, b: str):
        """
        Compare two version strings

        This function SHOULD NOT be called if self._exact == True
        """
        if _VERSION_RE.fullmatch(f'{a}.{b}'):
            return version_tuple(a) >= version_tuple(b)
        return a == b

|
||||
"""Fetches and returns info about the available update"""
|
||||
if not self.requested_repo:
|
||||
self._report_error('No target repository could be determined from input')
|
||||
return None
|
||||
|
||||
def check_update(self):
|
||||
"""Report whether there is an update available"""
|
||||
if not self._target_repo:
|
||||
return False
|
||||
try:
|
||||
self.ydl.to_screen((
|
||||
f'Available version: {self._label(self.target_channel, self.latest_version)}, ' if self.target_tag == 'latest' else ''
|
||||
) + f'Current version: {self._label(CHANNEL, self.current_version)}')
|
||||
requested_version, target_commitish = self._get_version_info(self.requested_tag)
|
||||
except network_exceptions as e:
|
||||
return self._report_network_error(f'obtain version info ({e})', delim='; Please try again later or')
|
||||
self._report_network_error(f'obtain version info ({e})', delim='; Please try again later or')
|
||||
return None
|
||||
|
||||
if self._exact and self._origin != self.requested_repo:
|
||||
has_update = True
|
||||
elif requested_version:
|
||||
if self._exact:
|
||||
has_update = self.current_version != requested_version
|
||||
else:
|
||||
has_update = not self._version_compare(self.current_version, requested_version)
|
||||
elif target_commitish:
|
||||
has_update = target_commitish != self.current_commit
|
||||
else:
|
||||
has_update = False
|
||||
|
||||
resolved_tag = requested_version if self.requested_tag == 'latest' else self.requested_tag
|
||||
current_label = _make_label(self._origin, self._channel.partition("@")[2] or self.current_version, self.current_version)
|
||||
requested_label = _make_label(self.requested_repo, resolved_tag, requested_version)
|
||||
latest_or_requested = f'{"Latest" if self.requested_tag == "latest" else "Requested"} version: {requested_label}'
|
||||
if not has_update:
|
||||
if _output:
|
||||
self.ydl.to_screen(f'{latest_or_requested}\nyt-dlp is up to date ({current_label})')
|
||||
return None
|
||||
|
||||
update_spec = self._download_update_spec(('latest', None) if requested_version else (None,))
|
||||
if not update_spec:
|
||||
return None
|
||||
# `result_` prefixed vars == post-_process_update_spec() values
|
||||
result_tag = self._process_update_spec(update_spec, resolved_tag)
|
||||
if not result_tag or result_tag == self.current_version:
|
||||
return None
|
||||
elif result_tag == resolved_tag:
|
||||
result_version = requested_version
|
||||
elif _VERSION_RE.fullmatch(result_tag):
|
||||
result_version = result_tag
|
||||
else: # actual version being updated to is unknown
|
||||
result_version = None
|
||||
|
||||
checksum = None
|
||||
# Non-updateable variants can get update_info but need to skip checksum
|
||||
if not is_non_updateable():
|
||||
self.ydl.to_screen(f'Current Build Hash: {_sha256_file(self.filename)}')
|
||||
try:
|
||||
hashes = self._download_asset('SHA2-256SUMS', result_tag)
|
||||
except network_exceptions as error:
|
||||
if not isinstance(error, HTTPError) or error.status != 404:
|
||||
self._report_network_error(f'fetch checksums: {error}')
|
||||
return None
|
||||
self.ydl.report_warning('No hash information found for the release, skipping verification')
|
||||
else:
|
||||
for ln in hashes.decode().splitlines():
|
||||
if ln.endswith(_get_binary_name()):
|
||||
checksum = ln.split()[0]
|
||||
break
|
||||
if not checksum:
|
||||
self.ydl.report_warning('The hash could not be found in the checksum file, skipping verification')
|
||||
|
||||
if self.has_update:
|
||||
return True
|
||||
if _output:
|
||||
update_label = _make_label(self.requested_repo, result_tag, result_version)
|
||||
self.ydl.to_screen(
|
||||
f'Current version: {current_label}\n{latest_or_requested}'
|
||||
+ (f'\nUpgradable to: {update_label}' if update_label != requested_label else ''))
|
||||
|
||||
if self.target_tag == self._tag:
|
||||
self.ydl.to_screen(f'yt-dlp is up to date ({self._label(CHANNEL, self.current_version)})')
|
||||
elif not self._exact:
|
||||
self.ydl.report_warning('yt-dlp cannot be updated any further since you are on an older Python version')
|
||||
return False
|
||||
return UpdateInfo(
|
||||
tag=result_tag,
|
||||
version=result_version,
|
||||
requested_version=requested_version,
|
||||
commit=target_commitish if result_tag == resolved_tag else None,
|
||||
checksum=checksum)
|
||||
|
||||
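    # Hedged sketch of driving the new query/apply split from an embedding
    # application (`ydl` is assumed to be a YoutubeDL instance):
    #
    #     updater = Updater(ydl, 'nightly@latest')
    #     update_info = updater.query_update(_output=True)
    #     if update_info:
    #         updater.update(update_info)  # download, verify, install
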
    def update(self):
    def update(self, update_info=NO_DEFAULT):
        """Update yt-dlp executable to the latest version"""
        if not self.check_update():
            return
        if update_info is NO_DEFAULT:
            update_info = self.query_update(_output=True)
        if not update_info:
            return False

        err = is_non_updateable()
        if err:
            return self._report_error(err, True)
        self.ydl.to_screen(f'Updating to {self._label(self.target_channel, self.new_version)} ...')
        if (_VERSION_RE.fullmatch(self.target_tag[5:])
                and version_tuple(self.target_tag[5:]) < (2023, 3, 2)):
            self.ydl.report_warning('You are downgrading to a version without --update-to')
            self._block_restart('Cannot automatically restart to a version without --update-to')
            self._report_error(err, True)
            return False

        self.ydl.to_screen(f'Current Build Hash: {_sha256_file(self.filename)}')

        update_label = _make_label(self.requested_repo, update_info.tag, update_info.version)
        self.ydl.to_screen(f'Updating to {update_label} ...')

        directory = os.path.dirname(self.filename)
        if not os.access(self.filename, os.W_OK):

@@ -337,20 +469,17 @@ class Updater:
            return self._report_error('Unable to remove the old version')

        try:
            newcontent = self._download(self.release_name, self._tag)
            newcontent = self._download_asset(update_info.binary_name, update_info.tag)
        except network_exceptions as e:
            if isinstance(e, HTTPError) and e.status == 404:
                return self._report_error(
                    f'The requested tag {self._label(self.target_channel, self.target_tag)} does not exist', True)
            return self._report_network_error(f'fetch updates: {e}')
                    f'The requested tag {self.requested_repo}@{update_info.tag} does not exist', True)
            return self._report_network_error(f'fetch updates: {e}', tag=update_info.tag)

        try:
            expected_hash = self.release_hash
        except Exception:
            self.ydl.report_warning('no hash information found for the release')
        else:
            if hashlib.sha256(newcontent).hexdigest() != expected_hash:
                return self._report_network_error('verify the new executable')
        if not update_info.checksum:
            self._block_restart('Automatically restarting into unverified builds is disabled for security reasons')
        elif hashlib.sha256(newcontent).hexdigest() != update_info.checksum:
            return self._report_network_error('verify the new executable', tag=update_info.tag)

        try:
            with open(new_filename, 'wb') as outf:

@@ -387,9 +516,14 @@ class Updater:
            return self._report_error(
                f'Unable to set permissions. Run: sudo chmod a+rx {compat_shlex_quote(self.filename)}')

        self.ydl.to_screen(f'Updated yt-dlp to {self._label(self.target_channel, self.new_version)}')
        self.ydl.to_screen(f'Updated yt-dlp to {update_label}')
        return True

    @functools.cached_property
    def filename(self):
        """Filename of the executable"""
        return compat_realpath(_get_variant_and_executable_path()[1])

    @functools.cached_property
    def cmd(self):
        """The command-line to run the executable, if known"""

@@ -412,6 +546,71 @@ class Updater:
            return self.ydl._download_retcode
        self.restart = wrapper

    def _report_error(self, msg, expected=False):
        self.ydl.report_error(msg, tb=False if expected else None)
        self.ydl._download_retcode = 100

    def _report_permission_error(self, file):
        self._report_error(f'Unable to write to {file}; try running as administrator', True)

    def _report_network_error(self, action, delim=';', tag=None):
        if not tag:
            tag = self.requested_tag
        self._report_error(
            f'Unable to {action}{delim} visit https://github.com/{self.requested_repo}/releases/'
            + tag if tag == "latest" else f"tag/{tag}", True)

    # XXX: Everything below this line in this class is deprecated / for compat only
    @property
    def _target_tag(self):
        """Deprecated; requested tag with 'tags/' prepended when necessary for API calls"""
        return f'tags/{self.requested_tag}' if self.requested_tag != 'latest' else self.requested_tag

    def _check_update(self):
        """Deprecated; report whether there is an update available"""
        return bool(self.query_update(_output=True))

    def __getattr__(self, attribute: str):
        """Compat getter function for deprecated attributes"""
        deprecated_props_map = {
            'check_update': '_check_update',
            'target_tag': '_target_tag',
            'target_channel': 'requested_channel',
        }
        update_info_props_map = {
            'has_update': '_has_update',
            'new_version': 'version',
            'latest_version': 'requested_version',
            'release_name': 'binary_name',
            'release_hash': 'checksum',
        }

        if attribute not in deprecated_props_map and attribute not in update_info_props_map:
            raise AttributeError(f'{type(self).__name__!r} object has no attribute {attribute!r}')

        msg = f'{type(self).__name__}.{attribute} is deprecated and will be removed in a future version'
        if attribute in deprecated_props_map:
            source_name = deprecated_props_map[attribute]
            if not source_name.startswith('_'):
                msg += f'. Please use {source_name!r} instead'
            source = self
            mapping = deprecated_props_map

        else:  # attribute in update_info_props_map
            msg += '. Please call query_update() instead'
            source = self.query_update()
            if source is None:
                source = UpdateInfo('', None, None, None)
                source._has_update = False
            mapping = update_info_props_map

        deprecation_warning(msg)
        for target_name, source_name in mapping.items():
            value = getattr(source, source_name)
            setattr(self, target_name, value)

        return getattr(self, attribute)


def run_update(ydl):
    """Update the program file with the latest version from the repository

@@ -420,45 +619,4 @@ def run_update(ydl):
    return Updater(ydl).update()


# Deprecated
def update_self(to_screen, verbose, opener):
    import traceback

    deprecation_warning(f'"{__name__}.update_self" is deprecated and may be removed '
                        f'in a future version. Use "{__name__}.run_update(ydl)" instead')

    printfn = to_screen

    class FakeYDL():
        to_screen = printfn

        def report_warning(self, msg, *args, **kwargs):
            return printfn(f'WARNING: {msg}', *args, **kwargs)

        def report_error(self, msg, tb=None):
            printfn(f'ERROR: {msg}')
            if not verbose:
                return
            if tb is None:
                # Copied from YoutubeDL.trouble
                if sys.exc_info()[0]:
                    tb = ''
                    if hasattr(sys.exc_info()[1], 'exc_info') and sys.exc_info()[1].exc_info[0]:
                        tb += ''.join(traceback.format_exception(*sys.exc_info()[1].exc_info))
                    tb += traceback.format_exc()
                else:
                    tb_data = traceback.format_list(traceback.extract_stack())
                    tb = ''.join(tb_data)
                if tb:
                    printfn(tb)

        def write_debug(self, msg, *args, **kwargs):
            printfn(f'[debug] {msg}', *args, **kwargs)

        def urlopen(self, url):
            return opener.open(url)

    return run_update(FakeYDL())


__all__ = ['Updater']
@@ -9,3 +9,7 @@ VARIANT = None
UPDATE_HINT = None

CHANNEL = 'stable'

ORIGIN = 'yt-dlp/yt-dlp'

_pkg_version = '2023.10.13'