mirror of https://github.com/yt-dlp/yt-dlp.git synced 2026-01-17 04:11:41 +00:00

Compare commits


10 Commits

Author SHA1 Message Date
github-actions[bot]
7c27965ff6 Release 2025.09.05
Created by: bashonly

:ci skip all
2025-09-05 22:46:00 +00:00
bashonly
50136eeeb3 [build] Overhaul Linux builds and refactor release workflow (#13997)
- Use `manylinux-shared` images for Linux builds
- Discontinue `yt-dlp_linux_armv7l`/`linux_armv7l_exe` release binary
- Add `yt-dlp_linux_armv7l.zip`/`linux_armv7l_dir` release binary
- Add `yt-dlp_musllinux` and `yt-dlp_musllinux_aarch64` release binaries
- Migrate `linux_exe` build strategy from staticx+musl to manylinux2014/glibc2.17
- Rewrite release.yml's "unholy bash monstrosity" as devscripts/setup_variables.py

Closes #10072, Closes #10630, Closes #10578, Closes #13976, Closes #13977, Closes #14106
Authored by: bashonly
2025-09-05 22:38:20 +00:00
Chase Ryan
603acdff07 [ie/charlierose] Fix extractor (#14231)
Authored by: gitchasing
2025-09-05 23:28:52 +02:00
sepro
d925e92b71 [ie/vevo] Restore extractors (#14203)
Partially reverts 6f4c1bb593

Authored by: seproDev
2025-08-31 00:41:52 +02:00
sepro
ed24640943 [ie/lrt] Fix extractors (#14193)
Closes #13501
Authored by: seproDev
2025-08-30 00:28:44 +02:00
sepro
76bb46002c Fix --id deprecation warning (#14190)
Authored by: seproDev
2025-08-29 22:06:53 +02:00
InvalidUsernameException
1e28f6bf74 [ie/kick:vod] Support ongoing livestream VODs (#14154)
Authored by: InvalidUsernameException
2025-08-28 01:26:49 +00:00
garret1317
0b51005b48 [ie/ITVBTCC] Fix extractor (#14161)
Closes #14156
Authored by: garret1317
2025-08-28 01:19:25 +00:00
Abdulmohsen
223baa81f6 [ie/tver] Extract more metadata (#14165)
Authored by: arabcoders
2025-08-28 01:18:10 +00:00
Gegham Zakaryan
18fe696df9 [ie/googledrive] Fix subtitles extraction (#14139)
Authored by: zakaryan2004
2025-08-28 01:12:08 +00:00
35 changed files with 1720 additions and 501 deletions

.github/workflows/build.yml

@@ -12,10 +12,13 @@ on:
       unix:
         default: true
         type: boolean
-      linux_static:
+      linux:
         default: true
         type: boolean
-      linux_arm:
+      linux_armv7l:
+        default: true
+        type: boolean
+      musllinux:
         default: true
         type: boolean
       macos:
@@ -37,7 +40,9 @@ on:
       version:
         description: |
           VERSION: yyyy.mm.dd[.rev] or rev
-        required: true
+          (default: auto-generated)
+        required: false
+        default: ''
         type: string
       channel:
         description: |
@@ -49,12 +54,16 @@ on:
         description: yt-dlp, yt-dlp.tar.gz
         default: true
         type: boolean
-      linux_static:
-        description: yt-dlp_linux
+      linux:
+        description: yt-dlp_linux, yt-dlp_linux.zip, yt-dlp_linux_aarch64, yt-dlp_linux_aarch64.zip
         default: true
         type: boolean
-      linux_arm:
-        description: yt-dlp_linux_aarch64, yt-dlp_linux_armv7l
+      linux_armv7l:
+        description: yt-dlp_linux_armv7l.zip
+        default: true
+        type: boolean
+      musllinux:
+        description: yt-dlp_musllinux, yt-dlp_musllinux.zip, yt-dlp_musllinux_aarch64, yt-dlp_musllinux_aarch64.zip
         default: true
         type: boolean
       macos:
@@ -81,16 +90,51 @@ jobs:
     runs-on: ubuntu-latest
     outputs:
       origin: ${{ steps.process_origin.outputs.origin }}
+      timestamp: ${{ steps.process_origin.outputs.timestamp }}
+      version: ${{ steps.process_origin.outputs.version }}
    steps:
       - name: Process origin
         id: process_origin
+        env:
+          ORIGIN: ${{ inputs.origin }}
+          REPOSITORY: ${{ github.repository }}
+          VERSION: ${{ inputs.version }}
+        shell: python
         run: |
-          echo "origin=${{ inputs.origin == 'current repo' && github.repository || inputs.origin }}" | tee "$GITHUB_OUTPUT"
+          import datetime as dt
+          import json
+          import os
+          import re
+
+          origin = os.environ['ORIGIN']
+          timestamp = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')
+          version = os.getenv('VERSION')
+          if version and '.' not in version:
+              # build.yml was dispatched with only a revision as the version input value
+              version_parts = [*timestamp.split('.')[:3], version]
+          elif not version:
+              # build.yml was dispatched without any version input value, so include .HHMMSS revision
+              version_parts = timestamp.split('.')[:4]
+          else:
+              # build.yml was called or dispatched with a complete version input value
+              version_parts = version.split('.')
+          assert all(re.fullmatch(r'[0-9]+', part) for part in version_parts), 'Version must be numeric'
+          outputs = {
+              'origin': os.environ['REPOSITORY'] if origin == 'current repo' else origin,
+              'timestamp': timestamp,
+              'version': '.'.join(version_parts),
+          }
+          print(json.dumps(outputs, indent=2))
+          with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
+              f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))

   unix:
     needs: process
     if: inputs.unix
     runs-on: ubuntu-latest
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
     steps:
       - uses: actions/checkout@v4
         with:
@@ -103,7 +147,7 @@ jobs:
           sudo apt -y install zip pandoc man sed
       - name: Prepare
         run: |
-          python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
+          python devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
           python devscripts/update_changelog.py -vv
           python devscripts/make_lazy_extractors.py
       - name: Build Unix platform-independent binary
@@ -117,7 +161,7 @@
           version="$(./yt-dlp --version)"
           ./yt-dlp_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
           downgraded_version="$(./yt-dlp_downgraded --version)"
-          [[ "$version" != "$downgraded_version" ]]
+          [[ "${version}" != "${downgraded_version}" ]]
       - name: Upload artifacts
         uses: actions/upload-artifact@v4
         with:
@@ -127,99 +171,156 @@ jobs:
             yt-dlp.tar.gz
           compression-level: 0

-  linux_static:
-    needs: process
-    if: inputs.linux_static
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Build static executable
-        env:
-          channel: ${{ inputs.channel }}
-          origin: ${{ needs.process.outputs.origin }}
-          version: ${{ inputs.version }}
-        run: |
-          mkdir ~/build
-          cd bundle/docker
-          docker compose up --build static
-          sudo chown "${USER}:docker" ~/build/yt-dlp_linux
-      - name: Verify --update-to
-        if: vars.UPDATE_TO_VERIFICATION
-        run: |
-          chmod +x ~/build/yt-dlp_linux
-          cp ~/build/yt-dlp_linux ~/build/yt-dlp_linux_downgraded
-          version="$(~/build/yt-dlp_linux --version)"
-          ~/build/yt-dlp_linux_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
-          downgraded_version="$(~/build/yt-dlp_linux_downgraded --version)"
-          [[ "$version" != "$downgraded_version" ]]
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          name: build-bin-${{ github.job }}
-          path: |
-            ~/build/yt-dlp_linux
-          compression-level: 0
-
-  linux_arm:
-    needs: process
-    if: inputs.linux_arm
-    permissions:
-      contents: read
-      packages: write # for creating cache
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        architecture:
-          - armv7
-          - aarch64
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          path: ./repo
-      - name: Virtualized Install, Prepare & Build
-        uses: yt-dlp/run-on-arch-action@v3
-        with:
-          # Ref: https://github.com/uraimo/run-on-arch-action/issues/55
-          env: |
-            GITHUB_WORKFLOW: build
-          githubToken: ${{ github.token }} # To cache image
-          arch: ${{ matrix.architecture }}
-          distro: ubuntu20.04 # Standalone executable should be built on minimum supported OS
-          dockerRunArgs: --volume "${PWD}/repo:/repo"
-          install: | # Installing Python 3.10 from the Deadsnakes repo raises errors
-            apt update
-            apt -y install zlib1g-dev libffi-dev python3.9 python3.9-dev python3.9-distutils python3-pip \
-              python3-secretstorage # Cannot build cryptography wheel in virtual armv7 environment
-            python3.9 -m pip install -U pip wheel 'setuptools>=71.0.2'
-            # XXX: Keep this in sync with pyproject.toml (it can't be accessed at this stage) and exclude secretstorage
-            python3.9 -m pip install -U Pyinstaller mutagen pycryptodomex brotli certifi cffi \
-              'requests>=2.32.2,<3' 'urllib3>=2.0.2,<3' 'websockets>=13.0'
-          run: |
-            cd repo
-            python3.9 devscripts/install_deps.py -o --include build
-            python3.9 devscripts/install_deps.py --include pyinstaller # Cached versions may be out of date
-            python3.9 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
-            python3.9 devscripts/make_lazy_extractors.py
-            python3.9 -m bundle.pyinstaller
-            if ${{ vars.UPDATE_TO_VERIFICATION && 'true' || 'false' }}; then
-              arch="${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}"
-              chmod +x ./dist/yt-dlp_linux_${arch}
-              cp ./dist/yt-dlp_linux_${arch} ./dist/yt-dlp_linux_${arch}_downgraded
-              version="$(./dist/yt-dlp_linux_${arch} --version)"
-              ./dist/yt-dlp_linux_${arch}_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
-              downgraded_version="$(./dist/yt-dlp_linux_${arch}_downgraded --version)"
-              [[ "$version" != "$downgraded_version" ]]
-            fi
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          name: build-bin-linux_${{ matrix.architecture }}
-          path: | # run-on-arch-action designates armv7l as armv7
-            repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}
-          compression-level: 0
+  linux:
+    needs: process
+    if: inputs.linux
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - exe: yt-dlp_linux
+            platform: x86_64
+            runner: ubuntu-24.04
+          - exe: yt-dlp_linux_aarch64
+            platform: aarch64
+            runner: ubuntu-24.04-arm
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      EXE_NAME: ${{ matrix.exe }}
+    steps:
+      - uses: actions/checkout@v4
+      - name: Build executable
+        env:
+          SERVICE: linux_${{ matrix.platform }}
+        run: |
+          mkdir -p ./dist
+          pushd bundle/docker
+          docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
+          popd
+          sudo chown "${USER}:docker" "./dist/${EXE_NAME}"
+      - name: Verify executable in container
+        if: vars.UPDATE_TO_VERIFICATION
+        env:
+          SERVICE: linux_${{ matrix.platform }}_verify
+        run: |
+          cd bundle/docker
+          docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
+      - name: Verify --update-to
+        if: vars.UPDATE_TO_VERIFICATION
+        run: |
+          chmod +x "./dist/${EXE_NAME}"
+          mkdir -p ~/testing
+          cp "./dist/${EXE_NAME}" ~/testing/"${EXE_NAME}_downgraded"
+          version="$("./dist/${EXE_NAME}" --version)"
+          ~/testing/"${EXE_NAME}_downgraded" -v --update-to yt-dlp/yt-dlp@2023.03.04
+          downgraded_version="$(~/testing/"${EXE_NAME}_downgraded" --version)"
+          [[ "${version}" != "${downgraded_version}" ]]
+      - name: Upload artifacts
+        uses: actions/upload-artifact@v4
+        with:
+          name: build-bin-${{ github.job }}_${{ matrix.platform }}
+          path: |
+            dist/${{ matrix.exe }}*
+          compression-level: 0
+
+  linux_armv7l:
+    needs: process
+    if: inputs.linux_armv7l
+    permissions:
+      contents: read
+    runs-on: ubuntu-24.04-arm
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      EXE_NAME: yt-dlp_linux_armv7l
+    steps:
+      - uses: actions/checkout@v4
+      - name: Cache requirements
+        id: cache-venv
+        uses: actions/cache@v4
+        env:
+          SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
+        with:
+          path: |
+            ~/yt-dlp-build-venv
+          key: cache-reqs-${{ github.job }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
+          restore-keys: |
+            cache-reqs-${{ github.job }}-${{ github.ref }}-
+            cache-reqs-${{ github.job }}-
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+        with:
+          platforms: linux/arm/v7
+      - name: Build executable
+        env:
+          SERVICE: linux_armv7l
+        run: |
+          mkdir -p ./dist
+          mkdir -p ~/yt-dlp-build-venv
+          cd bundle/docker
+          docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
+      - name: Verify executable in container
+        if: vars.UPDATE_TO_VERIFICATION
+        env:
+          SERVICE: linux_armv7l_verify
+        run: |
+          cd bundle/docker
+          docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
+      - name: Upload artifacts
+        uses: actions/upload-artifact@v4
+        with:
+          name: build-bin-${{ github.job }}
+          path: |
+            dist/yt-dlp_linux_armv7l.zip
+          compression-level: 0
+
+  musllinux:
+    needs: process
+    if: inputs.musllinux
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - exe: yt-dlp_musllinux
+            platform: x86_64
+            runner: ubuntu-24.04
+          - exe: yt-dlp_musllinux_aarch64
+            platform: aarch64
+            runner: ubuntu-24.04-arm
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      EXE_NAME: ${{ matrix.exe }}
+    steps:
+      - uses: actions/checkout@v4
+      - name: Build executable
+        env:
+          SERVICE: musllinux_${{ matrix.platform }}
+        run: |
+          mkdir -p ./dist
+          pushd bundle/docker
+          docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
+          popd
+          sudo chown "${USER}:docker" "./dist/${EXE_NAME}"
+      - name: Verify executable in container
+        if: vars.UPDATE_TO_VERIFICATION
+        env:
+          SERVICE: musllinux_${{ matrix.platform }}_verify
+        run: |
+          cd bundle/docker
+          docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
+      - name: Upload artifacts
+        uses: actions/upload-artifact@v4
+        with:
+          name: build-bin-${{ github.job }}_${{ matrix.platform }}
+          path: |
+            dist/${{ matrix.exe }}*
+          compression-level: 0

   macos:
@@ -227,22 +328,28 @@ jobs:
     if: inputs.macos
     permissions:
       contents: read
-      actions: write # For cleaning up cache
     runs-on: macos-14
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
     steps:
       - uses: actions/checkout@v4
       # NB: Building universal2 does not work with python from actions/setup-python
-      - name: Restore cached requirements
-        id: restore-cache
-        uses: actions/cache/restore@v4
+      - name: Cache requirements
+        id: cache-venv
+        uses: actions/cache@v4
         env:
           SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
         with:
           path: |
             ~/yt-dlp-build-venv
-          key: cache-reqs-${{ github.job }}-${{ github.ref }}
+          key: cache-reqs-${{ github.job }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
+          restore-keys: |
+            cache-reqs-${{ github.job }}-${{ github.ref }}-
+            cache-reqs-${{ github.job }}-
       - name: Install Requirements
         run: |
@@ -287,7 +394,7 @@ jobs:
       - name: Prepare
         run: |
-          python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
+          python3 devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
           python3 devscripts/make_lazy_extractors.py
       - name: Build
         run: |
@@ -315,27 +422,11 @@
             dist/yt-dlp_macos.zip
           compression-level: 0
-      - name: Cleanup cache
-        if: steps.restore-cache.outputs.cache-hit == 'true'
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          cache_key: cache-reqs-${{ github.job }}-${{ github.ref }}
-        run: |
-          gh cache delete "${cache_key}"
-      - name: Cache requirements
-        uses: actions/cache/save@v4
-        with:
-          path: |
-            ~/yt-dlp-build-venv
-          key: cache-reqs-${{ github.job }}-${{ github.ref }}

   windows:
     needs: process
     if: inputs.windows
     permissions:
       contents: read
-      actions: write # For cleaning up cache
     runs-on: ${{ matrix.runner }}
     strategy:
       fail-fast: false
@@ -353,6 +444,14 @@
             runner: windows-11-arm
             python_version: '3.13' # arm64 only has Python >= 3.11 available
             suffix: '_arm64'
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      SUFFIX: ${{ matrix.suffix }}
+      BASE_CACHE_KEY: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}
+      # Use custom PyInstaller built with https://github.com/yt-dlp/Pyinstaller-builds
+      PYINSTALLER_URL: https://yt-dlp.github.io/Pyinstaller-Builds/${{ matrix.arch }}/pyinstaller-6.15.0-py3-none-any.whl
     steps:
       - uses: actions/checkout@v4
@@ -361,49 +460,60 @@
           python-version: ${{ matrix.python_version }}
           architecture: ${{ matrix.arch }}
-      - name: Restore cached requirements
-        id: restore-cache
+      - name: Cache requirements
+        id: cache-venv
         if: matrix.arch == 'arm64'
-        uses: actions/cache/restore@v4
+        uses: actions/cache@v4
         env:
           SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
         with:
           path: |
             /yt-dlp-build-venv
-          key: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}-${{ github.ref }}
+          key: ${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
+          restore-keys: |
+            ${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-
+            ${{ env.BASE_CACHE_KEY }}-
       - name: Install Requirements
+        env:
+          ARCH: ${{ matrix.arch }}
+        shell: pwsh
         run: |
           python -m venv /yt-dlp-build-venv
           /yt-dlp-build-venv/Scripts/Activate.ps1
           python devscripts/install_deps.py -o --include build
-          python devscripts/install_deps.py ${{ (matrix.arch != 'x86' && '--include curl-cffi') || '' }}
-          # Use custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
-          python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/${{ matrix.arch }}/pyinstaller-6.15.0-py3-none-any.whl"
+          if ("${Env:ARCH}" -eq "x86") {
+            python devscripts/install_deps.py
+          } else {
+            python devscripts/install_deps.py --include curl-cffi
+          }
+          python -m pip install -U "${Env:PYINSTALLER_URL}"
       - name: Prepare
+        shell: pwsh
         run: |
-          python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
+          python devscripts/update-version.py -c "${Env:CHANNEL}" -r "${Env:ORIGIN}" "${Env:VERSION}"
           python devscripts/make_lazy_extractors.py
       - name: Build
+        shell: pwsh
         run: |
           /yt-dlp-build-venv/Scripts/Activate.ps1
           python -m bundle.pyinstaller
           python -m bundle.pyinstaller --onedir
-          Compress-Archive -Path ./dist/yt-dlp${{ matrix.suffix }}/* -DestinationPath ./dist/yt-dlp_win${{ matrix.suffix }}.zip
+          Compress-Archive -Path ./dist/yt-dlp${Env:SUFFIX}/* -DestinationPath ./dist/yt-dlp_win${Env:SUFFIX}.zip
       - name: Verify --update-to
         if: vars.UPDATE_TO_VERIFICATION
+        shell: pwsh
         run: |
-          foreach ($name in @("yt-dlp${{ matrix.suffix }}")) {
-            Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
-            $version = & "./dist/${name}.exe" --version
-            & "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2025.08.20
-            $downgraded_version = & "./dist/${name}_downgraded.exe" --version
-            if ($version -eq $downgraded_version) {
-              exit 1
-            }
+          $name = "yt-dlp${Env:SUFFIX}"
+          Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
+          $version = & "./dist/${name}.exe" --version
+          & "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2025.08.20
+          $downgraded_version = & "./dist/${name}_downgraded.exe" --version
+          if ($version -eq $downgraded_version) {
+            exit 1
           }
       - name: Upload artifacts
@@ -415,30 +525,14 @@
             dist/yt-dlp_win${{ matrix.suffix }}.zip
           compression-level: 0
-      - name: Cleanup cache
-        if: |
-          matrix.arch == 'arm64' && steps.restore-cache.outputs.cache-hit == 'true'
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          cache_key: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}-${{ github.ref }}
-        run: |
-          gh cache delete "${cache_key}"
-      - name: Cache requirements
-        if: matrix.arch == 'arm64'
-        uses: actions/cache/save@v4
-        with:
-          path: |
-            /yt-dlp-build-venv
-          key: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}-${{ github.ref }}

   meta_files:
     if: always() && !cancelled()
     needs:
       - process
       - unix
-      - linux_static
-      - linux_arm
+      - linux
+      - linux_armv7l
+      - musllinux
       - macos
       - windows
     runs-on: ubuntu-latest
@@ -469,38 +563,38 @@
             lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
             lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
             lock 2024.10.22 py2exe .+
-            lock 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
             lock 2024.10.22 zip Python 3\.8
             lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
             lock 2025.08.11 darwin_legacy_exe .+
+            lock 2025.08.27 linux_armv7l_exe .+
             lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
             lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
             lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
             lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
-            lockV2 yt-dlp/yt-dlp 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
             lockV2 yt-dlp/yt-dlp 2024.10.22 zip Python 3\.8
             lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
             lockV2 yt-dlp/yt-dlp 2025.08.11 darwin_legacy_exe .+
+            lockV2 yt-dlp/yt-dlp 2025.08.27 linux_armv7l_exe .+
             lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
             lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
             lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
-            lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
             lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 zip Python 3\.8
             lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
             lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.12.233030 darwin_legacy_exe .+
+            lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.30.232839 linux_armv7l_exe .+
             lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
             lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
             lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
-            lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
             lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 zip Python 3\.8
             lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
             lockV2 yt-dlp/yt-dlp-master-builds 2025.08.12.232447 darwin_legacy_exe .+
+            lockV2 yt-dlp/yt-dlp-master-builds 2025.09.05.212910 linux_armv7l_exe .+
             EOF
       - name: Sign checksum files
         env:
           GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
-        if: env.GPG_SIGNING_KEY != ''
+        if: env.GPG_SIGNING_KEY
         run: |
           gpg --batch --import <<< "${{ secrets.GPG_SIGNING_KEY }}"
           for signfile in ./SHA*SUMS; do

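A note on the build.yml diff above: the new process job replaces the one-line origin echo with a Python step that also derives the release version from the dispatch input plus a UTC timestamp. A minimal standalone sketch of those three rules follows; derive_version is a hypothetical helper invented here for illustration, not workflow code.

# Sketch of the version rules in the "Process origin" step above.
# derive_version is a hypothetical name; it is not part of yt-dlp.
import datetime as dt


def derive_version(version, now):
    timestamp = now.strftime('%Y.%m.%d.%H%M%S.%f')
    if version and '.' not in version:
        # revision-only input: today's date plus the given revision
        return '.'.join([*timestamp.split('.')[:3], version])
    if not version:
        # no input: today's date plus an HHMMSS revision
        return '.'.join(timestamp.split('.')[:4])
    # complete version input: passed through unchanged
    return version


now = dt.datetime(2025, 9, 5, 22, 38, 20, tzinfo=dt.timezone.utc)
assert derive_version(None, now) == '2025.09.05.223820'
assert derive_version('1', now) == '2025.09.05.1'
assert derive_version('2025.09.05', now) == '2025.09.05'

The workflow writes the resulting origin/timestamp/version to GITHUB_OUTPUT so that downstream jobs can consume them via needs.process.outputs.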
.github/workflows/cache-warmer.yml (new file)

@@ -0,0 +1,22 @@
+name: Keep cache warm
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '0 22 1,6,11,16,21,27 * *'
+
+jobs:
+  build:
+    if: |
+      vars.KEEP_CACHE_WARM || github.event_name == 'workflow_dispatch'
+    uses: ./.github/workflows/build.yml
+    with:
+      version: '999999'
+      channel: stable
+      unix: false
+      linux: false
+      linux_armv7l: true
+      musllinux: false
+      macos: true
+      windows: true
+    permissions:
+      contents: read

.github/workflows/release-master.yml

@@ -6,10 +6,12 @@ on:
     paths:
       - "yt_dlp/**.py"
       - "!yt_dlp/version.py"
-      - "bundle/*.py"
+      - "bundle/**"
       - "pyproject.toml"
       - "Makefile"
       - ".github/workflows/build.yml"
+      - ".github/workflows/release.yml"
+      - ".github/workflows/release-master.yml"
 concurrency:
   group: release-master
 permissions:
@@ -17,21 +19,20 @@ permissions:
 jobs:
   release:
-    if: vars.BUILD_MASTER != ''
+    if: vars.BUILD_MASTER
     uses: ./.github/workflows/release.yml
     with:
       prerelease: true
-      source: master
+      source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.MASTER_ARCHIVE_REPO) || 'master' }}
+      target: 'master'
     permissions:
       contents: write
-      packages: write # For package cache
-      actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit

   publish_pypi:
     needs: [release]
-    if: vars.MASTER_PYPI_PROJECT != ''
+    if: vars.MASTER_PYPI_PROJECT
     runs-on: ubuntu-latest
     permissions:
       id-token: write # mandatory for trusted publishing

.github/workflows/release-nightly.yml

@@ -7,7 +7,7 @@ permissions:
 jobs:
   check_nightly:
-    if: vars.BUILD_NIGHTLY != ''
+    if: vars.BUILD_NIGHTLY
     runs-on: ubuntu-latest
     outputs:
       commit: ${{ steps.check_for_new_commits.outputs.commit }}
@@ -22,9 +22,13 @@ jobs:
             "yt_dlp/*.py"
             ':!yt_dlp/version.py'
             "bundle/*.py"
+            "bundle/docker/compose.yml"
+            "bundle/docker/linux/*"
             "pyproject.toml"
             "Makefile"
             ".github/workflows/build.yml"
+            ".github/workflows/release.yml"
+            ".github/workflows/release-nightly.yml"
           )
           echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"
@@ -34,17 +38,16 @@ jobs:
     uses: ./.github/workflows/release.yml
     with:
       prerelease: true
-      source: nightly
+      source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.NIGHTLY_ARCHIVE_REPO) || 'nightly' }}
+      target: 'nightly'
     permissions:
       contents: write
-      packages: write # For package cache
-      actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit

   publish_pypi:
     needs: [release]
-    if: vars.NIGHTLY_PYPI_PROJECT != ''
+    if: vars.NIGHTLY_PYPI_PROJECT
     runs-on: ubuntu-latest
     permissions:
       id-token: write # mandatory for trusted publishing

.github/workflows/release.yml

@@ -14,6 +14,10 @@ on:
         required: false
         default: ''
         type: string
+      linux_armv7l:
+        required: false
+        default: false
+        type: boolean
       prerelease:
         required: false
         default: true
@@ -43,6 +47,10 @@ on:
         required: false
         default: ''
         type: string
+      linux_armv7l:
+        description: Include linux_armv7l
+        default: true
+        type: boolean
       prerelease:
         description: Pre-release
         default: false
@@ -77,135 +85,57 @@ jobs:
       - name: Process inputs
         id: process_inputs
+        env:
+          INPUTS: ${{ toJSON(inputs) }}
         run: |
-          cat << EOF
-          ::group::Inputs
-          prerelease=${{ inputs.prerelease }}
-          source=${{ inputs.source }}
-          target=${{ inputs.target }}
-          version=${{ inputs.version }}
-          ::endgroup::
-          EOF
-          IFS='@' read -r source_repo source_tag <<<"${{ inputs.source }}"
-          IFS='@' read -r target_repo target_tag <<<"${{ inputs.target }}"
-          cat << EOF >> "$GITHUB_OUTPUT"
-          source_repo=${source_repo}
-          source_tag=${source_tag}
-          target_repo=${target_repo}
-          target_tag=${target_tag}
-          EOF
+          python -m devscripts.setup_variables process_inputs
       - name: Setup variables
         id: setup_variables
         env:
-          source_repo: ${{ steps.process_inputs.outputs.source_repo }}
-          source_tag: ${{ steps.process_inputs.outputs.source_tag }}
-          target_repo: ${{ steps.process_inputs.outputs.target_repo }}
-          target_tag: ${{ steps.process_inputs.outputs.target_tag }}
+          INPUTS: ${{ toJSON(inputs) }}
+          PROCESSED: ${{ toJSON(steps.process_inputs.outputs) }}
+          REPOSITORY: ${{ github.repository }}
+          PUSH_VERSION_COMMIT: ${{ vars.PUSH_VERSION_COMMIT }}
+          PYPI_PROJECT: ${{ vars.PYPI_PROJECT }}
+          SOURCE_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.source_repo)] }}
+          SOURCE_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.source_repo)] }}
+          TARGET_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.target_repo)] }}
+          TARGET_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.target_repo)] }}
+          SOURCE_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.source_repo)] }}
+          TARGET_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.target_repo)] }}
+          HAS_SOURCE_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.source_repo)] }}
+          HAS_TARGET_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.target_repo)] }}
+          HAS_ARCHIVE_REPO_TOKEN: ${{ !!secrets.ARCHIVE_REPO_TOKEN }}
         run: |
-          # unholy bash monstrosity (sincere apologies)
-          fallback_token () {
-            if ${{ !secrets.ARCHIVE_REPO_TOKEN }}; then
-              echo "::error::Repository access secret ${target_repo_token^^} not found"
-              exit 1
-            fi
-            target_repo_token=ARCHIVE_REPO_TOKEN
-            return 0
-          }
-
-          source_is_channel=0
-          [[ "${source_repo}" == 'stable' ]] && source_repo='yt-dlp/yt-dlp'
-          if [[ -z "${source_repo}" ]]; then
-            source_repo='${{ github.repository }}'
-          elif [[ '${{ vars[format('{0}_archive_repo', env.source_repo)] }}' ]]; then
-            source_is_channel=1
-            source_channel='${{ vars[format('{0}_archive_repo', env.source_repo)] }}'
-          elif [[ -z "${source_tag}" && "${source_repo}" != */* ]]; then
-            source_tag="${source_repo}"
-            source_repo='${{ github.repository }}'
-          fi
-
-          resolved_source="${source_repo}"
-          if [[ "${source_tag}" ]]; then
-            resolved_source="${resolved_source}@${source_tag}"
-          elif [[ "${source_repo}" == 'yt-dlp/yt-dlp' ]]; then
-            resolved_source='stable'
-          fi
-
-          revision="${{ (inputs.prerelease || !vars.PUSH_VERSION_COMMIT) && '$(date -u +"%H%M%S")' || '' }}"
-          version="$(
-            python devscripts/update-version.py \
-              -c "${resolved_source}" -r "${{ github.repository }}" ${{ inputs.version || '$revision' }} | \
-              grep -Po "version=\K\d+\.\d+\.\d+(\.\d+)?")"
-
-          if [[ "${target_repo}" ]]; then
-            if [[ -z "${target_tag}" ]]; then
-              if [[ '${{ vars[format('{0}_archive_repo', env.target_repo)] }}' ]]; then
-                target_tag="${source_tag:-${version}}"
-              else
-                target_tag="${target_repo}"
-                target_repo='${{ github.repository }}'
-              fi
-            fi
-            if [[ "${target_repo}" != '${{ github.repository}}' ]]; then
-              target_repo='${{ vars[format('{0}_archive_repo', env.target_repo)] }}'
-              target_repo_token='${{ env.target_repo }}_archive_repo_token'
-              ${{ !!secrets[format('{0}_archive_repo_token', env.target_repo)] }} || fallback_token
-              pypi_project='${{ vars[format('{0}_pypi_project', env.target_repo)] }}'
-              pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.target_repo)] }}'
-            fi
-          else
-            target_tag="${source_tag:-${version}}"
-            if ((source_is_channel)); then
-              target_repo="${source_channel}"
-              target_repo_token='${{ env.source_repo }}_archive_repo_token'
-              ${{ !!secrets[format('{0}_archive_repo_token', env.source_repo)] }} || fallback_token
-              pypi_project='${{ vars[format('{0}_pypi_project', env.source_repo)] }}'
-              pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.source_repo)] }}'
-            else
-              target_repo='${{ github.repository }}'
-            fi
-          fi
-
-          if [[ "${target_repo}" == '${{ github.repository }}' ]] && ${{ !inputs.prerelease }}; then
-            pypi_project='${{ vars.PYPI_PROJECT }}'
-          fi
-
-          echo "::group::Output variables"
-          cat << EOF | tee -a "$GITHUB_OUTPUT"
-          channel=${resolved_source}
-          version=${version}
-          target_repo=${target_repo}
-          target_repo_token=${target_repo_token}
-          target_tag=${target_tag}
-          pypi_project=${pypi_project}
-          pypi_suffix=${pypi_suffix}
-          EOF
-          echo "::endgroup::"
-      - name: Update documentation
+          python -m devscripts.setup_variables
+      - name: Update version & documentation
         env:
-          version: ${{ steps.setup_variables.outputs.version }}
-          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
-        if: |
-          !inputs.prerelease && env.target_repo == github.repository
+          CHANNEL: ${{ steps.setup_variables.outputs.channel }}
+          # Use base repo since this could be committed; build jobs will call this again with true origin
+          REPOSITORY: ${{ github.repository }}
+          VERSION: ${{ steps.setup_variables.outputs.version }}
         run: |
+          python devscripts/update-version.py -c "${CHANNEL}" -r "${REPOSITORY}" "${VERSION}"
           python devscripts/update_changelog.py -vv
           make doc
       - name: Push to release
         id: push_release
         env:
-          version: ${{ steps.setup_variables.outputs.version }}
-          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+          VERSION: ${{ steps.setup_variables.outputs.version }}
+          GITHUB_EVENT_SENDER_LOGIN: ${{ github.event.sender.login }}
+          GITHUB_EVENT_REF: ${{ github.event.ref }}
         if: |
-          !inputs.prerelease && env.target_repo == github.repository
+          !inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
         run: |
           git config --global user.name "github-actions[bot]"
           git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
           git add -u
-          git commit -m "Release ${{ env.version }}" \
-            -m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all"
-          git push origin --force ${{ github.event.ref }}:release
+          git commit -m "Release ${VERSION}" \
+            -m "Created by: ${GITHUB_EVENT_SENDER_LOGIN}" -m ":ci skip all"
+          git push origin --force "${GITHUB_EVENT_REF}:release"
       - name: Get target commitish
         id: get_target
@@ -214,10 +144,10 @@
       - name: Update master
         env:
-          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+          GITHUB_EVENT_REF: ${{ github.event.ref }}
         if: |
-          vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease && env.target_repo == github.repository
-        run: git push origin ${{ github.event.ref }}
+          vars.PUSH_VERSION_COMMIT && !inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
+        run: git push origin "${GITHUB_EVENT_REF}"

   build:
     needs: prepare
@@ -226,10 +156,9 @@
       version: ${{ needs.prepare.outputs.version }}
       channel: ${{ needs.prepare.outputs.channel }}
       origin: ${{ needs.prepare.outputs.target_repo }}
+      linux_armv7l: ${{ inputs.linux_armv7l }}
     permissions:
       contents: read
-      packages: write # For package cache
-      actions: write # For cleaning up cache
     secrets:
       GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
@@ -255,16 +184,16 @@
       - name: Prepare
         env:
-          version: ${{ needs.prepare.outputs.version }}
-          suffix: ${{ needs.prepare.outputs.pypi_suffix }}
-          channel: ${{ needs.prepare.outputs.channel }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
-          pypi_project: ${{ needs.prepare.outputs.pypi_project }}
+          VERSION: ${{ needs.prepare.outputs.version }}
+          SUFFIX: ${{ needs.prepare.outputs.pypi_suffix }}
+          CHANNEL: ${{ needs.prepare.outputs.channel }}
+          TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
+          PYPI_PROJECT: ${{ needs.prepare.outputs.pypi_project }}
         run: |
-          python devscripts/update-version.py -c "${{ env.channel }}" -r "${{ env.target_repo }}" -s "${{ env.suffix }}" "${{ env.version }}"
+          python devscripts/update-version.py -c "${CHANNEL}" -r "${TARGET_REPO}" -s "${SUFFIX}" "${VERSION}"
           python devscripts/update_changelog.py -vv
           python devscripts/make_lazy_extractors.py
-          sed -i -E '0,/(name = ")[^"]+(")/s//\1${{ env.pypi_project }}\2/' pyproject.toml
+          sed -i -E '0,/(name = ")[^"]+(")/s//\1'"${PYPI_PROJECT}"'\2/' pyproject.toml
       - name: Build
         run: |
@@ -298,7 +227,11 @@
     permissions:
       contents: write
     runs-on: ubuntu-latest
+    env:
+      TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
+      TARGET_TAG: ${{ needs.prepare.outputs.target_tag }}
+      VERSION: ${{ needs.prepare.outputs.version }}
+      HEAD_SHA: ${{ needs.prepare.outputs.head_sha }}
     steps:
       - uses: actions/checkout@v4
         with:
@@ -314,81 +247,80 @@
       - name: Generate release notes
         env:
-          head_sha: ${{ needs.prepare.outputs.head_sha }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
-          target_tag: ${{ needs.prepare.outputs.target_tag }}
+          REPOSITORY: ${{ github.repository }}
+          BASE_REPO: yt-dlp/yt-dlp
+          NIGHTLY_REPO: yt-dlp/yt-dlp-nightly-builds
+          MASTER_REPO: yt-dlp/yt-dlp-master-builds
+          DOCS_PATH: ${{ env.TARGET_REPO == github.repository && format('/tree/{0}', env.TARGET_TAG) || '' }}
         run: |
           printf '%s' \
-            '[![Installation](https://img.shields.io/badge/-Which%20file%20to%20download%3F-white.svg?style=for-the-badge)]' \
-            '(https://github.com/${{ github.repository }}#installation "Installation instructions") ' \
-            '[![Discord](https://img.shields.io/discord/807245652072857610?color=blue&labelColor=555555&label=&logo=discord&style=for-the-badge)]' \
-            '(https://discord.gg/H5MNcFW63r "Discord") ' \
-            '[![Donate](https://img.shields.io/badge/_-Donate-red.svg?logo=githubsponsors&labelColor=555555&style=for-the-badge)]' \
-            '(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
-            '[![Documentation](https://img.shields.io/badge/-Docs-brightgreen.svg?style=for-the-badge&logo=GitBook&labelColor=555555)]' \
-            '(https://github.com/${{ github.repository }}' \
-            '${{ env.target_repo == github.repository && format('/tree/{0}', env.target_tag) || '' }}#readme "Documentation") ' \
-            ${{ env.target_repo == 'yt-dlp/yt-dlp' && '\
-            "[![Nightly](https://img.shields.io/badge/Nightly%20builds-purple.svg?style=for-the-badge)]" \
-            "(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\") " \
-            "[![Master](https://img.shields.io/badge/Master%20builds-lightblue.svg?style=for-the-badge)]" \
-            "(https://github.com/yt-dlp/yt-dlp-master-builds/releases/latest \"Master builds\")"' || '' }} > ./RELEASE_NOTES
+            "[![Installation](https://img.shields.io/badge/-Which%20file%20to%20download%3F-white.svg?style=for-the-badge)]" \
+            "(https://github.com/${REPOSITORY}#installation \"Installation instructions\") " \
+            "[![Discord](https://img.shields.io/discord/807245652072857610?color=blue&labelColor=555555&label=&logo=discord&style=for-the-badge)]" \
+            "(https://discord.gg/H5MNcFW63r \"Discord\") " \
+            "[![Donate](https://img.shields.io/badge/_-Donate-red.svg?logo=githubsponsors&labelColor=555555&style=for-the-badge)]" \
+            "(https://github.com/${BASE_REPO}/blob/master/Collaborators.md#collaborators \"Donate\") " \
+            "[![Documentation](https://img.shields.io/badge/-Docs-brightgreen.svg?style=for-the-badge&logo=GitBook&labelColor=555555)]" \
+            "(https://github.com/${REPOSITORY}${DOCS_PATH}#readme \"Documentation\") " > ./RELEASE_NOTES
+          if [[ "${TARGET_REPO}" == "${BASE_REPO}" ]]; then
+            printf '%s' \
+              "[![Nightly](https://img.shields.io/badge/Nightly%20builds-purple.svg?style=for-the-badge)]" \
+              "(https://github.com/${NIGHTLY_REPO}/releases/latest \"Nightly builds\") " \
+              "[![Master](https://img.shields.io/badge/Master%20builds-lightblue.svg?style=for-the-badge)]" \
+              "(https://github.com/${MASTER_REPO}/releases/latest \"Master builds\")" >> ./RELEASE_NOTES
+          fi
           printf '\n\n' >> ./RELEASE_NOTES
           cat >> ./RELEASE_NOTES << EOF
-          #### A description of the various files is in the [README](https://github.com/${{ github.repository }}#release-files)
+          #### A description of the various files is in the [README](https://github.com/${REPOSITORY}#release-files)
           ---
           $(python ./devscripts/make_changelog.py -vv --collapsible)
           EOF
           printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES
           cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
-          printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ env.head_sha }}' >> ./ARCHIVE_NOTES
+          printf '%s\n\n' "Generated from: https://github.com/${REPOSITORY}/commit/${HEAD_SHA}" >> ./ARCHIVE_NOTES
           cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES
       - name: Publish to archive repo
         env:
           GH_TOKEN: ${{ secrets[needs.prepare.outputs.target_repo_token] }}
           GH_REPO: ${{ needs.prepare.outputs.target_repo }}
-          version: ${{ needs.prepare.outputs.version }}
-          channel: ${{ needs.prepare.outputs.channel }}
+          TITLE_PREFIX: ${{ startswith(env.TARGET_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}
+          TITLE: ${{ inputs.target != env.TARGET_REPO && inputs.target || needs.prepare.outputs.channel }}
         if: |
-          inputs.prerelease && env.GH_TOKEN != '' && env.GH_REPO != '' && env.GH_REPO != github.repository
+          inputs.prerelease && env.GH_TOKEN && env.GH_REPO && env.GH_REPO != github.repository
         run: |
-          title="${{ startswith(env.GH_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}${{ env.channel }}"
           gh release create \
             --notes-file ARCHIVE_NOTES \
-            --title "${title} ${{ env.version }}" \
-            ${{ env.version }} \
+            --title "${TITLE_PREFIX}${TITLE} ${VERSION}" \
+            "${VERSION}" \
             artifact/*
       - name: Prune old release
         env:
           GH_TOKEN: ${{ github.token }}
-          version: ${{ needs.prepare.outputs.version }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
-          target_tag: ${{ needs.prepare.outputs.target_tag }}
         if: |
-          env.target_repo == github.repository && env.target_tag != env.version
+          env.TARGET_REPO == github.repository && env.TARGET_TAG != env.VERSION
         run: |
-          gh release delete --yes --cleanup-tag "${{ env.target_tag }}" || true
-          git tag --delete "${{ env.target_tag }}" || true
+          gh release delete --yes --cleanup-tag "${TARGET_TAG}" || true
+          git tag --delete "${TARGET_TAG}" || true
           sleep 5 # Enough time to cover deletion race condition
       - name: Publish release
         env:
           GH_TOKEN: ${{ github.token }}
-          version: ${{ needs.prepare.outputs.version }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
-          target_tag: ${{ needs.prepare.outputs.target_tag }}
-          head_sha: ${{ needs.prepare.outputs.head_sha }}
+          NOTES_FILE: ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }}
+          TITLE_PREFIX: ${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}
+          TITLE: ${{ env.TARGET_TAG != env.VERSION && format('{0} ', env.TARGET_TAG) || '' }}
+          PRERELEASE: ${{ inputs.prerelease && '1' || '0' }}
         if: |
-          env.target_repo == github.repository
+          env.TARGET_REPO == github.repository
         run: |
-          title="${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}"
-          title+="${{ env.target_tag != env.version && format('{0} ', env.target_tag) || '' }}"
-          gh release create \
-            --notes-file ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }} \
-            --target ${{ env.head_sha }} \
-            --title "${title}${{ env.version }}" \
-            ${{ inputs.prerelease && '--prerelease' || '' }} \
-            ${{ env.target_tag }} \
-            artifact/*
+          gh_options=(
+            --notes-file "${NOTES_FILE}"
+            --target "${HEAD_SHA}"
+            --title "${TITLE_PREFIX}${TITLE}${VERSION}"
+          )
+          if ((PRERELEASE)); then
+            gh_options+=(--prerelease)
+          fi
+          gh release create "${gh_options[@]}" "${TARGET_TAG}" artifact/*

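The "unholy bash monstrosity" removed above now lives in devscripts/setup_variables.py, which release.yml invokes once in process_inputs mode and once for the full variable resolution. Judging only from the deleted bash, the first stage simply splits the source/target inputs on "@"; a rough Python equivalent under that assumption (parse_source_target is a name invented for this sketch, not the script's real API):

# Rough equivalent of the deleted "Process inputs" bash step; the real logic
# is in devscripts/setup_variables.py. parse_source_target is hypothetical.
def parse_source_target(source, target):
    source_repo, _, source_tag = source.partition('@')
    target_repo, _, target_tag = target.partition('@')
    return {
        'source_repo': source_repo,
        'source_tag': source_tag,
        'target_repo': target_repo,
        'target_tag': target_tag,
    }


# e.g. a prerelease sourced from an archive repo at a specific tag
print(parse_source_target('yt-dlp/yt-dlp-nightly-builds@2025.09.05.123456', 'nightly'))
# {'source_repo': 'yt-dlp/yt-dlp-nightly-builds', 'source_tag': '2025.09.05.123456',
#  'target_repo': 'nightly', 'target_tag': ''}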
CONTRIBUTORS

@@ -806,3 +806,5 @@ junyilou
 PierreMesure
 Randalix
 runarmod
+gitchasing
+zakaryan2004

Changelog.md

@@ -4,6 +4,23 @@
 # To create a release, dispatch the https://github.com/yt-dlp/yt-dlp/actions/workflows/release.yml workflow on master
 -->
+### 2025.09.05
+
+#### Core changes
+- [Fix `--id` deprecation warning](https://github.com/yt-dlp/yt-dlp/commit/76bb46002c9a9655f2b1d29d4840e75e79037cfa) ([#14190](https://github.com/yt-dlp/yt-dlp/issues/14190)) by [seproDev](https://github.com/seproDev)
+
+#### Extractor changes
+- **charlierose**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/603acdff07f0226088916886002d2ad8309ff9d3) ([#14231](https://github.com/yt-dlp/yt-dlp/issues/14231)) by [gitchasing](https://github.com/gitchasing)
+- **googledrive**: [Fix subtitles extraction](https://github.com/yt-dlp/yt-dlp/commit/18fe696df9d60804a8f5cb8cd74f38111d6eb711) ([#14139](https://github.com/yt-dlp/yt-dlp/issues/14139)) by [zakaryan2004](https://github.com/zakaryan2004)
+- **itvbtcc**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/0b51005b4819e7cea222fcbaf8e60391db4f732c) ([#14161](https://github.com/yt-dlp/yt-dlp/issues/14161)) by [garret1317](https://github.com/garret1317)
+- **kick**: vod: [Support ongoing livestream VODs](https://github.com/yt-dlp/yt-dlp/commit/1e28f6bf743627b909135bb9a88537ad2deccaf0) ([#14154](https://github.com/yt-dlp/yt-dlp/issues/14154)) by [InvalidUsernameException](https://github.com/InvalidUsernameException)
+- **lrt**: [Fix extractors](https://github.com/yt-dlp/yt-dlp/commit/ed24640943872c4cf30d7cc4601bec87b50ba03c) ([#14193](https://github.com/yt-dlp/yt-dlp/issues/14193)) by [seproDev](https://github.com/seproDev)
+- **tver**: [Extract more metadata](https://github.com/yt-dlp/yt-dlp/commit/223baa81f6637dcdef108f817180d8d1ae9fa213) ([#14165](https://github.com/yt-dlp/yt-dlp/issues/14165)) by [arabcoders](https://github.com/arabcoders)
+- **vevo**: [Restore extractors](https://github.com/yt-dlp/yt-dlp/commit/d925e92b710153d0d51d030f115b3c87226bc0f0) ([#14203](https://github.com/yt-dlp/yt-dlp/issues/14203)) by [seproDev](https://github.com/seproDev)
+
+#### Misc. changes
+- **build**: [Overhaul Linux builds and refactor release workflow](https://github.com/yt-dlp/yt-dlp/commit/50136eeeb3767289b236f140b759f23b39b00888) ([#13997](https://github.com/yt-dlp/yt-dlp/issues/13997)) by [bashonly](https://github.com/bashonly)
+
 ### 2025.08.27

 #### Extractor changes

README.md

@@ -105,14 +105,20 @@ File|Description
 File|Description
 :---|:---
+[yt-dlp_linux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux)|Linux (glibc 2.17+) standalone x86_64 binary
+[yt-dlp_linux.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux.zip)|Unpackaged Linux (glibc 2.17+) x86_64 executable (no auto-update)
+[yt-dlp_linux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64)|Linux (glibc 2.17+) standalone aarch64 binary
+[yt-dlp_linux_aarch64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64.zip)|Unpackaged Linux (glibc 2.17+) aarch64 executable (no auto-update)
+[yt-dlp_linux_armv7l.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_armv7l.zip)|Unpackaged Linux (glibc 2.31+) armv7l executable (no auto-update)
+[yt-dlp_musllinux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux)|Linux (musl 1.2+) standalone x86_64 binary
+[yt-dlp_musllinux.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux.zip)|Unpackaged Linux (musl 1.2+) x86_64 executable (no auto-update)
+[yt-dlp_musllinux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux_aarch64)|Linux (musl 1.2+) standalone aarch64 binary
+[yt-dlp_musllinux_aarch64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux_aarch64.zip)|Unpackaged Linux (musl 1.2+) aarch64 executable (no auto-update)
 [yt-dlp_x86.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_x86.exe)|Windows (Win8+) standalone x86 (32-bit) binary
-[yt-dlp_arm64.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_arm64.exe)|Windows (Win10+) standalone arm64 (64-bit) binary
-[yt-dlp_linux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux)|Linux standalone x64 binary
-[yt-dlp_linux_armv7l](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_armv7l)|Linux standalone armv7l (32-bit) binary
-[yt-dlp_linux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64)|Linux standalone aarch64 (64-bit) binary
+[yt-dlp_win_x86.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_x86.zip)|Unpackaged Windows (Win8+) x86 (32-bit) executable (no auto-update)
+[yt-dlp_arm64.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_arm64.exe)|Windows (Win10+) standalone ARM64 binary
+[yt-dlp_win_arm64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_arm64.zip)|Unpackaged Windows (Win10+) ARM64 executable (no auto-update)
 [yt-dlp_win.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win.zip)|Unpackaged Windows (Win8+) x64 executable (no auto-update)
-[yt-dlp_win_x86.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_x86.zip)|Unpackaged Windows (Win8+) x86 executable (no auto-update)
-[yt-dlp_win_arm64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_arm64.zip)|Unpackaged Windows (Win10+) arm64 executable (no auto-update)
 [yt-dlp_macos.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos.zip)|Unpackaged MacOS (10.15+) executable (no auto-update)

 #### Misc
@@ -206,7 +212,7 @@ The following provide support for impersonating browser requests. This may be re
 * [**curl_cffi**](https://github.com/lexiforest/curl_cffi) (recommended) - Python binding for [curl-impersonate](https://github.com/lexiforest/curl-impersonate). Provides impersonation targets for Chrome, Edge and Safari. Licensed under [MIT](https://github.com/lexiforest/curl_cffi/blob/main/LICENSE)
     * Can be installed with the `curl-cffi` group, e.g. `pip install "yt-dlp[default,curl-cffi]"`
-    * Currently included in `yt-dlp.exe`, `yt-dlp_linux` and `yt-dlp_macos` builds
+    * Currently included in most builds *except* `yt-dlp` (Unix zipimport binary), `yt-dlp_x86` (Windows 32-bit) and `yt-dlp_musllinux_aarch64`

 ### Metadata

bundle/docker/compose.yml

@@ -1,10 +1,153 @@
 services:
-  static:
-    build: static
+  linux_x86_64:
+    build:
+      context: linux
+      target: build
+      platforms:
+        - "linux/amd64"
+      args:
+        BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
     environment:
-      channel: ${channel}
-      origin: ${origin}
-      version: ${version}
+      EXE_NAME: ${EXE_NAME:?}
+      CHANNEL: ${CHANNEL:?}
+      ORIGIN: ${ORIGIN:?}
+      VERSION:
     volumes:
-      - ~/build:/build
       - ../..:/yt-dlp
+
+  linux_x86_64_verify:
+    build:
+      context: linux
+      target: verify
+      platforms:
+        - "linux/amd64"
+      args:
+        VERIFYIMAGE: quay.io/pypa/manylinux2014_x86_64:latest
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+    volumes:
+      - ../../dist:/build
+
+  linux_aarch64:
+    build:
+      context: linux
+      target: build
+      platforms:
+        - "linux/arm64"
+      args:
+        BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_aarch64-shared:latest
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      CHANNEL: ${CHANNEL:?}
+      ORIGIN: ${ORIGIN:?}
+      VERSION:
+    volumes:
+      - ../..:/yt-dlp
+
+  linux_aarch64_verify:
+    build:
+      context: linux
+      target: verify
+      platforms:
+        - "linux/arm64"
+      args:
+        VERIFYIMAGE: quay.io/pypa/manylinux2014_aarch64:latest
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      SKIP_UPDATE_TO: "1" # TODO: remove when there is a glibc2.17 aarch64 release to --update-to
+    volumes:
+      - ../../dist:/build
+
+  linux_armv7l:
+    build:
+      context: linux
+      target: build
+      platforms:
+        - "linux/arm/v7"
+      args:
+        BUILDIMAGE: ghcr.io/yt-dlp/manylinux_2_31_armv7l-shared:latest
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      CHANNEL: ${CHANNEL:?}
+      ORIGIN: ${ORIGIN:?}
+      VERSION:
+      SKIP_ONEFILE_BUILD: "1"
+    volumes:
+      - ../..:/yt-dlp
+      - ~/yt-dlp-build-venv:/yt-dlp-build-venv
+
+  linux_armv7l_verify:
+    build:
+      context: linux
+      target: verify
+      platforms:
+        - "linux/arm/v7"
+      args:
+        VERIFYIMAGE: arm32v7/debian:bullseye
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      TEST_ONEDIR_BUILD: "1"
+    volumes:
+      - ../../dist:/build
+
+  musllinux_x86_64:
+    build:
+      context: linux
+      target: build
+      platforms:
+        - "linux/amd64"
+      args:
+        BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_x86_64-shared:latest
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      CHANNEL: ${CHANNEL:?}
+      ORIGIN: ${ORIGIN:?}
+      VERSION:
+    volumes:
+      - ../..:/yt-dlp
+
+  musllinux_x86_64_verify:
+    build:
+      context: linux
+      target: verify
+      platforms:
+        - "linux/amd64"
+      args:
+        VERIFYIMAGE: alpine:3.22
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      SKIP_UPDATE_TO: "1" # TODO: remove when there is a musllinux_aarch64 release to --update-to
+    volumes:
+      - ../../dist:/build
+
+  musllinux_aarch64:
+    build:
+      context: linux
+      target: build
+      platforms:
+        - "linux/arm64"
+      args:
+        BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_aarch64-shared:latest
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      CHANNEL: ${CHANNEL:?}
+      ORIGIN: ${ORIGIN:?}
+      VERSION:
+      EXCLUDE_CURL_CFFI: "1"
+    volumes:
+      - ../..:/yt-dlp
+
+  musllinux_aarch64_verify:
+    build:
+      context: linux
+      target: verify
+      platforms:
+        - "linux/arm64"
+      args:
+        VERIFYIMAGE: alpine:3.22
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      SKIP_UPDATE_TO: "1" # TODO: remove when there is a musllinux_aarch64 release to --update-to
+    volumes:
+      - ../../dist:/build


@@ -0,0 +1,16 @@
ARG BUILDIMAGE=ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
ARG VERIFYIMAGE=alpine:3.22
FROM $BUILDIMAGE AS build
WORKDIR /yt-dlp
COPY build.sh /build.sh
ENTRYPOINT ["/build.sh"]
FROM $VERIFYIMAGE AS verify
WORKDIR /testing
COPY verify.sh /verify.sh
ENTRYPOINT ["/verify.sh"]

bundle/docker/linux/build.sh Executable file

@@ -0,0 +1,46 @@
#!/bin/bash
set -exuo pipefail
if [[ -z "${USE_PYTHON_VERSION:-}" ]]; then
USE_PYTHON_VERSION="3.13"
fi
function runpy {
"/opt/shared-cpython-${USE_PYTHON_VERSION}/bin/python${USE_PYTHON_VERSION}" "$@"
}
function venvpy {
"python${USE_PYTHON_VERSION}" "$@"
}
INCLUDES=(
--include pyinstaller
--include secretstorage
)
if [[ -z "${EXCLUDE_CURL_CFFI:-}" ]]; then
INCLUDES+=(--include curl-cffi)
fi
runpy -m venv /yt-dlp-build-venv
source /yt-dlp-build-venv/bin/activate
# Inside the venv we use venvpy instead of runpy
venvpy -m ensurepip --upgrade --default-pip
venvpy -m devscripts.install_deps -o --include build
venvpy -m devscripts.install_deps "${INCLUDES[@]}"
venvpy -m devscripts.make_lazy_extractors
venvpy devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
if [[ -z "${SKIP_ONEDIR_BUILD:-}" ]]; then
mkdir -p /build
venvpy -m bundle.pyinstaller --onedir --distpath=/build
pushd "/build/${EXE_NAME}"
chmod +x "${EXE_NAME}"
venvpy -m zipfile -c "/yt-dlp/dist/${EXE_NAME}.zip" ./
popd
fi
if [[ -z "${SKIP_ONEFILE_BUILD:-}" ]]; then
venvpy -m bundle.pyinstaller
chmod +x "./dist/${EXE_NAME}"
fi
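The onedir branch above ships the PyInstaller output directory as a zip via the stdlib's `python -m zipfile -c`. A rough programmatic equivalent, with illustrative paths that are not part of the build:

    import zipfile
    from pathlib import Path

    def zip_onedir(src_dir: str, dest_zip: str) -> None:
        """Recursively zip the contents of src_dir, preserving relative paths."""
        src = Path(src_dir)
        with zipfile.ZipFile(dest_zip, 'w') as zf:
            for path in sorted(src.rglob('*')):
                zf.write(path, path.relative_to(src))

    # e.g. zip_onedir('/build/yt-dlp_linux', '/yt-dlp/dist/yt-dlp_linux.zip')

Note that the executable bit does not survive a plain zip round-trip here, which is why build.sh runs `chmod +x` before archiving and verify.sh runs it again after extraction.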

bundle/docker/linux/verify.sh Executable file

@@ -0,0 +1,44 @@
#!/bin/sh
set -eu
if [ -n "${TEST_ONEDIR_BUILD:-}" ]; then
echo "Extracting zip to verify onedir build"
if command -v python3 >/dev/null 2>&1; then
python3 -m zipfile -e "/build/${EXE_NAME}.zip" ./
else
echo "Attempting to install unzip"
if command -v dnf >/dev/null 2>&1; then
dnf -y install --allowerasing unzip
elif command -v yum >/dev/null 2>&1; then
yum -y install unzip
elif command -v apt-get >/dev/null 2>&1; then
DEBIAN_FRONTEND=noninteractive apt-get update -qq
DEBIAN_FRONTEND=noninteractive apt-get install -qq -y --no-install-recommends unzip
elif command -v apk >/dev/null 2>&1; then
apk add --no-cache unzip
else
echo "Unsupported image"
exit 1
fi
unzip "/build/${EXE_NAME}.zip" -d ./
fi
else
echo "Verifying onefile build"
cp "/build/${EXE_NAME}" ./
fi
chmod +x "./${EXE_NAME}"
if [ -n "${SKIP_UPDATE_TO:-}" ] || [ -n "${TEST_ONEDIR_BUILD:-}" ]; then
"./${EXE_NAME}" -v || true
"./${EXE_NAME}" --version
exit 0
fi
cp "./${EXE_NAME}" "./${EXE_NAME}_downgraded"
version="$("./${EXE_NAME}" --version)"
"./${EXE_NAME}_downgraded" -v --update-to yt-dlp/yt-dlp@2023.03.04
downgraded_version="$("./${EXE_NAME}_downgraded" --version)"
if [ "${version}" = "${downgraded_version}" ]; then
exit 1
fi


@@ -1,21 +0,0 @@
FROM alpine:3.19 as base
RUN apk --update add --no-cache \
build-base \
python3 \
pipx \
;
RUN pipx install pyinstaller
# Requires above step to prepare the shared venv
RUN ~/.local/share/pipx/shared/bin/python -m pip install -U wheel
RUN apk --update add --no-cache \
scons \
patchelf \
binutils \
;
RUN pipx install staticx
WORKDIR /yt-dlp
COPY entrypoint.sh /entrypoint.sh
ENTRYPOINT /entrypoint.sh


@@ -1,14 +0,0 @@
#!/bin/ash
set -e
source ~/.local/share/pipx/venvs/pyinstaller/bin/activate
python -m devscripts.install_deps -o --include build
python -m devscripts.install_deps --include secretstorage --include curl-cffi
python -m devscripts.make_lazy_extractors
python devscripts/update-version.py -c "${channel}" -r "${origin}" "${version}"
python -m bundle.pyinstaller
deactivate
source ~/.local/share/pipx/venvs/staticx/bin/activate
staticx /yt-dlp/dist/yt-dlp_linux /build/yt-dlp_linux
deactivate


@@ -13,6 +13,8 @@ from PyInstaller.__main__ import run as run_pyinstaller
 from devscripts.utils import read_version

 OS_NAME, MACHINE, ARCH = sys.platform, platform.machine().lower(), platform.architecture()[0][:2]
+if OS_NAME == 'linux' and platform.libc_ver()[0] != 'glibc':
+    OS_NAME = 'musllinux'
 if MACHINE in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
     MACHINE = 'x86' if ARCH == '32' else ''


@@ -0,0 +1,157 @@
# Allow direct execution
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import datetime as dt
import json
from devscripts.utils import calculate_version
STABLE_REPOSITORY = 'yt-dlp/yt-dlp'
def setup_variables(environment):
"""
`environment` must contain these keys:
REPOSITORY, INPUTS, PROCESSED,
PUSH_VERSION_COMMIT, PYPI_PROJECT,
SOURCE_PYPI_PROJECT, SOURCE_PYPI_SUFFIX,
TARGET_PYPI_PROJECT, TARGET_PYPI_SUFFIX,
SOURCE_ARCHIVE_REPO, TARGET_ARCHIVE_REPO,
HAS_SOURCE_ARCHIVE_REPO_TOKEN,
HAS_TARGET_ARCHIVE_REPO_TOKEN,
HAS_ARCHIVE_REPO_TOKEN
`INPUTS` must contain these keys:
prerelease
`PROCESSED` must contain these keys:
source_repo, source_tag,
target_repo, target_tag
"""
REPOSITORY = environment['REPOSITORY']
INPUTS = json.loads(environment['INPUTS'])
PROCESSED = json.loads(environment['PROCESSED'])
source_channel = None
does_not_have_needed_token = False
target_repo_token = None
pypi_project = None
pypi_suffix = None
source_repo = PROCESSED['source_repo']
source_tag = PROCESSED['source_tag']
if source_repo == 'stable':
source_repo = STABLE_REPOSITORY
if not source_repo:
source_repo = REPOSITORY
elif environment['SOURCE_ARCHIVE_REPO']:
source_channel = environment['SOURCE_ARCHIVE_REPO']
elif not source_tag and '/' not in source_repo:
source_tag = source_repo
source_repo = REPOSITORY
resolved_source = source_repo
if source_tag:
resolved_source = f'{resolved_source}@{source_tag}'
elif source_repo == STABLE_REPOSITORY:
resolved_source = 'stable'
revision = None
if INPUTS['prerelease'] or not environment['PUSH_VERSION_COMMIT']:
revision = dt.datetime.now(tz=dt.timezone.utc).strftime('%H%M%S')
version = calculate_version(INPUTS.get('version') or revision)
target_repo = PROCESSED['target_repo']
target_tag = PROCESSED['target_tag']
if target_repo:
if target_repo == 'stable':
target_repo = STABLE_REPOSITORY
if not target_tag:
if target_repo == STABLE_REPOSITORY:
target_tag = version
elif environment['TARGET_ARCHIVE_REPO']:
target_tag = source_tag or version
else:
target_tag = target_repo
target_repo = REPOSITORY
if target_repo != REPOSITORY:
target_repo = environment['TARGET_ARCHIVE_REPO']
target_repo_token = f'{PROCESSED["target_repo"].upper()}_ARCHIVE_REPO_TOKEN'
if not json.loads(environment['HAS_TARGET_ARCHIVE_REPO_TOKEN']):
does_not_have_needed_token = True
pypi_project = environment['TARGET_PYPI_PROJECT'] or None
pypi_suffix = environment['TARGET_PYPI_SUFFIX'] or None
else:
target_tag = source_tag or version
if source_channel:
target_repo = source_channel
target_repo_token = f'{PROCESSED["source_repo"].upper()}_ARCHIVE_REPO_TOKEN'
if not json.loads(environment['HAS_SOURCE_ARCHIVE_REPO_TOKEN']):
does_not_have_needed_token = True
pypi_project = environment['SOURCE_PYPI_PROJECT'] or None
pypi_suffix = environment['SOURCE_PYPI_SUFFIX'] or None
else:
target_repo = REPOSITORY
if does_not_have_needed_token:
if not json.loads(environment['HAS_ARCHIVE_REPO_TOKEN']):
print(f'::error::Repository access secret {target_repo_token} not found')
return None
target_repo_token = 'ARCHIVE_REPO_TOKEN'
if target_repo == REPOSITORY and not INPUTS['prerelease']:
pypi_project = environment['PYPI_PROJECT'] or None
return {
'channel': resolved_source,
'version': version,
'target_repo': target_repo,
'target_repo_token': target_repo_token,
'target_tag': target_tag,
'pypi_project': pypi_project,
'pypi_suffix': pypi_suffix,
}
def process_inputs(inputs):
outputs = {}
for key in ('source', 'target'):
repo, _, tag = inputs.get(key, '').partition('@')
outputs[f'{key}_repo'] = repo
outputs[f'{key}_tag'] = tag
return outputs
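# Illustrative examples (not part of the script): process_inputs splits each
# "repo@tag" input on the first '@':
#   process_inputs({'source': 'nightly', 'target': 'fork/yt-dlp@experimental'})
#   -> {'source_repo': 'nightly', 'source_tag': '',
#       'target_repo': 'fork/yt-dlp', 'target_tag': 'experimental'}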
if __name__ == '__main__':
if not os.getenv('GITHUB_OUTPUT'):
print('This script is only intended for use with GitHub Actions', file=sys.stderr)
sys.exit(1)
if 'process_inputs' in sys.argv:
inputs = json.loads(os.environ['INPUTS'])
print('::group::Inputs')
print(json.dumps(inputs, indent=2))
print('::endgroup::')
outputs = process_inputs(inputs)
print('::group::Processed')
print(json.dumps(outputs, indent=2))
print('::endgroup::')
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
sys.exit(0)
outputs = setup_variables(dict(os.environ))
if not outputs:
sys.exit(1)
print('::group::Output variables')
print(json.dumps(outputs, indent=2))
print('::endgroup::')
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
f.write('\n'.join(f'{key}={value or ""}' for key, value in outputs.items()))


@@ -0,0 +1,331 @@
# Allow direct execution
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import datetime as dt
import json
from devscripts.setup_variables import STABLE_REPOSITORY, process_inputs, setup_variables
from devscripts.utils import calculate_version
def _test(github_repository, note, repo_vars, repo_secrets, inputs, expected=None, ignore_revision=False):
inp = inputs.copy()
inp.setdefault('linux_armv7l', True)
inp.setdefault('prerelease', False)
processed = process_inputs(inp)
source_repo = processed['source_repo'].upper()
target_repo = processed['target_repo'].upper()
variables = {k.upper(): v for k, v in repo_vars.items()}
secrets = {k.upper(): v for k, v in repo_secrets.items()}
env = {
# Keep this in sync with prepare.setup_variables in release.yml
'INPUTS': json.dumps(inp),
'PROCESSED': json.dumps(processed),
'REPOSITORY': github_repository,
'PUSH_VERSION_COMMIT': variables.get('PUSH_VERSION_COMMIT') or '',
'PYPI_PROJECT': variables.get('PYPI_PROJECT') or '',
'SOURCE_PYPI_PROJECT': variables.get(f'{source_repo}_PYPI_PROJECT') or '',
'SOURCE_PYPI_SUFFIX': variables.get(f'{source_repo}_PYPI_SUFFIX') or '',
'TARGET_PYPI_PROJECT': variables.get(f'{target_repo}_PYPI_PROJECT') or '',
'TARGET_PYPI_SUFFIX': variables.get(f'{target_repo}_PYPI_SUFFIX') or '',
'SOURCE_ARCHIVE_REPO': variables.get(f'{source_repo}_ARCHIVE_REPO') or '',
'TARGET_ARCHIVE_REPO': variables.get(f'{target_repo}_ARCHIVE_REPO') or '',
'HAS_SOURCE_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{source_repo}_ARCHIVE_REPO_TOKEN'))),
'HAS_TARGET_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{target_repo}_ARCHIVE_REPO_TOKEN'))),
'HAS_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get('ARCHIVE_REPO_TOKEN'))),
}
result = setup_variables(env)
if not expected:
print(' {\n' + '\n'.join(f' {k!r}: {v!r},' for k, v in result.items()) + '\n }')
return
exp = expected.copy()
if ignore_revision:
assert len(result['version']) == len(exp['version']), f'revision missing: {github_repository} {note}'
version_is_tag = result['version'] == result['target_tag']
for dct in (result, exp):
dct['version'] = '.'.join(dct['version'].split('.')[:3])
if version_is_tag:
dct['target_tag'] = dct['version']
assert result == exp, f'unexpected result: {github_repository} {note}'
def main():
DEFAULT_VERSION_WITH_REVISION = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S')
DEFAULT_VERSION = calculate_version()
BASE_REPO_VARS = {
'MASTER_ARCHIVE_REPO': 'yt-dlp/yt-dlp-master-builds',
'NIGHTLY_ARCHIVE_REPO': 'yt-dlp/yt-dlp-nightly-builds',
'NIGHTLY_PYPI_PROJECT': 'yt-dlp',
'NIGHTLY_PYPI_SUFFIX': 'dev',
'PUSH_VERSION_COMMIT': '1',
'PYPI_PROJECT': 'yt-dlp',
}
BASE_REPO_SECRETS = {
'ARCHIVE_REPO_TOKEN': '1',
}
FORK_REPOSITORY = 'fork/yt-dlp'
FORK_ORG = FORK_REPOSITORY.partition('/')[0]
_test(
STABLE_REPOSITORY, 'official vars/secrets, stable',
BASE_REPO_VARS, BASE_REPO_SECRETS, {}, {
'channel': 'stable',
'version': DEFAULT_VERSION,
'target_repo': STABLE_REPOSITORY,
'target_repo_token': None,
'target_tag': DEFAULT_VERSION,
'pypi_project': 'yt-dlp',
'pypi_suffix': None,
})
_test(
STABLE_REPOSITORY, 'official vars/secrets, nightly (w/o target)',
BASE_REPO_VARS, BASE_REPO_SECRETS, {
'source': 'nightly',
'prerelease': True,
}, {
'channel': 'nightly',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': 'yt-dlp',
'pypi_suffix': 'dev',
}, ignore_revision=True)
_test(
STABLE_REPOSITORY, 'official vars/secrets, nightly',
BASE_REPO_VARS, BASE_REPO_SECRETS, {
'source': 'nightly',
'target': 'nightly',
'prerelease': True,
}, {
'channel': 'nightly',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': 'yt-dlp',
'pypi_suffix': 'dev',
}, ignore_revision=True)
_test(
STABLE_REPOSITORY, 'official vars/secrets, master (w/o target)',
BASE_REPO_VARS, BASE_REPO_SECRETS, {
'source': 'master',
'prerelease': True,
}, {
'channel': 'master',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': 'yt-dlp/yt-dlp-master-builds',
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
STABLE_REPOSITORY, 'official vars/secrets, master',
BASE_REPO_VARS, BASE_REPO_SECRETS, {
'source': 'master',
'target': 'master',
'prerelease': True,
}, {
'channel': 'master',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': 'yt-dlp/yt-dlp-master-builds',
'target_repo_token': 'ARCHIVE_REPO_TOKEN',
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
STABLE_REPOSITORY, 'official vars/secrets, special tag, updates to stable',
BASE_REPO_VARS, BASE_REPO_SECRETS, {
'target': f'{STABLE_REPOSITORY}@experimental',
'prerelease': True,
}, {
'channel': 'stable',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': STABLE_REPOSITORY,
'target_repo_token': None,
'target_tag': 'experimental',
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
STABLE_REPOSITORY, 'official vars/secrets, special tag, "stable" as target repo',
BASE_REPO_VARS, BASE_REPO_SECRETS, {
'target': 'stable@experimental',
'prerelease': True,
}, {
'channel': 'stable',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': STABLE_REPOSITORY,
'target_repo_token': None,
'target_tag': 'experimental',
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork w/o vars/secrets, stable',
{}, {}, {}, {
'channel': FORK_REPOSITORY,
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork w/o vars/secrets, prerelease',
{}, {}, {'prerelease': True}, {
'channel': FORK_REPOSITORY,
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork w/o vars/secrets, nightly',
{}, {}, {
'prerelease': True,
'source': 'nightly',
'target': 'nightly',
}, {
'channel': f'{FORK_REPOSITORY}@nightly',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': 'nightly',
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork w/o vars/secrets, master',
{}, {}, {
'prerelease': True,
'source': 'master',
'target': 'master',
}, {
'channel': f'{FORK_REPOSITORY}@master',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': 'master',
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork w/o vars/secrets, revision',
{}, {}, {'version': '123'}, {
'channel': FORK_REPOSITORY,
'version': f'{DEFAULT_VERSION[:10]}.123',
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': f'{DEFAULT_VERSION[:10]}.123',
'pypi_project': None,
'pypi_suffix': None,
})
_test(
FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, stable',
{'PUSH_VERSION_COMMIT': '1'}, {}, {}, {
'channel': FORK_REPOSITORY,
'version': DEFAULT_VERSION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': DEFAULT_VERSION,
'pypi_project': None,
'pypi_suffix': None,
})
_test(
FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, prerelease',
{'PUSH_VERSION_COMMIT': '1'}, {}, {'prerelease': True}, {
'channel': FORK_REPOSITORY,
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork w/NIGHTLY_ARCHIVE_REPO_TOKEN, nightly', {
'NIGHTLY_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-nightly-builds',
'PYPI_PROJECT': 'yt-dlp-test',
}, {
'NIGHTLY_ARCHIVE_REPO_TOKEN': '1',
}, {
'source': f'{FORK_ORG}/yt-dlp-nightly-builds',
'target': 'nightly',
'prerelease': True,
}, {
'channel': f'{FORK_ORG}/yt-dlp-nightly-builds',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': f'{FORK_ORG}/yt-dlp-nightly-builds',
'target_repo_token': 'NIGHTLY_ARCHIVE_REPO_TOKEN',
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork w/MASTER_ARCHIVE_REPO_TOKEN, master', {
'MASTER_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-master-builds',
'MASTER_PYPI_PROJECT': 'yt-dlp-test',
'MASTER_PYPI_SUFFIX': 'dev',
}, {
'MASTER_ARCHIVE_REPO_TOKEN': '1',
}, {
'source': f'{FORK_ORG}/yt-dlp-master-builds',
'target': 'master',
'prerelease': True,
}, {
'channel': f'{FORK_ORG}/yt-dlp-master-builds',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': f'{FORK_ORG}/yt-dlp-master-builds',
'target_repo_token': 'MASTER_ARCHIVE_REPO_TOKEN',
'target_tag': DEFAULT_VERSION_WITH_REVISION,
'pypi_project': 'yt-dlp-test',
'pypi_suffix': 'dev',
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork, non-numeric tag',
{}, {}, {'source': 'experimental'}, {
'channel': f'{FORK_REPOSITORY}@experimental',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': 'experimental',
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
_test(
FORK_REPOSITORY, 'fork, non-numeric tag, updates to stable',
{}, {}, {
'prerelease': True,
'source': 'stable',
'target': 'experimental',
}, {
'channel': 'stable',
'version': DEFAULT_VERSION_WITH_REVISION,
'target_repo': FORK_REPOSITORY,
'target_repo_token': None,
'target_tag': 'experimental',
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)
print('all tests passed')
if __name__ == '__main__':
main()


@@ -9,24 +9,9 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 import argparse
 import contextlib
-import datetime as dt
 import sys

-from devscripts.utils import read_version, run_process, write_file
+from devscripts.utils import calculate_version, run_process, write_file


-def get_new_version(version, revision):
-    if not version:
-        version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
-
-    if revision:
-        assert revision.isdecimal(), 'Revision must be a number'
-    else:
-        old_version = read_version().split('.')
-        if version.split('.') == old_version[:3]:
-            revision = str(int(([*old_version, 0])[3]) + 1)
-
-    return f'{version}.{revision}' if revision else version


 def get_git_head():
@@ -72,9 +57,7 @@ if __name__ == '__main__':
     args = parser.parse_args()

     git_head = get_git_head()
-    version = (
-        args.version if args.version and '.' in args.version
-        else get_new_version(None, args.version))
+    version = calculate_version(args.version)
     write_file(args.output, VERSION_TEMPLATE.format(
         version=version, git_head=git_head, channel=args.channel, origin=args.origin,
         package_version=f'{version}{args.suffix}'))


@@ -20,7 +20,9 @@ if __name__ == '__main__':
         '--changelog-path', type=Path, default=Path(__file__).parent.parent / 'Changelog.md',
         help='path to the Changelog file')
     args = parser.parse_args()
-    new_entry = create_changelog(args)

     header, sep, changelog = read_file(args.changelog_path).partition('\n### ')
-    write_file(args.changelog_path, f'{header}{sep}{read_version()}\n{new_entry}\n{sep}{changelog}')
+    current_version = read_version()
+    if current_version != changelog.splitlines()[0]:
+        new_entry = create_changelog(args)
+        write_file(args.changelog_path, f'{header}{sep}{current_version}\n{new_entry}\n{sep}{changelog}')


@@ -1,5 +1,7 @@
 import argparse
+import datetime as dt
 import functools
+import re
 import subprocess
@@ -20,6 +22,23 @@ def read_version(fname='yt_dlp/version.py', varname='__version__'):
     return items[varname]


+def calculate_version(version=None, fname='yt_dlp/version.py'):
+    if version and '.' in version:
+        return version
+
+    revision = version
+    version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
+    if revision:
+        assert re.fullmatch(r'[0-9]+', revision), 'Revision must be numeric'
+    else:
+        old_version = read_version(fname=fname).split('.')
+        if version.split('.') == old_version[:3]:
+            revision = str(int(([*old_version, 0])[3]) + 1)
+
+    return f'{version}.{revision}' if revision else version
+
+
 def get_filename_args(has_infile=False, default_outfile=None):
     parser = argparse.ArgumentParser()
     if has_infile:
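To make the new helper concrete, here is its behavior on a few inputs, inferred from the code above (dates are illustrative):

    # calculate_version('2025.09.05')  -> '2025.09.05'      (anything containing '.' is passed through)
    # calculate_version('123')         -> '<today>.123'     (numeric revision appended to today's date)
    # calculate_version()              -> '<today>', or '<today>.N' when version.py already holds
    #                                     today's date (the revision auto-increments)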


@@ -1601,6 +1601,8 @@ The only reliable way to check if a site is supported is to try it.
 - **Vbox7**
 - **Veo**
 - **Vesti**: Вести.Ru (**Currently broken**)
+ - **Vevo**
+ - **VevoPlaylist**
 - **VGTV**: VGTV, BTTV, FTV, Aftenposten and Aftonbladet
 - **vh1.com**
 - **vhx:embed**: [*vimeo*](## "netrc machine")


@@ -9,7 +9,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 from test.helper import FakeYDL, report_warning
-from yt_dlp.update import UpdateInfo, Updater
+from yt_dlp.update import UpdateInfo, Updater, UPDATE_SOURCES, _make_label


 # XXX: Keep in sync with yt_dlp.update.UPDATE_SOURCES
@@ -280,6 +280,26 @@ class TestUpdate(unittest.TestCase):
         test('testing', None, current_commit='9' * 40)
         test('testing', UpdateInfo('testing', commit='9' * 40))

+    def test_make_label(self):
+        STABLE_REPO = UPDATE_SOURCES['stable']
+        NIGHTLY_REPO = UPDATE_SOURCES['nightly']
+        MASTER_REPO = UPDATE_SOURCES['master']
+        for inputs, expected in [
+            ([STABLE_REPO, '2025.09.02', '2025.09.02'], f'stable@2025.09.02 from {STABLE_REPO}'),
+            ([NIGHTLY_REPO, '2025.09.02.123456', '2025.09.02.123456'], f'nightly@2025.09.02.123456 from {NIGHTLY_REPO}'),
+            ([MASTER_REPO, '2025.09.02.987654', '2025.09.02.987654'], f'master@2025.09.02.987654 from {MASTER_REPO}'),
+            (['fork/yt-dlp', 'experimental', '2025.12.31.000000'], 'fork/yt-dlp@experimental build 2025.12.31.000000'),
+            (['fork/yt-dlp', '2025.09.02', '2025.09.02'], 'fork/yt-dlp@2025.09.02'),
+            ([STABLE_REPO, 'experimental', '2025.12.31.000000'], f'{STABLE_REPO}@experimental build 2025.12.31.000000'),
+            ([STABLE_REPO, 'experimental'], f'{STABLE_REPO}@experimental'),
+            (['fork/yt-dlp', 'experimental'], 'fork/yt-dlp@experimental'),
+        ]:
+            result = _make_label(*inputs)
+            self.assertEqual(
+                result, expected,
+                f'{inputs!r} returned {result!r} instead of {expected!r}')
+

 if __name__ == '__main__':
     unittest.main()


@@ -979,6 +979,7 @@ def parse_options(argv=None):
         'geo_bypass': opts.geo_bypass,
         'geo_bypass_country': opts.geo_bypass_country,
         'geo_bypass_ip_block': opts.geo_bypass_ip_block,
+        'useid': opts.useid or None,
         'warn_when_outdated': opts.update_self is None,
         '_warnings': warnings,
         '_deprecation_warnings': deprecation_warnings,


@@ -2288,6 +2288,10 @@ from .varzesh3 import Varzesh3IE
 from .vbox7 import Vbox7IE
 from .veo import VeoIE
 from .vesti import VestiIE
+from .vevo import (
+    VevoIE,
+    VevoPlaylistIE,
+)
 from .vgtv import (
     VGTVIE,
     BTArticleIE,


@@ -6,7 +6,7 @@ class CharlieRoseIE(InfoExtractor):
     _VALID_URL = r'https?://(?:www\.)?charlierose\.com/(?:video|episode)(?:s|/player)/(?P<id>\d+)'
     _TESTS = [{
         'url': 'https://charlierose.com/videos/27996',
-        'md5': 'fda41d49e67d4ce7c2411fd2c4702e09',
+        'md5': '4405b662f557f94aa256fa6a7baf7426',
         'info_dict': {
             'id': '27996',
             'ext': 'mp4',
@@ -39,12 +39,16 @@ class CharlieRoseIE(InfoExtractor):
             self._PLAYER_BASE % video_id, webpage, video_id,
             m3u8_entry_protocol='m3u8_native')[0]
         self._remove_duplicate_formats(info_dict['formats'])
+        for fmt in info_dict['formats']:
+            if fmt.get('protocol') == 'm3u8_native':
+                fmt['__needs_testing'] = True

         info_dict.update({
             'id': video_id,
             'title': title,
             'thumbnail': self._og_search_thumbnail(webpage),
             'description': self._og_search_description(webpage),
+            '_format_sort_fields': ('proto',),
         })

         return info_dict


@@ -90,6 +90,10 @@ class DisneyIE(InfoExtractor):
             webpage, 'embed data'), video_id)
         video_data = page_data['video']
+        for external in video_data.get('externals', []):
+            if external.get('source') == 'vevo':
+                return self.url_result('vevo:' + external['data_id'], 'Vevo')
         video_id = video_data['id']
         title = video_data['title']


@@ -12,6 +12,7 @@ from ..utils import (
     get_element_html_by_id,
     int_or_none,
     lowercase_escape,
+    parse_qs,
     try_get,
     update_url_query,
 )
@@ -111,14 +112,18 @@ class GoogleDriveIE(InfoExtractor):
             self._caption_formats_ext.append(f.attrib['fmt_code'])

     def _get_captions_by_type(self, video_id, subtitles_id, caption_type,
-                              origin_lang_code=None):
+                              origin_lang_code=None, origin_lang_name=None):
         if not subtitles_id or not caption_type:
             return
         captions = {}
         for caption_entry in self._captions_xml.findall(
                 self._CAPTIONS_ENTRY_TAG[caption_type]):
             caption_lang_code = caption_entry.attrib.get('lang_code')
-            if not caption_lang_code:
+            caption_name = caption_entry.attrib.get('name') or origin_lang_name
+            if not caption_lang_code or not caption_name:
+                self.report_warning(f'Missing necessary caption metadata. '
+                                    f'Need lang_code and name attributes. '
+                                    f'Found: {caption_entry.attrib}')
                 continue
             caption_format_data = []
             for caption_format in self._caption_formats_ext:
@@ -129,7 +134,7 @@ class GoogleDriveIE(InfoExtractor):
                     'lang': (caption_lang_code if origin_lang_code is None
                              else origin_lang_code),
                     'type': 'track',
-                    'name': '',
+                    'name': caption_name,
                     'kind': '',
                 }
                 if origin_lang_code is not None:
@@ -155,14 +160,15 @@ class GoogleDriveIE(InfoExtractor):
         self._download_subtitles_xml(video_id, subtitles_id, hl)
         if not self._captions_xml:
             return
-        track = self._captions_xml.find('track')
+        track = next((t for t in self._captions_xml.findall('track') if t.attrib.get('cantran') == 'true'), None)
         if track is None:
             return
         origin_lang_code = track.attrib.get('lang_code')
-        if not origin_lang_code:
+        origin_lang_name = track.attrib.get('name')
+        if not origin_lang_code or not origin_lang_name:
             return
         return self._get_captions_by_type(
-            video_id, subtitles_id, 'automatic_captions', origin_lang_code)
+            video_id, subtitles_id, 'automatic_captions', origin_lang_code, origin_lang_name)

     def _real_extract(self, url):
         video_id = self._match_id(url)
@@ -268,10 +274,8 @@ class GoogleDriveIE(InfoExtractor):
         subtitles_id = None
         ttsurl = get_value('ttsurl')
         if ttsurl:
-            # the video Id for subtitles will be the last value in the ttsurl
-            # query string
-            subtitles_id = ttsurl.encode().decode(
-                'unicode_escape').split('=')[-1]
+            # the subtitles ID is the vid param of the ttsurl query
+            subtitles_id = parse_qs(ttsurl).get('vid', [None])[-1]
         self.cookiejar.clear(domain='.google.com', path='/', name='NID')
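A quick illustration of the new lookup. The URL below is made up; `parse_qs` here is yt-dlp's helper, which parses the query string of a full URL:

    from yt_dlp.utils import parse_qs

    ttsurl = 'https://drive.google.com/timedtext?type=track&v=FILE_ID&vid=SUBTITLES_ID'
    subtitles_id = parse_qs(ttsurl).get('vid', [None])[-1]  # -> 'SUBTITLES_ID'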


@@ -18,6 +18,7 @@ from ..utils import (
     url_or_none,
     urljoin,
 )
+from ..utils.traversal import traverse_obj


 class ITVIE(InfoExtractor):
@@ -223,6 +224,7 @@ class ITVBTCCIE(InfoExtractor):
         },
         'playlist_count': 12,
     }, {
+        # news page, can have absent `data` field
         'url': 'https://www.itv.com/news/2021-10-27/i-have-to-protect-the-country-says-rishi-sunak-as-uk-faces-interest-rate-hike',
         'info_dict': {
             'id': 'i-have-to-protect-the-country-says-rishi-sunak-as-uk-faces-interest-rate-hike',
@@ -243,7 +245,7 @@ class ITVBTCCIE(InfoExtractor):
         entries = []
         for video in json_map:
-            if not any(video['data'].get(attr) == 'Brightcove' for attr in ('name', 'type')):
+            if not any(traverse_obj(video, ('data', attr)) == 'Brightcove' for attr in ('name', 'type')):
                 continue
             video_id = video['data']['id']
             account_id = video['data']['accountId']


@@ -95,26 +95,47 @@ class KickVODIE(KickBaseIE):
     IE_NAME = 'kick:vod'
     _VALID_URL = r'https?://(?:www\.)?kick\.com/[\w-]+/videos/(?P<id>[\da-f]{8}-(?:[\da-f]{4}-){3}[\da-f]{12})'
     _TESTS = [{
-        'url': 'https://kick.com/xqc/videos/8dd97a8d-e17f-48fb-8bc3-565f88dbc9ea',
-        'md5': '3870f94153e40e7121a6e46c068b70cb',
+        # Regular VOD
+        'url': 'https://kick.com/xqc/videos/5c697a87-afce-4256-b01f-3c8fe71ef5cb',
         'info_dict': {
-            'id': '8dd97a8d-e17f-48fb-8bc3-565f88dbc9ea',
+            'id': '5c697a87-afce-4256-b01f-3c8fe71ef5cb',
             'ext': 'mp4',
-            'title': '18+ #ad 🛑LIVE🛑CLICK🛑DRAMA🛑NEWS🛑STUFF🛑REACT🛑GET IN HHERE🛑BOP BOP🛑WEEEE WOOOO🛑',
+            'title': '🐗LIVE🐗CLICK🐗HERE🐗DRAMA🐗ALL DAY🐗NEWS🐗VIDEOS🐗CLIPS🐗GAMES🐗STUFF🐗WOW🐗IM HERE🐗LETS GO🐗COOL🐗VERY NICE🐗',
             'description': 'THE BEST AT ABSOLUTELY EVERYTHING. THE JUICER. LEADER OF THE JUICERS.',
-            'channel': 'xqc',
-            'channel_id': '668',
             'uploader': 'xQc',
             'uploader_id': '676',
-            'upload_date': '20240909',
-            'timestamp': 1725919141,
-            'duration': 10155.0,
-            'thumbnail': r're:^https?://.*\.jpg',
+            'channel': 'xqc',
+            'channel_id': '668',
             'view_count': int,
-            'categories': ['Just Chatting'],
-            'age_limit': 0,
+            'age_limit': 18,
+            'duration': 22278.0,
+            'thumbnail': r're:^https?://.*\.jpg',
+            'categories': ['Deadlock'],
+            'timestamp': 1756082443,
+            'upload_date': '20250825',
         },
         'params': {'skip_download': 'm3u8'},
+    }, {
+        # VOD of ongoing livestream (at the time of writing the test, ID rotates every two days)
+        'url': 'https://kick.com/a-log-burner/videos/5230df84-ea38-46e1-be4f-f5949ae55641',
+        'info_dict': {
+            'id': '5230df84-ea38-46e1-be4f-f5949ae55641',
+            'ext': 'mp4',
+            'title': r're:😴 Cozy Fireplace ASMR 🔥 | Relax, Focus, Sleep 💤',
+            'description': 'md5:080bc713eac0321a7b376a1b53816d1b',
+            'uploader': 'A_Log_Burner',
+            'uploader_id': '65114691',
+            'channel': 'a-log-burner',
+            'channel_id': '63967687',
+            'view_count': int,
+            'age_limit': 18,
+            'thumbnail': r're:^https?://.*\.jpg',
+            'categories': ['Other, Watch Party'],
+            'timestamp': int,
+            'upload_date': str,
+            'live_status': 'is_live',
+        },
+        'skip': 'live',
     }]

     def _real_extract(self, url):
@@ -137,6 +158,7 @@ class KickVODIE(KickBaseIE):
                 'categories': ('livestream', 'categories', ..., 'name', {str}),
                 'view_count': ('views', {int_or_none}),
                 'age_limit': ('livestream', 'is_mature', {bool}, {lambda x: 18 if x else 0}),
+                'is_live': ('livestream', 'is_live', {bool}),
             }),
         }
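For readers unfamiliar with yt-dlp's traversal helper: the added line maps a boolean out of the API response. A self-contained illustration, where the `data` dict is a made-up stand-in for Kick's API response:

    from yt_dlp.utils.traversal import traverse_obj

    data = {'livestream': {'is_live': True, 'is_mature': False}, 'views': 42}
    traverse_obj(data, ('livestream', 'is_live', {bool}))  # -> True ({bool} keeps only real booleans)
    traverse_obj(data, ('livestream', 'is_mature', {bool}, {lambda x: 18 if x else 0}))  # -> 0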


@@ -1,22 +1,14 @@
 from .common import InfoExtractor
 from ..utils import (
     clean_html,
-    merge_dicts,
-    traverse_obj,
     unified_timestamp,
     url_or_none,
     urljoin,
 )
+from ..utils.traversal import traverse_obj


-class LRTBaseIE(InfoExtractor):
-    def _extract_js_var(self, webpage, var_name, default=None):
-        return self._search_regex(
-            fr'{var_name}\s*=\s*(["\'])((?:(?!\1).)+)\1',
-            webpage, var_name.replace('_', ' '), default, group=2)
-
-
-class LRTStreamIE(LRTBaseIE):
+class LRTStreamIE(InfoExtractor):
     _VALID_URL = r'https?://(?:www\.)?lrt\.lt/mediateka/tiesiogiai/(?P<id>[\w-]+)'
     _TESTS = [{
         'url': 'https://www.lrt.lt/mediateka/tiesiogiai/lrt-opus',
@@ -31,86 +23,110 @@ class LRTStreamIE(LRTBaseIE):
     def _real_extract(self, url):
         video_id = self._match_id(url)
         webpage = self._download_webpage(url, video_id)
-        streams_data = self._download_json(self._extract_js_var(webpage, 'tokenURL'), video_id)
+        # TODO: Use _search_nextjs_v13_data once fixed
+        get_stream_url = self._search_regex(
+            r'\\"get_streams_url\\":\\"([^"]+)\\"', webpage, 'stream URL')
+        streams_data = self._download_json(get_stream_url, video_id)

         formats, subtitles = [], {}
         for stream_url in traverse_obj(streams_data, (
-                'response', 'data', lambda k, _: k.startswith('content')), expected_type=url_or_none):
-            fmts, subs = self._extract_m3u8_formats_and_subtitles(stream_url, video_id, 'mp4', m3u8_id='hls', live=True)
+                'response', 'data', lambda k, _: k.startswith('content'), {url_or_none})):
+            fmts, subs = self._extract_m3u8_formats_and_subtitles(
+                stream_url, video_id, 'mp4', m3u8_id='hls', live=True)
             formats.extend(fmts)
             subtitles = self._merge_subtitles(subtitles, subs)

-        stream_title = self._extract_js_var(webpage, 'video_title', 'LRT')
         return {
             'id': video_id,
             'formats': formats,
             'subtitles': subtitles,
             'is_live': True,
-            'title': f'{self._og_search_title(webpage)} - {stream_title}',
+            'title': self._og_search_title(webpage),
         }


-class LRTVODIE(LRTBaseIE):
-    _VALID_URL = r'https?://(?:www\.)?lrt\.lt(?P<path>/mediateka/irasas/(?P<id>[0-9]+))'
+class LRTVODIE(InfoExtractor):
+    _VALID_URL = [
+        r'https?://(?:(?:www|archyvai)\.)?lrt\.lt/mediateka/irasas/(?P<id>[0-9]+)',
+        r'https?://(?:(?:www|archyvai)\.)?lrt\.lt/mediateka/video/[^?#]+\?(?:[^#]*&)?episode=(?P<id>[0-9]+)',
+    ]
     _TESTS = [{
         # m3u8 download
         'url': 'https://www.lrt.lt/mediateka/irasas/2000127261/greita-ir-gardu-sicilijos-ikvepta-klasikiniu-makaronu-su-baklazanais-vakariene',
         'info_dict': {
             'id': '2000127261',
             'ext': 'mp4',
-            'title': 'Greita ir gardu: Sicilijos įkvėpta klasikinių makaronų su baklažanais vakarienė',
+            'title': 'Nustebinkite svečius klasikiniu makaronų su baklažanais receptu',
             'description': 'md5:ad7d985f51b0dc1489ba2d76d7ed47fa',
-            'duration': 3035,
-            'timestamp': 1604079000,
+            'timestamp': 1604086200,
             'upload_date': '20201030',
             'tags': ['LRT TELEVIZIJA', 'Beatos virtuvė', 'Beata Nicholson', 'Makaronai', 'Baklažanai', 'Vakarienė', 'Receptas'],
             'thumbnail': 'https://www.lrt.lt/img/2020/10/30/764041-126478-1287x836.jpg',
+            'channel': 'Beatos virtuvė',
         },
     }, {
-        # direct mp3 download
-        'url': 'http://www.lrt.lt/mediateka/irasas/1013074524/',
-        'md5': '389da8ca3cad0f51d12bed0c844f6a0a',
+        # audio download
+        'url': 'https://www.lrt.lt/mediateka/irasas/1013074524/kita-tema',
+        'md5': 'fc982f10274929c66fdff65f75615cb0',
         'info_dict': {
             'id': '1013074524',
-            'ext': 'mp3',
-            'title': 'Kita tema 2016-09-05 15:05',
+            'ext': 'mp4',
+            'title': 'Kita tema',
             'description': 'md5:1b295a8fc7219ed0d543fc228c931fb5',
-            'duration': 3008,
-            'view_count': int,
-            'like_count': int,
+            'channel': 'Kita tema',
+            'timestamp': 1473087900,
+            'upload_date': '20160905',
         },
+    }, {
+        'url': 'https://www.lrt.lt/mediateka/video/auksinis-protas-vasara?episode=2000420320&season=%2Fmediateka%2Fvideo%2Fauksinis-protas-vasara%2F2025',
+        'info_dict': {
+            'id': '2000420320',
+            'ext': 'mp4',
+            'title': 'Kuris senovės romėnų poetas aprašė Narcizo mitą?',
+            'description': 'Intelektinė viktorina. Ved. Arūnas Valinskas ir Andrius Tapinas.',
+            'channel': 'Auksinis protas. Vasara',
+            'thumbnail': 'https://www.lrt.lt/img/2025/06/09/2094343-987905-1287x836.jpg',
+            'tags': ['LRT TELEVIZIJA', 'Auksinis protas'],
+            'timestamp': 1749851040,
+            'upload_date': '20250613',
+        },
+    }, {
+        'url': 'https://archyvai.lrt.lt/mediateka/video/ziniu-riteriai-ir-damos?episode=49685&season=%2Fmediateka%2Fvideo%2Fziniu-riteriai-ir-damos%2F2013',
+        'only_matching': True,
+    }, {
+        'url': 'https://archyvai.lrt.lt/mediateka/irasas/2000077058/panorama-1989-baltijos-kelias',
+        'only_matching': True,
     }]

     def _real_extract(self, url):
-        path, video_id = self._match_valid_url(url).group('path', 'id')
+        video_id = self._match_id(url)
         webpage = self._download_webpage(url, video_id)

-        media_url = self._extract_js_var(webpage, 'main_url', path)
-        media = self._download_json(self._extract_js_var(
-            webpage, 'media_info_url',
-            'https://www.lrt.lt/servisai/stream_url/vod/media_info/'),
-            video_id, query={'url': media_url})
+        # TODO: Use _search_nextjs_v13_data once fixed
+        canonical_url = (
+            self._search_regex(r'\\"(?:article|data)\\":{[^}]*\\"url\\":\\"(/[^"]+)\\"', webpage, 'content URL', fatal=False)
+            or self._search_regex(r'<link\s+rel="canonical"\s*href="(/[^"]+)"', webpage, 'canonical URL'))
+        media = self._download_json(
+            'https://www.lrt.lt/servisai/stream_url/vod/media_info/',
+            video_id, query={'url': canonical_url})
         jw_data = self._parse_jwplayer_data(
             media['playlist_item'], video_id, base_url=url)
-        json_ld_data = self._search_json_ld(webpage, video_id)
-
-        tags = []
-        for tag in (media.get('tags') or []):
-            tag_name = tag.get('name')
-            if not tag_name:
-                continue
-            tags.append(tag_name)
-
-        clean_info = {
-            'description': clean_html(media.get('content')),
-            'tags': tags,
-        }
-
-        return merge_dicts(clean_info, jw_data, json_ld_data)
+        return {
+            **jw_data,
+            **traverse_obj(media, {
                'id': ('id', {str}),
                'title': ('title', {str}),
                'description': ('content', {clean_html}),
                'timestamp': ('date', {lambda x: x.replace('.', '/')}, {unified_timestamp}),
                'tags': ('tags', ..., 'name', {str}),
            }),
        }


-class LRTRadioIE(LRTBaseIE):
+class LRTRadioIE(InfoExtractor):
     _VALID_URL = r'https?://(?:www\.)?lrt\.lt/radioteka/irasas/(?P<id>\d+)/(?P<path>[^?#/]+)'
     _TESTS = [{
         # m3u8 download
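The LRT fix reads its config URLs out of Next.js flight data, where the JSON is embedded with escaped quotes; hence the doubled backslashes in the patterns above. A toy reproduction (the HTML snippet is fabricated):

    import re

    html = r'... \"get_streams_url\":\"https://www.lrt.lt/servisai/stream_url/live/get_live_url.php?channel=LTV1\" ...'
    match = re.search(r'\\"get_streams_url\\":\\"([^"]+)\\"', html)
    print(match.group(1))  # the embedded URL, without the surrounding escape characters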


@@ -111,8 +111,12 @@ class MySpaceIE(InfoExtractor):
             search_data('stream-url'), search_data('hls-stream-url'),
             search_data('http-stream-url'))
         if not formats:
+            vevo_id = search_data('vevo-id')
             youtube_id = search_data('youtube-id')
-            if youtube_id:
+            if vevo_id:
+                self.to_screen(f'Vevo video detected: {vevo_id}')
+                return self.url_result(f'vevo:{vevo_id}', ie='Vevo')
+            elif youtube_id:
                 self.to_screen(f'Youtube video detected: {youtube_id}')
                 return self.url_result(youtube_id, ie='Youtube')
             else:


@@ -45,6 +45,8 @@ class TVerIE(StreaksBaseIE):
             'release_timestamp': 1651453200,
             'release_date': '20220502',
             '_old_archive_ids': ['brightcovenew ref:baeebeac-a2a6-4dbf-9eb3-c40d59b40068'],
+            'series_id': 'sru35hwdd2',
+            'season_id': 'ss2lcn4af6',
         },
     }, {
         # via Brightcove backend (deprecated)
@@ -67,6 +69,8 @@ class TVerIE(StreaksBaseIE):
             'upload_date': '20220501',
             'release_timestamp': 1651453200,
             'release_date': '20220502',
+            'series_id': 'sru35hwdd2',
+            'season_id': 'ss2lcn4af6',
         },
         'params': {'extractor_args': {'tver': {'backend': ['brightcove']}}},
     }, {
@@ -202,6 +206,8 @@ class TVerIE(StreaksBaseIE):
                 'description': ('description', {str}),
                 'release_timestamp': ('viewStatus', 'startAt', {int_or_none}),
                 'episode_number': ('no', {int_or_none}),
+                'series_id': ('seriesID', {str}),
+                'season_id': ('seasonID', {str}),
             }),
         }

yt_dlp/extractor/vevo.py Normal file

@@ -0,0 +1,352 @@
import json
import re
from .common import InfoExtractor
from ..networking.exceptions import HTTPError
from ..utils import (
ExtractorError,
int_or_none,
parse_iso8601,
parse_qs,
)
class VevoBaseIE(InfoExtractor):
def _extract_json(self, webpage, video_id):
return self._parse_json(
self._search_regex(
r'window\.__INITIAL_STORE__\s*=\s*({.+?});\s*</script>',
webpage, 'initial store'),
video_id)
class VevoIE(VevoBaseIE):
"""
Accepts urls from vevo.com or in the format 'vevo:{id}'
(currently used by MTVIE and MySpaceIE)
"""
_VALID_URL = r'''(?x)
(?:https?://(?:www\.)?vevo\.com/watch/(?!playlist|genre)(?:[^/]+/(?:[^/]+/)?)?|
https?://cache\.vevo\.com/m/html/embed\.html\?video=|
https?://videoplayer\.vevo\.com/embed/embedded\?videoId=|
https?://embed\.vevo\.com/.*?[?&]isrc=|
https?://tv\.vevo\.com/watch/artist/(?:[^/]+)/|
vevo:)
(?P<id>[^&?#]+)'''
_EMBED_REGEX = [r'<iframe[^>]+?src=(["\'])(?P<url>(?:https?:)?//(?:cache\.)?vevo\.com/.+?)\1']
_TESTS = [{
'url': 'http://www.vevo.com/watch/hurts/somebody-to-die-for/GB1101300280',
'md5': '95ee28ee45e70130e3ab02b0f579ae23',
'info_dict': {
'id': 'GB1101300280',
'ext': 'mp4',
'title': 'Hurts - Somebody to Die For',
'timestamp': 1372057200,
'upload_date': '20130624',
'uploader': 'Hurts',
'track': 'Somebody to Die For',
'artist': 'Hurts',
'genre': 'Pop',
},
'expected_warnings': ['Unable to download SMIL file', 'Unable to download info'],
}, {
'note': 'v3 SMIL format',
'url': 'http://www.vevo.com/watch/cassadee-pope/i-wish-i-could-break-your-heart/USUV71302923',
'md5': 'f6ab09b034f8c22969020b042e5ac7fc',
'info_dict': {
'id': 'USUV71302923',
'ext': 'mp4',
'title': 'Cassadee Pope - I Wish I Could Break Your Heart',
'timestamp': 1392796919,
'upload_date': '20140219',
'uploader': 'Cassadee Pope',
'track': 'I Wish I Could Break Your Heart',
'artist': 'Cassadee Pope',
'genre': 'Country',
},
'expected_warnings': ['Unable to download SMIL file', 'Unable to download info'],
}, {
'note': 'Age-limited video',
'url': 'https://www.vevo.com/watch/justin-timberlake/tunnel-vision-explicit/USRV81300282',
'info_dict': {
'id': 'USRV81300282',
'ext': 'mp4',
'title': 'Justin Timberlake - Tunnel Vision (Explicit)',
'age_limit': 18,
'timestamp': 1372888800,
'upload_date': '20130703',
'uploader': 'Justin Timberlake',
'track': 'Tunnel Vision (Explicit)',
'artist': 'Justin Timberlake',
'genre': 'Pop',
},
'expected_warnings': ['Unable to download SMIL file', 'Unable to download info'],
}, {
'note': 'No video_info',
'url': 'http://www.vevo.com/watch/k-camp-1/Till-I-Die/USUV71503000',
'md5': '8b83cc492d72fc9cf74a02acee7dc1b0',
'info_dict': {
'id': 'USUV71503000',
'ext': 'mp4',
'title': 'K Camp ft. T.I. - Till I Die',
'age_limit': 18,
'timestamp': 1449468000,
'upload_date': '20151207',
'uploader': 'K Camp',
'track': 'Till I Die',
'artist': 'K Camp',
'genre': 'Hip-Hop',
},
'expected_warnings': ['Unable to download SMIL file', 'Unable to download info'],
}, {
'note': 'Featured test',
'url': 'https://www.vevo.com/watch/lemaitre/Wait/USUV71402190',
'md5': 'd28675e5e8805035d949dc5cf161071d',
'info_dict': {
'id': 'USUV71402190',
'ext': 'mp4',
'title': 'Lemaitre ft. LoLo - Wait',
'age_limit': 0,
'timestamp': 1413432000,
'upload_date': '20141016',
'uploader': 'Lemaitre',
'track': 'Wait',
'artist': 'Lemaitre',
'genre': 'Electronic',
},
'expected_warnings': ['Unable to download SMIL file', 'Unable to download info'],
}, {
'note': 'Only available via webpage',
'url': 'http://www.vevo.com/watch/GBUV71600656',
'md5': '67e79210613865b66a47c33baa5e37fe',
'info_dict': {
'id': 'GBUV71600656',
'ext': 'mp4',
'title': 'ABC - Viva Love',
'age_limit': 0,
'timestamp': 1461830400,
'upload_date': '20160428',
'uploader': 'ABC',
'track': 'Viva Love',
'artist': 'ABC',
'genre': 'Pop',
},
'expected_warnings': ['Failed to download video versions info'],
}, {
# no genres available
'url': 'http://www.vevo.com/watch/INS171400764',
'only_matching': True,
}, {
# Another case available only via the webpage; using streams/streamsV3 formats
# Geo-restricted to Netherlands/Germany
'url': 'http://www.vevo.com/watch/boostee/pop-corn-clip-officiel/FR1A91600909',
'only_matching': True,
}, {
'url': 'https://embed.vevo.com/?isrc=USH5V1923499&partnerId=4d61b777-8023-4191-9ede-497ed6c24647&partnerAdCode=',
'only_matching': True,
}, {
'url': 'https://tv.vevo.com/watch/artist/janet-jackson/US0450100550',
'only_matching': True,
}]
_VERSIONS = {
0: 'youtube', # only in AuthenticateVideo videoVersions
1: 'level3',
2: 'akamai',
3: 'level3',
4: 'amazon',
}
def _initialize_api(self, video_id):
webpage = self._download_webpage(
'https://accounts.vevo.com/token', None,
note='Retrieving oauth token',
errnote='Unable to retrieve oauth token',
data=json.dumps({
'client_id': 'SPupX1tvqFEopQ1YS6SS',
'grant_type': 'urn:vevo:params:oauth:grant-type:anonymous',
}).encode(),
headers={
'Content-Type': 'application/json',
})
if re.search(r'(?i)THIS PAGE IS CURRENTLY UNAVAILABLE IN YOUR REGION', webpage):
self.raise_geo_restricted(
f'{self.IE_NAME} said: This page is currently unavailable in your region')
auth_info = self._parse_json(webpage, video_id)
self._api_url_template = self.http_scheme() + '//apiv2.vevo.com/%s?token=' + auth_info['legacy_token']
def _call_api(self, path, *args, **kwargs):
try:
data = self._download_json(self._api_url_template % path, *args, **kwargs)
except ExtractorError as e:
if isinstance(e.cause, HTTPError):
errors = self._parse_json(e.cause.response.read().decode(), None)['errors']
error_message = ', '.join([error['message'] for error in errors])
raise ExtractorError(f'{self.IE_NAME} said: {error_message}', expected=True)
raise
return data
def _real_extract(self, url):
video_id = self._match_id(url)
self._initialize_api(video_id)
video_info = self._call_api(
f'video/{video_id}', video_id, 'Downloading api video info',
'Failed to download video info')
video_versions = self._call_api(
f'video/{video_id}/streams', video_id,
'Downloading video versions info',
'Failed to download video versions info',
fatal=False)
# Some videos are only available via webpage (e.g.
# https://github.com/ytdl-org/youtube-dl/issues/9366)
if not video_versions:
webpage = self._download_webpage(url, video_id)
json_data = self._extract_json(webpage, video_id)
if 'streams' in json_data.get('default', {}):
video_versions = json_data['default']['streams'][video_id][0]
else:
video_versions = [
value
for key, value in json_data['apollo']['data'].items()
if key.startswith(f'{video_id}.streams')]
uploader = None
artist = None
featured_artist = None
artists = video_info.get('artists')
for curr_artist in artists:
if curr_artist.get('role') == 'Featured':
featured_artist = curr_artist['name']
else:
artist = uploader = curr_artist['name']
formats = []
for video_version in video_versions:
version = self._VERSIONS.get(video_version.get('version'), 'generic')
version_url = video_version.get('url')
if not version_url:
continue
if '.ism' in version_url:
continue
elif '.mpd' in version_url:
formats.extend(self._extract_mpd_formats(
version_url, video_id, mpd_id=f'dash-{version}',
note=f'Downloading {version} MPD information',
errnote=f'Failed to download {version} MPD information',
fatal=False))
elif '.m3u8' in version_url:
formats.extend(self._extract_m3u8_formats(
version_url, video_id, 'mp4', 'm3u8_native',
m3u8_id=f'hls-{version}',
note=f'Downloading {version} m3u8 information',
errnote=f'Failed to download {version} m3u8 information',
fatal=False))
else:
m = re.search(r'''(?xi)
_(?P<quality>[a-z0-9]+)
_(?P<width>[0-9]+)x(?P<height>[0-9]+)
_(?P<vcodec>[a-z0-9]+)
_(?P<vbr>[0-9]+)
_(?P<acodec>[a-z0-9]+)
_(?P<abr>[0-9]+)
\.(?P<ext>[a-z0-9]+)''', version_url)
if not m:
continue
formats.append({
'url': version_url,
'format_id': f'http-{version}-{video_version.get("quality") or m.group("quality")}',
'vcodec': m.group('vcodec'),
'acodec': m.group('acodec'),
'vbr': int(m.group('vbr')),
'abr': int(m.group('abr')),
'ext': m.group('ext'),
'width': int(m.group('width')),
'height': int(m.group('height')),
})
track = video_info['title']
if featured_artist:
artist = f'{artist} ft. {featured_artist}'
title = f'{artist} - {track}' if artist else track
genres = video_info.get('genres')
genre = (
genres[0] if genres and isinstance(genres, list)
and isinstance(genres[0], str) else None)
is_explicit = video_info.get('isExplicit')
if is_explicit is True:
age_limit = 18
elif is_explicit is False:
age_limit = 0
else:
age_limit = None
return {
'id': video_id,
'title': title,
'formats': formats,
'thumbnail': video_info.get('imageUrl') or video_info.get('thumbnailUrl'),
'timestamp': parse_iso8601(video_info.get('releaseDate')),
'uploader': uploader,
'duration': int_or_none(video_info.get('duration')),
'view_count': int_or_none(video_info.get('views', {}).get('total')),
'age_limit': age_limit,
'track': track,
'artist': uploader,
'genre': genre,
}
class VevoPlaylistIE(VevoBaseIE):
_VALID_URL = r'https?://(?:www\.)?vevo\.com/watch/(?P<kind>playlist|genre)/(?P<id>[^/?#&]+)'
_TESTS = [{
'url': 'http://www.vevo.com/watch/genre/rock',
'info_dict': {
'id': 'rock',
'title': 'Rock',
},
'playlist_count': 20,
}, {
'url': 'http://www.vevo.com/watch/genre/rock?index=0',
'only_matching': True,
}]
def _real_extract(self, url):
mobj = self._match_valid_url(url)
playlist_id = mobj.group('id')
playlist_kind = mobj.group('kind')
webpage = self._download_webpage(url, playlist_id)
qs = parse_qs(url)
index = qs.get('index', [None])[0]
if index:
video_id = self._search_regex(
r'<meta[^>]+content=(["\'])vevo://video/(?P<id>.+?)\1[^>]*>',
webpage, 'video id', default=None, group='id')
if video_id:
return self.url_result(f'vevo:{video_id}', VevoIE.ie_key())
playlists = self._extract_json(webpage, playlist_id)['default'][f'{playlist_kind}s']
playlist = (next(iter(playlists.values()))
if playlist_kind == 'playlist' else playlists[playlist_id])
entries = [
self.url_result(f'vevo:{src}', VevoIE.ie_key())
for src in playlist['isrcs']]
return self.playlist_result(
entries, playlist.get('playlistId') or playlist_id,
playlist.get('name'), playlist.get('description'))


@@ -58,15 +58,28 @@ def _get_variant_and_executable_path():
"""@returns (variant, executable_path)""" """@returns (variant, executable_path)"""
if getattr(sys, 'frozen', False): if getattr(sys, 'frozen', False):
path = sys.executable path = sys.executable
# py2exe is unsupported but we should still correctly identify it for debugging purposes
# py2exe: No longer officially supported, but still identify it to block updates
if not hasattr(sys, '_MEIPASS'): if not hasattr(sys, '_MEIPASS'):
return 'py2exe', path return 'py2exe', path
if sys._MEIPASS == os.path.dirname(path):
return f'{sys.platform}_dir', path # staticx builds: sys.executable returns a /tmp/ path
if sys.platform == 'darwin': # No longer officially supported, but still identify them to block updates
# Ref: https://staticx.readthedocs.io/en/latest/usage.html#run-time-information
if static_exe_path := os.getenv('STATICX_PROG_PATH'):
return 'linux_static_exe', static_exe_path
# We know it's a PyInstaller bundle, but is it "onedir" or "onefile"?
suffix = 'dir' if sys._MEIPASS == os.path.dirname(path) else 'exe'
system_platform = remove_end(sys.platform, '32')
if system_platform == 'darwin':
# darwin_legacy_exe is no longer supported, but still identify it to block updates # darwin_legacy_exe is no longer supported, but still identify it to block updates
machine = '_legacy' if version_tuple(platform.mac_ver()[0]) < (10, 15) else '' machine = '_legacy' if version_tuple(platform.mac_ver()[0]) < (10, 15) else ''
return f'darwin{machine}_exe', path return f'darwin{machine}_{suffix}', path
if system_platform == 'linux' and platform.libc_ver()[0] != 'glibc':
system_platform = 'musllinux'
machine = f'_{platform.machine().lower()}' machine = f'_{platform.machine().lower()}'
is_64bits = sys.maxsize > 2**32 is_64bits = sys.maxsize > 2**32
@@ -77,12 +90,8 @@ def _get_variant_and_executable_path():
# See: https://github.com/yt-dlp/yt-dlp/issues/11813 # See: https://github.com/yt-dlp/yt-dlp/issues/11813
elif machine[1:] == 'aarch64' and not is_64bits: elif machine[1:] == 'aarch64' and not is_64bits:
machine = '_armv7l' machine = '_armv7l'
# sys.executable returns a /tmp/ path for staticx builds (linux_static)
# Ref: https://staticx.readthedocs.io/en/latest/usage.html#run-time-information
if static_exe_path := os.getenv('STATICX_PROG_PATH'):
path = static_exe_path
return f'{remove_end(sys.platform, "32")}{machine}_exe', path return f'{system_platform}{machine}_{suffix}', path
path = os.path.dirname(__file__) path = os.path.dirname(__file__)
if isinstance(__loader__, zipimporter): if isinstance(__loader__, zipimporter):
@@ -118,7 +127,8 @@ _FILE_SUFFIXES = {
'darwin_exe': '_macos', 'darwin_exe': '_macos',
'linux_exe': '_linux', 'linux_exe': '_linux',
'linux_aarch64_exe': '_linux_aarch64', 'linux_aarch64_exe': '_linux_aarch64',
'linux_armv7l_exe': '_linux_armv7l', 'musllinux_exe': '_musllinux',
'musllinux_aarch64_exe': '_musllinux_aarch64',
} }
_NON_UPDATEABLE_REASONS = { _NON_UPDATEABLE_REASONS = {
@@ -146,21 +156,6 @@ def _get_binary_name():
def _get_system_deprecation(): def _get_system_deprecation():
MIN_SUPPORTED, MIN_RECOMMENDED = (3, 9), (3, 10) MIN_SUPPORTED, MIN_RECOMMENDED = (3, 9), (3, 10)
EXE_MSG_TMPL = ('Support for {} has been deprecated. '
'See https://github.com/yt-dlp/yt-dlp/{} for details.\n{}')
STOP_MSG = 'You may stop receiving updates on this version at any time!'
variant = detect_variant()
# Temporary until linux_armv7l executable builds are discontinued
if variant == 'linux_armv7l_exe':
return EXE_MSG_TMPL.format(
f'{variant} (the PyInstaller-bundled executable for the Linux armv7l platform)',
'issues/13976', STOP_MSG)
# Temporary until linux_aarch64_exe is built with Python >=3.10 instead of Python 3.9
if variant == 'linux_aarch64_exe':
return None
if sys.version_info > MIN_RECOMMENDED: if sys.version_info > MIN_RECOMMENDED:
return None return None
@@ -199,16 +194,14 @@ def _sha256_file(path):
def _make_label(origin, tag, version=None): def _make_label(origin, tag, version=None):
if '/' in origin: if tag != version:
channel = _INVERSE_UPDATE_SOURCES.get(origin, origin) if version:
else: return f'{origin}@{tag} build {version}'
channel = origin return f'{origin}@{tag}'
label = f'{channel}@{tag}'
if version and version != tag: if channel := _INVERSE_UPDATE_SOURCES.get(origin):
label += f' build {version}' return f'{channel}@{tag} from {origin}'
if channel != origin: return f'{origin}@{tag}'
label += f' from {origin}'
return label
@dataclass @dataclass
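Concrete outputs of the rewritten _make_label, taken from the expectations in the new test_make_label above (the repos and tags are the test's examples):

    # _make_label('yt-dlp/yt-dlp', '2025.09.02', '2025.09.02')
    #     -> 'stable@2025.09.02 from yt-dlp/yt-dlp'   (tag == version; a known origin is shown by channel name)
    # _make_label('fork/yt-dlp', 'experimental', '2025.12.31.000000')
    #     -> 'fork/yt-dlp@experimental build 2025.12.31.000000'   (tag != version, version known)
    # _make_label('fork/yt-dlp', 'experimental')
    #     -> 'fork/yt-dlp@experimental'   (no version known)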


@@ -1,8 +1,8 @@
 # Autogenerated by devscripts/update-version.py

-__version__ = '2025.08.27'
+__version__ = '2025.09.05'

-RELEASE_GIT_HEAD = '8cd37b85d492edb56a4f7506ea05527b85a6b02b'
+RELEASE_GIT_HEAD = '50136eeeb3767289b236f140b759f23b39b00888'

 VARIANT = None
@@ -12,4 +12,4 @@ CHANNEL = 'stable'
 ORIGIN = 'yt-dlp/yt-dlp'

-_pkg_version = '2025.08.27'
+_pkg_version = '2025.09.05'