mirror of
https://github.com/yt-dlp/yt-dlp.git
synced 2026-01-12 01:41:26 +00:00
Compare commits
75 Commits
2025.09.05
...
2025.10.22
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a75399d89f | ||
|
|
c9356f308d | ||
|
|
de7b3c0705 | ||
|
|
2c9091e355 | ||
|
|
dfc0a84c19 | ||
|
|
fe5ae54a7b | ||
|
|
78748b506f | ||
|
|
c7bda2192a | ||
|
|
4e6a693057 | ||
|
|
264044286d | ||
|
|
a98e7f9f58 | ||
|
|
0ea5d5882d | ||
|
|
cdc533b114 | ||
|
|
c2e124881f | ||
|
|
ad55bfcfb7 | ||
|
|
739125d40f | ||
|
|
5f94f05490 | ||
|
|
5d7678195a | ||
|
|
eafedc2181 | ||
|
|
8eb8695139 | ||
|
|
df160ab18d | ||
|
|
6d41aaf21c | ||
|
|
a6673a8e82 | ||
|
|
87be1bb96a | ||
|
|
ccc25d6710 | ||
|
|
5513036104 | ||
|
|
bd5ed90419 | ||
|
|
88e2a2de8e | ||
|
|
12b57d2858 | ||
|
|
b7b7910d96 | ||
|
|
50e452fd7d | ||
|
|
94c5622be9 | ||
|
|
7df5acc546 | ||
|
|
4429fd0450 | ||
|
|
2e81e298cd | ||
|
|
7f5d9f8543 | ||
|
|
f8750504c2 | ||
|
|
8821682f15 | ||
|
|
08d7899683 | ||
|
|
98b6b0d339 | ||
|
|
bf5d18016b | ||
|
|
4bc19adc87 | ||
|
|
b2c01d0498 | ||
|
|
e123a48f11 | ||
|
|
820c6e2445 | ||
|
|
677997d84e | ||
|
|
b81e9272dc | ||
|
|
df4b4e8ccf | ||
|
|
f3829463c7 | ||
|
|
ae3923b6b2 | ||
|
|
8ab262c66b | ||
|
|
e2d37bcc8e | ||
|
|
eb4b3a5fc7 | ||
|
|
65e90aea29 | ||
|
|
17bfaa53ed | ||
|
|
8cb037c0b0 | ||
|
|
7d9e48b22a | ||
|
|
f5cb721185 | ||
|
|
83b8409366 | ||
|
|
ba80446855 | ||
|
|
22ea0688ed | ||
|
|
5c1abcdc49 | ||
|
|
3d9a88bd8e | ||
|
|
9def9a4b0e | ||
|
|
679587dac7 | ||
|
|
a1c98226a4 | ||
|
|
c8ede5f34d | ||
|
|
a183837ec8 | ||
|
|
067062bb87 | ||
|
|
8597a4331e | ||
|
|
48a214bef4 | ||
|
|
6a763a55d8 | ||
|
|
e6e6b51214 | ||
|
|
7c9b10ebc8 | ||
|
|
cd94e70040 |
2
.github/FUNDING.yml
vendored
2
.github/FUNDING.yml
vendored
@@ -10,4 +10,4 @@ liberapay: # Replace with a single Liberapay username
|
||||
issuehunt: # Replace with a single IssueHunt username
|
||||
otechie: # Replace with a single Otechie username
|
||||
|
||||
custom: ['https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators']
|
||||
custom: ['https://github.com/yt-dlp/yt-dlp/blob/master/Maintainers.md#maintainers']
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/1_broken_site.yml
vendored
2
.github/ISSUE_TEMPLATE/1_broken_site.yml
vendored
@@ -24,6 +24,8 @@ body:
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
||||
- type: input
|
||||
id: region
|
||||
|
||||
@@ -24,6 +24,8 @@ body:
|
||||
required: true
|
||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
|
||||
- type: input
|
||||
id: region
|
||||
|
||||
@@ -22,6 +22,8 @@ body:
|
||||
required: true
|
||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
||||
- type: input
|
||||
id: region
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/4_bug_report.yml
vendored
2
.github/ISSUE_TEMPLATE/4_bug_report.yml
vendored
@@ -20,6 +20,8 @@ body:
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/5_feature_request.yml
vendored
2
.github/ISSUE_TEMPLATE/5_feature_request.yml
vendored
@@ -22,6 +22,8 @@ body:
|
||||
required: true
|
||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/6_question.yml
vendored
2
.github/ISSUE_TEMPLATE/6_question.yml
vendored
@@ -28,6 +28,8 @@ body:
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar questions **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- type: textarea
|
||||
id: question
|
||||
attributes:
|
||||
|
||||
@@ -20,6 +20,8 @@ body:
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
||||
- type: input
|
||||
id: region
|
||||
|
||||
@@ -20,6 +20,8 @@ body:
|
||||
required: true
|
||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
|
||||
- type: input
|
||||
id: region
|
||||
|
||||
@@ -18,6 +18,8 @@ body:
|
||||
required: true
|
||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
|
||||
- type: input
|
||||
id: region
|
||||
|
||||
2
.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml
vendored
2
.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml
vendored
@@ -16,6 +16,8 @@ body:
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
|
||||
@@ -18,6 +18,8 @@ body:
|
||||
required: true
|
||||
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
|
||||
2
.github/ISSUE_TEMPLATE_tmpl/6_question.yml
vendored
2
.github/ISSUE_TEMPLATE_tmpl/6_question.yml
vendored
@@ -24,6 +24,8 @@ body:
|
||||
required: true
|
||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar questions **including closed ones**. DO NOT post duplicates
|
||||
required: true
|
||||
- label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
required: true
|
||||
- type: textarea
|
||||
id: question
|
||||
attributes:
|
||||
|
||||
1
.github/PULL_REQUEST_TEMPLATE.md
vendored
1
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -33,6 +33,7 @@ Fixes #
|
||||
### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check those that apply and remove the others:
|
||||
- [ ] I am the original author of the code in this PR, and I am willing to release it under [Unlicense](http://unlicense.org/)
|
||||
- [ ] I am not the original author of the code in this PR, but it is in the public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)
|
||||
- [ ] I have read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
|
||||
|
||||
### What is the purpose of your *pull request*? Check those that apply and remove the others:
|
||||
- [ ] Fix or improvement to an extractor (Make sure to add/update tests)
|
||||
|
||||
28
.github/actionlint.yml
vendored
Normal file
28
.github/actionlint.yml
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
self-hosted-runner:
|
||||
labels:
|
||||
# Workaround for the outdated runner list in actionlint v1.7.7
|
||||
# Ref: https://github.com/rhysd/actionlint/issues/533
|
||||
- windows-11-arm
|
||||
|
||||
config-variables:
|
||||
- KEEP_CACHE_WARM
|
||||
- PUSH_VERSION_COMMIT
|
||||
- UPDATE_TO_VERIFICATION
|
||||
- PYPI_PROJECT
|
||||
- PYPI_SUFFIX
|
||||
- NIGHTLY_PYPI_PROJECT
|
||||
- NIGHTLY_PYPI_SUFFIX
|
||||
- NIGHTLY_ARCHIVE_REPO
|
||||
- BUILD_NIGHTLY
|
||||
- MASTER_PYPI_PROJECT
|
||||
- MASTER_PYPI_SUFFIX
|
||||
- MASTER_ARCHIVE_REPO
|
||||
- BUILD_MASTER
|
||||
- ISSUE_LOCKDOWN
|
||||
- SANITIZE_COMMENT
|
||||
|
||||
paths:
|
||||
.github/workflows/build.yml:
|
||||
ignore:
|
||||
# SC1090 "Can't follow non-constant source": ignore when using `source` to activate venv
|
||||
- '.+SC1090.+'
|
||||
308
.github/workflows/build.yml
vendored
308
.github/workflows/build.yml
vendored
@@ -9,6 +9,9 @@ on:
|
||||
required: false
|
||||
default: stable
|
||||
type: string
|
||||
origin:
|
||||
required: true
|
||||
type: string
|
||||
unix:
|
||||
default: true
|
||||
type: boolean
|
||||
@@ -27,10 +30,6 @@ on:
|
||||
windows:
|
||||
default: true
|
||||
type: boolean
|
||||
origin:
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
secrets:
|
||||
GPG_SIGNING_KEY:
|
||||
required: false
|
||||
@@ -74,13 +73,6 @@ on:
|
||||
description: yt-dlp.exe, yt-dlp_win.zip, yt-dlp_x86.exe, yt-dlp_win_x86.zip, yt-dlp_arm64.exe, yt-dlp_win_arm64.zip
|
||||
default: true
|
||||
type: boolean
|
||||
origin:
|
||||
description: Origin
|
||||
required: false
|
||||
default: 'current repo'
|
||||
type: choice
|
||||
options:
|
||||
- 'current repo'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
@@ -89,25 +81,26 @@ jobs:
|
||||
process:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
origin: ${{ steps.process_origin.outputs.origin }}
|
||||
timestamp: ${{ steps.process_origin.outputs.timestamp }}
|
||||
version: ${{ steps.process_origin.outputs.version }}
|
||||
origin: ${{ steps.process_inputs.outputs.origin }}
|
||||
timestamp: ${{ steps.process_inputs.outputs.timestamp }}
|
||||
version: ${{ steps.process_inputs.outputs.version }}
|
||||
linux_matrix: ${{ steps.linux_matrix.outputs.matrix }}
|
||||
|
||||
steps:
|
||||
- name: Process origin
|
||||
id: process_origin
|
||||
- name: Process inputs
|
||||
id: process_inputs
|
||||
env:
|
||||
ORIGIN: ${{ inputs.origin }}
|
||||
INPUTS: ${{ toJSON(inputs) }}
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
VERSION: ${{ inputs.version }}
|
||||
shell: python
|
||||
run: |
|
||||
import datetime as dt
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
origin = os.environ['ORIGIN']
|
||||
INPUTS = json.loads(os.environ['INPUTS'])
|
||||
timestamp = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')
|
||||
version = os.getenv('VERSION')
|
||||
version = INPUTS.get('version')
|
||||
if version and '.' not in version:
|
||||
# build.yml was dispatched with only a revision as the version input value
|
||||
version_parts = [*timestamp.split('.')[:3], version]
|
||||
@@ -119,7 +112,7 @@ jobs:
|
||||
version_parts = version.split('.')
|
||||
assert all(re.fullmatch(r'[0-9]+', part) for part in version_parts), 'Version must be numeric'
|
||||
outputs = {
|
||||
'origin': os.environ['REPOSITORY'] if origin == 'current repo' else origin,
|
||||
'origin': INPUTS.get('origin') or os.environ['REPOSITORY'],
|
||||
'timestamp': timestamp,
|
||||
'version': '.'.join(version_parts),
|
||||
}
|
||||
@@ -127,6 +120,69 @@ jobs:
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
|
||||
f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
|
||||
|
||||
- name: Build Linux matrix
|
||||
id: linux_matrix
|
||||
env:
|
||||
INPUTS: ${{ toJSON(inputs) }}
|
||||
PYTHON_VERSION: '3.13'
|
||||
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
|
||||
shell: python
|
||||
run: |
|
||||
import json
|
||||
import os
|
||||
EXE_MAP = {
|
||||
'linux': [{
|
||||
'os': 'linux',
|
||||
'arch': 'x86_64',
|
||||
'runner': 'ubuntu-24.04',
|
||||
}, {
|
||||
'os': 'linux',
|
||||
'arch': 'aarch64',
|
||||
'runner': 'ubuntu-24.04-arm',
|
||||
}],
|
||||
'linux_armv7l': [{
|
||||
'os': 'linux',
|
||||
'arch': 'armv7l',
|
||||
'runner': 'ubuntu-24.04-arm',
|
||||
'qemu_platform': 'linux/arm/v7',
|
||||
'onefile': False,
|
||||
'cache_requirements': True,
|
||||
'update_to': 'yt-dlp/yt-dlp@2023.03.04',
|
||||
}],
|
||||
'musllinux': [{
|
||||
'os': 'musllinux',
|
||||
'arch': 'x86_64',
|
||||
'runner': 'ubuntu-24.04',
|
||||
}, {
|
||||
'os': 'musllinux',
|
||||
'arch': 'aarch64',
|
||||
'runner': 'ubuntu-24.04-arm',
|
||||
}],
|
||||
}
|
||||
INPUTS = json.loads(os.environ['INPUTS'])
|
||||
matrix = [exe for key, group in EXE_MAP.items() for exe in group if INPUTS.get(key)]
|
||||
if not matrix:
|
||||
# If we send an empty matrix when no linux inputs are given, the entire workflow fails
|
||||
matrix = [EXE_MAP['linux'][0]]
|
||||
for exe in matrix:
|
||||
exe['exe'] = '_'.join(filter(None, (
|
||||
'yt-dlp',
|
||||
exe['os'],
|
||||
exe['arch'] != 'x86_64' and exe['arch'],
|
||||
)))
|
||||
exe.setdefault('qemu_platform', None)
|
||||
exe.setdefault('onefile', True)
|
||||
exe.setdefault('onedir', True)
|
||||
exe.setdefault('cache_requirements', False)
|
||||
exe.setdefault('python_version', os.environ['PYTHON_VERSION'])
|
||||
exe.setdefault('update_to', os.environ['UPDATE_TO'])
|
||||
if not any(INPUTS.get(key) for key in EXE_MAP):
|
||||
print('skipping linux job')
|
||||
else:
|
||||
print(json.dumps(matrix, indent=2))
|
||||
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
|
||||
f.write(f'matrix={json.dumps(matrix)}')
|
||||
|
||||
unix:
|
||||
needs: process
|
||||
if: inputs.unix
|
||||
@@ -135,33 +191,41 @@ jobs:
|
||||
CHANNEL: ${{ inputs.channel }}
|
||||
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||
VERSION: ${{ needs.process.outputs.version }}
|
||||
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0 # Needed for changelog
|
||||
- uses: actions/setup-python@v5
|
||||
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
||||
- name: Install Requirements
|
||||
run: |
|
||||
sudo apt -y install zip pandoc man sed
|
||||
|
||||
- name: Prepare
|
||||
run: |
|
||||
python devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
|
||||
python devscripts/update_changelog.py -vv
|
||||
python devscripts/make_lazy_extractors.py
|
||||
|
||||
- name: Build Unix platform-independent binary
|
||||
run: |
|
||||
make all tar
|
||||
|
||||
- name: Verify --update-to
|
||||
if: vars.UPDATE_TO_VERIFICATION
|
||||
run: |
|
||||
chmod +x ./yt-dlp
|
||||
cp ./yt-dlp ./yt-dlp_downgraded
|
||||
version="$(./yt-dlp --version)"
|
||||
./yt-dlp_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||
./yt-dlp_downgraded -v --update-to "${UPDATE_TO}"
|
||||
downgraded_version="$(./yt-dlp_downgraded --version)"
|
||||
[[ "${version}" != "${downgraded_version}" ]]
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
@@ -172,153 +236,71 @@ jobs:
|
||||
compression-level: 0
|
||||
|
||||
linux:
|
||||
name: ${{ matrix.os }} (${{ matrix.arch }})
|
||||
if: inputs.linux || inputs.linux_armv7l || inputs.musllinux
|
||||
needs: process
|
||||
if: inputs.linux
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- exe: yt-dlp_linux
|
||||
platform: x86_64
|
||||
runner: ubuntu-24.04
|
||||
- exe: yt-dlp_linux_aarch64
|
||||
platform: aarch64
|
||||
runner: ubuntu-24.04-arm
|
||||
include: ${{ fromJSON(needs.process.outputs.linux_matrix) }}
|
||||
env:
|
||||
CHANNEL: ${{ inputs.channel }}
|
||||
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||
VERSION: ${{ needs.process.outputs.version }}
|
||||
EXE_NAME: ${{ matrix.exe }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build executable
|
||||
env:
|
||||
SERVICE: linux_${{ matrix.platform }}
|
||||
run: |
|
||||
mkdir -p ./dist
|
||||
pushd bundle/docker
|
||||
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
|
||||
popd
|
||||
sudo chown "${USER}:docker" "./dist/${EXE_NAME}"
|
||||
- name: Verify executable in container
|
||||
if: vars.UPDATE_TO_VERIFICATION
|
||||
env:
|
||||
SERVICE: linux_${{ matrix.platform }}_verify
|
||||
run: |
|
||||
cd bundle/docker
|
||||
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
|
||||
- name: Verify --update-to
|
||||
if: vars.UPDATE_TO_VERIFICATION
|
||||
run: |
|
||||
chmod +x "./dist/${EXE_NAME}"
|
||||
mkdir -p ~/testing
|
||||
cp "./dist/${EXE_NAME}" ~/testing/"${EXE_NAME}_downgraded"
|
||||
version="$("./dist/${EXE_NAME}" --version)"
|
||||
~/testing/"${EXE_NAME}_downgraded" -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||
downgraded_version="$(~/testing/"${EXE_NAME}_downgraded" --version)"
|
||||
[[ "${version}" != "${downgraded_version}" ]]
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-bin-${{ github.job }}_${{ matrix.platform }}
|
||||
path: |
|
||||
dist/${{ matrix.exe }}*
|
||||
compression-level: 0
|
||||
PYTHON_VERSION: ${{ matrix.python_version }}
|
||||
UPDATE_TO: ${{ (vars.UPDATE_TO_VERIFICATION && matrix.update_to) || '' }}
|
||||
SKIP_ONEDIR_BUILD: ${{ (!matrix.onedir && '1') || '' }}
|
||||
SKIP_ONEFILE_BUILD: ${{ (!matrix.onefile && '1') || '' }}
|
||||
|
||||
linux_armv7l:
|
||||
needs: process
|
||||
if: inputs.linux_armv7l
|
||||
permissions:
|
||||
contents: read
|
||||
runs-on: ubuntu-24.04-arm
|
||||
env:
|
||||
CHANNEL: ${{ inputs.channel }}
|
||||
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||
VERSION: ${{ needs.process.outputs.version }}
|
||||
EXE_NAME: yt-dlp_linux_armv7l
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
- name: Cache requirements
|
||||
if: matrix.cache_requirements
|
||||
id: cache-venv
|
||||
uses: actions/cache@v4
|
||||
env:
|
||||
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
|
||||
with:
|
||||
path: |
|
||||
~/yt-dlp-build-venv
|
||||
key: cache-reqs-${{ github.job }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
|
||||
venv
|
||||
key: cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
|
||||
restore-keys: |
|
||||
cache-reqs-${{ github.job }}-${{ github.ref }}-
|
||||
cache-reqs-${{ github.job }}-
|
||||
cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-
|
||||
cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-
|
||||
|
||||
- name: Set up QEMU
|
||||
if: matrix.qemu_platform
|
||||
uses: docker/setup-qemu-action@v3
|
||||
with:
|
||||
platforms: linux/arm/v7
|
||||
- name: Build executable
|
||||
env:
|
||||
SERVICE: linux_armv7l
|
||||
run: |
|
||||
mkdir -p ./dist
|
||||
mkdir -p ~/yt-dlp-build-venv
|
||||
cd bundle/docker
|
||||
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
|
||||
- name: Verify executable in container
|
||||
if: vars.UPDATE_TO_VERIFICATION
|
||||
env:
|
||||
SERVICE: linux_armv7l_verify
|
||||
run: |
|
||||
cd bundle/docker
|
||||
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-bin-${{ github.job }}
|
||||
path: |
|
||||
dist/yt-dlp_linux_armv7l.zip
|
||||
compression-level: 0
|
||||
platforms: ${{ matrix.qemu_platform }}
|
||||
|
||||
musllinux:
|
||||
needs: process
|
||||
if: inputs.musllinux
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- exe: yt-dlp_musllinux
|
||||
platform: x86_64
|
||||
runner: ubuntu-24.04
|
||||
- exe: yt-dlp_musllinux_aarch64
|
||||
platform: aarch64
|
||||
runner: ubuntu-24.04-arm
|
||||
env:
|
||||
CHANNEL: ${{ inputs.channel }}
|
||||
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||
VERSION: ${{ needs.process.outputs.version }}
|
||||
EXE_NAME: ${{ matrix.exe }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build executable
|
||||
env:
|
||||
SERVICE: musllinux_${{ matrix.platform }}
|
||||
SERVICE: ${{ matrix.os }}_${{ matrix.arch }}
|
||||
run: |
|
||||
mkdir -p ./venv
|
||||
mkdir -p ./dist
|
||||
pushd bundle/docker
|
||||
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
|
||||
popd
|
||||
sudo chown "${USER}:docker" "./dist/${EXE_NAME}"
|
||||
if [[ -z "${SKIP_ONEFILE_BUILD}" ]]; then
|
||||
sudo chown "${USER}:docker" "./dist/${EXE_NAME}"
|
||||
fi
|
||||
|
||||
- name: Verify executable in container
|
||||
if: vars.UPDATE_TO_VERIFICATION
|
||||
env:
|
||||
SERVICE: musllinux_${{ matrix.platform }}_verify
|
||||
SERVICE: ${{ matrix.os }}_${{ matrix.arch }}_verify
|
||||
run: |
|
||||
cd bundle/docker
|
||||
docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-bin-${{ github.job }}_${{ matrix.platform }}
|
||||
name: build-bin-${{ matrix.os }}_${{ matrix.arch }}
|
||||
path: |
|
||||
dist/${{ matrix.exe }}*
|
||||
compression-level: 0
|
||||
@@ -333,9 +315,10 @@ jobs:
|
||||
CHANNEL: ${{ inputs.channel }}
|
||||
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||
VERSION: ${{ needs.process.outputs.version }}
|
||||
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
# NB: Building universal2 does not work with python from actions/setup-python
|
||||
|
||||
- name: Cache requirements
|
||||
@@ -409,7 +392,7 @@ jobs:
|
||||
chmod +x ./dist/yt-dlp_macos
|
||||
cp ./dist/yt-dlp_macos ./dist/yt-dlp_macos_downgraded
|
||||
version="$(./dist/yt-dlp_macos --version)"
|
||||
./dist/yt-dlp_macos_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||
./dist/yt-dlp_macos_downgraded -v --update-to "${UPDATE_TO}"
|
||||
downgraded_version="$(./dist/yt-dlp_macos_downgraded --version)"
|
||||
[[ "$version" != "$downgraded_version" ]]
|
||||
|
||||
@@ -423,6 +406,7 @@ jobs:
|
||||
compression-level: 0
|
||||
|
||||
windows:
|
||||
name: windows (${{ matrix.arch }})
|
||||
needs: process
|
||||
if: inputs.windows
|
||||
permissions:
|
||||
@@ -435,27 +419,37 @@ jobs:
|
||||
- arch: 'x64'
|
||||
runner: windows-2025
|
||||
python_version: '3.10'
|
||||
suffix: ''
|
||||
platform_tag: win_amd64
|
||||
pyi_version: '6.16.0'
|
||||
pyi_tag: '2025.09.13.221251'
|
||||
pyi_hash: b6496c7630c3afe66900cfa824e8234a8c2e2c81704bd7facd79586abc76c0e5
|
||||
- arch: 'x86'
|
||||
runner: windows-2025
|
||||
python_version: '3.10'
|
||||
suffix: '_x86'
|
||||
platform_tag: win32
|
||||
pyi_version: '6.16.0'
|
||||
pyi_tag: '2025.09.13.221251'
|
||||
pyi_hash: 2d881843580efdc54f3523507fc6d9c5b6051ee49c743a6d9b7003ac5758c226
|
||||
- arch: 'arm64'
|
||||
runner: windows-11-arm
|
||||
python_version: '3.13' # arm64 only has Python >= 3.11 available
|
||||
suffix: '_arm64'
|
||||
platform_tag: win_arm64
|
||||
pyi_version: '6.16.0'
|
||||
pyi_tag: '2025.09.13.221251'
|
||||
pyi_hash: 4250c9085e34a95c898f3ee2f764914fc36ec59f0d97c28e6a75fcf21f7b144f
|
||||
env:
|
||||
CHANNEL: ${{ inputs.channel }}
|
||||
ORIGIN: ${{ needs.process.outputs.origin }}
|
||||
VERSION: ${{ needs.process.outputs.version }}
|
||||
SUFFIX: ${{ matrix.suffix }}
|
||||
SUFFIX: ${{ (matrix.arch != 'x64' && format('_{0}', matrix.arch)) || '' }}
|
||||
UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
|
||||
BASE_CACHE_KEY: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}
|
||||
# Use custom PyInstaller built with https://github.com/yt-dlp/Pyinstaller-builds
|
||||
PYINSTALLER_URL: https://yt-dlp.github.io/Pyinstaller-Builds/${{ matrix.arch }}/pyinstaller-6.15.0-py3-none-any.whl
|
||||
PYI_REPO: https://github.com/yt-dlp/Pyinstaller-Builds
|
||||
PYI_WHEEL: pyinstaller-${{ matrix.pyi_version }}-py3-none-${{ matrix.platform_tag }}.whl
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ matrix.python_version }}
|
||||
architecture: ${{ matrix.arch }}
|
||||
@@ -477,17 +471,23 @@ jobs:
|
||||
- name: Install Requirements
|
||||
env:
|
||||
ARCH: ${{ matrix.arch }}
|
||||
PYI_URL: ${{ env.PYI_REPO }}/releases/download/${{ matrix.pyi_tag }}/${{ env.PYI_WHEEL }}
|
||||
PYI_HASH: ${{ matrix.pyi_hash }}
|
||||
shell: pwsh
|
||||
run: |
|
||||
python -m venv /yt-dlp-build-venv
|
||||
/yt-dlp-build-venv/Scripts/Activate.ps1
|
||||
python -m pip install -U pip
|
||||
# Install custom PyInstaller build and verify hash
|
||||
mkdir /pyi-wheels
|
||||
python -m pip download -d /pyi-wheels --no-deps --require-hashes "pyinstaller@${Env:PYI_URL}#sha256=${Env:PYI_HASH}"
|
||||
python -m pip install --force-reinstall -U "/pyi-wheels/${Env:PYI_WHEEL}"
|
||||
python devscripts/install_deps.py -o --include build
|
||||
if ("${Env:ARCH}" -eq "x86") {
|
||||
python devscripts/install_deps.py
|
||||
} else {
|
||||
python devscripts/install_deps.py --include curl-cffi
|
||||
}
|
||||
python -m pip install -U "${Env:PYINSTALLER_URL}"
|
||||
|
||||
- name: Prepare
|
||||
shell: pwsh
|
||||
@@ -510,7 +510,7 @@ jobs:
|
||||
$name = "yt-dlp${Env:SUFFIX}"
|
||||
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
|
||||
$version = & "./dist/${name}.exe" --version
|
||||
& "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2025.08.20
|
||||
& "./dist/${name}_downgraded.exe" -v --update-to "${Env:UPDATE_TO}"
|
||||
$downgraded_version = & "./dist/${name}_downgraded.exe" --version
|
||||
if ($version -eq $downgraded_version) {
|
||||
exit 1
|
||||
@@ -521,8 +521,8 @@ jobs:
|
||||
with:
|
||||
name: build-bin-${{ github.job }}-${{ matrix.arch }}
|
||||
path: |
|
||||
dist/yt-dlp${{ matrix.suffix }}.exe
|
||||
dist/yt-dlp_win${{ matrix.suffix }}.zip
|
||||
dist/yt-dlp${{ env.SUFFIX }}.exe
|
||||
dist/yt-dlp_win${{ env.SUFFIX }}.zip
|
||||
compression-level: 0
|
||||
|
||||
meta_files:
|
||||
@@ -531,14 +531,12 @@ jobs:
|
||||
- process
|
||||
- unix
|
||||
- linux
|
||||
- linux_armv7l
|
||||
- musllinux
|
||||
- macos
|
||||
- windows
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
path: artifact
|
||||
pattern: build-bin-*
|
||||
@@ -560,35 +558,39 @@ jobs:
|
||||
cat >> _update_spec << EOF
|
||||
# This file is used for regulating self-update
|
||||
lock 2022.08.18.36 .+ Python 3\.6
|
||||
lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
|
||||
lock 2023.11.16 zip Python 3\.7
|
||||
lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||
lock 2024.10.22 py2exe .+
|
||||
lock 2024.10.22 zip Python 3\.8
|
||||
lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||
lock 2025.08.11 darwin_legacy_exe .+
|
||||
lock 2025.08.27 linux_armv7l_exe .+
|
||||
lock 2025.10.14 zip Python 3\.9
|
||||
lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
|
||||
lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
|
||||
lockV2 yt-dlp/yt-dlp 2023.11.16 zip Python 3\.7
|
||||
lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||
lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
|
||||
lockV2 yt-dlp/yt-dlp 2024.10.22 zip Python 3\.8
|
||||
lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||
lockV2 yt-dlp/yt-dlp 2025.08.11 darwin_legacy_exe .+
|
||||
lockV2 yt-dlp/yt-dlp 2025.08.27 linux_armv7l_exe .+
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
|
||||
lockV2 yt-dlp/yt-dlp 2025.10.14 zip Python 3\.9
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 zip Python 3\.7
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 zip Python 3\.8
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.12.233030 darwin_legacy_exe .+
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.30.232839 linux_armv7l_exe .+
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
|
||||
lockV2 yt-dlp/yt-dlp-nightly-builds 2025.10.14.232845 zip Python 3\.9
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 zip Python 3\.7
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 zip Python 3\.8
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2025.08.12.232447 darwin_legacy_exe .+
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2025.09.05.212910 linux_armv7l_exe .+
|
||||
lockV2 yt-dlp/yt-dlp-master-builds 2025.10.14.232330 zip Python 3\.9
|
||||
EOF
|
||||
|
||||
- name: Sign checksum files
|
||||
|
||||
1
.github/workflows/cache-warmer.yml
vendored
1
.github/workflows/cache-warmer.yml
vendored
@@ -12,6 +12,7 @@ jobs:
|
||||
with:
|
||||
version: '999999'
|
||||
channel: stable
|
||||
origin: ${{ github.repository }}
|
||||
unix: false
|
||||
linux: false
|
||||
linux_armv7l: true
|
||||
|
||||
2
.github/workflows/codeql.yml
vendored
2
.github/workflows/codeql.yml
vendored
@@ -29,7 +29,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
|
||||
14
.github/workflows/core.yml
vendored
14
.github/workflows/core.yml
vendored
@@ -36,24 +36,26 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest]
|
||||
# CPython 3.9 is in quick-test
|
||||
python-version: ['3.10', '3.11', '3.12', '3.13', pypy-3.11]
|
||||
# CPython 3.10 is in quick-test
|
||||
python-version: ['3.11', '3.12', '3.13', '3.14', pypy-3.11]
|
||||
include:
|
||||
# atleast one of each CPython/PyPy tests must be in windows
|
||||
- os: windows-latest
|
||||
python-version: '3.9'
|
||||
- os: windows-latest
|
||||
python-version: '3.10'
|
||||
- os: windows-latest
|
||||
python-version: '3.11'
|
||||
- os: windows-latest
|
||||
python-version: '3.12'
|
||||
- os: windows-latest
|
||||
python-version: '3.13'
|
||||
- os: windows-latest
|
||||
python-version: '3.14'
|
||||
- os: windows-latest
|
||||
python-version: pypy-3.11
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v5
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install test requirements
|
||||
|
||||
14
.github/workflows/download.yml
vendored
14
.github/workflows/download.yml
vendored
@@ -9,11 +9,11 @@ jobs:
|
||||
if: "contains(github.event.head_commit.message, 'ci run dl')"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: 3.9
|
||||
python-version: '3.10'
|
||||
- name: Install test requirements
|
||||
run: python3 ./devscripts/install_deps.py --include dev
|
||||
- name: Run tests
|
||||
@@ -28,17 +28,17 @@ jobs:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
os: [ubuntu-latest]
|
||||
python-version: ['3.10', '3.11', '3.12', '3.13', pypy-3.11]
|
||||
python-version: ['3.11', '3.12', '3.13', '3.14', pypy-3.11]
|
||||
include:
|
||||
# atleast one of each CPython/PyPy tests must be in windows
|
||||
- os: windows-latest
|
||||
python-version: '3.9'
|
||||
python-version: '3.10'
|
||||
- os: windows-latest
|
||||
python-version: pypy-3.11
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v5
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install test requirements
|
||||
|
||||
14
.github/workflows/quick-test.yml
vendored
14
.github/workflows/quick-test.yml
vendored
@@ -9,11 +9,11 @@ jobs:
|
||||
if: "!contains(github.event.head_commit.message, 'ci skip all')"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python 3.9
|
||||
uses: actions/setup-python@v5
|
||||
- uses: actions/checkout@v5
|
||||
- name: Set up Python 3.10
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: '3.9'
|
||||
python-version: '3.10'
|
||||
- name: Install test requirements
|
||||
run: python3 ./devscripts/install_deps.py -o --include test
|
||||
- name: Run tests
|
||||
@@ -26,10 +26,10 @@ jobs:
|
||||
if: "!contains(github.event.head_commit.message, 'ci skip all')"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: '3.9'
|
||||
python-version: '3.10'
|
||||
- name: Install dev dependencies
|
||||
run: python3 ./devscripts/install_deps.py -o --include static-analysis
|
||||
- name: Make lazy extractors
|
||||
|
||||
2
.github/workflows/release-master.yml
vendored
2
.github/workflows/release-master.yml
vendored
@@ -38,7 +38,7 @@ jobs:
|
||||
id-token: write # mandatory for trusted publishing
|
||||
steps:
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
path: dist
|
||||
name: build-pypi
|
||||
|
||||
4
.github/workflows/release-nightly.yml
vendored
4
.github/workflows/release-nightly.yml
vendored
@@ -12,7 +12,7 @@ jobs:
|
||||
outputs:
|
||||
commit: ${{ steps.check_for_new_commits.outputs.commit }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Check for new commits
|
||||
@@ -53,7 +53,7 @@ jobs:
|
||||
id-token: write # mandatory for trusted publishing
|
||||
steps:
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
path: dist
|
||||
name: build-pypi
|
||||
|
||||
29
.github/workflows/release.yml
vendored
29
.github/workflows/release.yml
vendored
@@ -75,13 +75,13 @@ jobs:
|
||||
head_sha: ${{ steps.get_target.outputs.head_sha }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.10"
|
||||
python-version: "3.10" # Keep this in sync with test-workflows.yml
|
||||
|
||||
- name: Process inputs
|
||||
id: process_inputs
|
||||
@@ -170,10 +170,10 @@ jobs:
|
||||
id-token: write # mandatory for trusted publishing
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
||||
@@ -233,15 +233,15 @@ jobs:
|
||||
VERSION: ${{ needs.prepare.outputs.version }}
|
||||
HEAD_SHA: ${{ needs.prepare.outputs.head_sha }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/download-artifact@v4
|
||||
- uses: actions/download-artifact@v5
|
||||
with:
|
||||
path: artifact
|
||||
pattern: build-*
|
||||
merge-multiple: true
|
||||
- uses: actions/setup-python@v5
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
||||
@@ -259,7 +259,7 @@ jobs:
|
||||
"[]" \
|
||||
"(https://discord.gg/H5MNcFW63r \"Discord\") " \
|
||||
"[]" \
|
||||
"(https://github.com/${BASE_REPO}/blob/master/Collaborators.md#collaborators \"Donate\") " \
|
||||
"(https://github.com/${BASE_REPO}/blob/master/Maintainers.md#maintainers \"Donate\") " \
|
||||
"[]" \
|
||||
"(https://github.com/${REPOSITORY}${DOCS_PATH}#readme \"Documentation\") " > ./RELEASE_NOTES
|
||||
if [[ "${TARGET_REPO}" == "${BASE_REPO}" ]]; then
|
||||
@@ -269,12 +269,11 @@ jobs:
|
||||
"[]" \
|
||||
"(https://github.com/${MASTER_REPO}/releases/latest \"Master builds\")" >> ./RELEASE_NOTES
|
||||
fi
|
||||
printf '\n\n' >> ./RELEASE_NOTES
|
||||
cat >> ./RELEASE_NOTES << EOF
|
||||
#### A description of the various files is in the [README](https://github.com/${REPOSITORY}#release-files)
|
||||
---
|
||||
$(python ./devscripts/make_changelog.py -vv --collapsible)
|
||||
EOF
|
||||
printf '\n\n%s\n\n%s%s\n\n---\n' \
|
||||
"#### A description of the various files is in the [README](https://github.com/${REPOSITORY}#release-files)" \
|
||||
"The PyInstaller-bundled executables are subject to the licenses described in " \
|
||||
"[THIRD_PARTY_LICENSES.txt](https://github.com/${BASE_REPO}/blob/${HEAD_SHA}/THIRD_PARTY_LICENSES.txt)" >> ./RELEASE_NOTES
|
||||
python ./devscripts/make_changelog.py -vv --collapsible >> ./RELEASE_NOTES
|
||||
printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES
|
||||
cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
|
||||
printf '%s\n\n' "Generated from: https://github.com/${REPOSITORY}/commit/${HEAD_SHA}" >> ./ARCHIVE_NOTES
|
||||
|
||||
6
.github/workflows/signature-tests.yml
vendored
6
.github/workflows/signature-tests.yml
vendored
@@ -25,11 +25,11 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest]
|
||||
python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', pypy-3.11]
|
||||
python-version: ['3.10', '3.11', '3.12', '3.13', '3.14', pypy-3.11]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v5
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install test requirements
|
||||
|
||||
52
.github/workflows/test-workflows.yml
vendored
Normal file
52
.github/workflows/test-workflows.yml
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
name: Test and lint workflows
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- .github/workflows/*
|
||||
- bundle/docker/linux/*.sh
|
||||
- devscripts/setup_variables.py
|
||||
- devscripts/setup_variables_tests.py
|
||||
- devscripts/utils.py
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/*
|
||||
- bundle/docker/linux/*.sh
|
||||
- devscripts/setup_variables.py
|
||||
- devscripts/setup_variables_tests.py
|
||||
- devscripts/utils.py
|
||||
permissions:
|
||||
contents: read
|
||||
env:
|
||||
ACTIONLINT_VERSION: "1.7.7"
|
||||
ACTIONLINT_SHA256SUM: 023070a287cd8cccd71515fedc843f1985bf96c436b7effaecce67290e7e0757
|
||||
ACTIONLINT_REPO: https://github.com/rhysd/actionlint
|
||||
|
||||
jobs:
|
||||
check:
|
||||
name: Check workflows
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.10" # Keep this in sync with release.yml's prepare job
|
||||
- name: Install requirements
|
||||
env:
|
||||
ACTIONLINT_TARBALL: ${{ format('actionlint_{0}_linux_amd64.tar.gz', env.ACTIONLINT_VERSION) }}
|
||||
run: |
|
||||
python -m devscripts.install_deps -o --include test
|
||||
sudo apt -y install shellcheck
|
||||
python -m pip install -U pyflakes
|
||||
curl -LO "${ACTIONLINT_REPO}/releases/download/v${ACTIONLINT_VERSION}/${ACTIONLINT_TARBALL}"
|
||||
printf '%s %s' "${ACTIONLINT_SHA256SUM}" "${ACTIONLINT_TARBALL}" | sha256sum -c -
|
||||
tar xvzf "${ACTIONLINT_TARBALL}" actionlint
|
||||
chmod +x actionlint
|
||||
- name: Run actionlint
|
||||
run: |
|
||||
./actionlint -color
|
||||
- name: Check Docker shell scripts
|
||||
run: |
|
||||
shellcheck bundle/docker/linux/*.sh
|
||||
- name: Test GHA devscripts
|
||||
run: |
|
||||
pytest -Werror --tb=short --color=yes devscripts/setup_variables_tests.py
|
||||
@@ -12,6 +12,7 @@
|
||||
- [Is your question about yt-dlp?](#is-your-question-about-yt-dlp)
|
||||
- [Are you willing to share account details if needed?](#are-you-willing-to-share-account-details-if-needed)
|
||||
- [Is the website primarily used for piracy](#is-the-website-primarily-used-for-piracy)
|
||||
- [AUTOMATED CONTRIBUTIONS (AI / LLM) POLICY](#automated-contributions-ai--llm-policy)
|
||||
- [DEVELOPER INSTRUCTIONS](#developer-instructions)
|
||||
- [Adding new feature or making overarching changes](#adding-new-feature-or-making-overarching-changes)
|
||||
- [Adding support for a new site](#adding-support-for-a-new-site)
|
||||
@@ -134,6 +135,17 @@ While these steps won't necessarily ensure that no misuse of the account takes p
|
||||
We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in fakes. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).
|
||||
|
||||
|
||||
# AUTOMATED CONTRIBUTIONS (AI / LLM) POLICY
|
||||
|
||||
Please refrain from submitting issues or pull requests that have been generated by an LLM or other fully-automated tools. Any submission that is in violation of this policy will be closed, and the submitter may be blocked from this repository without warning.
|
||||
|
||||
If you submit an issue, you need to understand what your issue description is saying. You need to be able to answer questions about your bug report or feature request. Using an AI tool to *proofread* your issue/comment text is acceptable. Using an AI tool to *write* your issue/comment text is unacceptable.
|
||||
|
||||
If you submit a pull request, you need to understand what every line of code you've changed does. If you can't explain why your PR is doing something, then do not submit it. Using an AI tool to generate entire lines of code is unacceptable.
|
||||
|
||||
The rationale behind this policy is that automated contributions are a waste of the maintainers' time. Humans spend their time and brainpower reviewing every submission. Issues or pull requests generated by automation tools create an imbalance of effort between the submitter and the reviewer. Nobody learns anything when a maintainer reviews code written by an LLM.
|
||||
|
||||
Additionally, AI-generated code conflicts with this project's license (Unlicense), since you cannot truly release code into the public domain if you didn't author it yourself.
|
||||
|
||||
|
||||
# DEVELOPER INSTRUCTIONS
|
||||
@@ -272,7 +284,7 @@ After you have ensured this site is distributing its content legally, you can fo
|
||||
|
||||
You can use `hatch fmt` to automatically fix problems. Rules that the linter/formatter enforces should not be disabled with `# noqa` unless a maintainer requests it. The only exception allowed is for old/printf-style string formatting in GraphQL query templates (use `# noqa: UP031`).
|
||||
|
||||
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython >=3.9 and PyPy >=3.11. Backward compatibility is not required for even older versions of Python.
|
||||
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython >=3.10 and PyPy >=3.11. Backward compatibility is not required for even older versions of Python.
|
||||
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:
|
||||
|
||||
```shell
|
||||
@@ -768,12 +780,10 @@ view_count = int_or_none(video.get('views'))
|
||||
```
|
||||
|
||||
|
||||
# My pull request is labeled pending-fixes
|
||||
## My pull request is labeled pending-fixes
|
||||
|
||||
The `pending-fixes` label is added when there are changes requested to a PR. When the necessary changes are made, the label should be removed. However, despite our best efforts, it may sometimes happen that the maintainer did not see the changes or forgot to remove the label. If your PR is still marked as `pending-fixes` a few days after all requested changes have been made, feel free to ping the maintainer who labeled your issue and ask them to re-review and remove the label.
|
||||
|
||||
|
||||
|
||||
|
||||
# EMBEDDING YT-DLP
|
||||
See [README.md#embedding-yt-dlp](README.md#embedding-yt-dlp) for instructions on how to embed yt-dlp in another Python program
|
||||
|
||||
22
CONTRIBUTORS
22
CONTRIBUTORS
@@ -1,10 +1,10 @@
|
||||
pukkandan (owner)
|
||||
shirt-dev (collaborator)
|
||||
coletdjnz/colethedj (collaborator)
|
||||
Ashish0804 (collaborator)
|
||||
bashonly (collaborator)
|
||||
Grub4K (collaborator)
|
||||
seproDev (collaborator)
|
||||
shirt-dev (maintainer)
|
||||
coletdjnz (maintainer)
|
||||
Ashish0804 (maintainer)
|
||||
bashonly (maintainer)
|
||||
Grub4K (maintainer)
|
||||
seproDev (maintainer)
|
||||
h-h-h-h
|
||||
pauldubois98
|
||||
nixxo
|
||||
@@ -808,3 +808,13 @@ Randalix
|
||||
runarmod
|
||||
gitchasing
|
||||
zakaryan2004
|
||||
cdce8p
|
||||
nicolaasjan
|
||||
willsmillie
|
||||
CasualYT31
|
||||
cecilia-sanare
|
||||
dhwz
|
||||
robin-mu
|
||||
shssoichiro
|
||||
thanhtaivtt
|
||||
uoag
|
||||
|
||||
117
Changelog.md
117
Changelog.md
@@ -4,6 +4,123 @@
|
||||
# To create a release, dispatch the https://github.com/yt-dlp/yt-dlp/actions/workflows/release.yml workflow on master
|
||||
-->
|
||||
|
||||
### 2025.10.22
|
||||
|
||||
#### Important changes
|
||||
- **A stopgap release with a *TEMPORARY partial* fix for YouTube support**
|
||||
Some formats may still be unavailable, especially if cookies are passed to yt-dlp. The ***NEXT*** release, expected very soon, **will require an external JS runtime (e.g. Deno)** in order for YouTube downloads to work properly. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14404)
|
||||
- **The minimum *required* Python version has been raised to 3.10**
|
||||
Python 3.9 has reached its end-of-life as of October 2025, and yt-dlp has now removed support for it. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13858)
|
||||
|
||||
#### Core changes
|
||||
- [Remove Python 3.9 support](https://github.com/yt-dlp/yt-dlp/commit/4e6a693057cfaf1ce1f07b019ed3bfce2bf936f6) ([#13861](https://github.com/yt-dlp/yt-dlp/issues/13861)) by [bashonly](https://github.com/bashonly)
|
||||
|
||||
#### Extractor changes
|
||||
- **appleconnect**: [Rework extractor](https://github.com/yt-dlp/yt-dlp/commit/78748b506f0dca8236ac0045ed7f72f7cf334b62) ([#13229](https://github.com/yt-dlp/yt-dlp/issues/13229)) by [doe1080](https://github.com/doe1080)
|
||||
- **idagio**: [Support URLs with country codes](https://github.com/yt-dlp/yt-dlp/commit/c9356f308dd3c5f9f494cb40ed14c5df017b4fe0) ([#14655](https://github.com/yt-dlp/yt-dlp/issues/14655)) by [robin-mu](https://github.com/robin-mu)
|
||||
- **tvnoe**: [Rework Extractor](https://github.com/yt-dlp/yt-dlp/commit/fe5ae54a7b08ebe679f03afdeafbe1cee5784d5b) ([#13369](https://github.com/yt-dlp/yt-dlp/issues/13369)) by [doe1080](https://github.com/doe1080)
|
||||
- **youtube**: [Use temporary player client workaround](https://github.com/yt-dlp/yt-dlp/commit/2c9091e355a7ba5d1edb69796ecdca48199b77fb) ([#14693](https://github.com/yt-dlp/yt-dlp/issues/14693)) by [gamer191](https://github.com/gamer191)
|
||||
|
||||
#### Misc. changes
|
||||
- **cleanup**
|
||||
- Miscellaneous
|
||||
- [c7bda21](https://github.com/yt-dlp/yt-dlp/commit/c7bda2192aa24afce40fdbbbe056d269aa3b2872) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev)
- [de7b3c0](https://github.com/yt-dlp/yt-dlp/commit/de7b3c0705022cb777c5b4b7f0c69c59ad6ff538) by [bashonly](https://github.com/bashonly)
- **docs**: [Update list of maintainers](https://github.com/yt-dlp/yt-dlp/commit/dfc0a84c192a7357dd1768cc345d590253a14fe5) ([#14148](https://github.com/yt-dlp/yt-dlp/issues/14148)) by [bashonly](https://github.com/bashonly), [coletdjnz](https://github.com/coletdjnz), [seproDev](https://github.com/seproDev)

### 2025.10.14

#### Core changes
- [Fix `prefer-vp9-sort` compat option](https://github.com/yt-dlp/yt-dlp/commit/a6673a8e82276ea529c1773ed09e5bc4a22e822a) ([#14603](https://github.com/yt-dlp/yt-dlp/issues/14603)) by [seproDev](https://github.com/seproDev)

#### Extractor changes
- **10play**
    - [Handle geo-restriction errors](https://github.com/yt-dlp/yt-dlp/commit/ad55bfcfb700fbfc1364c04e3425761d6f95c0a7) ([#14618](https://github.com/yt-dlp/yt-dlp/issues/14618)) by [bashonly](https://github.com/bashonly)
    - [Rework extractor](https://github.com/yt-dlp/yt-dlp/commit/eafedc21817bb0de20e9aaccd7151a1d4c4e1ebd) ([#14417](https://github.com/yt-dlp/yt-dlp/issues/14417)) by [seproDev](https://github.com/seproDev), [Sipherdrakon](https://github.com/Sipherdrakon)
- **abc.net.au**: [Support listen URLs](https://github.com/yt-dlp/yt-dlp/commit/0ea5d5882def84415f946907cfc00ab431c18fed) ([#14389](https://github.com/yt-dlp/yt-dlp/issues/14389)) by [uoag](https://github.com/uoag)
- **cbc.ca**: listen: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/df160ab18db523f6629f2e7e20123d7a3551df28) ([#14391](https://github.com/yt-dlp/yt-dlp/issues/14391)) by [uoag](https://github.com/uoag)
- **dropout**: [Update extractor for new domain](https://github.com/yt-dlp/yt-dlp/commit/8eb8695139dece6351aac10463df63b87b45b000) ([#14531](https://github.com/yt-dlp/yt-dlp/issues/14531)) by [cecilia-sanare](https://github.com/cecilia-sanare)
- **idagio**: [Add extractors](https://github.com/yt-dlp/yt-dlp/commit/a98e7f9f58a9492d2cb216baa59c890ed8ce02f3) ([#14586](https://github.com/yt-dlp/yt-dlp/issues/14586)) by [robin-mu](https://github.com/robin-mu)
- **musescore**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/87be1bb96ac47abaaa4cfc6d7dd651e511b74551) ([#14598](https://github.com/yt-dlp/yt-dlp/issues/14598)) by [seproDev](https://github.com/seproDev)
- **prankcastpost**: [Rework extractor](https://github.com/yt-dlp/yt-dlp/commit/5d7678195a7d0c045a9fe0418383171a71a7ea43) ([#14445](https://github.com/yt-dlp/yt-dlp/issues/14445)) by [columndeeply](https://github.com/columndeeply)
- **slideslive**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/c2e124881f9aa02097589e853b3d3505e78372c4) ([#14619](https://github.com/yt-dlp/yt-dlp/issues/14619)) by [bashonly](https://github.com/bashonly)
- **soundcloud**: [Support new API URLs](https://github.com/yt-dlp/yt-dlp/commit/6d41aaf21c61a87e74564646abd0a8ee887e888d) ([#14449](https://github.com/yt-dlp/yt-dlp/issues/14449)) by [seproDev](https://github.com/seproDev)
- **tiktok**
    - [Support browser impersonation](https://github.com/yt-dlp/yt-dlp/commit/5513036104ed9710f624c537fb3644b07a0680db) ([#14473](https://github.com/yt-dlp/yt-dlp/issues/14473)) by [bashonly](https://github.com/bashonly), [thanhtaivtt](https://github.com/thanhtaivtt)
    - user: [Fix private account extraction](https://github.com/yt-dlp/yt-dlp/commit/cdc533b114c35ceb8a2e9dd3eb9c172a8737ae5e) ([#14585](https://github.com/yt-dlp/yt-dlp/issues/14585)) by [CasualYT31](https://github.com/CasualYT31)
- **vidyard**: [Extract chapters](https://github.com/yt-dlp/yt-dlp/commit/5f94f054907c12e68129cd9ac2508ed8aba1b223) ([#14478](https://github.com/yt-dlp/yt-dlp/issues/14478)) by [exterrestris](https://github.com/exterrestris)
- **xhamster**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/739125d40f8ede3beb7be68fc4df55bec0d226fd) ([#14446](https://github.com/yt-dlp/yt-dlp/issues/14446)) by [dhwz](https://github.com/dhwz), [dirkf](https://github.com/dirkf), [shssoichiro](https://github.com/shssoichiro)
- **youtube**
    - [Detect experiment binding GVS PO Token to video id](https://github.com/yt-dlp/yt-dlp/commit/bd5ed90419eea18adfb2f0d8efa9d22b2029119f) ([#14471](https://github.com/yt-dlp/yt-dlp/issues/14471)) by [coletdjnz](https://github.com/coletdjnz)
    - tab: [Fix approximate timestamp extraction for feeds](https://github.com/yt-dlp/yt-dlp/commit/ccc25d6710a4aa373b7e15c558e07f8a2ffae5f3) ([#14539](https://github.com/yt-dlp/yt-dlp/issues/14539)) by [coletdjnz](https://github.com/coletdjnz)

### 2025.09.26

#### Extractor changes
- **twitch**: vod: [Fix `live_status` detection](https://github.com/yt-dlp/yt-dlp/commit/50e452fd7dfb8a648bd3b9aaabc8f94f37ce2051) ([#14457](https://github.com/yt-dlp/yt-dlp/issues/14457)) by [bashonly](https://github.com/bashonly)
- **youtube**
    - [Fix player JS overrides](https://github.com/yt-dlp/yt-dlp/commit/b7b7910d96359a539b7997890342ab4a59dd685d) ([#14430](https://github.com/yt-dlp/yt-dlp/issues/14430)) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev)
    - [Improve PO token logging](https://github.com/yt-dlp/yt-dlp/commit/7df5acc546dccd32213c3a125d721e32b06d71b0) ([#14447](https://github.com/yt-dlp/yt-dlp/issues/14447)) by [seproDev](https://github.com/seproDev)
    - [Player client maintenance](https://github.com/yt-dlp/yt-dlp/commit/94c5622be96474ca3c637e52898c4daee4d8fb69) ([#14448](https://github.com/yt-dlp/yt-dlp/issues/14448)) by [seproDev](https://github.com/seproDev)
    - [Replace `tv_simply` with `web_safari` in default clients](https://github.com/yt-dlp/yt-dlp/commit/12b57d2858845c0c7fb33bf9aa8ed7be6905535d) ([#14465](https://github.com/yt-dlp/yt-dlp/issues/14465)) by [bashonly](https://github.com/bashonly)

### 2025.09.23

#### Important changes
- **Several options have been deprecated**
In order to simplify the codebase and reduce maintenance burden, various options have been deprecated. Please remove them from your commands/configurations. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14198)

#### Core changes
- **compat**: [Add `compat_datetime_from_timestamp`](https://github.com/yt-dlp/yt-dlp/commit/6a763a55d8a93b2a964ecf7699248ad342485412) ([#11902](https://github.com/yt-dlp/yt-dlp/issues/11902)) by [pzhlkj6612](https://github.com/pzhlkj6612), [seproDev](https://github.com/seproDev)
- **utils**
    - `mimetype2ext`: [Recognize `vnd.dlna.mpeg-tts`](https://github.com/yt-dlp/yt-dlp/commit/98b6b0d339130e955f9d45ce67c0357c633c1627) ([#14388](https://github.com/yt-dlp/yt-dlp/issues/14388)) by [seproDev](https://github.com/seproDev)
    - `random_user_agent`: [Bump versions](https://github.com/yt-dlp/yt-dlp/commit/f3829463c728a5b5e62b3fc157e71c99b26edac7) ([#14317](https://github.com/yt-dlp/yt-dlp/issues/14317)) by [seproDev](https://github.com/seproDev)

#### Extractor changes
- **10play**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/067062bb87ac057e453ce9efdac7ca117a6a7da0) ([#14242](https://github.com/yt-dlp/yt-dlp/issues/14242)) by [Sipherdrakon](https://github.com/Sipherdrakon)
- **applepodcast**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/b2c01d0498653e0239c7226c5a7fcb614dd4dbc8) ([#14372](https://github.com/yt-dlp/yt-dlp/issues/14372)) by [seproDev](https://github.com/seproDev)
- **loco**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/f5cb721185e8725cf4eb4080e86aa9aa73ef25b3) ([#14256](https://github.com/yt-dlp/yt-dlp/issues/14256)) by [seproDev](https://github.com/seproDev)
- **mitele**: [Remove extractor](https://github.com/yt-dlp/yt-dlp/commit/820c6e244571557fcfc127d4b3680e2d07c04dca) ([#14348](https://github.com/yt-dlp/yt-dlp/issues/14348)) by [bashonly](https://github.com/bashonly)
- **newspicks**: [Warn when only preview is available](https://github.com/yt-dlp/yt-dlp/commit/9def9a4b0e958285e055eb350e5dd43b5c423336) ([#14197](https://github.com/yt-dlp/yt-dlp/issues/14197)) by [doe1080](https://github.com/doe1080)
- **onsen**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/17bfaa53edf5c52fce73cf0cef4592f929c2462d) ([#10971](https://github.com/yt-dlp/yt-dlp/issues/10971)) by [doe1080](https://github.com/doe1080)
- **pixivsketch**: [Remove extractors](https://github.com/yt-dlp/yt-dlp/commit/3d9a88bd8ef149d781c7e569e48e61551eda395e) ([#14196](https://github.com/yt-dlp/yt-dlp/issues/14196)) by [doe1080](https://github.com/doe1080)
- **smotrim**: [Rework extractors](https://github.com/yt-dlp/yt-dlp/commit/8cb037c0b06c2815080f87d61ea2e95c412785fc) ([#14200](https://github.com/yt-dlp/yt-dlp/issues/14200)) by [doe1080](https://github.com/doe1080), [swayll](https://github.com/swayll)
- **telecinco**: [Support browser impersonation](https://github.com/yt-dlp/yt-dlp/commit/e123a48f1155703d8709a4221a42bd45c0a2b3ce) ([#14351](https://github.com/yt-dlp/yt-dlp/issues/14351)) by [bashonly](https://github.com/bashonly)
- **tiktok**: live: [Fix room ID extraction](https://github.com/yt-dlp/yt-dlp/commit/5c1abcdc49b9d23e1dcb77b95d063cf2bf93e352) ([#14287](https://github.com/yt-dlp/yt-dlp/issues/14287)) by [bashonly](https://github.com/bashonly)
- **ttinglive**: [Adapt FlexTV extractor to new domain](https://github.com/yt-dlp/yt-dlp/commit/4bc19adc8798e7564513898cf34adc432c6c5709) ([#14375](https://github.com/yt-dlp/yt-dlp/issues/14375)) by [seproDev](https://github.com/seproDev)
- **tunein**: [Fix extractors](https://github.com/yt-dlp/yt-dlp/commit/7d9e48b22a780c2e8d2d2d68940d49fd2029ab70) ([#13981](https://github.com/yt-dlp/yt-dlp/issues/13981)) by [doe1080](https://github.com/doe1080)
- **twitch**: clips: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/f8750504c2f71b54586fb857d60dce4e354a13ea) ([#14397](https://github.com/yt-dlp/yt-dlp/issues/14397)) by [seproDev](https://github.com/seproDev)
- **vimeo**: [Fix login error handling](https://github.com/yt-dlp/yt-dlp/commit/679587dac7cd011a1472255e1f06efb017ba91b6) ([#14280](https://github.com/yt-dlp/yt-dlp/issues/14280)) by [bashonly](https://github.com/bashonly)
- **vk**
    - [Support vksport URLs](https://github.com/yt-dlp/yt-dlp/commit/b81e9272dce5844e8fba371cb4b4fd95ad3ed819) ([#14341](https://github.com/yt-dlp/yt-dlp/issues/14341)) by [seproDev](https://github.com/seproDev)
    - uservideos: [Support alternate URL format](https://github.com/yt-dlp/yt-dlp/commit/bf5d18016b03a3f2fd5d3494d9efe85d3f8beeac) ([#14376](https://github.com/yt-dlp/yt-dlp/issues/14376)) by [seproDev](https://github.com/seproDev)
- **xhamster**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/a1c98226a4e869a34cc764a9dcf7a4558516308e) ([#14286](https://github.com/yt-dlp/yt-dlp/issues/14286)) by [nicolaasjan](https://github.com/nicolaasjan), [willsmillie](https://github.com/willsmillie) (With fixes in [677997d](https://github.com/yt-dlp/yt-dlp/commit/677997d84eaec0037397f7d935386daa3025b004) by [arand](https://github.com/arand), [thegymguy](https://github.com/thegymguy))
- **youtube**: [Force player `0004de42`](https://github.com/yt-dlp/yt-dlp/commit/7f5d9f8543d19590eeec9473d54fa00151afa78a) ([#14398](https://github.com/yt-dlp/yt-dlp/issues/14398)) by [seproDev](https://github.com/seproDev)

#### Misc. changes
- **build**
    - [Fix cache warmer](https://github.com/yt-dlp/yt-dlp/commit/8597a4331e8535a246d777bb8397bdcab251766c) ([#14261](https://github.com/yt-dlp/yt-dlp/issues/14261)) by [bashonly](https://github.com/bashonly)
    - [Post-release workflow cleanup](https://github.com/yt-dlp/yt-dlp/commit/cd94e7004036e0149d7d3fa236c7dd44cf460788) ([#14250](https://github.com/yt-dlp/yt-dlp/issues/14250)) by [bashonly](https://github.com/bashonly)
    - [Refactor Linux build jobs](https://github.com/yt-dlp/yt-dlp/commit/e2d37bcc8e84be9ce0f67fc24cb830c13963d10f) ([#14275](https://github.com/yt-dlp/yt-dlp/issues/14275)) by [bashonly](https://github.com/bashonly)
    - [Use PyInstaller 6.16 for Windows](https://github.com/yt-dlp/yt-dlp/commit/df4b4e8ccf3385be6d2ad65465a0704c223dfdfb) ([#14318](https://github.com/yt-dlp/yt-dlp/issues/14318)) by [bashonly](https://github.com/bashonly)
    - [Use SPDX license identifier](https://github.com/yt-dlp/yt-dlp/commit/48a214bef4bfd5984362d3d24b09dce50ba449ea) ([#14260](https://github.com/yt-dlp/yt-dlp/issues/14260)) by [cdce8p](https://github.com/cdce8p)
    - [Use new PyInstaller builds for Windows](https://github.com/yt-dlp/yt-dlp/commit/c8ede5f34d6c95c442b936bb01ecbcb724aefdef) ([#14273](https://github.com/yt-dlp/yt-dlp/issues/14273)) by [bashonly](https://github.com/bashonly)
- **ci**
    - [Bump actions/setup-python to v6](https://github.com/yt-dlp/yt-dlp/commit/22ea0688ed6bcdbe4c51401a84239cda3decfc9c) ([#14282](https://github.com/yt-dlp/yt-dlp/issues/14282)) by [bashonly](https://github.com/bashonly)
    - [Improve workflow checks](https://github.com/yt-dlp/yt-dlp/commit/ae3923b6b23bc62115be55510d6b5842f7a46b5f) ([#14316](https://github.com/yt-dlp/yt-dlp/issues/14316)) by [bashonly](https://github.com/bashonly)
    - [Test and lint workflows](https://github.com/yt-dlp/yt-dlp/commit/7c9b10ebc83907d37f9f65ea9d4bd6f5e3bd1371) ([#14249](https://github.com/yt-dlp/yt-dlp/issues/14249)) by [bashonly](https://github.com/bashonly)
    - [Test with Python 3.14](https://github.com/yt-dlp/yt-dlp/commit/83b8409366d0f9554eaeae56394b244dab64a2cb) ([#13468](https://github.com/yt-dlp/yt-dlp/issues/13468)) by [bashonly](https://github.com/bashonly)
- **cleanup**
    - [Bump ruff to 0.13.x](https://github.com/yt-dlp/yt-dlp/commit/ba8044685537e8e14adc6826fb4d730856fd2e2b) ([#14293](https://github.com/yt-dlp/yt-dlp/issues/14293)) by [bashonly](https://github.com/bashonly)
    - [Deprecate various options](https://github.com/yt-dlp/yt-dlp/commit/08d78996831bd8e1e3c2592d740c3def00bbf548) ([#13821](https://github.com/yt-dlp/yt-dlp/issues/13821)) by [seproDev](https://github.com/seproDev)
    - [Remove broken extractors](https://github.com/yt-dlp/yt-dlp/commit/65e90aea29cf3bfc9d1ae3e009fbf9a8db3a23c9) ([#14305](https://github.com/yt-dlp/yt-dlp/issues/14305)) by [bashonly](https://github.com/bashonly)
    - [Remove setup.cfg](https://github.com/yt-dlp/yt-dlp/commit/eb4b3a5fc7765a6cd0370ca44ccee0d7d5111dd7) ([#14314](https://github.com/yt-dlp/yt-dlp/issues/14314)) by [seproDev](https://github.com/seproDev) (With fixes in [8ab262c](https://github.com/yt-dlp/yt-dlp/commit/8ab262c66bd3e1d8874fb2d070068ba1f0d48f16) by [bashonly](https://github.com/bashonly))
    - Miscellaneous: [2e81e29](https://github.com/yt-dlp/yt-dlp/commit/2e81e298cdce23afadb06a95836284acb38f7018) by [bashonly](https://github.com/bashonly), [doe1080](https://github.com/doe1080), [seproDev](https://github.com/seproDev)
- **docs**
    - [Clarify license of PyInstaller-bundled executables](https://github.com/yt-dlp/yt-dlp/commit/e6e6b512141e66b1b36058966804fe59c02a2b4d) ([#14257](https://github.com/yt-dlp/yt-dlp/issues/14257)) by [seproDev](https://github.com/seproDev)
    - [Establish AI/LLM contribution policy](https://github.com/yt-dlp/yt-dlp/commit/8821682f15af59047bc1f92724ef8a9ba30d6f7e) ([#14194](https://github.com/yt-dlp/yt-dlp/issues/14194)) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev)
- **test**: utils: [Fix `sanitize_path` test for Windows CPython 3.11](https://github.com/yt-dlp/yt-dlp/commit/a183837ec8bb5e28fe6eb3a9d77ea2d0d7a106bd) ([#13878](https://github.com/yt-dlp/yt-dlp/issues/13878)) by [Grub4K](https://github.com/Grub4K)

### 2025.09.05

#### Core changes
Collaborators.md → Maintainers.md
@@ -1,59 +1,34 @@
# Collaborators
# Maintainers

This is a list of the collaborators of the project and their major contributions. See the [Changelog](Changelog.md) for more details.
This file lists the maintainers of yt-dlp and their major contributions. See the [Changelog](Changelog.md) for more details.

You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [authors of youtube-dl](https://github.com/ytdl-org/youtube-dl/blob/master/AUTHORS)

## Core Maintainers

## [pukkandan](https://github.com/pukkandan)
Core Maintainers are responsible for reviewing and merging contributions, publishing releases, and steering the overall direction of the project.

[](https://ko-fi.com/pukkandan)
[](https://github.com/sponsors/pukkandan)
**You can contact the core maintainers via `maintainers@yt-dlp.org`.**

* Owner of the fork

## [shirt](https://github.com/shirt-dev)

[](https://ko-fi.com/shirt)

* Multithreading (`-N`) and aria2c support for fragment downloads
* Support for media initialization and discontinuity in HLS
* The self-updater (`-U`)

## [coletdjnz](https://github.com/coletdjnz)
### [coletdjnz](https://github.com/coletdjnz)

[](https://github.com/sponsors/coletdjnz)

* Improved plugin architecture
* Rewrote the networking infrastructure, implemented support for `requests`
* YouTube improvements including: age-gate bypass, private playlists, multiple-clients (to avoid throttling) and a lot of under-the-hood improvements
* Added support for new websites YoutubeWebArchive, MainStreaming, PRX, nzherald, Mediaklikk, StarTV etc
* Improved/fixed support for Patreon, panopto, gfycat, itv, pbs, SouthParkDE etc
* Overhauled the networking stack and implemented support for `requests` and `curl_cffi` (`--impersonate`) HTTP clients
* Reworked the plugin architecture to support installing plugins across all yt-dlp distributions (exe, pip, etc.)
* Maintains support for YouTube
* Added and fixed support for various other sites

### [bashonly](https://github.com/bashonly)

* Rewrote and maintains the build/release workflows and the self-updater: executables, automated/nightly/master releases, `--update-to`
* Overhauled external downloader cookie handling
* Added `--cookies-from-browser` support for Firefox containers
* Overhauled and maintains support for sites like Youtube, Vimeo, Twitter, TikTok, etc
* Added support for sites like Dacast, Kick, Loom, SproutVideo, Triller, Weverse, etc

## [Ashish0804](https://github.com/Ashish0804) <sub><sup>[Inactive]</sup></sub>

[](https://ko-fi.com/ashish0804)

* Added support for new websites BiliIntl, DiscoveryPlusIndia, OlympicsReplay, PlanetMarathi, ShemarooMe, Utreon, Zee5 etc
* Added playlist/series downloads for Hotstar, ParamountPlus, Rumble, SonyLIV, Trovo, TubiTv, Voot etc
* Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc

## [bashonly](https://github.com/bashonly)

* `--update-to`, self-updater rewrite, automated/nightly/master releases
* `--cookies-from-browser` support for Firefox containers, external downloader cookie handling overhaul
* Added support for new websites like Dacast, Kick, NBCStations, Triller, VideoKen, Weverse, WrestleUniverse etc
* Improved/fixed support for Anvato, Brightcove, Reddit, SlidesLive, TikTok, Twitter, Vimeo etc

## [Grub4K](https://github.com/Grub4K)
### [Grub4K](https://github.com/Grub4K)

[](https://github.com/sponsors/Grub4K) [](https://ko-fi.com/Grub4K)

@@ -63,8 +38,48 @@ You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [autho
* Improved/fixed/added Bundestag, crunchyroll, pr0gramm, Twitter, WrestleUniverse etc

## [sepro](https://github.com/seproDev)
### [sepro](https://github.com/seproDev)

* UX improvements: Warn when ffmpeg is missing, warn when double-clicking exe
* Code cleanup: Remove dead extractors, mark extractors as broken, enable/apply ruff rules
* Improved/fixed/added ArdMediathek, DRTV, Floatplane, MagentaMusik, Naver, Nebula, OnDemandKorea, Vbox7 etc

## Inactive Core Maintainers

### [pukkandan](https://github.com/pukkandan)

[](https://ko-fi.com/pukkandan)
[](https://github.com/sponsors/pukkandan)

* Founder of the fork
* Lead Maintainer from 2021-2024

### [shirt](https://github.com/shirt-dev)

[](https://ko-fi.com/shirt)

* Multithreading (`-N`) and aria2c support for fragment downloads
* Support for media initialization and discontinuity in HLS
* The self-updater (`-U`)

### [Ashish0804](https://github.com/Ashish0804)

[](https://ko-fi.com/ashish0804)

* Added support for new websites BiliIntl, DiscoveryPlusIndia, OlympicsReplay, PlanetMarathi, ShemarooMe, Utreon, Zee5 etc
* Added playlist/series downloads for Hotstar, ParamountPlus, Rumble, SonyLIV, Trovo, TubiTv, Voot etc
* Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc

## Triage Maintainers

Triage Maintainers are frequent contributors who can manage issues and pull requests.

- [gamer191](https://github.com/gamer191)
- [garret1317](https://github.com/garret1317)
- [pzhlkj6612](https://github.com/pzhlkj6612)
- [DTrombett](https://github.com/dtrombett)
- [doe1080](https://github.com/doe1080)
- [grqz](https://github.com/grqz)
Makefile
@@ -10,7 +10,7 @@ tar: yt-dlp.tar.gz
# intended use: when building a source distribution,
# make pypi-files && python3 -m build -sn .
pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
completions yt-dlp.1 pyproject.toml setup.cfg devscripts/* test/*
completions yt-dlp.1 pyproject.toml devscripts/* test/*

.PHONY: all clean clean-all clean-test clean-dist clean-cache \
completions completion-bash completion-fish completion-zsh \
@@ -157,9 +157,9 @@ yt-dlp.tar.gz: all
--exclude '.git' \
-- \
README.md supportedsites.md Changelog.md LICENSE \
CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
CONTRIBUTING.md Maintainers.md CONTRIBUTORS AUTHORS \
Makefile yt-dlp.1 README.txt completions .gitignore \
setup.cfg yt-dlp yt_dlp pyproject.toml devscripts test
yt-dlp yt_dlp pyproject.toml devscripts test

AUTHORS: Changelog.md
@if [ -d '.git' ] && command -v git > /dev/null ; then \
README.md
@@ -5,7 +5,7 @@
|
||||
|
||||
[](#installation "Installation")
|
||||
[](https://pypi.org/project/yt-dlp "PyPI")
|
||||
[](Collaborators.md#collaborators "Donate")
|
||||
[](Maintainers.md#maintainers "Donate")
|
||||
[](https://discord.gg/H5MNcFW63r "Discord")
|
||||
[](supportedsites.md "Supported Sites")
|
||||
[](LICENSE "License")
|
||||
@@ -138,6 +138,17 @@ curl -L https://github.com/yt-dlp/yt-dlp/raw/master/public.key | gpg --import
|
||||
gpg --verify SHA2-256SUMS.sig SHA2-256SUMS
|
||||
gpg --verify SHA2-512SUMS.sig SHA2-512SUMS
|
||||
```
|
||||
|
||||
#### Licensing
|
||||
|
||||
While yt-dlp is licensed under the [Unlicense](LICENSE), many of the release files contain code from other projects with different licenses.
|
||||
|
||||
Most notably, the PyInstaller-bundled executables include GPLv3+ licensed code, and as such the combined work is licensed under [GPLv3+](https://www.gnu.org/licenses/gpl-3.0.html).
|
||||
|
||||
See [THIRD_PARTY_LICENSES.txt](THIRD_PARTY_LICENSES.txt) for details.
|
||||
|
||||
The zipimport binary (`yt-dlp`), the source tarball (`yt-dlp.tar.gz`), and the PyPI source distribution & wheel only contain code licensed under the [Unlicense](LICENSE).
|
||||
|
||||
<!-- MANPAGE: END EXCLUDED SECTION -->
|
||||
|
||||
**Note**: The manpages, shell completion (autocomplete) files etc. are available inside the [source tarball](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.tar.gz)
|
||||
@@ -183,7 +194,7 @@ When running a yt-dlp version that is older than 90 days, you will see a warning
|
||||
You can suppress this warning by adding `--no-update` to your command or configuration file.
|
||||
|
||||
## DEPENDENCIES
|
||||
Python versions 3.9+ (CPython) and 3.11+ (PyPy) are supported. Other versions and implementations may or may not work correctly.
|
||||
Python versions 3.10+ (CPython) and 3.11+ (PyPy) are supported. Other versions and implementations may or may not work correctly.
|
||||
|
||||
<!-- Python 3.5+ uses VC++14 and it is already embedded in the binary created
|
||||
<!x-- https://www.microsoft.com/en-us/download/details.aspx?id=26999 --x>
|
||||
@@ -230,8 +241,6 @@ The following provide support for impersonating browser requests. This may be re
|
||||
|
||||
### Deprecated
|
||||
|
||||
* [**avconv** and **avprobe**](https://www.libav.org) - Now **deprecated** alternative to ffmpeg. License [depends on the build](https://libav.org/legal)
|
||||
* [**sponskrub**](https://github.com/faissaloo/SponSkrub) - For using the now **deprecated** [sponskrub options](#sponskrub-options). Licensed under [GPLv3+](https://github.com/faissaloo/SponSkrub/blob/master/LICENCE.md)
|
||||
* [**rtmpdump**](http://rtmpdump.mplayerhq.hu) - For downloading `rtmp` streams. ffmpeg can be used instead with `--downloader ffmpeg`. Licensed under [GPLv2+](http://rtmpdump.mplayerhq.hu)
|
||||
* [**mplayer**](http://mplayerhq.hu/design7/info.html) or [**mpv**](https://mpv.io) - For downloading `rstp`/`mms` streams. ffmpeg can be used instead with `--downloader ffmpeg`. Licensed under [GPLv2+](https://github.com/mpv-player/mpv/blob/master/Copyright)
|
||||
|
||||
@@ -264,7 +273,7 @@ On some systems, you may need to use `py` or `python` instead of `python3`.
|
||||
**Important**: Running `pyinstaller` directly **instead of** using `python -m bundle.pyinstaller` is **not** officially supported. This may or may not work correctly.
|
||||
|
||||
### Platform-independent Binary (UNIX)
|
||||
You will need the build tools `python` (3.9+), `zip`, `make` (GNU), `pandoc`\* and `pytest`\*.
|
||||
You will need the build tools `python` (3.10+), `zip`, `make` (GNU), `pandoc`\* and `pytest`\*.
|
||||
|
||||
After installing these, simply run `make`.
|
||||
|
||||
@@ -311,7 +320,6 @@ Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords
|
||||
playlist (default)
|
||||
--abort-on-error Abort downloading of further videos if an
|
||||
error occurs (Alias: --no-ignore-errors)
|
||||
--dump-user-agent Display the current user-agent and exit
|
||||
--list-extractors List all supported extractors and exit
|
||||
--extractor-descriptions Output descriptions of all supported
|
||||
extractors and exit
|
||||
@@ -562,8 +570,6 @@ Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords
|
||||
--playlist-random and --playlist-reverse
|
||||
--no-lazy-playlist Process videos in the playlist only after
|
||||
the entire playlist is parsed (default)
|
||||
--xattr-set-filesize Set file xattribute ytdl.filesize with
|
||||
expected file size
|
||||
--hls-use-mpegts Use the mpegts container for HLS videos;
|
||||
allowing some players to play the video
|
||||
while downloading, and reducing the chance
|
||||
@@ -587,9 +593,9 @@ Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords
|
||||
use (optionally) prefixed by the protocols
|
||||
(http, ftp, m3u8, dash, rstp, rtmp, mms) to
|
||||
use it for. Currently supports native,
|
||||
aria2c, avconv, axel, curl, ffmpeg, httpie,
|
||||
wget. You can use this option multiple times
|
||||
to set different downloaders for different
|
||||
aria2c, axel, curl, ffmpeg, httpie, wget.
|
||||
You can use this option multiple times to
|
||||
set different downloaders for different
|
||||
protocols. E.g. --downloader aria2c
|
||||
--downloader "dash,m3u8:native" will use
|
||||
aria2c for http/ftp downloads, and the
|
||||
@@ -1808,11 +1814,12 @@ The following extractors use this feature:
|
||||
#### youtube
|
||||
* `lang`: Prefer translated metadata (`title`, `description` etc) of this language code (case-sensitive). By default, the video primary language metadata is preferred, with a fallback to `en` translated. See [youtube/_base.py](https://github.com/yt-dlp/yt-dlp/blob/415b4c9f955b1a0391204bd24a7132590e7b3bdb/yt_dlp/extractor/youtube/_base.py#L402-L409) for the list of supported content language codes
|
||||
* `skip`: One or more of `hls`, `dash` or `translated_subs` to skip extraction of the m3u8 manifests, dash manifests and [auto-translated subtitles](https://github.com/yt-dlp/yt-dlp/issues/4090#issuecomment-1158102032) respectively
|
||||
* `player_client`: Clients to extract video data from. The currently available clients are `web`, `web_safari`, `web_embedded`, `web_music`, `web_creator`, `mweb`, `ios`, `android`, `android_vr`, `tv`, `tv_simply` and `tv_embedded`. By default, `tv_simply,tv,web` is used, but `tv,web_safari,web` is used when authenticating with cookies and `tv,web_creator,web` is used with premium accounts. The `web_music` client is added for `music.youtube.com` URLs when logged-in cookies are used. The `web_embedded` client is added for age-restricted videos but only works if the video is embeddable. The `tv_embedded` and `web_creator` clients are added for age-restricted videos if account age-verification is required. Some clients, such as `web` and `web_music`, require a `po_token` for their formats to be downloadable. Some clients, such as `web_creator`, will only work with authentication. Not all clients support authentication via cookies. You can use `default` for the default clients, or you can use `all` for all clients (not recommended). You can prefix a client with `-` to exclude it, e.g. `youtube:player_client=default,-ios`
|
||||
* `player_client`: Clients to extract video data from. The currently available clients are `web`, `web_safari`, `web_embedded`, `web_music`, `web_creator`, `mweb`, `ios`, `android`, `android_sdkless`, `android_vr`, `tv`, `tv_simply` and `tv_embedded`. By default, `android_sdkless,tv,web_safari,web` is used. `android_sdkless` is omitted if cookies are passed. If premium cookies are passed, `tv,web_creator,web_safari,web` is used instead. The `web_music` client is added for `music.youtube.com` URLs when logged-in cookies are used. The `web_embedded` client is added for age-restricted videos but only works if the video is embeddable. The `tv_embedded` and `web_creator` clients are added for age-restricted videos if account age-verification is required. Some clients, such as `web` and `web_music`, require a `po_token` for their formats to be downloadable. Some clients, such as `web_creator`, will only work with authentication. Not all clients support authentication via cookies. You can use `default` for the default clients, or you can use `all` for all clients (not recommended). You can prefix a client with `-` to exclude it, e.g. `youtube:player_client=default,-ios`
|
||||
* `player_skip`: Skip some network requests that are generally needed for robust extraction. One or more of `configs` (skip client configs), `webpage` (skip initial webpage), `js` (skip js player), `initial_data` (skip initial data/next ep request). While these options can help reduce the number of requests needed or avoid some rate-limiting, they could cause issues such as missing formats or metadata. See [#860](https://github.com/yt-dlp/yt-dlp/pull/860) and [#12826](https://github.com/yt-dlp/yt-dlp/issues/12826) for more details
|
||||
* `webpage_skip`: Skip extraction of embedded webpage data. One or both of `player_response`, `initial_data`. These options are for testing purposes and don't skip any network requests
|
||||
* `player_params`: YouTube player parameters to use for player requests. Will overwrite any default ones set by yt-dlp.
|
||||
* `player_js_variant`: The player javascript variant to use for signature and nsig deciphering. The known variants are: `main`, `tce`, `tv`, `tv_es6`, `phone`, `tablet`. The default is `main`, and the others are for debugging purposes. You can use `actual` to go with what is prescribed by the site
|
||||
* `player_js_variant`: The player javascript variant to use for n/sig deciphering. The known variants are: `main`, `tcc`, `tce`, `es5`, `es6`, `tv`, `tv_es6`, `phone`, `tablet`. The default is `main`, and the others are for debugging purposes. You can use `actual` to go with what is prescribed by the site
|
||||
* `player_js_version`: The player javascript version to use for n/sig deciphering, in the format of `signature_timestamp@hash` (e.g. `20348@0004de42`). The default is to use what is prescribed by the site, and can be selected with `actual`
|
||||
* `comment_sort`: `top` or `new` (default) - choose comment sorting mode (on YouTube's side)
|
||||
* `max_comments`: Limit the amount of comments to gather. Comma-separated list of integers representing `max-comments,max-parents,max-replies,max-replies-per-thread`. Default is `all,all,all,all`
|
||||
* E.g. `all,all,1000,10` will get a maximum of 1000 replies total, with up to 10 replies per thread. `1000,all,100` will get a maximum of 1000 comments, with a maximum of 100 replies total
|
||||
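These extractor arguments can also be passed when embedding yt-dlp in Python. Below is a minimal sketch; the excluded client, the comment limits, and the video URL are illustrative placeholders, not values taken from this diff:

```python
import yt_dlp

# Roughly equivalent to:
#   yt-dlp --extractor-args "youtube:player_client=default,-ios;max_comments=100,all,10" URL
ydl_opts = {
    'extractor_args': {
        'youtube': {
            # prefix a client with '-' to exclude it from the defaults
            'player_client': ['default', '-ios'],
            # max-comments,max-parents,max-replies
            'max_comments': ['100', 'all', '10'],
        },
    },
}

with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    ydl.download(['https://www.youtube.com/watch?v=EXAMPLE'])  # placeholder URL
```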
@@ -2207,7 +2214,6 @@ with yt_dlp.YoutubeDL(ydl_opts) as ydl:
|
||||
* Fix for [n-sig based throttling](https://github.com/ytdl-org/youtube-dl/issues/29326) **\***
|
||||
* Download livestreams from the start using `--live-from-start` (*experimental*)
|
||||
* Channel URLs download all uploads of the channel, including shorts and live
|
||||
* Support for [logging in with OAuth](https://github.com/yt-dlp/yt-dlp/wiki/Extractors#logging-in-with-oauth)
|
||||
|
||||
* **Cookies from browser**: Cookies can be automatically extracted from all major web browsers using `--cookies-from-browser BROWSER[+KEYRING][:PROFILE][::CONTAINER]`
|
||||
|
||||
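A minimal sketch of the same feature from the embedding API, assuming the `cookiesfrombrowser` option takes a `(browser, profile, keyring, container)` tuple; the profile and container names below are placeholders:

```python
import yt_dlp

# CLI form: --cookies-from-browser "firefox:default::Personal"
ydl_opts = {
    # (browser, profile, keyring, container) -- trailing entries may be omitted or None
    'cookiesfrombrowser': ('firefox', 'default', None, 'Personal'),
}

with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    info = ydl.extract_info('https://www.youtube.com/watch?v=EXAMPLE', download=False)  # placeholder URL
    print(info.get('title'))
```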
@@ -2249,7 +2255,7 @@ Features marked with a **\*** have been back-ported to youtube-dl
|
||||
|
||||
Some of yt-dlp's default options are different from that of youtube-dl and youtube-dlc:
|
||||
|
||||
* yt-dlp supports only [Python 3.9+](## "Windows 8"), and will remove support for more versions as they [become EOL](https://devguide.python.org/versions/#python-release-cycle); while [youtube-dl still supports Python 2.6+ and 3.2+](https://github.com/ytdl-org/youtube-dl/issues/30568#issue-1118238743)
|
||||
* yt-dlp supports only [Python 3.10+](## "Windows 8"), and will remove support for more versions as they [become EOL](https://devguide.python.org/versions/#python-release-cycle); while [youtube-dl still supports Python 2.6+ and 3.2+](https://github.com/ytdl-org/youtube-dl/issues/30568#issue-1118238743)
|
||||
* The options `--auto-number` (`-A`), `--title` (`-t`) and `--literal` (`-l`), no longer work. See [removed options](#Removed) for details
|
||||
* `avconv` is not supported as an alternative to `ffmpeg`
|
||||
* yt-dlp stores config files in slightly different locations to youtube-dl. See [CONFIGURATION](#configuration) for a list of correct locations
|
||||
@@ -2351,11 +2357,7 @@ While these options still work, their use is not recommended since there are oth
|
||||
--hls-prefer-native --downloader "m3u8:native"
|
||||
--hls-prefer-ffmpeg --downloader "m3u8:ffmpeg"
|
||||
--list-formats-old --compat-options list-formats (Alias: --no-list-formats-as-table)
|
||||
--list-formats-as-table --compat-options -list-formats [Default] (Alias: --no-list-formats-old)
|
||||
--youtube-skip-dash-manifest --extractor-args "youtube:skip=dash" (Alias: --no-youtube-include-dash-manifest)
|
||||
--youtube-skip-hls-manifest --extractor-args "youtube:skip=hls" (Alias: --no-youtube-include-hls-manifest)
|
||||
--youtube-include-dash-manifest Default (Alias: --no-youtube-skip-dash-manifest)
|
||||
--youtube-include-hls-manifest Default (Alias: --no-youtube-skip-hls-manifest)
|
||||
--list-formats-as-table --compat-options -list-formats [Default]
|
||||
--geo-bypass --xff "default"
|
||||
--no-geo-bypass --xff "never"
|
||||
--geo-bypass-country CODE --xff CODE
|
||||
@@ -2366,18 +2368,13 @@ These options are not intended to be used by the end-user
|
||||
|
||||
--test Download only part of video for testing extractors
|
||||
--load-pages Load pages dumped by --write-pages
|
||||
--youtube-print-sig-code For testing youtube signatures
|
||||
--allow-unplayable-formats List unplayable formats also
|
||||
--no-allow-unplayable-formats Default
|
||||
|
||||
#### Old aliases
|
||||
These are aliases that are no longer documented for various reasons
|
||||
|
||||
--avconv-location --ffmpeg-location
|
||||
--clean-infojson --clean-info-json
|
||||
--cn-verification-proxy URL --geo-verification-proxy URL
|
||||
--dump-headers --print-traffic
|
||||
--dump-intermediate-pages --dump-pages
|
||||
--force-write-download-archive --force-write-archive
|
||||
--no-clean-infojson --no-clean-info-json
|
||||
--no-split-tracks --no-split-chapters
|
||||
@@ -2391,7 +2388,7 @@ These are aliases that are no longer documented for various reasons
|
||||
--yes-overwrites --force-overwrites
|
||||
|
||||
#### Sponskrub Options
|
||||
Support for [SponSkrub](https://github.com/faissaloo/SponSkrub) has been deprecated in favor of the `--sponsorblock` options
|
||||
Support for [SponSkrub](https://github.com/faissaloo/SponSkrub) has been removed in favor of the `--sponsorblock` options
|
||||
|
||||
--sponskrub --sponsorblock-mark all
|
||||
--no-sponskrub --no-sponsorblock
|
||||
@@ -2413,6 +2410,17 @@ These options may no longer work as intended
|
||||
--no-include-ads Default
|
||||
--write-annotations No supported site has annotations now
|
||||
--no-write-annotations Default
|
||||
--avconv-location Removed alias for --ffmpeg-location
|
||||
--cn-verification-proxy URL Removed alias for --geo-verification-proxy URL
|
||||
--dump-headers Removed alias for --print-traffic
|
||||
--dump-intermediate-pages Removed alias for --dump-pages
|
||||
--youtube-skip-dash-manifest Removed alias for --extractor-args "youtube:skip=dash" (Alias: --no-youtube-include-dash-manifest)
|
||||
--youtube-skip-hls-manifest Removed alias for --extractor-args "youtube:skip=hls" (Alias: --no-youtube-include-hls-manifest)
|
||||
--youtube-include-dash-manifest Default (Alias: --no-youtube-skip-dash-manifest)
|
||||
--youtube-include-hls-manifest Default (Alias: --no-youtube-skip-hls-manifest)
|
||||
--youtube-print-sig-code Removed testing functionality
|
||||
--dump-user-agent No longer supported
|
||||
--xattr-set-filesize No longer supported
|
||||
--compat-options seperate-video-versions No longer needed
|
||||
--compat-options no-youtube-prefer-utc-upload-date No longer supported
|
||||
|
||||
|
||||
THIRD_PARTY_LICENSES.txt (new file)
File diff suppressed because it is too large
@@ -13,6 +13,9 @@ services:
|
||||
CHANNEL: ${CHANNEL:?}
|
||||
ORIGIN: ${ORIGIN:?}
|
||||
VERSION:
|
||||
PYTHON_VERSION:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../..:/yt-dlp
|
||||
|
||||
@@ -26,6 +29,9 @@ services:
|
||||
VERIFYIMAGE: quay.io/pypa/manylinux2014_x86_64:latest
|
||||
environment:
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
UPDATE_TO:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../../dist:/build
|
||||
|
||||
@@ -42,6 +48,9 @@ services:
|
||||
CHANNEL: ${CHANNEL:?}
|
||||
ORIGIN: ${ORIGIN:?}
|
||||
VERSION:
|
||||
PYTHON_VERSION:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../..:/yt-dlp
|
||||
|
||||
@@ -55,7 +64,9 @@ services:
|
||||
VERIFYIMAGE: quay.io/pypa/manylinux2014_aarch64:latest
|
||||
environment:
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
SKIP_UPDATE_TO: "1" # TODO: remove when there is a glibc2.17 aarch64 release to --update-to
|
||||
UPDATE_TO:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../../dist:/build
|
||||
|
||||
@@ -72,10 +83,12 @@ services:
|
||||
CHANNEL: ${CHANNEL:?}
|
||||
ORIGIN: ${ORIGIN:?}
|
||||
VERSION:
|
||||
SKIP_ONEFILE_BUILD: "1"
|
||||
PYTHON_VERSION:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../..:/yt-dlp
|
||||
- ~/yt-dlp-build-venv:/yt-dlp-build-venv
|
||||
- ../../venv:/yt-dlp-build-venv
|
||||
|
||||
linux_armv7l_verify:
|
||||
build:
|
||||
@@ -87,7 +100,9 @@ services:
|
||||
VERIFYIMAGE: arm32v7/debian:bullseye
|
||||
environment:
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
TEST_ONEDIR_BUILD: "1"
|
||||
UPDATE_TO:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../../dist:/build
|
||||
|
||||
@@ -104,6 +119,9 @@ services:
|
||||
CHANNEL: ${CHANNEL:?}
|
||||
ORIGIN: ${ORIGIN:?}
|
||||
VERSION:
|
||||
PYTHON_VERSION:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../..:/yt-dlp
|
||||
|
||||
@@ -117,7 +135,9 @@ services:
|
||||
VERIFYIMAGE: alpine:3.22
|
||||
environment:
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
SKIP_UPDATE_TO: "1" # TODO: remove when there is a musllinux_aarch64 release to --update-to
|
||||
UPDATE_TO:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../../dist:/build
|
||||
|
||||
@@ -134,6 +154,9 @@ services:
|
||||
CHANNEL: ${CHANNEL:?}
|
||||
ORIGIN: ${ORIGIN:?}
|
||||
VERSION:
|
||||
PYTHON_VERSION:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
EXCLUDE_CURL_CFFI: "1"
|
||||
volumes:
|
||||
- ../..:/yt-dlp
|
||||
@@ -148,6 +171,8 @@ services:
|
||||
VERIFYIMAGE: alpine:3.22
|
||||
environment:
|
||||
EXE_NAME: ${EXE_NAME:?}
|
||||
SKIP_UPDATE_TO: "1" # TODO: remove when there is a musllinux_aarch64 release to --update-to
|
||||
UPDATE_TO:
|
||||
SKIP_ONEDIR_BUILD:
|
||||
SKIP_ONEFILE_BUILD:
|
||||
volumes:
|
||||
- ../../dist:/build
|
||||
|
||||
@@ -1,16 +1,17 @@
|
||||
#!/bin/bash
|
||||
set -exuo pipefail
|
||||
|
||||
if [[ -z "${USE_PYTHON_VERSION:-}" ]]; then
|
||||
USE_PYTHON_VERSION="3.13"
|
||||
if [[ -z "${PYTHON_VERSION:-}" ]]; then
|
||||
PYTHON_VERSION="3.13"
|
||||
echo "Defaulting to using Python ${PYTHON_VERSION}"
|
||||
fi
|
||||
|
||||
function runpy {
|
||||
"/opt/shared-cpython-${USE_PYTHON_VERSION}/bin/python${USE_PYTHON_VERSION}" "$@"
|
||||
"/opt/shared-cpython-${PYTHON_VERSION}/bin/python${PYTHON_VERSION}" "$@"
|
||||
}
|
||||
|
||||
function venvpy {
|
||||
"python${USE_PYTHON_VERSION}" "$@"
|
||||
"python${PYTHON_VERSION}" "$@"
|
||||
}
|
||||
|
||||
INCLUDES=(
|
||||
@@ -23,6 +24,7 @@ if [[ -z "${EXCLUDE_CURL_CFFI:-}" ]]; then
|
||||
fi
|
||||
|
||||
runpy -m venv /yt-dlp-build-venv
|
||||
# shellcheck disable=SC1091
|
||||
source /yt-dlp-build-venv/bin/activate
|
||||
# Inside the venv we use venvpy instead of runpy
|
||||
venvpy -m ensurepip --upgrade --default-pip
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
#!/bin/sh
|
||||
set -eu
|
||||
|
||||
if [ -n "${TEST_ONEDIR_BUILD:-}" ]; then
|
||||
if [ -n "${SKIP_ONEFILE_BUILD:-}" ]; then
|
||||
if [ -n "${SKIP_ONEDIR_BUILD:-}" ]; then
|
||||
echo "All executable builds were skipped"
|
||||
exit 1
|
||||
fi
|
||||
echo "Extracting zip to verify onedir build"
|
||||
if command -v python3 >/dev/null 2>&1; then
|
||||
python3 -m zipfile -e "/build/${EXE_NAME}.zip" ./
|
||||
@@ -22,14 +26,17 @@ if [ -n "${TEST_ONEDIR_BUILD:-}" ]; then
|
||||
fi
|
||||
unzip "/build/${EXE_NAME}.zip" -d ./
|
||||
fi
|
||||
else
|
||||
echo "Verifying onefile build"
|
||||
cp "/build/${EXE_NAME}" ./
|
||||
chmod +x "./${EXE_NAME}"
|
||||
"./${EXE_NAME}" -v || true
|
||||
"./${EXE_NAME}" --version
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Verifying onefile build"
|
||||
cp "/build/${EXE_NAME}" ./
|
||||
chmod +x "./${EXE_NAME}"
|
||||
|
||||
if [ -n "${SKIP_UPDATE_TO:-}" ] || [ -n "${TEST_ONEDIR_BUILD:-}" ]; then
|
||||
if [ -z "${UPDATE_TO:-}" ]; then
|
||||
"./${EXE_NAME}" -v || true
|
||||
"./${EXE_NAME}" --version
|
||||
exit 0
|
||||
@@ -37,7 +44,7 @@ fi
|
||||
|
||||
cp "./${EXE_NAME}" "./${EXE_NAME}_downgraded"
|
||||
version="$("./${EXE_NAME}" --version)"
|
||||
"./${EXE_NAME}_downgraded" -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||
"./${EXE_NAME}_downgraded" -v --update-to "${UPDATE_TO}"
|
||||
downgraded_version="$("./${EXE_NAME}_downgraded" --version)"
|
||||
if [ "${version}" = "${downgraded_version}" ]; then
|
||||
exit 1
|
||||
|
||||
@@ -129,7 +129,6 @@ def windows_set_version(exe, version):
|
||||
StringStruct('FileDescription', 'yt-dlp%s' % (MACHINE and f' ({MACHINE})')),
|
||||
StringStruct('FileVersion', version),
|
||||
StringStruct('InternalName', f'yt-dlp{suffix}'),
|
||||
StringStruct('LegalCopyright', 'pukkandan.ytdlp@gmail.com | UNLICENSE'),
|
||||
StringStruct('OriginalFilename', f'yt-dlp{suffix}.exe'),
|
||||
StringStruct('ProductName', f'yt-dlp{suffix}'),
|
||||
StringStruct(
|
||||
|
||||
@@ -293,5 +293,20 @@
|
||||
"action": "add",
|
||||
"when": "c76ce28e06c816eb5b261dfb6aff6e69dd9b7382",
|
||||
"short": "[priority] **linux_armv7l_exe builds are being discontinued**\nThis release's `yt-dlp_linux_armv7l` binary could be the last one. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13976)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "08d78996831bd8e1e3c2592d740c3def00bbf548",
|
||||
"short": "[priority] **Several options have been deprecated**\nIn order to simplify the codebase and reduce maintenance burden, various options have been deprecated. Please remove them from your commands/configurations. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14198)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "4e6a693057cfaf1ce1f07b019ed3bfce2bf936f6",
|
||||
"short": "[priority] **The minimum *required* Python version has been raised to 3.10**\nPython 3.9 has reached its end-of-life as of October 2025, and yt-dlp has now removed support for it. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13858)"
|
||||
},
|
||||
{
|
||||
"action": "add",
|
||||
"when": "2c9091e355a7ba5d1edb69796ecdca48199b77fb",
|
||||
"short": "[priority] **A stopgap release with a *TEMPORARY partial* fix for YouTube support**\nSome formats may still be unavailable, especially if cookies are passed to yt-dlp. The ***NEXT*** release, expected very soon, **will require an external JS runtime (e.g. Deno)** in order for YouTube downloads to work properly. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14404)"
|
||||
}
|
||||
]
|
||||
|
||||
devscripts/generate_third_party_licenses.py (new file)
@@ -0,0 +1,316 @@
|
||||
import requests
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
import hashlib
|
||||
|
||||
DEFAULT_OUTPUT = 'THIRD_PARTY_LICENSES.txt'
|
||||
CACHE_LOCATION = '.license_cache'
|
||||
HEADER = '''THIRD-PARTY LICENSES
|
||||
|
||||
This file aggregates license texts of third-party components included with the yt-dlp PyInstaller-bundled executables.
|
||||
yt-dlp itself is licensed under the Unlicense (see LICENSE file).
|
||||
Source code for bundled third-party components is available from the original projects.
|
||||
If you cannot obtain it, the maintainers will provide it as per license obligation; maintainer emails are listed in pyproject.toml.'''
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class Dependency:
|
||||
name: str
|
||||
license_url: str
|
||||
project_url: str = ''
|
||||
license: str = ''
|
||||
comment: str = ''
|
||||
|
||||
|
||||
DEPENDENCIES: list[Dependency] = [
|
||||
# Core runtime environment components
|
||||
Dependency(
|
||||
name='Python',
|
||||
license='PSF-2.0',
|
||||
license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/LICENSE',
|
||||
project_url='https://www.python.org/',
|
||||
),
|
||||
Dependency(
|
||||
name='Microsoft Distributable Code',
|
||||
license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/PC/crtlicense.txt',
|
||||
comment='Only included in Windows builds',
|
||||
),
|
||||
Dependency(
|
||||
name='bzip2',
|
||||
license='bzip2-1.0.6',
|
||||
license_url='https://gitlab.com/federicomenaquintero/bzip2/-/raw/master/COPYING',
|
||||
project_url='https://sourceware.org/bzip2/',
|
||||
),
|
||||
Dependency(
|
||||
name='libffi',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/libffi/libffi/refs/heads/master/LICENSE',
|
||||
project_url='https://sourceware.org/libffi/',
|
||||
),
|
||||
Dependency(
|
||||
name='OpenSSL 3.0+',
|
||||
license='Apache-2.0',
|
||||
license_url='https://raw.githubusercontent.com/openssl/openssl/refs/heads/master/LICENSE.txt',
|
||||
project_url='https://www.openssl.org/',
|
||||
),
|
||||
Dependency(
|
||||
name='SQLite',
|
||||
license='Public Domain', # Technically does not need to be included
|
||||
license_url='https://sqlite.org/src/raw/e108e1e69ae8e8a59e93c455654b8ac9356a11720d3345df2a4743e9590fb20d?at=LICENSE.md',
|
||||
project_url='https://www.sqlite.org/',
|
||||
),
|
||||
Dependency(
|
||||
name='liblzma',
|
||||
license='0BSD', # Technically does not need to be included
|
||||
license_url='https://raw.githubusercontent.com/tukaani-project/xz/refs/heads/master/COPYING',
|
||||
project_url='https://tukaani.org/xz/',
|
||||
),
|
||||
Dependency(
|
||||
name='mpdecimal',
|
||||
license='BSD-2-Clause',
|
||||
# No official repo URL
|
||||
license_url='https://gist.githubusercontent.com/seproDev/9e5dbfc08af35c3f2463e64eb9b27161/raw/61f5a98bc1a4ad7d48b1c793fc3314d4d43c2ab1/mpdecimal_COPYRIGHT.txt',
|
||||
project_url='https://www.bytereef.org/mpdecimal/',
|
||||
),
|
||||
Dependency(
|
||||
name='zlib',
|
||||
license='zlib',
|
||||
license_url='https://raw.githubusercontent.com/madler/zlib/refs/heads/develop/LICENSE',
|
||||
project_url='https://zlib.net/',
|
||||
),
|
||||
Dependency(
|
||||
name='Expat',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/libexpat/libexpat/refs/heads/master/COPYING',
|
||||
project_url='https://libexpat.github.io/',
|
||||
),
|
||||
Dependency(
|
||||
name='ncurses',
|
||||
license='X11-distribute-modifications-variant',
|
||||
license_url='https://raw.githubusercontent.com/mirror/ncurses/refs/heads/master/COPYING',
|
||||
comment='Only included in Linux/macOS builds',
|
||||
project_url='https://invisible-island.net/ncurses/',
|
||||
),
|
||||
Dependency(
|
||||
name='GNU Readline',
|
||||
license='GPL-3.0-or-later',
|
||||
license_url='https://tiswww.case.edu/php/chet/readline/COPYING',
|
||||
comment='Only included in Linux builds',
|
||||
project_url='https://www.gnu.org/software/readline/',
|
||||
),
|
||||
Dependency(
|
||||
name='libstdc++',
|
||||
license='GPL-3.0-with-GCC-exception',
|
||||
license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
|
||||
comment='Only included in Linux builds',
|
||||
project_url='https://gcc.gnu.org/onlinedocs/libstdc++/',
|
||||
),
|
||||
Dependency(
|
||||
name='libgcc',
|
||||
license='GPL-3.0-with-GCC-exception',
|
||||
license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
|
||||
comment='Only included in Linux builds',
|
||||
project_url='https://gcc.gnu.org/',
|
||||
),
|
||||
Dependency(
|
||||
name='libuuid',
|
||||
license='BSD-3-Clause',
|
||||
license_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/plain/lib/uuid/COPYING',
|
||||
comment='Only included in Linux builds',
|
||||
project_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/tree/lib/uuid',
|
||||
),
|
||||
Dependency(
|
||||
name='libintl',
|
||||
license='LGPL-2.1-or-later',
|
||||
license_url='https://raw.githubusercontent.com/autotools-mirror/gettext/refs/heads/master/gettext-runtime/intl/COPYING.LIB',
|
||||
comment='Only included in macOS builds',
|
||||
project_url='https://www.gnu.org/software/gettext/',
|
||||
),
|
||||
Dependency(
|
||||
name='libidn2',
|
||||
license='LGPL-3.0-or-later',
|
||||
license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.LESSERv3',
|
||||
comment='Only included in macOS builds',
|
||||
project_url='https://www.gnu.org/software/libidn/',
|
||||
),
|
||||
Dependency(
|
||||
name='libidn2 (Unicode character data files)',
|
||||
license='Unicode-TOU AND Unicode-DFS-2016',
|
||||
license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.unicode',
|
||||
comment='Only included in macOS builds',
|
||||
project_url='https://www.gnu.org/software/libidn/',
|
||||
),
|
||||
Dependency(
|
||||
name='libunistring',
|
||||
license='LGPL-3.0-or-later',
|
||||
license_url='https://gitweb.git.savannah.gnu.org/gitweb/?p=libunistring.git;a=blob_plain;f=COPYING.LIB;hb=HEAD',
|
||||
comment='Only included in macOS builds',
|
||||
project_url='https://www.gnu.org/software/libunistring/',
|
||||
),
|
||||
Dependency(
|
||||
name='librtmp',
|
||||
license='LGPL-2.1-or-later',
|
||||
# No official repo URL
|
||||
license_url='https://gist.githubusercontent.com/seproDev/31d8c691ccddebe37b8b379307cb232d/raw/053408e98547ea8c7d9ba3a80c965f33e163b881/librtmp_COPYING.txt',
|
||||
comment='Only included in macOS builds',
|
||||
project_url='https://rtmpdump.mplayerhq.hu/',
|
||||
),
|
||||
Dependency(
|
||||
name='zstd',
|
||||
license='BSD-3-Clause',
|
||||
license_url='https://raw.githubusercontent.com/facebook/zstd/refs/heads/dev/LICENSE',
|
||||
comment='Only included in macOS builds',
|
||||
project_url='https://facebook.github.io/zstd/',
|
||||
),
|
||||
|
||||
# Python packages
|
||||
Dependency(
|
||||
name='brotli',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/google/brotli/refs/heads/master/LICENSE',
|
||||
project_url='https://brotli.org/',
|
||||
),
|
||||
Dependency(
|
||||
name='curl_cffi',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/lexiforest/curl_cffi/refs/heads/main/LICENSE',
|
||||
comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
|
||||
project_url='https://curl-cffi.readthedocs.io/',
|
||||
),
|
||||
# Dependency of curl_cffi
|
||||
Dependency(
|
||||
name='curl-impersonate',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/lexiforest/curl-impersonate/refs/heads/main/LICENSE',
|
||||
comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
|
||||
project_url='https://github.com/lexiforest/curl-impersonate',
|
||||
),
|
||||
Dependency(
|
||||
name='cffi',
|
||||
license='MIT-0', # Technically does not need to be included
|
||||
license_url='https://raw.githubusercontent.com/python-cffi/cffi/refs/heads/main/LICENSE',
|
||||
project_url='https://cffi.readthedocs.io/',
|
||||
),
|
||||
# Dependency of cffi
|
||||
Dependency(
|
||||
name='pycparser',
|
||||
license='BSD-3-Clause',
|
||||
license_url='https://raw.githubusercontent.com/eliben/pycparser/refs/heads/main/LICENSE',
|
||||
project_url='https://github.com/eliben/pycparser',
|
||||
),
|
||||
Dependency(
|
||||
name='mutagen',
|
||||
license='GPL-2.0-or-later',
|
||||
license_url='https://raw.githubusercontent.com/quodlibet/mutagen/refs/heads/main/COPYING',
|
||||
project_url='https://mutagen.readthedocs.io/',
|
||||
),
|
||||
Dependency(
|
||||
name='PyCryptodome',
|
||||
license='Public Domain and BSD-2-Clause',
|
||||
license_url='https://raw.githubusercontent.com/Legrandin/pycryptodome/refs/heads/master/LICENSE.rst',
|
||||
project_url='https://www.pycryptodome.org/',
|
||||
),
|
||||
Dependency(
|
||||
name='certifi',
|
||||
license='MPL-2.0',
|
||||
license_url='https://raw.githubusercontent.com/certifi/python-certifi/refs/heads/master/LICENSE',
|
||||
project_url='https://github.com/certifi/python-certifi',
|
||||
),
|
||||
Dependency(
|
||||
name='requests',
|
||||
license='Apache-2.0',
|
||||
license_url='https://raw.githubusercontent.com/psf/requests/refs/heads/main/LICENSE',
|
||||
project_url='https://requests.readthedocs.io/',
|
||||
),
|
||||
# Dependency of requests
|
||||
Dependency(
|
||||
name='charset-normalizer',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/jawah/charset_normalizer/refs/heads/master/LICENSE',
|
||||
project_url='https://charset-normalizer.readthedocs.io/',
|
||||
),
|
||||
# Dependency of requests
|
||||
Dependency(
|
||||
name='idna',
|
||||
license='BSD-3-Clause',
|
||||
license_url='https://raw.githubusercontent.com/kjd/idna/refs/heads/master/LICENSE.md',
|
||||
project_url='https://github.com/kjd/idna',
|
||||
),
|
||||
Dependency(
|
||||
name='urllib3',
|
||||
license='MIT',
|
||||
license_url='https://raw.githubusercontent.com/urllib3/urllib3/refs/heads/main/LICENSE.txt',
|
||||
project_url='https://urllib3.readthedocs.io/',
|
||||
),
|
||||
Dependency(
|
||||
name='SecretStorage',
|
||||
license='BSD-3-Clause',
|
||||
license_url='https://raw.githubusercontent.com/mitya57/secretstorage/refs/heads/master/LICENSE',
|
||||
comment='Only included in Linux builds',
|
||||
project_url='https://secretstorage.readthedocs.io/',
|
||||
),
|
||||
# Dependency of SecretStorage
|
||||
Dependency(
|
||||
name='cryptography',
|
||||
license='Apache-2.0', # Also available as BSD-3-Clause
|
||||
license_url='https://raw.githubusercontent.com/pyca/cryptography/refs/heads/main/LICENSE.APACHE',
|
||||
comment='Only included in Linux builds',
|
||||
project_url='https://cryptography.io/',
|
||||
),
|
||||
# Dependency of SecretStorage
|
||||
Dependency(
|
||||
name='Jeepney',
|
||||
license='MIT',
|
||||
license_url='https://gitlab.com/takluyver/jeepney/-/raw/master/LICENSE',
|
||||
comment='Only included in Linux builds',
|
||||
project_url='https://jeepney.readthedocs.io/',
|
||||
),
|
||||
Dependency(
|
||||
name='websockets',
|
||||
license='BSD-3-Clause',
|
||||
license_url='https://raw.githubusercontent.com/python-websockets/websockets/refs/heads/main/LICENSE',
|
||||
project_url='https://websockets.readthedocs.io/',
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def fetch_text(dep: Dependency) -> str:
|
||||
cache_dir = Path(CACHE_LOCATION)
|
||||
cache_dir.mkdir(exist_ok=True)
|
||||
url_hash = hashlib.sha256(dep.license_url.encode('utf-8')).hexdigest()
|
||||
cache_file = cache_dir / f'{url_hash}.txt'
|
||||
|
||||
if cache_file.exists():
|
||||
return cache_file.read_text()
|
||||
|
||||
# UA needed since some domains block requests default UA
|
||||
req = requests.get(dep.license_url, headers={'User-Agent': 'yt-dlp license fetcher'})
|
||||
req.raise_for_status()
|
||||
text = req.text
|
||||
cache_file.write_text(text)
|
||||
return text
|
||||
|
||||
|
||||
def build_output() -> str:
|
||||
lines = [HEADER]
|
||||
for d in DEPENDENCIES:
|
||||
lines.append('\n')
|
||||
lines.append('-' * 80)
|
||||
header = f'{d.name}'
|
||||
if d.license:
|
||||
header += f' | {d.license}'
|
||||
if d.comment:
|
||||
header += f'\nNote: {d.comment}'
|
||||
if d.project_url:
|
||||
header += f'\nURL: {d.project_url}'
|
||||
lines.append(header)
|
||||
lines.append('-' * 80)
|
||||
|
||||
text = fetch_text(d)
|
||||
lines.append(text.strip('\n') + '\n')
|
||||
return '\n'.join(lines)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
content = build_output()
|
||||
Path(DEFAULT_OUTPUT).write_text(content)
|
||||
@@ -373,7 +373,7 @@ class CommitRange:
|
||||
issues = [issue.strip()[1:] for issue in issues.split(',')] if issues else []
|
||||
|
||||
if prefix:
|
||||
groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')))
|
||||
groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')), strict=True)
|
||||
group = next(iter(filter(None, groups)), None)
|
||||
details = ', '.join(unique(details))
|
||||
sub_details = list(itertools.chain.from_iterable(sub_details))
|
||||
|
||||
@@ -8,7 +8,7 @@ def main():
return  # This is unused in yt-dlp

parser = optparse.OptionParser(usage='%prog INFILE OUTFILE')
options, args = parser.parse_args()
_, args = parser.parse_args()
if len(args) != 2:
parser.error('Expected an input and an output filename')


@@ -1,4 +1,3 @@
# Allow direct execution
import os
import sys

@@ -55,7 +54,7 @@ def _test(github_repository, note, repo_vars, repo_secrets, inputs, expected=Non
assert result == exp, f'unexpected result: {github_repository} {note}'


def main():
def test_setup_variables():
DEFAULT_VERSION_WITH_REVISION = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S')
DEFAULT_VERSION = calculate_version()
BASE_REPO_VARS = {
@@ -323,9 +322,3 @@ def main():
'pypi_project': None,
'pypi_suffix': None,
}, ignore_revision=True)

print('all tests passed')


if __name__ == '__main__':
main()

@@ -1,11 +1,14 @@
[build-system]
requires = ["hatchling"]
requires = ["hatchling>=1.27.0"]
build-backend = "hatchling.build"

[project]
name = "yt-dlp"
maintainers = [
authors = [
    {name = "pukkandan", email = "pukkandan.ytdlp@gmail.com"},
]
maintainers = [
    {email = "maintainers@yt-dlp.org"},
    {name = "Grub4K", email = "contact@grub4k.xyz"},
    {name = "bashonly", email = "bashonly@protonmail.com"},
    {name = "coletdjnz", email = "coletdjnz@protonmail.com"},
@@ -13,7 +16,7 @@ maintainers = [
]
description = "A feature-rich command-line audio/video downloader"
readme = "README.md"
requires-python = ">=3.9"
requires-python = ">=3.10"
keywords = [
    "cli",
    "downloader",
@@ -22,22 +25,22 @@ keywords = [
    "sponsorblock",
    "yt-dlp",
]
license = {file = "LICENSE"}
license = "Unlicense"
license-files = ["LICENSE"]
classifiers = [
    "Topic :: Multimedia :: Video",
    "Development Status :: 5 - Production/Stable",
    "Environment :: Console",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Programming Language :: Python :: 3.14",
    "Programming Language :: Python :: Implementation",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
    "License :: OSI Approved :: The Unlicense (Unlicense)",
    "Operating System :: OS Independent",
]
dynamic = ["version"]
@@ -63,7 +66,7 @@ secretstorage = [
]
build = [
    "build",
    "hatchling",
    "hatchling>=1.27.0",
    "pip",
    "setuptools>=71.0.2,<81",  # See https://github.com/pyinstaller/pyinstaller/issues/9149
    "wheel",
@@ -75,7 +78,7 @@ dev = [
]
static-analysis = [
    "autopep8~=2.0",
    "ruff~=0.12.0",
    "ruff~=0.14.0",
]
test = [
    "pytest~=8.1",
@@ -89,7 +92,7 @@ pyinstaller = [
Documentation = "https://github.com/yt-dlp/yt-dlp#readme"
Repository = "https://github.com/yt-dlp/yt-dlp"
Tracker = "https://github.com/yt-dlp/yt-dlp/issues"
Funding = "https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators"
Funding = "https://github.com/yt-dlp/yt-dlp/blob/master/Maintainers.md#maintainers"

[project.scripts]
yt-dlp = "yt_dlp:main"
@@ -107,7 +110,6 @@ include = [
    "/LICENSE",  # included as license
    "/pyproject.toml",  # included by default
    "/README.md",  # included as readme
    "/setup.cfg",
    "/supportedsites.md",
]
artifacts = [
@@ -168,12 +170,12 @@ run-cov = "echo Code coverage not implemented && exit 1"

[[tool.hatch.envs.hatch-test.matrix]]
python = [
    "3.9",
    "3.10",
    "3.11",
    "3.12",
    "3.13",
    "pypy3.10",
    "3.14",
    "pypy3.11",
]

[tool.ruff]

setup.cfg
@@ -1,39 +0,0 @@
[flake8]
exclude = build,venv,.tox,.git,.pytest_cache
ignore = E402,E501,E731,E741,W503
max_line_length = 120
per_file_ignores =
    devscripts/lazy_load_template.py: F401


[autoflake]
ignore-init-module-imports = true
ignore-pass-after-docstring = true
remove-all-unused-imports = true
remove-duplicate-keys = true
remove-unused-variables = true


[tox:tox]
skipsdist = true
envlist = py{39,310,311,312,313},pypy311
skip_missing_interpreters = true

[testenv]  # tox
deps =
    pytest
commands = pytest {posargs:"-m not download"}
passenv = HOME  # For test_compat_expanduser
setenv =
    # PYTHONWARNINGS = error  # Catches PIP's warnings too


[isort]
py_version = 39
multi_line_output = VERTICAL_HANGING_INDENT
line_length = 80
reverse_relative = true
ensure_newline_before_comments = true
include_trailing_comma = true
known_first_party =
    test

@@ -20,7 +20,6 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **3sat**
|
||||
- **4tube**
|
||||
- **56.com**
|
||||
- **6play**
|
||||
- **7plus**
|
||||
- **8tracks**
|
||||
- **9c9media**
|
||||
@@ -86,7 +85,7 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **aol.com**: Yahoo screen and movies (**Currently broken**)
|
||||
- **APA**
|
||||
- **Aparat**
|
||||
- **AppleConnect**
|
||||
- **apple:music:connect**: Apple Music Connect
|
||||
- **AppleDaily**: 臺灣蘋果日報
|
||||
- **ApplePodcasts**
|
||||
- **appletrailers**
|
||||
@@ -243,6 +242,7 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **Canalsurmas**
|
||||
- **CaracolTvPlay**: [*caracoltv-play*](## "netrc machine")
|
||||
- **cbc.ca**
|
||||
- **cbc.ca:listen**
|
||||
- **cbc.ca:player**
|
||||
- **cbc.ca:player:playlist**
|
||||
- **CBS**: (**Currently broken**)
|
||||
@@ -299,7 +299,6 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **cpac**
|
||||
- **cpac:playlist**
|
||||
- **Cracked**
|
||||
- **Crackle**
|
||||
- **Craftsy**
|
||||
- **CrooksAndLiars**
|
||||
- **CrowdBunker**
|
||||
@@ -314,8 +313,6 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **curiositystream**: [*curiositystream*](## "netrc machine")
|
||||
- **curiositystream:collections**: [*curiositystream*](## "netrc machine")
|
||||
- **curiositystream:series**: [*curiositystream*](## "netrc machine")
|
||||
- **cwtv**
|
||||
- **cwtv:movie**
|
||||
- **Cybrary**: [*cybrary*](## "netrc machine")
|
||||
- **CybraryCourse**: [*cybrary*](## "netrc machine")
|
||||
- **DacastPlaylist**
|
||||
@@ -450,7 +447,6 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **Filmweb**
|
||||
- **FiveThirtyEight**
|
||||
- **FiveTV**
|
||||
- **FlexTV**
|
||||
- **Flickr**
|
||||
- **Floatplane**
|
||||
- **FloatplaneChannel**
|
||||
@@ -584,6 +580,11 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **Hypem**
|
||||
- **Hytale**
|
||||
- **Icareus**
|
||||
- **IdagioAlbum**
|
||||
- **IdagioPersonalPlaylist**
|
||||
- **IdagioPlaylist**
|
||||
- **IdagioRecording**
|
||||
- **IdagioTrack**
|
||||
- **IdolPlus**
|
||||
- **iflix:episode**
|
||||
- **IflixSeries**
|
||||
@@ -798,7 +799,6 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **mirrativ**
|
||||
- **mirrativ:user**
|
||||
- **MirrorCoUK**
|
||||
- **MiTele**: mitele.es
|
||||
- **mixch**
|
||||
- **mixch:archive**
|
||||
- **mixch:movie**
|
||||
@@ -1009,6 +1009,7 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **onet.tv:channel**
|
||||
- **OnetMVP**
|
||||
- **OnionStudios**
|
||||
- **onsen**: [*onsen*](## "netrc machine") インターネットラジオステーション<音泉>
|
||||
- **Opencast**
|
||||
- **OpencastPlaylist**
|
||||
- **openrec**
|
||||
@@ -1033,8 +1034,6 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **Panopto**
|
||||
- **PanoptoList**
|
||||
- **PanoptoPlaylist**
|
||||
- **ParamountPlus**
|
||||
- **ParamountPlusSeries**
|
||||
- **ParamountPressExpress**
|
||||
- **Parler**: Posts on parler.com
|
||||
- **parliamentlive.tv**: UK parliament videos
|
||||
@@ -1069,8 +1068,6 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **PinterestCollection**
|
||||
- **PiramideTV**
|
||||
- **PiramideTVChannel**
|
||||
- **pixiv:sketch**
|
||||
- **pixiv:sketch:user**
|
||||
- **PlanetMarathi**
|
||||
- **Platzi**: [*platzi*](## "netrc machine")
|
||||
- **PlatziCourse**: [*platzi*](## "netrc machine")
|
||||
@@ -1257,7 +1254,6 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **rutube:person**: Rutube person videos
|
||||
- **rutube:playlist**: Rutube playlists
|
||||
- **rutube:tags**: Rutube tags
|
||||
- **RUTV**: RUTV.RU
|
||||
- **Ruutu**: (**Currently broken**)
|
||||
- **Ruv**
|
||||
- **ruv.is:spila**
|
||||
@@ -1332,7 +1328,10 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **Slideshare**
|
||||
- **SlidesLive**
|
||||
- **Slutload**
|
||||
- **Smotrim**
|
||||
- **smotrim**
|
||||
- **smotrim:audio**
|
||||
- **smotrim:live**
|
||||
- **smotrim:playlist**
|
||||
- **SnapchatSpotlight**
|
||||
- **Snotr**
|
||||
- **SoftWhiteUnderbelly**: [*softwhiteunderbelly*](## "netrc machine")
|
||||
@@ -1370,8 +1369,6 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **Sport5**
|
||||
- **SportBox**: (**Currently broken**)
|
||||
- **SportDeutschland**
|
||||
- **spotify**: Spotify episodes (**Currently broken**)
|
||||
- **spotify:show**: Spotify shows (**Currently broken**)
|
||||
- **Spreaker**
|
||||
- **SpreakerShow**
|
||||
- **SpringboardPlatform**
|
||||
@@ -1510,15 +1507,17 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **TrueID**
|
||||
- **TruNews**
|
||||
- **Truth**
|
||||
- **ttinglive**: 띵라이브 (formerly FlexTV)
|
||||
- **Tube8**: (**Currently broken**)
|
||||
- **TubeTuGraz**: [*tubetugraz*](## "netrc machine") tube.tugraz.at
|
||||
- **TubeTuGrazSeries**: [*tubetugraz*](## "netrc machine")
|
||||
- **tubitv**: [*tubitv*](## "netrc machine")
|
||||
- **tubitv:series**
|
||||
- **Tumblr**: [*tumblr*](## "netrc machine")
|
||||
- **TuneInPodcast**
|
||||
- **TuneInPodcastEpisode**
|
||||
- **TuneInStation**
|
||||
- **tunein:embed**
|
||||
- **tunein:podcast**
|
||||
- **tunein:podcast:program**
|
||||
- **tunein:station**
|
||||
- **tv.dfb.de**
|
||||
- **TV2**
|
||||
- **TV2Article**
|
||||
@@ -1542,7 +1541,7 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **tvigle**: Интернет-телевидение Tvigle.ru
|
||||
- **TVIPlayer**
|
||||
- **TVN24**: (**Currently broken**)
|
||||
- **TVNoe**: (**Currently broken**)
|
||||
- **tvnoe**: Televize Noe
|
||||
- **tvopengr:embed**: tvopen.gr embedded videos
|
||||
- **tvopengr:watch**: tvopen.gr (and ethnos.gr) videos
|
||||
- **tvp**: Telewizja Polska
|
||||
@@ -1600,7 +1599,6 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **Varzesh3**: (**Currently broken**)
|
||||
- **Vbox7**
|
||||
- **Veo**
|
||||
- **Vesti**: Вести.Ru (**Currently broken**)
|
||||
- **Vevo**
|
||||
- **VevoPlaylist**
|
||||
- **VGTV**: VGTV, BTTV, FTV, Aftenposten and Aftonbladet
|
||||
@@ -1748,7 +1746,6 @@ The only reliable way to check if a site is supported is to try it.
|
||||
- **wykop:dig:comment**
|
||||
- **wykop:post**
|
||||
- **wykop:post:comment**
|
||||
- **Xanimu**
|
||||
- **XboxClips**
|
||||
- **XHamster**
|
||||
- **XHamsterEmbed**
|
||||
|
||||
@@ -176,7 +176,7 @@ def _iter_differences(got, expected, field):
|
||||
yield field, f'expected length of {len(expected)}, got {len(got)}'
|
||||
return
|
||||
|
||||
for index, (got_val, expected_val) in enumerate(zip(got, expected)):
|
||||
for index, (got_val, expected_val) in enumerate(zip(got, expected, strict=True)):
|
||||
field_name = str(index) if field is None else f'{field}.{index}'
|
||||
yield from _iter_differences(got_val, expected_val, field_name)
|
||||
return
|
||||
|
||||
@@ -36,7 +36,6 @@
|
||||
"verbose": true,
|
||||
"writedescription": false,
|
||||
"writeinfojson": true,
|
||||
"writeannotations": false,
|
||||
"writelink": false,
|
||||
"writeurllink": false,
|
||||
"writewebloclink": false,
|
||||
|
||||
@@ -1945,7 +1945,7 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
||||
server_thread.daemon = True
|
||||
server_thread.start()
|
||||
|
||||
(content, urlh) = self.ie._download_webpage_handle(
|
||||
content, _ = self.ie._download_webpage_handle(
|
||||
f'http://127.0.0.1:{port}/teapot', None,
|
||||
expected_status=TEAPOT_RESPONSE_STATUS)
|
||||
self.assertEqual(content, TEAPOT_RESPONSE_BODY)
|
||||
|
||||
@@ -13,6 +13,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
import contextlib
|
||||
import copy
|
||||
import itertools
|
||||
import json
|
||||
|
||||
from test.helper import FakeYDL, assertRegexpMatches, try_rm
|
||||
@@ -414,7 +415,7 @@ class TestFormatSelection(unittest.TestCase):
|
||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||
self.assertEqual(downloaded_ids, ['248+141'])
|
||||
|
||||
for f1, f2 in zip(formats_order, formats_order[1:]):
|
||||
for f1, f2 in itertools.pairwise(formats_order):
|
||||
info_dict = _make_result([f1, f2], extractor='youtube')
|
||||
ydl = YDL({'format': 'best/bestvideo'})
|
||||
ydl.sort_formats(info_dict)
|
||||
@@ -749,7 +750,7 @@ class TestYoutubeDL(unittest.TestCase):
|
||||
|
||||
if not isinstance(expected, (list, tuple)):
|
||||
expected = (expected, expected)
|
||||
for (name, got), expect in zip((('outtmpl', out), ('filename', fname)), expected):
|
||||
for (name, got), expect in zip((('outtmpl', out), ('filename', fname)), expected, strict=True):
|
||||
if callable(expect):
|
||||
self.assertTrue(expect(got), f'Wrong {name} from {tmpl}')
|
||||
elif expect is not None:
|
||||
@@ -1147,7 +1148,7 @@ class TestYoutubeDL(unittest.TestCase):
|
||||
entries = func(evaluated)
|
||||
results = [(v['playlist_autonumber'] - 1, (int(v['id']), v['playlist_index']))
|
||||
for v in get_downloaded_info_dicts(params, entries)]
|
||||
self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids))), f'Entries of {name} for {params}')
|
||||
self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids, strict=True))), f'Entries of {name} for {params}')
|
||||
self.assertEqual(sorted(evaluated), expected_eval, f'Evaluation of {name} for {params}')
|
||||
|
||||
test_selection({}, INDICES)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import datetime as dt
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
@@ -12,7 +13,7 @@ import struct
|
||||
|
||||
from yt_dlp import compat
|
||||
from yt_dlp.compat import urllib # isort: split
|
||||
from yt_dlp.compat import compat_etree_fromstring, compat_expanduser
|
||||
from yt_dlp.compat import compat_etree_fromstring, compat_expanduser, compat_datetime_from_timestamp
|
||||
from yt_dlp.compat.urllib.request import getproxies
|
||||
|
||||
|
||||
@@ -59,6 +60,45 @@ class TestCompat(unittest.TestCase):
|
||||
def test_struct_unpack(self):
|
||||
self.assertEqual(struct.unpack('!B', b'\x00'), (0,))
|
||||
|
||||
def test_compat_datetime_from_timestamp(self):
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(0),
|
||||
dt.datetime(1970, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(1),
|
||||
dt.datetime(1970, 1, 1, 0, 0, 1, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(3600),
|
||||
dt.datetime(1970, 1, 1, 1, 0, 0, tzinfo=dt.timezone.utc))
|
||||
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(-1),
|
||||
dt.datetime(1969, 12, 31, 23, 59, 59, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(-86400),
|
||||
dt.datetime(1969, 12, 31, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(0.5),
|
||||
dt.datetime(1970, 1, 1, 0, 0, 0, 500000, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(1.000001),
|
||||
dt.datetime(1970, 1, 1, 0, 0, 1, 1, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(-1.25),
|
||||
dt.datetime(1969, 12, 31, 23, 59, 58, 750000, tzinfo=dt.timezone.utc))
|
||||
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(-1577923200),
|
||||
dt.datetime(1920, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(4102444800),
|
||||
dt.datetime(2100, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||
|
||||
self.assertEqual(
|
||||
compat_datetime_from_timestamp(173568960000),
|
||||
dt.datetime(7470, 3, 8, 0, 0, 0, tzinfo=dt.timezone.utc))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
||||
@@ -29,7 +29,7 @@ class TestOverwrites(unittest.TestCase):
|
||||
'-o', 'test.webm',
|
||||
'https://www.youtube.com/watch?v=jNQXAC9IVRw',
|
||||
], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
sout, serr = outp.communicate()
|
||||
sout, _ = outp.communicate()
|
||||
self.assertTrue(b'has already been downloaded' in sout)
|
||||
# if the file has no content, it has not been redownloaded
|
||||
self.assertTrue(os.path.getsize(download_file) < 1)
|
||||
@@ -41,7 +41,7 @@ class TestOverwrites(unittest.TestCase):
|
||||
'-o', 'test.webm',
|
||||
'https://www.youtube.com/watch?v=jNQXAC9IVRw',
|
||||
], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
sout, serr = outp.communicate()
|
||||
sout, _ = outp.communicate()
|
||||
self.assertTrue(b'has already been downloaded' not in sout)
|
||||
# if the file has no content, it has not been redownloaded
|
||||
self.assertTrue(os.path.getsize(download_file) > 1)
|
||||
|
||||
@@ -115,7 +115,7 @@ class TestModifyChaptersPP(unittest.TestCase):
|
||||
self.assertEqual(len(ends), len(titles))
|
||||
start = 0
|
||||
chapters = []
|
||||
for e, t in zip(ends, titles):
|
||||
for e, t in zip(ends, titles, strict=True):
|
||||
chapters.append(self._chapter(start, e, t))
|
||||
start = e
|
||||
return chapters
|
||||
|
||||
@@ -45,3 +45,8 @@ class TestGetWebPoContentBinding:
|
||||
def test_invalid_base64(self, pot_request):
|
||||
pot_request.visitor_data = 'invalid-base64'
|
||||
assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)
|
||||
|
||||
def test_gvs_video_id_binding_experiment(self, pot_request):
|
||||
pot_request.context = PoTokenContext.GVS
|
||||
pot_request._gvs_bind_to_video_id = True
|
||||
assert get_webpo_content_binding(pot_request) == ('example-video-id', ContentBindingType.VIDEO_ID)
|
||||
|
||||
@@ -153,7 +153,7 @@ class TestPoTokenProvider:
|
||||
|
||||
with pytest.raises(
|
||||
PoTokenProviderRejectedRequest,
|
||||
match='External requests by "example" provider do not support proxy scheme "socks4". Supported proxy '
|
||||
match=r'External requests by "example" provider do not support proxy scheme "socks4"\. Supported proxy '
|
||||
'schemes: http, socks5h',
|
||||
):
|
||||
provider.request_pot(pot_request)
|
||||
|
||||
@@ -417,7 +417,7 @@ class TestTraversal:
|
||||
|
||||
def test_traversal_morsel(self):
|
||||
morsel = http.cookies.Morsel()
|
||||
values = dict(zip(morsel, 'abcdefghijklmnop'))
|
||||
values = dict(zip(morsel, 'abcdefghijklmnop', strict=False))
|
||||
morsel.set('item_key', 'item_value', 'coded_value')
|
||||
morsel.update(values)
|
||||
values['key'] = 'item_key'
|
||||
|
||||
@@ -12,6 +12,7 @@ import datetime as dt
|
||||
import io
|
||||
import itertools
|
||||
import json
|
||||
import ntpath
|
||||
import pickle
|
||||
import subprocess
|
||||
import unittest
|
||||
@@ -101,11 +102,13 @@ from yt_dlp.utils import (
|
||||
remove_start,
|
||||
render_table,
|
||||
replace_extension,
|
||||
datetime_round,
|
||||
rot47,
|
||||
sanitize_filename,
|
||||
sanitize_path,
|
||||
sanitize_url,
|
||||
shell_quote,
|
||||
strftime_or_none,
|
||||
smuggle_url,
|
||||
str_to_int,
|
||||
strip_jsonp,
|
||||
@@ -251,12 +254,6 @@ class TestUtil(unittest.TestCase):
|
||||
self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')
|
||||
self.assertEqual(sanitize_path('C:\\abc:%(title)s.%(ext)s'), 'C:\\abc#%(title)s.%(ext)s')
|
||||
|
||||
# Check with nt._path_normpath if available
|
||||
try:
|
||||
from nt import _path_normpath as nt_path_normpath
|
||||
except ImportError:
|
||||
nt_path_normpath = None
|
||||
|
||||
for test, expected in [
|
||||
('C:\\', 'C:\\'),
|
||||
('../abc', '..\\abc'),
|
||||
@@ -274,8 +271,7 @@ class TestUtil(unittest.TestCase):
|
||||
result = sanitize_path(test)
|
||||
assert result == expected, f'{test} was incorrectly resolved'
|
||||
assert result == sanitize_path(result), f'{test} changed after sanitizing again'
|
||||
if nt_path_normpath:
|
||||
assert result == nt_path_normpath(test), f'{test} does not match nt._path_normpath'
|
||||
assert result == ntpath.normpath(test), f'{test} does not match ntpath.normpath'
|
||||
|
||||
def test_sanitize_url(self):
|
||||
self.assertEqual(sanitize_url('//foo.bar'), 'http://foo.bar')
|
||||
@@ -409,6 +405,25 @@ class TestUtil(unittest.TestCase):
|
||||
self.assertEqual(datetime_from_str('now+1day', precision='hour'), datetime_from_str('now+24hours', precision='auto'))
|
||||
self.assertEqual(datetime_from_str('now+23hours', precision='hour'), datetime_from_str('now+23hours', precision='auto'))
|
||||
|
||||
def test_datetime_round(self):
|
||||
self.assertEqual(datetime_round(dt.datetime.strptime('1820-05-12T01:23:45Z', '%Y-%m-%dT%H:%M:%SZ')),
|
||||
dt.datetime(1820, 5, 12, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(datetime_round(dt.datetime.strptime('1969-12-31T23:34:45Z', '%Y-%m-%dT%H:%M:%SZ'), 'hour'),
|
||||
dt.datetime(1970, 1, 1, 0, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45Z', '%Y-%m-%dT%H:%M:%SZ'), 'minute'),
|
||||
dt.datetime(2024, 12, 25, 1, 24, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45.123Z', '%Y-%m-%dT%H:%M:%S.%fZ'), 'second'),
|
||||
dt.datetime(2024, 12, 25, 1, 23, 45, tzinfo=dt.timezone.utc))
|
||||
self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45.678Z', '%Y-%m-%dT%H:%M:%S.%fZ'), 'second'),
|
||||
dt.datetime(2024, 12, 25, 1, 23, 46, tzinfo=dt.timezone.utc))
|
||||
|
||||
def test_strftime_or_none(self):
|
||||
self.assertEqual(strftime_or_none(-4722192000), '18200512')
|
||||
self.assertEqual(strftime_or_none(0), '19700101')
|
||||
self.assertEqual(strftime_or_none(1735084800), '20241225')
|
||||
# Throws OverflowError
|
||||
self.assertEqual(strftime_or_none(1735084800000), None)
|
||||
|
||||
def test_daterange(self):
|
||||
_20century = DateRange('19000101', '20000101')
|
||||
self.assertFalse('17890714' in _20century)
|
||||
@@ -1848,7 +1863,7 @@ Line 1
|
||||
|
||||
self.assertEqual(
|
||||
list(get_elements_text_and_html_by_attribute('class', 'foo bar', html)),
|
||||
list(zip(['nice', 'also nice'], self.GET_ELEMENTS_BY_CLASS_RES)))
|
||||
list(zip(['nice', 'also nice'], self.GET_ELEMENTS_BY_CLASS_RES, strict=True)))
|
||||
self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'foo', html)), [])
|
||||
self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'no-such-foo', html)), [])
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ class TestVerboseOutput(unittest.TestCase):
|
||||
'--username', 'johnsmith@gmail.com',
|
||||
'--password', 'my_secret_password',
|
||||
], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
sout, serr = outp.communicate()
|
||||
_, serr = outp.communicate()
|
||||
self.assertTrue(b'--username' in serr)
|
||||
self.assertTrue(b'johnsmith' not in serr)
|
||||
self.assertTrue(b'--password' in serr)
|
||||
@@ -36,7 +36,7 @@ class TestVerboseOutput(unittest.TestCase):
|
||||
'-u', 'johnsmith@gmail.com',
|
||||
'-p', 'my_secret_password',
|
||||
], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
sout, serr = outp.communicate()
|
||||
_, serr = outp.communicate()
|
||||
self.assertTrue(b'-u' in serr)
|
||||
self.assertTrue(b'johnsmith' not in serr)
|
||||
self.assertTrue(b'-p' in serr)
|
||||
@@ -50,7 +50,7 @@ class TestVerboseOutput(unittest.TestCase):
|
||||
'--username=johnsmith@gmail.com',
|
||||
'--password=my_secret_password',
|
||||
], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
sout, serr = outp.communicate()
|
||||
_, serr = outp.communicate()
|
||||
self.assertTrue(b'--username' in serr)
|
||||
self.assertTrue(b'johnsmith' not in serr)
|
||||
self.assertTrue(b'--password' in serr)
|
||||
@@ -64,7 +64,7 @@ class TestVerboseOutput(unittest.TestCase):
|
||||
'-u=johnsmith@gmail.com',
|
||||
'-p=my_secret_password',
|
||||
], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
sout, serr = outp.communicate()
|
||||
_, serr = outp.communicate()
|
||||
self.assertTrue(b'-u' in serr)
|
||||
self.assertTrue(b'johnsmith' not in serr)
|
||||
self.assertTrue(b'-p' in serr)
|
||||
|
||||
@@ -20,7 +20,7 @@ import random
|
||||
import ssl
|
||||
import threading
|
||||
|
||||
from yt_dlp import socks, traverse_obj
|
||||
from yt_dlp import socks
|
||||
from yt_dlp.cookies import YoutubeDLCookieJar
|
||||
from yt_dlp.dependencies import websockets
|
||||
from yt_dlp.networking import Request
|
||||
@@ -32,6 +32,7 @@ from yt_dlp.networking.exceptions import (
|
||||
SSLError,
|
||||
TransportError,
|
||||
)
|
||||
from yt_dlp.utils.traversal import traverse_obj
|
||||
from yt_dlp.utils.networking import HTTPHeaderDict
|
||||
|
||||
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
@@ -1,77 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import xml.etree.ElementTree
|
||||
|
||||
import yt_dlp.extractor
|
||||
import yt_dlp.YoutubeDL
|
||||
from test.helper import get_params, is_download_test, try_rm
|
||||
|
||||
|
||||
class YoutubeDL(yt_dlp.YoutubeDL):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.to_stderr = self.to_screen
|
||||
|
||||
|
||||
params = get_params({
|
||||
'writeannotations': True,
|
||||
'skip_download': True,
|
||||
'writeinfojson': False,
|
||||
'format': 'flv',
|
||||
})
|
||||
|
||||
|
||||
TEST_ID = 'gr51aVj-mLg'
|
||||
ANNOTATIONS_FILE = TEST_ID + '.annotations.xml'
|
||||
EXPECTED_ANNOTATIONS = ['Speech bubble', 'Note', 'Title', 'Spotlight', 'Label']
|
||||
|
||||
|
||||
@is_download_test
|
||||
class TestAnnotations(unittest.TestCase):
|
||||
def setUp(self):
|
||||
# Clear old files
|
||||
self.tearDown()
|
||||
|
||||
def test_info_json(self):
|
||||
expected = list(EXPECTED_ANNOTATIONS) # Two annotations could have the same text.
|
||||
ie = yt_dlp.extractor.YoutubeIE()
|
||||
ydl = YoutubeDL(params)
|
||||
ydl.add_info_extractor(ie)
|
||||
ydl.download([TEST_ID])
|
||||
self.assertTrue(os.path.exists(ANNOTATIONS_FILE))
|
||||
annoxml = None
|
||||
with open(ANNOTATIONS_FILE, encoding='utf-8') as annof:
|
||||
annoxml = xml.etree.ElementTree.parse(annof)
|
||||
self.assertTrue(annoxml is not None, 'Failed to parse annotations XML')
|
||||
root = annoxml.getroot()
|
||||
self.assertEqual(root.tag, 'document')
|
||||
annotationsTag = root.find('annotations')
|
||||
self.assertEqual(annotationsTag.tag, 'annotations')
|
||||
annotations = annotationsTag.findall('annotation')
|
||||
|
||||
# Not all the annotations have TEXT children and the annotations are returned unsorted.
|
||||
for a in annotations:
|
||||
self.assertEqual(a.tag, 'annotation')
|
||||
if a.get('type') == 'text':
|
||||
textTag = a.find('TEXT')
|
||||
text = textTag.text
|
||||
self.assertTrue(text in expected) # assertIn only added in python 2.7
|
||||
# remove the first occurrence, there could be more than one annotation with the same text
|
||||
expected.remove(text)
|
||||
# We should have seen (and removed) all the expected annotation texts.
|
||||
self.assertEqual(len(expected), 0, 'Not all expected annotations were found.')
|
||||
|
||||
def tearDown(self):
|
||||
try_rm(ANNOTATIONS_FILE)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -304,7 +304,6 @@ class YoutubeDL:
|
||||
clean_infojson: Remove internal metadata from the infojson
|
||||
getcomments: Extract video comments. This will not be written to disk
|
||||
unless writeinfojson is also given
|
||||
writeannotations: Write the video annotations to a .annotations.xml file
|
||||
writethumbnail: Write the thumbnail image to a file
|
||||
allow_playlist_files: Whether to write playlists' description, infojson etc
|
||||
also to disk when using the 'write*' options
|
||||
@@ -511,11 +510,11 @@ class YoutubeDL:
|
||||
the downloader (see yt_dlp/downloader/common.py):
|
||||
nopart, updatetime, buffersize, ratelimit, throttledratelimit, min_filesize,
|
||||
max_filesize, test, noresizebuffer, retries, file_access_retries, fragment_retries,
|
||||
continuedl, xattr_set_filesize, hls_use_mpegts, http_chunk_size,
|
||||
external_downloader_args, concurrent_fragment_downloads, progress_delta.
|
||||
continuedl, hls_use_mpegts, http_chunk_size, external_downloader_args,
|
||||
concurrent_fragment_downloads, progress_delta.
|
||||
|
||||
The following options are used by the post processors:
|
||||
ffmpeg_location: Location of the ffmpeg/avconv binary; either the path
|
||||
ffmpeg_location: Location of the ffmpeg binary; either the path
|
||||
to the binary or its containing directory.
|
||||
postprocessor_args: A dictionary of postprocessor/executable keys (in lower case)
|
||||
and a list of additional command-line arguments for the
|
||||
@@ -566,32 +565,14 @@ class YoutubeDL:
|
||||
allsubtitles: - Use subtitleslangs = ['all']
|
||||
Downloads all the subtitles of the video
|
||||
(requires writesubtitles or writeautomaticsub)
|
||||
include_ads: - Doesn't work
|
||||
Download ads as well
|
||||
call_home: - Not implemented
|
||||
Boolean, true if we are allowed to contact the
|
||||
yt-dlp servers for debugging.
|
||||
post_hooks: - Register a custom postprocessor
|
||||
A list of functions that get called as the final step
|
||||
for each video file, after all postprocessors have been
|
||||
called. The filename will be passed as the only argument.
|
||||
hls_prefer_native: - Use external_downloader = {'m3u8': 'native'} or {'m3u8': 'ffmpeg'}.
|
||||
Use the native HLS downloader instead of ffmpeg/avconv
|
||||
if True, otherwise use ffmpeg/avconv if False, otherwise
|
||||
Use the native HLS downloader instead of ffmpeg
|
||||
if True, otherwise use ffmpeg if False, otherwise
|
||||
use downloader suggested by extractor if None.
|
||||
prefer_ffmpeg: - avconv support is deprecated
|
||||
If False, use avconv instead of ffmpeg if both are available,
|
||||
otherwise prefer ffmpeg.
|
||||
youtube_include_dash_manifest: - Use extractor_args
|
||||
If True (default), DASH manifests and related
|
||||
data will be downloaded and processed by extractor.
|
||||
You can reduce network I/O by disabling it if you don't
|
||||
care about DASH. (only for youtube)
|
||||
youtube_include_hls_manifest: - Use extractor_args
|
||||
If True (default), HLS manifests and related
|
||||
data will be downloaded and processed by extractor.
|
||||
You can reduce network I/O by disabling it if you don't
|
||||
care about HLS. (only for youtube)
|
||||
no_color: Same as `color='no_color'`
|
||||
no_overwrites: Same as `overwrites=False`
|
||||
"""
|
||||
@@ -750,10 +731,6 @@ class YoutubeDL:
|
||||
return True
|
||||
return False
|
||||
|
||||
if check_deprecated('cn_verification_proxy', '--cn-verification-proxy', '--geo-verification-proxy'):
|
||||
if self.params.get('geo_verification_proxy') is None:
|
||||
self.params['geo_verification_proxy'] = self.params['cn_verification_proxy']
|
||||
|
||||
check_deprecated('useid', '--id', '-o "%(id)s.%(ext)s"')
|
||||
|
||||
for msg in self.params.get('_warnings', []):
|
||||
@@ -2030,7 +2007,7 @@ class YoutubeDL:
|
||||
else:
|
||||
entries = resolved_entries = list(entries)
|
||||
n_entries = len(resolved_entries)
|
||||
ie_result['requested_entries'], ie_result['entries'] = tuple(zip(*resolved_entries)) or ([], [])
|
||||
ie_result['requested_entries'], ie_result['entries'] = tuple(zip(*resolved_entries, strict=True)) or ([], [])
|
||||
if not ie_result.get('playlist_count'):
|
||||
# Better to do this after potentially exhausting entries
|
||||
ie_result['playlist_count'] = all_entries.get_full_count()
|
||||
@@ -2717,11 +2694,7 @@ class YoutubeDL:
|
||||
('modified_timestamp', 'modified_date'),
|
||||
):
|
||||
if info_dict.get(date_key) is None and info_dict.get(ts_key) is not None:
|
||||
# Working around out-of-range timestamp values (e.g. negative ones on Windows,
|
||||
# see http://bugs.python.org/issue1646728)
|
||||
with contextlib.suppress(ValueError, OverflowError, OSError):
|
||||
upload_date = dt.datetime.fromtimestamp(info_dict[ts_key], dt.timezone.utc)
|
||||
info_dict[date_key] = upload_date.strftime('%Y%m%d')
|
||||
info_dict[date_key] = strftime_or_none(info_dict[ts_key])
|
||||
|
||||
if not info_dict.get('release_year'):
|
||||
info_dict['release_year'] = traverse_obj(info_dict, ('release_date', {lambda x: int(x[:4])}))
|
||||
@@ -2812,7 +2785,7 @@ class YoutubeDL:
|
||||
|
||||
dummy_chapter = {'end_time': 0, 'start_time': info_dict.get('duration')}
|
||||
for idx, (prev, current, next_) in enumerate(zip(
|
||||
(dummy_chapter, *chapters), chapters, (*chapters[1:], dummy_chapter)), 1):
|
||||
(dummy_chapter, *chapters), chapters, (*chapters[1:], dummy_chapter), strict=False), 1):
|
||||
if current.get('start_time') is None:
|
||||
current['start_time'] = prev.get('end_time')
|
||||
if not current.get('end_time'):
|
||||
@@ -3339,28 +3312,6 @@ class YoutubeDL:
|
||||
elif _infojson_written is None:
|
||||
return
|
||||
|
||||
# Note: Annotations are deprecated
|
||||
annofn = None
|
||||
if self.params.get('writeannotations', False):
|
||||
annofn = self.prepare_filename(info_dict, 'annotation')
|
||||
if annofn:
|
||||
if not self._ensure_dir_exists(annofn):
|
||||
return
|
||||
if not self.params.get('overwrites', True) and os.path.exists(annofn):
|
||||
self.to_screen('[info] Video annotations are already present')
|
||||
elif not info_dict.get('annotations'):
|
||||
self.report_warning('There are no annotations to write.')
|
||||
else:
|
||||
try:
|
||||
self.to_screen('[info] Writing video annotations to: ' + annofn)
|
||||
with open(annofn, 'w', encoding='utf-8') as annofile:
|
||||
annofile.write(info_dict['annotations'])
|
||||
except (KeyError, TypeError):
|
||||
self.report_warning('There are no annotations to write.')
|
||||
except OSError:
|
||||
self.report_error('Cannot write annotations file: ' + annofn)
|
||||
return
|
||||
|
||||
# Write internet shortcut files
|
||||
def _write_link_file(link_type):
|
||||
url = try_get(info_dict['webpage_url'], iri_to_uri)
|
||||
@@ -3419,7 +3370,7 @@ class YoutubeDL:
|
||||
def existing_video_file(*filepaths):
|
||||
ext = info_dict.get('ext')
|
||||
converted = lambda file: replace_extension(file, self.params.get('final_ext') or ext, ext)
|
||||
file = self.existing_file(itertools.chain(*zip(map(converted, filepaths), filepaths)),
|
||||
file = self.existing_file(itertools.chain(*zip(map(converted, filepaths), filepaths, strict=True)),
|
||||
default_overwrite=False)
|
||||
if file:
|
||||
info_dict['ext'] = os.path.splitext(file)[1][1:]
|
||||
@@ -4005,7 +3956,7 @@ class YoutubeDL:
|
||||
|
||||
def render_subtitles_table(self, video_id, subtitles):
|
||||
def _row(lang, formats):
|
||||
exts, names = zip(*((f['ext'], f.get('name') or 'unknown') for f in reversed(formats)))
|
||||
exts, names = zip(*((f['ext'], f.get('name') or 'unknown') for f in reversed(formats)), strict=True)
|
||||
if len(set(names)) == 1:
|
||||
names = [] if names[0] == 'unknown' else names[:1]
|
||||
return [lang, ', '.join(names), ', '.join(exts)]
|
||||
@@ -4161,8 +4112,7 @@ class YoutubeDL:
|
||||
self.params.get('cookiefile'), self.params.get('cookiesfrombrowser'), self)
|
||||
except CookieLoadError as error:
|
||||
cause = error.__context__
|
||||
# compat: <=py3.9: `traceback.format_exception` has a different signature
|
||||
self.report_error(str(cause), tb=''.join(traceback.format_exception(None, cause, cause.__traceback__)))
|
||||
self.report_error(str(cause), tb=''.join(traceback.format_exception(cause)))
|
||||
raise
|
||||
|
||||
@property
|
||||
|
||||
@@ -1,8 +1,8 @@
import sys

if sys.version_info < (3, 9):
if sys.version_info < (3, 10):
raise ImportError(
f'You are using an unsupported version of Python. Only Python versions 3.9 and above are supported by yt-dlp')  # noqa: F541
f'You are using an unsupported version of Python. Only Python versions 3.10 and above are supported by yt-dlp')  # noqa: F541

__license__ = 'The Unlicense'

@@ -59,11 +59,9 @@ from .utils import (
|
||||
render_table,
|
||||
setproctitle,
|
||||
shell_quote,
|
||||
traverse_obj,
|
||||
variadic,
|
||||
write_string,
|
||||
)
|
||||
from .utils.networking import std_headers
|
||||
from .utils._utils import _UnsafeExtensionError
|
||||
from .YoutubeDL import YoutubeDL
|
||||
|
||||
@@ -157,7 +155,7 @@ def set_compat_opts(opts):
|
||||
if 'format-sort' in opts.compat_opts:
|
||||
opts.format_sort.extend(FormatSorter.ytdl_default)
|
||||
elif 'prefer-vp9-sort' in opts.compat_opts:
|
||||
opts.format_sort.extend(FormatSorter._prefer_vp9_sort)
|
||||
FormatSorter.default = FormatSorter._prefer_vp9_sort
|
||||
|
||||
if 'mtime-by-default' in opts.compat_opts:
|
||||
if opts.updatetime is None:
|
||||
@@ -523,7 +521,6 @@ def validate_options(opts):
|
||||
|
||||
if report_args_compat('post-processor', opts.postprocessor_args, 'default-compat', 'default'):
|
||||
opts.postprocessor_args['default'] = opts.postprocessor_args.pop('default-compat')
|
||||
opts.postprocessor_args.setdefault('sponskrub', [])
|
||||
|
||||
def report_conflict(arg1, opt1, arg2='--allow-unplayable-formats', opt2='allow_unplayable_formats',
|
||||
val1=NO_DEFAULT, val2=NO_DEFAULT, default=False):
|
||||
@@ -548,11 +545,6 @@ def validate_options(opts):
|
||||
'"--exec before_dl:"', 'exec_cmd', val2=opts.exec_cmd.get('before_dl'))
|
||||
report_conflict('--id', 'useid', '--output', 'outtmpl', val2=opts.outtmpl.get('default'))
|
||||
report_conflict('--remux-video', 'remuxvideo', '--recode-video', 'recodevideo')
|
||||
report_conflict('--sponskrub', 'sponskrub', '--remove-chapters', 'remove_chapters')
|
||||
report_conflict('--sponskrub', 'sponskrub', '--sponsorblock-mark', 'sponsorblock_mark')
|
||||
report_conflict('--sponskrub', 'sponskrub', '--sponsorblock-remove', 'sponsorblock_remove')
|
||||
report_conflict('--sponskrub-cut', 'sponskrub_cut', '--split-chapter', 'split_chapters',
|
||||
val1=opts.sponskrub and opts.sponskrub_cut)
|
||||
|
||||
# Conflicts with --allow-unplayable-formats
|
||||
report_conflict('--embed-metadata', 'addmetadata')
|
||||
@@ -565,23 +557,15 @@ def validate_options(opts):
|
||||
report_conflict('--recode-video', 'recodevideo')
|
||||
report_conflict('--remove-chapters', 'remove_chapters', default=[])
|
||||
report_conflict('--remux-video', 'remuxvideo')
|
||||
report_conflict('--sponskrub', 'sponskrub')
|
||||
report_conflict('--sponsorblock-remove', 'sponsorblock_remove', default=set())
|
||||
report_conflict('--xattrs', 'xattrs')
|
||||
|
||||
# Fully deprecated options
|
||||
def report_deprecation(val, old, new=None):
|
||||
if not val:
|
||||
return
|
||||
if hasattr(opts, '_deprecated_options'):
|
||||
deprecation_warnings.append(
|
||||
f'{old} is deprecated and may be removed in a future version. Use {new} instead' if new
|
||||
else f'{old} is deprecated and may not work as expected')
|
||||
|
||||
report_deprecation(opts.sponskrub, '--sponskrub', '--sponsorblock-mark or --sponsorblock-remove')
|
||||
report_deprecation(not opts.prefer_ffmpeg, '--prefer-avconv', 'ffmpeg')
|
||||
# report_deprecation(opts.include_ads, '--include-ads') # We may re-implement this in future
|
||||
# report_deprecation(opts.call_home, '--call-home') # We may re-implement this in future
|
||||
# report_deprecation(opts.writeannotations, '--write-annotations') # It's just that no website has it
|
||||
f'The following options have been deprecated: {", ".join(opts._deprecated_options)}\n'
|
||||
'Please remove them from your command/configuration to avoid future errors.\n'
|
||||
'See https://github.com/yt-dlp/yt-dlp/issues/14198 for more details')
|
||||
del opts._deprecated_options
|
||||
|
||||
# Dependent options
|
||||
opts.date = DateRange.day(opts.date) if opts.date else DateRange(opts.dateafter, opts.datebefore)
|
||||
@@ -712,21 +696,6 @@ def get_postprocessors(opts):
|
||||
'add_metadata': opts.addmetadata,
|
||||
'add_infojson': opts.embed_infojson,
|
||||
}
|
||||
# Deprecated
|
||||
# This should be above EmbedThumbnail since sponskrub removes the thumbnail attachment
|
||||
# but must be below EmbedSubtitle and FFmpegMetadata
|
||||
# See https://github.com/yt-dlp/yt-dlp/issues/204 , https://github.com/faissaloo/SponSkrub/issues/29
|
||||
# If opts.sponskrub is None, sponskrub is used, but it silently fails if the executable can't be found
|
||||
if opts.sponskrub is not False:
|
||||
yield {
|
||||
'key': 'SponSkrub',
|
||||
'path': opts.sponskrub_path,
|
||||
'args': opts.sponskrub_args,
|
||||
'cut': opts.sponskrub_cut,
|
||||
'force': opts.sponskrub_force,
|
||||
'ignoreerror': opts.sponskrub is None,
|
||||
'_from_cli': True,
|
||||
}
|
||||
if opts.embedthumbnail:
|
||||
yield {
|
||||
'key': 'EmbedThumbnail',
|
||||
@@ -885,7 +854,6 @@ def parse_options(argv=None):
|
||||
'nopart': opts.nopart,
|
||||
'updatetime': opts.updatetime,
|
||||
'writedescription': opts.writedescription,
|
||||
'writeannotations': opts.writeannotations,
|
||||
'writeinfojson': opts.writeinfojson,
|
||||
'allow_playlist_files': opts.allow_playlist_files,
|
||||
'clean_infojson': opts.clean_infojson,
|
||||
@@ -919,7 +887,6 @@ def parse_options(argv=None):
|
||||
'max_views': opts.max_views,
|
||||
'daterange': opts.date,
|
||||
'cachedir': opts.cachedir,
|
||||
'youtube_print_sig_code': opts.youtube_print_sig_code,
|
||||
'age_limit': opts.age_limit,
|
||||
'download_archive': opts.download_archive,
|
||||
'break_on_existing': opts.break_on_existing,
|
||||
@@ -937,13 +904,9 @@ def parse_options(argv=None):
|
||||
'socket_timeout': opts.socket_timeout,
|
||||
'bidi_workaround': opts.bidi_workaround,
|
||||
'debug_printtraffic': opts.debug_printtraffic,
|
||||
'prefer_ffmpeg': opts.prefer_ffmpeg,
|
||||
'include_ads': opts.include_ads,
|
||||
'default_search': opts.default_search,
|
||||
'dynamic_mpd': opts.dynamic_mpd,
|
||||
'extractor_args': opts.extractor_args,
|
||||
'youtube_include_dash_manifest': opts.youtube_include_dash_manifest,
|
||||
'youtube_include_hls_manifest': opts.youtube_include_hls_manifest,
|
||||
'encoding': opts.encoding,
|
||||
'extract_flat': opts.extract_flat,
|
||||
'live_from_start': opts.live_from_start,
|
||||
@@ -955,7 +918,6 @@ def parse_options(argv=None):
|
||||
'fixup': opts.fixup,
|
||||
'source_address': opts.source_address,
|
||||
'impersonate': opts.impersonate,
|
||||
'call_home': opts.call_home,
|
||||
'sleep_interval_requests': opts.sleep_interval_requests,
|
||||
'sleep_interval': opts.sleep_interval,
|
||||
'max_sleep_interval': opts.max_sleep_interval,
|
||||
@@ -965,7 +927,6 @@ def parse_options(argv=None):
|
||||
'force_keyframes_at_cuts': opts.force_keyframes_at_cuts,
|
||||
'list_thumbnails': opts.list_thumbnails,
|
||||
'playlist_items': opts.playlist_items,
|
||||
'xattr_set_filesize': opts.xattr_set_filesize,
|
||||
'match_filter': opts.match_filter,
|
||||
'color': opts.color,
|
||||
'ffmpeg_location': opts.ffmpeg_location,
|
||||
@@ -974,7 +935,6 @@ def parse_options(argv=None):
|
||||
'hls_split_discontinuity': opts.hls_split_discontinuity,
|
||||
'external_downloader_args': opts.external_downloader_args,
|
||||
'postprocessor_args': opts.postprocessor_args,
|
||||
'cn_verification_proxy': opts.cn_verification_proxy,
|
||||
'geo_verification_proxy': opts.geo_verification_proxy,
|
||||
'geo_bypass': opts.geo_bypass,
|
||||
'geo_bypass_country': opts.geo_bypass_country,
|
||||
@@ -992,12 +952,6 @@ def _real_main(argv=None):
|
||||
|
||||
parser, opts, all_urls, ydl_opts = parse_options(argv)
|
||||
|
||||
# Dump user agent
|
||||
if opts.dump_user_agent:
|
||||
ua = traverse_obj(opts.headers, 'User-Agent', casesense=False, default=std_headers['User-Agent'])
|
||||
write_string(f'{ua}\n', out=sys.stdout)
|
||||
return
|
||||
|
||||
if print_extractor_information(opts, all_urls):
|
||||
return
|
||||
|
||||
@@ -1020,13 +974,8 @@ def _real_main(argv=None):
|
||||
|
||||
try:
|
||||
updater = Updater(ydl, opts.update_self)
|
||||
if opts.update_self and updater.update() and actual_use:
|
||||
if updater.cmd:
|
||||
return updater.restart()
|
||||
# This code is reachable only for zip variant in py < 3.10
|
||||
# It makes sense to exit here, but the old behavior is to continue
|
||||
ydl.report_warning('Restart yt-dlp to use the updated version')
|
||||
# return 100, 'ERROR: The program must exit for the update to complete'
|
||||
if opts.update_self and updater.update() and actual_use and updater.cmd:
|
||||
return updater.restart()
|
||||
except Exception:
|
||||
traceback.print_exc()
|
||||
ydl._download_retcode = 100
|
||||
|
||||
@@ -447,7 +447,7 @@ def key_schedule_core(data, rcon_iteration):
|
||||
|
||||
|
||||
def xor(data1, data2):
|
||||
return [x ^ y for x, y in zip(data1, data2)]
|
||||
return [x ^ y for x, y in zip(data1, data2, strict=False)]
|
||||
|
||||
|
||||
def iter_mix_columns(data, matrix):
|
||||
|
||||
@@ -1,3 +1,4 @@
import datetime as dt
import os
import xml.etree.ElementTree as etree

@@ -27,6 +28,13 @@ def compat_ord(c):
return c if isinstance(c, int) else ord(c)


def compat_datetime_from_timestamp(timestamp):
# Calling dt.datetime.fromtimestamp with negative timestamps throws error in Windows
# Ref: https://github.com/yt-dlp/yt-dlp/issues/5185, https://github.com/python/cpython/issues/81708,
# https://github.com/yt-dlp/yt-dlp/issues/6706#issuecomment-1496842642
return (dt.datetime.fromtimestamp(0, dt.timezone.utc) + dt.timedelta(seconds=timestamp))

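A rough sketch of why this helper is useful (standard-library behaviour only, not part of the diff): on Windows, dt.datetime.fromtimestamp() can raise OSError for negative timestamps, whereas anchoring at the UTC epoch and adding a timedelta works across datetime's full supported range, as the new test cases above exercise.

import datetime as dt


def datetime_from_timestamp(timestamp):
    # Same approach as compat_datetime_from_timestamp above: start from the
    # UTC epoch and add an offset instead of calling fromtimestamp() directly
    return dt.datetime.fromtimestamp(0, dt.timezone.utc) + dt.timedelta(seconds=timestamp)


print(datetime_from_timestamp(-86400))  # 1969-12-31 00:00:00+00:00, even on Windows
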
# Python 3.8+ does not honor %HOME% on windows, but this breaks compatibility with youtube-dl
# See https://github.com/yt-dlp/yt-dlp/issues/792
# https://docs.python.org/3/library/os.path.html#os.path.expanduser

@@ -1,13 +0,0 @@
|
||||
# flake8: noqa: F405
|
||||
from types import * # noqa: F403
|
||||
|
||||
from .compat_utils import passthrough_module
|
||||
|
||||
passthrough_module(__name__, 'types')
|
||||
del passthrough_module
|
||||
|
||||
try:
|
||||
# NB: pypy has builtin NoneType, so checking NameError won't work
|
||||
from types import NoneType # >= 3.10
|
||||
except ImportError:
|
||||
NoneType = type(None)
|
||||
@@ -22,15 +22,11 @@ if os.name == 'nt':
|
||||
|
||||
def getproxies_registry_patched():
|
||||
proxies = getproxies_registry()
|
||||
if (
|
||||
sys.version_info >= (3, 10, 5) # https://docs.python.org/3.10/whatsnew/changelog.html#python-3-10-5-final
|
||||
or (3, 9, 13) <= sys.version_info < (3, 10) # https://docs.python.org/3.9/whatsnew/changelog.html#python-3-9-13-final
|
||||
):
|
||||
return proxies
|
||||
|
||||
for scheme in ('https', 'ftp'):
|
||||
if scheme in proxies and proxies[scheme].startswith(f'{scheme}://'):
|
||||
proxies[scheme] = 'http' + proxies[scheme][len(scheme):]
|
||||
if sys.version_info < (3, 10, 5): # https://docs.python.org/3.10/whatsnew/changelog.html#python-3-10-5-final
|
||||
for scheme in ('https', 'ftp'):
|
||||
if scheme in proxies and proxies[scheme].startswith(f'{scheme}://'):
|
||||
proxies[scheme] = 'http' + proxies[scheme][len(scheme):]
|
||||
|
||||
return proxies
|
||||
|
||||
|
||||
@@ -62,7 +62,6 @@ class FileDownloader:
|
||||
test: Download only first bytes to test the downloader.
|
||||
min_filesize: Skip files smaller than this size
|
||||
max_filesize: Skip files larger than this size
|
||||
xattr_set_filesize: Set ytdl.filesize user xattribute with expected size.
|
||||
progress_delta: The minimum time between progress output, in seconds
|
||||
external_downloader_args: A dictionary of downloader keys (in lower case)
|
||||
and a list of additional command-line arguments for the
|
||||
|
||||
@@ -563,7 +563,7 @@ class FFmpegFD(ExternalFD):
|
||||
f'{cookie.name}={cookie.value}; path={cookie.path}; domain={cookie.domain};\r\n'
|
||||
for cookie in cookies)])
|
||||
if fmt.get('http_headers') and is_http:
|
||||
# Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
|
||||
# Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg:
|
||||
# [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
|
||||
args.extend(['-headers', ''.join(f'{key}: {val}\r\n' for key, val in fmt['http_headers'].items())])
|
||||
|
||||
@@ -654,10 +654,6 @@ class FFmpegFD(ExternalFD):
|
||||
return retval
|
||||
|
||||
|
||||
class AVconvFD(FFmpegFD):
|
||||
pass
|
||||
|
||||
|
||||
_BY_NAME = {
|
||||
klass.get_basename(): klass
|
||||
for name, klass in globals().items()
|
||||
|
||||
@@ -149,14 +149,14 @@ class FlvReader(io.BytesIO):
|
||||
segments_count = self.read_unsigned_char()
|
||||
segments = []
|
||||
for _ in range(segments_count):
|
||||
box_size, box_type, box_data = self.read_box_info()
|
||||
_box_size, box_type, box_data = self.read_box_info()
|
||||
assert box_type == b'asrt'
|
||||
segment = FlvReader(box_data).read_asrt()
|
||||
segments.append(segment)
|
||||
fragments_run_count = self.read_unsigned_char()
|
||||
fragments = []
|
||||
for _ in range(fragments_run_count):
|
||||
box_size, box_type, box_data = self.read_box_info()
|
||||
_box_size, box_type, box_data = self.read_box_info()
|
||||
assert box_type == b'afrt'
|
||||
fragments.append(FlvReader(box_data).read_afrt())
|
||||
|
||||
@@ -167,7 +167,7 @@ class FlvReader(io.BytesIO):
|
||||
}
|
||||
|
||||
def read_bootstrap_info(self):
|
||||
total_size, box_type, box_data = self.read_box_info()
|
||||
_, box_type, box_data = self.read_box_info()
|
||||
assert box_type == b'abst'
|
||||
return FlvReader(box_data).read_abst()
|
||||
|
||||
@@ -324,9 +324,9 @@ class F4mFD(FragmentFD):
|
||||
if requested_bitrate is None or len(formats) == 1:
|
||||
# get the best format
|
||||
formats = sorted(formats, key=lambda f: f[0])
|
||||
rate, media = formats[-1]
|
||||
_, media = formats[-1]
|
||||
else:
|
||||
rate, media = next(filter(
|
||||
_, media = next(filter(
|
||||
lambda f: int(f[0]) == requested_bitrate, formats))
|
||||
|
||||
# Prefer baseURL for relative URLs as per 11.2 of F4M 3.0 spec.
|
||||
|
||||
@@ -13,12 +13,9 @@ from ..utils import (
|
||||
ContentTooShortError,
|
||||
RetryManager,
|
||||
ThrottledDownload,
|
||||
XAttrMetadataError,
|
||||
XAttrUnavailableError,
|
||||
int_or_none,
|
||||
parse_http_range,
|
||||
try_call,
|
||||
write_xattr,
|
||||
)
|
||||
from ..utils.networking import HTTPHeaderDict
|
||||
|
||||
@@ -273,12 +270,6 @@ class HttpFD(FileDownloader):
|
||||
self.report_error(f'unable to open for writing: {err}')
|
||||
return False
|
||||
|
||||
if self.params.get('xattr_set_filesize', False) and data_len is not None:
|
||||
try:
|
||||
write_xattr(ctx.tmpfilename, 'user.ytdl.filesize', str(data_len).encode())
|
||||
except (XAttrUnavailableError, XAttrMetadataError) as err:
|
||||
self.report_error(f'unable to set filesize xattr: {err}')
|
||||
|
||||
try:
|
||||
ctx.stream.write(data_block)
|
||||
except OSError as err:
|
||||
|
||||
@@ -337,6 +337,7 @@ from .cbc import (
|
||||
CBCGemIE,
|
||||
CBCGemLiveIE,
|
||||
CBCGemPlaylistIE,
|
||||
CBCListenIE,
|
||||
CBCPlayerIE,
|
||||
CBCPlayerPlaylistIE,
|
||||
)
|
||||
@@ -424,7 +425,6 @@ from .cpac import (
CPACPlaylistIE,
)
from .cracked import CrackedIE
from .crackle import CrackleIE
from .craftsy import CraftsyIE
from .crooksandliars import CrooksAndLiarsIE
from .crowdbunker import (
@@ -444,10 +444,6 @@ from .curiositystream import (
CuriosityStreamIE,
CuriosityStreamSeriesIE,
)
from .cwtv import (
CWTVIE,
CWTVMovieIE,
)
from .cybrary import (
CybraryCourseIE,
CybraryIE,
@@ -828,6 +824,13 @@ from .ichinanalive import (
IchinanaLiveIE,
IchinanaLiveVODIE,
)
from .idagio import (
IdagioAlbumIE,
IdagioPersonalPlaylistIE,
IdagioPlaylistIE,
IdagioRecordingIE,
IdagioTrackIE,
)
from .idolplus import IdolPlusIE
from .ign import (
IGNIE,
@@ -1141,7 +1144,6 @@ from .mit import (
OCWMITIE,
TechTVMITIE,
)
from .mitele import MiTeleIE
from .mixch import (
MixchArchiveIE,
MixchIE,
@@ -1433,6 +1435,7 @@ from .onet import (
OnetPlIE,
)
from .onionstudios import OnionStudiosIE
from .onsen import OnsenIE
from .opencast import (
OpencastIE,
OpencastPlaylistIE,
@@ -1466,10 +1469,6 @@ from .panopto import (
PanoptoListIE,
PanoptoPlaylistIE,
)
from .paramountplus import (
ParamountPlusIE,
ParamountPlusSeriesIE,
)
from .parler import ParlerIE
from .parlview import ParlviewIE
from .parti import (
@@ -1523,10 +1522,6 @@ from .piramidetv import (
PiramideTVChannelIE,
PiramideTVIE,
)
from .pixivsketch import (
PixivSketchIE,
PixivSketchUserIE,
)
from .planetmarathi import PlanetMarathiIE
from .platzi import (
PlatziCourseIE,
@@ -1783,7 +1778,6 @@ from .rutube import (
RutubePlaylistIE,
RutubeTagsIE,
)
from .rutv import RUTVIE
from .ruutu import RuutuIE
from .ruv import (
RuvIE,
@@ -1853,7 +1847,6 @@ from .simplecast import (
SimplecastPodcastIE,
)
from .sina import SinaIE
from .sixplay import SixPlayIE
from .skeb import SkebIE
from .sky import (
SkyNewsIE,
@@ -1881,7 +1874,12 @@ from .skynewsau import SkyNewsAUIE
from .slideshare import SlideshareIE
from .slideslive import SlidesLiveIE
from .slutload import SlutloadIE
from .smotrim import SmotrimIE
from .smotrim import (
SmotrimAudioIE,
SmotrimIE,
SmotrimLiveIE,
SmotrimPlaylistIE,
)
from .snapchat import SnapchatSpotlightIE
from .snotr import SnotrIE
from .softwhiteunderbelly import SoftWhiteUnderbellyIE
@@ -1929,10 +1927,6 @@ from .spiegel import SpiegelIE
from .sport5 import Sport5IE
from .sportbox import SportBoxIE
from .sportdeutschland import SportDeutschlandIE
from .spotify import (
SpotifyIE,
SpotifyShowIE,
)
from .spreaker import (
SpreakerIE,
SpreakerShowIE,
@@ -2153,6 +2147,7 @@ from .tubitv import (
)
from .tumblr import TumblrIE
from .tunein import (
TuneInEmbedIE,
TuneInPodcastEpisodeIE,
TuneInPodcastIE,
TuneInShortenerIE,
@@ -2287,7 +2282,6 @@ from .utreon import UtreonIE
from .varzesh3 import Varzesh3IE
from .vbox7 import Vbox7IE
from .veo import VeoIE
from .vesti import VestiIE
from .vevo import (
VevoIE,
VevoPlaylistIE,
@@ -2476,7 +2470,6 @@ from .wykop import (
WykopPostCommentIE,
WykopPostIE,
)
from .xanimu import XanimuIE
from .xboxclips import XboxClipsIE
from .xhamster import (
XHamsterEmbedIE,

@@ -21,7 +21,7 @@ from ..utils import (

class ABCIE(InfoExtractor):
IE_NAME = 'abc.net.au'
_VALID_URL = r'https?://(?:www\.)?abc\.net\.au/(?:news|btn)/(?:[^/]+/){1,4}(?P<id>\d{5,})'
_VALID_URL = r'https?://(?:www\.)?abc\.net\.au/(?:news|btn|listen)/(?:[^/?#]+/){1,4}(?P<id>\d{5,})'

_TESTS = [{
'url': 'http://www.abc.net.au/news/2014-11-05/australia-to-staff-ebola-treatment-centre-in-sierra-leone/5868334',
@@ -53,8 +53,9 @@ class ABCIE(InfoExtractor):
'info_dict': {
'id': '6880080',
'ext': 'mp3',
'title': 'NAB lifts interest rates, following Westpac and CBA',
'title': 'NAB lifts interest rates, following Westpac and CBA - ABC listen',
'description': 'md5:f13d8edc81e462fce4a0437c7dc04728',
'thumbnail': r're:https://live-production\.wcms\.abc-cdn\.net\.au/2193d7437c84b25eafd6360c82b5fa21',
},
}, {
'url': 'http://www.abc.net.au/news/2015-10-19/6866214',
@@ -64,8 +65,9 @@ class ABCIE(InfoExtractor):
'info_dict': {
'id': '10527914',
'ext': 'mp4',
'title': 'WWI Centenary',
'description': 'md5:c2379ec0ca84072e86b446e536954546',
'title': 'WWI Centenary - Behind The News',
'description': 'md5:fa4405939ff750fade46ff0cd4c66a52',
'thumbnail': r're:https://live-production\.wcms\.abc-cdn\.net\.au/bcc3433c97bf992dff32ec5a768713c9',
},
}, {
'url': 'https://www.abc.net.au/news/programs/the-world/2020-06-10/black-lives-matter-protests-spawn-support-for/12342074',
@@ -73,7 +75,8 @@ class ABCIE(InfoExtractor):
'id': '12342074',
'ext': 'mp4',
'title': 'Black Lives Matter protests spawn support for Papuans in Indonesia',
'description': 'md5:2961a17dc53abc558589ccd0fb8edd6f',
'description': 'md5:625257209f2d14ce23cb4e3785da9beb',
'thumbnail': r're:https://live-production\.wcms\.abc-cdn\.net\.au/7ee6f190de6d7dbb04203e514bfae9ec',
},
}, {
'url': 'https://www.abc.net.au/btn/newsbreak/btn-newsbreak-20200814/12560476',
@@ -93,7 +96,16 @@ class ABCIE(InfoExtractor):
'title': 'Wagner Group retreating from Russia, leader Prigozhin to move to Belarus',
'ext': 'mp4',
'description': 'Wagner troops leave Rostov-on-Don and\xa0Yevgeny Prigozhin will move to Belarus under a deal brokered by Belarusian President Alexander Lukashenko to end the mutiny.',
'thumbnail': 'https://live-production.wcms.abc-cdn.net.au/0c170f5b57f0105c432f366c0e8e267b?impolicy=wcms_crop_resize&cropH=2813&cropW=5000&xPos=0&yPos=249&width=862&height=485',
'thumbnail': r're:https://live-production\.wcm\.abc-cdn\.net\.au/0c170f5b57f0105c432f366c0e8e267b',
},
}, {
'url': 'https://www.abc.net.au/listen/programs/the-followers-madness-of-two/presents-followers-madness-of-two/105697646',
'info_dict': {
'id': '105697646',
'title': 'INTRODUCING — The Followers: Madness of Two - ABC listen',
'ext': 'mp3',
'description': 'md5:2310cd0d440a4e01656abea15db8d1f3',
'thumbnail': r're:https://live-production\.wcms\.abc-cdn\.net\.au/90d7078214e5d66553ffb7fcf0da0cda',
},
}]

@@ -1,47 +1,125 @@
|
||||
import time
|
||||
|
||||
from .common import InfoExtractor
|
||||
from ..utils import ExtractorError, str_to_int
|
||||
from ..utils import (
|
||||
ExtractorError,
|
||||
extract_attributes,
|
||||
float_or_none,
|
||||
jwt_decode_hs256,
|
||||
jwt_encode,
|
||||
parse_resolution,
|
||||
qualities,
|
||||
unified_strdate,
|
||||
update_url,
|
||||
url_or_none,
|
||||
urljoin,
|
||||
)
|
||||
from ..utils.traversal import (
|
||||
find_element,
|
||||
require,
|
||||
traverse_obj,
|
||||
)
|
||||
|
||||
|
||||
class AppleConnectIE(InfoExtractor):
|
||||
_VALID_URL = r'https?://itunes\.apple\.com/\w{0,2}/?post/(?:id)?sa\.(?P<id>[\w-]+)'
|
||||
IE_NAME = 'apple:music:connect'
|
||||
IE_DESC = 'Apple Music Connect'
|
||||
|
||||
_BASE_URL = 'https://music.apple.com'
|
||||
_QUALITIES = {
|
||||
'provisionalUploadVideo': None,
|
||||
'sdVideo': 480,
|
||||
'sdVideoWithPlusAudio': 480,
|
||||
'sd480pVideo': 480,
|
||||
'720pHdVideo': 720,
|
||||
'1080pHdVideo': 1080,
|
||||
}
|
||||
_VALID_URL = r'https?://music\.apple\.com/[\w-]+/post/(?P<id>\d+)'
|
||||
_TESTS = [{
|
||||
'url': 'https://itunes.apple.com/us/post/idsa.4ab17a39-2720-11e5-96c5-a5b38f6c42d3',
|
||||
'md5': 'c1d41f72c8bcaf222e089434619316e4',
|
||||
'url': 'https://music.apple.com/us/post/1018290019',
|
||||
'info_dict': {
|
||||
'id': '4ab17a39-2720-11e5-96c5-a5b38f6c42d3',
|
||||
'id': '1018290019',
|
||||
'ext': 'm4v',
|
||||
'title': 'Energy',
|
||||
'uploader': 'Drake',
|
||||
'thumbnail': r're:^https?://.*\.jpg$',
|
||||
'duration': 177.911,
|
||||
'thumbnail': r're:https?://.+\.png',
|
||||
'upload_date': '20150710',
|
||||
'timestamp': 1436545535,
|
||||
'uploader': 'Drake',
|
||||
},
|
||||
}, {
|
||||
'url': 'https://itunes.apple.com/us/post/sa.0fe0229f-2457-11e5-9f40-1bb645f2d5d9',
|
||||
'only_matching': True,
|
||||
'url': 'https://music.apple.com/us/post/1016746627',
|
||||
'info_dict': {
|
||||
'id': '1016746627',
|
||||
'ext': 'm4v',
|
||||
'title': 'Body Shop (Madonna) - Chellous Lima (Acoustic Cover)',
|
||||
'duration': 210.278,
|
||||
'thumbnail': r're:https?://.+\.png',
|
||||
'upload_date': '20150706',
|
||||
'uploader': 'Chellous Lima',
|
||||
},
|
||||
}]
|
||||
|
||||
_jwt = None
|
||||
|
||||
@staticmethod
|
||||
def _jwt_is_expired(token):
|
||||
return jwt_decode_hs256(token)['exp'] - time.time() < 120
|
||||
|
||||
def _get_token(self, webpage, video_id):
|
||||
if self._jwt and not self._jwt_is_expired(self._jwt):
|
||||
return self._jwt
|
||||
|
||||
js_url = traverse_obj(webpage, (
|
||||
{find_element(tag='script', attr='crossorigin', value='', html=True)},
|
||||
{extract_attributes}, 'src', {urljoin(self._BASE_URL)}, {require('JS URL')}))
|
||||
js = self._download_webpage(
|
||||
js_url, video_id, 'Downloading token JS', 'Unable to download token JS')
|
||||
|
||||
header = jwt_encode({}, '', headers={'alg': 'ES256', 'kid': 'WebPlayKid'}).split('.')[0]
|
||||
self._jwt = self._search_regex(
|
||||
fr'(["\'])(?P<jwt>{header}(?:\.[\w-]+){{2}})\1', js, 'JSON Web Token', group='jwt')
|
||||
if self._jwt_is_expired(self._jwt):
|
||||
raise ExtractorError('The fetched token is already expired')
|
||||
|
||||
return self._jwt
|
||||
|
||||
def _real_extract(self, url):
|
||||
video_id = self._match_id(url)
|
||||
webpage = self._download_webpage(url, video_id)
|
||||
|
||||
try:
|
||||
video_json = self._html_search_regex(
|
||||
r'class="auc-video-data">(\{.*?\})', webpage, 'json')
|
||||
except ExtractorError:
|
||||
raise ExtractorError('This post doesn\'t contain a video', expected=True)
|
||||
videos = self._download_json(
|
||||
'https://amp-api.music.apple.com/v1/catalog/us/uploaded-videos',
|
||||
video_id, headers={
|
||||
'Authorization': f'Bearer {self._get_token(webpage, video_id)}',
|
||||
'Origin': self._BASE_URL,
|
||||
}, query={'ids': video_id, 'l': 'en-US'})
|
||||
attributes = traverse_obj(videos, (
|
||||
'data', ..., 'attributes', any, {require('video information')}))
|
||||
|
||||
video_data = self._parse_json(video_json, video_id)
|
||||
timestamp = str_to_int(self._html_search_regex(r'data-timestamp="(\d+)"', webpage, 'timestamp'))
|
||||
like_count = str_to_int(self._html_search_regex(r'(\d+) Loves', webpage, 'like count', default=None))
|
||||
formats = []
|
||||
quality = qualities(list(self._QUALITIES.keys()))
|
||||
for format_id, src_url in traverse_obj(attributes, (
|
||||
'assetTokens', {dict.items}, lambda _, v: url_or_none(v[1]),
|
||||
)):
|
||||
formats.append({
|
||||
'ext': 'm4v',
|
||||
'format_id': format_id,
|
||||
'height': self._QUALITIES.get(format_id),
|
||||
'quality': quality(format_id),
|
||||
'url': src_url,
|
||||
**parse_resolution(update_url(src_url, query=None), lenient=True),
|
||||
})
|
||||
|
||||
return {
|
||||
'id': video_id,
|
||||
'url': video_data['sslSrc'],
|
||||
'title': video_data['title'],
|
||||
'description': video_data['description'],
|
||||
'uploader': video_data['artistName'],
|
||||
'thumbnail': video_data['artworkUrl'],
|
||||
'timestamp': timestamp,
|
||||
'like_count': like_count,
|
||||
'formats': formats,
|
||||
'thumbnail': self._html_search_meta(
|
||||
['og:image', 'og:image:secure_url', 'twitter:image'], webpage),
|
||||
**traverse_obj(attributes, {
|
||||
'title': ('name', {str}),
|
||||
'duration': ('durationInMilliseconds', {float_or_none(scale=1000)}),
|
||||
'upload_date': ('uploadDate', {unified_strdate}),
|
||||
'uploader': (('artistName', 'uploadingArtistName'), {str}, any),
|
||||
'webpage_url': ('postUrl', {url_or_none}),
|
||||
}),
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
from .common import InfoExtractor
from ..utils import (
clean_html,
clean_podcast_url,
int_or_none,
parse_iso8601,
@@ -17,7 +18,7 @@ class ApplePodcastsIE(InfoExtractor):
'ext': 'mp3',
'title': 'Ferreck Dawn - To The Break of Dawn 117',
'episode': 'Ferreck Dawn - To The Break of Dawn 117',
'description': 'md5:1fc571102f79dbd0a77bfd71ffda23bc',
'description': 'md5:8c4f5c2c30af17ed6a98b0b9daf15b76',
'upload_date': '20240812',
'timestamp': 1723449600,
'duration': 3596,
@@ -58,7 +59,7 @@ class ApplePodcastsIE(InfoExtractor):
r'<script [^>]*\bid=["\']serialized-server-data["\'][^>]*>', webpage,
'server data', episode_id, contains_pattern=r'\[{(?s:.+)}\]')[0]['data']
model_data = traverse_obj(server_data, (
'headerButtonItems', lambda _, v: v['$kind'] == 'bookmark' and v['modelType'] == 'EpisodeOffer',
'headerButtonItems', lambda _, v: v['$kind'] == 'share' and v['modelType'] == 'EpisodeLockup',
'model', {dict}, any))

return {
@@ -68,7 +69,8 @@ class ApplePodcastsIE(InfoExtractor):
or self._yield_json_ld(webpage, episode_id, fatal=False), episode_id, fatal=False),
**traverse_obj(model_data, {
'title': ('title', {str}),
'url': ('streamUrl', {clean_podcast_url}),
'description': ('summary', {clean_html}),
'url': ('playAction', 'episodeOffer', 'streamUrl', {clean_podcast_url}),
'timestamp': ('releaseDate', {parse_iso8601}),
'duration': ('duration', {int_or_none}),
}),

@@ -740,7 +740,7 @@ class YoutubeWebArchiveIE(InfoExtractor):
note or 'Downloading CDX API JSON', query=query, fatal=fatal)
if isinstance(res, list) and len(res) >= 2:
# format response to make it easier to use
return [dict(zip(res[0], v)) for v in res[1:]]
return [dict(zip(res[0], v)) for v in res[1:]]  # noqa: B905
elif not isinstance(res, list) or len(res) != 0:
self.report_warning('Error while parsing CDX API response' + bug_reports_message())

@@ -1366,7 +1366,7 @@ class BilibiliSpaceVideoIE(BilibiliSpaceBaseIE):
else:
yield self.url_result(f'https://www.bilibili.com/video/{entry["bvid"]}', BiliBiliIE, entry['bvid'])

metadata, paged_list = self._extract_playlist(fetch_page, get_metadata, get_entries)
_, paged_list = self._extract_playlist(fetch_page, get_metadata, get_entries)
return self.playlist_result(paged_list, playlist_id)


@@ -1400,7 +1400,7 @@ class BilibiliSpaceAudioIE(BilibiliSpaceBaseIE):
for entry in page_data.get('data') or []:
yield self.url_result(f'https://www.bilibili.com/audio/au{entry["id"]}', BilibiliAudioIE, entry['id'])

metadata, paged_list = self._extract_playlist(fetch_page, get_metadata, get_entries)
_, paged_list = self._extract_playlist(fetch_page, get_metadata, get_entries)
return self.playlist_result(paged_list, playlist_id)

@@ -174,7 +174,7 @@ class BrainPOPLegacyBaseIE(BrainPOPBaseIE):
}

def _real_extract(self, url):
slug, display_id = self._match_valid_url(url).group('slug', 'id')
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id)
topic_data = self._search_json(
r'var\s+content\s*=\s*', webpage, 'content data',

@@ -31,7 +31,7 @@ from ..utils.traversal import require, traverse_obj, trim_str
|
||||
|
||||
class CBCIE(InfoExtractor):
|
||||
IE_NAME = 'cbc.ca'
|
||||
_VALID_URL = r'https?://(?:www\.)?cbc\.ca/(?!player/)(?:[^/]+/)+(?P<id>[^/?#]+)'
|
||||
_VALID_URL = r'https?://(?:www\.)?cbc\.ca/(?!player/|listen/|i/caffeine/syndicate/)(?:[^/?#]+/)+(?P<id>[^/?#]+)'
|
||||
_TESTS = [{
|
||||
# with mediaId
|
||||
'url': 'http://www.cbc.ca/22minutes/videos/clips-season-23/don-cherry-play-offs',
|
||||
@@ -112,10 +112,6 @@ class CBCIE(InfoExtractor):
|
||||
'playlist_mincount': 6,
|
||||
}]
|
||||
|
||||
@classmethod
|
||||
def suitable(cls, url):
|
||||
return False if CBCPlayerIE.suitable(url) else super().suitable(url)
|
||||
|
||||
def _extract_player_init(self, player_init, display_id):
|
||||
player_info = self._parse_json(player_init, display_id, js_to_json)
|
||||
media_id = player_info.get('mediaId')
|
||||
@@ -913,3 +909,63 @@ class CBCGemLiveIE(InfoExtractor):
|
||||
'thumbnail': ('images', 'card', 'url'),
|
||||
}),
|
||||
}
|
||||
|
||||
|
||||
class CBCListenIE(InfoExtractor):
|
||||
IE_NAME = 'cbc.ca:listen'
|
||||
_VALID_URL = r'https?://(?:www\.)?cbc\.ca/listen/(?:cbc-podcasts|live-radio)/[\w-]+/[\w-]+/(?P<id>\d+)'
|
||||
_TESTS = [{
|
||||
'url': 'https://www.cbc.ca/listen/cbc-podcasts/1353-the-naked-emperor/episode/16142603-introducing-understood-who-broke-the-internet',
|
||||
'info_dict': {
|
||||
'id': '16142603',
|
||||
'title': 'Introducing Understood: Who Broke the Internet?',
|
||||
'ext': 'mp3',
|
||||
'description': 'md5:c605117500084e43f08a950adc6a708c',
|
||||
'duration': 229,
|
||||
'timestamp': 1745812800,
|
||||
'release_timestamp': 1745827200,
|
||||
'release_date': '20250428',
|
||||
'upload_date': '20250428',
|
||||
},
|
||||
}, {
|
||||
'url': 'https://www.cbc.ca/listen/live-radio/1-64-the-house/clip/16170773-should-canada-suck-stand-donald-trump',
|
||||
'info_dict': {
|
||||
'id': '16170773',
|
||||
'title': 'Should Canada suck up or stand up to Donald Trump?',
|
||||
'ext': 'mp3',
|
||||
'description': 'md5:7385194f1cdda8df27ba3764b35e7976',
|
||||
'duration': 3159,
|
||||
'timestamp': 1758340800,
|
||||
'release_timestamp': 1758254400,
|
||||
'release_date': '20250919',
|
||||
'upload_date': '20250920',
|
||||
},
|
||||
}]
|
||||
|
||||
def _real_extract(self, url):
|
||||
video_id = self._match_id(url)
|
||||
|
||||
response = self._download_json(
|
||||
f'https://www.cbc.ca/listen/api/v1/clips/{video_id}', video_id, fatal=False)
|
||||
data = traverse_obj(response, ('data', {dict}))
|
||||
if not data:
|
||||
self.report_warning('API failed to return data. Falling back to webpage parsing')
|
||||
webpage = self._download_webpage(url, video_id)
|
||||
preloaded_state = self._search_json(
|
||||
r'window\.__PRELOADED_STATE__\s*=', webpage, 'preloaded state',
|
||||
video_id, transform_source=js_to_json)
|
||||
data = traverse_obj(preloaded_state, (
|
||||
('podcastDetailData', 'showDetailData'), ..., 'episodes',
|
||||
lambda _, v: str(v['clipID']) == video_id, any, {require('episode data')}))
|
||||
|
||||
return {
|
||||
'id': video_id,
|
||||
**traverse_obj(data, {
|
||||
'url': (('src', 'url'), {url_or_none}, any),
|
||||
'title': ('title', {str}),
|
||||
'description': ('description', {str}),
|
||||
'release_timestamp': ('releasedAt', {int_or_none(scale=1000)}),
|
||||
'timestamp': ('airdate', {int_or_none(scale=1000)}),
|
||||
'duration': ('duration', {int_or_none}),
|
||||
}),
|
||||
}
|
||||
|
||||
@@ -5,8 +5,6 @@ import zlib

from .anvato import AnvatoIE
from .common import InfoExtractor
from .paramountplus import ParamountPlusIE
from ..networking import HEADRequest
from ..utils import (
ExtractorError,
UserNotLive,
@@ -132,13 +130,7 @@ class CBSNewsEmbedIE(CBSNewsBaseIE):
video_id = item['mpxRefId']
video_url = self._get_video_url(item)
if not video_url:
# Old embeds redirect user to ParamountPlus but most links are 404
pplus_url = f'https://www.paramountplus.com/shows/video/{video_id}'
try:
self._request_webpage(HEADRequest(pplus_url), video_id)
return self.url_result(pplus_url, ParamountPlusIE)
except ExtractorError:
self.raise_no_formats('This video is no longer available', True, video_id)
raise ExtractorError('This video is no longer available', expected=True)

return self._extract_video(item, video_url, video_id)

@@ -5,18 +5,6 @@ from ..utils import ExtractorError, make_archive_id, url_basename
class CellebriteIE(VidyardBaseIE):
_VALID_URL = r'https?://cellebrite\.com/(?:\w+)?/(?P<id>[\w-]+)'
_TESTS = [{
'url': 'https://cellebrite.com/en/collect-data-from-android-devices-with-cellebrite-ufed/',
'info_dict': {
'id': 'ZqmUss3dQfEMGpauambPuH',
'display_id': '16025876',
'ext': 'mp4',
'title': 'Ask the Expert: Chat Capture - Collect Data from Android Devices in Cellebrite UFED',
'description': 'md5:dee48fe12bbae5c01fe6a053f7676da4',
'thumbnail': 'https://cellebrite.com/wp-content/uploads/2021/05/Chat-Capture-1024x559.png',
'duration': 455.979,
'_old_archive_ids': ['cellebrite 16025876'],
},
}, {
'url': 'https://cellebrite.com/en/how-to-lawfully-collect-the-maximum-amount-of-data-from-android-devices/',
'info_dict': {
'id': 'QV1U8a2yzcxigw7VFnqKyg',

@@ -272,6 +272,7 @@ class CNNIndonesiaIE(InfoExtractor):
return merge_dicts(json_ld_data, {
'_type': 'url_transparent',
'url': embed_url,
'id': video_id,
'upload_date': upload_date,
'tags': try_call(lambda: self._html_search_meta('keywords', webpage).split(', ')),
})

@@ -1663,7 +1663,7 @@ class InfoExtractor:
'end_time': part.get('endOffset'),
} for part in variadic(e.get('hasPart') or []) if part.get('@type') == 'Clip']
for idx, (last_c, current_c, next_c) in enumerate(zip(
[{'end_time': 0}, *chapters], chapters, chapters[1:])):
[{'end_time': 0}, *chapters], chapters, chapters[1:], strict=False)):
current_c['end_time'] = current_c['end_time'] or next_c['start_time']
current_c['start_time'] = current_c['start_time'] or last_c['end_time']
if None in current_c.values():
@@ -1848,7 +1848,7 @@ class InfoExtractor:
return {}

args = dict(zip(arg_keys.split(','), map(json.dumps, self._parse_json(
f'[{arg_vals}]', video_id, transform_source=js_to_json, fatal=fatal) or ())))
f'[{arg_vals}]', video_id, transform_source=js_to_json, fatal=fatal) or ()), strict=True))

ret = self._parse_json(js, video_id, transform_source=functools.partial(js_to_json, vars=args), fatal=fatal)
return traverse_obj(ret, traverse) or {}

@@ -1,243 +0,0 @@
|
||||
import hashlib
|
||||
import hmac
|
||||
import re
|
||||
import time
|
||||
|
||||
from .common import InfoExtractor
|
||||
from ..networking.exceptions import HTTPError
|
||||
from ..utils import (
|
||||
ExtractorError,
|
||||
determine_ext,
|
||||
float_or_none,
|
||||
int_or_none,
|
||||
orderedSet,
|
||||
parse_age_limit,
|
||||
parse_duration,
|
||||
url_or_none,
|
||||
)
|
||||
|
||||
|
||||
class CrackleIE(InfoExtractor):
|
||||
_VALID_URL = r'(?:crackle:|https?://(?:(?:www|m)\.)?(?:sony)?crackle\.com/(?:playlist/\d+/|(?:[^/]+/)+))(?P<id>\d+)'
|
||||
_TESTS = [{
|
||||
# Crackle is available in the United States and territories
|
||||
'url': 'https://www.crackle.com/thanksgiving/2510064',
|
||||
'info_dict': {
|
||||
'id': '2510064',
|
||||
'ext': 'mp4',
|
||||
'title': 'Touch Football',
|
||||
'description': 'md5:cfbb513cf5de41e8b56d7ab756cff4df',
|
||||
'duration': 1398,
|
||||
'view_count': int,
|
||||
'average_rating': 0,
|
||||
'age_limit': 17,
|
||||
'genre': 'Comedy',
|
||||
'creator': 'Daniel Powell',
|
||||
'artist': 'Chris Elliott, Amy Sedaris',
|
||||
'release_year': 2016,
|
||||
'series': 'Thanksgiving',
|
||||
'episode': 'Touch Football',
|
||||
'season_number': 1,
|
||||
'episode_number': 1,
|
||||
},
|
||||
'params': {
|
||||
# m3u8 download
|
||||
'skip_download': True,
|
||||
},
|
||||
'expected_warnings': [
|
||||
'Trying with a list of known countries',
|
||||
],
|
||||
}, {
|
||||
'url': 'https://www.sonycrackle.com/thanksgiving/2510064',
|
||||
'only_matching': True,
|
||||
}]
|
||||
|
||||
_MEDIA_FILE_SLOTS = {
|
||||
'360p.mp4': {
|
||||
'width': 640,
|
||||
'height': 360,
|
||||
},
|
||||
'480p.mp4': {
|
||||
'width': 768,
|
||||
'height': 432,
|
||||
},
|
||||
'480p_1mbps.mp4': {
|
||||
'width': 852,
|
||||
'height': 480,
|
||||
},
|
||||
}
|
||||
|
||||
def _download_json(self, url, *args, **kwargs):
|
||||
# Authorization generation algorithm is reverse engineered from:
|
||||
# https://www.sonycrackle.com/static/js/main.ea93451f.chunk.js
|
||||
timestamp = time.strftime('%Y%m%d%H%M', time.gmtime())
|
||||
h = hmac.new(b'IGSLUQCBDFHEOIFM', '|'.join([url, timestamp]).encode(), hashlib.sha1).hexdigest().upper()
|
||||
headers = {
|
||||
'Accept': 'application/json',
|
||||
'Authorization': '|'.join([h, timestamp, '117', '1']),
|
||||
}
|
||||
return InfoExtractor._download_json(self, url, *args, headers=headers, **kwargs)
|
||||
|
||||
def _real_extract(self, url):
|
||||
video_id = self._match_id(url)
|
||||
|
||||
geo_bypass_country = self.get_param('geo_bypass_country', None)
|
||||
countries = orderedSet((geo_bypass_country, 'US', 'AU', 'CA', 'AS', 'FM', 'GU', 'MP', 'PR', 'PW', 'MH', 'VI', ''))
|
||||
num_countries, num = len(countries) - 1, 0
|
||||
|
||||
media = {}
|
||||
for num, country in enumerate(countries):
|
||||
if num == 1: # start hard-coded list
|
||||
self.report_warning('%s. Trying with a list of known countries' % (
|
||||
f'Unable to obtain video formats from {geo_bypass_country} API' if geo_bypass_country
|
||||
else 'No country code was given using --geo-bypass-country'))
|
||||
elif num == num_countries: # end of list
|
||||
geo_info = self._download_json(
|
||||
'https://web-api-us.crackle.com/Service.svc/geo/country',
|
||||
video_id, fatal=False, note='Downloading geo-location information from crackle API',
|
||||
errnote='Unable to fetch geo-location information from crackle') or {}
|
||||
country = geo_info.get('CountryCode')
|
||||
if country is None:
|
||||
continue
|
||||
self.to_screen(f'{self.IE_NAME} identified country as {country}')
|
||||
if country in countries:
|
||||
self.to_screen(f'Downloading from {country} API was already attempted. Skipping...')
|
||||
continue
|
||||
|
||||
if country is None:
|
||||
continue
|
||||
try:
|
||||
media = self._download_json(
|
||||
f'https://web-api-us.crackle.com/Service.svc/details/media/{video_id}/{country}?disableProtocols=true',
|
||||
video_id, note=f'Downloading media JSON from {country} API',
|
||||
errnote='Unable to download media JSON')
|
||||
except ExtractorError as e:
|
||||
# 401 means geo restriction, trying next country
|
||||
if isinstance(e.cause, HTTPError) and e.cause.status == 401:
|
||||
continue
|
||||
raise
|
||||
|
||||
status = media.get('status')
|
||||
if status.get('messageCode') != '0':
|
||||
raise ExtractorError(
|
||||
'{} said: {} {} - {}'.format(
|
||||
self.IE_NAME, status.get('messageCodeDescription'), status.get('messageCode'), status.get('message')),
|
||||
expected=True)
|
||||
|
||||
# Found video formats
|
||||
if isinstance(media.get('MediaURLs'), list):
|
||||
break
|
||||
|
||||
ignore_no_formats = self.get_param('ignore_no_formats_error')
|
||||
|
||||
if not media or (not media.get('MediaURLs') and not ignore_no_formats):
|
||||
raise ExtractorError(
|
||||
'Unable to access the crackle API. Try passing your country code '
|
||||
'to --geo-bypass-country. If it still does not work and the '
|
||||
'video is available in your country')
|
||||
title = media['Title']
|
||||
|
||||
formats, subtitles = [], {}
|
||||
has_drm = False
|
||||
for e in media.get('MediaURLs') or []:
|
||||
if e.get('UseDRM'):
|
||||
has_drm = True
|
||||
format_url = url_or_none(e.get('DRMPath'))
|
||||
else:
|
||||
format_url = url_or_none(e.get('Path'))
|
||||
if not format_url:
|
||||
continue
|
||||
ext = determine_ext(format_url)
|
||||
if ext == 'm3u8':
|
||||
fmts, subs = self._extract_m3u8_formats_and_subtitles(
|
||||
format_url, video_id, 'mp4', entry_protocol='m3u8_native',
|
||||
m3u8_id='hls', fatal=False)
|
||||
formats.extend(fmts)
|
||||
subtitles = self._merge_subtitles(subtitles, subs)
|
||||
elif ext == 'mpd':
|
||||
fmts, subs = self._extract_mpd_formats_and_subtitles(
|
||||
format_url, video_id, mpd_id='dash', fatal=False)
|
||||
formats.extend(fmts)
|
||||
subtitles = self._merge_subtitles(subtitles, subs)
|
||||
elif format_url.endswith('.ism/Manifest'):
|
||||
fmts, subs = self._extract_ism_formats_and_subtitles(
|
||||
format_url, video_id, ism_id='mss', fatal=False)
|
||||
formats.extend(fmts)
|
||||
subtitles = self._merge_subtitles(subtitles, subs)
|
||||
else:
|
||||
mfs_path = e.get('Type')
|
||||
mfs_info = self._MEDIA_FILE_SLOTS.get(mfs_path)
|
||||
if not mfs_info:
|
||||
continue
|
||||
formats.append({
|
||||
'url': format_url,
|
||||
'format_id': 'http-' + mfs_path.split('.')[0],
|
||||
'width': mfs_info['width'],
|
||||
'height': mfs_info['height'],
|
||||
})
|
||||
if not formats and has_drm:
|
||||
self.report_drm(video_id)
|
||||
|
||||
description = media.get('Description')
|
||||
duration = int_or_none(media.get(
|
||||
'DurationInSeconds')) or parse_duration(media.get('Duration'))
|
||||
view_count = int_or_none(media.get('CountViews'))
|
||||
average_rating = float_or_none(media.get('UserRating'))
|
||||
age_limit = parse_age_limit(media.get('Rating'))
|
||||
genre = media.get('Genre')
|
||||
release_year = int_or_none(media.get('ReleaseYear'))
|
||||
creator = media.get('Directors')
|
||||
artist = media.get('Cast')
|
||||
|
||||
if media.get('MediaTypeDisplayValue') == 'Full Episode':
|
||||
series = media.get('ShowName')
|
||||
episode = title
|
||||
season_number = int_or_none(media.get('Season'))
|
||||
episode_number = int_or_none(media.get('Episode'))
|
||||
else:
|
||||
series = episode = season_number = episode_number = None
|
||||
|
||||
cc_files = media.get('ClosedCaptionFiles')
|
||||
if isinstance(cc_files, list):
|
||||
for cc_file in cc_files:
|
||||
if not isinstance(cc_file, dict):
|
||||
continue
|
||||
cc_url = url_or_none(cc_file.get('Path'))
|
||||
if not cc_url:
|
||||
continue
|
||||
lang = cc_file.get('Locale') or 'en'
|
||||
subtitles.setdefault(lang, []).append({'url': cc_url})
|
||||
|
||||
thumbnails = []
|
||||
images = media.get('Images')
|
||||
if isinstance(images, list):
|
||||
for image_key, image_url in images.items():
|
||||
mobj = re.search(r'Img_(\d+)[xX](\d+)', image_key)
|
||||
if not mobj:
|
||||
continue
|
||||
thumbnails.append({
|
||||
'url': image_url,
|
||||
'width': int(mobj.group(1)),
|
||||
'height': int(mobj.group(2)),
|
||||
})
|
||||
|
||||
return {
|
||||
'id': video_id,
|
||||
'title': title,
|
||||
'description': description,
|
||||
'duration': duration,
|
||||
'view_count': view_count,
|
||||
'average_rating': average_rating,
|
||||
'age_limit': age_limit,
|
||||
'genre': genre,
|
||||
'creator': creator,
|
||||
'artist': artist,
|
||||
'release_year': release_year,
|
||||
'series': series,
|
||||
'episode': episode,
|
||||
'season_number': season_number,
|
||||
'episode_number': episode_number,
|
||||
'thumbnails': thumbnails,
|
||||
'subtitles': subtitles,
|
||||
'formats': formats,
|
||||
}
|
||||
@@ -1,180 +0,0 @@
|
||||
import re
|
||||
|
||||
from .common import InfoExtractor
|
||||
from ..utils import (
|
||||
ExtractorError,
|
||||
int_or_none,
|
||||
parse_age_limit,
|
||||
parse_iso8601,
|
||||
parse_qs,
|
||||
smuggle_url,
|
||||
str_or_none,
|
||||
update_url_query,
|
||||
)
|
||||
from ..utils.traversal import traverse_obj
|
||||
|
||||
|
||||
class CWTVIE(InfoExtractor):
|
||||
IE_NAME = 'cwtv'
|
||||
_VALID_URL = r'https?://(?:www\.)?cw(?:tv(?:pr)?|seed)\.com/(?:shows/)?(?:[^/]+/)+[^?]*\?.*\b(?:play|watch|guid)=(?P<id>[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12})'
|
||||
_TESTS = [{
|
||||
'url': 'https://www.cwtv.com/shows/continuum/a-stitch-in-time/?play=9149a1e1-4cb2-46d7-81b2-47d35bbd332b',
|
||||
'info_dict': {
|
||||
'id': '9149a1e1-4cb2-46d7-81b2-47d35bbd332b',
|
||||
'ext': 'mp4',
|
||||
'title': 'A Stitch in Time',
|
||||
'description': r're:(?s)City Protective Services officer Kiera Cameron is transported from 2077.+',
|
||||
'thumbnail': r're:https?://.+\.jpe?g',
|
||||
'duration': 2632,
|
||||
'timestamp': 1736928000,
|
||||
'uploader': 'CWTV',
|
||||
'chapters': 'count:5',
|
||||
'series': 'Continuum',
|
||||
'season_number': 1,
|
||||
'episode_number': 1,
|
||||
'age_limit': 14,
|
||||
'upload_date': '20250115',
|
||||
'season': 'Season 1',
|
||||
'episode': 'Episode 1',
|
||||
},
|
||||
'params': {
|
||||
# m3u8 download
|
||||
'skip_download': True,
|
||||
},
|
||||
}, {
|
||||
'url': 'http://cwtv.com/shows/arrow/legends-of-yesterday/?play=6b15e985-9345-4f60-baf8-56e96be57c63',
|
||||
'info_dict': {
|
||||
'id': '6b15e985-9345-4f60-baf8-56e96be57c63',
|
||||
'ext': 'mp4',
|
||||
'title': 'Legends of Yesterday',
|
||||
'description': r're:(?s)Oliver and Barry Allen take Kendra Saunders and Carter Hall to a remote.+',
|
||||
'duration': 2665,
|
||||
'series': 'Arrow',
|
||||
'season_number': 4,
|
||||
'season': '4',
|
||||
'episode_number': 8,
|
||||
'upload_date': '20151203',
|
||||
'timestamp': 1449122100,
|
||||
},
|
||||
'params': {
|
||||
# m3u8 download
|
||||
'skip_download': True,
|
||||
},
|
||||
'skip': 'redirect to http://cwtv.com/shows/arrow/',
|
||||
}, {
|
||||
'url': 'http://www.cwseed.com/shows/whose-line-is-it-anyway/jeff-davis-4/?play=24282b12-ead2-42f2-95ad-26770c2c6088',
|
||||
'info_dict': {
|
||||
'id': '24282b12-ead2-42f2-95ad-26770c2c6088',
|
||||
'ext': 'mp4',
|
||||
'title': 'Jeff Davis 4',
|
||||
'description': 'Jeff Davis is back to make you laugh.',
|
||||
'duration': 1263,
|
||||
'series': 'Whose Line Is It Anyway?',
|
||||
'season_number': 11,
|
||||
'episode_number': 20,
|
||||
'upload_date': '20151006',
|
||||
'timestamp': 1444107300,
|
||||
'age_limit': 14,
|
||||
'uploader': 'CWTV',
|
||||
'thumbnail': r're:https?://.+\.jpe?g',
|
||||
'chapters': 'count:4',
|
||||
'episode': 'Episode 20',
|
||||
'season': 'Season 11',
|
||||
},
|
||||
'params': {
|
||||
# m3u8 download
|
||||
'skip_download': True,
|
||||
},
|
||||
}, {
|
||||
'url': 'http://cwtv.com/thecw/chroniclesofcisco/?play=8adebe35-f447-465f-ab52-e863506ff6d6',
|
||||
'only_matching': True,
|
||||
}, {
|
||||
'url': 'http://cwtvpr.com/the-cw/video?watch=9eee3f60-ef4e-440b-b3b2-49428ac9c54e',
|
||||
'only_matching': True,
|
||||
}, {
|
||||
'url': 'http://cwtv.com/shows/arrow/legends-of-yesterday/?watch=6b15e985-9345-4f60-baf8-56e96be57c63',
|
||||
'only_matching': True,
|
||||
}, {
|
||||
'url': 'http://www.cwtv.com/movies/play/?guid=0a8e8b5b-1356-41d5-9a6a-4eda1a6feb6c',
|
||||
'only_matching': True,
|
||||
}]
|
||||
|
||||
def _real_extract(self, url):
|
||||
video_id = self._match_id(url)
|
||||
data = self._download_json(
|
||||
f'https://images.cwtv.com/feed/app-2/video-meta/apiversion_22/device_android/guid_{video_id}', video_id)
|
||||
if traverse_obj(data, 'result') != 'ok':
|
||||
raise ExtractorError(traverse_obj(data, (('error_msg', 'msg'), {str}, any)), expected=True)
|
||||
video_data = data['video']
|
||||
title = video_data['title']
|
||||
mpx_url = update_url_query(
|
||||
video_data.get('mpx_url') or f'https://link.theplatform.com/s/cwtv/media/guid/2703454149/{video_id}',
|
||||
{'formats': 'M3U+none'})
|
||||
|
||||
season = str_or_none(video_data.get('season'))
|
||||
episode = str_or_none(video_data.get('episode'))
|
||||
if episode and season:
|
||||
episode = episode[len(season):]
|
||||
|
||||
return {
|
||||
'_type': 'url_transparent',
|
||||
'id': video_id,
|
||||
'title': title,
|
||||
'url': smuggle_url(mpx_url, {'force_smil_url': True}),
|
||||
'description': video_data.get('description_long'),
|
||||
'duration': int_or_none(video_data.get('duration_secs')),
|
||||
'series': video_data.get('series_name'),
|
||||
'season_number': int_or_none(season),
|
||||
'episode_number': int_or_none(episode),
|
||||
'timestamp': parse_iso8601(video_data.get('start_time')),
|
||||
'age_limit': parse_age_limit(video_data.get('rating')),
|
||||
'ie_key': 'ThePlatform',
|
||||
'thumbnail': video_data.get('large_thumbnail'),
|
||||
}
|
||||
|
||||
|
||||
class CWTVMovieIE(InfoExtractor):
|
||||
IE_NAME = 'cwtv:movie'
|
||||
_VALID_URL = r'https?://(?:www\.)?cwtv\.com/shows/(?P<id>[\w-]+)/?\?(?:[^#]+&)?viewContext=Movies'
|
||||
_TESTS = [{
|
||||
'url': 'https://www.cwtv.com/shows/the-crush/?viewContext=Movies+Swimlane',
|
||||
'info_dict': {
|
||||
'id': '0a8e8b5b-1356-41d5-9a6a-4eda1a6feb6c',
|
||||
'ext': 'mp4',
|
||||
'title': 'The Crush',
|
||||
'upload_date': '20241112',
|
||||
'description': 'md5:1549acd90dff4a8273acd7284458363e',
|
||||
'chapters': 'count:9',
|
||||
'timestamp': 1731398400,
|
||||
'age_limit': 16,
|
||||
'duration': 5337,
|
||||
'series': 'The Crush',
|
||||
'season': 'Season 1',
|
||||
'uploader': 'CWTV',
|
||||
'season_number': 1,
|
||||
'episode': 'Episode 1',
|
||||
'episode_number': 1,
|
||||
'thumbnail': r're:https?://.+\.jpe?g',
|
||||
},
|
||||
'params': {
|
||||
# m3u8 download
|
||||
'skip_download': True,
|
||||
},
|
||||
}]
|
||||
_UUID_RE = r'[\da-f]{8}-(?:[\da-f]{4}-){3}[\da-f]{12}'
|
||||
|
||||
def _real_extract(self, url):
|
||||
display_id = self._match_id(url)
|
||||
webpage = self._download_webpage(url, display_id)
|
||||
app_url = (
|
||||
self._html_search_meta('al:ios:url', webpage, default=None)
|
||||
or self._html_search_meta('al:android:url', webpage, default=None))
|
||||
video_id = (
|
||||
traverse_obj(parse_qs(app_url), ('video_id', 0, {lambda x: re.fullmatch(self._UUID_RE, x)}, 0))
|
||||
or self._search_regex([
|
||||
rf'CWTV\.Site\.curPlayingGUID\s*=\s*["\']({self._UUID_RE})',
|
||||
rf'CWTV\.Site\.viewInAppURL\s*=\s*["\']/shows/[\w-]+/watch-in-app/\?play=({self._UUID_RE})',
|
||||
], webpage, 'video ID'))
|
||||
|
||||
return self.url_result(
|
||||
f'https://www.cwtv.com/shows/{display_id}/{display_id}/?play={video_id}', CWTVIE, video_id)
|
||||
@@ -18,15 +18,15 @@ from ..utils import (
|
||||
|
||||
|
||||
class DropoutIE(InfoExtractor):
|
||||
_LOGIN_URL = 'https://www.dropout.tv/login'
|
||||
_LOGIN_URL = 'https://watch.dropout.tv/login'
|
||||
_NETRC_MACHINE = 'dropout'
|
||||
|
||||
_VALID_URL = r'https?://(?:www\.)?dropout\.tv/(?:[^/]+/)*videos/(?P<id>[^/]+)/?$'
|
||||
_VALID_URL = r'https?://(?:watch\.)?dropout\.tv/(?:[^/?#]+/)*videos/(?P<id>[^/?#]+)/?(?:[?#]|$)'
|
||||
_TESTS = [
|
||||
{
|
||||
'url': 'https://www.dropout.tv/game-changer/season:2/videos/yes-or-no',
|
||||
'url': 'https://watch.dropout.tv/game-changer/season:2/videos/yes-or-no',
|
||||
'note': 'Episode in a series',
|
||||
'md5': '5e000fdfd8d8fa46ff40456f1c2af04a',
|
||||
'md5': '4b76963f904f8bc4ba22dcf0e66ada06',
|
||||
'info_dict': {
|
||||
'id': '738153',
|
||||
'display_id': 'yes-or-no',
|
||||
@@ -45,35 +45,35 @@ class DropoutIE(InfoExtractor):
|
||||
'uploader_url': 'https://vimeo.com/user80538407',
|
||||
'uploader': 'OTT Videos',
|
||||
},
|
||||
'expected_warnings': ['Ignoring subtitle tracks found in the HLS manifest'],
|
||||
'expected_warnings': ['Ignoring subtitle tracks found in the HLS manifest', 'Failed to parse XML: not well-formed'],
|
||||
},
|
||||
{
|
||||
'url': 'https://www.dropout.tv/dimension-20-fantasy-high/season:1/videos/episode-1',
|
||||
'url': 'https://watch.dropout.tv/tablepop-presents-megadungeon-live/season:1/videos/enter-through-the-gift-shop',
|
||||
'note': 'Episode in a series (missing release_date)',
|
||||
'md5': '712caf7c191f1c47c8f1879520c2fa5c',
|
||||
'md5': 'b08fb03050585ea25cd7ee092db9134c',
|
||||
'info_dict': {
|
||||
'id': '320562',
|
||||
'display_id': 'episode-1',
|
||||
'id': '624270',
|
||||
'display_id': 'enter-through-the-gift-shop',
|
||||
'ext': 'mp4',
|
||||
'title': 'The Beginning Begins',
|
||||
'description': 'The cast introduces their PCs, including a neurotic elf, a goblin PI, and a corn-worshipping cleric.',
|
||||
'thumbnail': 'https://vhx.imgix.net/chuncensoredstaging/assets/4421ed0d-f630-4c88-9004-5251b2b8adfa.jpg',
|
||||
'series': 'Dimension 20: Fantasy High',
|
||||
'title': 'Enter Through the Gift Shop',
|
||||
'description': 'A new adventuring party explores a gift shop and runs into a friendly orc -- and some angry goblins.',
|
||||
'thumbnail': 'https://vhx.imgix.net/chuncensoredstaging/assets/a1d876c3-3dee-4cd0-87c6-27a851b1d0ec.jpg',
|
||||
'series': 'TablePop Presents: MEGADUNGEON LIVE!',
|
||||
'season_number': 1,
|
||||
'season': 'Season 1',
|
||||
'episode_number': 1,
|
||||
'episode': 'The Beginning Begins',
|
||||
'duration': 6838,
|
||||
'episode': 'Enter Through the Gift Shop',
|
||||
'duration': 7101,
|
||||
'uploader_id': 'user80538407',
|
||||
'uploader_url': 'https://vimeo.com/user80538407',
|
||||
'uploader': 'OTT Videos',
|
||||
},
|
||||
'expected_warnings': ['Ignoring subtitle tracks found in the HLS manifest'],
|
||||
'expected_warnings': ['Ignoring subtitle tracks found in the HLS manifest', 'Failed to parse XML: not well-formed'],
|
||||
},
|
||||
{
|
||||
'url': 'https://www.dropout.tv/videos/misfits-magic-holiday-special',
|
||||
'url': 'https://watch.dropout.tv/videos/misfits-magic-holiday-special',
|
||||
'note': 'Episode not in a series',
|
||||
'md5': 'c30fa18999c5880d156339f13c953a26',
|
||||
'md5': '1e6428f7756b02c93b573d39ddd789fe',
|
||||
'info_dict': {
|
||||
'id': '1915774',
|
||||
'display_id': 'misfits-magic-holiday-special',
|
||||
@@ -87,7 +87,7 @@ class DropoutIE(InfoExtractor):
|
||||
'uploader_url': 'https://vimeo.com/user80538407',
|
||||
'uploader': 'OTT Videos',
|
||||
},
|
||||
'expected_warnings': ['Ignoring subtitle tracks found in the HLS manifest'],
|
||||
'expected_warnings': ['Ignoring subtitle tracks found in the HLS manifest', 'Failed to parse XML: not well-formed'],
|
||||
},
|
||||
]
|
||||
|
||||
@@ -125,7 +125,7 @@ class DropoutIE(InfoExtractor):
|
||||
display_id = self._match_id(url)
|
||||
|
||||
webpage = None
|
||||
if self._get_cookies('https://www.dropout.tv').get('_session'):
|
||||
if self._get_cookies('https://watch.dropout.tv').get('_session'):
|
||||
webpage = self._download_webpage(url, display_id)
|
||||
if not webpage or '<div id="watch-unauthorized"' in webpage:
|
||||
login_err = self._login(display_id)
|
||||
@@ -148,7 +148,7 @@ class DropoutIE(InfoExtractor):
|
||||
return {
|
||||
'_type': 'url_transparent',
|
||||
'ie_key': VHXEmbedIE.ie_key(),
|
||||
'url': VHXEmbedIE._smuggle_referrer(embed_url, 'https://www.dropout.tv'),
|
||||
'url': VHXEmbedIE._smuggle_referrer(embed_url, 'https://watch.dropout.tv'),
|
||||
'id': self._search_regex(r'embed\.vhx\.tv/videos/(.+?)\?', embed_url, 'id'),
|
||||
'display_id': display_id,
|
||||
'title': title,
|
||||
@@ -167,10 +167,10 @@ class DropoutIE(InfoExtractor):
|
||||
|
||||
class DropoutSeasonIE(InfoExtractor):
|
||||
_PAGE_SIZE = 24
|
||||
_VALID_URL = r'https?://(?:www\.)?dropout\.tv/(?P<id>[^\/$&?#]+)(?:/?$|/season:(?P<season>[0-9]+)/?$)'
|
||||
_VALID_URL = r'https?://(?:watch\.)?dropout\.tv/(?P<id>[^\/$&?#]+)(?:/?$|/season:(?P<season>[0-9]+)/?$)'
|
||||
_TESTS = [
|
||||
{
|
||||
'url': 'https://www.dropout.tv/dimension-20-fantasy-high/season:1',
|
||||
'url': 'https://watch.dropout.tv/dimension-20-fantasy-high/season:1',
|
||||
'note': 'Multi-season series with the season in the url',
|
||||
'playlist_count': 24,
|
||||
'info_dict': {
|
||||
@@ -179,7 +179,7 @@ class DropoutSeasonIE(InfoExtractor):
|
||||
},
|
||||
},
|
||||
{
|
||||
'url': 'https://www.dropout.tv/dimension-20-fantasy-high',
|
||||
'url': 'https://watch.dropout.tv/dimension-20-fantasy-high',
|
||||
'note': 'Multi-season series with the season not in the url',
|
||||
'playlist_count': 24,
|
||||
'info_dict': {
|
||||
@@ -188,7 +188,7 @@ class DropoutSeasonIE(InfoExtractor):
|
||||
},
|
||||
},
|
||||
{
|
||||
'url': 'https://www.dropout.tv/dimension-20-shriek-week',
|
||||
'url': 'https://watch.dropout.tv/dimension-20-shriek-week',
|
||||
'note': 'Single-season series',
|
||||
'playlist_count': 4,
|
||||
'info_dict': {
|
||||
@@ -197,7 +197,7 @@ class DropoutSeasonIE(InfoExtractor):
|
||||
},
|
||||
},
|
||||
{
|
||||
'url': 'https://www.dropout.tv/breaking-news-no-laugh-newsroom/season:3',
|
||||
'url': 'https://watch.dropout.tv/breaking-news-no-laugh-newsroom/season:3',
|
||||
'note': 'Multi-season series with season in the url that requires pagination',
|
||||
'playlist_count': 25,
|
||||
'info_dict': {
|
||||
|
||||
@@ -1,5 +1,4 @@
import json
import socket

from .common import InfoExtractor
from ..utils import (
@@ -56,7 +55,7 @@ class DTubeIE(InfoExtractor):
try:
self.to_screen(f'{video_id}: Checking {format_id} video format URL')
self._downloader._opener.open(video_url, timeout=5).close()
except socket.timeout:
except TimeoutError:
self.to_screen(
f'{video_id}: {format_id} URL is invalid, skipping')
continue

@@ -7,7 +7,7 @@ from ..utils import (


class FifaIE(InfoExtractor):
_VALID_URL = r'https?://www\.fifa\.com/fifaplus/(?P<locale>\w{2})/watch/([^#?]+/)?(?P<id>\w+)'
_VALID_URL = r'https?://www\.fifa\.com/fifaplus/\w{2}/watch/([^#?]+/)?(?P<id>\w+)'
_TESTS = [{
'url': 'https://www.fifa.com/fifaplus/en/watch/7on10qPcnyLajDDU3ntg6y',
'info_dict': {
@@ -51,7 +51,7 @@ class FifaIE(InfoExtractor):
}]

def _real_extract(self, url):
video_id, locale = self._match_valid_url(url).group('id', 'locale')
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)

preconnect_link = self._search_regex(

@@ -3,15 +3,19 @@ from ..networking.exceptions import HTTPError
|
||||
from ..utils import (
|
||||
ExtractorError,
|
||||
UserNotLive,
|
||||
int_or_none,
|
||||
join_nonempty,
|
||||
parse_iso8601,
|
||||
str_or_none,
|
||||
traverse_obj,
|
||||
url_or_none,
|
||||
)
|
||||
from ..utils.traversal import traverse_obj
|
||||
|
||||
|
||||
class FlexTVIE(InfoExtractor):
|
||||
_VALID_URL = r'https?://(?:www\.)?flextv\.co\.kr/channels/(?P<id>\d+)/live'
|
||||
IE_NAME = 'ttinglive'
|
||||
IE_DESC = '띵라이브 (formerly FlexTV)'
|
||||
_VALID_URL = r'https?://(?:www\.)?(?:ttinglive\.com|flextv\.co\.kr)/channels/(?P<id>\d+)/live'
|
||||
_TESTS = [{
|
||||
'url': 'https://www.flextv.co.kr/channels/231638/live',
|
||||
'info_dict': {
|
||||
@@ -36,21 +40,32 @@ class FlexTVIE(InfoExtractor):
|
||||
|
||||
try:
|
||||
stream_data = self._download_json(
|
||||
f'https://api.flextv.co.kr/api/channels/{channel_id}/stream',
|
||||
f'https://api.ttinglive.com/api/channels/{channel_id}/stream',
|
||||
channel_id, query={'option': 'all'})
|
||||
except ExtractorError as e:
|
||||
if isinstance(e.cause, HTTPError) and e.cause.status == 400:
|
||||
raise UserNotLive(video_id=channel_id)
|
||||
raise
|
||||
|
||||
playlist_url = stream_data['sources'][0]['url']
|
||||
formats, subtitles = self._extract_m3u8_formats_and_subtitles(
|
||||
playlist_url, channel_id, 'mp4')
|
||||
formats = []
|
||||
for stream in traverse_obj(stream_data, ('sources', ..., {dict})):
|
||||
if stream.get('format') == 'ivs' and url_or_none(stream.get('url')):
|
||||
formats.extend(self._extract_m3u8_formats(
|
||||
stream['url'], channel_id, 'mp4', live=True, fatal=False, m3u8_id='ivs'))
|
||||
for format_type in ['hls', 'flv']:
|
||||
for data in traverse_obj(stream, (
|
||||
'urlDetail', format_type, 'resolution', lambda _, v: url_or_none(v['url']))):
|
||||
formats.append({
|
||||
'format_id': join_nonempty(format_type, data.get('suffixName'), delim=''),
|
||||
'url': data['url'],
|
||||
'height': int_or_none(data.get('resolution')),
|
||||
'ext': 'mp4' if format_type == 'hls' else 'flv',
|
||||
'protocol': 'm3u8_native' if format_type == 'hls' else 'http',
|
||||
})
|
||||
|
||||
return {
|
||||
'id': channel_id,
|
||||
'formats': formats,
|
||||
'subtitles': subtitles,
|
||||
'is_live': True,
|
||||
**traverse_obj(stream_data, {
|
||||
'title': ('stream', 'title', {str}),
|
||||
|
||||
@@ -56,7 +56,7 @@ class FujiTVFODPlus7IE(InfoExtractor):
fmt, subs = self._extract_m3u8_formats_and_subtitles(src['url'], video_id, 'ts')
for f in fmt:
f.update(dict(zip(('height', 'width'),
self._BITRATE_MAP.get(f.get('tbr'), ()))))
self._BITRATE_MAP.get(f.get('tbr'), ()), strict=False)))
formats.extend(fmt)
subtitles = self._merge_subtitles(subtitles, subs)

yt_dlp/extractor/idagio.py | 262 lines (new file)
@@ -0,0 +1,262 @@
|
||||
from .common import InfoExtractor
|
||||
from ..utils import int_or_none, unified_timestamp, url_or_none
|
||||
from ..utils.traversal import traverse_obj
|
||||
|
||||
|
||||
class IdagioTrackIE(InfoExtractor):
|
||||
_VALID_URL = r'https?://(?:www\.)?app\.idagio\.com(?:/[a-z]{2})?/recordings/\d+\?(?:[^#]+&)?trackId=(?P<id>\d+)'
|
||||
_TESTS = [{
|
||||
'url': 'https://app.idagio.com/recordings/30576934?trackId=30576943',
|
||||
'md5': '15148bd71804b2450a2508931a116b56',
|
||||
'info_dict': {
|
||||
'id': '30576943',
|
||||
'ext': 'mp3',
|
||||
'title': 'Theme. Andante',
|
||||
'duration': 82,
|
||||
'composers': ['Edward Elgar'],
|
||||
'artists': ['Vasily Petrenko', 'Royal Liverpool Philharmonic Orchestra'],
|
||||
'genres': ['Orchestral', 'Other Orchestral Music'],
|
||||
'track': 'Theme. Andante',
|
||||
'timestamp': 1554474370,
|
||||
'upload_date': '20190405',
|
||||
},
|
||||
}, {
|
||||
'url': 'https://app.idagio.com/recordings/20514467?trackId=20514478&utm_source=pcl',
|
||||
'md5': '3acef2ea0feadf889123b70e5a1e7fa7',
|
||||
'info_dict': {
|
||||
'id': '20514478',
|
||||
'ext': 'mp3',
|
||||
'title': 'I. Adagio sostenuto',
|
||||
'duration': 316,
|
||||
'composers': ['Ludwig van Beethoven'],
|
||||
'genres': ['Keyboard', 'Sonata (Keyboard)'],
|
||||
'track': 'I. Adagio sostenuto',
|
||||
'timestamp': 1518076337,
|
||||
'upload_date': '20180208',
|
||||
},
|
||||
}, {
|
||||
'url': 'https://app.idagio.com/de/recordings/20514467?trackId=20514478&utm_source=pcl',
|
||||
'only_matching': True,
|
||||
}]
|
||||
|
||||
def _real_extract(self, url):
|
||||
track_id = self._match_id(url)
|
||||
track_info = self._download_json(
|
||||
f'https://api.idagio.com/v2.0/metadata/tracks/{track_id}',
|
||||
track_id, fatal=False, expected_status=406)
|
||||
if traverse_obj(track_info, 'error_code') == 'idagio.error.blocked.location':
|
||||
self.raise_geo_restricted()
|
||||
|
||||
content_info = self._download_json(
|
||||
f'https://api.idagio.com/v1.8/content/track/{track_id}', track_id,
|
||||
query={
|
||||
'quality': '0',
|
||||
'format': '2',
|
||||
'client_type': 'web-4',
|
||||
})
|
||||
|
||||
return {
|
||||
'ext': 'mp3',
|
||||
'vcodec': 'none',
|
||||
'id': track_id,
|
||||
'url': traverse_obj(content_info, ('url', {url_or_none})),
|
||||
**traverse_obj(track_info, ('result', {
|
||||
'title': ('piece', 'title', {str}),
|
||||
'timestamp': ('recording', 'created_at', {int_or_none(scale=1000)}),
|
||||
'location': ('recording', 'location', {str}),
|
||||
'duration': ('duration', {int_or_none}),
|
||||
'track': ('piece', 'title', {str}),
|
||||
'artists': ('recording', ('conductor', ('ensembles', ...), ('soloists', ...)), 'name', {str}, filter),
|
||||
'composers': ('piece', 'workpart', 'work', 'composer', 'name', {str}, filter, all, filter),
|
||||
'genres': ('piece', 'workpart', 'work', ('genre', 'subgenre'), 'title', {str}, filter),
|
||||
})),
|
||||
}
|
||||
|
||||
|
||||
class IdagioPlaylistBaseIE(InfoExtractor):
|
||||
"""Subclasses must set _API_URL_TMPL and define _parse_playlist_metadata"""
|
||||
_PLAYLIST_ID_KEY = 'id' # vs. 'display_id'
|
||||
|
||||
def _entries(self, playlist_info):
|
||||
for track_data in traverse_obj(playlist_info, ('tracks', lambda _, v: v['id'] and v['recording']['id'])):
|
||||
track_id = track_data['id']
|
||||
recording_id = track_data['recording']['id']
|
||||
yield self.url_result(
|
||||
f'https://app.idagio.com/recordings/{recording_id}?trackId={track_id}',
|
||||
ie=IdagioTrackIE, video_id=track_id)
|
||||
|
||||
def _real_extract(self, url):
|
||||
playlist_id = self._match_id(url)
|
||||
playlist_info = self._download_json(
|
||||
self._API_URL_TMPL.format(playlist_id), playlist_id)['result']
|
||||
|
||||
return {
|
||||
'_type': 'playlist',
|
||||
self._PLAYLIST_ID_KEY: playlist_id,
|
||||
'entries': self._entries(playlist_info),
|
||||
**self._parse_playlist_metadata(playlist_info),
|
||||
}
|
||||
|
||||
|
||||
class IdagioRecordingIE(IdagioPlaylistBaseIE):
|
||||
_VALID_URL = r'https?://(?:www\.)?app\.idagio\.com(?:/[a-z]{2})?/recordings/(?P<id>\d+)(?![^#]*[&?]trackId=\d+)'
|
||||
_TESTS = [{
|
||||
'url': 'https://app.idagio.com/recordings/30576934',
|
||||
'info_dict': {
|
||||
'id': '30576934',
|
||||
'title': 'Variations on an Original Theme op. 36',
|
||||
'composers': ['Edward Elgar'],
|
||||
'artists': ['Vasily Petrenko', 'Royal Liverpool Philharmonic Orchestra'],
|
||||
'genres': ['Orchestral', 'Other Orchestral Music'],
|
||||
'timestamp': 1554474370,
|
||||
'modified_timestamp': 1554474370,
|
||||
'modified_date': '20190405',
|
||||
'upload_date': '20190405',
|
||||
},
|
||||
'playlist_count': 15,
|
||||
}, {
|
||||
'url': 'https://app.idagio.com/de/recordings/20514467',
|
||||
'info_dict': {
|
||||
'id': '20514467',
|
||||
'title': 'Sonata for Piano No. 14 in C sharp minor op. 27/2',
|
||||
'composers': ['Ludwig van Beethoven'],
|
||||
'genres': ['Keyboard', 'Sonata (Keyboard)'],
|
||||
'timestamp': 1518076337,
|
||||
'upload_date': '20180208',
|
||||
'modified_timestamp': 1518076337,
|
||||
'modified_date': '20180208',
|
||||
},
|
||||
'playlist_count': 3,
|
||||
}]
|
||||
_API_URL_TMPL = 'https://api.idagio.com/v2.0/metadata/recordings/{}'
|
||||
|
||||
def _parse_playlist_metadata(self, playlist_info):
|
||||
return traverse_obj(playlist_info, {
|
||||
'title': ('work', 'title', {str}),
|
||||
'timestamp': ('created_at', {int_or_none(scale=1000)}),
|
||||
'modified_timestamp': ('created_at', {int_or_none(scale=1000)}),
|
||||
'location': ('location', {str}),
|
||||
'artists': (('conductor', ('ensembles', ...), ('soloists', ...)), 'name', {str}),
|
||||
'composers': ('work', 'composer', 'name', {str}, all),
|
||||
'genres': ('work', ('genre', 'subgenre'), 'title', {str}),
|
||||
'tags': ('tags', ..., {str}),
|
||||
})
|
||||
|
||||
|
||||
class IdagioAlbumIE(IdagioPlaylistBaseIE):
|
||||
_VALID_URL = r'https?://(?:www\.)?app\.idagio\.com(?:/[a-z]{2})?/albums/(?P<id>[\w-]+)'
|
||||
_TESTS = [{
|
||||
'url': 'https://app.idagio.com/albums/elgar-enigma-variations-in-the-south-serenade-for-strings',
|
||||
'info_dict': {
|
||||
'id': 'a9f139b8-f70d-4b8a-a9a4-5fe8d35eaf9c',
|
||||
'display_id': 'elgar-enigma-variations-in-the-south-serenade-for-strings',
|
||||
'title': 'Elgar: Enigma Variations, In the South, Serenade for Strings',
|
||||
'description': '',
|
||||
'thumbnail': r're:https://.+/albums/880040420521/main\.jpg',
|
||||
'artists': ['Vasily Petrenko', 'Royal Liverpool Philharmonic Orchestra', 'Edward Elgar'],
|
||||
'timestamp': 1553817600,
|
||||
'upload_date': '20190329',
|
||||
'modified_timestamp': 1562566559.0,
|
||||
'modified_date': '20190708',
|
||||
},
|
||||
'playlist_count': 19,
|
||||
}, {
|
||||
'url': 'https://app.idagio.com/de/albums/brahms-ein-deutsches-requiem-3B403DF6-62D7-4A42-807B-47173F3E0192',
|
||||
'info_dict': {
|
||||
'id': '2862ad4e-4a61-45ad-9ce4-7fcf0c2626fe',
|
||||
'display_id': 'brahms-ein-deutsches-requiem-3B403DF6-62D7-4A42-807B-47173F3E0192',
|
||||
'title': 'Brahms: Ein deutsches Requiem',
|
||||
'description': 'GRAMOPHONE CLASSICAL MUSIC AWARDS 2025 Recording of the Year & Choral',
|
||||
'thumbnail': r're:https://.+/albums/3149020954522/main\.jpg',
|
||||
'artists': ['Sabine Devieilhe', 'Stéphane Degout', 'Raphaël Pichon', 'Pygmalion', 'Johannes Brahms'],
|
||||
'timestamp': 1760054400,
|
||||
'upload_date': '20251010',
|
||||
'modified_timestamp': 1760624868,
|
||||
'modified_date': '20251016',
|
||||
'tags': ['recommended', 'recent-release'],
|
||||
},
|
||||
'playlist_count': 7,
|
||||
}]
|
||||
    _API_URL_TMPL = 'https://api.idagio.com/v2.0/metadata/albums/{}'
    _PLAYLIST_ID_KEY = 'display_id'

    def _parse_playlist_metadata(self, playlist_info):
        return traverse_obj(playlist_info, {
            'id': ('id', {str}),
            'title': ('title', {str}),
            'timestamp': ('publishDate', {unified_timestamp}),
            'modified_timestamp': ('lastModified', {unified_timestamp}),
            'thumbnail': ('imageUrl', {url_or_none}),
            'description': ('description', {str}),
            'artists': ('participants', ..., 'name', {str}),
            'tags': ('tags', ..., {str}),
        })


class IdagioPlaylistIE(IdagioPlaylistBaseIE):
    _VALID_URL = r'https?://(?:www\.)?app\.idagio\.com(?:/[a-z]{2})?/playlists/(?!personal/)(?P<id>[\w-]+)'
    _TESTS = [{
        'url': 'https://app.idagio.com/playlists/beethoven-the-most-beautiful-piano-music',
        'info_dict': {
            'id': '31652bec-8c5b-460e-a3f0-cf1f69817f53',
            'display_id': 'beethoven-the-most-beautiful-piano-music',
            'title': 'Beethoven: the most beautiful piano music',
            'description': 'md5:d41bb04b8896bb69377f5c2cd9345ad1',
            'thumbnail': r're:https://.+/playlists/31652bec-8c5b-460e-a3f0-cf1f69817f53/main\.jpg',
            'creators': ['IDAGIO'],
        },
        'playlist_mincount': 16,  # one entry is geo-restricted
    }, {
        'url': 'https://app.idagio.com/de/playlists/piano-music-for-an-autumn-day',
        'info_dict': {
            'id': 'd70e9c7f-7080-4308-ae0f-f890dddeda82',
            'display_id': 'piano-music-for-an-autumn-day',
            'title': 'Piano Music for an Autumn Day',
            'description': 'Get ready to snuggle up and enjoy all the musical colours of this cosy, autumnal playlist.',
            'thumbnail': r're:https://.+/playlists/d70e9c7f-7080-4308-ae0f-f890dddeda82/main\.jpg',
            'creators': ['IDAGIO'],
        },
        'playlist_count': 35,
    }]
    _API_URL_TMPL = 'https://api.idagio.com/v2.0/playlists/{}'
    _PLAYLIST_ID_KEY = 'display_id'

    def _parse_playlist_metadata(self, playlist_info):
        return traverse_obj(playlist_info, {
            'id': ('id', {str}),
            'title': ('title', {str}),
            'thumbnail': ('imageUrl', {url_or_none}),
            'description': ('description', {str}),
            'creators': ('curator', 'name', {str}, all),
        })


class IdagioPersonalPlaylistIE(IdagioPlaylistBaseIE):
    _VALID_URL = r'https?://(?:www\.)?app\.idagio\.com(?:/[a-z]{2})?/playlists/personal/(?P<id>[\da-f-]+)'
    _TESTS = [{
        'url': 'https://app.idagio.com/playlists/personal/99dad72e-7b3a-45a4-b216-867c08046ed8',
        'info_dict': {
            'id': '99dad72e-7b3a-45a4-b216-867c08046ed8',
            'title': 'Test',
            'creators': ['1a6f16a6-4514-4d0c-b481-3a9877835626'],
            'thumbnail': r're:https://.+/artists/86371/main\.jpg',
            'timestamp': 1602859138,
            'modified_timestamp': 1755616667,
            'upload_date': '20201016',
            'modified_date': '20250819',
        },
        'playlist_count': 100,
    }, {
        'url': 'https://app.idagio.com/de/playlists/personal/99dad72e-7b3a-45a4-b216-867c08046ed8',
        'only_matching': True,
    }]
    _API_URL_TMPL = 'https://api.idagio.com/v1.0/personal-playlists/{}'

    def _parse_playlist_metadata(self, playlist_info):
        return traverse_obj(playlist_info, {
            'title': ('title', {str}),
            'thumbnail': ('image_url', {url_or_none}),
            'creators': ('user_id', {str}, all),
            'timestamp': ('created_at', {int_or_none(scale=1000)}),
            'modified_timestamp': ('updated_at', {int_or_none(scale=1000)}),
        })

@@ -437,7 +437,7 @@ class KalturaIE(InfoExtractor):
            params = urllib.parse.parse_qs(query)
        if path:
            splitted_path = path.split('/')
            params.update(dict(zip(splitted_path[::2], [[v] for v in splitted_path[1::2]])))
            params.update(dict(zip(splitted_path[::2], [[v] for v in splitted_path[1::2]])))  # noqa: B905
        if 'wid' in params:
            partner_id = remove_start(params['wid'][0], '_')
        elif 'p' in params:

@@ -37,7 +37,7 @@ class LocoIE(InfoExtractor):
        },
    }, {
        'url': 'https://loco.com/stream/c64916eb-10fb-46a9-9a19-8c4b7ed064e7',
        'md5': '45ebc8a47ee1c2240178757caf8881b5',
        'md5': '8b9bda03eba4d066928ae8d71f19befb',
        'info_dict': {
            'id': 'c64916eb-10fb-46a9-9a19-8c4b7ed064e7',
            'ext': 'mp4',
@@ -55,9 +55,9 @@ class LocoIE(InfoExtractor):
            'tags': ['Gameplay'],
            'series': 'GTA 5',
            'timestamp': 1740612872,
            'modified_timestamp': 1740613037,
            'modified_timestamp': 1750948439,
            'upload_date': '20250226',
            'modified_date': '20250226',
            'modified_date': '20250626',
        },
    }, {
        # Requires video authorization
@@ -123,8 +123,8 @@ class LocoIE(InfoExtractor):
    def _real_extract(self, url):
        video_type, video_id = self._match_valid_url(url).group('type', 'id')
        webpage = self._download_webpage(url, video_id)
        stream = traverse_obj(self._search_nextjs_data(webpage, video_id), (
            'props', 'pageProps', ('liveStreamData', 'stream', 'liveStream'), {dict}, any, {require('stream info')}))
        stream = traverse_obj(self._search_nextjs_v13_data(webpage, video_id), (
            ..., (None, 'ssrData'), ('liveStreamData', 'stream', 'liveStream'), {dict}, any, {require('stream info')}))

        if access_token := self._get_access_token(video_id):
            self._request_webpage(

@@ -1,3 +1,4 @@
import itertools
import re
import urllib.parse

@@ -216,7 +217,7 @@ class LyndaIE(LyndaBaseIE):
    def _fix_subtitles(self, subs):
        srt = ''
        seq_counter = 0
        for seq_current, seq_next in zip(subs, subs[1:]):
        for seq_current, seq_next in itertools.pairwise(subs):
            m_current = re.match(self._TIMECODE_REGEX, seq_current['Timecode'])
            if m_current is None:
                continue

@@ -1,102 +0,0 @@
from .telecinco import TelecincoBaseIE
from ..utils import (
    int_or_none,
    parse_iso8601,
)


class MiTeleIE(TelecincoBaseIE):
    IE_DESC = 'mitele.es'
    _VALID_URL = r'https?://(?:www\.)?mitele\.es/(?:[^/]+/)+(?P<id>[^/]+)/player'
    _TESTS = [{
        'url': 'http://www.mitele.es/programas-tv/diario-de/57b0dfb9c715da65618b4afa/player',
        'info_dict': {
            'id': 'FhYW1iNTE6J6H7NkQRIEzfne6t2quqPg',
            'ext': 'mp4',
            'title': 'Diario de La redacción Programa 144',
            'description': 'md5:07c35a7b11abb05876a6a79185b58d27',
            'series': 'Diario de',
            'season': 'Season 14',
            'season_number': 14,
            'episode': 'Tor, la web invisible',
            'episode_number': 3,
            'thumbnail': r're:(?i)^https?://.*\.jpg$',
            'duration': 2913,
            'age_limit': 16,
            'timestamp': 1471209401,
            'upload_date': '20160814',
        },
        'skip': 'HTTP Error 404 Not Found',
    }, {
        # no explicit title
        'url': 'http://www.mitele.es/programas-tv/cuarto-milenio/57b0de3dc915da14058b4876/player',
        'info_dict': {
            'id': 'oyNG1iNTE6TAPP-JmCjbwfwJqqMMX3Vq',
            'ext': 'mp4',
            'title': 'Cuarto Milenio Temporada 6 Programa 226',
            'description': 'md5:5ff132013f0cd968ffbf1f5f3538a65f',
            'series': 'Cuarto Milenio',
            'season': 'Season 6',
            'season_number': 6,
            'episode': 'Episode 24',
            'episode_number': 24,
            'thumbnail': r're:(?i)^https?://.*\.jpg$',
            'duration': 7313,
            'age_limit': 12,
            'timestamp': 1471209021,
            'upload_date': '20160814',
        },
        'params': {
            'skip_download': True,
        },
        'skip': 'HTTP Error 404 Not Found',
    }, {
        'url': 'https://www.mitele.es/programas-tv/horizonte/temporada-5/programa-171-40_013480051/player/',
        'info_dict': {
            'id': '7adbe22e-cd41-4787-afa4-36f3da7c2c6f',
            'ext': 'mp4',
            'title': 'Horizonte Temporada 5 Programa 171',
            'description': 'md5:97f1fb712c5ac27e5693a8b3c5c0c6e3',
            'episode': 'Las Zonas de Bajas Emisiones, a debate',
            'episode_number': 171,
            'season': 'Season 5',
            'season_number': 5,
            'series': 'Horizonte',
            'duration': 7012,
            'upload_date': '20240927',
            'timestamp': 1727416450,
            'thumbnail': 'https://album.mediaset.es/eimg/2024/09/27/horizonte-171_9f02.jpg',
            'age_limit': 12,
        },
        'params': {'geo_bypass_country': 'ES'},
    }, {
        'url': 'http://www.mitele.es/series-online/la-que-se-avecina/57aac5c1c915da951a8b45ed/player',
        'only_matching': True,
    }, {
        'url': 'https://www.mitele.es/programas-tv/diario-de/la-redaccion/programa-144-40_1006364575251/player/',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)
        pre_player = self._search_json(
            r'window\.\$REACTBASE_STATE\.prePlayer_mtweb\s*=',
            webpage, 'Pre Player', display_id)['prePlayer']
        title = pre_player['title']
        video_info = self._parse_content(pre_player['video'], url)
        content = pre_player.get('content') or {}
        info = content.get('info') or {}

        video_info.update({
            'title': title,
            'description': info.get('synopsis'),
            'series': content.get('title'),
            'season_number': int_or_none(info.get('season_number')),
            'episode': content.get('subtitle'),
            'episode_number': int_or_none(info.get('episode_number')),
            'duration': int_or_none(info.get('duration')),
            'age_limit': int_or_none(info.get('rating')),
            'timestamp': parse_iso8601(pre_player.get('publishedTime')),
        })
        return video_info
@@ -92,7 +92,7 @@ class MojevideoIE(InfoExtractor):
            contains_pattern=r'\[(?s:.+)\]', transform_source=js_to_json)

        formats = []
        for video_hash, (suffix, quality, format_note) in zip(video_hashes, [
        for video_hash, (suffix, quality, format_note) in zip(video_hashes, [  # noqa: B905
            ('', 1, 'normálna kvalita'),
            ('_lq', 0, 'nízka kvalita'),
            ('_hd', 2, 'HD-720p'),

@@ -1,3 +1,5 @@
import hashlib

from .common import InfoExtractor

@@ -9,10 +11,10 @@ class MuseScoreIE(InfoExtractor):
            'id': '142975',
            'ext': 'mp3',
            'title': 'WA Mozart Marche Turque (Turkish March fingered)',
            'description': 'md5:7ede08230e4eaabd67a4a98bb54d07be',
            'thumbnail': r're:https?://(?:www\.)?musescore\.com/.*\.png[^$]+',
            'description': 'md5:0ca4cf6b79d7f5868a1fee74097394ab',
            'thumbnail': r're:https?://cdn\.ustatik\.com/musescore/.*\.jpg',
            'uploader': 'PapyPiano',
            'creator': 'Wolfgang Amadeus Mozart',
            'creators': ['Wolfgang Amadeus Mozart'],
        },
    }, {
        'url': 'https://musescore.com/user/36164500/scores/6837638',
@@ -20,10 +22,10 @@ class MuseScoreIE(InfoExtractor):
            'id': '6837638',
            'ext': 'mp3',
            'title': 'Sweet Child O\' Mine – Guns N\' Roses sweet child',
            'description': 'md5:4dca71191c14abc312a0a4192492eace',
            'thumbnail': r're:https?://(?:www\.)?musescore\.com/.*\.png[^$]+',
            'description': 'md5:2cd49bd6b4e48a75a3c469d4775d5079',
            'thumbnail': r're:https?://cdn\.ustatik\.com/musescore/.*\.png',
            'uploader': 'roxbelviolin',
            'creator': 'Guns N´Roses Arr. Roxbel Violin',
            'creators': ['Guns N´Roses Arr. Roxbel Violin'],
        },
    }, {
        'url': 'https://musescore.com/classicman/fur-elise',
@@ -31,22 +33,28 @@ class MuseScoreIE(InfoExtractor):
            'id': '33816',
            'ext': 'mp3',
            'title': 'Für Elise – Beethoven',
            'description': 'md5:49515a3556d5ecaf9fa4b2514064ac34',
            'thumbnail': r're:https?://(?:www\.)?musescore\.com/.*\.png[^$]+',
            'description': 'md5:e37b241c0280b33e9ac25651b815d06e',
            'thumbnail': r're:https?://cdn\.ustatik\.com/musescore/.*\.jpg',
            'uploader': 'ClassicMan',
            'creator': 'Ludwig van Beethoven (1770–1827)',
            'creators': ['Ludwig van Beethoven (1770–1827)'],
        },
    }, {
        'url': 'https://musescore.com/minh_cuteee/scores/6555384',
        'only_matching': True,
    }]

    @staticmethod
    def _generate_auth_token(video_id):
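        # Per the code below, the jmuse API token is derived from the score ID itself:
        # the first 4 hex characters of md5(video_id + 'mp30gs')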
        return hashlib.md5((video_id + 'mp30gs').encode()).hexdigest()[:4]

    def _real_extract(self, url):
        webpage = self._download_webpage(url, None)
        url = self._og_search_url(webpage) or url
        video_id = self._match_id(url)
        mp3_url = self._download_json(f'https://musescore.com/api/jmuse?id={video_id}&index=0&type=mp3&v2=1', video_id,
            headers={'authorization': '63794e5461e4cfa046edfbdddfccc1ac16daffd2'})['info']['url']
        mp3_url = self._download_json(
            'https://musescore.com/api/jmuse', video_id,
            headers={'authorization': self._generate_auth_token(video_id)},
            query={'id': video_id, 'index': '0', 'type': 'mp3'})['info']['url']
        formats = [{
            'url': mp3_url,
            'ext': 'mp3',
@@ -57,7 +65,7 @@ class MuseScoreIE(InfoExtractor):
            'id': video_id,
            'formats': formats,
            'title': self._og_search_title(webpage),
            'description': self._og_search_description(webpage),
            'description': self._html_search_meta('description', webpage, 'description'),
            'thumbnail': self._og_search_thumbnail(webpage),
            'uploader': self._html_search_meta('musescore:author', webpage, 'uploader'),
            'creator': self._html_search_meta('musescore:composer', webpage, 'composer'),

@@ -50,7 +50,14 @@ class NewsPicksIE(InfoExtractor):
        webpage = self._download_webpage(url, video_id)

        fragment = self._search_nextjs_data(webpage, video_id)['props']['pageProps']['fragment']
        m3u8_url = traverse_obj(fragment, ('movie', 'movieUrl', {url_or_none}, {require('m3u8 URL')}))
        movie = fragment['movie']

        if traverse_obj(movie, ('viewable', {str})) == 'PARTIAL_FREE' and not traverse_obj(movie, ('canWatch', {bool})):
            self.report_warning(
                'Full video is for Premium members. Without cookies, '
                f'only the preview is downloaded. {self._login_hint()}', video_id)

        m3u8_url = traverse_obj(movie, ('movieUrl', {url_or_none}, {require('m3u8 URL')}))
        formats, subtitles = self._extract_m3u8_formats_and_subtitles(m3u8_url, video_id, 'mp4')

        return {
@@ -59,12 +66,12 @@ class NewsPicksIE(InfoExtractor):
            'series': traverse_obj(fragment, ('series', 'title', {str})),
            'series_id': series_id,
            'subtitles': subtitles,
            **traverse_obj(fragment, ('movie', {
            **traverse_obj(movie, {
                'title': ('title', {str}),
                'cast': ('relatedUsers', ..., 'displayName', {str}, filter, all, filter),
                'description': ('explanation', {clean_html}),
                'release_timestamp': ('onAirStartDate', {parse_iso8601}),
                'thumbnail': (('image', 'coverImageUrl'), {url_or_none}, any),
                'timestamp': ('published', {parse_iso8601}),
            })),
            }),
        }

@@ -503,7 +503,7 @@ class NhkForSchoolBangumiIE(InfoExtractor):
            'start_time': s,
            'end_time': e,
            'title': t,
        } for s, e, t in zip(start_time, end_time, chapter_titles)]
        } for s, e, t in zip(start_time, end_time, chapter_titles, strict=True)]

        return {
            'id': video_id,

@@ -129,7 +129,7 @@ class NownessSeriesIE(NownessBaseIE):
    }

    def _real_extract(self, url):
        display_id, series = self._api_request(url, 'series/getBySlug/%s')
        _, series = self._api_request(url, 'series/getBySlug/%s')
        entries = [self._extract_url_result(post) for post in series['posts']]
        series_title = None
        series_description = None

yt_dlp/extractor/onsen.py (new file, 151 lines)
@@ -0,0 +1,151 @@
import base64
import json

from .common import InfoExtractor
from ..networking.exceptions import HTTPError
from ..utils import (
    ExtractorError,
    clean_html,
    int_or_none,
    parse_qs,
    str_or_none,
    strftime_or_none,
    update_url,
    update_url_query,
    url_or_none,
)
from ..utils.traversal import traverse_obj


class OnsenIE(InfoExtractor):
    IE_NAME = 'onsen'
    IE_DESC = 'インターネットラジオステーション<音泉>'

    _BASE_URL = 'https://www.onsen.ag'
    _HEADERS = {'Referer': f'{_BASE_URL}/'}
    _NETRC_MACHINE = 'onsen'
    _VALID_URL = r'https?://(?:(?:share|www)\.)onsen\.ag/program/(?P<id>[^/?#]+)'
    _TESTS = [{
        'url': 'https://share.onsen.ag/program/onsenking?p=90&c=MTA0NjI',
        'info_dict': {
            'id': '10462',
            'ext': 'm4a',
            'title': '第SP回',
            'cast': 'count:3',
            'description': 'md5:de62c80a41c4c8d84da53a1ee681ad18',
            'display_id': 'MTA0NjI=',
            'media_type': 'sound',
            'section_start': 0,
            'series': '音泉キング「下野紘」のラジオ きみはもちろん、<音泉>ファミリーだよね?',
            'series_id': 'onsenking',
            'tags': 'count:2',
            'thumbnail': r're:https?://d3bzklg4lms4gh\.cloudfront\.net/program_info/image/default/production/.+',
            'upload_date': '20220627',
            'webpage_url': 'https://www.onsen.ag/program/onsenking?c=MTA0NjI=',
        },
    }, {
        'url': 'https://share.onsen.ag/program/girls-band-cry-radio?p=370&c=MTgwMDE',
        'info_dict': {
            'id': '18001',
            'ext': 'mp4',
            'title': '第4回',
            'cast': 'count:5',
            'description': 'md5:bbca8a389d99c90cbbce8f383c85fedd',
            'display_id': 'MTgwMDE=',
            'media_type': 'movie',
            'section_start': 0,
            'series': 'TVアニメ『ガールズバンドクライ』WEBラジオ「ガールズバンドクライ~ラジオにも全部ぶち込め。~」',
            'series_id': 'girls-band-cry-radio',
            'tags': 'count:3',
            'thumbnail': r're:https?://d3bzklg4lms4gh\.cloudfront\.net/program_info/image/default/production/.+',
            'upload_date': '20240425',
            'webpage_url': 'https://www.onsen.ag/program/girls-band-cry-radio?c=MTgwMDE=',
        },
        'skip': 'Only available for premium supporters',
    }, {
        'url': 'https://www.onsen.ag/program/uma',
        'info_dict': {
            'id': 'uma',
            'title': 'UMA YELL RADIO',
        },
        'playlist_mincount': 35,
    }]

    @staticmethod
    def _get_encoded_id(program):
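        # The 'c' URL parameter is the numeric program content ID, URL-safe base64-encoded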
        return base64.urlsafe_b64encode(str(program['id']).encode()).decode()

    def _perform_login(self, username, password):
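        # The signin endpoint answers 401 with an 'error' body on bad credentials,
        # so that status is accepted here and turned into a clean error below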
        sign_in = self._download_json(
            f'{self._BASE_URL}/web_api/signin', None, 'Logging in', headers={
                'Accept': 'application/json',
                'Content-Type': 'application/json',
            }, data=json.dumps({
                'session': {
                    'email': username,
                    'password': password,
                },
            }).encode(), expected_status=401)

        if sign_in.get('error'):
            raise ExtractorError('Invalid username or password', expected=True)

    def _real_extract(self, url):
        program_id = self._match_id(url)
        try:
            programs = self._download_json(
                f'{self._BASE_URL}/web_api/programs/{program_id}', program_id)
        except ExtractorError as e:
            if isinstance(e.cause, HTTPError) and e.cause.status == 404:
                raise ExtractorError('Invalid URL', expected=True)
            raise

        query = {k: v[-1] for k, v in parse_qs(url).items() if v}
        if 'c' not in query:
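            # No 'c' (episode) parameter: treat the URL as a whole program and
            # return a playlist with one entry per listed episode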
            entries = [
                self.url_result(update_url_query(url, {'c': self._get_encoded_id(program)}), OnsenIE)
                for program in traverse_obj(programs, ('contents', lambda _, v: v['id']))
            ]

            return self.playlist_result(
                entries, program_id, traverse_obj(programs, ('program_info', 'title', {clean_html})))

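        # 'c' in share URLs comes without base64 padding, so it is padded before decoding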
        raw_id = base64.urlsafe_b64decode(f'{query["c"]}===').decode()
        p_keys = ('contents', lambda _, v: v['id'] == int(raw_id))

        program = traverse_obj(programs, (*p_keys, any))
        if not program:
            raise ExtractorError(
                'This program is no longer available', expected=True)
        m3u8_url = traverse_obj(program, ('streaming_url', {url_or_none}))
        if not m3u8_url:
            self.raise_login_required(
                'This program is only available for premium supporters')

        display_id = self._get_encoded_id(program)
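        # The media URL embeds the episode date as YYMMDD right after the program slug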
        date_str = self._search_regex(
            rf'{program_id}0?(\d{{6}})', m3u8_url, 'date string', default=None)

        return {
            'display_id': display_id,
            'formats': self._extract_m3u8_formats(m3u8_url, raw_id, headers=self._HEADERS),
            'http_headers': self._HEADERS,
            'section_start': int_or_none(query.get('t', 0)),
            'upload_date': strftime_or_none(f'20{date_str}'),
            'webpage_url': f'{self._BASE_URL}/program/{program_id}?c={display_id}',
            **traverse_obj(program, {
                'id': ('id', {int}, {str_or_none}),
                'title': ('title', {clean_html}),
                'media_type': ('media_type', {str}),
                'thumbnail': ('poster_image_url', {url_or_none}, {update_url(query=None)}),
            }),
            **traverse_obj(programs, {
                'cast': (('performers', (*p_keys, 'guests')), ..., 'name', {str}, filter),
                'series_id': ('directory_name', {str}),
            }),
            **traverse_obj(programs, ('program_info', {
                'description': ('description', {clean_html}, filter),
                'series': ('title', {clean_html}),
                'tags': ('hashtag_list', ..., {str}, filter),
            })),
        }
@@ -1,201 +0,0 @@
import itertools

from .cbs import CBSBaseIE
from .common import InfoExtractor
from ..utils import (
    ExtractorError,
    int_or_none,
    url_or_none,
)


class ParamountPlusIE(CBSBaseIE):
    _VALID_URL = r'''(?x)
        (?:
            paramountplus:|
            https?://(?:www\.)?(?:
                paramountplus\.com/(?:shows|movies)/(?:video|[^/]+/video|[^/]+)/
        )(?P<id>[\w-]+))'''

    # All tests are blocked outside US
    _TESTS = [{
        'url': 'https://www.paramountplus.com/shows/video/Oe44g5_NrlgiZE3aQVONleD6vXc8kP0k/',
        'info_dict': {
            'id': 'Oe44g5_NrlgiZE3aQVONleD6vXc8kP0k',
            'ext': 'mp4',
            'title': 'CatDog - Climb Every CatDog/The Canine Mutiny',
            'description': 'md5:7ac835000645a69933df226940e3c859',
            'duration': 1426,
            'timestamp': 920264400,
            'upload_date': '19990301',
            'uploader': 'CBSI-NEW',
            'episode_number': 5,
            'thumbnail': r're:https?://.+\.jpg$',
            'season': 'Season 2',
            'chapters': 'count:3',
            'episode': 'Episode 5',
            'season_number': 2,
            'series': 'CatDog',
        },
        'params': {
            'skip_download': 'm3u8',
        },
    }, {
        'url': 'https://www.paramountplus.com/shows/video/6hSWYWRrR9EUTz7IEe5fJKBhYvSUfexd/',
        'info_dict': {
            'id': '6hSWYWRrR9EUTz7IEe5fJKBhYvSUfexd',
            'ext': 'mp4',
            'title': '7/23/21 WEEK IN REVIEW (Rep. Jahana Hayes/Howard Fineman/Sen. Michael Bennet/Sheera Frenkel & Cecilia Kang)',
            'description': 'md5:f4adcea3e8b106192022e121f1565bae',
            'duration': 2506,
            'timestamp': 1627063200,
            'upload_date': '20210723',
            'uploader': 'CBSI-NEW',
            'episode_number': 81,
            'thumbnail': r're:https?://.+\.jpg$',
            'season': 'Season 2',
            'chapters': 'count:4',
            'episode': 'Episode 81',
            'season_number': 2,
            'series': 'Tooning Out The News',
        },
        'params': {
            'skip_download': 'm3u8',
        },
    }, {
        'url': 'https://www.paramountplus.com/movies/video/vM2vm0kE6vsS2U41VhMRKTOVHyQAr6pC/',
        'info_dict': {
            'id': 'vM2vm0kE6vsS2U41VhMRKTOVHyQAr6pC',
            'ext': 'mp4',
            'title': 'Daddy\'s Home',
            'upload_date': '20151225',
            'description': 'md5:9a6300c504d5e12000e8707f20c54745',
            'uploader': 'CBSI-NEW',
            'timestamp': 1451030400,
            'thumbnail': r're:https?://.+\.jpg$',
            'chapters': 'count:0',
            'duration': 5761,
            'series': 'Paramount+ Movies',
        },
        'params': {
            'skip_download': 'm3u8',
        },
        'skip': 'DRM',
    }, {
        'url': 'https://www.paramountplus.com/movies/video/5EKDXPOzdVf9voUqW6oRuocyAEeJGbEc/',
        'info_dict': {
            'id': '5EKDXPOzdVf9voUqW6oRuocyAEeJGbEc',
            'ext': 'mp4',
            'uploader': 'CBSI-NEW',
            'description': 'md5:bc7b6fea84ba631ef77a9bda9f2ff911',
            'timestamp': 1577865600,
            'title': 'Sonic the Hedgehog',
            'upload_date': '20200101',
            'thumbnail': r're:https?://.+\.jpg$',
            'chapters': 'count:0',
            'duration': 5932,
            'series': 'Paramount+ Movies',
        },
        'params': {
            'skip_download': 'm3u8',
        },
        'skip': 'DRM',
    }, {
        'url': 'https://www.paramountplus.com/shows/the-real-world/video/mOVeHeL9ub9yWdyzSZFYz8Uj4ZBkVzQg/the-real-world-reunion/',
        'only_matching': True,
    }, {
        'url': 'https://www.paramountplus.com/shows/video/mOVeHeL9ub9yWdyzSZFYz8Uj4ZBkVzQg/',
        'only_matching': True,
    }, {
        'url': 'https://www.paramountplus.com/movies/video/W0VyStQqUnqKzJkrpSAIARuCc9YuYGNy/',
        'only_matching': True,
    }, {
        'url': 'https://www.paramountplus.com/movies/paw-patrol-the-movie/W0VyStQqUnqKzJkrpSAIARuCc9YuYGNy/',
        'only_matching': True,
    }]

    def _extract_video_info(self, content_id, mpx_acc=2198311517):
        items_data = self._download_json(
            f'https://www.paramountplus.com/apps-api/v2.0/androidtv/video/cid/{content_id}.json',
            content_id, query={
                'locale': 'en-us',
                'at': 'ABCXgPuoStiPipsK0OHVXIVh68zNys+G4f7nW9R6qH68GDOcneW6Kg89cJXGfiQCsj0=',
            }, headers=self.geo_verification_headers())

        asset_types = {
            item.get('assetType'): {
                'format': 'SMIL',
                'formats': 'M3U+none,MPEG4',  # '+none' specifies ProtectionScheme (no DRM)
            } for item in items_data['itemList']
        }
        item = items_data['itemList'][-1]

        info, error = {}, None
        metadata = {
            'title': item.get('title'),
            'series': item.get('seriesTitle'),
            'season_number': int_or_none(item.get('seasonNum')),
            'episode_number': int_or_none(item.get('episodeNum')),
            'duration': int_or_none(item.get('duration')),
            'thumbnail': url_or_none(item.get('thumbnail')),
        }
        try:
            info = self._extract_common_video_info(content_id, asset_types, mpx_acc, extra_info=metadata)
        except ExtractorError as e:
            error = e

        # Check for DRM formats to give appropriate error
        if not info.get('formats'):
            for query in asset_types.values():
                query['formats'] = 'MPEG-DASH,M3U,MPEG4'  # allows DRM formats

            try:
                drm_info = self._extract_common_video_info(content_id, asset_types, mpx_acc, extra_info=metadata)
            except ExtractorError:
                if error:
                    raise error from None
                raise
            if drm_info['formats']:
                self.report_drm(content_id)
            elif error:
                raise error

        return info


class ParamountPlusSeriesIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?paramountplus\.com/shows/(?P<id>[a-zA-Z0-9-_]+)/?(?:[#?]|$)'
    _TESTS = [{
        'url': 'https://www.paramountplus.com/shows/drake-josh',
        'playlist_mincount': 50,
        'info_dict': {
            'id': 'drake-josh',
        },
    }, {
        'url': 'https://www.paramountplus.com/shows/hawaii_five_0/',
        'playlist_mincount': 240,
        'info_dict': {
            'id': 'hawaii_five_0',
        },
    }, {
        'url': 'https://www.paramountplus.com/shows/spongebob-squarepants/',
        'playlist_mincount': 248,
        'info_dict': {
            'id': 'spongebob-squarepants',
        },
    }]

    def _entries(self, show_name):
        for page in itertools.count():
            show_json = self._download_json(
                f'https://www.paramountplus.com/shows/{show_name}/xhr/episodes/page/{page}/size/50/xs/0/season/0', show_name)
            if not show_json.get('success'):
                return
            for episode in show_json['result']['data']:
                yield self.url_result(
                    'https://www.paramountplus.com{}'.format(episode['url']),
                    ie=ParamountPlusIE.ie_key(), video_id=episode['content_id'])

    def _real_extract(self, url):
        show_name = self._match_id(url)
        return self.playlist_result(self._entries(show_name), playlist_id=show_name)
Some files were not shown because too many files have changed in this diff.