Mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2026-01-16 03:41:44 +00:00)

Compare commits: 2025.08.22 ... 2025.09.26 (66 commits)
Commits:

88e2a2de8e
12b57d2858
b7b7910d96
50e452fd7d
94c5622be9
7df5acc546
4429fd0450
2e81e298cd
7f5d9f8543
f8750504c2
8821682f15
08d7899683
98b6b0d339
bf5d18016b
4bc19adc87
b2c01d0498
e123a48f11
820c6e2445
677997d84e
b81e9272dc
df4b4e8ccf
f3829463c7
ae3923b6b2
8ab262c66b
e2d37bcc8e
eb4b3a5fc7
65e90aea29
17bfaa53ed
8cb037c0b0
7d9e48b22a
f5cb721185
83b8409366
ba80446855
22ea0688ed
5c1abcdc49
3d9a88bd8e
9def9a4b0e
679587dac7
a1c98226a4
c8ede5f34d
a183837ec8
067062bb87
8597a4331e
48a214bef4
6a763a55d8
e6e6b51214
7c9b10ebc8
cd94e70040
7c27965ff6
50136eeeb3
603acdff07
d925e92b71
ed24640943
76bb46002c
1e28f6bf74
0b51005b48
223baa81f6
18fe696df9
487a90c8ef
8cd37b85d4
5c7ad68ff1
1ddbd033f0
fec30c56f0
d6950c27af
3bd9154412
8f4a908300
.github/ISSUE_TEMPLATE/1_broken_site.yml (2 changes)

@@ -24,6 +24,8 @@ body:
           required: true
         - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
     id: region
.github/ISSUE_TEMPLATE/2_site_support_request.yml (2 changes)

@@ -24,6 +24,8 @@ body:
           required: true
         - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
   - type: input
     id: region
.github/ISSUE_TEMPLATE/3_site_feature_request.yml (2 changes)

@@ -22,6 +22,8 @@ body:
           required: true
         - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
     id: region
.github/ISSUE_TEMPLATE/4_bug_report.yml (2 changes)

@@ -20,6 +20,8 @@ body:
           required: true
         - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
   - type: textarea
     id: description
     attributes:
.github/ISSUE_TEMPLATE/5_feature_request.yml (2 changes)

@@ -22,6 +22,8 @@ body:
           required: true
         - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
   - type: textarea
     id: description
     attributes:
.github/ISSUE_TEMPLATE/6_question.yml (2 changes)

@@ -28,6 +28,8 @@ body:
           required: true
         - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar questions **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
   - type: textarea
     id: question
     attributes:
.github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml (2 changes)

@@ -20,6 +20,8 @@ body:
           required: true
         - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
     id: region
.github/ISSUE_TEMPLATE_tmpl/2_site_support_request.yml (2 changes)

@@ -20,6 +20,8 @@ body:
           required: true
         - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
   - type: input
     id: region
.github/ISSUE_TEMPLATE_tmpl/3_site_feature_request.yml (2 changes)

@@ -18,6 +18,8 @@ body:
           required: true
         - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input
     id: region
.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml (2 changes)

@@ -16,6 +16,8 @@ body:
           required: true
         - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
   - type: textarea
     id: description
     attributes:
.github/ISSUE_TEMPLATE_tmpl/5_feature_request.yml (2 changes)

@@ -18,6 +18,8 @@ body:
           required: true
         - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
   - type: textarea
     id: description
     attributes:
.github/ISSUE_TEMPLATE_tmpl/6_question.yml (2 changes)

@@ -24,6 +24,8 @@ body:
           required: true
         - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar questions **including closed ones**. DO NOT post duplicates
           required: true
+        - label: I've read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated
+          required: true
   - type: textarea
     id: question
     attributes:
.github/PULL_REQUEST_TEMPLATE.md (1 change)

@@ -33,6 +33,7 @@ Fixes #
 ### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check those that apply and remove the others:
 - [ ] I am the original author of the code in this PR, and I am willing to release it under [Unlicense](http://unlicense.org/)
 - [ ] I am not the original author of the code in this PR, but it is in the public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)
+- [ ] I have read the [policy against AI/LLM contributions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#automated-contributions-ai--llm-policy) and understand I may be blocked from the repository if it is violated

 ### What is the purpose of your *pull request*? Check those that apply and remove the others:
 - [ ] Fix or improvement to an extractor (Make sure to add/update tests)
.github/actionlint.yml (new file, 28 lines)

@@ -0,0 +1,28 @@
+self-hosted-runner:
+  labels:
+    # Workaround for the outdated runner list in actionlint v1.7.7
+    # Ref: https://github.com/rhysd/actionlint/issues/533
+    - windows-11-arm
+
+config-variables:
+  - KEEP_CACHE_WARM
+  - PUSH_VERSION_COMMIT
+  - UPDATE_TO_VERIFICATION
+  - PYPI_PROJECT
+  - PYPI_SUFFIX
+  - NIGHTLY_PYPI_PROJECT
+  - NIGHTLY_PYPI_SUFFIX
+  - NIGHTLY_ARCHIVE_REPO
+  - BUILD_NIGHTLY
+  - MASTER_PYPI_PROJECT
+  - MASTER_PYPI_SUFFIX
+  - MASTER_ARCHIVE_REPO
+  - BUILD_MASTER
+  - ISSUE_LOCKDOWN
+  - SANITIZE_COMMENT
+
+paths:
+  .github/workflows/build.yml:
+    ignore:
+      # SC1090 "Can't follow non-constant source": ignore when using `source` to activate venv
+      - '.+SC1090.+'
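The `config-variables` list enumerates every `vars.*` name the workflows are allowed to reference; actionlint flags any other. A minimal sketch of what that check covers, cross-checking the workflows against the declared set (assumes it is run from the repo root; this is not part of the repo itself):

```python
# Sketch: list every ${{ vars.* }} used by the workflows and compare it with
# the config-variables declared in .github/actionlint.yml above.
import pathlib
import re

DECLARED = {
    'KEEP_CACHE_WARM', 'PUSH_VERSION_COMMIT', 'UPDATE_TO_VERIFICATION',
    'PYPI_PROJECT', 'PYPI_SUFFIX', 'NIGHTLY_PYPI_PROJECT', 'NIGHTLY_PYPI_SUFFIX',
    'NIGHTLY_ARCHIVE_REPO', 'BUILD_NIGHTLY', 'MASTER_PYPI_PROJECT',
    'MASTER_PYPI_SUFFIX', 'MASTER_ARCHIVE_REPO', 'BUILD_MASTER',
    'ISSUE_LOCKDOWN', 'SANITIZE_COMMENT',
}
used = set()
for workflow in pathlib.Path('.github/workflows').glob('*.yml'):
    used |= set(re.findall(r'vars\.([A-Za-z_][A-Za-z0-9_]*)', workflow.read_text()))
print(sorted(used - DECLARED))  # any name printed here would be flagged by actionlint
```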
.github/workflows/build.yml (446 changes)
@@ -9,13 +9,19 @@ on:
       required: false
       default: stable
       type: string
+    origin:
+      required: true
+      type: string
     unix:
       default: true
       type: boolean
-    linux_static:
+    linux:
       default: true
       type: boolean
-    linux_arm:
+    linux_armv7l:
+      default: true
+      type: boolean
+    musllinux:
       default: true
       type: boolean
     macos:
@@ -24,10 +30,6 @@ on:
     windows:
       default: true
       type: boolean
-    origin:
-      required: false
-      default: ''
-      type: string
   secrets:
     GPG_SIGNING_KEY:
       required: false
@@ -37,7 +39,9 @@ on:
     version:
       description: |
         VERSION: yyyy.mm.dd[.rev] or rev
-      required: true
+        (default: auto-generated)
+      required: false
+      default: ''
       type: string
     channel:
       description: |
@@ -49,12 +53,16 @@ on:
       description: yt-dlp, yt-dlp.tar.gz
       default: true
       type: boolean
-    linux_static:
-      description: yt-dlp_linux
+    linux:
+      description: yt-dlp_linux, yt-dlp_linux.zip, yt-dlp_linux_aarch64, yt-dlp_linux_aarch64.zip
       default: true
       type: boolean
-    linux_arm:
-      description: yt-dlp_linux_aarch64, yt-dlp_linux_armv7l
+    linux_armv7l:
+      description: yt-dlp_linux_armv7l.zip
+      default: true
+      type: boolean
+    musllinux:
+      description: yt-dlp_musllinux, yt-dlp_musllinux.zip, yt-dlp_musllinux_aarch64, yt-dlp_musllinux_aarch64.zip
       default: true
       type: boolean
     macos:
@@ -65,13 +73,6 @@ on:
       description: yt-dlp.exe, yt-dlp_win.zip, yt-dlp_x86.exe, yt-dlp_win_x86.zip, yt-dlp_arm64.exe, yt-dlp_win_arm64.zip
       default: true
       type: boolean
-    origin:
-      description: Origin
-      required: false
-      default: 'current repo'
-      type: choice
-      options:
-      - 'current repo'

 permissions:
   contents: read
@@ -80,44 +81,151 @@ jobs:
   process:
     runs-on: ubuntu-latest
     outputs:
-      origin: ${{ steps.process_origin.outputs.origin }}
+      origin: ${{ steps.process_inputs.outputs.origin }}
+      timestamp: ${{ steps.process_inputs.outputs.timestamp }}
+      version: ${{ steps.process_inputs.outputs.version }}
+      linux_matrix: ${{ steps.linux_matrix.outputs.matrix }}
+
     steps:
-      - name: Process origin
-        id: process_origin
+      - name: Process inputs
+        id: process_inputs
+        env:
+          INPUTS: ${{ toJSON(inputs) }}
+          REPOSITORY: ${{ github.repository }}
+        shell: python
         run: |
-          echo "origin=${{ inputs.origin == 'current repo' && github.repository || inputs.origin }}" | tee "$GITHUB_OUTPUT"
+          import datetime as dt
+          import json
+          import os
+          import re
+
+          INPUTS = json.loads(os.environ['INPUTS'])
+          timestamp = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')
+          version = INPUTS.get('version')
+          if version and '.' not in version:
+              # build.yml was dispatched with only a revision as the version input value
+              version_parts = [*timestamp.split('.')[:3], version]
+          elif not version:
+              # build.yml was dispatched without any version input value, so include .HHMMSS revision
+              version_parts = timestamp.split('.')[:4]
+          else:
+              # build.yml was called or dispatched with a complete version input value
+              version_parts = version.split('.')
+          assert all(re.fullmatch(r'[0-9]+', part) for part in version_parts), 'Version must be numeric'
+          outputs = {
+              'origin': INPUTS.get('origin') or os.environ['REPOSITORY'],
+              'timestamp': timestamp,
+              'version': '.'.join(version_parts),
+          }
+          print(json.dumps(outputs, indent=2))
+          with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
+              f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
+
+      - name: Build Linux matrix
+        id: linux_matrix
+        env:
+          INPUTS: ${{ toJSON(inputs) }}
+          PYTHON_VERSION: '3.13'
+          UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
+        shell: python
+        run: |
+          import json
+          import os
+
+          EXE_MAP = {
+              'linux': [{
+                  'os': 'linux',
+                  'arch': 'x86_64',
+                  'runner': 'ubuntu-24.04',
+              }, {
+                  'os': 'linux',
+                  'arch': 'aarch64',
+                  'runner': 'ubuntu-24.04-arm',
+              }],
+              'linux_armv7l': [{
+                  'os': 'linux',
+                  'arch': 'armv7l',
+                  'runner': 'ubuntu-24.04-arm',
+                  'qemu_platform': 'linux/arm/v7',
+                  'onefile': False,
+                  'cache_requirements': True,
+                  'update_to': 'yt-dlp/yt-dlp@2023.03.04',
+              }],
+              'musllinux': [{
+                  'os': 'musllinux',
+                  'arch': 'x86_64',
+                  'runner': 'ubuntu-24.04',
+              }, {
+                  'os': 'musllinux',
+                  'arch': 'aarch64',
+                  'runner': 'ubuntu-24.04-arm',
+              }],
+          }
+          INPUTS = json.loads(os.environ['INPUTS'])
+          matrix = [exe for key, group in EXE_MAP.items() for exe in group if INPUTS.get(key)]
+          if not matrix:
+              # If we send an empty matrix when no linux inputs are given, the entire workflow fails
+              matrix = [EXE_MAP['linux'][0]]
+          for exe in matrix:
+              exe['exe'] = '_'.join(filter(None, (
+                  'yt-dlp',
+                  exe['os'],
+                  exe['arch'] != 'x86_64' and exe['arch'],
+              )))
+              exe.setdefault('qemu_platform', None)
+              exe.setdefault('onefile', True)
+              exe.setdefault('onedir', True)
+              exe.setdefault('cache_requirements', False)
+              exe.setdefault('python_version', os.environ['PYTHON_VERSION'])
+              exe.setdefault('update_to', os.environ['UPDATE_TO'])
+          if not any(INPUTS.get(key) for key in EXE_MAP):
+              print('skipping linux job')
+          else:
+              print(json.dumps(matrix, indent=2))
+              with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
+                  f.write(f'matrix={json.dumps(matrix)}')
+
   unix:
     needs: process
     if: inputs.unix
     runs-on: ubuntu-latest
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
+
     steps:
       - uses: actions/checkout@v4
         with:
           fetch-depth: 0 # Needed for changelog
-      - uses: actions/setup-python@v5
+
+      - uses: actions/setup-python@v6
         with:
           python-version: "3.10"
+
       - name: Install Requirements
         run: |
           sudo apt -y install zip pandoc man sed
+
       - name: Prepare
         run: |
-          python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
+          python devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
           python devscripts/update_changelog.py -vv
           python devscripts/make_lazy_extractors.py
+
       - name: Build Unix platform-independent binary
         run: |
           make all tar
+
       - name: Verify --update-to
         if: vars.UPDATE_TO_VERIFICATION
         run: |
           chmod +x ./yt-dlp
           cp ./yt-dlp ./yt-dlp_downgraded
           version="$(./yt-dlp --version)"
-          ./yt-dlp_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
+          ./yt-dlp_downgraded -v --update-to "${UPDATE_TO}"
           downgraded_version="$(./yt-dlp_downgraded --version)"
-          [[ "$version" != "$downgraded_version" ]]
+          [[ "${version}" != "${downgraded_version}" ]]
+
       - name: Upload artifacts
         uses: actions/upload-artifact@v4
         with:
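The version derivation in the new `Process inputs` step can be exercised outside CI; this sketch copies its logic verbatim and feeds it the three shapes the comments describe (the sample input values are hypothetical):

```python
# Standalone sketch of the version logic in the "Process inputs" step above.
import datetime as dt
import re

def derive_version(version_input, timestamp):
    if version_input and '.' not in version_input:
        # dispatched with only a revision, e.g. '123' -> 'yyyy.mm.dd.123'
        parts = [*timestamp.split('.')[:3], version_input]
    elif not version_input:
        # no version given: date plus an HHMMSS revision, e.g. 'yyyy.mm.dd.HHMMSS'
        parts = timestamp.split('.')[:4]
    else:
        # a complete version passes through unchanged
        parts = version_input.split('.')
    assert all(re.fullmatch(r'[0-9]+', p) for p in parts), 'Version must be numeric'
    return '.'.join(parts)

ts = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S.%f')
print(derive_version('', ts))            # auto-generated
print(derive_version('123', ts))         # revision only
print(derive_version('2025.09.26', ts))  # complete version, unchanged
```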
@@ -127,99 +235,74 @@ jobs:
           yt-dlp.tar.gz
         compression-level: 0

-  linux_static:
+  linux:
+    name: ${{ matrix.os }} (${{ matrix.arch }})
+    if: inputs.linux || inputs.linux_armv7l || inputs.musllinux
     needs: process
-    if: inputs.linux_static
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Build static executable
-        env:
-          channel: ${{ inputs.channel }}
-          origin: ${{ needs.process.outputs.origin }}
-          version: ${{ inputs.version }}
-        run: |
-          mkdir ~/build
-          cd bundle/docker
-          docker compose up --build static
-          sudo chown "${USER}:docker" ~/build/yt-dlp_linux
-      - name: Verify --update-to
-        if: vars.UPDATE_TO_VERIFICATION
-        run: |
-          chmod +x ~/build/yt-dlp_linux
-          cp ~/build/yt-dlp_linux ~/build/yt-dlp_linux_downgraded
-          version="$(~/build/yt-dlp_linux --version)"
-          ~/build/yt-dlp_linux_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
-          downgraded_version="$(~/build/yt-dlp_linux_downgraded --version)"
-          [[ "$version" != "$downgraded_version" ]]
-      - name: Upload artifacts
-        uses: actions/upload-artifact@v4
-        with:
-          name: build-bin-${{ github.job }}
-          path: |
-            ~/build/yt-dlp_linux
-          compression-level: 0
-
-  linux_arm:
-    needs: process
-    if: inputs.linux_arm
-    permissions:
-      contents: read
-      packages: write # for creating cache
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.runner }}
     strategy:
+      fail-fast: false
       matrix:
-        architecture:
-          - armv7
-          - aarch64
+        include: ${{ fromJSON(needs.process.outputs.linux_matrix) }}
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      EXE_NAME: ${{ matrix.exe }}
+      PYTHON_VERSION: ${{ matrix.python_version }}
+      UPDATE_TO: ${{ (vars.UPDATE_TO_VERIFICATION && matrix.update_to) || '' }}
+      SKIP_ONEDIR_BUILD: ${{ (!matrix.onedir && '1') || '' }}
+      SKIP_ONEFILE_BUILD: ${{ (!matrix.onefile && '1') || '' }}

     steps:
       - uses: actions/checkout@v4
-        with:
-          path: ./repo
-      - name: Virtualized Install, Prepare & Build
-        uses: yt-dlp/run-on-arch-action@v3
-        with:
-          # Ref: https://github.com/uraimo/run-on-arch-action/issues/55
-          env: |
-            GITHUB_WORKFLOW: build
-          githubToken: ${{ github.token }} # To cache image
-          arch: ${{ matrix.architecture }}
-          distro: ubuntu20.04 # Standalone executable should be built on minimum supported OS
-          dockerRunArgs: --volume "${PWD}/repo:/repo"
-          install: | # Installing Python 3.10 from the Deadsnakes repo raises errors
-            apt update
-            apt -y install zlib1g-dev libffi-dev python3.9 python3.9-dev python3.9-distutils python3-pip \
-                python3-secretstorage # Cannot build cryptography wheel in virtual armv7 environment
-            python3.9 -m pip install -U pip wheel 'setuptools>=71.0.2'
-            # XXX: Keep this in sync with pyproject.toml (it can't be accessed at this stage) and exclude secretstorage
-            python3.9 -m pip install -U Pyinstaller mutagen pycryptodomex brotli certifi cffi \
-                'requests>=2.32.2,<3' 'urllib3>=2.0.2,<3' 'websockets>=13.0'
-          run: |
-            cd repo
-            python3.9 devscripts/install_deps.py -o --include build
-            python3.9 devscripts/install_deps.py --include pyinstaller # Cached versions may be out of date
-            python3.9 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
-            python3.9 devscripts/make_lazy_extractors.py
-            python3.9 -m bundle.pyinstaller
-
-            if ${{ vars.UPDATE_TO_VERIFICATION && 'true' || 'false' }}; then
-              arch="${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}"
-              chmod +x ./dist/yt-dlp_linux_${arch}
-              cp ./dist/yt-dlp_linux_${arch} ./dist/yt-dlp_linux_${arch}_downgraded
-              version="$(./dist/yt-dlp_linux_${arch} --version)"
-              ./dist/yt-dlp_linux_${arch}_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
-              downgraded_version="$(./dist/yt-dlp_linux_${arch}_downgraded --version)"
-              [[ "$version" != "$downgraded_version" ]]
-            fi
+
+      - name: Cache requirements
+        if: matrix.cache_requirements
+        id: cache-venv
+        uses: actions/cache@v4
+        env:
+          SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
+        with:
+          path: |
+            venv
+          key: cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
+          restore-keys: |
+            cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-${{ github.ref }}-
+            cache-reqs-${{ matrix.os }}_${{ matrix.arch }}-
+
+      - name: Set up QEMU
+        if: matrix.qemu_platform
+        uses: docker/setup-qemu-action@v3
+        with:
+          platforms: ${{ matrix.qemu_platform }}
+
+      - name: Build executable
+        env:
+          SERVICE: ${{ matrix.os }}_${{ matrix.arch }}
+        run: |
+          mkdir -p ./venv
+          mkdir -p ./dist
+          pushd bundle/docker
+          docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"
+          popd
+          if [[ -z "${SKIP_ONEFILE_BUILD}" ]]; then
+            sudo chown "${USER}:docker" "./dist/${EXE_NAME}"
+          fi
+
+      - name: Verify executable in container
+        env:
+          SERVICE: ${{ matrix.os }}_${{ matrix.arch }}_verify
+        run: |
+          cd bundle/docker
+          docker compose up --build --exit-code-from "${SERVICE}" "${SERVICE}"

       - name: Upload artifacts
         uses: actions/upload-artifact@v4
         with:
-          name: build-bin-linux_${{ matrix.architecture }}
-          path: | # run-on-arch-action designates armv7l as armv7
-            repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}
+          name: build-bin-${{ matrix.os }}_${{ matrix.arch }}
+          path: |
+            dist/${{ matrix.exe }}*
           compression-level: 0

   macos:
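For a concrete feel of what the merged `linux` job consumes, here is the matrix step's exe-naming logic applied to a sample dispatch (input values hypothetical; `EXE_MAP` entries abbreviated to os/arch):

```python
# Sample expansion of the matrix step's exe naming for a hypothetical dispatch
# with linux=true, musllinux=true, linux_armv7l=false.
EXE_MAP = {
    'linux': [{'os': 'linux', 'arch': 'x86_64'}, {'os': 'linux', 'arch': 'aarch64'}],
    'linux_armv7l': [{'os': 'linux', 'arch': 'armv7l'}],
    'musllinux': [{'os': 'musllinux', 'arch': 'x86_64'}, {'os': 'musllinux', 'arch': 'aarch64'}],
}
inputs = {'linux': True, 'linux_armv7l': False, 'musllinux': True}
matrix = [exe for key, group in EXE_MAP.items() for exe in group if inputs.get(key)]
for exe in matrix:
    # x86_64 is the default arch, so it is omitted from the artifact name
    exe['exe'] = '_'.join(filter(None, ('yt-dlp', exe['os'], exe['arch'] != 'x86_64' and exe['arch'])))
print([exe['exe'] for exe in matrix])
# ['yt-dlp_linux', 'yt-dlp_linux_aarch64', 'yt-dlp_musllinux', 'yt-dlp_musllinux_aarch64']
```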
@@ -227,22 +310,29 @@ jobs:
     if: inputs.macos
     permissions:
       contents: read
-      actions: write # For cleaning up cache
     runs-on: macos-14
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
+
     steps:
       - uses: actions/checkout@v4
       # NB: Building universal2 does not work with python from actions/setup-python
-      - name: Restore cached requirements
-        id: restore-cache
-        uses: actions/cache/restore@v4
+
+      - name: Cache requirements
+        id: cache-venv
+        uses: actions/cache@v4
         env:
           SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
         with:
           path: |
             ~/yt-dlp-build-venv
-          key: cache-reqs-${{ github.job }}-${{ github.ref }}
+          key: cache-reqs-${{ github.job }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
+          restore-keys: |
+            cache-reqs-${{ github.job }}-${{ github.ref }}-
+            cache-reqs-${{ github.job }}-
+
       - name: Install Requirements
         run: |
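The switch from a fixed cache key to a timestamped key plus `restore-keys` replaces the explicit delete-then-save dance (removed further below) with append-and-fall-back. A rough model of the resolution, assuming actions/cache's documented behavior of exact match first and then prefix matching against newest entries:

```python
# Rough model of actions/cache key resolution under the new layered scheme.
def resolve(key, restore_keys, saved_entries):
    # saved_entries: existing cache keys, newest first
    if key in saved_entries:
        return key  # exact hit
    for prefix in restore_keys:
        for entry in saved_entries:
            if entry.startswith(prefix):
                return entry  # partial hit on the most specific prefix
    return None

saved = [
    'cache-reqs-macos-refs/heads/master-2025.09.26.221530.000001',
    'cache-reqs-macos-refs/heads/master-2025.09.20.101010.000001',
]
print(resolve(
    'cache-reqs-macos-refs/heads/master-2025.09.27.000000.000001',  # today's key (miss)
    ['cache-reqs-macos-refs/heads/master-', 'cache-reqs-macos-'],
    saved,
))  # falls back to the newest entry for the same ref
```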
@@ -287,7 +377,7 @@ jobs:

       - name: Prepare
         run: |
-          python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
+          python3 devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"
           python3 devscripts/make_lazy_extractors.py
       - name: Build
         run: |
@@ -302,7 +392,7 @@ jobs:
           chmod +x ./dist/yt-dlp_macos
           cp ./dist/yt-dlp_macos ./dist/yt-dlp_macos_downgraded
           version="$(./dist/yt-dlp_macos --version)"
-          ./dist/yt-dlp_macos_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
+          ./dist/yt-dlp_macos_downgraded -v --update-to "${UPDATE_TO}"
           downgraded_version="$(./dist/yt-dlp_macos_downgraded --version)"
           [[ "$version" != "$downgraded_version" ]]
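All the `Verify --update-to` steps follow the same pattern: copy the fresh binary, downgrade the copy to a pinned release, and require the two reported versions to differ. A Python transliteration of the macOS shell step above (same paths and tag):

```python
# Sketch: the downgrade-verification pattern, transliterated from the shell step.
import shutil
import subprocess

def version_of(exe):
    return subprocess.run([exe, '--version'], capture_output=True, text=True, check=True).stdout.strip()

shutil.copy('./dist/yt-dlp_macos', './dist/yt-dlp_macos_downgraded')
before = version_of('./dist/yt-dlp_macos')
# downgrade the copy to the pinned release via the built-in updater
subprocess.run(['./dist/yt-dlp_macos_downgraded', '-v', '--update-to', 'yt-dlp/yt-dlp@2025.09.05'], check=True)
# a downgrade that silently did nothing would fail this check
assert before != version_of('./dist/yt-dlp_macos_downgraded')
```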
@@ -315,27 +405,12 @@ jobs:
           dist/yt-dlp_macos.zip
         compression-level: 0

-      - name: Cleanup cache
-        if: steps.restore-cache.outputs.cache-hit == 'true'
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          cache_key: cache-reqs-${{ github.job }}-${{ github.ref }}
-        run: |
-          gh cache delete "${cache_key}"
-
-      - name: Cache requirements
-        uses: actions/cache/save@v4
-        with:
-          path: |
-            ~/yt-dlp-build-venv
-          key: cache-reqs-${{ github.job }}-${{ github.ref }}
-
   windows:
+    name: windows (${{ matrix.arch }})
     needs: process
     if: inputs.windows
     permissions:
       contents: read
-      actions: write # For cleaning up cache
     runs-on: ${{ matrix.runner }}
     strategy:
       fail-fast: false
@@ -344,66 +419,101 @@ jobs:
         - arch: 'x64'
           runner: windows-2025
          python_version: '3.10'
-          suffix: ''
+          platform_tag: win_amd64
+          pyi_version: '6.16.0'
+          pyi_tag: '2025.09.13.221251'
+          pyi_hash: b6496c7630c3afe66900cfa824e8234a8c2e2c81704bd7facd79586abc76c0e5
         - arch: 'x86'
           runner: windows-2025
           python_version: '3.10'
-          suffix: '_x86'
+          platform_tag: win32
+          pyi_version: '6.16.0'
+          pyi_tag: '2025.09.13.221251'
+          pyi_hash: 2d881843580efdc54f3523507fc6d9c5b6051ee49c743a6d9b7003ac5758c226
         - arch: 'arm64'
           runner: windows-11-arm
           python_version: '3.13' # arm64 only has Python >= 3.11 available
-          suffix: '_arm64'
+          platform_tag: win_arm64
+          pyi_version: '6.16.0'
+          pyi_tag: '2025.09.13.221251'
+          pyi_hash: 4250c9085e34a95c898f3ee2f764914fc36ec59f0d97c28e6a75fcf21f7b144f
+    env:
+      CHANNEL: ${{ inputs.channel }}
+      ORIGIN: ${{ needs.process.outputs.origin }}
+      VERSION: ${{ needs.process.outputs.version }}
+      SUFFIX: ${{ (matrix.arch != 'x64' && format('_{0}', matrix.arch)) || '' }}
+      UPDATE_TO: yt-dlp/yt-dlp@2025.09.05
+      BASE_CACHE_KEY: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}
+      PYI_REPO: https://github.com/yt-dlp/Pyinstaller-Builds
+      PYI_WHEEL: pyinstaller-${{ matrix.pyi_version }}-py3-none-${{ matrix.platform_tag }}.whl
+
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
         with:
           python-version: ${{ matrix.python_version }}
           architecture: ${{ matrix.arch }}
-      - name: Restore cached requirements
-        id: restore-cache
+
+      - name: Cache requirements
+        id: cache-venv
         if: matrix.arch == 'arm64'
-        uses: actions/cache/restore@v4
+        uses: actions/cache@v4
         env:
           SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
         with:
           path: |
             /yt-dlp-build-venv
-          key: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}-${{ github.ref }}
+          key: ${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-${{ needs.process.outputs.timestamp }}
+          restore-keys: |
+            ${{ env.BASE_CACHE_KEY }}-${{ github.ref }}-
+            ${{ env.BASE_CACHE_KEY }}-
+
       - name: Install Requirements
+        env:
+          ARCH: ${{ matrix.arch }}
+          PYI_URL: ${{ env.PYI_REPO }}/releases/download/${{ matrix.pyi_tag }}/${{ env.PYI_WHEEL }}
+          PYI_HASH: ${{ matrix.pyi_hash }}
+        shell: pwsh
         run: |
           python -m venv /yt-dlp-build-venv
           /yt-dlp-build-venv/Scripts/Activate.ps1
+          python -m pip install -U pip
+          # Install custom PyInstaller build and verify hash
+          mkdir /pyi-wheels
+          python -m pip download -d /pyi-wheels --no-deps --require-hashes "pyinstaller@${Env:PYI_URL}#sha256=${Env:PYI_HASH}"
+          python -m pip install --force-reinstall -U "/pyi-wheels/${Env:PYI_WHEEL}"
           python devscripts/install_deps.py -o --include build
-          python devscripts/install_deps.py ${{ (matrix.arch != 'x86' && '--include curl-cffi') || '' }}
-          # Use custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
-          python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/${{ matrix.arch }}/pyinstaller-6.15.0-py3-none-any.whl"
+          if ("${Env:ARCH}" -eq "x86") {
+            python devscripts/install_deps.py
+          } else {
+            python devscripts/install_deps.py --include curl-cffi
+          }
+
       - name: Prepare
+        shell: pwsh
         run: |
-          python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
+          python devscripts/update-version.py -c "${Env:CHANNEL}" -r "${Env:ORIGIN}" "${Env:VERSION}"
           python devscripts/make_lazy_extractors.py
+
       - name: Build
+        shell: pwsh
         run: |
           /yt-dlp-build-venv/Scripts/Activate.ps1
           python -m bundle.pyinstaller
           python -m bundle.pyinstaller --onedir
-          Compress-Archive -Path ./dist/yt-dlp${{ matrix.suffix }}/* -DestinationPath ./dist/yt-dlp_win${{ matrix.suffix }}.zip
+          Compress-Archive -Path ./dist/yt-dlp${Env:SUFFIX}/* -DestinationPath ./dist/yt-dlp_win${Env:SUFFIX}.zip
+
       - name: Verify --update-to
         if: vars.UPDATE_TO_VERIFICATION
+        shell: pwsh
         run: |
-          foreach ($name in @("yt-dlp${{ matrix.suffix }}")) {
+          $name = "yt-dlp${Env:SUFFIX}"
           Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
           $version = & "./dist/${name}.exe" --version
-          & "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2025.08.20
+          & "./dist/${name}_downgraded.exe" -v --update-to "${Env:UPDATE_TO}"
           $downgraded_version = & "./dist/${name}_downgraded.exe" --version
           if ($version -eq $downgraded_version) {
             exit 1
           }
-          }

       - name: Upload artifacts
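The new install flow pins PyInstaller by URL and sha256: `pip download --require-hashes` refuses the wheel unless its digest matches `pyi_hash`. The underlying check is just a file-digest comparison; a sketch, with the wheel name and digest taken from the x64 matrix entry above (assumes the wheel has already been fetched locally):

```python
# Sketch of the sha256 check that --require-hashes enforces for the pinned wheel.
import hashlib

def sha256_of(path):
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            digest.update(chunk)
    return digest.hexdigest()

EXPECTED = 'b6496c7630c3afe66900cfa824e8234a8c2e2c81704bd7facd79586abc76c0e5'
WHEEL = 'pyinstaller-6.16.0-py3-none-win_amd64.whl'
assert sha256_of(WHEEL) == EXPECTED, 'wheel does not match the pinned sha256'
```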
@@ -411,34 +521,16 @@ jobs:
         with:
           name: build-bin-${{ github.job }}-${{ matrix.arch }}
           path: |
-            dist/yt-dlp${{ matrix.suffix }}.exe
-            dist/yt-dlp_win${{ matrix.suffix }}.zip
+            dist/yt-dlp${{ env.SUFFIX }}.exe
+            dist/yt-dlp_win${{ env.SUFFIX }}.zip
           compression-level: 0

-      - name: Cleanup cache
-        if: |
-          matrix.arch == 'arm64' && steps.restore-cache.outputs.cache-hit == 'true'
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          cache_key: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}-${{ github.ref }}
-        run: |
-          gh cache delete "${cache_key}"
-
-      - name: Cache requirements
-        if: matrix.arch == 'arm64'
-        uses: actions/cache/save@v4
-        with:
-          path: |
-            /yt-dlp-build-venv
-          key: cache-reqs-${{ github.job }}_${{ matrix.arch }}-${{ matrix.python_version }}-${{ github.ref }}
-
   meta_files:
     if: always() && !cancelled()
     needs:
       - process
       - unix
-      - linux_static
-      - linux_arm
+      - linux
       - macos
       - windows
     runs-on: ubuntu-latest
@@ -469,38 +561,38 @@ jobs:
            lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
            lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
            lock 2024.10.22 py2exe .+
-           lock 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
            lock 2024.10.22 zip Python 3\.8
            lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
            lock 2025.08.11 darwin_legacy_exe .+
+           lock 2025.08.27 linux_armv7l_exe .+
            lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
            lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
            lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
            lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
-           lockV2 yt-dlp/yt-dlp 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
            lockV2 yt-dlp/yt-dlp 2024.10.22 zip Python 3\.8
            lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
            lockV2 yt-dlp/yt-dlp 2025.08.11 darwin_legacy_exe .+
+           lockV2 yt-dlp/yt-dlp 2025.08.27 linux_armv7l_exe .+
            lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
            lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
            lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
-           lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
            lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 zip Python 3\.8
            lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
            lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.12.233030 darwin_legacy_exe .+
+           lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.30.232839 linux_armv7l_exe .+
            lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
            lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
            lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
-           lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
            lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 zip Python 3\.8
            lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
            lockV2 yt-dlp/yt-dlp-master-builds 2025.08.12.232447 darwin_legacy_exe .+
+           lockV2 yt-dlp/yt-dlp-master-builds 2025.09.05.212910 linux_armv7l_exe .+
           EOF

       - name: Sign checksum files
         env:
           GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
-        if: env.GPG_SIGNING_KEY != ''
+        if: env.GPG_SIGNING_KEY
         run: |
           gpg --batch --import <<< "${{ secrets.GPG_SIGNING_KEY }}"
           for signfile in ./SHA*SUMS; do
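Each added `lock <version> <regex>` line caps updates: a client whose variant/system string matches the regex cannot update past that version. A rough sketch of that gate using the new armv7l line (the exact client-string format is the updater's concern; this only illustrates the regex matching):

```python
# Sketch: how a lock line pins matching clients at a given version.
import re

lock_line = 'lock 2025.08.27 linux_armv7l_exe .+'
_, version, pattern = lock_line.split(' ', 2)

def max_version_for(client_descr):
    # variant and system description joined into one string, matched in full
    return version if re.fullmatch(pattern, client_descr) else None

print(max_version_for('linux_armv7l_exe Ubuntu-20.04-glibc2.31'))  # '2025.08.27'
print(max_version_for('linux_aarch64_exe Ubuntu-24.04'))           # None (not pinned)
```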
.github/workflows/cache-warmer.yml (new file, 23 lines)

@@ -0,0 +1,23 @@
+name: Keep cache warm
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '0 22 1,6,11,16,21,27 * *'
+
+jobs:
+  build:
+    if: |
+      vars.KEEP_CACHE_WARM || github.event_name == 'workflow_dispatch'
+    uses: ./.github/workflows/build.yml
+    with:
+      version: '999999'
+      channel: stable
+      origin: ${{ github.repository }}
+      unix: false
+      linux: false
+      linux_armv7l: true
+      musllinux: false
+      macos: true
+      windows: true
+    permissions:
+      contents: read
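The cron days look arbitrary, but they appear chosen so that no gap reaches GitHub's 7-day eviction window for unused caches; quick arithmetic:

```python
# The warmer must touch each cache before GitHub's 7-day eviction deadline.
days = [1, 6, 11, 16, 21, 27]
gaps = [b - a for a, b in zip(days, days[1:])]
gaps.append(days[0] + 31 - days[-1])  # wrap-around gap for a 31-day month
print(gaps)             # [5, 5, 5, 5, 6, 5]
assert max(gaps) < 7    # every gap stays inside the eviction window
```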
.github/workflows/core.yml (8 changes)

@@ -37,23 +37,27 @@ jobs:
       matrix:
         os: [ubuntu-latest]
         # CPython 3.9 is in quick-test
-        python-version: ['3.10', '3.11', '3.12', '3.13', pypy-3.11]
+        python-version: ['3.10', '3.11', '3.12', '3.13', '3.14-dev', pypy-3.11]
         include:
           # atleast one of each CPython/PyPy tests must be in windows
           - os: windows-latest
             python-version: '3.9'
           - os: windows-latest
             python-version: '3.10'
+          - os: windows-latest
+            python-version: '3.11'
           - os: windows-latest
             python-version: '3.12'
           - os: windows-latest
             python-version: '3.13'
+          - os: windows-latest
+            python-version: '3.14-dev'
           - os: windows-latest
             python-version: pypy-3.11
     steps:
       - uses: actions/checkout@v4
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
           python-version: ${{ matrix.python-version }}
       - name: Install test requirements
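After this change the matrix expands to 13 jobs: the Ubuntu base list plus the explicit Windows includes. A sketch of the expansion GitHub performs:

```python
# Sketch of GitHub Actions matrix expansion for the updated core.yml matrix.
base_os = ['ubuntu-latest']
pythons = ['3.10', '3.11', '3.12', '3.13', '3.14-dev', 'pypy-3.11']
includes = [('windows-latest', v) for v in
            ['3.9', '3.10', '3.11', '3.12', '3.13', '3.14-dev', 'pypy-3.11']]
jobs = [(os_, py) for os_ in base_os for py in pythons] + includes
print(len(jobs))  # 13 test jobs
```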
.github/workflows/download.yml (6 changes)

@@ -11,7 +11,7 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
           python-version: 3.9
       - name: Install test requirements

@@ -28,7 +28,7 @@ jobs:
       fail-fast: true
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.10', '3.11', '3.12', '3.13', pypy-3.11]
+        python-version: ['3.10', '3.11', '3.12', '3.13', '3.14-dev', pypy-3.11]
         include:
           # atleast one of each CPython/PyPy tests must be in windows
           - os: windows-latest

@@ -38,7 +38,7 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
           python-version: ${{ matrix.python-version }}
       - name: Install test requirements
.github/workflows/quick-test.yml (4 changes)

@@ -11,7 +11,7 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - name: Set up Python 3.9
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
           python-version: '3.9'
       - name: Install test requirements

@@ -27,7 +27,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
         with:
           python-version: '3.9'
       - name: Install dev dependencies
.github/workflows/release-master.yml (13 changes)

@@ -6,10 +6,12 @@ on:
     paths:
       - "yt_dlp/**.py"
       - "!yt_dlp/version.py"
-      - "bundle/*.py"
+      - "bundle/**"
       - "pyproject.toml"
       - "Makefile"
       - ".github/workflows/build.yml"
+      - ".github/workflows/release.yml"
+      - ".github/workflows/release-master.yml"
 concurrency:
   group: release-master
 permissions:
@@ -17,21 +19,20 @@ permissions:

 jobs:
   release:
-    if: vars.BUILD_MASTER != ''
+    if: vars.BUILD_MASTER
     uses: ./.github/workflows/release.yml
     with:
       prerelease: true
-      source: master
+      source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.MASTER_ARCHIVE_REPO) || 'master' }}
+      target: 'master'
     permissions:
       contents: write
-      packages: write # For package cache
-      actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit

   publish_pypi:
     needs: [release]
-    if: vars.MASTER_PYPI_PROJECT != ''
+    if: vars.MASTER_PYPI_PROJECT
     runs-on: ubuntu-latest
     permissions:
       id-token: write # mandatory for trusted publishing
.github/workflows/release-nightly.yml (13 changes)

@@ -7,7 +7,7 @@ permissions:

 jobs:
   check_nightly:
-    if: vars.BUILD_NIGHTLY != ''
+    if: vars.BUILD_NIGHTLY
     runs-on: ubuntu-latest
     outputs:
       commit: ${{ steps.check_for_new_commits.outputs.commit }}
@@ -22,9 +22,13 @@ jobs:
             "yt_dlp/*.py"
             ':!yt_dlp/version.py'
             "bundle/*.py"
+            "bundle/docker/compose.yml"
+            "bundle/docker/linux/*"
             "pyproject.toml"
             "Makefile"
             ".github/workflows/build.yml"
+            ".github/workflows/release.yml"
+            ".github/workflows/release-nightly.yml"
           )
           echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"

@@ -34,17 +38,16 @@ jobs:
     uses: ./.github/workflows/release.yml
     with:
       prerelease: true
-      source: nightly
+      source: ${{ (github.repository != 'yt-dlp/yt-dlp' && vars.NIGHTLY_ARCHIVE_REPO) || 'nightly' }}
+      target: 'nightly'
     permissions:
       contents: write
-      packages: write # For package cache
-      actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit

   publish_pypi:
     needs: [release]
-    if: vars.NIGHTLY_PYPI_PROJECT != ''
+    if: vars.NIGHTLY_PYPI_PROJECT
     runs-on: ubuntu-latest
     permissions:
       id-token: write # mandatory for trusted publishing
.github/workflows/release.yml (279 changes)

@@ -14,6 +14,10 @@ on:
       required: false
       default: ''
       type: string
+    linux_armv7l:
+      required: false
+      default: false
+      type: boolean
     prerelease:
       required: false
       default: true
@@ -43,6 +47,10 @@ on:
       required: false
       default: ''
       type: string
+    linux_armv7l:
+      description: Include linux_armv7l
+      default: true
+      type: boolean
     prerelease:
       description: Pre-release
       default: false
@@ -71,141 +79,63 @@ jobs:
         with:
           fetch-depth: 0

-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
         with:
-          python-version: "3.10"
+          python-version: "3.10" # Keep this in sync with test-workflows.yml

       - name: Process inputs
         id: process_inputs
+        env:
+          INPUTS: ${{ toJSON(inputs) }}
         run: |
-          cat << EOF
-          ::group::Inputs
-          prerelease=${{ inputs.prerelease }}
-          source=${{ inputs.source }}
-          target=${{ inputs.target }}
-          version=${{ inputs.version }}
-          ::endgroup::
-          EOF
-          IFS='@' read -r source_repo source_tag <<<"${{ inputs.source }}"
-          IFS='@' read -r target_repo target_tag <<<"${{ inputs.target }}"
-          cat << EOF >> "$GITHUB_OUTPUT"
-          source_repo=${source_repo}
-          source_tag=${source_tag}
-          target_repo=${target_repo}
-          target_tag=${target_tag}
-          EOF
+          python -m devscripts.setup_variables process_inputs

       - name: Setup variables
         id: setup_variables
         env:
-          source_repo: ${{ steps.process_inputs.outputs.source_repo }}
-          source_tag: ${{ steps.process_inputs.outputs.source_tag }}
-          target_repo: ${{ steps.process_inputs.outputs.target_repo }}
-          target_tag: ${{ steps.process_inputs.outputs.target_tag }}
+          INPUTS: ${{ toJSON(inputs) }}
+          PROCESSED: ${{ toJSON(steps.process_inputs.outputs) }}
+          REPOSITORY: ${{ github.repository }}
+          PUSH_VERSION_COMMIT: ${{ vars.PUSH_VERSION_COMMIT }}
+          PYPI_PROJECT: ${{ vars.PYPI_PROJECT }}
+          SOURCE_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.source_repo)] }}
+          SOURCE_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.source_repo)] }}
+          TARGET_PYPI_PROJECT: ${{ vars[format('{0}_pypi_project', steps.process_inputs.outputs.target_repo)] }}
+          TARGET_PYPI_SUFFIX: ${{ vars[format('{0}_pypi_suffix', steps.process_inputs.outputs.target_repo)] }}
+          SOURCE_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.source_repo)] }}
+          TARGET_ARCHIVE_REPO: ${{ vars[format('{0}_archive_repo', steps.process_inputs.outputs.target_repo)] }}
+          HAS_SOURCE_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.source_repo)] }}
+          HAS_TARGET_ARCHIVE_REPO_TOKEN: ${{ !!secrets[format('{0}_archive_repo_token', steps.process_inputs.outputs.target_repo)] }}
+          HAS_ARCHIVE_REPO_TOKEN: ${{ !!secrets.ARCHIVE_REPO_TOKEN }}
         run: |
-          # unholy bash monstrosity (sincere apologies)
-          fallback_token () {
-            if ${{ !secrets.ARCHIVE_REPO_TOKEN }}; then
-              echo "::error::Repository access secret ${target_repo_token^^} not found"
-              exit 1
-            fi
-            target_repo_token=ARCHIVE_REPO_TOKEN
-            return 0
-          }
-
-          source_is_channel=0
-          [[ "${source_repo}" == 'stable' ]] && source_repo='yt-dlp/yt-dlp'
-          if [[ -z "${source_repo}" ]]; then
-            source_repo='${{ github.repository }}'
-          elif [[ '${{ vars[format('{0}_archive_repo', env.source_repo)] }}' ]]; then
-            source_is_channel=1
-            source_channel='${{ vars[format('{0}_archive_repo', env.source_repo)] }}'
-          elif [[ -z "${source_tag}" && "${source_repo}" != */* ]]; then
-            source_tag="${source_repo}"
-            source_repo='${{ github.repository }}'
-          fi
-          resolved_source="${source_repo}"
-          if [[ "${source_tag}" ]]; then
-            resolved_source="${resolved_source}@${source_tag}"
-          elif [[ "${source_repo}" == 'yt-dlp/yt-dlp' ]]; then
-            resolved_source='stable'
-          fi
-
-          revision="${{ (inputs.prerelease || !vars.PUSH_VERSION_COMMIT) && '$(date -u +"%H%M%S")' || '' }}"
-          version="$(
-            python devscripts/update-version.py \
-            -c "${resolved_source}" -r "${{ github.repository }}" ${{ inputs.version || '$revision' }} | \
-            grep -Po "version=\K\d+\.\d+\.\d+(\.\d+)?")"
-
-          if [[ "${target_repo}" ]]; then
-            if [[ -z "${target_tag}" ]]; then
-              if [[ '${{ vars[format('{0}_archive_repo', env.target_repo)] }}' ]]; then
-                target_tag="${source_tag:-${version}}"
-              else
-                target_tag="${target_repo}"
-                target_repo='${{ github.repository }}'
-              fi
-            fi
-            if [[ "${target_repo}" != '${{ github.repository}}' ]]; then
-              target_repo='${{ vars[format('{0}_archive_repo', env.target_repo)] }}'
-              target_repo_token='${{ env.target_repo }}_archive_repo_token'
-              ${{ !!secrets[format('{0}_archive_repo_token', env.target_repo)] }} || fallback_token
-              pypi_project='${{ vars[format('{0}_pypi_project', env.target_repo)] }}'
-              pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.target_repo)] }}'
-            fi
-          else
-            target_tag="${source_tag:-${version}}"
-            if ((source_is_channel)); then
-              target_repo="${source_channel}"
-              target_repo_token='${{ env.source_repo }}_archive_repo_token'
-              ${{ !!secrets[format('{0}_archive_repo_token', env.source_repo)] }} || fallback_token
-              pypi_project='${{ vars[format('{0}_pypi_project', env.source_repo)] }}'
-              pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.source_repo)] }}'
-            else
-              target_repo='${{ github.repository }}'
-            fi
-          fi
-
-          if [[ "${target_repo}" == '${{ github.repository }}' ]] && ${{ !inputs.prerelease }}; then
-            pypi_project='${{ vars.PYPI_PROJECT }}'
-          fi
-
-          echo "::group::Output variables"
-          cat << EOF | tee -a "$GITHUB_OUTPUT"
-          channel=${resolved_source}
-          version=${version}
-          target_repo=${target_repo}
-          target_repo_token=${target_repo_token}
-          target_tag=${target_tag}
-          pypi_project=${pypi_project}
-          pypi_suffix=${pypi_suffix}
-          EOF
-          echo "::endgroup::"
+          python -m devscripts.setup_variables

-      - name: Update documentation
+      - name: Update version & documentation
         env:
-          version: ${{ steps.setup_variables.outputs.version }}
-          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
-        if: |
-          !inputs.prerelease && env.target_repo == github.repository
+          CHANNEL: ${{ steps.setup_variables.outputs.channel }}
+          # Use base repo since this could be committed; build jobs will call this again with true origin
+          REPOSITORY: ${{ github.repository }}
+          VERSION: ${{ steps.setup_variables.outputs.version }}
         run: |
+          python devscripts/update-version.py -c "${CHANNEL}" -r "${REPOSITORY}" "${VERSION}"
           python devscripts/update_changelog.py -vv
           make doc

       - name: Push to release
         id: push_release
         env:
-          version: ${{ steps.setup_variables.outputs.version }}
-          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+          VERSION: ${{ steps.setup_variables.outputs.version }}
+          GITHUB_EVENT_SENDER_LOGIN: ${{ github.event.sender.login }}
+          GITHUB_EVENT_REF: ${{ github.event.ref }}
         if: |
-          !inputs.prerelease && env.target_repo == github.repository
+          !inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
         run: |
           git config --global user.name "github-actions[bot]"
           git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
           git add -u
-          git commit -m "Release ${{ env.version }}" \
-            -m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all"
-          git push origin --force ${{ github.event.ref }}:release
+          git commit -m "Release ${VERSION}" \
+            -m "Created by: ${GITHUB_EVENT_SENDER_LOGIN}" -m ":ci skip all"
+          git push origin --force "${GITHUB_EVENT_REF}:release"

       - name: Get target commitish
         id: get_target
@@ -214,10 +144,10 @@ jobs:

       - name: Update master
         env:
-          target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+          GITHUB_EVENT_REF: ${{ github.event.ref }}
         if: |
-          vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease && env.target_repo == github.repository
-        run: git push origin ${{ github.event.ref }}
+          vars.PUSH_VERSION_COMMIT && !inputs.prerelease && steps.setup_variables.outputs.target_repo == github.repository
+        run: git push origin "${GITHUB_EVENT_REF}"

   build:
     needs: prepare
@@ -226,10 +156,9 @@ jobs:
       version: ${{ needs.prepare.outputs.version }}
       channel: ${{ needs.prepare.outputs.channel }}
       origin: ${{ needs.prepare.outputs.target_repo }}
+      linux_armv7l: ${{ inputs.linux_armv7l }}
     permissions:
       contents: read
-      packages: write # For package cache
-      actions: write # For cleaning up cache
     secrets:
       GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}

@@ -244,7 +173,7 @@ jobs:
       - uses: actions/checkout@v4
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
        with:
          python-version: "3.10"

@@ -255,16 +184,16 @@ jobs:

       - name: Prepare
         env:
-          version: ${{ needs.prepare.outputs.version }}
-          suffix: ${{ needs.prepare.outputs.pypi_suffix }}
-          channel: ${{ needs.prepare.outputs.channel }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
-          pypi_project: ${{ needs.prepare.outputs.pypi_project }}
+          VERSION: ${{ needs.prepare.outputs.version }}
+          SUFFIX: ${{ needs.prepare.outputs.pypi_suffix }}
+          CHANNEL: ${{ needs.prepare.outputs.channel }}
+          TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
+          PYPI_PROJECT: ${{ needs.prepare.outputs.pypi_project }}
         run: |
-          python devscripts/update-version.py -c "${{ env.channel }}" -r "${{ env.target_repo }}" -s "${{ env.suffix }}" "${{ env.version }}"
+          python devscripts/update-version.py -c "${CHANNEL}" -r "${TARGET_REPO}" -s "${SUFFIX}" "${VERSION}"
           python devscripts/update_changelog.py -vv
           python devscripts/make_lazy_extractors.py
-          sed -i -E '0,/(name = ")[^"]+(")/s//\1${{ env.pypi_project }}\2/' pyproject.toml
+          sed -i -E '0,/(name = ")[^"]+(")/s//\1'"${PYPI_PROJECT}"'\2/' pyproject.toml

       - name: Build
         run: |
@@ -298,7 +227,11 @@ jobs:
     permissions:
       contents: write
     runs-on: ubuntu-latest
+    env:
+      TARGET_REPO: ${{ needs.prepare.outputs.target_repo }}
+      TARGET_TAG: ${{ needs.prepare.outputs.target_tag }}
+      VERSION: ${{ needs.prepare.outputs.version }}
+      HEAD_SHA: ${{ needs.prepare.outputs.head_sha }}
     steps:
       - uses: actions/checkout@v4
         with:
@@ -308,87 +241,85 @@ jobs:
           path: artifact
           pattern: build-*
           merge-multiple: true
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
        with:
          python-version: "3.10"

       - name: Generate release notes
         env:
-          head_sha: ${{ needs.prepare.outputs.head_sha }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
-          target_tag: ${{ needs.prepare.outputs.target_tag }}
+          REPOSITORY: ${{ github.repository }}
+          BASE_REPO: yt-dlp/yt-dlp
+          NIGHTLY_REPO: yt-dlp/yt-dlp-nightly-builds
+          MASTER_REPO: yt-dlp/yt-dlp-master-builds
+          DOCS_PATH: ${{ env.TARGET_REPO == github.repository && format('/tree/{0}', env.TARGET_TAG) || '' }}
         run: |
           printf '%s' \
-            '[]' \
-            '(https://github.com/${{ github.repository }}#installation "Installation instructions") ' \
-            '[]' \
-            '(https://discord.gg/H5MNcFW63r "Discord") ' \
-            '[]' \
-            '(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
-            '[]' \
-            '(https://github.com/${{ github.repository }}' \
-            '${{ env.target_repo == github.repository && format('/tree/{0}', env.target_tag) || '' }}#readme "Documentation") ' \
-            ${{ env.target_repo == 'yt-dlp/yt-dlp' && '\
-            "[]" \
-            "(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\") " \
-            "[]" \
-            "(https://github.com/yt-dlp/yt-dlp-master-builds/releases/latest \"Master builds\")"' || '' }} > ./RELEASE_NOTES
-          printf '\n\n' >> ./RELEASE_NOTES
-          cat >> ./RELEASE_NOTES << EOF
-          #### A description of the various files is in the [README](https://github.com/${{ github.repository }}#release-files)
-          ---
-          $(python ./devscripts/make_changelog.py -vv --collapsible)
-          EOF
+            "[]" \
+            "(https://github.com/${REPOSITORY}#installation \"Installation instructions\") " \
+            "[]" \
+            "(https://discord.gg/H5MNcFW63r \"Discord\") " \
+            "[]" \
+            "(https://github.com/${BASE_REPO}/blob/master/Collaborators.md#collaborators \"Donate\") " \
+            "[]" \
+            "(https://github.com/${REPOSITORY}${DOCS_PATH}#readme \"Documentation\") " > ./RELEASE_NOTES
+          if [[ "${TARGET_REPO}" == "${BASE_REPO}" ]]; then
+            printf '%s' \
+              "[]" \
+              "(https://github.com/${NIGHTLY_REPO}/releases/latest \"Nightly builds\") " \
+              "[]" \
+              "(https://github.com/${MASTER_REPO}/releases/latest \"Master builds\")" >> ./RELEASE_NOTES
+          fi
+          printf '\n\n%s\n\n%s%s\n\n---\n' \
+            "#### A description of the various files is in the [README](https://github.com/${REPOSITORY}#release-files)" \
+            "The PyInstaller-bundled executables are subject to the licenses described in " \
+            "[THIRD_PARTY_LICENSES.txt](https://github.com/${BASE_REPO}/blob/${HEAD_SHA}/THIRD_PARTY_LICENSES.txt)" >> ./RELEASE_NOTES
+          python ./devscripts/make_changelog.py -vv --collapsible >> ./RELEASE_NOTES
           printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES
           cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
-          printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ env.head_sha }}' >> ./ARCHIVE_NOTES
+          printf '%s\n\n' "Generated from: https://github.com/${REPOSITORY}/commit/${HEAD_SHA}" >> ./ARCHIVE_NOTES
           cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES

       - name: Publish to archive repo
         env:
           GH_TOKEN: ${{ secrets[needs.prepare.outputs.target_repo_token] }}
           GH_REPO: ${{ needs.prepare.outputs.target_repo }}
-          version: ${{ needs.prepare.outputs.version }}
-          channel: ${{ needs.prepare.outputs.channel }}
+          TITLE_PREFIX: ${{ startswith(env.TARGET_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}
+          TITLE: ${{ inputs.target != env.TARGET_REPO && inputs.target || needs.prepare.outputs.channel }}
         if: |
-          inputs.prerelease && env.GH_TOKEN != '' && env.GH_REPO != '' && env.GH_REPO != github.repository
+          inputs.prerelease && env.GH_TOKEN && env.GH_REPO && env.GH_REPO != github.repository
         run: |
-          title="${{ startswith(env.GH_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}${{ env.channel }}"
           gh release create \
             --notes-file ARCHIVE_NOTES \
-            --title "${title} ${{ env.version }}" \
-            ${{ env.version }} \
+            --title "${TITLE_PREFIX}${TITLE} ${VERSION}" \
+            "${VERSION}" \
             artifact/*

       - name: Prune old release
         env:
           GH_TOKEN: ${{ github.token }}
-          version: ${{ needs.prepare.outputs.version }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
-          target_tag: ${{ needs.prepare.outputs.target_tag }}
         if: |
-          env.target_repo == github.repository && env.target_tag != env.version
+          env.TARGET_REPO == github.repository && env.TARGET_TAG != env.VERSION
         run: |
-          gh release delete --yes --cleanup-tag "${{ env.target_tag }}" || true
-          git tag --delete "${{ env.target_tag }}" || true
+          gh release delete --yes --cleanup-tag "${TARGET_TAG}" || true
+          git tag --delete "${TARGET_TAG}" || true
           sleep 5 # Enough time to cover deletion race condition

       - name: Publish release
         env:
           GH_TOKEN: ${{ github.token }}
-          version: ${{ needs.prepare.outputs.version }}
-          target_repo: ${{ needs.prepare.outputs.target_repo }}
-          target_tag: ${{ needs.prepare.outputs.target_tag }}
-          head_sha: ${{ needs.prepare.outputs.head_sha }}
+          NOTES_FILE: ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }}
+          TITLE_PREFIX: ${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}
+          TITLE: ${{ env.TARGET_TAG != env.VERSION && format('{0} ', env.TARGET_TAG) || '' }}
+          PRERELEASE: ${{ inputs.prerelease && '1' || '0' }}
         if: |
-          env.target_repo == github.repository
+          env.TARGET_REPO == github.repository
         run: |
-          title="${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}"
-          title+="${{ env.target_tag != env.version && format('{0} ', env.target_tag) || '' }}"
-          gh release create \
-            --notes-file ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }} \
-            --target ${{ env.head_sha }} \
-            --title "${title}${{ env.version }}" \
-            ${{ inputs.prerelease && '--prerelease' || '' }} \
-            ${{ env.target_tag }} \
-            artifact/*
+          gh_options=(
+            --notes-file "${NOTES_FILE}"
+            --target "${HEAD_SHA}"
+            --title "${TITLE_PREFIX}${TITLE}${VERSION}"
+          )
+          if ((PRERELEASE)); then
+            gh_options+=(--prerelease)
+          fi
+          gh release create "${gh_options[@]}" "${TARGET_TAG}" artifact/*
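The theme of this refactor is replacing inline workflow bash with testable devscripts: the YAML now only marshals data into environment variables (`INPUTS: ${{ toJSON(inputs) }}` etc.), and a Python module derives the release variables and writes them back through the standard `GITHUB_OUTPUT` mechanism. A minimal sketch of that pattern, assuming the same environment-variable conventions as the workflow above (this is not the actual devscripts/setup_variables.py, whose derivation logic is far more involved):

```python
# Hypothetical sketch of the GITHUB_OUTPUT pattern the refactored workflow relies on.
import json
import os


def main():
    # INPUTS is provided by the workflow as ${{ toJSON(inputs) }}
    inputs = json.loads(os.environ.get('INPUTS', '{}'))
    # Placeholder derivation; the real devscript computes channel, version,
    # target_repo, target_tag, pypi_project, pypi_suffix, etc.
    outputs = {'channel': inputs.get('source') or 'stable'}
    # GITHUB_OUTPUT points at a file that GitHub Actions parses as key=value lines
    with open(os.environ['GITHUB_OUTPUT'], 'a', encoding='utf-8') as f:
        for key, value in outputs.items():
            f.write(f'{key}={value}\n')


if __name__ == '__main__':
    main()
```

Later steps can then reference the values as `${{ steps.setup_variables.outputs.channel }}`, exactly as the hunks above do.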
.github/workflows/signature-tests.yml (4 changes)

@@ -25,11 +25,11 @@ jobs:
     fail-fast: false
     matrix:
       os: [ubuntu-latest, windows-latest]
-      python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', pypy-3.11]
+      python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', '3.14-dev', pypy-3.11]
     steps:
       - uses: actions/checkout@v4
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install test requirements
.github/workflows/test-workflows.yml (new file, 52 lines)

@@ -0,0 +1,52 @@
+name: Test and lint workflows
+on:
+  push:
+    paths:
+      - .github/workflows/*
+      - bundle/docker/linux/*.sh
+      - devscripts/setup_variables.py
+      - devscripts/setup_variables_tests.py
+      - devscripts/utils.py
+  pull_request:
+    paths:
+      - .github/workflows/*
+      - bundle/docker/linux/*.sh
+      - devscripts/setup_variables.py
+      - devscripts/setup_variables_tests.py
+      - devscripts/utils.py
+permissions:
+  contents: read
+env:
+  ACTIONLINT_VERSION: "1.7.7"
+  ACTIONLINT_SHA256SUM: 023070a287cd8cccd71515fedc843f1985bf96c436b7effaecce67290e7e0757
+  ACTIONLINT_REPO: https://github.com/rhysd/actionlint
+
+jobs:
+  check:
+    name: Check workflows
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v6
+        with:
+          python-version: "3.10" # Keep this in sync with release.yml's prepare job
+      - name: Install requirements
+        env:
+          ACTIONLINT_TARBALL: ${{ format('actionlint_{0}_linux_amd64.tar.gz', env.ACTIONLINT_VERSION) }}
+        run: |
+          python -m devscripts.install_deps -o --include test
+          sudo apt -y install shellcheck
+          python -m pip install -U pyflakes
+          curl -LO "${ACTIONLINT_REPO}/releases/download/v${ACTIONLINT_VERSION}/${ACTIONLINT_TARBALL}"
+          printf '%s %s' "${ACTIONLINT_SHA256SUM}" "${ACTIONLINT_TARBALL}" | sha256sum -c -
+          tar xvzf "${ACTIONLINT_TARBALL}" actionlint
+          chmod +x actionlint
+      - name: Run actionlint
+        run: |
+          ./actionlint -color
+      - name: Check Docker shell scripts
+        run: |
+          shellcheck bundle/docker/linux/*.sh
+      - name: Test GHA devscripts
+        run: |
+          pytest -Werror --tb=short --color=yes devscripts/setup_variables_tests.py
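The last step runs pytest against the new release devscripts. Tests for a `GITHUB_OUTPUT`-writing script typically point that variable at a temporary file and read the result back; a hedged sketch of the approach, assuming a `write_outputs` helper defined here for illustration (the real devscripts/setup_variables_tests.py may be organized quite differently):

```python
# Hypothetical pytest sketch for a devscript that writes GitHub Actions outputs.
import os


def write_outputs(outputs: dict):
    # Same key=value convention the workflows above consume via steps.*.outputs
    with open(os.environ['GITHUB_OUTPUT'], 'a', encoding='utf-8') as f:
        for key, value in outputs.items():
            f.write(f'{key}={value}\n')


def test_write_outputs(tmp_path, monkeypatch):
    out_file = tmp_path / 'github_output'
    monkeypatch.setenv('GITHUB_OUTPUT', str(out_file))
    write_outputs({'channel': 'nightly', 'prerelease': 'true'})
    assert out_file.read_text() == 'channel=nightly\nprerelease=true\n'
```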
CONTRIBUTING.md

@@ -12,6 +12,7 @@
 - [Is your question about yt-dlp?](#is-your-question-about-yt-dlp)
 - [Are you willing to share account details if needed?](#are-you-willing-to-share-account-details-if-needed)
 - [Is the website primarily used for piracy](#is-the-website-primarily-used-for-piracy)
+- [AUTOMATED CONTRIBUTIONS (AI / LLM) POLICY](#automated-contributions-ai--llm-policy)
 - [DEVELOPER INSTRUCTIONS](#developer-instructions)
 - [Adding new feature or making overarching changes](#adding-new-feature-or-making-overarching-changes)
 - [Adding support for a new site](#adding-support-for-a-new-site)
@@ -134,6 +135,17 @@ While these steps won't necessarily ensure that no misuse of the account takes p
 We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) to not support services that is primarily used for infringing copyright. Additionally, it has been decided to not to support porn sites that specialize in fakes. We also cannot support any service that serves only [DRM protected content](https://en.wikipedia.org/wiki/Digital_rights_management).


+# AUTOMATED CONTRIBUTIONS (AI / LLM) POLICY
+
+Please refrain from submitting issues or pull requests that have been generated by an LLM or other fully-automated tools. Any submission that is in violation of this policy will be closed, and the submitter may be blocked from this repository without warning.
+
+If you submit an issue, you need to understand what your issue description is saying. You need to be able to answer questions about your bug report or feature request. Using an AI tool to *proofread* your issue/comment text is acceptable. Using an AI tool to *write* your issue/comment text is unacceptable.
+
+If you submit a pull request, you need to understand what every line of code you've changed does. If you can't explain why your PR is doing something, then do not submit it. Using an AI tool to generate entire lines of code is unacceptable.
+
+The rationale behind this policy is that automated contributions are a waste of the maintainers' time. Humans spend their time and brainpower reviewing every submission. Issues or pull requests generated by automation tools create an imbalance of effort between the submitter and the reviewer. Nobody learns anything when a maintainer reviews code written by an LLM.
+
+Additionally, AI-generated code conflicts with this project's license (Unlicense), since you cannot truly release code into the public domain if you didn't author it yourself.
+
+
 # DEVELOPER INSTRUCTIONS
@@ -768,12 +780,10 @@ view_count = int_or_none(video.get('views'))
 ```

-# My pull request is labeled pending-fixes
+## My pull request is labeled pending-fixes

 The `pending-fixes` label is added when there are changes requested to a PR. When the necessary changes are made, the label should be removed. However, despite our best efforts, it may sometimes happen that the maintainer did not see the changes or forgot to remove the label. If your PR is still marked as `pending-fixes` a few days after all requested changes have been made, feel free to ping the maintainer who labeled your issue and ask them to re-review and remove the label.

-
-
 # EMBEDDING YT-DLP
 See [README.md#embedding-yt-dlp](README.md#embedding-yt-dlp) for instructions on how to embed yt-dlp in another Python program
CONTRIBUTORS

@@ -806,3 +806,8 @@ junyilou
 PierreMesure
 Randalix
 runarmod
+gitchasing
+zakaryan2004
+cdce8p
+nicolaasjan
+willsmillie
Changelog.md (97 changes)

@@ -4,6 +4,103 @@
 # To create a release, dispatch the https://github.com/yt-dlp/yt-dlp/actions/workflows/release.yml workflow on master
 -->

+### 2025.09.26
+
+#### Extractor changes
+- **twitch**: vod: [Fix `live_status` detection](https://github.com/yt-dlp/yt-dlp/commit/50e452fd7dfb8a648bd3b9aaabc8f94f37ce2051) ([#14457](https://github.com/yt-dlp/yt-dlp/issues/14457)) by [bashonly](https://github.com/bashonly)
+- **youtube**
+    - [Fix player JS overrides](https://github.com/yt-dlp/yt-dlp/commit/b7b7910d96359a539b7997890342ab4a59dd685d) ([#14430](https://github.com/yt-dlp/yt-dlp/issues/14430)) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev)
+    - [Improve PO token logging](https://github.com/yt-dlp/yt-dlp/commit/7df5acc546dccd32213c3a125d721e32b06d71b0) ([#14447](https://github.com/yt-dlp/yt-dlp/issues/14447)) by [seproDev](https://github.com/seproDev)
+    - [Player client maintenance](https://github.com/yt-dlp/yt-dlp/commit/94c5622be96474ca3c637e52898c4daee4d8fb69) ([#14448](https://github.com/yt-dlp/yt-dlp/issues/14448)) by [seproDev](https://github.com/seproDev)
+    - [Replace `tv_simply` with `web_safari` in default clients](https://github.com/yt-dlp/yt-dlp/commit/12b57d2858845c0c7fb33bf9aa8ed7be6905535d) ([#14465](https://github.com/yt-dlp/yt-dlp/issues/14465)) by [bashonly](https://github.com/bashonly)
+
+### 2025.09.23
+
+#### Important changes
+- **Several options have been deprecated**
+In order to simplify the codebase and reduce maintenance burden, various options have been deprecated. Please remove them from your commands/configurations. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14198)
+
+#### Core changes
+- **compat**: [Add `compat_datetime_from_timestamp`](https://github.com/yt-dlp/yt-dlp/commit/6a763a55d8a93b2a964ecf7699248ad342485412) ([#11902](https://github.com/yt-dlp/yt-dlp/issues/11902)) by [pzhlkj6612](https://github.com/pzhlkj6612), [seproDev](https://github.com/seproDev)
+- **utils**
+    - `mimetype2ext`: [Recognize `vnd.dlna.mpeg-tts`](https://github.com/yt-dlp/yt-dlp/commit/98b6b0d339130e955f9d45ce67c0357c633c1627) ([#14388](https://github.com/yt-dlp/yt-dlp/issues/14388)) by [seproDev](https://github.com/seproDev)
+    - `random_user_agent`: [Bump versions](https://github.com/yt-dlp/yt-dlp/commit/f3829463c728a5b5e62b3fc157e71c99b26edac7) ([#14317](https://github.com/yt-dlp/yt-dlp/issues/14317)) by [seproDev](https://github.com/seproDev)
+
+#### Extractor changes
+- **10play**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/067062bb87ac057e453ce9efdac7ca117a6a7da0) ([#14242](https://github.com/yt-dlp/yt-dlp/issues/14242)) by [Sipherdrakon](https://github.com/Sipherdrakon)
+- **applepodcast**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/b2c01d0498653e0239c7226c5a7fcb614dd4dbc8) ([#14372](https://github.com/yt-dlp/yt-dlp/issues/14372)) by [seproDev](https://github.com/seproDev)
+- **loco**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/f5cb721185e8725cf4eb4080e86aa9aa73ef25b3) ([#14256](https://github.com/yt-dlp/yt-dlp/issues/14256)) by [seproDev](https://github.com/seproDev)
+- **mitele**: [Remove extractor](https://github.com/yt-dlp/yt-dlp/commit/820c6e244571557fcfc127d4b3680e2d07c04dca) ([#14348](https://github.com/yt-dlp/yt-dlp/issues/14348)) by [bashonly](https://github.com/bashonly)
+- **newspicks**: [Warn when only preview is available](https://github.com/yt-dlp/yt-dlp/commit/9def9a4b0e958285e055eb350e5dd43b5c423336) ([#14197](https://github.com/yt-dlp/yt-dlp/issues/14197)) by [doe1080](https://github.com/doe1080)
+- **onsen**: [Add extractor](https://github.com/yt-dlp/yt-dlp/commit/17bfaa53edf5c52fce73cf0cef4592f929c2462d) ([#10971](https://github.com/yt-dlp/yt-dlp/issues/10971)) by [doe1080](https://github.com/doe1080)
+- **pixivsketch**: [Remove extractors](https://github.com/yt-dlp/yt-dlp/commit/3d9a88bd8ef149d781c7e569e48e61551eda395e) ([#14196](https://github.com/yt-dlp/yt-dlp/issues/14196)) by [doe1080](https://github.com/doe1080)
+- **smotrim**: [Rework extractors](https://github.com/yt-dlp/yt-dlp/commit/8cb037c0b06c2815080f87d61ea2e95c412785fc) ([#14200](https://github.com/yt-dlp/yt-dlp/issues/14200)) by [doe1080](https://github.com/doe1080), [swayll](https://github.com/swayll)
+- **telecinco**: [Support browser impersonation](https://github.com/yt-dlp/yt-dlp/commit/e123a48f1155703d8709a4221a42bd45c0a2b3ce) ([#14351](https://github.com/yt-dlp/yt-dlp/issues/14351)) by [bashonly](https://github.com/bashonly)
+- **tiktok**: live: [Fix room ID extraction](https://github.com/yt-dlp/yt-dlp/commit/5c1abcdc49b9d23e1dcb77b95d063cf2bf93e352) ([#14287](https://github.com/yt-dlp/yt-dlp/issues/14287)) by [bashonly](https://github.com/bashonly)
+- **ttinglive**: [Adapt FlexTV extractor to new domain](https://github.com/yt-dlp/yt-dlp/commit/4bc19adc8798e7564513898cf34adc432c6c5709) ([#14375](https://github.com/yt-dlp/yt-dlp/issues/14375)) by [seproDev](https://github.com/seproDev)
+- **tunein**: [Fix extractors](https://github.com/yt-dlp/yt-dlp/commit/7d9e48b22a780c2e8d2d2d68940d49fd2029ab70) ([#13981](https://github.com/yt-dlp/yt-dlp/issues/13981)) by [doe1080](https://github.com/doe1080)
+- **twitch**: clips: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/f8750504c2f71b54586fb857d60dce4e354a13ea) ([#14397](https://github.com/yt-dlp/yt-dlp/issues/14397)) by [seproDev](https://github.com/seproDev)
+- **vimeo**: [Fix login error handling](https://github.com/yt-dlp/yt-dlp/commit/679587dac7cd011a1472255e1f06efb017ba91b6) ([#14280](https://github.com/yt-dlp/yt-dlp/issues/14280)) by [bashonly](https://github.com/bashonly)
+- **vk**
+    - [Support vksport URLs](https://github.com/yt-dlp/yt-dlp/commit/b81e9272dce5844e8fba371cb4b4fd95ad3ed819) ([#14341](https://github.com/yt-dlp/yt-dlp/issues/14341)) by [seproDev](https://github.com/seproDev)
+    - uservideos: [Support alternate URL format](https://github.com/yt-dlp/yt-dlp/commit/bf5d18016b03a3f2fd5d3494d9efe85d3f8beeac) ([#14376](https://github.com/yt-dlp/yt-dlp/issues/14376)) by [seproDev](https://github.com/seproDev)
+- **xhamster**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/a1c98226a4e869a34cc764a9dcf7a4558516308e) ([#14286](https://github.com/yt-dlp/yt-dlp/issues/14286)) by [nicolaasjan](https://github.com/nicolaasjan), [willsmillie](https://github.com/willsmillie) (With fixes in [677997d](https://github.com/yt-dlp/yt-dlp/commit/677997d84eaec0037397f7d935386daa3025b004) by [arand](https://github.com/arand), [thegymguy](https://github.com/thegymguy))
+- **youtube**: [Force player `0004de42`](https://github.com/yt-dlp/yt-dlp/commit/7f5d9f8543d19590eeec9473d54fa00151afa78a) ([#14398](https://github.com/yt-dlp/yt-dlp/issues/14398)) by [seproDev](https://github.com/seproDev)
+
+#### Misc. changes
+- **build**
+    - [Fix cache warmer](https://github.com/yt-dlp/yt-dlp/commit/8597a4331e8535a246d777bb8397bdcab251766c) ([#14261](https://github.com/yt-dlp/yt-dlp/issues/14261)) by [bashonly](https://github.com/bashonly)
+    - [Post-release workflow cleanup](https://github.com/yt-dlp/yt-dlp/commit/cd94e7004036e0149d7d3fa236c7dd44cf460788) ([#14250](https://github.com/yt-dlp/yt-dlp/issues/14250)) by [bashonly](https://github.com/bashonly)
+    - [Refactor Linux build jobs](https://github.com/yt-dlp/yt-dlp/commit/e2d37bcc8e84be9ce0f67fc24cb830c13963d10f) ([#14275](https://github.com/yt-dlp/yt-dlp/issues/14275)) by [bashonly](https://github.com/bashonly)
+    - [Use PyInstaller 6.16 for Windows](https://github.com/yt-dlp/yt-dlp/commit/df4b4e8ccf3385be6d2ad65465a0704c223dfdfb) ([#14318](https://github.com/yt-dlp/yt-dlp/issues/14318)) by [bashonly](https://github.com/bashonly)
+    - [Use SPDX license identifier](https://github.com/yt-dlp/yt-dlp/commit/48a214bef4bfd5984362d3d24b09dce50ba449ea) ([#14260](https://github.com/yt-dlp/yt-dlp/issues/14260)) by [cdce8p](https://github.com/cdce8p)
+    - [Use new PyInstaller builds for Windows](https://github.com/yt-dlp/yt-dlp/commit/c8ede5f34d6c95c442b936bb01ecbcb724aefdef) ([#14273](https://github.com/yt-dlp/yt-dlp/issues/14273)) by [bashonly](https://github.com/bashonly)
+- **ci**
+    - [Bump actions/setup-python to v6](https://github.com/yt-dlp/yt-dlp/commit/22ea0688ed6bcdbe4c51401a84239cda3decfc9c) ([#14282](https://github.com/yt-dlp/yt-dlp/issues/14282)) by [bashonly](https://github.com/bashonly)
+    - [Improve workflow checks](https://github.com/yt-dlp/yt-dlp/commit/ae3923b6b23bc62115be55510d6b5842f7a46b5f) ([#14316](https://github.com/yt-dlp/yt-dlp/issues/14316)) by [bashonly](https://github.com/bashonly)
+    - [Test and lint workflows](https://github.com/yt-dlp/yt-dlp/commit/7c9b10ebc83907d37f9f65ea9d4bd6f5e3bd1371) ([#14249](https://github.com/yt-dlp/yt-dlp/issues/14249)) by [bashonly](https://github.com/bashonly)
+    - [Test with Python 3.14](https://github.com/yt-dlp/yt-dlp/commit/83b8409366d0f9554eaeae56394b244dab64a2cb) ([#13468](https://github.com/yt-dlp/yt-dlp/issues/13468)) by [bashonly](https://github.com/bashonly)
+- **cleanup**
+    - [Bump ruff to 0.13.x](https://github.com/yt-dlp/yt-dlp/commit/ba8044685537e8e14adc6826fb4d730856fd2e2b) ([#14293](https://github.com/yt-dlp/yt-dlp/issues/14293)) by [bashonly](https://github.com/bashonly)
+    - [Deprecate various options](https://github.com/yt-dlp/yt-dlp/commit/08d78996831bd8e1e3c2592d740c3def00bbf548) ([#13821](https://github.com/yt-dlp/yt-dlp/issues/13821)) by [seproDev](https://github.com/seproDev)
+    - [Remove broken extractors](https://github.com/yt-dlp/yt-dlp/commit/65e90aea29cf3bfc9d1ae3e009fbf9a8db3a23c9) ([#14305](https://github.com/yt-dlp/yt-dlp/issues/14305)) by [bashonly](https://github.com/bashonly)
+    - [Remove setup.cfg](https://github.com/yt-dlp/yt-dlp/commit/eb4b3a5fc7765a6cd0370ca44ccee0d7d5111dd7) ([#14314](https://github.com/yt-dlp/yt-dlp/issues/14314)) by [seproDev](https://github.com/seproDev) (With fixes in [8ab262c](https://github.com/yt-dlp/yt-dlp/commit/8ab262c66bd3e1d8874fb2d070068ba1f0d48f16) by [bashonly](https://github.com/bashonly))
+    - Miscellaneous: [2e81e29](https://github.com/yt-dlp/yt-dlp/commit/2e81e298cdce23afadb06a95836284acb38f7018) by [bashonly](https://github.com/bashonly), [doe1080](https://github.com/doe1080), [seproDev](https://github.com/seproDev)
+- **docs**
+    - [Clarify license of PyInstaller-bundled executables](https://github.com/yt-dlp/yt-dlp/commit/e6e6b512141e66b1b36058966804fe59c02a2b4d) ([#14257](https://github.com/yt-dlp/yt-dlp/issues/14257)) by [seproDev](https://github.com/seproDev)
+    - [Establish AI/LLM contribution policy](https://github.com/yt-dlp/yt-dlp/commit/8821682f15af59047bc1f92724ef8a9ba30d6f7e) ([#14194](https://github.com/yt-dlp/yt-dlp/issues/14194)) by [bashonly](https://github.com/bashonly), [seproDev](https://github.com/seproDev)
+- **test**: utils: [Fix `sanitize_path` test for Windows CPython 3.11](https://github.com/yt-dlp/yt-dlp/commit/a183837ec8bb5e28fe6eb3a9d77ea2d0d7a106bd) ([#13878](https://github.com/yt-dlp/yt-dlp/issues/13878)) by [Grub4K](https://github.com/Grub4K)
+
+### 2025.09.05
+
+#### Core changes
+- [Fix `--id` deprecation warning](https://github.com/yt-dlp/yt-dlp/commit/76bb46002c9a9655f2b1d29d4840e75e79037cfa) ([#14190](https://github.com/yt-dlp/yt-dlp/issues/14190)) by [seproDev](https://github.com/seproDev)
+
+#### Extractor changes
+- **charlierose**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/603acdff07f0226088916886002d2ad8309ff9d3) ([#14231](https://github.com/yt-dlp/yt-dlp/issues/14231)) by [gitchasing](https://github.com/gitchasing)
+- **googledrive**: [Fix subtitles extraction](https://github.com/yt-dlp/yt-dlp/commit/18fe696df9d60804a8f5cb8cd74f38111d6eb711) ([#14139](https://github.com/yt-dlp/yt-dlp/issues/14139)) by [zakaryan2004](https://github.com/zakaryan2004)
+- **itvbtcc**: [Fix extractor](https://github.com/yt-dlp/yt-dlp/commit/0b51005b4819e7cea222fcbaf8e60391db4f732c) ([#14161](https://github.com/yt-dlp/yt-dlp/issues/14161)) by [garret1317](https://github.com/garret1317)
+- **kick**: vod: [Support ongoing livestream VODs](https://github.com/yt-dlp/yt-dlp/commit/1e28f6bf743627b909135bb9a88537ad2deccaf0) ([#14154](https://github.com/yt-dlp/yt-dlp/issues/14154)) by [InvalidUsernameException](https://github.com/InvalidUsernameException)
+- **lrt**: [Fix extractors](https://github.com/yt-dlp/yt-dlp/commit/ed24640943872c4cf30d7cc4601bec87b50ba03c) ([#14193](https://github.com/yt-dlp/yt-dlp/issues/14193)) by [seproDev](https://github.com/seproDev)
+- **tver**: [Extract more metadata](https://github.com/yt-dlp/yt-dlp/commit/223baa81f6637dcdef108f817180d8d1ae9fa213) ([#14165](https://github.com/yt-dlp/yt-dlp/issues/14165)) by [arabcoders](https://github.com/arabcoders)
+- **vevo**: [Restore extractors](https://github.com/yt-dlp/yt-dlp/commit/d925e92b710153d0d51d030f115b3c87226bc0f0) ([#14203](https://github.com/yt-dlp/yt-dlp/issues/14203)) by [seproDev](https://github.com/seproDev)
+
+#### Misc. changes
+- **build**: [Overhaul Linux builds and refactor release workflow](https://github.com/yt-dlp/yt-dlp/commit/50136eeeb3767289b236f140b759f23b39b00888) ([#13997](https://github.com/yt-dlp/yt-dlp/issues/13997)) by [bashonly](https://github.com/bashonly)
+
+### 2025.08.27
+
+#### Extractor changes
+- **generic**
+    - [Simplify invalid URL error message](https://github.com/yt-dlp/yt-dlp/commit/1ddbd033f0fd65917526b1271cea66913ac8647f) ([#14167](https://github.com/yt-dlp/yt-dlp/issues/14167)) by [seproDev](https://github.com/seproDev)
+    - [Use https as fallback protocol](https://github.com/yt-dlp/yt-dlp/commit/fec30c56f0e97e573ace659104ff0d72c4cc9809) ([#14160](https://github.com/yt-dlp/yt-dlp/issues/14160)) by [seproDev](https://github.com/seproDev)
+- **skeb**: [Support wav files](https://github.com/yt-dlp/yt-dlp/commit/d6950c27af31908363c5c815e3b7eb4f9ff41643) ([#14147](https://github.com/yt-dlp/yt-dlp/issues/14147)) by [seproDev](https://github.com/seproDev)
+- **youtube**
+    - [Add `tcc` player JS variant](https://github.com/yt-dlp/yt-dlp/commit/8f4a908300f55054bc96814bceeaa1034fdf4110) ([#14134](https://github.com/yt-dlp/yt-dlp/issues/14134)) by [bashonly](https://github.com/bashonly)
+    - [Deprioritize `web_safari` m3u8 formats](https://github.com/yt-dlp/yt-dlp/commit/5c7ad68ff1643ad80d18cef8be9db8fcab05ee6c) ([#14168](https://github.com/yt-dlp/yt-dlp/issues/14168)) by [bashonly](https://github.com/bashonly)
    - [Player client maintenance](https://github.com/yt-dlp/yt-dlp/commit/3bd91544122142a87863d79e54e995c26cfd7f92) ([#14135](https://github.com/yt-dlp/yt-dlp/issues/14135)) by [bashonly](https://github.com/bashonly)
+    - [Use alternative `tv` user-agent when authenticated](https://github.com/yt-dlp/yt-dlp/commit/8cd37b85d492edb56a4f7506ea05527b85a6b02b) ([#14169](https://github.com/yt-dlp/yt-dlp/issues/14169)) by [bashonly](https://github.com/bashonly)
+
 ### 2025.08.22

 #### Core changes
Makefile (4 changes)

@@ -10,7 +10,7 @@ tar: yt-dlp.tar.gz
 # intended use: when building a source distribution,
 # make pypi-files && python3 -m build -sn .
 pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
-	completions yt-dlp.1 pyproject.toml setup.cfg devscripts/* test/*
+	completions yt-dlp.1 pyproject.toml devscripts/* test/*

 .PHONY: all clean clean-all clean-test clean-dist clean-cache \
 	completions completion-bash completion-fish completion-zsh \
@@ -159,7 +159,7 @@ yt-dlp.tar.gz: all
 		README.md supportedsites.md Changelog.md LICENSE \
 		CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
 		Makefile yt-dlp.1 README.txt completions .gitignore \
-		setup.cfg yt-dlp yt_dlp pyproject.toml devscripts test
+		yt-dlp yt_dlp pyproject.toml devscripts test

 AUTHORS: Changelog.md
 	@if [ -d '.git' ] && command -v git > /dev/null ; then \
72
README.md
72
README.md
@@ -105,14 +105,20 @@ File|Description
|
|||||||
|
|
||||||
File|Description
|
File|Description
|
||||||
:---|:---
|
:---|:---
|
||||||
|
[yt-dlp_linux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux)|Linux (glibc 2.17+) standalone x86_64 binary
|
||||||
|
[yt-dlp_linux.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux.zip)|Unpackaged Linux (glibc 2.17+) x86_64 executable (no auto-update)
|
||||||
|
[yt-dlp_linux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64)|Linux (glibc 2.17+) standalone aarch64 binary
|
||||||
|
[yt-dlp_linux_aarch64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64.zip)|Unpackaged Linux (glibc 2.17+) aarch64 executable (no auto-update)
|
||||||
|
[yt-dlp_linux_armv7l.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_armv7l.zip)|Unpackaged Linux (glibc 2.31+) armv7l executable (no auto-update)
|
||||||
|
[yt-dlp_musllinux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux)|Linux (musl 1.2+) standalone x86_64 binary
|
||||||
|
[yt-dlp_musllinux.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux.zip)|Unpackaged Linux (musl 1.2+) x86_64 executable (no auto-update)
|
||||||
|
[yt-dlp_musllinux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux_aarch64)|Linux (musl 1.2+) standalone aarch64 binary
|
||||||
|
[yt-dlp_musllinux_aarch64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_musllinux_aarch64.zip)|Unpackaged Linux (musl 1.2+) aarch64 executable (no auto-update)
|
||||||
[yt-dlp_x86.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_x86.exe)|Windows (Win8+) standalone x86 (32-bit) binary
|
[yt-dlp_x86.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_x86.exe)|Windows (Win8+) standalone x86 (32-bit) binary
|
||||||
[yt-dlp_arm64.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_arm64.exe)|Windows (Win10+) standalone arm64 (64-bit) binary
|
[yt-dlp_win_x86.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_x86.zip)|Unpackaged Windows (Win8+) x86 (32-bit) executable (no auto-update)
|
||||||
[yt-dlp_linux](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux)|Linux standalone x64 binary
|
[yt-dlp_arm64.exe](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_arm64.exe)|Windows (Win10+) standalone ARM64 binary
|
||||||
[yt-dlp_linux_armv7l](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_armv7l)|Linux standalone armv7l (32-bit) binary
|
[yt-dlp_win_arm64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_arm64.zip)|Unpackaged Windows (Win10+) ARM64 executable (no auto-update)
|
||||||
[yt-dlp_linux_aarch64](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_linux_aarch64)|Linux standalone aarch64 (64-bit) binary
|
|
||||||
[yt-dlp_win.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win.zip)|Unpackaged Windows (Win8+) x64 executable (no auto-update)
|
[yt-dlp_win.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win.zip)|Unpackaged Windows (Win8+) x64 executable (no auto-update)
|
||||||
[yt-dlp_win_x86.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_x86.zip)|Unpackaged Windows (Win8+) x86 executable (no auto-update)
|
|
||||||
[yt-dlp_win_arm64.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_win_arm64.zip)|Unpackaged Windows (Win10+) arm64 executable (no auto-update)
|
|
||||||
[yt-dlp_macos.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos.zip)|Unpackaged MacOS (10.15+) executable (no auto-update)
|
[yt-dlp_macos.zip](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos.zip)|Unpackaged MacOS (10.15+) executable (no auto-update)
|
||||||
|
|
||||||
#### Misc
|
#### Misc
|
||||||
@@ -132,6 +138,17 @@ curl -L https://github.com/yt-dlp/yt-dlp/raw/master/public.key | gpg --import
|
|||||||
gpg --verify SHA2-256SUMS.sig SHA2-256SUMS
|
gpg --verify SHA2-256SUMS.sig SHA2-256SUMS
|
||||||
gpg --verify SHA2-512SUMS.sig SHA2-512SUMS
|
gpg --verify SHA2-512SUMS.sig SHA2-512SUMS
|
||||||
```
|
```
|
||||||
 
+#### Licensing
+
+While yt-dlp is licensed under the [Unlicense](LICENSE), many of the release files contain code from other projects with different licenses.
+
+Most notably, the PyInstaller-bundled executables include GPLv3+ licensed code, and as such the combined work is licensed under [GPLv3+](https://www.gnu.org/licenses/gpl-3.0.html).
+
+See [THIRD_PARTY_LICENSES.txt](THIRD_PARTY_LICENSES.txt) for details.
+
+The zipimport binary (`yt-dlp`), the source tarball (`yt-dlp.tar.gz`), and the PyPI source distribution & wheel only contain code licensed under the [Unlicense](LICENSE).
+
 <!-- MANPAGE: END EXCLUDED SECTION -->
 
 **Note**: The manpages, shell completion (autocomplete) files etc. are available inside the [source tarball](https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.tar.gz)
@@ -206,7 +223,7 @@ The following provide support for impersonating browser requests. This may be re
 
 * [**curl_cffi**](https://github.com/lexiforest/curl_cffi) (recommended) - Python binding for [curl-impersonate](https://github.com/lexiforest/curl-impersonate). Provides impersonation targets for Chrome, Edge and Safari. Licensed under [MIT](https://github.com/lexiforest/curl_cffi/blob/main/LICENSE)
     * Can be installed with the `curl-cffi` group, e.g. `pip install "yt-dlp[default,curl-cffi]"`
-    * Currently included in `yt-dlp.exe`, `yt-dlp_linux` and `yt-dlp_macos` builds
+    * Currently included in most builds *except* `yt-dlp` (Unix zipimport binary), `yt-dlp_x86` (Windows 32-bit) and `yt-dlp_musllinux_aarch64`
 
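As a usage sketch (assuming one of the impersonation packages above is installed; the URL and chosen target are illustrative):

```bash
# Ask yt-dlp which impersonation targets this installation supports
yt-dlp --list-impersonate-targets
# Then download while impersonating one of them
yt-dlp --impersonate chrome "https://example.com/video"
```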
 ### Metadata
@@ -224,8 +241,6 @@ The following provide support for impersonating browser requests. This may be re
 
 ### Deprecated
 
-* [**avconv** and **avprobe**](https://www.libav.org) - Now **deprecated** alternative to ffmpeg. License [depends on the build](https://libav.org/legal)
-* [**sponskrub**](https://github.com/faissaloo/SponSkrub) - For using the now **deprecated** [sponskrub options](#sponskrub-options). Licensed under [GPLv3+](https://github.com/faissaloo/SponSkrub/blob/master/LICENCE.md)
 * [**rtmpdump**](http://rtmpdump.mplayerhq.hu) - For downloading `rtmp` streams. ffmpeg can be used instead with `--downloader ffmpeg`. Licensed under [GPLv2+](http://rtmpdump.mplayerhq.hu)
 * [**mplayer**](http://mplayerhq.hu/design7/info.html) or [**mpv**](https://mpv.io) - For downloading `rtsp`/`mms` streams. ffmpeg can be used instead with `--downloader ffmpeg`. Licensed under [GPLv2+](https://github.com/mpv-player/mpv/blob/master/Copyright)
@@ -305,7 +320,6 @@ Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords
                                      playlist (default)
 --abort-on-error                 Abort downloading of further videos if an
                                      error occurs (Alias: --no-ignore-errors)
---dump-user-agent                Display the current user-agent and exit
 --list-extractors                List all supported extractors and exit
 --extractor-descriptions         Output descriptions of all supported
                                      extractors and exit
@@ -556,8 +570,6 @@ Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords
                                      --playlist-random and --playlist-reverse
 --no-lazy-playlist               Process videos in the playlist only after
                                      the entire playlist is parsed (default)
---xattr-set-filesize             Set file xattribute ytdl.filesize with
-                                     expected file size
 --hls-use-mpegts                 Use the mpegts container for HLS videos;
                                      allowing some players to play the video
                                      while downloading, and reducing the chance
@@ -581,9 +593,9 @@ Tip: Use `CTRL`+`F` (or `Command`+`F`) to search by keywords
                                      use (optionally) prefixed by the protocols
                                      (http, ftp, m3u8, dash, rtsp, rtmp, mms) to
                                      use it for. Currently supports native,
-                                     aria2c, avconv, axel, curl, ffmpeg, httpie,
-                                     wget. You can use this option multiple times
-                                     to set different downloaders for different
+                                     aria2c, axel, curl, ffmpeg, httpie, wget.
+                                     You can use this option multiple times to
+                                     set different downloaders for different
                                      protocols. E.g. --downloader aria2c
                                      --downloader "dash,m3u8:native" will use
                                      aria2c for http/ftp downloads, and the
@@ -1802,11 +1814,12 @@ The following extractors use this feature:
 #### youtube
 * `lang`: Prefer translated metadata (`title`, `description` etc) of this language code (case-sensitive). By default, the video primary language metadata is preferred, with a fallback to `en` translated. See [youtube/_base.py](https://github.com/yt-dlp/yt-dlp/blob/415b4c9f955b1a0391204bd24a7132590e7b3bdb/yt_dlp/extractor/youtube/_base.py#L402-L409) for the list of supported content language codes
 * `skip`: One or more of `hls`, `dash` or `translated_subs` to skip extraction of the m3u8 manifests, dash manifests and [auto-translated subtitles](https://github.com/yt-dlp/yt-dlp/issues/4090#issuecomment-1158102032) respectively
-* `player_client`: Clients to extract video data from. The currently available clients are `web`, `web_safari`, `web_embedded`, `web_music`, `web_creator`, `mweb`, `ios`, `android`, `android_vr`, `tv`, `tv_simply` and `tv_embedded`. By default, `tv,tv_simply,web` is used, but `tv,web_safari,web` is used when authenticating with cookies and `tv,web_creator,web` is used with premium accounts. The `web_music` client is added for `music.youtube.com` URLs when logged-in cookies are used. The `web_embedded` client is added for age-restricted videos but only works if the video is embeddable. The `tv_embedded` and `web_creator` clients are added for age-restricted videos if account age-verification is required. Some clients, such as `web` and `web_music`, require a `po_token` for their formats to be downloadable. Some clients, such as `web_creator`, will only work with authentication. Not all clients support authentication via cookies. You can use `default` for the default clients, or you can use `all` for all clients (not recommended). You can prefix a client with `-` to exclude it, e.g. `youtube:player_client=default,-ios`
+* `player_client`: Clients to extract video data from. The currently available clients are `web`, `web_safari`, `web_embedded`, `web_music`, `web_creator`, `mweb`, `ios`, `android`, `android_vr`, `tv`, `tv_simply` and `tv_embedded`. By default, `tv,web_safari,web` is used, and `tv,web_creator,web` is used with premium accounts. The `web_music` client is added for `music.youtube.com` URLs when logged-in cookies are used. The `web_embedded` client is added for age-restricted videos but only works if the video is embeddable. The `tv_embedded` and `web_creator` clients are added for age-restricted videos if account age-verification is required. Some clients, such as `web` and `web_music`, require a `po_token` for their formats to be downloadable. Some clients, such as `web_creator`, will only work with authentication. Not all clients support authentication via cookies. You can use `default` for the default clients, or you can use `all` for all clients (not recommended). You can prefix a client with `-` to exclude it, e.g. `youtube:player_client=default,-ios`
 * `player_skip`: Skip some network requests that are generally needed for robust extraction. One or more of `configs` (skip client configs), `webpage` (skip initial webpage), `js` (skip js player), `initial_data` (skip initial data/next ep request). While these options can help reduce the number of requests needed or avoid some rate-limiting, they could cause issues such as missing formats or metadata. See [#860](https://github.com/yt-dlp/yt-dlp/pull/860) and [#12826](https://github.com/yt-dlp/yt-dlp/issues/12826) for more details
 * `webpage_skip`: Skip extraction of embedded webpage data. One or both of `player_response`, `initial_data`. These options are for testing purposes and don't skip any network requests
 * `player_params`: YouTube player parameters to use for player requests. Will overwrite any default ones set by yt-dlp.
-* `player_js_variant`: The player javascript variant to use for signature and nsig deciphering. The known variants are: `main`, `tce`, `tv`, `tv_es6`, `phone`, `tablet`. The default is `main`, and the others are for debugging purposes. You can use `actual` to go with what is prescribed by the site
+* `player_js_variant`: The player javascript variant to use for n/sig deciphering. The known variants are: `main`, `tcc`, `tce`, `es5`, `es6`, `tv`, `tv_es6`, `phone`, `tablet`. The default is `main`, and the others are for debugging purposes. You can use `actual` to go with what is prescribed by the site
+* `player_js_version`: The player javascript version to use for n/sig deciphering, in the format of `signature_timestamp@hash`. Currently, the default is to force `20348@0004de42`. You can use `actual` to go with what is prescribed by the site
 * `comment_sort`: `top` or `new` (default) - choose comment sorting mode (on YouTube's side)
 * `max_comments`: Limit the amount of comments to gather. Comma-separated list of integers representing `max-comments,max-parents,max-replies,max-replies-per-thread`. Default is `all,all,all,all`
     * E.g. `all,all,1000,10` will get a maximum of 1000 replies total, with up to 10 replies per thread. `1000,all,100` will get a maximum of 1000 comments, with a maximum of 100 replies total
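For illustration, several of these arguments can be combined in a single `--extractor-args` value, separated by `;` (a sketch only; the URL is a placeholder and the chosen values are examples):

```bash
yt-dlp --extractor-args "youtube:player_client=default,-web_safari;lang=en;skip=translated_subs" \
    "https://www.youtube.com/watch?v=EXAMPLE"
```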
@@ -2201,7 +2214,6 @@ with yt_dlp.YoutubeDL(ydl_opts) as ydl:
 * Fix for [n-sig based throttling](https://github.com/ytdl-org/youtube-dl/issues/29326) **\***
 * Download livestreams from the start using `--live-from-start` (*experimental*)
 * Channel URLs download all uploads of the channel, including shorts and live
-* Support for [logging in with OAuth](https://github.com/yt-dlp/yt-dlp/wiki/Extractors#logging-in-with-oauth)
 
 * **Cookies from browser**: Cookies can be automatically extracted from all major web browsers using `--cookies-from-browser BROWSER[+KEYRING][:PROFILE][::CONTAINER]`
@@ -2345,11 +2357,7 @@ While these options still work, their use is not recommended since there are oth
 --hls-prefer-native              --downloader "m3u8:native"
 --hls-prefer-ffmpeg              --downloader "m3u8:ffmpeg"
 --list-formats-old               --compat-options list-formats (Alias: --no-list-formats-as-table)
---list-formats-as-table          --compat-options -list-formats [Default] (Alias: --no-list-formats-old)
+--list-formats-as-table          --compat-options -list-formats [Default]
---youtube-skip-dash-manifest     --extractor-args "youtube:skip=dash" (Alias: --no-youtube-include-dash-manifest)
---youtube-skip-hls-manifest      --extractor-args "youtube:skip=hls" (Alias: --no-youtube-include-hls-manifest)
---youtube-include-dash-manifest  Default (Alias: --no-youtube-skip-dash-manifest)
---youtube-include-hls-manifest   Default (Alias: --no-youtube-skip-hls-manifest)
 --geo-bypass                     --xff "default"
 --no-geo-bypass                  --xff "never"
 --geo-bypass-country CODE        --xff CODE
@@ -2360,18 +2368,13 @@ These options are not intended to be used by the end-user
 
 --test                           Download only part of video for testing extractors
 --load-pages                     Load pages dumped by --write-pages
---youtube-print-sig-code         For testing youtube signatures
 --allow-unplayable-formats       List unplayable formats also
 --no-allow-unplayable-formats    Default
 
 #### Old aliases
 These are aliases that are no longer documented for various reasons
 
---avconv-location                --ffmpeg-location
 --clean-infojson                 --clean-info-json
---cn-verification-proxy URL      --geo-verification-proxy URL
---dump-headers                   --print-traffic
---dump-intermediate-pages        --dump-pages
 --force-write-download-archive   --force-write-archive
 --no-clean-infojson              --no-clean-info-json
 --no-split-tracks                --no-split-chapters
@@ -2385,7 +2388,7 @@ These are aliases that are no longer documented for various reasons
 --yes-overwrites                 --force-overwrites
 
 #### Sponskrub Options
-Support for [SponSkrub](https://github.com/faissaloo/SponSkrub) has been deprecated in favor of the `--sponsorblock` options
+Support for [SponSkrub](https://github.com/faissaloo/SponSkrub) has been removed in favor of the `--sponsorblock` options
 
 --sponskrub                      --sponsorblock-mark all
 --no-sponskrub                   --no-sponsorblock
@@ -2407,6 +2410,17 @@ These options may no longer work as intended
 --no-include-ads                 Default
 --write-annotations              No supported site has annotations now
 --no-write-annotations           Default
+--avconv-location                Removed alias for --ffmpeg-location
+--cn-verification-proxy URL      Removed alias for --geo-verification-proxy URL
+--dump-headers                   Removed alias for --print-traffic
+--dump-intermediate-pages        Removed alias for --dump-pages
+--youtube-skip-dash-manifest     Removed alias for --extractor-args "youtube:skip=dash" (Alias: --no-youtube-include-dash-manifest)
+--youtube-skip-hls-manifest      Removed alias for --extractor-args "youtube:skip=hls" (Alias: --no-youtube-include-hls-manifest)
+--youtube-include-dash-manifest  Default (Alias: --no-youtube-skip-dash-manifest)
+--youtube-include-hls-manifest   Default (Alias: --no-youtube-skip-hls-manifest)
+--youtube-print-sig-code         Removed testing functionality
+--dump-user-agent                No longer supported
+--xattr-set-filesize             No longer supported
 --compat-options seperate-video-versions            No longer needed
 --compat-options no-youtube-prefer-utc-upload-date  No longer supported
 
4433	THIRD_PARTY_LICENSES.txt (new file)
File diff suppressed because it is too large
@@ -1,10 +1,178 @@
 services:
-  static:
-    build: static
-    environment:
-      channel: ${channel}
-      origin: ${origin}
-      version: ${version}
-    volumes:
-      - ~/build:/build
-      - ../..:/yt-dlp
+  linux_x86_64:
+    build:
+      context: linux
+      target: build
+      platforms:
+        - "linux/amd64"
+      args:
+        BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      CHANNEL: ${CHANNEL:?}
+      ORIGIN: ${ORIGIN:?}
+      VERSION:
+      PYTHON_VERSION:
+      SKIP_ONEDIR_BUILD:
+      SKIP_ONEFILE_BUILD:
+    volumes:
+      - ../..:/yt-dlp
+
+  linux_x86_64_verify:
+    build:
+      context: linux
+      target: verify
+      platforms:
+        - "linux/amd64"
+      args:
+        VERIFYIMAGE: quay.io/pypa/manylinux2014_x86_64:latest
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      UPDATE_TO:
+      SKIP_ONEDIR_BUILD:
+      SKIP_ONEFILE_BUILD:
+    volumes:
+      - ../../dist:/build
+
+  linux_aarch64:
+    build:
+      context: linux
+      target: build
+      platforms:
+        - "linux/arm64"
+      args:
+        BUILDIMAGE: ghcr.io/yt-dlp/manylinux2014_aarch64-shared:latest
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      CHANNEL: ${CHANNEL:?}
+      ORIGIN: ${ORIGIN:?}
+      VERSION:
+      PYTHON_VERSION:
+      SKIP_ONEDIR_BUILD:
+      SKIP_ONEFILE_BUILD:
+    volumes:
+      - ../..:/yt-dlp
+
+  linux_aarch64_verify:
+    build:
+      context: linux
+      target: verify
+      platforms:
+        - "linux/arm64"
+      args:
+        VERIFYIMAGE: quay.io/pypa/manylinux2014_aarch64:latest
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      UPDATE_TO:
+      SKIP_ONEDIR_BUILD:
+      SKIP_ONEFILE_BUILD:
+    volumes:
+      - ../../dist:/build
+
+  linux_armv7l:
+    build:
+      context: linux
+      target: build
+      platforms:
+        - "linux/arm/v7"
+      args:
+        BUILDIMAGE: ghcr.io/yt-dlp/manylinux_2_31_armv7l-shared:latest
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      CHANNEL: ${CHANNEL:?}
+      ORIGIN: ${ORIGIN:?}
+      VERSION:
+      PYTHON_VERSION:
+      SKIP_ONEDIR_BUILD:
+      SKIP_ONEFILE_BUILD:
+    volumes:
+      - ../..:/yt-dlp
+      - ../../venv:/yt-dlp-build-venv
+
+  linux_armv7l_verify:
+    build:
+      context: linux
+      target: verify
+      platforms:
+        - "linux/arm/v7"
+      args:
+        VERIFYIMAGE: arm32v7/debian:bullseye
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      UPDATE_TO:
+      SKIP_ONEDIR_BUILD:
+      SKIP_ONEFILE_BUILD:
+    volumes:
+      - ../../dist:/build
+
+  musllinux_x86_64:
+    build:
+      context: linux
+      target: build
+      platforms:
+        - "linux/amd64"
+      args:
+        BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_x86_64-shared:latest
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      CHANNEL: ${CHANNEL:?}
+      ORIGIN: ${ORIGIN:?}
+      VERSION:
+      PYTHON_VERSION:
+      SKIP_ONEDIR_BUILD:
+      SKIP_ONEFILE_BUILD:
+    volumes:
+      - ../..:/yt-dlp
+
+  musllinux_x86_64_verify:
+    build:
+      context: linux
+      target: verify
+      platforms:
+        - "linux/amd64"
+      args:
+        VERIFYIMAGE: alpine:3.22
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      UPDATE_TO:
+      SKIP_ONEDIR_BUILD:
+      SKIP_ONEFILE_BUILD:
+    volumes:
+      - ../../dist:/build
+
+  musllinux_aarch64:
+    build:
+      context: linux
+      target: build
+      platforms:
+        - "linux/arm64"
+      args:
+        BUILDIMAGE: ghcr.io/yt-dlp/musllinux_1_2_aarch64-shared:latest
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      CHANNEL: ${CHANNEL:?}
+      ORIGIN: ${ORIGIN:?}
+      VERSION:
+      PYTHON_VERSION:
+      SKIP_ONEDIR_BUILD:
+      SKIP_ONEFILE_BUILD:
+      EXCLUDE_CURL_CFFI: "1"
+    volumes:
+      - ../..:/yt-dlp
+
+  musllinux_aarch64_verify:
+    build:
+      context: linux
+      target: verify
+      platforms:
+        - "linux/arm64"
+      args:
+        VERIFYIMAGE: alpine:3.22
+    environment:
+      EXE_NAME: ${EXE_NAME:?}
+      UPDATE_TO:
+      SKIP_ONEDIR_BUILD:
+      SKIP_ONEFILE_BUILD:
+    volumes:
+      - ../../dist:/build
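A sketch of how these services might be driven locally (the service and variable names come from the compose file above; the values themselves are illustrative):

```bash
# Build the x86_64 glibc binary, then verify it in a clean image
EXE_NAME=yt-dlp_linux CHANNEL=stable ORIGIN=yt-dlp/yt-dlp VERSION=2025.09.26 \
    docker compose up --build linux_x86_64
EXE_NAME=yt-dlp_linux docker compose run --rm --build linux_x86_64_verify
```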
16	bundle/docker/linux/Dockerfile (new file)
@@ -0,0 +1,16 @@
ARG BUILDIMAGE=ghcr.io/yt-dlp/manylinux2014_x86_64-shared:latest
ARG VERIFYIMAGE=alpine:3.22


FROM $BUILDIMAGE AS build

WORKDIR /yt-dlp
COPY build.sh /build.sh
ENTRYPOINT ["/build.sh"]


FROM $VERIFYIMAGE AS verify

WORKDIR /testing
COPY verify.sh /verify.sh
ENTRYPOINT ["/verify.sh"]
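Outside of compose, the two stages could also be targeted directly (a sketch; the image tags are made up):

```bash
docker build --target build -t yt-dlp-linux-build bundle/docker/linux
docker build --target verify -t yt-dlp-linux-verify bundle/docker/linux
```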
48	bundle/docker/linux/build.sh (new executable file)
@@ -0,0 +1,48 @@
#!/bin/bash
set -exuo pipefail

if [[ -z "${PYTHON_VERSION:-}" ]]; then
    PYTHON_VERSION="3.13"
    echo "Defaulting to using Python ${PYTHON_VERSION}"
fi

function runpy {
    "/opt/shared-cpython-${PYTHON_VERSION}/bin/python${PYTHON_VERSION}" "$@"
}

function venvpy {
    "python${PYTHON_VERSION}" "$@"
}

INCLUDES=(
    --include pyinstaller
    --include secretstorage
)

if [[ -z "${EXCLUDE_CURL_CFFI:-}" ]]; then
    INCLUDES+=(--include curl-cffi)
fi

runpy -m venv /yt-dlp-build-venv
# shellcheck disable=SC1091
source /yt-dlp-build-venv/bin/activate
# Inside the venv we use venvpy instead of runpy
venvpy -m ensurepip --upgrade --default-pip
venvpy -m devscripts.install_deps -o --include build
venvpy -m devscripts.install_deps "${INCLUDES[@]}"
venvpy -m devscripts.make_lazy_extractors
venvpy devscripts/update-version.py -c "${CHANNEL}" -r "${ORIGIN}" "${VERSION}"

if [[ -z "${SKIP_ONEDIR_BUILD:-}" ]]; then
    mkdir -p /build
    venvpy -m bundle.pyinstaller --onedir --distpath=/build
    pushd "/build/${EXE_NAME}"
    chmod +x "${EXE_NAME}"
    venvpy -m zipfile -c "/yt-dlp/dist/${EXE_NAME}.zip" ./
    popd
fi

if [[ -z "${SKIP_ONEFILE_BUILD:-}" ]]; then
    venvpy -m bundle.pyinstaller
    chmod +x "./dist/${EXE_NAME}"
fi
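The script is driven entirely by environment variables; e.g. a onefile-only build against a non-default Python might look like this (values illustrative; run through one of the build services above):

```bash
EXE_NAME=yt-dlp_linux CHANNEL=stable ORIGIN=yt-dlp/yt-dlp VERSION=2025.09.26 \
PYTHON_VERSION=3.12 SKIP_ONEDIR_BUILD=1 \
    docker compose run --rm --build linux_x86_64
```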
51	bundle/docker/linux/verify.sh (new executable file)
@@ -0,0 +1,51 @@
#!/bin/sh
set -eu

if [ -n "${SKIP_ONEFILE_BUILD:-}" ]; then
    if [ -n "${SKIP_ONEDIR_BUILD:-}" ]; then
        echo "All executable builds were skipped"
        exit 1
    fi
    echo "Extracting zip to verify onedir build"
    if command -v python3 >/dev/null 2>&1; then
        python3 -m zipfile -e "/build/${EXE_NAME}.zip" ./
    else
        echo "Attempting to install unzip"
        if command -v dnf >/dev/null 2>&1; then
            dnf -y install --allowerasing unzip
        elif command -v yum >/dev/null 2>&1; then
            yum -y install unzip
        elif command -v apt-get >/dev/null 2>&1; then
            DEBIAN_FRONTEND=noninteractive apt-get update -qq
            DEBIAN_FRONTEND=noninteractive apt-get install -qq -y --no-install-recommends unzip
        elif command -v apk >/dev/null 2>&1; then
            apk add --no-cache unzip
        else
            echo "Unsupported image"
            exit 1
        fi
        unzip "/build/${EXE_NAME}.zip" -d ./
    fi
    chmod +x "./${EXE_NAME}"
    "./${EXE_NAME}" -v || true
    "./${EXE_NAME}" --version
    exit 0
fi

echo "Verifying onefile build"
cp "/build/${EXE_NAME}" ./
chmod +x "./${EXE_NAME}"

if [ -z "${UPDATE_TO:-}" ]; then
    "./${EXE_NAME}" -v || true
    "./${EXE_NAME}" --version
    exit 0
fi

cp "./${EXE_NAME}" "./${EXE_NAME}_downgraded"
version="$("./${EXE_NAME}" --version)"
"./${EXE_NAME}_downgraded" -v --update-to "${UPDATE_TO}"
downgraded_version="$("./${EXE_NAME}_downgraded" --version)"
if [ "${version}" = "${downgraded_version}" ]; then
    exit 1
fi
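Setting `UPDATE_TO` additionally exercises the binary's self-updater against another release; a sketch (the target tag is illustrative):

```bash
EXE_NAME=yt-dlp_linux UPDATE_TO=stable@2025.08.22 \
    docker compose run --rm --build linux_x86_64_verify
```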
@@ -1,21 +0,0 @@ (bundle/docker/static/Dockerfile deleted)
FROM alpine:3.19 as base

RUN apk --update add --no-cache \
    build-base \
    python3 \
    pipx \
    ;

RUN pipx install pyinstaller
# Requires above step to prepare the shared venv
RUN ~/.local/share/pipx/shared/bin/python -m pip install -U wheel
RUN apk --update add --no-cache \
    scons \
    patchelf \
    binutils \
    ;
RUN pipx install staticx

WORKDIR /yt-dlp
COPY entrypoint.sh /entrypoint.sh
ENTRYPOINT /entrypoint.sh
@@ -1,14 +0,0 @@ (bundle/docker/static/entrypoint.sh deleted)
#!/bin/ash
set -e

source ~/.local/share/pipx/venvs/pyinstaller/bin/activate
python -m devscripts.install_deps -o --include build
python -m devscripts.install_deps --include secretstorage --include curl-cffi
python -m devscripts.make_lazy_extractors
python devscripts/update-version.py -c "${channel}" -r "${origin}" "${version}"
python -m bundle.pyinstaller
deactivate

source ~/.local/share/pipx/venvs/staticx/bin/activate
staticx /yt-dlp/dist/yt-dlp_linux /build/yt-dlp_linux
deactivate
@@ -13,6 +13,8 @@ from PyInstaller.__main__ import run as run_pyinstaller
 from devscripts.utils import read_version
 
 OS_NAME, MACHINE, ARCH = sys.platform, platform.machine().lower(), platform.architecture()[0][:2]
+if OS_NAME == 'linux' and platform.libc_ver()[0] != 'glibc':
+    OS_NAME = 'musllinux'
 if MACHINE in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
     MACHINE = 'x86' if ARCH == '32' else ''
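The new branch keys off `platform.libc_ver()`; to see what the current interpreter reports (the first element is `'glibc'` on glibc systems and something else, typically empty, on musl):

```bash
python3 -c 'import sys, platform; print(sys.platform, platform.libc_ver())'
```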
@@ -127,7 +129,6 @@ def windows_set_version(exe, version):
         StringStruct('FileDescription', 'yt-dlp%s' % (MACHINE and f' ({MACHINE})')),
         StringStruct('FileVersion', version),
         StringStruct('InternalName', f'yt-dlp{suffix}'),
-        StringStruct('LegalCopyright', 'pukkandan.ytdlp@gmail.com | UNLICENSE'),
         StringStruct('OriginalFilename', f'yt-dlp{suffix}.exe'),
         StringStruct('ProductName', f'yt-dlp{suffix}'),
         StringStruct(
@@ -293,5 +293,10 @@
         "action": "add",
         "when": "c76ce28e06c816eb5b261dfb6aff6e69dd9b7382",
         "short": "[priority] **linux_armv7l_exe builds are being discontinued**\nThis release's `yt-dlp_linux_armv7l` binary could be the last one. [Read more](https://github.com/yt-dlp/yt-dlp/issues/13976)"
+    },
+    {
+        "action": "add",
+        "when": "08d78996831bd8e1e3c2592d740c3def00bbf548",
+        "short": "[priority] **Several options have been deprecated**\nIn order to simplify the codebase and reduce maintenance burden, various options have been deprecated. Please remove them from your commands/configurations. [Read more](https://github.com/yt-dlp/yt-dlp/issues/14198)"
     }
 ]
316	devscripts/generate_third_party_licenses.py (new file)
@@ -0,0 +1,316 @@
import hashlib
from dataclasses import dataclass
from pathlib import Path

import requests

DEFAULT_OUTPUT = 'THIRD_PARTY_LICENSES.txt'
CACHE_LOCATION = '.license_cache'
HEADER = '''THIRD-PARTY LICENSES

This file aggregates license texts of third-party components included with the yt-dlp PyInstaller-bundled executables.
yt-dlp itself is licensed under the Unlicense (see LICENSE file).
Source code for bundled third-party components is available from the original projects.
If you cannot obtain it, the maintainers will provide it as per license obligation; maintainer emails are listed in pyproject.toml.'''


@dataclass(frozen=True)
class Dependency:
    name: str
    license_url: str
    project_url: str = ''
    license: str = ''
    comment: str = ''


DEPENDENCIES: list[Dependency] = [
    # Core runtime environment components
    Dependency(
        name='Python',
        license='PSF-2.0',
        license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/LICENSE',
        project_url='https://www.python.org/',
    ),
    Dependency(
        name='Microsoft Distributable Code',
        license_url='https://raw.githubusercontent.com/python/cpython/refs/heads/main/PC/crtlicense.txt',
        comment='Only included in Windows builds',
    ),
    Dependency(
        name='bzip2',
        license='bzip2-1.0.6',
        license_url='https://gitlab.com/federicomenaquintero/bzip2/-/raw/master/COPYING',
        project_url='https://sourceware.org/bzip2/',
    ),
    Dependency(
        name='libffi',
        license='MIT',
        license_url='https://raw.githubusercontent.com/libffi/libffi/refs/heads/master/LICENSE',
        project_url='https://sourceware.org/libffi/',
    ),
    Dependency(
        name='OpenSSL 3.0+',
        license='Apache-2.0',
        license_url='https://raw.githubusercontent.com/openssl/openssl/refs/heads/master/LICENSE.txt',
        project_url='https://www.openssl.org/',
    ),
    Dependency(
        name='SQLite',
        license='Public Domain',  # Technically does not need to be included
        license_url='https://sqlite.org/src/raw/e108e1e69ae8e8a59e93c455654b8ac9356a11720d3345df2a4743e9590fb20d?at=LICENSE.md',
        project_url='https://www.sqlite.org/',
    ),
    Dependency(
        name='liblzma',
        license='0BSD',  # Technically does not need to be included
        license_url='https://raw.githubusercontent.com/tukaani-project/xz/refs/heads/master/COPYING',
        project_url='https://tukaani.org/xz/',
    ),
    Dependency(
        name='mpdecimal',
        license='BSD-2-Clause',
        # No official repo URL
        license_url='https://gist.githubusercontent.com/seproDev/9e5dbfc08af35c3f2463e64eb9b27161/raw/61f5a98bc1a4ad7d48b1c793fc3314d4d43c2ab1/mpdecimal_COPYRIGHT.txt',
        project_url='https://www.bytereef.org/mpdecimal/',
    ),
    Dependency(
        name='zlib',
        license='zlib',
        license_url='https://raw.githubusercontent.com/madler/zlib/refs/heads/develop/LICENSE',
        project_url='https://zlib.net/',
    ),
    Dependency(
        name='Expat',
        license='MIT',
        license_url='https://raw.githubusercontent.com/libexpat/libexpat/refs/heads/master/COPYING',
        project_url='https://libexpat.github.io/',
    ),
    Dependency(
        name='ncurses',
        license='X11-distribute-modifications-variant',
        license_url='https://raw.githubusercontent.com/mirror/ncurses/refs/heads/master/COPYING',
        comment='Only included in Linux/macOS builds',
        project_url='https://invisible-island.net/ncurses/',
    ),
    Dependency(
        name='GNU Readline',
        license='GPL-3.0-or-later',
        license_url='https://tiswww.case.edu/php/chet/readline/COPYING',
        comment='Only included in Linux builds',
        project_url='https://www.gnu.org/software/readline/',
    ),
    Dependency(
        name='libstdc++',
        license='GPL-3.0-with-GCC-exception',
        license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
        comment='Only included in Linux builds',
        project_url='https://gcc.gnu.org/onlinedocs/libstdc++/',
    ),
    Dependency(
        name='libgcc',
        license='GPL-3.0-with-GCC-exception',
        license_url='https://raw.githubusercontent.com/gcc-mirror/gcc/refs/heads/master/COPYING.RUNTIME',
        comment='Only included in Linux builds',
        project_url='https://gcc.gnu.org/',
    ),
    Dependency(
        name='libuuid',
        license='BSD-3-Clause',
        license_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/plain/lib/uuid/COPYING',
        comment='Only included in Linux builds',
        project_url='https://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git/tree/lib/uuid',
    ),
    Dependency(
        name='libintl',
        license='LGPL-2.1-or-later',
        license_url='https://raw.githubusercontent.com/autotools-mirror/gettext/refs/heads/master/gettext-runtime/intl/COPYING.LIB',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/gettext/',
    ),
    Dependency(
        name='libidn2',
        license='LGPL-3.0-or-later',
        license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.LESSERv3',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/libidn/',
    ),
    Dependency(
        name='libidn2 (Unicode character data files)',
        license='Unicode-TOU AND Unicode-DFS-2016',
        license_url='https://gitlab.com/libidn/libidn2/-/raw/master/COPYING.unicode',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/libidn/',
    ),
    Dependency(
        name='libunistring',
        license='LGPL-3.0-or-later',
        license_url='https://gitweb.git.savannah.gnu.org/gitweb/?p=libunistring.git;a=blob_plain;f=COPYING.LIB;hb=HEAD',
        comment='Only included in macOS builds',
        project_url='https://www.gnu.org/software/libunistring/',
    ),
    Dependency(
        name='librtmp',
        license='LGPL-2.1-or-later',
        # No official repo URL
        license_url='https://gist.githubusercontent.com/seproDev/31d8c691ccddebe37b8b379307cb232d/raw/053408e98547ea8c7d9ba3a80c965f33e163b881/librtmp_COPYING.txt',
        comment='Only included in macOS builds',
        project_url='https://rtmpdump.mplayerhq.hu/',
    ),
    Dependency(
        name='zstd',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/facebook/zstd/refs/heads/dev/LICENSE',
        comment='Only included in macOS builds',
        project_url='https://facebook.github.io/zstd/',
    ),

    # Python packages
    Dependency(
        name='brotli',
        license='MIT',
        license_url='https://raw.githubusercontent.com/google/brotli/refs/heads/master/LICENSE',
        project_url='https://brotli.org/',
    ),
    Dependency(
        name='curl_cffi',
        license='MIT',
        license_url='https://raw.githubusercontent.com/lexiforest/curl_cffi/refs/heads/main/LICENSE',
        comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
        project_url='https://curl-cffi.readthedocs.io/',
    ),
    # Dependency of curl_cffi
    Dependency(
        name='curl-impersonate',
        license='MIT',
        license_url='https://raw.githubusercontent.com/lexiforest/curl-impersonate/refs/heads/main/LICENSE',
        comment='Not included in `yt-dlp_x86` and `yt-dlp_musllinux_aarch64` builds',
        project_url='https://github.com/lexiforest/curl-impersonate',
    ),
    Dependency(
        name='cffi',
        license='MIT-0',  # Technically does not need to be included
        license_url='https://raw.githubusercontent.com/python-cffi/cffi/refs/heads/main/LICENSE',
        project_url='https://cffi.readthedocs.io/',
    ),
    # Dependency of cffi
    Dependency(
        name='pycparser',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/eliben/pycparser/refs/heads/main/LICENSE',
        project_url='https://github.com/eliben/pycparser',
    ),
    Dependency(
        name='mutagen',
        license='GPL-2.0-or-later',
        license_url='https://raw.githubusercontent.com/quodlibet/mutagen/refs/heads/main/COPYING',
        project_url='https://mutagen.readthedocs.io/',
    ),
    Dependency(
        name='PyCryptodome',
        license='Public Domain and BSD-2-Clause',
        license_url='https://raw.githubusercontent.com/Legrandin/pycryptodome/refs/heads/master/LICENSE.rst',
        project_url='https://www.pycryptodome.org/',
    ),
    Dependency(
        name='certifi',
        license='MPL-2.0',
        license_url='https://raw.githubusercontent.com/certifi/python-certifi/refs/heads/master/LICENSE',
        project_url='https://github.com/certifi/python-certifi',
    ),
    Dependency(
        name='requests',
        license='Apache-2.0',
        license_url='https://raw.githubusercontent.com/psf/requests/refs/heads/main/LICENSE',
        project_url='https://requests.readthedocs.io/',
    ),
    # Dependency of requests
    Dependency(
        name='charset-normalizer',
        license='MIT',
        license_url='https://raw.githubusercontent.com/jawah/charset_normalizer/refs/heads/master/LICENSE',
        project_url='https://charset-normalizer.readthedocs.io/',
    ),
    # Dependency of requests
    Dependency(
        name='idna',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/kjd/idna/refs/heads/master/LICENSE.md',
        project_url='https://github.com/kjd/idna',
    ),
    Dependency(
        name='urllib3',
        license='MIT',
        license_url='https://raw.githubusercontent.com/urllib3/urllib3/refs/heads/main/LICENSE.txt',
        project_url='https://urllib3.readthedocs.io/',
    ),
    Dependency(
        name='SecretStorage',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/mitya57/secretstorage/refs/heads/master/LICENSE',
        comment='Only included in Linux builds',
        project_url='https://secretstorage.readthedocs.io/',
    ),
    # Dependency of SecretStorage
    Dependency(
        name='cryptography',
        license='Apache-2.0',  # Also available as BSD-3-Clause
        license_url='https://raw.githubusercontent.com/pyca/cryptography/refs/heads/main/LICENSE.APACHE',
        comment='Only included in Linux builds',
        project_url='https://cryptography.io/',
    ),
    # Dependency of SecretStorage
    Dependency(
        name='Jeepney',
        license='MIT',
        license_url='https://gitlab.com/takluyver/jeepney/-/raw/master/LICENSE',
        comment='Only included in Linux builds',
        project_url='https://jeepney.readthedocs.io/',
    ),
    Dependency(
        name='websockets',
        license='BSD-3-Clause',
        license_url='https://raw.githubusercontent.com/python-websockets/websockets/refs/heads/main/LICENSE',
        project_url='https://websockets.readthedocs.io/',
    ),
]


def fetch_text(dep: Dependency) -> str:
    cache_dir = Path(CACHE_LOCATION)
    cache_dir.mkdir(exist_ok=True)
    url_hash = hashlib.sha256(dep.license_url.encode('utf-8')).hexdigest()
    cache_file = cache_dir / f'{url_hash}.txt'

    if cache_file.exists():
        return cache_file.read_text()

    # UA needed since some domains block requests default UA
    req = requests.get(dep.license_url, headers={'User-Agent': 'yt-dlp license fetcher'})
    req.raise_for_status()
    text = req.text
    cache_file.write_text(text)
    return text


def build_output() -> str:
    lines = [HEADER]
    for d in DEPENDENCIES:
        lines.append('\n')
        lines.append('-' * 80)
        header = f'{d.name}'
        if d.license:
            header += f' | {d.license}'
        if d.comment:
            header += f'\nNote: {d.comment}'
        if d.project_url:
            header += f'\nURL: {d.project_url}'
        lines.append(header)
        lines.append('-' * 80)

        text = fetch_text(d)
        lines.append(text.strip('\n') + '\n')
    return '\n'.join(lines)


if __name__ == '__main__':
    content = build_output()
    Path(DEFAULT_OUTPUT).write_text(content)
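With this in place, regenerating the aggregate file is a single command from the repository root (requires the `requests` package; fetched texts are cached under `.license_cache/`):

```bash
python3 devscripts/generate_third_party_licenses.py
```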
@@ -8,7 +8,7 @@ def main():
         return  # This is unused in yt-dlp
 
     parser = optparse.OptionParser(usage='%prog INFILE OUTFILE')
-    options, args = parser.parse_args()
+    _, args = parser.parse_args()
     if len(args) != 2:
         parser.error('Expected an input and an output filename')
157	devscripts/setup_variables.py (new file)
@@ -0,0 +1,157 @@
# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import datetime as dt
import json

from devscripts.utils import calculate_version

STABLE_REPOSITORY = 'yt-dlp/yt-dlp'


def setup_variables(environment):
    """
    `environment` must contain these keys:
        REPOSITORY, INPUTS, PROCESSED,
        PUSH_VERSION_COMMIT, PYPI_PROJECT,
        SOURCE_PYPI_PROJECT, SOURCE_PYPI_SUFFIX,
        TARGET_PYPI_PROJECT, TARGET_PYPI_SUFFIX,
        SOURCE_ARCHIVE_REPO, TARGET_ARCHIVE_REPO,
        HAS_SOURCE_ARCHIVE_REPO_TOKEN,
        HAS_TARGET_ARCHIVE_REPO_TOKEN,
        HAS_ARCHIVE_REPO_TOKEN

    `INPUTS` must contain these keys:
        prerelease

    `PROCESSED` must contain these keys:
        source_repo, source_tag,
        target_repo, target_tag
    """
    REPOSITORY = environment['REPOSITORY']
    INPUTS = json.loads(environment['INPUTS'])
    PROCESSED = json.loads(environment['PROCESSED'])

    source_channel = None
    does_not_have_needed_token = False
    target_repo_token = None
    pypi_project = None
    pypi_suffix = None

    source_repo = PROCESSED['source_repo']
    source_tag = PROCESSED['source_tag']
    if source_repo == 'stable':
        source_repo = STABLE_REPOSITORY
    if not source_repo:
        source_repo = REPOSITORY
    elif environment['SOURCE_ARCHIVE_REPO']:
        source_channel = environment['SOURCE_ARCHIVE_REPO']
    elif not source_tag and '/' not in source_repo:
        source_tag = source_repo
        source_repo = REPOSITORY

    resolved_source = source_repo
    if source_tag:
        resolved_source = f'{resolved_source}@{source_tag}'
    elif source_repo == STABLE_REPOSITORY:
        resolved_source = 'stable'

    revision = None
    if INPUTS['prerelease'] or not environment['PUSH_VERSION_COMMIT']:
        revision = dt.datetime.now(tz=dt.timezone.utc).strftime('%H%M%S')

    version = calculate_version(INPUTS.get('version') or revision)

    target_repo = PROCESSED['target_repo']
    target_tag = PROCESSED['target_tag']
    if target_repo:
        if target_repo == 'stable':
            target_repo = STABLE_REPOSITORY
        if not target_tag:
            if target_repo == STABLE_REPOSITORY:
                target_tag = version
            elif environment['TARGET_ARCHIVE_REPO']:
                target_tag = source_tag or version
            else:
                target_tag = target_repo
                target_repo = REPOSITORY
        if target_repo != REPOSITORY:
            target_repo = environment['TARGET_ARCHIVE_REPO']
            target_repo_token = f'{PROCESSED["target_repo"].upper()}_ARCHIVE_REPO_TOKEN'
            if not json.loads(environment['HAS_TARGET_ARCHIVE_REPO_TOKEN']):
                does_not_have_needed_token = True
            pypi_project = environment['TARGET_PYPI_PROJECT'] or None
            pypi_suffix = environment['TARGET_PYPI_SUFFIX'] or None
    else:
        target_tag = source_tag or version
        if source_channel:
            target_repo = source_channel
            target_repo_token = f'{PROCESSED["source_repo"].upper()}_ARCHIVE_REPO_TOKEN'
            if not json.loads(environment['HAS_SOURCE_ARCHIVE_REPO_TOKEN']):
                does_not_have_needed_token = True
            pypi_project = environment['SOURCE_PYPI_PROJECT'] or None
            pypi_suffix = environment['SOURCE_PYPI_SUFFIX'] or None
        else:
            target_repo = REPOSITORY

    if does_not_have_needed_token:
        if not json.loads(environment['HAS_ARCHIVE_REPO_TOKEN']):
            print(f'::error::Repository access secret {target_repo_token} not found')
            return None
        target_repo_token = 'ARCHIVE_REPO_TOKEN'

    if target_repo == REPOSITORY and not INPUTS['prerelease']:
        pypi_project = environment['PYPI_PROJECT'] or None

    return {
        'channel': resolved_source,
        'version': version,
        'target_repo': target_repo,
        'target_repo_token': target_repo_token,
        'target_tag': target_tag,
        'pypi_project': pypi_project,
        'pypi_suffix': pypi_suffix,
    }


def process_inputs(inputs):
    outputs = {}
    for key in ('source', 'target'):
        repo, _, tag = inputs.get(key, '').partition('@')
        outputs[f'{key}_repo'] = repo
        outputs[f'{key}_tag'] = tag
    return outputs


if __name__ == '__main__':
    if not os.getenv('GITHUB_OUTPUT'):
        print('This script is only intended for use with GitHub Actions', file=sys.stderr)
        sys.exit(1)

    if 'process_inputs' in sys.argv:
        inputs = json.loads(os.environ['INPUTS'])
        print('::group::Inputs')
        print(json.dumps(inputs, indent=2))
        print('::endgroup::')
        outputs = process_inputs(inputs)
        print('::group::Processed')
        print(json.dumps(outputs, indent=2))
        print('::endgroup::')
        with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
            f.write('\n'.join(f'{key}={value}' for key, value in outputs.items()))
        sys.exit(0)

    outputs = setup_variables(dict(os.environ))
    if not outputs:
        sys.exit(1)

    print('::group::Output variables')
    print(json.dumps(outputs, indent=2))
    print('::endgroup::')

    with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
        f.write('\n'.join(f'{key}={value or ""}' for key, value in outputs.items()))
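Outside of CI the script exits early unless `GITHUB_OUTPUT` is set, so a local dry-run of the input-splitting step could look like this (all values illustrative):

```bash
GITHUB_OUTPUT=/tmp/gh_output \
INPUTS='{"source": "nightly", "target": "", "prerelease": true}' \
    python3 devscripts/setup_variables.py process_inputs
```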
324	devscripts/setup_variables_tests.py (new file)
@@ -0,0 +1,324 @@
+import os
+import sys
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+import datetime as dt
+import json
+
+from devscripts.setup_variables import STABLE_REPOSITORY, process_inputs, setup_variables
+from devscripts.utils import calculate_version
+
+
+def _test(github_repository, note, repo_vars, repo_secrets, inputs, expected=None, ignore_revision=False):
+    inp = inputs.copy()
+    inp.setdefault('linux_armv7l', True)
+    inp.setdefault('prerelease', False)
+    processed = process_inputs(inp)
+    source_repo = processed['source_repo'].upper()
+    target_repo = processed['target_repo'].upper()
+    variables = {k.upper(): v for k, v in repo_vars.items()}
+    secrets = {k.upper(): v for k, v in repo_secrets.items()}
+
+    env = {
+        # Keep this in sync with prepare.setup_variables in release.yml
+        'INPUTS': json.dumps(inp),
+        'PROCESSED': json.dumps(processed),
+        'REPOSITORY': github_repository,
+        'PUSH_VERSION_COMMIT': variables.get('PUSH_VERSION_COMMIT') or '',
+        'PYPI_PROJECT': variables.get('PYPI_PROJECT') or '',
+        'SOURCE_PYPI_PROJECT': variables.get(f'{source_repo}_PYPI_PROJECT') or '',
+        'SOURCE_PYPI_SUFFIX': variables.get(f'{source_repo}_PYPI_SUFFIX') or '',
+        'TARGET_PYPI_PROJECT': variables.get(f'{target_repo}_PYPI_PROJECT') or '',
+        'TARGET_PYPI_SUFFIX': variables.get(f'{target_repo}_PYPI_SUFFIX') or '',
+        'SOURCE_ARCHIVE_REPO': variables.get(f'{source_repo}_ARCHIVE_REPO') or '',
+        'TARGET_ARCHIVE_REPO': variables.get(f'{target_repo}_ARCHIVE_REPO') or '',
+        'HAS_SOURCE_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{source_repo}_ARCHIVE_REPO_TOKEN'))),
+        'HAS_TARGET_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get(f'{target_repo}_ARCHIVE_REPO_TOKEN'))),
+        'HAS_ARCHIVE_REPO_TOKEN': json.dumps(bool(secrets.get('ARCHIVE_REPO_TOKEN'))),
+    }
+
+    result = setup_variables(env)
+    if not expected:
+        print(' {\n' + '\n'.join(f' {k!r}: {v!r},' for k, v in result.items()) + '\n }')
+        return
+
+    exp = expected.copy()
+    if ignore_revision:
+        assert len(result['version']) == len(exp['version']), f'revision missing: {github_repository} {note}'
+        version_is_tag = result['version'] == result['target_tag']
+        for dct in (result, exp):
+            dct['version'] = '.'.join(dct['version'].split('.')[:3])
+            if version_is_tag:
+                dct['target_tag'] = dct['version']
+    assert result == exp, f'unexpected result: {github_repository} {note}'
+
+
+def test_setup_variables():
+    DEFAULT_VERSION_WITH_REVISION = dt.datetime.now(tz=dt.timezone.utc).strftime('%Y.%m.%d.%H%M%S')
+    DEFAULT_VERSION = calculate_version()
+    BASE_REPO_VARS = {
+        'MASTER_ARCHIVE_REPO': 'yt-dlp/yt-dlp-master-builds',
+        'NIGHTLY_ARCHIVE_REPO': 'yt-dlp/yt-dlp-nightly-builds',
+        'NIGHTLY_PYPI_PROJECT': 'yt-dlp',
+        'NIGHTLY_PYPI_SUFFIX': 'dev',
+        'PUSH_VERSION_COMMIT': '1',
+        'PYPI_PROJECT': 'yt-dlp',
+    }
+    BASE_REPO_SECRETS = {
+        'ARCHIVE_REPO_TOKEN': '1',
+    }
+    FORK_REPOSITORY = 'fork/yt-dlp'
+    FORK_ORG = FORK_REPOSITORY.partition('/')[0]
+
+    _test(
+        STABLE_REPOSITORY, 'official vars/secrets, stable',
+        BASE_REPO_VARS, BASE_REPO_SECRETS, {}, {
+            'channel': 'stable',
+            'version': DEFAULT_VERSION,
+            'target_repo': STABLE_REPOSITORY,
+            'target_repo_token': None,
+            'target_tag': DEFAULT_VERSION,
+            'pypi_project': 'yt-dlp',
+            'pypi_suffix': None,
+        })
+    _test(
+        STABLE_REPOSITORY, 'official vars/secrets, nightly (w/o target)',
+        BASE_REPO_VARS, BASE_REPO_SECRETS, {
+            'source': 'nightly',
+            'prerelease': True,
+        }, {
+            'channel': 'nightly',
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
+            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
+            'target_tag': DEFAULT_VERSION_WITH_REVISION,
+            'pypi_project': 'yt-dlp',
+            'pypi_suffix': 'dev',
+        }, ignore_revision=True)
+    _test(
+        STABLE_REPOSITORY, 'official vars/secrets, nightly',
+        BASE_REPO_VARS, BASE_REPO_SECRETS, {
+            'source': 'nightly',
+            'target': 'nightly',
+            'prerelease': True,
+        }, {
+            'channel': 'nightly',
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': 'yt-dlp/yt-dlp-nightly-builds',
+            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
+            'target_tag': DEFAULT_VERSION_WITH_REVISION,
+            'pypi_project': 'yt-dlp',
+            'pypi_suffix': 'dev',
+        }, ignore_revision=True)
+    _test(
+        STABLE_REPOSITORY, 'official vars/secrets, master (w/o target)',
+        BASE_REPO_VARS, BASE_REPO_SECRETS, {
+            'source': 'master',
+            'prerelease': True,
+        }, {
+            'channel': 'master',
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': 'yt-dlp/yt-dlp-master-builds',
+            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
+            'target_tag': DEFAULT_VERSION_WITH_REVISION,
+            'pypi_project': None,
+            'pypi_suffix': None,
+        }, ignore_revision=True)
+    _test(
+        STABLE_REPOSITORY, 'official vars/secrets, master',
+        BASE_REPO_VARS, BASE_REPO_SECRETS, {
+            'source': 'master',
+            'target': 'master',
+            'prerelease': True,
+        }, {
+            'channel': 'master',
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': 'yt-dlp/yt-dlp-master-builds',
+            'target_repo_token': 'ARCHIVE_REPO_TOKEN',
+            'target_tag': DEFAULT_VERSION_WITH_REVISION,
+            'pypi_project': None,
+            'pypi_suffix': None,
+        }, ignore_revision=True)
+    _test(
+        STABLE_REPOSITORY, 'official vars/secrets, special tag, updates to stable',
+        BASE_REPO_VARS, BASE_REPO_SECRETS, {
+            'target': f'{STABLE_REPOSITORY}@experimental',
+            'prerelease': True,
+        }, {
+            'channel': 'stable',
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': STABLE_REPOSITORY,
+            'target_repo_token': None,
+            'target_tag': 'experimental',
+            'pypi_project': None,
+            'pypi_suffix': None,
+        }, ignore_revision=True)
+    _test(
+        STABLE_REPOSITORY, 'official vars/secrets, special tag, "stable" as target repo',
+        BASE_REPO_VARS, BASE_REPO_SECRETS, {
+            'target': 'stable@experimental',
+            'prerelease': True,
+        }, {
+            'channel': 'stable',
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': STABLE_REPOSITORY,
+            'target_repo_token': None,
+            'target_tag': 'experimental',
+            'pypi_project': None,
+            'pypi_suffix': None,
+        }, ignore_revision=True)
+
+    _test(
+        FORK_REPOSITORY, 'fork w/o vars/secrets, stable',
+        {}, {}, {}, {
+            'channel': FORK_REPOSITORY,
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': FORK_REPOSITORY,
+            'target_repo_token': None,
+            'target_tag': DEFAULT_VERSION_WITH_REVISION,
+            'pypi_project': None,
+            'pypi_suffix': None,
+        }, ignore_revision=True)
+    _test(
+        FORK_REPOSITORY, 'fork w/o vars/secrets, prerelease',
+        {}, {}, {'prerelease': True}, {
+            'channel': FORK_REPOSITORY,
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': FORK_REPOSITORY,
+            'target_repo_token': None,
+            'target_tag': DEFAULT_VERSION_WITH_REVISION,
+            'pypi_project': None,
+            'pypi_suffix': None,
+        }, ignore_revision=True)
+    _test(
+        FORK_REPOSITORY, 'fork w/o vars/secrets, nightly',
+        {}, {}, {
+            'prerelease': True,
+            'source': 'nightly',
+            'target': 'nightly',
+        }, {
+            'channel': f'{FORK_REPOSITORY}@nightly',
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': FORK_REPOSITORY,
+            'target_repo_token': None,
+            'target_tag': 'nightly',
+            'pypi_project': None,
+            'pypi_suffix': None,
+        }, ignore_revision=True)
+    _test(
+        FORK_REPOSITORY, 'fork w/o vars/secrets, master',
+        {}, {}, {
+            'prerelease': True,
+            'source': 'master',
+            'target': 'master',
+        }, {
+            'channel': f'{FORK_REPOSITORY}@master',
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': FORK_REPOSITORY,
+            'target_repo_token': None,
+            'target_tag': 'master',
+            'pypi_project': None,
+            'pypi_suffix': None,
+        }, ignore_revision=True)
+    _test(
+        FORK_REPOSITORY, 'fork w/o vars/secrets, revision',
+        {}, {}, {'version': '123'}, {
+            'channel': FORK_REPOSITORY,
+            'version': f'{DEFAULT_VERSION[:10]}.123',
+            'target_repo': FORK_REPOSITORY,
+            'target_repo_token': None,
+            'target_tag': f'{DEFAULT_VERSION[:10]}.123',
+            'pypi_project': None,
+            'pypi_suffix': None,
+        })
+
+    _test(
+        FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, stable',
+        {'PUSH_VERSION_COMMIT': '1'}, {}, {}, {
+            'channel': FORK_REPOSITORY,
+            'version': DEFAULT_VERSION,
+            'target_repo': FORK_REPOSITORY,
+            'target_repo_token': None,
+            'target_tag': DEFAULT_VERSION,
+            'pypi_project': None,
+            'pypi_suffix': None,
+        })
+    _test(
+        FORK_REPOSITORY, 'fork w/ PUSH_VERSION_COMMIT, prerelease',
+        {'PUSH_VERSION_COMMIT': '1'}, {}, {'prerelease': True}, {
+            'channel': FORK_REPOSITORY,
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': FORK_REPOSITORY,
+            'target_repo_token': None,
+            'target_tag': DEFAULT_VERSION_WITH_REVISION,
+            'pypi_project': None,
+            'pypi_suffix': None,
+        }, ignore_revision=True)
+
+    _test(
+        FORK_REPOSITORY, 'fork w/NIGHTLY_ARCHIVE_REPO_TOKEN, nightly', {
+            'NIGHTLY_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-nightly-builds',
+            'PYPI_PROJECT': 'yt-dlp-test',
+        }, {
+            'NIGHTLY_ARCHIVE_REPO_TOKEN': '1',
+        }, {
+            'source': f'{FORK_ORG}/yt-dlp-nightly-builds',
+            'target': 'nightly',
+            'prerelease': True,
+        }, {
+            'channel': f'{FORK_ORG}/yt-dlp-nightly-builds',
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': f'{FORK_ORG}/yt-dlp-nightly-builds',
+            'target_repo_token': 'NIGHTLY_ARCHIVE_REPO_TOKEN',
+            'target_tag': DEFAULT_VERSION_WITH_REVISION,
+            'pypi_project': None,
+            'pypi_suffix': None,
+        }, ignore_revision=True)
+    _test(
+        FORK_REPOSITORY, 'fork w/MASTER_ARCHIVE_REPO_TOKEN, master', {
+            'MASTER_ARCHIVE_REPO': f'{FORK_ORG}/yt-dlp-master-builds',
+            'MASTER_PYPI_PROJECT': 'yt-dlp-test',
+            'MASTER_PYPI_SUFFIX': 'dev',
+        }, {
+            'MASTER_ARCHIVE_REPO_TOKEN': '1',
+        }, {
+            'source': f'{FORK_ORG}/yt-dlp-master-builds',
+            'target': 'master',
+            'prerelease': True,
+        }, {
+            'channel': f'{FORK_ORG}/yt-dlp-master-builds',
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': f'{FORK_ORG}/yt-dlp-master-builds',
+            'target_repo_token': 'MASTER_ARCHIVE_REPO_TOKEN',
+            'target_tag': DEFAULT_VERSION_WITH_REVISION,
+            'pypi_project': 'yt-dlp-test',
+            'pypi_suffix': 'dev',
+        }, ignore_revision=True)
+
+    _test(
+        FORK_REPOSITORY, 'fork, non-numeric tag',
+        {}, {}, {'source': 'experimental'}, {
+            'channel': f'{FORK_REPOSITORY}@experimental',
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': FORK_REPOSITORY,
+            'target_repo_token': None,
+            'target_tag': 'experimental',
+            'pypi_project': None,
+            'pypi_suffix': None,
+        }, ignore_revision=True)
+    _test(
+        FORK_REPOSITORY, 'fork, non-numeric tag, updates to stable',
+        {}, {}, {
+            'prerelease': True,
+            'source': 'stable',
+            'target': 'experimental',
+        }, {
+            'channel': 'stable',
+            'version': DEFAULT_VERSION_WITH_REVISION,
+            'target_repo': FORK_REPOSITORY,
+            'target_repo_token': None,
+            'target_tag': 'experimental',
+            'pypi_project': None,
+            'pypi_suffix': None,
+        }, ignore_revision=True)
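Since the checks above are plain asserts inside a test_-prefixed function, the module can presumably be exercised either through pytest or directly, along these lines:

    # Run from the repository root so the devscripts package is importable
    from devscripts.setup_variables_tests import test_setup_variables

    test_setup_variables()  # raises AssertionError, naming the failing case, on any mismatch
    print('all setup_variables cases passed')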
@@ -9,24 +9,9 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 import argparse
 import contextlib
-import datetime as dt
 import sys
 
-from devscripts.utils import read_version, run_process, write_file
+from devscripts.utils import calculate_version, run_process, write_file
 
 
-def get_new_version(version, revision):
-    if not version:
-        version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
-
-    if revision:
-        assert revision.isdecimal(), 'Revision must be a number'
-    else:
-        old_version = read_version().split('.')
-        if version.split('.') == old_version[:3]:
-            revision = str(int(([*old_version, 0])[3]) + 1)
-
-    return f'{version}.{revision}' if revision else version
-
-
 def get_git_head():
@@ -72,9 +57,7 @@ if __name__ == '__main__':
     args = parser.parse_args()
 
     git_head = get_git_head()
-    version = (
-        args.version if args.version and '.' in args.version
-        else get_new_version(None, args.version))
+    version = calculate_version(args.version)
     write_file(args.output, VERSION_TEMPLATE.format(
         version=version, git_head=git_head, channel=args.channel, origin=args.origin,
         package_version=f'{version}{args.suffix}'))
@@ -20,7 +20,9 @@ if __name__ == '__main__':
         '--changelog-path', type=Path, default=Path(__file__).parent.parent / 'Changelog.md',
         help='path to the Changelog file')
     args = parser.parse_args()
-    new_entry = create_changelog(args)
 
     header, sep, changelog = read_file(args.changelog_path).partition('\n### ')
-    write_file(args.changelog_path, f'{header}{sep}{read_version()}\n{new_entry}\n{sep}{changelog}')
+    current_version = read_version()
+    if current_version != changelog.splitlines()[0]:
+        new_entry = create_changelog(args)
+        write_file(args.changelog_path, f'{header}{sep}{current_version}\n{new_entry}\n{sep}{changelog}')
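The new guard makes the changelog update idempotent: partition('\n### ') splits the file at the first entry heading, so the first line of changelog is the version of the newest existing entry, and re-running the script for an already-written version is now a no-op. A small illustration of the mechanism (sample text invented):

    text = '# Changelog\n### 2025.09.05\n- some fix\n### 2025.08.22\n- older fix'
    header, sep, changelog = text.partition('\n### ')
    print(changelog.splitlines()[0])  # '2025.09.05' -- this is what gets compared against read_version()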
@@ -1,5 +1,7 @@
 import argparse
+import datetime as dt
 import functools
+import re
 import subprocess
 
 
@@ -20,6 +22,23 @@ def read_version(fname='yt_dlp/version.py', varname='__version__'):
     return items[varname]
 
 
+def calculate_version(version=None, fname='yt_dlp/version.py'):
+    if version and '.' in version:
+        return version
+
+    revision = version
+    version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
+
+    if revision:
+        assert re.fullmatch(r'[0-9]+', revision), 'Revision must be numeric'
+    else:
+        old_version = read_version(fname=fname).split('.')
+        if version.split('.') == old_version[:3]:
+            revision = str(int(([*old_version, 0])[3]) + 1)
+
+    return f'{version}.{revision}' if revision else version
+
+
 def get_filename_args(has_infile=False, default_outfile=None):
     parser = argparse.ArgumentParser()
     if has_infile:
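To make the behavior concrete, this is how calculate_version resolves its argument (output values are illustrative and depend on the current date and on yt_dlp/version.py):

    from devscripts.utils import calculate_version

    calculate_version('2025.09.26')  # explicit dotted version -> returned unchanged
    calculate_version('123')         # bare number -> today's UTC date plus that revision, e.g. '2025.09.26.123'
    calculate_version()              # no input -> today's date; the revision auto-increments
                                     # only when version.py already holds the same date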
@@ -1,5 +1,5 @@
 [build-system]
-requires = ["hatchling"]
+requires = ["hatchling>=1.27.0"]
 build-backend = "hatchling.build"
 
 [project]
@@ -22,7 +22,8 @@ keywords = [
     "sponsorblock",
     "yt-dlp",
 ]
-license = {file = "LICENSE"}
+license = "Unlicense"
+license-files = ["LICENSE"]
 classifiers = [
     "Topic :: Multimedia :: Video",
     "Development Status :: 5 - Production/Stable",
@@ -34,10 +35,10 @@ classifiers = [
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
     "Programming Language :: Python :: 3.13",
+    "Programming Language :: Python :: 3.14",
     "Programming Language :: Python :: Implementation",
     "Programming Language :: Python :: Implementation :: CPython",
     "Programming Language :: Python :: Implementation :: PyPy",
-    "License :: OSI Approved :: The Unlicense (Unlicense)",
     "Operating System :: OS Independent",
 ]
 dynamic = ["version"]
@@ -63,7 +64,7 @@ secretstorage = [
 ]
 build = [
     "build",
-    "hatchling",
+    "hatchling>=1.27.0",
     "pip",
     "setuptools>=71.0.2,<81",  # See https://github.com/pyinstaller/pyinstaller/issues/9149
     "wheel",
@@ -75,7 +76,7 @@ dev = [
 ]
 static-analysis = [
    "autopep8~=2.0",
    "ruff~=0.12.0",
    "ruff~=0.13.0",
 ]
 test = [
     "pytest~=8.1",
@@ -107,7 +108,6 @@ include = [
     "/LICENSE",  # included as license
     "/pyproject.toml",  # included by default
     "/README.md",  # included as readme
-    "/setup.cfg",
     "/supportedsites.md",
 ]
 artifacts = [
@@ -173,7 +173,8 @@ python = [
     "3.11",
     "3.12",
     "3.13",
-    "pypy3.10",
+    "3.14",
+    "pypy3.11",
 ]
 
 [tool.ruff]
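The license/license-files change switches the project metadata to the PEP 639 style (an SPDX expression plus explicit license files), which is presumably why both hatchling pins move to >=1.27.0, the release that added support for it. A quick local sanity check (requires Python 3.11+ for tomllib):

    import tomllib

    with open('pyproject.toml', 'rb') as f:
        project = tomllib.load(f)['project']

    print(project['license'])        # 'Unlicense' (SPDX expression)
    print(project['license-files'])  # ['LICENSE']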
setup.cfg (deleted, 39 lines)
@@ -1,39 +0,0 @@
-[flake8]
-exclude = build,venv,.tox,.git,.pytest_cache
-ignore = E402,E501,E731,E741,W503
-max_line_length = 120
-per_file_ignores =
-    devscripts/lazy_load_template.py: F401
-
-
-[autoflake]
-ignore-init-module-imports = true
-ignore-pass-after-docstring = true
-remove-all-unused-imports = true
-remove-duplicate-keys = true
-remove-unused-variables = true
-
-
-[tox:tox]
-skipsdist = true
-envlist = py{39,310,311,312,313},pypy311
-skip_missing_interpreters = true
-
-[testenv]  # tox
-deps =
-    pytest
-commands = pytest {posargs:"-m not download"}
-passenv = HOME  # For test_compat_expanduser
-setenv =
-    # PYTHONWARNINGS = error  # Catches PIP's warnings too
-
-
-[isort]
-py_version = 39
-multi_line_output = VERTICAL_HANGING_INDENT
-line_length = 80
-reverse_relative = true
-ensure_newline_before_comments = true
-include_trailing_comma = true
-known_first_party =
-    test
@@ -20,7 +20,6 @@ The only reliable way to check if a site is supported is to try it.
 - **3sat**
 - **4tube**
 - **56.com**
-- **6play**
 - **7plus**
 - **8tracks**
 - **9c9media**
@@ -299,7 +298,6 @@ The only reliable way to check if a site is supported is to try it.
 - **cpac**
 - **cpac:playlist**
 - **Cracked**
-- **Crackle**
 - **Craftsy**
 - **CrooksAndLiars**
 - **CrowdBunker**
@@ -314,8 +312,6 @@ The only reliable way to check if a site is supported is to try it.
 - **curiositystream**: [*curiositystream*](## "netrc machine")
 - **curiositystream:collections**: [*curiositystream*](## "netrc machine")
 - **curiositystream:series**: [*curiositystream*](## "netrc machine")
-- **cwtv**
-- **cwtv:movie**
 - **Cybrary**: [*cybrary*](## "netrc machine")
 - **CybraryCourse**: [*cybrary*](## "netrc machine")
 - **DacastPlaylist**
@@ -450,7 +446,6 @@ The only reliable way to check if a site is supported is to try it.
 - **Filmweb**
 - **FiveThirtyEight**
 - **FiveTV**
-- **FlexTV**
 - **Flickr**
 - **Floatplane**
 - **FloatplaneChannel**
@@ -798,7 +793,6 @@ The only reliable way to check if a site is supported is to try it.
 - **mirrativ**
 - **mirrativ:user**
 - **MirrorCoUK**
-- **MiTele**: mitele.es
 - **mixch**
 - **mixch:archive**
 - **mixch:movie**
@@ -1009,6 +1003,7 @@ The only reliable way to check if a site is supported is to try it.
 - **onet.tv:channel**
 - **OnetMVP**
 - **OnionStudios**
+- **onsen**: [*onsen*](## "netrc machine") インターネットラジオステーション<音泉>
 - **Opencast**
 - **OpencastPlaylist**
 - **openrec**
@@ -1033,8 +1028,6 @@ The only reliable way to check if a site is supported is to try it.
 - **Panopto**
 - **PanoptoList**
 - **PanoptoPlaylist**
-- **ParamountPlus**
-- **ParamountPlusSeries**
 - **ParamountPressExpress**
 - **Parler**: Posts on parler.com
 - **parliamentlive.tv**: UK parliament videos
@@ -1069,8 +1062,6 @@ The only reliable way to check if a site is supported is to try it.
 - **PinterestCollection**
 - **PiramideTV**
 - **PiramideTVChannel**
-- **pixiv:sketch**
-- **pixiv:sketch:user**
 - **PlanetMarathi**
 - **Platzi**: [*platzi*](## "netrc machine")
 - **PlatziCourse**: [*platzi*](## "netrc machine")
@@ -1257,7 +1248,6 @@ The only reliable way to check if a site is supported is to try it.
 - **rutube:person**: Rutube person videos
 - **rutube:playlist**: Rutube playlists
 - **rutube:tags**: Rutube tags
-- **RUTV**: RUTV.RU
 - **Ruutu**: (**Currently broken**)
 - **Ruv**
 - **ruv.is:spila**
@@ -1332,7 +1322,10 @@ The only reliable way to check if a site is supported is to try it.
 - **Slideshare**
 - **SlidesLive**
 - **Slutload**
-- **Smotrim**
+- **smotrim**
+- **smotrim:audio**
+- **smotrim:live**
+- **smotrim:playlist**
 - **SnapchatSpotlight**
 - **Snotr**
 - **SoftWhiteUnderbelly**: [*softwhiteunderbelly*](## "netrc machine")
@@ -1370,8 +1363,6 @@ The only reliable way to check if a site is supported is to try it.
 - **Sport5**
 - **SportBox**: (**Currently broken**)
 - **SportDeutschland**
-- **spotify**: Spotify episodes (**Currently broken**)
-- **spotify:show**: Spotify shows (**Currently broken**)
 - **Spreaker**
 - **SpreakerShow**
 - **SpringboardPlatform**
@@ -1510,15 +1501,17 @@ The only reliable way to check if a site is supported is to try it.
 - **TrueID**
 - **TruNews**
 - **Truth**
+- **ttinglive**: 띵라이브 (formerly FlexTV)
 - **Tube8**: (**Currently broken**)
 - **TubeTuGraz**: [*tubetugraz*](## "netrc machine") tube.tugraz.at
 - **TubeTuGrazSeries**: [*tubetugraz*](## "netrc machine")
 - **tubitv**: [*tubitv*](## "netrc machine")
 - **tubitv:series**
 - **Tumblr**: [*tumblr*](## "netrc machine")
-- **TuneInPodcast**
-- **TuneInPodcastEpisode**
-- **TuneInStation**
+- **tunein:embed**
+- **tunein:podcast**
+- **tunein:podcast:program**
+- **tunein:station**
 - **tv.dfb.de**
 - **TV2**
 - **TV2Article**
@@ -1600,7 +1593,8 @@ The only reliable way to check if a site is supported is to try it.
 - **Varzesh3**: (**Currently broken**)
 - **Vbox7**
 - **Veo**
-- **Vesti**: Вести.Ru (**Currently broken**)
+- **Vevo**
+- **VevoPlaylist**
 - **VGTV**: VGTV, BTTV, FTV, Aftenposten and Aftonbladet
 - **vh1.com**
 - **vhx:embed**: [*vimeo*](## "netrc machine")
@@ -1746,7 +1740,6 @@ The only reliable way to check if a site is supported is to try it.
 - **wykop:dig:comment**
 - **wykop:post**
 - **wykop:post:comment**
-- **Xanimu**
 - **XboxClips**
 - **XHamster**
 - **XHamsterEmbed**
@@ -36,7 +36,6 @@
     "verbose": true,
     "writedescription": false,
     "writeinfojson": true,
-    "writeannotations": false,
     "writelink": false,
     "writeurllink": false,
     "writewebloclink": false,
@@ -1945,7 +1945,7 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
         server_thread.daemon = True
         server_thread.start()
 
-        (content, urlh) = self.ie._download_webpage_handle(
+        content, _ = self.ie._download_webpage_handle(
             f'http://127.0.0.1:{port}/teapot', None,
             expected_status=TEAPOT_RESPONSE_STATUS)
         self.assertEqual(content, TEAPOT_RESPONSE_BODY)
@@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 
 # Allow direct execution
+import datetime as dt
 import os
 import sys
 import unittest
@@ -12,7 +13,7 @@ import struct
 
 from yt_dlp import compat
 from yt_dlp.compat import urllib  # isort: split
-from yt_dlp.compat import compat_etree_fromstring, compat_expanduser
+from yt_dlp.compat import compat_etree_fromstring, compat_expanduser, compat_datetime_from_timestamp
 from yt_dlp.compat.urllib.request import getproxies
 
 
@@ -59,6 +60,45 @@ class TestCompat(unittest.TestCase):
     def test_struct_unpack(self):
         self.assertEqual(struct.unpack('!B', b'\x00'), (0,))
 
+    def test_compat_datetime_from_timestamp(self):
+        self.assertEqual(
+            compat_datetime_from_timestamp(0),
+            dt.datetime(1970, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
+        self.assertEqual(
+            compat_datetime_from_timestamp(1),
+            dt.datetime(1970, 1, 1, 0, 0, 1, tzinfo=dt.timezone.utc))
+        self.assertEqual(
+            compat_datetime_from_timestamp(3600),
+            dt.datetime(1970, 1, 1, 1, 0, 0, tzinfo=dt.timezone.utc))
+
+        self.assertEqual(
+            compat_datetime_from_timestamp(-1),
+            dt.datetime(1969, 12, 31, 23, 59, 59, tzinfo=dt.timezone.utc))
+        self.assertEqual(
+            compat_datetime_from_timestamp(-86400),
+            dt.datetime(1969, 12, 31, 0, 0, 0, tzinfo=dt.timezone.utc))
+
+        self.assertEqual(
+            compat_datetime_from_timestamp(0.5),
+            dt.datetime(1970, 1, 1, 0, 0, 0, 500000, tzinfo=dt.timezone.utc))
+        self.assertEqual(
+            compat_datetime_from_timestamp(1.000001),
+            dt.datetime(1970, 1, 1, 0, 0, 1, 1, tzinfo=dt.timezone.utc))
+        self.assertEqual(
+            compat_datetime_from_timestamp(-1.25),
+            dt.datetime(1969, 12, 31, 23, 59, 58, 750000, tzinfo=dt.timezone.utc))
+
+        self.assertEqual(
+            compat_datetime_from_timestamp(-1577923200),
+            dt.datetime(1920, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
+        self.assertEqual(
+            compat_datetime_from_timestamp(4102444800),
+            dt.datetime(2100, 1, 1, 0, 0, 0, tzinfo=dt.timezone.utc))
+
+        self.assertEqual(
+            compat_datetime_from_timestamp(173568960000),
+            dt.datetime(7470, 3, 8, 0, 0, 0, tzinfo=dt.timezone.utc))
+
+
 if __name__ == '__main__':
     unittest.main()
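The negative and far-future cases above are exactly where datetime.fromtimestamp can raise OSError/OverflowError on some platforms, notably Windows. A helper with this contract can be built portably from a fixed epoch plus a timedelta; a minimal sketch under that assumption (not necessarily how the actual compat function is implemented):

    import datetime as dt


    def datetime_from_timestamp(timestamp):
        # timedelta arithmetic is not limited by the platform's C time functions,
        # so negative and far-future timestamps work on Windows too
        epoch = dt.datetime(1970, 1, 1, tzinfo=dt.timezone.utc)
        return epoch + dt.timedelta(seconds=timestamp)


    assert datetime_from_timestamp(-1.25) == dt.datetime(
        1969, 12, 31, 23, 59, 58, 750000, tzinfo=dt.timezone.utc)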
@@ -29,7 +29,7 @@ class TestOverwrites(unittest.TestCase):
             '-o', 'test.webm',
             'https://www.youtube.com/watch?v=jNQXAC9IVRw',
         ], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        sout, serr = outp.communicate()
+        sout, _ = outp.communicate()
         self.assertTrue(b'has already been downloaded' in sout)
         # if the file has no content, it has not been redownloaded
         self.assertTrue(os.path.getsize(download_file) < 1)
@@ -41,7 +41,7 @@ class TestOverwrites(unittest.TestCase):
             '-o', 'test.webm',
             'https://www.youtube.com/watch?v=jNQXAC9IVRw',
         ], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        sout, serr = outp.communicate()
+        sout, _ = outp.communicate()
         self.assertTrue(b'has already been downloaded' not in sout)
         # if the file has no content, it has not been redownloaded
         self.assertTrue(os.path.getsize(download_file) > 1)
@@ -153,7 +153,7 @@ class TestPoTokenProvider:
 
         with pytest.raises(
             PoTokenProviderRejectedRequest,
-            match='External requests by "example" provider do not support proxy scheme "socks4". Supported proxy '
+            match=r'External requests by "example" provider do not support proxy scheme "socks4"\. Supported proxy '
                   'schemes: http, socks5h',
         ):
             provider.request_pot(pot_request)
@@ -9,7 +9,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 
 from test.helper import FakeYDL, report_warning
-from yt_dlp.update import UpdateInfo, Updater
+from yt_dlp.update import UpdateInfo, Updater, UPDATE_SOURCES, _make_label
 
 
 # XXX: Keep in sync with yt_dlp.update.UPDATE_SOURCES
@@ -280,6 +280,26 @@ class TestUpdate(unittest.TestCase):
         test('testing', None, current_commit='9' * 40)
         test('testing', UpdateInfo('testing', commit='9' * 40))
 
+    def test_make_label(self):
+        STABLE_REPO = UPDATE_SOURCES['stable']
+        NIGHTLY_REPO = UPDATE_SOURCES['nightly']
+        MASTER_REPO = UPDATE_SOURCES['master']
+
+        for inputs, expected in [
+            ([STABLE_REPO, '2025.09.02', '2025.09.02'], f'stable@2025.09.02 from {STABLE_REPO}'),
+            ([NIGHTLY_REPO, '2025.09.02.123456', '2025.09.02.123456'], f'nightly@2025.09.02.123456 from {NIGHTLY_REPO}'),
+            ([MASTER_REPO, '2025.09.02.987654', '2025.09.02.987654'], f'master@2025.09.02.987654 from {MASTER_REPO}'),
+            (['fork/yt-dlp', 'experimental', '2025.12.31.000000'], 'fork/yt-dlp@experimental build 2025.12.31.000000'),
+            (['fork/yt-dlp', '2025.09.02', '2025.09.02'], 'fork/yt-dlp@2025.09.02'),
+            ([STABLE_REPO, 'experimental', '2025.12.31.000000'], f'{STABLE_REPO}@experimental build 2025.12.31.000000'),
+            ([STABLE_REPO, 'experimental'], f'{STABLE_REPO}@experimental'),
+            (['fork/yt-dlp', 'experimental'], 'fork/yt-dlp@experimental'),
+        ]:
+            result = _make_label(*inputs)
+            self.assertEqual(
+                result, expected,
+                f'{inputs!r} returned {result!r} instead of {expected!r}')
+
 
 if __name__ == '__main__':
     unittest.main()
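Reading the expected strings (this is an inference from the test cases, not the actual _make_label implementation): an official repo whose tag matches the version gets a channel@tag from repo label, any other repo gets repo@tag, and a version that differs from the tag is appended as build <version>. A sketch that reproduces all of the expectations above:

    from yt_dlp.update import UPDATE_SOURCES


    def make_label_sketch(repo, tag, version=None):
        # Reverse lookup: official repo URL -> channel name ('stable', 'nightly', ...)
        channels = {url: name for name, url in UPDATE_SOURCES.items()}
        if tag == version and repo in channels:
            return f'{channels[repo]}@{tag} from {repo}'
        label = f'{repo}@{tag}'
        if version and version != tag:
            label += f' build {version}'
        return label


    assert make_label_sketch('fork/yt-dlp', 'experimental', '2025.12.31.000000') \
        == 'fork/yt-dlp@experimental build 2025.12.31.000000'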
@@ -12,6 +12,7 @@ import datetime as dt
 import io
 import itertools
 import json
+import ntpath
 import pickle
 import subprocess
 import unittest
@@ -101,11 +102,13 @@ from yt_dlp.utils import (
     remove_start,
     render_table,
     replace_extension,
+    datetime_round,
     rot47,
     sanitize_filename,
     sanitize_path,
     sanitize_url,
     shell_quote,
+    strftime_or_none,
     smuggle_url,
     str_to_int,
     strip_jsonp,
@@ -251,12 +254,6 @@ class TestUtil(unittest.TestCase):
         self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')
         self.assertEqual(sanitize_path('C:\\abc:%(title)s.%(ext)s'), 'C:\\abc#%(title)s.%(ext)s')
 
-        # Check with nt._path_normpath if available
-        try:
-            from nt import _path_normpath as nt_path_normpath
-        except ImportError:
-            nt_path_normpath = None
-
         for test, expected in [
             ('C:\\', 'C:\\'),
             ('../abc', '..\\abc'),
@@ -274,8 +271,7 @@ class TestUtil(unittest.TestCase):
             result = sanitize_path(test)
             assert result == expected, f'{test} was incorrectly resolved'
             assert result == sanitize_path(result), f'{test} changed after sanitizing again'
-            if nt_path_normpath:
-                assert result == nt_path_normpath(test), f'{test} does not match nt._path_normpath'
+            assert result == ntpath.normpath(test), f'{test} does not match ntpath.normpath'
 
     def test_sanitize_url(self):
         self.assertEqual(sanitize_url('//foo.bar'), 'http://foo.bar')
@@ -409,6 +405,25 @@ class TestUtil(unittest.TestCase):
         self.assertEqual(datetime_from_str('now+1day', precision='hour'), datetime_from_str('now+24hours', precision='auto'))
         self.assertEqual(datetime_from_str('now+23hours', precision='hour'), datetime_from_str('now+23hours', precision='auto'))
 
+    def test_datetime_round(self):
+        self.assertEqual(datetime_round(dt.datetime.strptime('1820-05-12T01:23:45Z', '%Y-%m-%dT%H:%M:%SZ')),
+                         dt.datetime(1820, 5, 12, tzinfo=dt.timezone.utc))
+        self.assertEqual(datetime_round(dt.datetime.strptime('1969-12-31T23:34:45Z', '%Y-%m-%dT%H:%M:%SZ'), 'hour'),
+                         dt.datetime(1970, 1, 1, 0, tzinfo=dt.timezone.utc))
+        self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45Z', '%Y-%m-%dT%H:%M:%SZ'), 'minute'),
+                         dt.datetime(2024, 12, 25, 1, 24, tzinfo=dt.timezone.utc))
+        self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45.123Z', '%Y-%m-%dT%H:%M:%S.%fZ'), 'second'),
+                         dt.datetime(2024, 12, 25, 1, 23, 45, tzinfo=dt.timezone.utc))
+        self.assertEqual(datetime_round(dt.datetime.strptime('2024-12-25T01:23:45.678Z', '%Y-%m-%dT%H:%M:%S.%fZ'), 'second'),
+                         dt.datetime(2024, 12, 25, 1, 23, 46, tzinfo=dt.timezone.utc))
+
+    def test_strftime_or_none(self):
+        self.assertEqual(strftime_or_none(-4722192000), '18200512')
+        self.assertEqual(strftime_or_none(0), '19700101')
+        self.assertEqual(strftime_or_none(1735084800), '20241225')
+        # Throws OverflowError
+        self.assertEqual(strftime_or_none(1735084800000), None)
+
     def test_daterange(self):
         _20century = DateRange('19000101', '20000101')
         self.assertFalse('17890714' in _20century)
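The contract these cases pin down: strftime_or_none renders a Unix timestamp as %Y%m%d and returns None instead of propagating out-of-range errors, which also covers negative (pre-1970) timestamps that datetime.fromtimestamp rejects on some platforms. A sketch of that contract (not the actual implementation in yt_dlp/utils):

    import datetime as dt


    def strftime_or_none_sketch(timestamp, fmt='%Y%m%d'):
        try:
            epoch = dt.datetime(1970, 1, 1, tzinfo=dt.timezone.utc)
            return (epoch + dt.timedelta(seconds=timestamp)).strftime(fmt)
        except (OverflowError, OSError, ValueError):
            return None


    assert strftime_or_none_sketch(-4722192000) == '18200512'
    assert strftime_or_none_sketch(1735084800000) is None  # out of range -> None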
@@ -22,7 +22,7 @@ class TestVerboseOutput(unittest.TestCase):
             '--username', 'johnsmith@gmail.com',
             '--password', 'my_secret_password',
         ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        sout, serr = outp.communicate()
+        _, serr = outp.communicate()
         self.assertTrue(b'--username' in serr)
         self.assertTrue(b'johnsmith' not in serr)
         self.assertTrue(b'--password' in serr)
@@ -36,7 +36,7 @@ class TestVerboseOutput(unittest.TestCase):
             '-u', 'johnsmith@gmail.com',
             '-p', 'my_secret_password',
         ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        sout, serr = outp.communicate()
+        _, serr = outp.communicate()
         self.assertTrue(b'-u' in serr)
         self.assertTrue(b'johnsmith' not in serr)
         self.assertTrue(b'-p' in serr)
@@ -50,7 +50,7 @@ class TestVerboseOutput(unittest.TestCase):
             '--username=johnsmith@gmail.com',
             '--password=my_secret_password',
         ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        sout, serr = outp.communicate()
+        _, serr = outp.communicate()
         self.assertTrue(b'--username' in serr)
         self.assertTrue(b'johnsmith' not in serr)
         self.assertTrue(b'--password' in serr)
@@ -64,7 +64,7 @@ class TestVerboseOutput(unittest.TestCase):
             '-u=johnsmith@gmail.com',
             '-p=my_secret_password',
         ], cwd=rootDir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        sout, serr = outp.communicate()
+        _, serr = outp.communicate()
         self.assertTrue(b'-u' in serr)
         self.assertTrue(b'johnsmith' not in serr)
         self.assertTrue(b'-p' in serr)
@@ -20,7 +20,7 @@ import random
 import ssl
 import threading
 
-from yt_dlp import socks, traverse_obj
+from yt_dlp import socks
 from yt_dlp.cookies import YoutubeDLCookieJar
 from yt_dlp.dependencies import websockets
 from yt_dlp.networking import Request
@@ -32,6 +32,7 @@ from yt_dlp.networking.exceptions import (
     SSLError,
     TransportError,
 )
+from yt_dlp.utils.traversal import traverse_obj
 from yt_dlp.utils.networking import HTTPHeaderDict
 
 TEST_DIR = os.path.dirname(os.path.abspath(__file__))
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-
-# Allow direct execution
-import os
-import sys
-import unittest
-
-sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-
-
-import xml.etree.ElementTree
-
-import yt_dlp.extractor
-import yt_dlp.YoutubeDL
-from test.helper import get_params, is_download_test, try_rm
-
-
-class YoutubeDL(yt_dlp.YoutubeDL):
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.to_stderr = self.to_screen
-
-
-params = get_params({
-    'writeannotations': True,
-    'skip_download': True,
-    'writeinfojson': False,
-    'format': 'flv',
-})
-
-
-TEST_ID = 'gr51aVj-mLg'
-ANNOTATIONS_FILE = TEST_ID + '.annotations.xml'
-EXPECTED_ANNOTATIONS = ['Speech bubble', 'Note', 'Title', 'Spotlight', 'Label']
-
-
-@is_download_test
-class TestAnnotations(unittest.TestCase):
-    def setUp(self):
-        # Clear old files
-        self.tearDown()
-
-    def test_info_json(self):
-        expected = list(EXPECTED_ANNOTATIONS)  # Two annotations could have the same text.
-        ie = yt_dlp.extractor.YoutubeIE()
-        ydl = YoutubeDL(params)
-        ydl.add_info_extractor(ie)
-        ydl.download([TEST_ID])
-        self.assertTrue(os.path.exists(ANNOTATIONS_FILE))
-        annoxml = None
-        with open(ANNOTATIONS_FILE, encoding='utf-8') as annof:
-            annoxml = xml.etree.ElementTree.parse(annof)
-        self.assertTrue(annoxml is not None, 'Failed to parse annotations XML')
-        root = annoxml.getroot()
-        self.assertEqual(root.tag, 'document')
-        annotationsTag = root.find('annotations')
-        self.assertEqual(annotationsTag.tag, 'annotations')
-        annotations = annotationsTag.findall('annotation')
-
-        # Not all the annotations have TEXT children and the annotations are returned unsorted.
-        for a in annotations:
-            self.assertEqual(a.tag, 'annotation')
-            if a.get('type') == 'text':
-                textTag = a.find('TEXT')
-                text = textTag.text
-                self.assertTrue(text in expected)  # assertIn only added in python 2.7
-                # remove the first occurrence, there could be more than one annotation with the same text
-                expected.remove(text)
-        # We should have seen (and removed) all the expected annotation texts.
-        self.assertEqual(len(expected), 0, 'Not all expected annotations were found.')
-
-    def tearDown(self):
-        try_rm(ANNOTATIONS_FILE)
-
-
-if __name__ == '__main__':
-    unittest.main()
@@ -148,6 +148,11 @@ _SIG_TESTS = [
         'gN7a-hudCuAuPH6fByOk1_GNXN0yNMHShjZXS2VOgsEItAJz0tipeavEOmNdYN-wUtcEqD3bCXjc0iyKfAyZxCBGgIARwsSdQfJ2CJtt',
         'ttJC2JfQdSswRAIgGBCxZyAfKyi0cjXCb3DqEctUw-NYdNmOEvaepit2zJAsIEggOVaSXZjhSHMNy0NXNG_1kOyBf6HPuAuCduh-',
     ),
+    (
+        'https://www.youtube.com/s/player/5ec65609/player_ias_tcc.vflset/en_US/base.js',
+        'AAJAJfQdSswRAIgNSN0GDUcHnCIXkKcF61yLBgDHiX1sUhOJdY4_GxunRYCIDeYNYP_16mQTPm5f1OVq3oV1ijUNYPjP4iUSMAjO9bZ',
+        'AJfQdSswRAIgNSN0GDUcHnCIXkKcF61ZLBgDHiX1sUhOJdY4_GxunRYCIDyYNYP_16mQTPm5f1OVq3oV1ijUNYPjP4iUSMAjO9be',
+    ),
 ]
 
 _NSIG_TESTS = [
@@ -395,6 +400,10 @@ _NSIG_TESTS = [
         'https://www.youtube.com/s/player/010fbc8d/player_es6.vflset/en_US/base.js',
         '0hlOAlqjFszVvF4Z', 'R-H23bZGAsRFTg',
     ),
+    (
+        'https://www.youtube.com/s/player/5ec65609/player_ias_tcc.vflset/en_US/base.js',
+        '6l5CTNx4AzIqH4MXM', 'NupToduxHBew1g',
+    ),
 ]
 
 
@@ -304,7 +304,6 @@ class YoutubeDL:
    clean_infojson:    Remove internal metadata from the infojson
    getcomments:       Extract video comments. This will not be written to disk
                       unless writeinfojson is also given
-   writeannotations:  Write the video annotations to a .annotations.xml file
    writethumbnail:    Write the thumbnail image to a file
    allow_playlist_files: Whether to write playlists' description, infojson etc
                       also to disk when using the 'write*' options
@@ -511,11 +510,11 @@ class YoutubeDL:
                       the downloader (see yt_dlp/downloader/common.py):
                       nopart, updatetime, buffersize, ratelimit, throttledratelimit, min_filesize,
                       max_filesize, test, noresizebuffer, retries, file_access_retries, fragment_retries,
-                      continuedl, xattr_set_filesize, hls_use_mpegts, http_chunk_size,
-                      external_downloader_args, concurrent_fragment_downloads, progress_delta.
+                      continuedl, hls_use_mpegts, http_chunk_size, external_downloader_args,
+                      concurrent_fragment_downloads, progress_delta.
 
    The following options are used by the post processors:
-   ffmpeg_location:   Location of the ffmpeg/avconv binary; either the path
+   ffmpeg_location:   Location of the ffmpeg binary; either the path
                       to the binary or its containing directory.
    postprocessor_args: A dictionary of postprocessor/executable keys (in lower case)
                       and a list of additional command-line arguments for the
@@ -566,32 +565,14 @@ class YoutubeDL:
    allsubtitles:      - Use subtitleslangs = ['all']
                       Downloads all the subtitles of the video
                       (requires writesubtitles or writeautomaticsub)
-   include_ads:       - Doesn't work
-                      Download ads as well
-   call_home:         - Not implemented
-                      Boolean, true if we are allowed to contact the
-                      yt-dlp servers for debugging.
    post_hooks:        - Register a custom postprocessor
                       A list of functions that get called as the final step
                       for each video file, after all postprocessors have been
                       called. The filename will be passed as the only argument.
    hls_prefer_native: - Use external_downloader = {'m3u8': 'native'} or {'m3u8': 'ffmpeg'}.
-                      Use the native HLS downloader instead of ffmpeg/avconv
-                      if True, otherwise use ffmpeg/avconv if False, otherwise
+                      Use the native HLS downloader instead of ffmpeg
+                      if True, otherwise use ffmpeg if False, otherwise
                       use downloader suggested by extractor if None.
-   prefer_ffmpeg:     - avconv support is deprecated
-                      If False, use avconv instead of ffmpeg if both are available,
-                      otherwise prefer ffmpeg.
-   youtube_include_dash_manifest: - Use extractor_args
-                      If True (default), DASH manifests and related
-                      data will be downloaded and processed by extractor.
-                      You can reduce network I/O by disabling it if you don't
-                      care about DASH. (only for youtube)
-   youtube_include_hls_manifest: - Use extractor_args
-                      If True (default), HLS manifests and related
-                      data will be downloaded and processed by extractor.
-                      You can reduce network I/O by disabling it if you don't
-                      care about HLS. (only for youtube)
    no_color:          Same as `color='no_color'`
    no_overwrites:     Same as `overwrites=False`
    """
@@ -750,10 +731,6 @@ class YoutubeDL:
                 return True
             return False
 
-        if check_deprecated('cn_verification_proxy', '--cn-verification-proxy', '--geo-verification-proxy'):
-            if self.params.get('geo_verification_proxy') is None:
-                self.params['geo_verification_proxy'] = self.params['cn_verification_proxy']
-
         check_deprecated('useid', '--id', '-o "%(id)s.%(ext)s"')
 
         for msg in self.params.get('_warnings', []):
@@ -2717,11 +2694,7 @@ class YoutubeDL:
             ('modified_timestamp', 'modified_date'),
         ):
             if info_dict.get(date_key) is None and info_dict.get(ts_key) is not None:
-                # Working around out-of-range timestamp values (e.g. negative ones on Windows,
-                # see http://bugs.python.org/issue1646728)
-                with contextlib.suppress(ValueError, OverflowError, OSError):
-                    upload_date = dt.datetime.fromtimestamp(info_dict[ts_key], dt.timezone.utc)
-                    info_dict[date_key] = upload_date.strftime('%Y%m%d')
+                info_dict[date_key] = strftime_or_none(info_dict[ts_key])
 
         if not info_dict.get('release_year'):
             info_dict['release_year'] = traverse_obj(info_dict, ('release_date', {lambda x: int(x[:4])}))
@@ -3339,28 +3312,6 @@ class YoutubeDL:
         elif _infojson_written is None:
             return
 
-        # Note: Annotations are deprecated
-        annofn = None
-        if self.params.get('writeannotations', False):
-            annofn = self.prepare_filename(info_dict, 'annotation')
-        if annofn:
-            if not self._ensure_dir_exists(annofn):
-                return
-            if not self.params.get('overwrites', True) and os.path.exists(annofn):
-                self.to_screen('[info] Video annotations are already present')
-            elif not info_dict.get('annotations'):
-                self.report_warning('There are no annotations to write.')
-            else:
-                try:
-                    self.to_screen('[info] Writing video annotations to: ' + annofn)
-                    with open(annofn, 'w', encoding='utf-8') as annofile:
-                        annofile.write(info_dict['annotations'])
-                except (KeyError, TypeError):
-                    self.report_warning('There are no annotations to write.')
-                except OSError:
-                    self.report_error('Cannot write annotations file: ' + annofn)
-                    return
-
         # Write internet shortcut files
         def _write_link_file(link_type):
             url = try_get(info_dict['webpage_url'], iri_to_uri)
@@ -59,11 +59,9 @@ from .utils import (
|
|||||||
render_table,
|
render_table,
|
||||||
setproctitle,
|
setproctitle,
|
||||||
shell_quote,
|
shell_quote,
|
||||||
traverse_obj,
|
|
||||||
variadic,
|
variadic,
|
||||||
write_string,
|
write_string,
|
||||||
)
|
)
|
||||||
from .utils.networking import std_headers
|
|
||||||
from .utils._utils import _UnsafeExtensionError
|
from .utils._utils import _UnsafeExtensionError
|
||||||
from .YoutubeDL import YoutubeDL
|
from .YoutubeDL import YoutubeDL
|
||||||
|
|
||||||
@@ -523,7 +521,6 @@ def validate_options(opts):
 
     if report_args_compat('post-processor', opts.postprocessor_args, 'default-compat', 'default'):
         opts.postprocessor_args['default'] = opts.postprocessor_args.pop('default-compat')
-        opts.postprocessor_args.setdefault('sponskrub', [])
 
     def report_conflict(arg1, opt1, arg2='--allow-unplayable-formats', opt2='allow_unplayable_formats',
                         val1=NO_DEFAULT, val2=NO_DEFAULT, default=False):
@@ -548,11 +545,6 @@ def validate_options(opts):
                     '"--exec before_dl:"', 'exec_cmd', val2=opts.exec_cmd.get('before_dl'))
     report_conflict('--id', 'useid', '--output', 'outtmpl', val2=opts.outtmpl.get('default'))
     report_conflict('--remux-video', 'remuxvideo', '--recode-video', 'recodevideo')
-    report_conflict('--sponskrub', 'sponskrub', '--remove-chapters', 'remove_chapters')
-    report_conflict('--sponskrub', 'sponskrub', '--sponsorblock-mark', 'sponsorblock_mark')
-    report_conflict('--sponskrub', 'sponskrub', '--sponsorblock-remove', 'sponsorblock_remove')
-    report_conflict('--sponskrub-cut', 'sponskrub_cut', '--split-chapter', 'split_chapters',
-                    val1=opts.sponskrub and opts.sponskrub_cut)
 
     # Conflicts with --allow-unplayable-formats
     report_conflict('--embed-metadata', 'addmetadata')
@@ -565,23 +557,15 @@ def validate_options(opts):
     report_conflict('--recode-video', 'recodevideo')
     report_conflict('--remove-chapters', 'remove_chapters', default=[])
     report_conflict('--remux-video', 'remuxvideo')
-    report_conflict('--sponskrub', 'sponskrub')
     report_conflict('--sponsorblock-remove', 'sponsorblock_remove', default=set())
     report_conflict('--xattrs', 'xattrs')
 
-    # Fully deprecated options
-    def report_deprecation(val, old, new=None):
-        if not val:
-            return
+    if hasattr(opts, '_deprecated_options'):
         deprecation_warnings.append(
-            f'{old} is deprecated and may be removed in a future version. Use {new} instead' if new
-            else f'{old} is deprecated and may not work as expected')
-
-    report_deprecation(opts.sponskrub, '--sponskrub', '--sponsorblock-mark or --sponsorblock-remove')
-    report_deprecation(not opts.prefer_ffmpeg, '--prefer-avconv', 'ffmpeg')
-    # report_deprecation(opts.include_ads, '--include-ads')  # We may re-implement this in future
-    # report_deprecation(opts.call_home, '--call-home')  # We may re-implement this in future
-    # report_deprecation(opts.writeannotations, '--write-annotations')  # It's just that no website has it
+            f'The following options have been deprecated: {", ".join(opts._deprecated_options)}\n'
+            'Please remove them from your command/configuration to avoid future errors.\n'
+            'See https://github.com/yt-dlp/yt-dlp/issues/14198 for more details')
+        del opts._deprecated_options
 
     # Dependent options
     opts.date = DateRange.day(opts.date) if opts.date else DateRange(opts.dateafter, opts.datebefore)
@@ -712,21 +696,6 @@ def get_postprocessors(opts):
             'add_metadata': opts.addmetadata,
             'add_infojson': opts.embed_infojson,
         }
-    # Deprecated
-    # This should be above EmbedThumbnail since sponskrub removes the thumbnail attachment
-    # but must be below EmbedSubtitle and FFmpegMetadata
-    # See https://github.com/yt-dlp/yt-dlp/issues/204 , https://github.com/faissaloo/SponSkrub/issues/29
-    # If opts.sponskrub is None, sponskrub is used, but it silently fails if the executable can't be found
-    if opts.sponskrub is not False:
-        yield {
-            'key': 'SponSkrub',
-            'path': opts.sponskrub_path,
-            'args': opts.sponskrub_args,
-            'cut': opts.sponskrub_cut,
-            'force': opts.sponskrub_force,
-            'ignoreerror': opts.sponskrub is None,
-            '_from_cli': True,
-        }
     if opts.embedthumbnail:
         yield {
             'key': 'EmbedThumbnail',
@@ -885,7 +854,6 @@ def parse_options(argv=None):
         'nopart': opts.nopart,
         'updatetime': opts.updatetime,
         'writedescription': opts.writedescription,
-        'writeannotations': opts.writeannotations,
         'writeinfojson': opts.writeinfojson,
         'allow_playlist_files': opts.allow_playlist_files,
         'clean_infojson': opts.clean_infojson,
@@ -919,7 +887,6 @@ def parse_options(argv=None):
         'max_views': opts.max_views,
         'daterange': opts.date,
         'cachedir': opts.cachedir,
-        'youtube_print_sig_code': opts.youtube_print_sig_code,
         'age_limit': opts.age_limit,
         'download_archive': opts.download_archive,
         'break_on_existing': opts.break_on_existing,
@@ -937,13 +904,9 @@ def parse_options(argv=None):
         'socket_timeout': opts.socket_timeout,
         'bidi_workaround': opts.bidi_workaround,
         'debug_printtraffic': opts.debug_printtraffic,
-        'prefer_ffmpeg': opts.prefer_ffmpeg,
-        'include_ads': opts.include_ads,
         'default_search': opts.default_search,
         'dynamic_mpd': opts.dynamic_mpd,
         'extractor_args': opts.extractor_args,
-        'youtube_include_dash_manifest': opts.youtube_include_dash_manifest,
-        'youtube_include_hls_manifest': opts.youtube_include_hls_manifest,
         'encoding': opts.encoding,
         'extract_flat': opts.extract_flat,
         'live_from_start': opts.live_from_start,
@@ -955,7 +918,6 @@ def parse_options(argv=None):
         'fixup': opts.fixup,
         'source_address': opts.source_address,
         'impersonate': opts.impersonate,
-        'call_home': opts.call_home,
         'sleep_interval_requests': opts.sleep_interval_requests,
         'sleep_interval': opts.sleep_interval,
         'max_sleep_interval': opts.max_sleep_interval,
@@ -965,7 +927,6 @@ def parse_options(argv=None):
         'force_keyframes_at_cuts': opts.force_keyframes_at_cuts,
         'list_thumbnails': opts.list_thumbnails,
         'playlist_items': opts.playlist_items,
-        'xattr_set_filesize': opts.xattr_set_filesize,
         'match_filter': opts.match_filter,
         'color': opts.color,
         'ffmpeg_location': opts.ffmpeg_location,
@@ -974,11 +935,11 @@ def parse_options(argv=None):
         'hls_split_discontinuity': opts.hls_split_discontinuity,
         'external_downloader_args': opts.external_downloader_args,
         'postprocessor_args': opts.postprocessor_args,
-        'cn_verification_proxy': opts.cn_verification_proxy,
         'geo_verification_proxy': opts.geo_verification_proxy,
         'geo_bypass': opts.geo_bypass,
         'geo_bypass_country': opts.geo_bypass_country,
         'geo_bypass_ip_block': opts.geo_bypass_ip_block,
+        'useid': opts.useid or None,
         'warn_when_outdated': opts.update_self is None,
         '_warnings': warnings,
         '_deprecation_warnings': deprecation_warnings,
@@ -991,12 +952,6 @@ def _real_main(argv=None):
 
     parser, opts, all_urls, ydl_opts = parse_options(argv)
 
-    # Dump user agent
-    if opts.dump_user_agent:
-        ua = traverse_obj(opts.headers, 'User-Agent', casesense=False, default=std_headers['User-Agent'])
-        write_string(f'{ua}\n', out=sys.stdout)
-        return
-
     if print_extractor_information(opts, all_urls):
         return
 
@@ -1,3 +1,4 @@
+import datetime as dt
 import os
 import xml.etree.ElementTree as etree
 
@@ -27,6 +28,13 @@ def compat_ord(c):
     return c if isinstance(c, int) else ord(c)
 
 
+def compat_datetime_from_timestamp(timestamp):
+    # Calling dt.datetime.fromtimestamp with negative timestamps throws error in Windows
+    # Ref: https://github.com/yt-dlp/yt-dlp/issues/5185, https://github.com/python/cpython/issues/81708,
+    # https://github.com/yt-dlp/yt-dlp/issues/6706#issuecomment-1496842642
+    return (dt.datetime.fromtimestamp(0, dt.timezone.utc) + dt.timedelta(seconds=timestamp))
+
+
 # Python 3.8+ does not honor %HOME% on windows, but this breaks compatibility with youtube-dl
 # See https://github.com/yt-dlp/yt-dlp/issues/792
 # https://docs.python.org/3/library/os.path.html#os.path.expanduser
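Note: the `compat_datetime_from_timestamp` helper added above exists because `dt.datetime.fromtimestamp()` raises `OSError` for out-of-range (notably negative, pre-1970) timestamps on Windows; anchoring at the epoch and adding a `timedelta` computes the same UTC datetime portably. A minimal standalone sketch of the same idea (illustration only, not yt-dlp code):

```python
import datetime as dt

def datetime_from_timestamp(timestamp):
    # dt.datetime.fromtimestamp(-1, ...) can raise OSError on Windows
    # (python/cpython#81708); epoch + timedelta avoids the platform limit
    return dt.datetime.fromtimestamp(0, dt.timezone.utc) + dt.timedelta(seconds=timestamp)

print(datetime_from_timestamp(-86400))  # 1969-12-31 00:00:00+00:00
```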
@@ -62,7 +62,6 @@ class FileDownloader:
     test: Download only first bytes to test the downloader.
     min_filesize: Skip files smaller than this size
     max_filesize: Skip files larger than this size
-    xattr_set_filesize: Set ytdl.filesize user xattribute with expected size.
     progress_delta: The minimum time between progress output, in seconds
     external_downloader_args: A dictionary of downloader keys (in lower case)
                               and a list of additional command-line arguments for the
@@ -563,7 +563,7 @@ class FFmpegFD(ExternalFD):
                     f'{cookie.name}={cookie.value}; path={cookie.path}; domain={cookie.domain};\r\n'
                     for cookie in cookies)])
         if fmt.get('http_headers') and is_http:
-            # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
+            # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg:
             # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
             args.extend(['-headers', ''.join(f'{key}: {val}\r\n' for key, val in fmt['http_headers'].items())])
 
@@ -654,10 +654,6 @@ class FFmpegFD(ExternalFD):
         return retval
 
 
-class AVconvFD(FFmpegFD):
-    pass
-
-
 _BY_NAME = {
     klass.get_basename(): klass
     for name, klass in globals().items()
@@ -149,14 +149,14 @@ class FlvReader(io.BytesIO):
         segments_count = self.read_unsigned_char()
         segments = []
         for _ in range(segments_count):
-            box_size, box_type, box_data = self.read_box_info()
+            _box_size, box_type, box_data = self.read_box_info()
             assert box_type == b'asrt'
             segment = FlvReader(box_data).read_asrt()
             segments.append(segment)
         fragments_run_count = self.read_unsigned_char()
         fragments = []
         for _ in range(fragments_run_count):
-            box_size, box_type, box_data = self.read_box_info()
+            _box_size, box_type, box_data = self.read_box_info()
             assert box_type == b'afrt'
             fragments.append(FlvReader(box_data).read_afrt())
 
@@ -167,7 +167,7 @@ class FlvReader(io.BytesIO):
         }
 
     def read_bootstrap_info(self):
-        total_size, box_type, box_data = self.read_box_info()
+        _, box_type, box_data = self.read_box_info()
         assert box_type == b'abst'
         return FlvReader(box_data).read_abst()
 
@@ -324,9 +324,9 @@ class F4mFD(FragmentFD):
         if requested_bitrate is None or len(formats) == 1:
             # get the best format
             formats = sorted(formats, key=lambda f: f[0])
-            rate, media = formats[-1]
+            _, media = formats[-1]
         else:
-            rate, media = next(filter(
+            _, media = next(filter(
                 lambda f: int(f[0]) == requested_bitrate, formats))
 
         # Prefer baseURL for relative URLs as per 11.2 of F4M 3.0 spec.
@@ -13,12 +13,9 @@ from ..utils import (
     ContentTooShortError,
     RetryManager,
     ThrottledDownload,
-    XAttrMetadataError,
-    XAttrUnavailableError,
     int_or_none,
     parse_http_range,
     try_call,
-    write_xattr,
 )
 from ..utils.networking import HTTPHeaderDict
 
@@ -273,12 +270,6 @@ class HttpFD(FileDownloader):
                     self.report_error(f'unable to open for writing: {err}')
                     return False
 
-                if self.params.get('xattr_set_filesize', False) and data_len is not None:
-                    try:
-                        write_xattr(ctx.tmpfilename, 'user.ytdl.filesize', str(data_len).encode())
-                    except (XAttrUnavailableError, XAttrMetadataError) as err:
-                        self.report_error(f'unable to set filesize xattr: {err}')
-
             try:
                 ctx.stream.write(data_block)
             except OSError as err:
|
|||||||
CPACPlaylistIE,
|
CPACPlaylistIE,
|
||||||
)
|
)
|
||||||
from .cracked import CrackedIE
|
from .cracked import CrackedIE
|
||||||
from .crackle import CrackleIE
|
|
||||||
from .craftsy import CraftsyIE
|
from .craftsy import CraftsyIE
|
||||||
from .crooksandliars import CrooksAndLiarsIE
|
from .crooksandliars import CrooksAndLiarsIE
|
||||||
from .crowdbunker import (
|
from .crowdbunker import (
|
||||||
@@ -444,10 +443,6 @@ from .curiositystream import (
|
|||||||
CuriosityStreamIE,
|
CuriosityStreamIE,
|
||||||
CuriosityStreamSeriesIE,
|
CuriosityStreamSeriesIE,
|
||||||
)
|
)
|
||||||
from .cwtv import (
|
|
||||||
CWTVIE,
|
|
||||||
CWTVMovieIE,
|
|
||||||
)
|
|
||||||
from .cybrary import (
|
from .cybrary import (
|
||||||
CybraryCourseIE,
|
CybraryCourseIE,
|
||||||
CybraryIE,
|
CybraryIE,
|
||||||
@@ -1141,7 +1136,6 @@ from .mit import (
|
|||||||
OCWMITIE,
|
OCWMITIE,
|
||||||
TechTVMITIE,
|
TechTVMITIE,
|
||||||
)
|
)
|
||||||
from .mitele import MiTeleIE
|
|
||||||
from .mixch import (
|
from .mixch import (
|
||||||
MixchArchiveIE,
|
MixchArchiveIE,
|
||||||
MixchIE,
|
MixchIE,
|
||||||
@@ -1433,6 +1427,7 @@ from .onet import (
|
|||||||
OnetPlIE,
|
OnetPlIE,
|
||||||
)
|
)
|
||||||
from .onionstudios import OnionStudiosIE
|
from .onionstudios import OnionStudiosIE
|
||||||
|
from .onsen import OnsenIE
|
||||||
from .opencast import (
|
from .opencast import (
|
||||||
OpencastIE,
|
OpencastIE,
|
||||||
OpencastPlaylistIE,
|
OpencastPlaylistIE,
|
||||||
@@ -1466,10 +1461,6 @@ from .panopto import (
|
|||||||
PanoptoListIE,
|
PanoptoListIE,
|
||||||
PanoptoPlaylistIE,
|
PanoptoPlaylistIE,
|
||||||
)
|
)
|
||||||
from .paramountplus import (
|
|
||||||
ParamountPlusIE,
|
|
||||||
ParamountPlusSeriesIE,
|
|
||||||
)
|
|
||||||
from .parler import ParlerIE
|
from .parler import ParlerIE
|
||||||
from .parlview import ParlviewIE
|
from .parlview import ParlviewIE
|
||||||
from .parti import (
|
from .parti import (
|
||||||
@@ -1523,10 +1514,6 @@ from .piramidetv import (
|
|||||||
PiramideTVChannelIE,
|
PiramideTVChannelIE,
|
||||||
PiramideTVIE,
|
PiramideTVIE,
|
||||||
)
|
)
|
||||||
from .pixivsketch import (
|
|
||||||
PixivSketchIE,
|
|
||||||
PixivSketchUserIE,
|
|
||||||
)
|
|
||||||
from .planetmarathi import PlanetMarathiIE
|
from .planetmarathi import PlanetMarathiIE
|
||||||
from .platzi import (
|
from .platzi import (
|
||||||
PlatziCourseIE,
|
PlatziCourseIE,
|
||||||
@@ -1783,7 +1770,6 @@ from .rutube import (
|
|||||||
RutubePlaylistIE,
|
RutubePlaylistIE,
|
||||||
RutubeTagsIE,
|
RutubeTagsIE,
|
||||||
)
|
)
|
||||||
from .rutv import RUTVIE
|
|
||||||
from .ruutu import RuutuIE
|
from .ruutu import RuutuIE
|
||||||
from .ruv import (
|
from .ruv import (
|
||||||
RuvIE,
|
RuvIE,
|
||||||
@@ -1853,7 +1839,6 @@ from .simplecast import (
|
|||||||
SimplecastPodcastIE,
|
SimplecastPodcastIE,
|
||||||
)
|
)
|
||||||
from .sina import SinaIE
|
from .sina import SinaIE
|
||||||
from .sixplay import SixPlayIE
|
|
||||||
from .skeb import SkebIE
|
from .skeb import SkebIE
|
||||||
from .sky import (
|
from .sky import (
|
||||||
SkyNewsIE,
|
SkyNewsIE,
|
||||||
@@ -1881,7 +1866,12 @@ from .skynewsau import SkyNewsAUIE
|
|||||||
from .slideshare import SlideshareIE
|
from .slideshare import SlideshareIE
|
||||||
from .slideslive import SlidesLiveIE
|
from .slideslive import SlidesLiveIE
|
||||||
from .slutload import SlutloadIE
|
from .slutload import SlutloadIE
|
||||||
from .smotrim import SmotrimIE
|
from .smotrim import (
|
||||||
|
SmotrimAudioIE,
|
||||||
|
SmotrimIE,
|
||||||
|
SmotrimLiveIE,
|
||||||
|
SmotrimPlaylistIE,
|
||||||
|
)
|
||||||
from .snapchat import SnapchatSpotlightIE
|
from .snapchat import SnapchatSpotlightIE
|
||||||
from .snotr import SnotrIE
|
from .snotr import SnotrIE
|
||||||
from .softwhiteunderbelly import SoftWhiteUnderbellyIE
|
from .softwhiteunderbelly import SoftWhiteUnderbellyIE
|
||||||
@@ -1929,10 +1919,6 @@ from .spiegel import SpiegelIE
|
|||||||
from .sport5 import Sport5IE
|
from .sport5 import Sport5IE
|
||||||
from .sportbox import SportBoxIE
|
from .sportbox import SportBoxIE
|
||||||
from .sportdeutschland import SportDeutschlandIE
|
from .sportdeutschland import SportDeutschlandIE
|
||||||
from .spotify import (
|
|
||||||
SpotifyIE,
|
|
||||||
SpotifyShowIE,
|
|
||||||
)
|
|
||||||
from .spreaker import (
|
from .spreaker import (
|
||||||
SpreakerIE,
|
SpreakerIE,
|
||||||
SpreakerShowIE,
|
SpreakerShowIE,
|
||||||
@@ -2153,6 +2139,7 @@ from .tubitv import (
|
|||||||
)
|
)
|
||||||
from .tumblr import TumblrIE
|
from .tumblr import TumblrIE
|
||||||
from .tunein import (
|
from .tunein import (
|
||||||
|
TuneInEmbedIE,
|
||||||
TuneInPodcastEpisodeIE,
|
TuneInPodcastEpisodeIE,
|
||||||
TuneInPodcastIE,
|
TuneInPodcastIE,
|
||||||
TuneInShortenerIE,
|
TuneInShortenerIE,
|
||||||
@@ -2287,7 +2274,10 @@ from .utreon import UtreonIE
|
|||||||
from .varzesh3 import Varzesh3IE
|
from .varzesh3 import Varzesh3IE
|
||||||
from .vbox7 import Vbox7IE
|
from .vbox7 import Vbox7IE
|
||||||
from .veo import VeoIE
|
from .veo import VeoIE
|
||||||
from .vesti import VestiIE
|
from .vevo import (
|
||||||
|
VevoIE,
|
||||||
|
VevoPlaylistIE,
|
||||||
|
)
|
||||||
from .vgtv import (
|
from .vgtv import (
|
||||||
VGTVIE,
|
VGTVIE,
|
||||||
BTArticleIE,
|
BTArticleIE,
|
||||||
@@ -2472,7 +2462,6 @@ from .wykop import (
|
|||||||
WykopPostCommentIE,
|
WykopPostCommentIE,
|
||||||
WykopPostIE,
|
WykopPostIE,
|
||||||
)
|
)
|
||||||
from .xanimu import XanimuIE
|
|
||||||
from .xboxclips import XboxClipsIE
|
from .xboxclips import XboxClipsIE
|
||||||
from .xhamster import (
|
from .xhamster import (
|
||||||
XHamsterEmbedIE,
|
XHamsterEmbedIE,
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
 from .common import InfoExtractor
 from ..utils import (
+    clean_html,
     clean_podcast_url,
     int_or_none,
     parse_iso8601,
@@ -17,7 +18,7 @@ class ApplePodcastsIE(InfoExtractor):
             'ext': 'mp3',
             'title': 'Ferreck Dawn - To The Break of Dawn 117',
             'episode': 'Ferreck Dawn - To The Break of Dawn 117',
-            'description': 'md5:1fc571102f79dbd0a77bfd71ffda23bc',
+            'description': 'md5:8c4f5c2c30af17ed6a98b0b9daf15b76',
             'upload_date': '20240812',
             'timestamp': 1723449600,
             'duration': 3596,
@@ -58,7 +59,7 @@ class ApplePodcastsIE(InfoExtractor):
             r'<script [^>]*\bid=["\']serialized-server-data["\'][^>]*>', webpage,
             'server data', episode_id, contains_pattern=r'\[{(?s:.+)}\]')[0]['data']
         model_data = traverse_obj(server_data, (
-            'headerButtonItems', lambda _, v: v['$kind'] == 'bookmark' and v['modelType'] == 'EpisodeOffer',
+            'headerButtonItems', lambda _, v: v['$kind'] == 'share' and v['modelType'] == 'EpisodeLockup',
             'model', {dict}, any))
 
         return {
@@ -68,7 +69,8 @@ class ApplePodcastsIE(InfoExtractor):
                 or self._yield_json_ld(webpage, episode_id, fatal=False), episode_id, fatal=False),
             **traverse_obj(model_data, {
                 'title': ('title', {str}),
-                'url': ('streamUrl', {clean_podcast_url}),
+                'description': ('summary', {clean_html}),
+                'url': ('playAction', 'episodeOffer', 'streamUrl', {clean_podcast_url}),
                 'timestamp': ('releaseDate', {parse_iso8601}),
                 'duration': ('duration', {int_or_none}),
             }),
@@ -1366,7 +1366,7 @@ class BilibiliSpaceVideoIE(BilibiliSpaceBaseIE):
             else:
                 yield self.url_result(f'https://www.bilibili.com/video/{entry["bvid"]}', BiliBiliIE, entry['bvid'])
 
-        metadata, paged_list = self._extract_playlist(fetch_page, get_metadata, get_entries)
+        _, paged_list = self._extract_playlist(fetch_page, get_metadata, get_entries)
         return self.playlist_result(paged_list, playlist_id)
 
 
@@ -1400,7 +1400,7 @@ class BilibiliSpaceAudioIE(BilibiliSpaceBaseIE):
             for entry in page_data.get('data') or []:
                 yield self.url_result(f'https://www.bilibili.com/audio/au{entry["id"]}', BilibiliAudioIE, entry['id'])
 
-        metadata, paged_list = self._extract_playlist(fetch_page, get_metadata, get_entries)
+        _, paged_list = self._extract_playlist(fetch_page, get_metadata, get_entries)
         return self.playlist_result(paged_list, playlist_id)
 
 
@@ -174,7 +174,7 @@ class BrainPOPLegacyBaseIE(BrainPOPBaseIE):
     }
 
     def _real_extract(self, url):
-        slug, display_id = self._match_valid_url(url).group('slug', 'id')
+        display_id = self._match_id(url)
         webpage = self._download_webpage(url, display_id)
         topic_data = self._search_json(
             r'var\s+content\s*=\s*', webpage, 'content data',
@@ -5,8 +5,6 @@ import zlib
 
 from .anvato import AnvatoIE
 from .common import InfoExtractor
-from .paramountplus import ParamountPlusIE
-from ..networking import HEADRequest
 from ..utils import (
     ExtractorError,
     UserNotLive,
@@ -132,13 +130,7 @@ class CBSNewsEmbedIE(CBSNewsBaseIE):
         video_id = item['mpxRefId']
         video_url = self._get_video_url(item)
         if not video_url:
-            # Old embeds redirect user to ParamountPlus but most links are 404
-            pplus_url = f'https://www.paramountplus.com/shows/video/{video_id}'
-            try:
-                self._request_webpage(HEADRequest(pplus_url), video_id)
-                return self.url_result(pplus_url, ParamountPlusIE)
-            except ExtractorError:
-                self.raise_no_formats('This video is no longer available', True, video_id)
+            raise ExtractorError('This video is no longer available', expected=True)
 
         return self._extract_video(item, video_url, video_id)
 
@@ -6,7 +6,7 @@ class CharlieRoseIE(InfoExtractor):
     _VALID_URL = r'https?://(?:www\.)?charlierose\.com/(?:video|episode)(?:s|/player)/(?P<id>\d+)'
     _TESTS = [{
         'url': 'https://charlierose.com/videos/27996',
-        'md5': 'fda41d49e67d4ce7c2411fd2c4702e09',
+        'md5': '4405b662f557f94aa256fa6a7baf7426',
         'info_dict': {
             'id': '27996',
             'ext': 'mp4',
@@ -39,12 +39,16 @@ class CharlieRoseIE(InfoExtractor):
             self._PLAYER_BASE % video_id, webpage, video_id,
             m3u8_entry_protocol='m3u8_native')[0]
         self._remove_duplicate_formats(info_dict['formats'])
+        for fmt in info_dict['formats']:
+            if fmt.get('protocol') == 'm3u8_native':
+                fmt['__needs_testing'] = True
 
         info_dict.update({
             'id': video_id,
             'title': title,
             'thumbnail': self._og_search_thumbnail(webpage),
             'description': self._og_search_description(webpage),
+            '_format_sort_fields': ('proto',),
         })
 
         return info_dict
@@ -272,6 +272,7 @@ class CNNIndonesiaIE(InfoExtractor):
         return merge_dicts(json_ld_data, {
             '_type': 'url_transparent',
             'url': embed_url,
+            'id': video_id,
             'upload_date': upload_date,
             'tags': try_call(lambda: self._html_search_meta('keywords', webpage).split(', ')),
         })
@@ -1,243 +0,0 @@
-import hashlib
-import hmac
-import re
-import time
-
-from .common import InfoExtractor
-from ..networking.exceptions import HTTPError
-from ..utils import (
-    ExtractorError,
-    determine_ext,
-    float_or_none,
-    int_or_none,
-    orderedSet,
-    parse_age_limit,
-    parse_duration,
-    url_or_none,
-)
-
-
-class CrackleIE(InfoExtractor):
-    _VALID_URL = r'(?:crackle:|https?://(?:(?:www|m)\.)?(?:sony)?crackle\.com/(?:playlist/\d+/|(?:[^/]+/)+))(?P<id>\d+)'
-    _TESTS = [{
-        # Crackle is available in the United States and territories
-        'url': 'https://www.crackle.com/thanksgiving/2510064',
-        'info_dict': {
-            'id': '2510064',
-            'ext': 'mp4',
-            'title': 'Touch Football',
-            'description': 'md5:cfbb513cf5de41e8b56d7ab756cff4df',
-            'duration': 1398,
-            'view_count': int,
-            'average_rating': 0,
-            'age_limit': 17,
-            'genre': 'Comedy',
-            'creator': 'Daniel Powell',
-            'artist': 'Chris Elliott, Amy Sedaris',
-            'release_year': 2016,
-            'series': 'Thanksgiving',
-            'episode': 'Touch Football',
-            'season_number': 1,
-            'episode_number': 1,
-        },
-        'params': {
-            # m3u8 download
-            'skip_download': True,
-        },
-        'expected_warnings': [
-            'Trying with a list of known countries',
-        ],
-    }, {
-        'url': 'https://www.sonycrackle.com/thanksgiving/2510064',
-        'only_matching': True,
-    }]
-
-    _MEDIA_FILE_SLOTS = {
-        '360p.mp4': {
-            'width': 640,
-            'height': 360,
-        },
-        '480p.mp4': {
-            'width': 768,
-            'height': 432,
-        },
-        '480p_1mbps.mp4': {
-            'width': 852,
-            'height': 480,
-        },
-    }
-
-    def _download_json(self, url, *args, **kwargs):
-        # Authorization generation algorithm is reverse engineered from:
-        # https://www.sonycrackle.com/static/js/main.ea93451f.chunk.js
-        timestamp = time.strftime('%Y%m%d%H%M', time.gmtime())
-        h = hmac.new(b'IGSLUQCBDFHEOIFM', '|'.join([url, timestamp]).encode(), hashlib.sha1).hexdigest().upper()
-        headers = {
-            'Accept': 'application/json',
-            'Authorization': '|'.join([h, timestamp, '117', '1']),
-        }
-        return InfoExtractor._download_json(self, url, *args, headers=headers, **kwargs)
-
-    def _real_extract(self, url):
-        video_id = self._match_id(url)
-
-        geo_bypass_country = self.get_param('geo_bypass_country', None)
-        countries = orderedSet((geo_bypass_country, 'US', 'AU', 'CA', 'AS', 'FM', 'GU', 'MP', 'PR', 'PW', 'MH', 'VI', ''))
-        num_countries, num = len(countries) - 1, 0
-
-        media = {}
-        for num, country in enumerate(countries):
-            if num == 1:  # start hard-coded list
-                self.report_warning('%s. Trying with a list of known countries' % (
-                    f'Unable to obtain video formats from {geo_bypass_country} API' if geo_bypass_country
-                    else 'No country code was given using --geo-bypass-country'))
-            elif num == num_countries:  # end of list
-                geo_info = self._download_json(
-                    'https://web-api-us.crackle.com/Service.svc/geo/country',
-                    video_id, fatal=False, note='Downloading geo-location information from crackle API',
-                    errnote='Unable to fetch geo-location information from crackle') or {}
-                country = geo_info.get('CountryCode')
-                if country is None:
-                    continue
-                self.to_screen(f'{self.IE_NAME} identified country as {country}')
-                if country in countries:
-                    self.to_screen(f'Downloading from {country} API was already attempted. Skipping...')
-                    continue
-
-            if country is None:
-                continue
-            try:
-                media = self._download_json(
-                    f'https://web-api-us.crackle.com/Service.svc/details/media/{video_id}/{country}?disableProtocols=true',
-                    video_id, note=f'Downloading media JSON from {country} API',
-                    errnote='Unable to download media JSON')
-            except ExtractorError as e:
-                # 401 means geo restriction, trying next country
-                if isinstance(e.cause, HTTPError) and e.cause.status == 401:
-                    continue
-                raise
-
-            status = media.get('status')
-            if status.get('messageCode') != '0':
-                raise ExtractorError(
-                    '{} said: {} {} - {}'.format(
-                        self.IE_NAME, status.get('messageCodeDescription'), status.get('messageCode'), status.get('message')),
-                    expected=True)
-
-            # Found video formats
-            if isinstance(media.get('MediaURLs'), list):
-                break
-
-        ignore_no_formats = self.get_param('ignore_no_formats_error')
-
-        if not media or (not media.get('MediaURLs') and not ignore_no_formats):
-            raise ExtractorError(
-                'Unable to access the crackle API. Try passing your country code '
-                'to --geo-bypass-country. If it still does not work and the '
-                'video is available in your country')
-        title = media['Title']
-
-        formats, subtitles = [], {}
-        has_drm = False
-        for e in media.get('MediaURLs') or []:
-            if e.get('UseDRM'):
-                has_drm = True
-                format_url = url_or_none(e.get('DRMPath'))
-            else:
-                format_url = url_or_none(e.get('Path'))
-            if not format_url:
-                continue
-            ext = determine_ext(format_url)
-            if ext == 'm3u8':
-                fmts, subs = self._extract_m3u8_formats_and_subtitles(
-                    format_url, video_id, 'mp4', entry_protocol='m3u8_native',
-                    m3u8_id='hls', fatal=False)
-                formats.extend(fmts)
-                subtitles = self._merge_subtitles(subtitles, subs)
-            elif ext == 'mpd':
-                fmts, subs = self._extract_mpd_formats_and_subtitles(
-                    format_url, video_id, mpd_id='dash', fatal=False)
-                formats.extend(fmts)
-                subtitles = self._merge_subtitles(subtitles, subs)
-            elif format_url.endswith('.ism/Manifest'):
-                fmts, subs = self._extract_ism_formats_and_subtitles(
-                    format_url, video_id, ism_id='mss', fatal=False)
-                formats.extend(fmts)
-                subtitles = self._merge_subtitles(subtitles, subs)
-            else:
-                mfs_path = e.get('Type')
-                mfs_info = self._MEDIA_FILE_SLOTS.get(mfs_path)
-                if not mfs_info:
-                    continue
-                formats.append({
-                    'url': format_url,
-                    'format_id': 'http-' + mfs_path.split('.')[0],
-                    'width': mfs_info['width'],
-                    'height': mfs_info['height'],
-                })
-        if not formats and has_drm:
-            self.report_drm(video_id)
-
-        description = media.get('Description')
-        duration = int_or_none(media.get(
-            'DurationInSeconds')) or parse_duration(media.get('Duration'))
-        view_count = int_or_none(media.get('CountViews'))
-        average_rating = float_or_none(media.get('UserRating'))
-        age_limit = parse_age_limit(media.get('Rating'))
-        genre = media.get('Genre')
-        release_year = int_or_none(media.get('ReleaseYear'))
-        creator = media.get('Directors')
-        artist = media.get('Cast')
-
-        if media.get('MediaTypeDisplayValue') == 'Full Episode':
-            series = media.get('ShowName')
-            episode = title
-            season_number = int_or_none(media.get('Season'))
-            episode_number = int_or_none(media.get('Episode'))
-        else:
-            series = episode = season_number = episode_number = None
-
-        cc_files = media.get('ClosedCaptionFiles')
-        if isinstance(cc_files, list):
-            for cc_file in cc_files:
-                if not isinstance(cc_file, dict):
-                    continue
-                cc_url = url_or_none(cc_file.get('Path'))
-                if not cc_url:
-                    continue
-                lang = cc_file.get('Locale') or 'en'
-                subtitles.setdefault(lang, []).append({'url': cc_url})
-
-        thumbnails = []
-        images = media.get('Images')
-        if isinstance(images, list):
-            for image_key, image_url in images.items():
-                mobj = re.search(r'Img_(\d+)[xX](\d+)', image_key)
-                if not mobj:
-                    continue
-                thumbnails.append({
-                    'url': image_url,
-                    'width': int(mobj.group(1)),
-                    'height': int(mobj.group(2)),
-                })
-
-        return {
-            'id': video_id,
-            'title': title,
-            'description': description,
-            'duration': duration,
-            'view_count': view_count,
-            'average_rating': average_rating,
-            'age_limit': age_limit,
-            'genre': genre,
-            'creator': creator,
-            'artist': artist,
-            'release_year': release_year,
-            'series': series,
-            'episode': episode,
-            'season_number': season_number,
-            'episode_number': episode_number,
-            'thumbnails': thumbnails,
-            'subtitles': subtitles,
-            'formats': formats,
-        }
@@ -1,180 +0,0 @@
-import re
-
-from .common import InfoExtractor
-from ..utils import (
-    ExtractorError,
-    int_or_none,
-    parse_age_limit,
-    parse_iso8601,
-    parse_qs,
-    smuggle_url,
-    str_or_none,
-    update_url_query,
-)
-from ..utils.traversal import traverse_obj
-
-
-class CWTVIE(InfoExtractor):
-    IE_NAME = 'cwtv'
-    _VALID_URL = r'https?://(?:www\.)?cw(?:tv(?:pr)?|seed)\.com/(?:shows/)?(?:[^/]+/)+[^?]*\?.*\b(?:play|watch|guid)=(?P<id>[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12})'
-    _TESTS = [{
-        'url': 'https://www.cwtv.com/shows/continuum/a-stitch-in-time/?play=9149a1e1-4cb2-46d7-81b2-47d35bbd332b',
-        'info_dict': {
-            'id': '9149a1e1-4cb2-46d7-81b2-47d35bbd332b',
-            'ext': 'mp4',
-            'title': 'A Stitch in Time',
-            'description': r're:(?s)City Protective Services officer Kiera Cameron is transported from 2077.+',
-            'thumbnail': r're:https?://.+\.jpe?g',
-            'duration': 2632,
-            'timestamp': 1736928000,
-            'uploader': 'CWTV',
-            'chapters': 'count:5',
-            'series': 'Continuum',
-            'season_number': 1,
-            'episode_number': 1,
-            'age_limit': 14,
-            'upload_date': '20250115',
-            'season': 'Season 1',
-            'episode': 'Episode 1',
-        },
-        'params': {
-            # m3u8 download
-            'skip_download': True,
-        },
-    }, {
-        'url': 'http://cwtv.com/shows/arrow/legends-of-yesterday/?play=6b15e985-9345-4f60-baf8-56e96be57c63',
-        'info_dict': {
-            'id': '6b15e985-9345-4f60-baf8-56e96be57c63',
-            'ext': 'mp4',
-            'title': 'Legends of Yesterday',
-            'description': r're:(?s)Oliver and Barry Allen take Kendra Saunders and Carter Hall to a remote.+',
-            'duration': 2665,
-            'series': 'Arrow',
-            'season_number': 4,
-            'season': '4',
-            'episode_number': 8,
-            'upload_date': '20151203',
-            'timestamp': 1449122100,
-        },
-        'params': {
-            # m3u8 download
-            'skip_download': True,
-        },
-        'skip': 'redirect to http://cwtv.com/shows/arrow/',
-    }, {
-        'url': 'http://www.cwseed.com/shows/whose-line-is-it-anyway/jeff-davis-4/?play=24282b12-ead2-42f2-95ad-26770c2c6088',
-        'info_dict': {
-            'id': '24282b12-ead2-42f2-95ad-26770c2c6088',
-            'ext': 'mp4',
-            'title': 'Jeff Davis 4',
-            'description': 'Jeff Davis is back to make you laugh.',
-            'duration': 1263,
-            'series': 'Whose Line Is It Anyway?',
-            'season_number': 11,
-            'episode_number': 20,
-            'upload_date': '20151006',
-            'timestamp': 1444107300,
-            'age_limit': 14,
-            'uploader': 'CWTV',
-            'thumbnail': r're:https?://.+\.jpe?g',
-            'chapters': 'count:4',
-            'episode': 'Episode 20',
-            'season': 'Season 11',
-        },
-        'params': {
-            # m3u8 download
-            'skip_download': True,
-        },
-    }, {
-        'url': 'http://cwtv.com/thecw/chroniclesofcisco/?play=8adebe35-f447-465f-ab52-e863506ff6d6',
-        'only_matching': True,
-    }, {
-        'url': 'http://cwtvpr.com/the-cw/video?watch=9eee3f60-ef4e-440b-b3b2-49428ac9c54e',
-        'only_matching': True,
-    }, {
-        'url': 'http://cwtv.com/shows/arrow/legends-of-yesterday/?watch=6b15e985-9345-4f60-baf8-56e96be57c63',
-        'only_matching': True,
-    }, {
-        'url': 'http://www.cwtv.com/movies/play/?guid=0a8e8b5b-1356-41d5-9a6a-4eda1a6feb6c',
-        'only_matching': True,
-    }]
-
-    def _real_extract(self, url):
-        video_id = self._match_id(url)
-        data = self._download_json(
-            f'https://images.cwtv.com/feed/app-2/video-meta/apiversion_22/device_android/guid_{video_id}', video_id)
-        if traverse_obj(data, 'result') != 'ok':
-            raise ExtractorError(traverse_obj(data, (('error_msg', 'msg'), {str}, any)), expected=True)
-        video_data = data['video']
-        title = video_data['title']
-        mpx_url = update_url_query(
-            video_data.get('mpx_url') or f'https://link.theplatform.com/s/cwtv/media/guid/2703454149/{video_id}',
-            {'formats': 'M3U+none'})
-
-        season = str_or_none(video_data.get('season'))
-        episode = str_or_none(video_data.get('episode'))
-        if episode and season:
-            episode = episode[len(season):]
-
-        return {
-            '_type': 'url_transparent',
-            'id': video_id,
-            'title': title,
-            'url': smuggle_url(mpx_url, {'force_smil_url': True}),
-            'description': video_data.get('description_long'),
-            'duration': int_or_none(video_data.get('duration_secs')),
-            'series': video_data.get('series_name'),
-            'season_number': int_or_none(season),
-            'episode_number': int_or_none(episode),
-            'timestamp': parse_iso8601(video_data.get('start_time')),
-            'age_limit': parse_age_limit(video_data.get('rating')),
-            'ie_key': 'ThePlatform',
-            'thumbnail': video_data.get('large_thumbnail'),
-        }
-
-
-class CWTVMovieIE(InfoExtractor):
-    IE_NAME = 'cwtv:movie'
-    _VALID_URL = r'https?://(?:www\.)?cwtv\.com/shows/(?P<id>[\w-]+)/?\?(?:[^#]+&)?viewContext=Movies'
-    _TESTS = [{
-        'url': 'https://www.cwtv.com/shows/the-crush/?viewContext=Movies+Swimlane',
-        'info_dict': {
-            'id': '0a8e8b5b-1356-41d5-9a6a-4eda1a6feb6c',
-            'ext': 'mp4',
-            'title': 'The Crush',
-            'upload_date': '20241112',
-            'description': 'md5:1549acd90dff4a8273acd7284458363e',
-            'chapters': 'count:9',
-            'timestamp': 1731398400,
-            'age_limit': 16,
-            'duration': 5337,
-            'series': 'The Crush',
-            'season': 'Season 1',
-            'uploader': 'CWTV',
-            'season_number': 1,
-            'episode': 'Episode 1',
-            'episode_number': 1,
-            'thumbnail': r're:https?://.+\.jpe?g',
-        },
-        'params': {
-            # m3u8 download
-            'skip_download': True,
-        },
-    }]
-    _UUID_RE = r'[\da-f]{8}-(?:[\da-f]{4}-){3}[\da-f]{12}'
-
-    def _real_extract(self, url):
-        display_id = self._match_id(url)
-        webpage = self._download_webpage(url, display_id)
-        app_url = (
-            self._html_search_meta('al:ios:url', webpage, default=None)
-            or self._html_search_meta('al:android:url', webpage, default=None))
-        video_id = (
-            traverse_obj(parse_qs(app_url), ('video_id', 0, {lambda x: re.fullmatch(self._UUID_RE, x)}, 0))
-            or self._search_regex([
-                rf'CWTV\.Site\.curPlayingGUID\s*=\s*["\']({self._UUID_RE})',
-                rf'CWTV\.Site\.viewInAppURL\s*=\s*["\']/shows/[\w-]+/watch-in-app/\?play=({self._UUID_RE})',
-            ], webpage, 'video ID'))
-
-        return self.url_result(
-            f'https://www.cwtv.com/shows/{display_id}/{display_id}/?play={video_id}', CWTVIE, video_id)
@@ -90,6 +90,10 @@ class DisneyIE(InfoExtractor):
             webpage, 'embed data'), video_id)
         video_data = page_data['video']
 
+        for external in video_data.get('externals', []):
+            if external.get('source') == 'vevo':
+                return self.url_result('vevo:' + external['data_id'], 'Vevo')
+
         video_id = video_data['id']
         title = video_data['title']
 
@@ -7,7 +7,7 @@ from ..utils import (
 
 
 class FifaIE(InfoExtractor):
-    _VALID_URL = r'https?://www\.fifa\.com/fifaplus/(?P<locale>\w{2})/watch/([^#?]+/)?(?P<id>\w+)'
+    _VALID_URL = r'https?://www\.fifa\.com/fifaplus/\w{2}/watch/([^#?]+/)?(?P<id>\w+)'
     _TESTS = [{
         'url': 'https://www.fifa.com/fifaplus/en/watch/7on10qPcnyLajDDU3ntg6y',
         'info_dict': {
@@ -51,7 +51,7 @@ class FifaIE(InfoExtractor):
     }]
 
     def _real_extract(self, url):
-        video_id, locale = self._match_valid_url(url).group('id', 'locale')
+        video_id = self._match_id(url)
         webpage = self._download_webpage(url, video_id)
 
         preconnect_link = self._search_regex(
@@ -3,15 +3,19 @@ from ..networking.exceptions import HTTPError
 from ..utils import (
     ExtractorError,
     UserNotLive,
+    int_or_none,
+    join_nonempty,
     parse_iso8601,
     str_or_none,
-    traverse_obj,
     url_or_none,
 )
+from ..utils.traversal import traverse_obj
 
 
 class FlexTVIE(InfoExtractor):
-    _VALID_URL = r'https?://(?:www\.)?flextv\.co\.kr/channels/(?P<id>\d+)/live'
+    IE_NAME = 'ttinglive'
+    IE_DESC = '띵라이브 (formerly FlexTV)'
+    _VALID_URL = r'https?://(?:www\.)?(?:ttinglive\.com|flextv\.co\.kr)/channels/(?P<id>\d+)/live'
     _TESTS = [{
        'url': 'https://www.flextv.co.kr/channels/231638/live',
        'info_dict': {
@@ -36,21 +40,32 @@ class FlexTVIE(InfoExtractor):
 
         try:
             stream_data = self._download_json(
-                f'https://api.flextv.co.kr/api/channels/{channel_id}/stream',
+                f'https://api.ttinglive.com/api/channels/{channel_id}/stream',
                 channel_id, query={'option': 'all'})
         except ExtractorError as e:
             if isinstance(e.cause, HTTPError) and e.cause.status == 400:
                 raise UserNotLive(video_id=channel_id)
             raise
 
-        playlist_url = stream_data['sources'][0]['url']
-        formats, subtitles = self._extract_m3u8_formats_and_subtitles(
-            playlist_url, channel_id, 'mp4')
+        formats = []
+        for stream in traverse_obj(stream_data, ('sources', ..., {dict})):
+            if stream.get('format') == 'ivs' and url_or_none(stream.get('url')):
+                formats.extend(self._extract_m3u8_formats(
+                    stream['url'], channel_id, 'mp4', live=True, fatal=False, m3u8_id='ivs'))
+            for format_type in ['hls', 'flv']:
+                for data in traverse_obj(stream, (
+                        'urlDetail', format_type, 'resolution', lambda _, v: url_or_none(v['url']))):
+                    formats.append({
+                        'format_id': join_nonempty(format_type, data.get('suffixName'), delim=''),
+                        'url': data['url'],
+                        'height': int_or_none(data.get('resolution')),
+                        'ext': 'mp4' if format_type == 'hls' else 'flv',
+                        'protocol': 'm3u8_native' if format_type == 'hls' else 'http',
+                    })
 
         return {
             'id': channel_id,
             'formats': formats,
-            'subtitles': subtitles,
             'is_live': True,
             **traverse_obj(stream_data, {
                 'title': ('stream', 'title', {str}),
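For context on the `traverse_obj` calls introduced above: a `...` path component branches over every element of a list (here narrowed to dicts via `{dict}`), while a `lambda _, v: ...` component keeps only the items for which the function returns truthy. A toy illustration with an invented payload shape (the real API response may differ):

```python
from yt_dlp.utils import url_or_none
from yt_dlp.utils.traversal import traverse_obj

# Hypothetical stream payload shaped like the code above expects
stream = {
    'urlDetail': {'hls': {'resolution': [
        {'url': 'https://example.com/720.m3u8', 'suffixName': 'p720', 'resolution': '720'},
        {'url': None, 'suffixName': 'p480', 'resolution': '480'},  # dropped by the filter
    ]}},
}

for data in traverse_obj(stream, (
        'urlDetail', 'hls', 'resolution', lambda _, v: url_or_none(v['url']))):
    print(data['url'])  # -> https://example.com/720.m3u8
```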
@@ -772,8 +772,8 @@ class GenericIE(InfoExtractor):
 
         if default_search in ('auto', 'auto_warning', 'fixup_error'):
             if re.match(r'[^\s/]+\.[^\s/]+/', url):
-                self.report_warning('The url doesn\'t specify the protocol, trying with http')
-                return self.url_result('http://' + url)
+                self.report_warning('The url doesn\'t specify the protocol, trying with https')
+                return self.url_result('https://' + url)
             elif default_search != 'fixup_error':
                 if default_search == 'auto_warning':
                     if re.match(r'^(?:url|URL)$', url):
@@ -786,9 +786,7 @@ class GenericIE(InfoExtractor):
                 return self.url_result('ytsearch:' + url)
 
         if default_search in ('error', 'fixup_error'):
-            raise ExtractorError(
-                f'{url!r} is not a valid URL. '
-                f'Set --default-search "ytsearch" (or run yt-dlp "ytsearch:{url}" ) to search YouTube', expected=True)
+            raise ExtractorError(f'{url!r} is not a valid URL', expected=True)
         else:
             if ':' not in default_search:
                 default_search += ':'
@@ -12,6 +12,7 @@ from ..utils import (
     get_element_html_by_id,
     int_or_none,
     lowercase_escape,
+    parse_qs,
     try_get,
     update_url_query,
 )
@@ -111,14 +112,18 @@ class GoogleDriveIE(InfoExtractor):
                     self._caption_formats_ext.append(f.attrib['fmt_code'])
 
     def _get_captions_by_type(self, video_id, subtitles_id, caption_type,
-                              origin_lang_code=None):
+                              origin_lang_code=None, origin_lang_name=None):
         if not subtitles_id or not caption_type:
             return
         captions = {}
         for caption_entry in self._captions_xml.findall(
                 self._CAPTIONS_ENTRY_TAG[caption_type]):
             caption_lang_code = caption_entry.attrib.get('lang_code')
-            if not caption_lang_code:
+            caption_name = caption_entry.attrib.get('name') or origin_lang_name
+            if not caption_lang_code or not caption_name:
+                self.report_warning(f'Missing necessary caption metadata. '
+                                    f'Need lang_code and name attributes. '
+                                    f'Found: {caption_entry.attrib}')
                 continue
             caption_format_data = []
             for caption_format in self._caption_formats_ext:
@@ -129,7 +134,7 @@ class GoogleDriveIE(InfoExtractor):
                     'lang': (caption_lang_code if origin_lang_code is None
                              else origin_lang_code),
                     'type': 'track',
-                    'name': '',
+                    'name': caption_name,
                     'kind': '',
                 }
                 if origin_lang_code is not None:
@@ -155,14 +160,15 @@ class GoogleDriveIE(InfoExtractor):
|
|||||||
self._download_subtitles_xml(video_id, subtitles_id, hl)
|
self._download_subtitles_xml(video_id, subtitles_id, hl)
|
||||||
if not self._captions_xml:
|
if not self._captions_xml:
|
||||||
return
|
return
|
||||||
track = self._captions_xml.find('track')
|
track = next((t for t in self._captions_xml.findall('track') if t.attrib.get('cantran') == 'true'), None)
|
||||||
if track is None:
|
if track is None:
|
||||||
return
|
return
|
||||||
origin_lang_code = track.attrib.get('lang_code')
|
origin_lang_code = track.attrib.get('lang_code')
|
||||||
if not origin_lang_code:
|
origin_lang_name = track.attrib.get('name')
|
||||||
|
if not origin_lang_code or not origin_lang_name:
|
||||||
return
|
return
|
||||||
return self._get_captions_by_type(
|
return self._get_captions_by_type(
|
||||||
video_id, subtitles_id, 'automatic_captions', origin_lang_code)
|
video_id, subtitles_id, 'automatic_captions', origin_lang_code, origin_lang_name)
|
||||||
|
|
||||||
def _real_extract(self, url):
|
def _real_extract(self, url):
|
||||||
video_id = self._match_id(url)
|
video_id = self._match_id(url)
|
||||||
@@ -268,10 +274,8 @@ class GoogleDriveIE(InfoExtractor):
|
|||||||
subtitles_id = None
|
subtitles_id = None
|
||||||
ttsurl = get_value('ttsurl')
|
ttsurl = get_value('ttsurl')
|
||||||
if ttsurl:
|
if ttsurl:
|
||||||
# the video Id for subtitles will be the last value in the ttsurl
|
# the subtitles ID is the vid param of the ttsurl query
|
||||||
# query string
|
subtitles_id = parse_qs(ttsurl).get('vid', [None])[-1]
|
||||||
subtitles_id = ttsurl.encode().decode(
|
|
||||||
'unicode_escape').split('=')[-1]
|
|
||||||
|
|
||||||
self.cookiejar.clear(domain='.google.com', path='/', name='NID')
|
self.cookiejar.clear(domain='.google.com', path='/', name='NID')
|
||||||
|
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ from ..utils import (
     url_or_none,
     urljoin,
 )
+from ..utils.traversal import traverse_obj
 
 
 class ITVIE(InfoExtractor):
@@ -223,6 +224,7 @@ class ITVBTCCIE(InfoExtractor):
         },
         'playlist_count': 12,
     }, {
+        # news page, can have absent `data` field
         'url': 'https://www.itv.com/news/2021-10-27/i-have-to-protect-the-country-says-rishi-sunak-as-uk-faces-interest-rate-hike',
         'info_dict': {
             'id': 'i-have-to-protect-the-country-says-rishi-sunak-as-uk-faces-interest-rate-hike',
@@ -243,7 +245,7 @@ class ITVBTCCIE(InfoExtractor):
 
         entries = []
         for video in json_map:
-            if not any(video['data'].get(attr) == 'Brightcove' for attr in ('name', 'type')):
+            if not any(traverse_obj(video, ('data', attr)) == 'Brightcove' for attr in ('name', 'type')):
                 continue
             video_id = video['data']['id']
             account_id = video['data']['accountId']
@@ -95,26 +95,47 @@ class KickVODIE(KickBaseIE):
     IE_NAME = 'kick:vod'
     _VALID_URL = r'https?://(?:www\.)?kick\.com/[\w-]+/videos/(?P<id>[\da-f]{8}-(?:[\da-f]{4}-){3}[\da-f]{12})'
     _TESTS = [{
-        'url': 'https://kick.com/xqc/videos/8dd97a8d-e17f-48fb-8bc3-565f88dbc9ea',
-        'md5': '3870f94153e40e7121a6e46c068b70cb',
+        # Regular VOD
+        'url': 'https://kick.com/xqc/videos/5c697a87-afce-4256-b01f-3c8fe71ef5cb',
         'info_dict': {
-            'id': '8dd97a8d-e17f-48fb-8bc3-565f88dbc9ea',
+            'id': '5c697a87-afce-4256-b01f-3c8fe71ef5cb',
             'ext': 'mp4',
-            'title': '18+ #ad 🛑LIVE🛑CLICK🛑DRAMA🛑NEWS🛑STUFF🛑REACT🛑GET IN HHERE🛑BOP BOP🛑WEEEE WOOOO🛑',
+            'title': '🐗LIVE🐗CLICK🐗HERE🐗DRAMA🐗ALL DAY🐗NEWS🐗VIDEOS🐗CLIPS🐗GAMES🐗STUFF🐗WOW🐗IM HERE🐗LETS GO🐗COOL🐗VERY NICE🐗',
             'description': 'THE BEST AT ABSOLUTELY EVERYTHING. THE JUICER. LEADER OF THE JUICERS.',
-            'channel': 'xqc',
-            'channel_id': '668',
             'uploader': 'xQc',
             'uploader_id': '676',
-            'upload_date': '20240909',
-            'timestamp': 1725919141,
-            'duration': 10155.0,
-            'thumbnail': r're:^https?://.*\.jpg',
+            'channel': 'xqc',
+            'channel_id': '668',
             'view_count': int,
-            'categories': ['Just Chatting'],
-            'age_limit': 0,
+            'age_limit': 18,
+            'duration': 22278.0,
+            'thumbnail': r're:^https?://.*\.jpg',
+            'categories': ['Deadlock'],
+            'timestamp': 1756082443,
+            'upload_date': '20250825',
         },
         'params': {'skip_download': 'm3u8'},
+    }, {
+        # VOD of ongoing livestream (at the time of writing the test, ID rotates every two days)
+        'url': 'https://kick.com/a-log-burner/videos/5230df84-ea38-46e1-be4f-f5949ae55641',
+        'info_dict': {
+            'id': '5230df84-ea38-46e1-be4f-f5949ae55641',
+            'ext': 'mp4',
+            'title': r're:😴 Cozy Fireplace ASMR 🔥 | Relax, Focus, Sleep 💤',
+            'description': 'md5:080bc713eac0321a7b376a1b53816d1b',
+            'uploader': 'A_Log_Burner',
+            'uploader_id': '65114691',
+            'channel': 'a-log-burner',
+            'channel_id': '63967687',
+            'view_count': int,
+            'age_limit': 18,
+            'thumbnail': r're:^https?://.*\.jpg',
+            'categories': ['Other, Watch Party'],
+            'timestamp': int,
+            'upload_date': str,
+            'live_status': 'is_live',
+        },
+        'skip': 'live',
     }]
 
     def _real_extract(self, url):
@@ -137,6 +158,7 @@ class KickVODIE(KickBaseIE):
                 'categories': ('livestream', 'categories', ..., 'name', {str}),
                 'view_count': ('views', {int_or_none}),
                 'age_limit': ('livestream', 'is_mature', {bool}, {lambda x: 18 if x else 0}),
+                'is_live': ('livestream', 'is_live', {bool}),
             }),
         }
 
@@ -37,7 +37,7 @@ class LocoIE(InfoExtractor):
         },
     }, {
         'url': 'https://loco.com/stream/c64916eb-10fb-46a9-9a19-8c4b7ed064e7',
-        'md5': '45ebc8a47ee1c2240178757caf8881b5',
+        'md5': '8b9bda03eba4d066928ae8d71f19befb',
         'info_dict': {
             'id': 'c64916eb-10fb-46a9-9a19-8c4b7ed064e7',
             'ext': 'mp4',
@@ -55,9 +55,9 @@ class LocoIE(InfoExtractor):
             'tags': ['Gameplay'],
             'series': 'GTA 5',
             'timestamp': 1740612872,
-            'modified_timestamp': 1740613037,
+            'modified_timestamp': 1750948439,
             'upload_date': '20250226',
-            'modified_date': '20250226',
+            'modified_date': '20250626',
         },
     }, {
         # Requires video authorization
@@ -123,8 +123,8 @@ class LocoIE(InfoExtractor):
     def _real_extract(self, url):
         video_type, video_id = self._match_valid_url(url).group('type', 'id')
         webpage = self._download_webpage(url, video_id)
-        stream = traverse_obj(self._search_nextjs_data(webpage, video_id), (
-            'props', 'pageProps', ('liveStreamData', 'stream', 'liveStream'), {dict}, any, {require('stream info')}))
+        stream = traverse_obj(self._search_nextjs_v13_data(webpage, video_id), (
+            ..., (None, 'ssrData'), ('liveStreamData', 'stream', 'liveStream'), {dict}, any, {require('stream info')}))
 
         if access_token := self._get_access_token(video_id):
             self._request_webpage(
@@ -1,22 +1,14 @@
 from .common import InfoExtractor
 from ..utils import (
     clean_html,
-    merge_dicts,
-    traverse_obj,
     unified_timestamp,
     url_or_none,
     urljoin,
 )
+from ..utils.traversal import traverse_obj
 
 
-class LRTBaseIE(InfoExtractor):
-    def _extract_js_var(self, webpage, var_name, default=None):
-        return self._search_regex(
-            fr'{var_name}\s*=\s*(["\'])((?:(?!\1).)+)\1',
-            webpage, var_name.replace('_', ' '), default, group=2)
-
-
-class LRTStreamIE(LRTBaseIE):
+class LRTStreamIE(InfoExtractor):
     _VALID_URL = r'https?://(?:www\.)?lrt\.lt/mediateka/tiesiogiai/(?P<id>[\w-]+)'
     _TESTS = [{
         'url': 'https://www.lrt.lt/mediateka/tiesiogiai/lrt-opus',
@@ -31,86 +23,110 @@ class LRTStreamIE(LRTBaseIE):
     def _real_extract(self, url):
         video_id = self._match_id(url)
         webpage = self._download_webpage(url, video_id)
-        streams_data = self._download_json(self._extract_js_var(webpage, 'tokenURL'), video_id)
+        # TODO: Use _search_nextjs_v13_data once fixed
+        get_stream_url = self._search_regex(
+            r'\\"get_streams_url\\":\\"([^"]+)\\"', webpage, 'stream URL')
+        streams_data = self._download_json(get_stream_url, video_id)
 
         formats, subtitles = [], {}
         for stream_url in traverse_obj(streams_data, (
-                'response', 'data', lambda k, _: k.startswith('content')), expected_type=url_or_none):
-            fmts, subs = self._extract_m3u8_formats_and_subtitles(stream_url, video_id, 'mp4', m3u8_id='hls', live=True)
+                'response', 'data', lambda k, _: k.startswith('content'), {url_or_none})):
+            fmts, subs = self._extract_m3u8_formats_and_subtitles(
+                stream_url, video_id, 'mp4', m3u8_id='hls', live=True)
             formats.extend(fmts)
             subtitles = self._merge_subtitles(subtitles, subs)
 
-        stream_title = self._extract_js_var(webpage, 'video_title', 'LRT')
         return {
             'id': video_id,
             'formats': formats,
             'subtitles': subtitles,
             'is_live': True,
-            'title': f'{self._og_search_title(webpage)} - {stream_title}',
+            'title': self._og_search_title(webpage),
         }
 
 
-class LRTVODIE(LRTBaseIE):
-    _VALID_URL = r'https?://(?:www\.)?lrt\.lt(?P<path>/mediateka/irasas/(?P<id>[0-9]+))'
+class LRTVODIE(InfoExtractor):
+    _VALID_URL = [
+        r'https?://(?:(?:www|archyvai)\.)?lrt\.lt/mediateka/irasas/(?P<id>[0-9]+)',
+        r'https?://(?:(?:www|archyvai)\.)?lrt\.lt/mediateka/video/[^?#]+\?(?:[^#]*&)?episode=(?P<id>[0-9]+)',
+    ]
     _TESTS = [{
         # m3u8 download
         'url': 'https://www.lrt.lt/mediateka/irasas/2000127261/greita-ir-gardu-sicilijos-ikvepta-klasikiniu-makaronu-su-baklazanais-vakariene',
         'info_dict': {
             'id': '2000127261',
             'ext': 'mp4',
-            'title': 'Greita ir gardu: Sicilijos įkvėpta klasikinių makaronų su baklažanais vakarienė',
+            'title': 'Nustebinkite svečius klasikiniu makaronų su baklažanais receptu',
             'description': 'md5:ad7d985f51b0dc1489ba2d76d7ed47fa',
-            'duration': 3035,
-            'timestamp': 1604079000,
+            'timestamp': 1604086200,
             'upload_date': '20201030',
             'tags': ['LRT TELEVIZIJA', 'Beatos virtuvė', 'Beata Nicholson', 'Makaronai', 'Baklažanai', 'Vakarienė', 'Receptas'],
             'thumbnail': 'https://www.lrt.lt/img/2020/10/30/764041-126478-1287x836.jpg',
+            'channel': 'Beatos virtuvė',
         },
     }, {
-        # direct mp3 download
-        'url': 'http://www.lrt.lt/mediateka/irasas/1013074524/',
-        'md5': '389da8ca3cad0f51d12bed0c844f6a0a',
+        # audio download
+        'url': 'https://www.lrt.lt/mediateka/irasas/1013074524/kita-tema',
+        'md5': 'fc982f10274929c66fdff65f75615cb0',
         'info_dict': {
             'id': '1013074524',
-            'ext': 'mp3',
-            'title': 'Kita tema 2016-09-05 15:05',
+            'ext': 'mp4',
+            'title': 'Kita tema',
             'description': 'md5:1b295a8fc7219ed0d543fc228c931fb5',
-            'duration': 3008,
-            'view_count': int,
-            'like_count': int,
+            'channel': 'Kita tema',
+            'timestamp': 1473087900,
+            'upload_date': '20160905',
         },
+    }, {
+        'url': 'https://www.lrt.lt/mediateka/video/auksinis-protas-vasara?episode=2000420320&season=%2Fmediateka%2Fvideo%2Fauksinis-protas-vasara%2F2025',
+        'info_dict': {
+            'id': '2000420320',
+            'ext': 'mp4',
+            'title': 'Kuris senovės romėnų poetas aprašė Narcizo mitą?',
+            'description': 'Intelektinė viktorina. Ved. Arūnas Valinskas ir Andrius Tapinas.',
+            'channel': 'Auksinis protas. Vasara',
+            'thumbnail': 'https://www.lrt.lt/img/2025/06/09/2094343-987905-1287x836.jpg',
+            'tags': ['LRT TELEVIZIJA', 'Auksinis protas'],
+            'timestamp': 1749851040,
+            'upload_date': '20250613',
+        },
+    }, {
+        'url': 'https://archyvai.lrt.lt/mediateka/video/ziniu-riteriai-ir-damos?episode=49685&season=%2Fmediateka%2Fvideo%2Fziniu-riteriai-ir-damos%2F2013',
+        'only_matching': True,
+    }, {
+        'url': 'https://archyvai.lrt.lt/mediateka/irasas/2000077058/panorama-1989-baltijos-kelias',
+        'only_matching': True,
     }]
 
     def _real_extract(self, url):
-        path, video_id = self._match_valid_url(url).group('path', 'id')
+        video_id = self._match_id(url)
         webpage = self._download_webpage(url, video_id)
 
-        media_url = self._extract_js_var(webpage, 'main_url', path)
-        media = self._download_json(self._extract_js_var(
-            webpage, 'media_info_url',
-            'https://www.lrt.lt/servisai/stream_url/vod/media_info/'),
-            video_id, query={'url': media_url})
+        # TODO: Use _search_nextjs_v13_data once fixed
+        canonical_url = (
+            self._search_regex(r'\\"(?:article|data)\\":{[^}]*\\"url\\":\\"(/[^"]+)\\"', webpage, 'content URL', fatal=False)
+            or self._search_regex(r'<link\s+rel="canonical"\s*href="(/[^"]+)"', webpage, 'canonical URL'))
+        media = self._download_json(
+            'https://www.lrt.lt/servisai/stream_url/vod/media_info/',
+            video_id, query={'url': canonical_url})
         jw_data = self._parse_jwplayer_data(
             media['playlist_item'], video_id, base_url=url)
 
-        json_ld_data = self._search_json_ld(webpage, video_id)
-
-        tags = []
-        for tag in (media.get('tags') or []):
-            tag_name = tag.get('name')
-            if not tag_name:
-                continue
-            tags.append(tag_name)
-
-        clean_info = {
-            'description': clean_html(media.get('content')),
-            'tags': tags,
-        }
-
-        return merge_dicts(clean_info, jw_data, json_ld_data)
+        return {
+            **jw_data,
+            **traverse_obj(media, {
+                'id': ('id', {str}),
+                'title': ('title', {str}),
+                'description': ('content', {clean_html}),
+                'timestamp': ('date', {lambda x: x.replace('.', '/')}, {unified_timestamp}),
+                'tags': ('tags', ..., 'name', {str}),
+            }),
+        }
 
 
-class LRTRadioIE(LRTBaseIE):
+class LRTRadioIE(InfoExtractor):
     _VALID_URL = r'https?://(?:www\.)?lrt\.lt/radioteka/irasas/(?P<id>\d+)/(?P<path>[^?#/]+)'
     _TESTS = [{
         # m3u8 download
@@ -1,102 +0,0 @@
-from .telecinco import TelecincoBaseIE
-from ..utils import (
-    int_or_none,
-    parse_iso8601,
-)
-
-
-class MiTeleIE(TelecincoBaseIE):
-    IE_DESC = 'mitele.es'
-    _VALID_URL = r'https?://(?:www\.)?mitele\.es/(?:[^/]+/)+(?P<id>[^/]+)/player'
-    _TESTS = [{
-        'url': 'http://www.mitele.es/programas-tv/diario-de/57b0dfb9c715da65618b4afa/player',
-        'info_dict': {
-            'id': 'FhYW1iNTE6J6H7NkQRIEzfne6t2quqPg',
-            'ext': 'mp4',
-            'title': 'Diario de La redacción Programa 144',
-            'description': 'md5:07c35a7b11abb05876a6a79185b58d27',
-            'series': 'Diario de',
-            'season': 'Season 14',
-            'season_number': 14,
-            'episode': 'Tor, la web invisible',
-            'episode_number': 3,
-            'thumbnail': r're:(?i)^https?://.*\.jpg$',
-            'duration': 2913,
-            'age_limit': 16,
-            'timestamp': 1471209401,
-            'upload_date': '20160814',
-        },
-        'skip': 'HTTP Error 404 Not Found',
-    }, {
-        # no explicit title
-        'url': 'http://www.mitele.es/programas-tv/cuarto-milenio/57b0de3dc915da14058b4876/player',
-        'info_dict': {
-            'id': 'oyNG1iNTE6TAPP-JmCjbwfwJqqMMX3Vq',
-            'ext': 'mp4',
-            'title': 'Cuarto Milenio Temporada 6 Programa 226',
-            'description': 'md5:5ff132013f0cd968ffbf1f5f3538a65f',
-            'series': 'Cuarto Milenio',
-            'season': 'Season 6',
-            'season_number': 6,
-            'episode': 'Episode 24',
-            'episode_number': 24,
-            'thumbnail': r're:(?i)^https?://.*\.jpg$',
-            'duration': 7313,
-            'age_limit': 12,
-            'timestamp': 1471209021,
-            'upload_date': '20160814',
-        },
-        'params': {
-            'skip_download': True,
-        },
-        'skip': 'HTTP Error 404 Not Found',
-    }, {
-        'url': 'https://www.mitele.es/programas-tv/horizonte/temporada-5/programa-171-40_013480051/player/',
-        'info_dict': {
-            'id': '7adbe22e-cd41-4787-afa4-36f3da7c2c6f',
-            'ext': 'mp4',
-            'title': 'Horizonte Temporada 5 Programa 171',
-            'description': 'md5:97f1fb712c5ac27e5693a8b3c5c0c6e3',
-            'episode': 'Las Zonas de Bajas Emisiones, a debate',
-            'episode_number': 171,
-            'season': 'Season 5',
-            'season_number': 5,
-            'series': 'Horizonte',
-            'duration': 7012,
-            'upload_date': '20240927',
-            'timestamp': 1727416450,
-            'thumbnail': 'https://album.mediaset.es/eimg/2024/09/27/horizonte-171_9f02.jpg',
-            'age_limit': 12,
-        },
-        'params': {'geo_bypass_country': 'ES'},
-    }, {
-        'url': 'http://www.mitele.es/series-online/la-que-se-avecina/57aac5c1c915da951a8b45ed/player',
-        'only_matching': True,
-    }, {
-        'url': 'https://www.mitele.es/programas-tv/diario-de/la-redaccion/programa-144-40_1006364575251/player/',
-        'only_matching': True,
-    }]
-
-    def _real_extract(self, url):
-        display_id = self._match_id(url)
-        webpage = self._download_webpage(url, display_id)
-        pre_player = self._search_json(
-            r'window\.\$REACTBASE_STATE\.prePlayer_mtweb\s*=',
-            webpage, 'Pre Player', display_id)['prePlayer']
-        title = pre_player['title']
-        video_info = self._parse_content(pre_player['video'], url)
-        content = pre_player.get('content') or {}
-        info = content.get('info') or {}
-
-        video_info.update({
-            'title': title,
-            'description': info.get('synopsis'),
-            'series': content.get('title'),
-            'season_number': int_or_none(info.get('season_number')),
-            'episode': content.get('subtitle'),
-            'episode_number': int_or_none(info.get('episode_number')),
-            'duration': int_or_none(info.get('duration')),
-            'age_limit': int_or_none(info.get('rating')),
-            'timestamp': parse_iso8601(pre_player.get('publishedTime')),
-        })
-        return video_info
@@ -111,8 +111,12 @@ class MySpaceIE(InfoExtractor):
                 search_data('stream-url'), search_data('hls-stream-url'),
                 search_data('http-stream-url'))
             if not formats:
+                vevo_id = search_data('vevo-id')
                 youtube_id = search_data('youtube-id')
-                if youtube_id:
+                if vevo_id:
+                    self.to_screen(f'Vevo video detected: {vevo_id}')
+                    return self.url_result(f'vevo:{vevo_id}', ie='Vevo')
+                elif youtube_id:
                     self.to_screen(f'Youtube video detected: {youtube_id}')
                     return self.url_result(youtube_id, ie='Youtube')
                 else:
@@ -50,7 +50,14 @@ class NewsPicksIE(InfoExtractor):
         webpage = self._download_webpage(url, video_id)
 
         fragment = self._search_nextjs_data(webpage, video_id)['props']['pageProps']['fragment']
-        m3u8_url = traverse_obj(fragment, ('movie', 'movieUrl', {url_or_none}, {require('m3u8 URL')}))
+        movie = fragment['movie']
+
+        if traverse_obj(movie, ('viewable', {str})) == 'PARTIAL_FREE' and not traverse_obj(movie, ('canWatch', {bool})):
+            self.report_warning(
+                'Full video is for Premium members. Without cookies, '
+                f'only the preview is downloaded. {self._login_hint()}', video_id)
+
+        m3u8_url = traverse_obj(movie, ('movieUrl', {url_or_none}, {require('m3u8 URL')}))
         formats, subtitles = self._extract_m3u8_formats_and_subtitles(m3u8_url, video_id, 'mp4')
 
         return {
@@ -59,12 +66,12 @@ class NewsPicksIE(InfoExtractor):
             'series': traverse_obj(fragment, ('series', 'title', {str})),
             'series_id': series_id,
             'subtitles': subtitles,
-            **traverse_obj(fragment, ('movie', {
+            **traverse_obj(movie, {
                 'title': ('title', {str}),
                 'cast': ('relatedUsers', ..., 'displayName', {str}, filter, all, filter),
                 'description': ('explanation', {clean_html}),
                 'release_timestamp': ('onAirStartDate', {parse_iso8601}),
                 'thumbnail': (('image', 'coverImageUrl'), {url_or_none}, any),
                 'timestamp': ('published', {parse_iso8601}),
-            })),
+            }),
         }
@@ -129,7 +129,7 @@ class NownessSeriesIE(NownessBaseIE):
     }
 
     def _real_extract(self, url):
-        display_id, series = self._api_request(url, 'series/getBySlug/%s')
+        _, series = self._api_request(url, 'series/getBySlug/%s')
         entries = [self._extract_url_result(post) for post in series['posts']]
         series_title = None
         series_description = None
yt_dlp/extractor/onsen.py (new file, 151 lines)
@@ -0,0 +1,151 @@
+import base64
+import json
+
+from .common import InfoExtractor
+from ..networking.exceptions import HTTPError
+from ..utils import (
+    ExtractorError,
+    clean_html,
+    int_or_none,
+    parse_qs,
+    str_or_none,
+    strftime_or_none,
+    update_url,
+    update_url_query,
+    url_or_none,
+)
+from ..utils.traversal import traverse_obj
+
+
+class OnsenIE(InfoExtractor):
+    IE_NAME = 'onsen'
+    IE_DESC = 'インターネットラジオステーション<音泉>'
+
+    _BASE_URL = 'https://www.onsen.ag'
+    _HEADERS = {'Referer': f'{_BASE_URL}/'}
+    _NETRC_MACHINE = 'onsen'
+    _VALID_URL = r'https?://(?:(?:share|www)\.)onsen\.ag/program/(?P<id>[^/?#]+)'
+    _TESTS = [{
+        'url': 'https://share.onsen.ag/program/onsenking?p=90&c=MTA0NjI',
+        'info_dict': {
+            'id': '10462',
+            'ext': 'm4a',
+            'title': '第SP回',
+            'cast': 'count:3',
+            'description': 'md5:de62c80a41c4c8d84da53a1ee681ad18',
+            'display_id': 'MTA0NjI=',
+            'media_type': 'sound',
+            'section_start': 0,
+            'series': '音泉キング「下野紘」のラジオ きみはもちろん、<音泉>ファミリーだよね?',
+            'series_id': 'onsenking',
+            'tags': 'count:2',
+            'thumbnail': r're:https?://d3bzklg4lms4gh\.cloudfront\.net/program_info/image/default/production/.+',
+            'upload_date': '20220627',
+            'webpage_url': 'https://www.onsen.ag/program/onsenking?c=MTA0NjI=',
+        },
+    }, {
+        'url': 'https://share.onsen.ag/program/girls-band-cry-radio?p=370&c=MTgwMDE',
+        'info_dict': {
+            'id': '18001',
+            'ext': 'mp4',
+            'title': '第4回',
+            'cast': 'count:5',
+            'description': 'md5:bbca8a389d99c90cbbce8f383c85fedd',
+            'display_id': 'MTgwMDE=',
+            'media_type': 'movie',
+            'section_start': 0,
+            'series': 'TVアニメ『ガールズバンドクライ』WEBラジオ「ガールズバンドクライ~ラジオにも全部ぶち込め。~」',
+            'series_id': 'girls-band-cry-radio',
+            'tags': 'count:3',
+            'thumbnail': r're:https?://d3bzklg4lms4gh\.cloudfront\.net/program_info/image/default/production/.+',
+            'upload_date': '20240425',
+            'webpage_url': 'https://www.onsen.ag/program/girls-band-cry-radio?c=MTgwMDE=',
+        },
+        'skip': 'Only available for premium supporters',
+    }, {
+        'url': 'https://www.onsen.ag/program/uma',
+        'info_dict': {
+            'id': 'uma',
+            'title': 'UMA YELL RADIO',
+        },
+        'playlist_mincount': 35,
+    }]
+
+    @staticmethod
+    def _get_encoded_id(program):
+        return base64.urlsafe_b64encode(str(program['id']).encode()).decode()
+
+    def _perform_login(self, username, password):
+        sign_in = self._download_json(
+            f'{self._BASE_URL}/web_api/signin', None, 'Logging in', headers={
+                'Accept': 'application/json',
+                'Content-Type': 'application/json',
+            }, data=json.dumps({
+                'session': {
+                    'email': username,
+                    'password': password,
+                },
+            }).encode(), expected_status=401)
+
+        if sign_in.get('error'):
+            raise ExtractorError('Invalid username or password', expected=True)
+
+    def _real_extract(self, url):
+        program_id = self._match_id(url)
+        try:
+            programs = self._download_json(
+                f'{self._BASE_URL}/web_api/programs/{program_id}', program_id)
+        except ExtractorError as e:
+            if isinstance(e.cause, HTTPError) and e.cause.status == 404:
+                raise ExtractorError('Invalid URL', expected=True)
+            raise
+
+        query = {k: v[-1] for k, v in parse_qs(url).items() if v}
+        if 'c' not in query:
+            entries = [
+                self.url_result(update_url_query(url, {'c': self._get_encoded_id(program)}), OnsenIE)
+                for program in traverse_obj(programs, ('contents', lambda _, v: v['id']))
+            ]
+
+            return self.playlist_result(
+                entries, program_id, traverse_obj(programs, ('program_info', 'title', {clean_html})))
+
+        raw_id = base64.urlsafe_b64decode(f'{query["c"]}===').decode()
+        p_keys = ('contents', lambda _, v: v['id'] == int(raw_id))
+
+        program = traverse_obj(programs, (*p_keys, any))
+        if not program:
+            raise ExtractorError(
+                'This program is no longer available', expected=True)
+        m3u8_url = traverse_obj(program, ('streaming_url', {url_or_none}))
+        if not m3u8_url:
+            self.raise_login_required(
+                'This program is only available for premium supporters')
+
+        display_id = self._get_encoded_id(program)
+        date_str = self._search_regex(
+            rf'{program_id}0?(\d{{6}})', m3u8_url, 'date string', default=None)
+
+        return {
+            'display_id': display_id,
+            'formats': self._extract_m3u8_formats(m3u8_url, raw_id, headers=self._HEADERS),
+            'http_headers': self._HEADERS,
+            'section_start': int_or_none(query.get('t', 0)),
+            'upload_date': strftime_or_none(f'20{date_str}'),
+            'webpage_url': f'{self._BASE_URL}/program/{program_id}?c={display_id}',
+            **traverse_obj(program, {
+                'id': ('id', {int}, {str_or_none}),
+                'title': ('title', {clean_html}),
+                'media_type': ('media_type', {str}),
+                'thumbnail': ('poster_image_url', {url_or_none}, {update_url(query=None)}),
+            }),
+            **traverse_obj(programs, {
+                'cast': (('performers', (*p_keys, 'guests')), ..., 'name', {str}, filter),
+                'series_id': ('directory_name', {str}),
+            }),
+            **traverse_obj(programs, ('program_info', {
+                'description': ('description', {clean_html}, filter),
+                'series': ('title', {clean_html}),
+                'tags': ('hashtag_list', ..., {str}, filter),
+            })),
+        }
@@ -1,201 +0,0 @@
-import itertools
-
-from .cbs import CBSBaseIE
-from .common import InfoExtractor
-from ..utils import (
-    ExtractorError,
-    int_or_none,
-    url_or_none,
-)
-
-
-class ParamountPlusIE(CBSBaseIE):
-    _VALID_URL = r'''(?x)
-        (?:
-            paramountplus:|
-            https?://(?:www\.)?(?:
-                paramountplus\.com/(?:shows|movies)/(?:video|[^/]+/video|[^/]+)/
-        )(?P<id>[\w-]+))'''
-
-    # All tests are blocked outside US
-    _TESTS = [{
-        'url': 'https://www.paramountplus.com/shows/video/Oe44g5_NrlgiZE3aQVONleD6vXc8kP0k/',
-        'info_dict': {
-            'id': 'Oe44g5_NrlgiZE3aQVONleD6vXc8kP0k',
-            'ext': 'mp4',
-            'title': 'CatDog - Climb Every CatDog/The Canine Mutiny',
-            'description': 'md5:7ac835000645a69933df226940e3c859',
-            'duration': 1426,
-            'timestamp': 920264400,
-            'upload_date': '19990301',
-            'uploader': 'CBSI-NEW',
-            'episode_number': 5,
-            'thumbnail': r're:https?://.+\.jpg$',
-            'season': 'Season 2',
-            'chapters': 'count:3',
-            'episode': 'Episode 5',
-            'season_number': 2,
-            'series': 'CatDog',
-        },
-        'params': {
-            'skip_download': 'm3u8',
-        },
-    }, {
-        'url': 'https://www.paramountplus.com/shows/video/6hSWYWRrR9EUTz7IEe5fJKBhYvSUfexd/',
-        'info_dict': {
-            'id': '6hSWYWRrR9EUTz7IEe5fJKBhYvSUfexd',
-            'ext': 'mp4',
-            'title': '7/23/21 WEEK IN REVIEW (Rep. Jahana Hayes/Howard Fineman/Sen. Michael Bennet/Sheera Frenkel & Cecilia Kang)',
-            'description': 'md5:f4adcea3e8b106192022e121f1565bae',
-            'duration': 2506,
-            'timestamp': 1627063200,
-            'upload_date': '20210723',
-            'uploader': 'CBSI-NEW',
-            'episode_number': 81,
-            'thumbnail': r're:https?://.+\.jpg$',
-            'season': 'Season 2',
-            'chapters': 'count:4',
-            'episode': 'Episode 81',
-            'season_number': 2,
-            'series': 'Tooning Out The News',
-        },
-        'params': {
-            'skip_download': 'm3u8',
-        },
-    }, {
-        'url': 'https://www.paramountplus.com/movies/video/vM2vm0kE6vsS2U41VhMRKTOVHyQAr6pC/',
-        'info_dict': {
-            'id': 'vM2vm0kE6vsS2U41VhMRKTOVHyQAr6pC',
-            'ext': 'mp4',
-            'title': 'Daddy\'s Home',
-            'upload_date': '20151225',
-            'description': 'md5:9a6300c504d5e12000e8707f20c54745',
-            'uploader': 'CBSI-NEW',
-            'timestamp': 1451030400,
-            'thumbnail': r're:https?://.+\.jpg$',
-            'chapters': 'count:0',
-            'duration': 5761,
-            'series': 'Paramount+ Movies',
-        },
-        'params': {
-            'skip_download': 'm3u8',
-        },
-        'skip': 'DRM',
-    }, {
-        'url': 'https://www.paramountplus.com/movies/video/5EKDXPOzdVf9voUqW6oRuocyAEeJGbEc/',
-        'info_dict': {
-            'id': '5EKDXPOzdVf9voUqW6oRuocyAEeJGbEc',
-            'ext': 'mp4',
-            'uploader': 'CBSI-NEW',
-            'description': 'md5:bc7b6fea84ba631ef77a9bda9f2ff911',
-            'timestamp': 1577865600,
-            'title': 'Sonic the Hedgehog',
-            'upload_date': '20200101',
-            'thumbnail': r're:https?://.+\.jpg$',
-            'chapters': 'count:0',
-            'duration': 5932,
-            'series': 'Paramount+ Movies',
-        },
-        'params': {
-            'skip_download': 'm3u8',
-        },
-        'skip': 'DRM',
-    }, {
-        'url': 'https://www.paramountplus.com/shows/the-real-world/video/mOVeHeL9ub9yWdyzSZFYz8Uj4ZBkVzQg/the-real-world-reunion/',
-        'only_matching': True,
-    }, {
-        'url': 'https://www.paramountplus.com/shows/video/mOVeHeL9ub9yWdyzSZFYz8Uj4ZBkVzQg/',
-        'only_matching': True,
-    }, {
-        'url': 'https://www.paramountplus.com/movies/video/W0VyStQqUnqKzJkrpSAIARuCc9YuYGNy/',
-        'only_matching': True,
-    }, {
-        'url': 'https://www.paramountplus.com/movies/paw-patrol-the-movie/W0VyStQqUnqKzJkrpSAIARuCc9YuYGNy/',
-        'only_matching': True,
-    }]
-
-    def _extract_video_info(self, content_id, mpx_acc=2198311517):
-        items_data = self._download_json(
-            f'https://www.paramountplus.com/apps-api/v2.0/androidtv/video/cid/{content_id}.json',
-            content_id, query={
-                'locale': 'en-us',
-                'at': 'ABCXgPuoStiPipsK0OHVXIVh68zNys+G4f7nW9R6qH68GDOcneW6Kg89cJXGfiQCsj0=',
-            }, headers=self.geo_verification_headers())
-
-        asset_types = {
-            item.get('assetType'): {
-                'format': 'SMIL',
-                'formats': 'M3U+none,MPEG4',  # '+none' specifies ProtectionScheme (no DRM)
-            } for item in items_data['itemList']
-        }
-        item = items_data['itemList'][-1]
-
-        info, error = {}, None
-        metadata = {
-            'title': item.get('title'),
-            'series': item.get('seriesTitle'),
-            'season_number': int_or_none(item.get('seasonNum')),
-            'episode_number': int_or_none(item.get('episodeNum')),
-            'duration': int_or_none(item.get('duration')),
-            'thumbnail': url_or_none(item.get('thumbnail')),
-        }
-        try:
-            info = self._extract_common_video_info(content_id, asset_types, mpx_acc, extra_info=metadata)
-        except ExtractorError as e:
-            error = e
-
-        # Check for DRM formats to give appropriate error
-        if not info.get('formats'):
-            for query in asset_types.values():
-                query['formats'] = 'MPEG-DASH,M3U,MPEG4'  # allows DRM formats
-
-            try:
-                drm_info = self._extract_common_video_info(content_id, asset_types, mpx_acc, extra_info=metadata)
-            except ExtractorError:
-                if error:
-                    raise error from None
-                raise
-            if drm_info['formats']:
-                self.report_drm(content_id)
-            elif error:
-                raise error
-
-        return info
-
-
-class ParamountPlusSeriesIE(InfoExtractor):
-    _VALID_URL = r'https?://(?:www\.)?paramountplus\.com/shows/(?P<id>[a-zA-Z0-9-_]+)/?(?:[#?]|$)'
-    _TESTS = [{
-        'url': 'https://www.paramountplus.com/shows/drake-josh',
-        'playlist_mincount': 50,
-        'info_dict': {
-            'id': 'drake-josh',
-        },
-    }, {
-        'url': 'https://www.paramountplus.com/shows/hawaii_five_0/',
-        'playlist_mincount': 240,
-        'info_dict': {
-            'id': 'hawaii_five_0',
-        },
-    }, {
-        'url': 'https://www.paramountplus.com/shows/spongebob-squarepants/',
-        'playlist_mincount': 248,
-        'info_dict': {
-            'id': 'spongebob-squarepants',
-        },
-    }]
-
-    def _entries(self, show_name):
-        for page in itertools.count():
-            show_json = self._download_json(
-                f'https://www.paramountplus.com/shows/{show_name}/xhr/episodes/page/{page}/size/50/xs/0/season/0', show_name)
-            if not show_json.get('success'):
-                return
-            for episode in show_json['result']['data']:
-                yield self.url_result(
-                    'https://www.paramountplus.com{}'.format(episode['url']),
-                    ie=ParamountPlusIE.ie_key(), video_id=episode['content_id'])
-
-    def _real_extract(self, url):
-        show_name = self._match_id(url)
-        return self.playlist_result(self._entries(show_name), playlist_id=show_name)
@@ -1,119 +0,0 @@
-from .common import InfoExtractor
-from ..networking.exceptions import HTTPError
-from ..utils import (
-    ExtractorError,
-    traverse_obj,
-    unified_timestamp,
-)
-
-
-class PixivSketchBaseIE(InfoExtractor):
-    def _call_api(self, video_id, path, referer, note='Downloading JSON metadata'):
-        response = self._download_json(f'https://sketch.pixiv.net/api/{path}', video_id, note=note, headers={
-            'Referer': referer,
-            'X-Requested-With': referer,
-        })
-        errors = traverse_obj(response, ('errors', ..., 'message'))
-        if errors:
-            raise ExtractorError(' '.join(f'{e}.' for e in errors))
-        return response.get('data') or {}
-
-
-class PixivSketchIE(PixivSketchBaseIE):
-    IE_NAME = 'pixiv:sketch'
-    _VALID_URL = r'https?://sketch\.pixiv\.net/@(?P<uploader_id>[a-zA-Z0-9_-]+)/lives/(?P<id>\d+)/?'
-    _TESTS = [{
-        'url': 'https://sketch.pixiv.net/@nuhutya/lives/3654620468641830507',
-        'info_dict': {
-            'id': '7370666691623196569',
-            'title': 'まにあえクリスマス!',
-            'uploader': 'ぬふちゃ',
-            'uploader_id': 'nuhutya',
-            'channel_id': '9844815',
-            'age_limit': 0,
-            'timestamp': 1640351536,
-        },
-        'skip': True,
-    }, {
-        # these two (age_limit > 0) requires you to login on website, but it's actually not required for download
-        'url': 'https://sketch.pixiv.net/@namahyou/lives/4393103321546851377',
-        'info_dict': {
-            'id': '4907995960957946943',
-            'title': 'クリスマスなんて知らん🖕',
-            'uploader': 'すゃもり',
-            'uploader_id': 'suya2mori2',
-            'channel_id': '31169300',
-            'age_limit': 15,
-            'timestamp': 1640347640,
-        },
-        'skip': True,
-    }, {
-        'url': 'https://sketch.pixiv.net/@8aki/lives/3553803162487249670',
-        'info_dict': {
-            'id': '1593420639479156945',
-            'title': 'おまけ本作業(リョナ有)',
-            'uploader': 'おぶい / Obui',
-            'uploader_id': 'oving',
-            'channel_id': '17606',
-            'age_limit': 18,
-            'timestamp': 1640330263,
-        },
-        'skip': True,
-    }]
-
-    def _real_extract(self, url):
-        video_id, uploader_id = self._match_valid_url(url).group('id', 'uploader_id')
-        data = self._call_api(video_id, f'lives/{video_id}.json', url)
-
-        if not traverse_obj(data, 'is_broadcasting'):
-            raise ExtractorError(f'This live is offline. Use https://sketch.pixiv.net/@{uploader_id} for ongoing live.', expected=True)
-
-        m3u8_url = traverse_obj(data, ('owner', 'hls_movie', 'url'))
-        formats = self._extract_m3u8_formats(
-            m3u8_url, video_id, ext='mp4',
-            entry_protocol='m3u8_native', m3u8_id='hls')
-
-        return {
-            'id': video_id,
-            'title': data.get('name'),
-            'formats': formats,
-            'uploader': traverse_obj(data, ('user', 'name'), ('owner', 'user', 'name')),
-            'uploader_id': traverse_obj(data, ('user', 'unique_name'), ('owner', 'user', 'unique_name')),
-            'channel_id': str(traverse_obj(data, ('user', 'pixiv_user_id'), ('owner', 'user', 'pixiv_user_id'))),
-            'age_limit': 18 if data.get('is_r18') else 15 if data.get('is_r15') else 0,
-            'timestamp': unified_timestamp(data.get('created_at')),
-            'is_live': True,
-        }
-
-
-class PixivSketchUserIE(PixivSketchBaseIE):
-    IE_NAME = 'pixiv:sketch:user'
-    _VALID_URL = r'https?://sketch\.pixiv\.net/@(?P<id>[a-zA-Z0-9_-]+)/?'
-    _TESTS = [{
-        'url': 'https://sketch.pixiv.net/@nuhutya',
-        'only_matching': True,
-    }, {
-        'url': 'https://sketch.pixiv.net/@namahyou',
-        'only_matching': True,
-    }, {
-        'url': 'https://sketch.pixiv.net/@8aki',
-        'only_matching': True,
-    }]
-
-    @classmethod
-    def suitable(cls, url):
-        return super().suitable(url) and not PixivSketchIE.suitable(url)
-
-    def _real_extract(self, url):
-        user_id = self._match_id(url)
-        data = self._call_api(user_id, f'lives/users/@{user_id}.json', url)
-
-        if not traverse_obj(data, 'is_broadcasting'):
-            try:
-                self._call_api(user_id, 'users/current.json', url, 'Investigating reason for request failure')
-            except ExtractorError as e:
-                if isinstance(e.cause, HTTPError) and e.cause.status == 401:
-                    self.raise_login_required(f'Please log in, or use direct link like https://sketch.pixiv.net/@{user_id}/1234567890', method='cookies')
-            raise ExtractorError('This user is offline', expected=True)
-
-        return self.url_result(f'https://sketch.pixiv.net/@{user_id}/lives/{data["id"]}')
@@ -414,7 +414,7 @@ class RadioFranceProgramScheduleIE(RadioFranceBaseIE):
     _VALID_URL = rf'''(?x)
         {RadioFranceBaseIE._VALID_URL_BASE}
         /(?P<station>{RadioFranceBaseIE._STATIONS_RE})
-        /grille-programmes(?:\?date=(?P<date>[\d-]+))?
+        /grille-programmes
     '''
 
     _TESTS = [{
@@ -463,7 +463,7 @@ class RadioFranceProgramScheduleIE(RadioFranceBaseIE):
         }))
 
     def _real_extract(self, url):
-        station, date = self._match_valid_url(url).group('station', 'date')
+        station = self._match_valid_url(url).group('station')
         webpage = self._download_webpage(url, station)
         grid_data = self._extract_data_from_webpage(webpage, station, 'grid')
         upload_date = strftime_or_none(grid_data.get('date'), '%Y%m%d')
@@ -321,7 +321,7 @@ class RCTIPlusSeriesIE(RCTIPlusBaseIE):
                 f'Only {video_type} will be downloaded. '
                 f'To download everything from the series, remove "/{video_type}" from the URL')
 
-        series_meta, meta_paths = self._call_api(
+        series_meta, _ = self._call_api(
             f'https://api.rctiplus.com/api/v1/program/{series_id}/detail', display_id, 'Downloading series metadata')
         metadata = {
             'age_limit': try_get(series_meta, lambda x: self._AGE_RATINGS[x['age_restriction'][0]['code']]),
@@ -1,191 +0,0 @@
-import re
-
-from .common import InfoExtractor
-from ..utils import ExtractorError, int_or_none, str_to_int
-
-
-class RUTVIE(InfoExtractor):
-    IE_DESC = 'RUTV.RU'
-    _VALID_URL = r'''(?x)
-        https?://
-            (?:test)?player\.(?:rutv\.ru|vgtrk\.com)/
-            (?P<path>
-                flash\d+v/container\.swf\?id=|
-                iframe/(?P<type>swf|video|live)/id/|
-                index/iframe/cast_id/
-            )
-            (?P<id>\d+)
-    '''
-    _EMBED_REGEX = [
-        r'<iframe[^>]+?src=(["\'])(?P<url>https?://(?:test)?player\.(?:rutv\.ru|vgtrk\.com)/(?:iframe/(?:swf|video|live)/id|index/iframe/cast_id)/.+?)\1',
-        r'<meta[^>]+?property=(["\'])og:video\1[^>]+?content=(["\'])(?P<url>https?://(?:test)?player\.(?:rutv\.ru|vgtrk\.com)/flash\d+v/container\.swf\?id=.+?\2)',
-    ]
-
-    _TESTS = [{
-        'url': 'http://player.rutv.ru/flash2v/container.swf?id=774471&sid=kultura&fbv=true&isPlay=true&ssl=false&i=560&acc_video_id=episode_id/972347/video_id/978186/brand_id/31724',
-        'info_dict': {
-            'id': '774471',
-            'ext': 'mp4',
-            'title': 'Монологи на все времена. Концерт',
-            'description': 'md5:18d8b5e6a41fb1faa53819471852d5d5',
-            'duration': 2906,
-            'thumbnail': r're:https?://cdn-st2\.smotrim\.ru/.+\.jpg',
-        },
-        'params': {'skip_download': 'm3u8'},
-    }, {
-        'url': 'https://player.vgtrk.com/flash2v/container.swf?id=774016&sid=russiatv&fbv=true&isPlay=true&ssl=false&i=560&acc_video_id=episode_id/972098/video_id/977760/brand_id/57638',
-        'info_dict': {
-            'id': '774016',
-            'ext': 'mp4',
-            'title': 'Чужой в семье Сталина',
-            'description': '',
-            'duration': 2539,
-        },
-        'skip': 'Invalid URL',
-    }, {
-        'url': 'http://player.rutv.ru/iframe/swf/id/766888/sid/hitech/?acc_video_id=4000',
-        'info_dict': {
-            'id': '766888',
-            'ext': 'mp4',
-            'title': 'Вести.net: интернет-гиганты начали перетягивание программных "одеял"',
-            'description': 'md5:65ddd47f9830c4f42ed6475f8730c995',
-            'duration': 279,
-            'thumbnail': r're:https?://cdn-st2\.smotrim\.ru/.+\.jpg',
-        },
-        'params': {'skip_download': 'm3u8'},
-    }, {
-        'url': 'http://player.rutv.ru/iframe/video/id/771852/start_zoom/true/showZoomBtn/false/sid/russiatv/?acc_video_id=episode_id/970443/video_id/975648/brand_id/5169',
-        'info_dict': {
-            'id': '771852',
-            'ext': 'mp4',
-            'title': 'Прямой эфир. Жертвы загадочной болезни: смерть от старости в 17 лет',
-            'description': 'md5:b81c8c55247a4bd996b43ce17395b2d8',
-            'duration': 3096,
-            'thumbnail': r're:https?://cdn-st2\.smotrim\.ru/.+\.jpg',
-        },
-        'params': {'skip_download': 'm3u8'},
-    }, {
-        'url': 'http://player.rutv.ru/iframe/live/id/51499/showZoomBtn/false/isPlay/true/sid/sochi2014',
-        'info_dict': {
-            'id': '51499',
-            'ext': 'flv',
-            'title': 'Сочи-2014. Биатлон. Индивидуальная гонка. Мужчины ',
-            'description': 'md5:9e0ed5c9d2fa1efbfdfed90c9a6d179c',
-        },
-        'skip': 'Invalid URL',
-    }, {
-        'url': 'http://player.rutv.ru/iframe/live/id/21/showZoomBtn/false/isPlay/true/',
-        'info_dict': {
-            'id': '21',
-            'ext': 'mp4',
-            'title': str,
-            'is_live': True,
-        },
-        'skip': 'Invalid URL',
-    }, {
-        'url': 'https://testplayer.vgtrk.com/iframe/live/id/19201/showZoomBtn/false/isPlay/true/',
-        'only_matching': True,
-    }]
-    _WEBPAGE_TESTS = [{
-        'url': 'http://istoriya-teatra.ru/news/item/f00/s05/n0000545/index.shtml',
-        'info_dict': {
-            'id': '1952012',
-            'ext': 'mp4',
-            'title': 'Новости культуры. Эфир от 10.10.2019 (23:30). Театр Сатиры отмечает день рождения премьерой',
-            'description': 'md5:fced27112ff01ff8fc4a452fc088bad6',
-            'duration': 191,
-            'thumbnail': r're:https?://cdn-st2\.smotrim\.ru/.+\.jpg',
-        },
-        'params': {'skip_download': 'm3u8'},
-    }]
-
-    def _real_extract(self, url):
-        mobj = self._match_valid_url(url)
-        video_id = mobj.group('id')
-        video_path = mobj.group('path')
-
-        if re.match(r'flash\d+v', video_path):
-            video_type = 'video'
-        elif video_path.startswith('iframe'):
-            video_type = mobj.group('type')
-            if video_type == 'swf':
-                video_type = 'video'
-        elif video_path.startswith('index/iframe/cast_id'):
-            video_type = 'live'
-
-        is_live = video_type == 'live'
-
-        json_data = self._download_json(
-            'http://player.vgtrk.com/iframe/data{}/id/{}'.format('live' if is_live else 'video', video_id),
-            video_id, 'Downloading JSON')
-
-        if json_data['errors']:
-            raise ExtractorError('{} said: {}'.format(self.IE_NAME, json_data['errors']), expected=True)
-
-        playlist = json_data['data']['playlist']
-        medialist = playlist['medialist']
-        media = medialist[0]
-
-        if media['errors']:
-            raise ExtractorError('{} said: {}'.format(self.IE_NAME, media['errors']), expected=True)
-
-        view_count = int_or_none(playlist.get('count_views'))
-        priority_transport = playlist['priority_transport']
-
-        thumbnail = media['picture']
-        width = int_or_none(media['width'])
-        height = int_or_none(media['height'])
-        description = media['anons']
-        title = media['title']
-        duration = int_or_none(media.get('duration'))
-
-        formats = []
-        subtitles = {}
-
-        for transport, links in media['sources'].items():
-            for quality, url in links.items():
-                preference = -1 if priority_transport == transport else -2
-                if transport == 'rtmp':
-                    mobj = re.search(r'^(?P<url>rtmp://[^/]+/(?P<app>.+))/(?P<playpath>.+)$', url)
-                    if not mobj:
-                        continue
-                    fmt = {
-                        'url': mobj.group('url'),
-                        'play_path': mobj.group('playpath'),
-                        'app': mobj.group('app'),
-                        'page_url': 'http://player.rutv.ru',
-                        'player_url': 'http://player.rutv.ru/flash3v/osmf.swf?i=22',
-                        'rtmp_live': True,
-                        'ext': 'flv',
-                        'vbr': str_to_int(quality),
-                    }
-                elif transport == 'm3u8':
-                    fmt, subs = self._extract_m3u8_formats_and_subtitles(
-                        url, video_id, 'mp4', quality=preference, m3u8_id='hls')
-                    formats.extend(fmt)
-                    self._merge_subtitles(subs, target=subtitles)
-                    continue
-                else:
-                    fmt = {
-                        'url': url,
-                    }
-                fmt.update({
-                    'width': int_or_none(quality, default=height, invscale=width, scale=height),
-                    'height': int_or_none(quality, default=height),
-                    'format_id': f'{transport}-{quality}',
-                    'source_preference': preference,
-                })
-                formats.append(fmt)
-
-        return {
-            'id': video_id,
-            'title': title,
-            'description': description,
-            'thumbnail': thumbnail,
-            'view_count': view_count,
-            'duration': duration,
-            'formats': formats,
-            'subtitles': subtitles,
-            'is_live': is_live,
-            '_format_sort_fields': ('source', ),
-        }
@@ -1,119 +0,0 @@
from .common import InfoExtractor
from ..utils import (
    determine_ext,
    int_or_none,
    parse_qs,
    qualities,
    try_get,
)


class SixPlayIE(InfoExtractor):
    IE_NAME = '6play'
    _VALID_URL = r'(?:6play:|https?://(?:www\.)?(?P<domain>6play\.fr|rtlplay\.be|play\.rtl\.hr|rtlmost\.hu)/.+?-c_)(?P<id>[0-9]+)'
    _TESTS = [{
        'url': 'https://www.6play.fr/minute-par-minute-p_9533/le-but-qui-a-marque-lhistoire-du-football-francais-c_12041051',
        'md5': '31fcd112637baa0c2ab92c4fcd8baf27',
        'info_dict': {
            'id': '12041051',
            'ext': 'mp4',
            'title': 'Le but qui a marqué l\'histoire du football français !',
            'description': 'md5:b59e7e841d646ef1eb42a7868eb6a851',
        },
    }, {
        'url': 'https://www.rtlplay.be/rtl-info-13h-p_8551/les-titres-du-rtlinfo-13h-c_12045869',
        'only_matching': True,
    }, {
        'url': 'https://play.rtl.hr/pj-masks-p_9455/epizoda-34-sezona-1-catboyevo-cudo-na-dva-kotaca-c_11984989',
        'only_matching': True,
    }, {
        'url': 'https://www.rtlmost.hu/megtorve-p_14167/megtorve-6-resz-c_12397787',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        domain, video_id = self._match_valid_url(url).groups()
        service, consumer_name = {
            '6play.fr': ('6play', 'm6web'),
            'rtlplay.be': ('rtlbe_rtl_play', 'rtlbe'),
            'play.rtl.hr': ('rtlhr_rtl_play', 'rtlhr'),
            'rtlmost.hu': ('rtlhu_rtl_most', 'rtlhu'),
        }.get(domain, ('6play', 'm6web'))

        data = self._download_json(
            f'https://pc.middleware.6play.fr/6play/v2/platforms/m6group_web/services/{service}/videos/clip_{video_id}',
            video_id, headers={
                'x-customer-name': consumer_name,
            }, query={
                'csa': 5,
                'with': 'clips',
            })

        clip_data = data['clips'][0]
        title = clip_data['title']

        urls = []
        quality_key = qualities(['lq', 'sd', 'hq', 'hd'])
        formats = []
        subtitles = {}
        assets = clip_data.get('assets') or []
        for asset in assets:
            asset_url = asset.get('full_physical_path')
            protocol = asset.get('protocol')
            if not asset_url or ((protocol == 'primetime' or asset.get('type') == 'usp_hlsfp_h264') and not ('_drmnp.ism/' in asset_url or '_unpnp.ism/' in asset_url)) or asset_url in urls:
                continue
            urls.append(asset_url)
            container = asset.get('video_container')
            ext = determine_ext(asset_url)
            if protocol == 'http_subtitle' or ext == 'vtt':
                subtitles.setdefault('fr', []).append({'url': asset_url})
                continue
            if container == 'm3u8' or ext == 'm3u8':
                if protocol == 'usp':
                    if parse_qs(asset_url).get('token', [None])[0]:
                        urlh = self._request_webpage(
                            asset_url, video_id, fatal=False,
                            headers=self.geo_verification_headers())
                        if not urlh:
                            continue
                        asset_url = urlh.url
                    asset_url = asset_url.replace('_drmnp.ism/', '_unpnp.ism/')
                    for i in range(3, 0, -1):
                        asset_url = asset_url.replace('_sd1/', f'_sd{i}/')
                        m3u8_formats = self._extract_m3u8_formats(
                            asset_url, video_id, 'mp4', 'm3u8_native',
                            m3u8_id='hls', fatal=False)
                        formats.extend(m3u8_formats)
                        formats.extend(self._extract_mpd_formats(
                            asset_url.replace('.m3u8', '.mpd'),
                            video_id, mpd_id='dash', fatal=False))
                        if m3u8_formats:
                            break
                else:
                    formats.extend(self._extract_m3u8_formats(
                        asset_url, video_id, 'mp4', 'm3u8_native',
                        m3u8_id='hls', fatal=False))
            elif container == 'mp4' or ext == 'mp4':
                quality = asset.get('video_quality')
                formats.append({
                    'url': asset_url,
                    'format_id': quality,
                    'quality': quality_key(quality),
                    'ext': ext,
                })

        def get(getter):
            for src in (data, clip_data):
                v = try_get(src, getter, str)
                if v:
                    return v

        return {
            'id': video_id,
            'title': title,
            'description': get(lambda x: x['description']),
            'duration': int_or_none(clip_data.get('duration')),
            'series': get(lambda x: x['program']['title']),
            'formats': formats,
            'subtitles': subtitles,
        }
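The deleted 6play code ranks its progressive MP4s with the qualities() helper. A small standalone sketch, assuming it mirrors yt_dlp.utils.qualities:

def qualities(quality_ids):
    def q(qid):
        try:
            return quality_ids.index(qid)  # higher index == better quality
        except ValueError:
            return -1                      # unknown labels sort below all known ones
    return q

quality_key = qualities(['lq', 'sd', 'hq', 'hd'])
print(quality_key('hd'))   # 3
print(quality_key('sd'))   # 1
print(quality_key('uhd'))  # -1 (hypothetical label, not in the list)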
@@ -51,6 +51,20 @@ class SkebIE(InfoExtractor):
         },
         'playlist_count': 2,
         'expected_warnings': ['Skipping unsupported extension'],
+    }, {
+        'url': 'https://skeb.jp/@Yossshy_Music/works/13',
+        'info_dict': {
+            'ext': 'wav',
+            'id': '5566495',
+            'title': '13-1',
+            'description': 'md5:1026b8b9ae38c67c2d995970ec196550',
+            'uploader': 'Yossshy',
+            'uploader_id': 'Yossshy_Music',
+            'duration': 336,
+            'thumbnail': r're:https?://.+',
+            'tags': 'count:59',
+            'genres': ['music'],
+        },
     }]

     def _call_api(self, uploader_id, work_id):
@@ -87,7 +101,7 @@ class SkebIE(InfoExtractor):
         entries = []
         for idx, preview in enumerate(traverse_obj(works, ('previews', lambda _, v: url_or_none(v['url']))), 1):
             ext = traverse_obj(preview, ('information', 'extension', {str}))
-            if ext not in ('mp3', 'mp4'):
+            if ext not in ('mp3', 'mp4', 'wav'):
                 self.report_warning(f'Skipping unsupported extension "{ext}"')
                 continue

@@ -100,7 +114,7 @@ class SkebIE(InfoExtractor):
                     'url': preview['vtt_url'],
                 }],
             } if url_or_none(preview.get('vtt_url')) else None,
-            'vcodec': 'none' if ext == 'mp3' else None,
+            'vcodec': 'none' if ext in ('mp3', 'wav') else None,
             **info,
             **traverse_obj(preview, {
                 'id': ('id', {str_or_none}),
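The traverse_obj call in the loop above filters previews with a callable path element. A hedged, self-contained illustration of that pattern (assuming current yt_dlp traversal semantics; the preview data is made up):

from yt_dlp.utils import url_or_none
from yt_dlp.utils.traversal import traverse_obj

works = {'previews': [
    {'url': 'https://example.com/a.mp3'},  # kept: url_or_none() returns the URL
    {'url': 'not a url'},                  # dropped: url_or_none() returns None
]}
# A callable path element receives (key, value) and branches over truthy matches:
kept = traverse_obj(works, ('previews', lambda _, v: url_or_none(v['url'])))
print(kept)  # [{'url': 'https://example.com/a.mp3'}]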
@@ -1,65 +1,403 @@
+import functools
+import json
+import re
+import urllib.parse
+
 from .common import InfoExtractor
-from ..utils import ExtractorError
+from ..utils import (
+    OnDemandPagedList,
+    clean_html,
+    determine_ext,
+    extract_attributes,
+    int_or_none,
+    parse_iso8601,
+    str_or_none,
+    unescapeHTML,
+    url_or_none,
+    urljoin,
+)
+from ..utils.traversal import (
+    find_element,
+    find_elements,
+    require,
+    traverse_obj,
+)


-class SmotrimIE(InfoExtractor):
-    _VALID_URL = r'https?://smotrim\.ru/(?P<type>brand|video|article|live)/(?P<id>[0-9]+)'
-    _TESTS = [{  # video
+class SmotrimBaseIE(InfoExtractor):
+    _BASE_URL = 'https://smotrim.ru'
+    _GEO_BYPASS = False
+    _GEO_COUNTRIES = ['RU']
+
+    def _extract_from_smotrim_api(self, typ, item_id):
+        path = f'data{typ.replace("-", "")}/{"uid" if typ == "live" else "id"}'
+        data = self._download_json(
+            f'https://player.smotrim.ru/iframe/{path}/{item_id}/sid/smotrim', item_id)
+        media = traverse_obj(data, ('data', 'playlist', 'medialist', -1, {dict}))
+        if traverse_obj(media, ('locked', {bool})):
+            self.raise_login_required()
+        if error_msg := traverse_obj(media, ('errors', {clean_html})):
+            self.raise_geo_restricted(error_msg, countries=self._GEO_COUNTRIES)
+
+        webpage_url = traverse_obj(data, ('data', 'template', 'share_url', {url_or_none}))
+        webpage = self._download_webpage(webpage_url, item_id)
+        common = {
+            'thumbnail': self._html_search_meta(['og:image', 'twitter:image'], webpage, default=None),
+            **traverse_obj(media, {
+                'id': ('id', {str_or_none}),
+                'title': (('episodeTitle', 'title'), {clean_html}, filter, any),
+                'channel_id': ('channelId', {str_or_none}),
+                'description': ('anons', {clean_html}, filter),
+                'season': ('season', {clean_html}, filter),
+                'series': (('brand_title', 'brandTitle'), {clean_html}, filter, any),
+                'series_id': ('brand_id', {str_or_none}),
+            }),
+        }
+
+        if typ == 'audio':
+            bookmark = self._search_json(
+                r'class="bookmark"[^>]+value\s*=\s*"', webpage,
+                'bookmark', item_id, default={}, transform_source=unescapeHTML)
+
+            metadata = {
+                'vcodec': 'none',
+                **common,
+                **traverse_obj(media, {
+                    'ext': ('audio_url', {determine_ext(default_ext='mp3')}),
+                    'duration': ('duration', {int_or_none}),
+                    'url': ('audio_url', {url_or_none}),
+                }),
+                **traverse_obj(bookmark, {
+                    'title': ('subtitle', {clean_html}),
+                    'timestamp': ('published', {parse_iso8601}),
+                }),
+            }
+        elif typ == 'audio-live':
+            metadata = {
+                'ext': 'mp3',
+                'url': traverse_obj(media, ('source', 'auto', {url_or_none})),
+                'vcodec': 'none',
+                **common,
+            }
+        else:
+            formats, subtitles = [], {}
+            for m3u8_url in traverse_obj(media, (
+                'sources', 'm3u8', {dict.values}, ..., {url_or_none},
+            )):
+                fmts, subs = self._extract_m3u8_formats_and_subtitles(
+                    m3u8_url, item_id, 'mp4', m3u8_id='hls', fatal=False)
+                formats.extend(fmts)
+                self._merge_subtitles(subs, target=subtitles)
+
+            metadata = {
+                'formats': formats,
+                'subtitles': subtitles,
+                **self._search_json_ld(webpage, item_id),
+                **common,
+            }
+
+        return {
+            'age_limit': traverse_obj(data, ('data', 'age_restrictions', {int_or_none})),
+            'is_live': typ in ('audio-live', 'live'),
+            'tags': traverse_obj(webpage, (
+                {find_elements(cls='tags-list__link')}, ..., {clean_html}, filter, all, filter)),
+            'webpage_url': webpage_url,
+            **metadata,
+        }
+
+
+class SmotrimIE(SmotrimBaseIE):
+    IE_NAME = 'smotrim'
+    _VALID_URL = r'(?:https?:)?//(?:(?:player|www)\.)?smotrim\.ru(?:/iframe)?/video(?:/id)?/(?P<id>\d+)'
+    _EMBED_REGEX = [fr'<iframe\b[^>]+\bsrc=["\'](?P<url>{_VALID_URL})']
+    _TESTS = [{
         'url': 'https://smotrim.ru/video/1539617',
-        'md5': 'b1923a533c8cab09679789d720d0b1c5',
         'info_dict': {
             'id': '1539617',
             'ext': 'mp4',
-            'title': 'Полиглот. Китайский с нуля за 16 часов! Урок №16',
-            'description': '',
+            'title': 'Урок №16',
+            'duration': 2631,
+            'series': 'Полиглот. Китайский с нуля за 16 часов!',
+            'series_id': '60562',
+            'tags': 'mincount:6',
+            'thumbnail': r're:https?://cdn-st\d+\.smotrim\.ru/.+\.(?:jpg|png)',
+            'timestamp': 1466771100,
+            'upload_date': '20160624',
+            'view_count': int,
         },
-        'add_ie': ['RUTV'],
-    }, {  # article (geo-restricted? plays fine from the US and JP)
+    }, {
+        'url': 'https://player.smotrim.ru/iframe/video/id/2988590',
+        'info_dict': {
+            'id': '2988590',
+            'ext': 'mp4',
+            'title': 'Трейлер',
+            'age_limit': 16,
+            'description': 'md5:6af7e68ecf4ed7b8ff6720d20c4da47b',
+            'duration': 30,
+            'series': 'Мы в разводе',
+            'series_id': '71624',
+            'tags': 'mincount:5',
+            'thumbnail': r're:https?://cdn-st\d+\.smotrim\.ru/.+\.(?:jpg|png)',
+            'timestamp': 1750670040,
+            'upload_date': '20250623',
+            'view_count': int,
+            'webpage_url': 'https://smotrim.ru/video/2988590',
+        },
+    }]
+    _WEBPAGE_TESTS = [{
         'url': 'https://smotrim.ru/article/2813445',
-        'md5': 'e0ac453952afbc6a2742e850b4dc8e77',
         'info_dict': {
             'id': '2431846',
             'ext': 'mp4',
-            'title': 'Новости культуры. Съёмки первой программы "Большие и маленькие"',
-            'description': 'md5:94a4a22472da4252bf5587a4ee441b99',
+            'title': 'Съёмки первой программы "Большие и маленькие"',
+            'description': 'md5:446c9a5d334b995152a813946353f447',
+            'duration': 240,
+            'series': 'Новости культуры',
+            'series_id': '19725',
+            'tags': 'mincount:6',
+            'thumbnail': r're:https?://cdn-st\d+\.smotrim\.ru/.+\.(?:jpg|png)',
+            'timestamp': 1656054443,
+            'upload_date': '20220624',
+            'view_count': int,
+            'webpage_url': 'https://smotrim.ru/video/2431846',
         },
-        'add_ie': ['RUTV'],
-    }, {  # brand, redirect
-        'url': 'https://smotrim.ru/brand/64356',
-        'md5': '740472999ccff81d7f6df79cecd91c18',
+    }, {
+        'url': 'https://www.vesti.ru/article/4642878',
         'info_dict': {
-            'id': '2354523',
+            'id': '3007209',
             'ext': 'mp4',
-            'title': 'Большие и маленькие. Лучшее. 4-й выпуск',
-            'description': 'md5:84089e834429008371ea41ea3507b989',
+            'title': 'Иностранные мессенджеры используют не только мошенники, но и вербовщики',
+            'description': 'md5:74ab625a0a89b87b2e0ed98d6391b182',
+            'duration': 265,
+            'series': 'Вести. Дежурная часть',
+            'series_id': '5204',
+            'tags': 'mincount:6',
+            'thumbnail': r're:https?://cdn-st\d+\.smotrim\.ru/.+\.(?:jpg|png)',
+            'timestamp': 1754756280,
+            'upload_date': '20250809',
+            'view_count': int,
+            'webpage_url': 'https://smotrim.ru/video/3007209',
         },
-        'add_ie': ['RUTV'],
-    }, {  # live
-        'url': 'https://smotrim.ru/live/19201',
-        'info_dict': {
-            'id': '19201',
-            'ext': 'mp4',
-            # this looks like a TV channel name
-            'title': 'Россия Культура. Прямой эфир',
-            'description': '',
-        },
-        'add_ie': ['RUTV'],
     }]

     def _real_extract(self, url):
-        video_id, typ = self._match_valid_url(url).group('id', 'type')
-        rutv_type = 'video'
-        if typ not in ('video', 'live'):
-            webpage = self._download_webpage(url, video_id, f'Resolving {typ} link')
-            # there are two cases matching regex:
-            # 1. "embedUrl" in JSON LD (/brand/)
-            # 2. "src" attribute from iframe (/article/)
-            video_id = self._search_regex(
-                r'"https://player.smotrim.ru/iframe/video/id/(?P<video_id>\d+)/',
-                webpage, 'video_id', default=None)
-            if not video_id:
-                raise ExtractorError('There are no video in this page.', expected=True)
-        elif typ == 'live':
-            rutv_type = 'live'
-
-        return self.url_result(f'https://player.vgtrk.com/iframe/{rutv_type}/id/{video_id}')
+        video_id = self._match_id(url)
+        return self._extract_from_smotrim_api('video', video_id)
+
+
+class SmotrimAudioIE(SmotrimBaseIE):
+    IE_NAME = 'smotrim:audio'
+    _VALID_URL = r'https?://(?:(?:player|www)\.)?smotrim\.ru(?:/iframe)?/audio(?:/id)?/(?P<id>\d+)'
+    _TESTS = [{
+        'url': 'https://smotrim.ru/audio/2573986',
+        'md5': 'e28d94c20da524e242b2d00caef41a8e',
+        'info_dict': {
+            'id': '2573986',
+            'ext': 'mp3',
+            'title': 'Радиоспектакль',
+            'description': 'md5:4bcaaf7d532bc78f76e478fad944e388',
+            'duration': 3072,
+            'series': 'Морис Леблан. Арсен Люпен, джентльмен-грабитель',
+            'series_id': '66461',
+            'tags': 'mincount:7',
+            'thumbnail': r're:https?://cdn-st\d+\.smotrim\.ru/.+\.(?:jpg|png)',
+            'timestamp': 1624884358,
+            'upload_date': '20210628',
+        },
+    }, {
+        'url': 'https://player.smotrim.ru/iframe/audio/id/2860468',
+        'md5': '5a6bc1fa24c7142958be1ad9cfae58a8',
+        'info_dict': {
+            'id': '2860468',
+            'ext': 'mp3',
+            'title': 'Колобок и музыкальная игра "Терем-теремок"',
+            'duration': 1501,
+            'series': 'Веселый колобок',
+            'series_id': '68880',
+            'tags': 'mincount:4',
+            'thumbnail': r're:https?://cdn-st\d+\.smotrim\.ru/.+\.(?:jpg|png)',
+            'timestamp': 1755925800,
+            'upload_date': '20250823',
+            'webpage_url': 'https://smotrim.ru/audio/2860468',
+        },
+    }]
+
+    def _real_extract(self, url):
+        audio_id = self._match_id(url)
+
+        return self._extract_from_smotrim_api('audio', audio_id)
+
+
+class SmotrimLiveIE(SmotrimBaseIE):
+    IE_NAME = 'smotrim:live'
+    _VALID_URL = r'''(?x:
+        (?:https?:)?//
+        (?:(?:(?:test)?player|www)\.)?
+        (?:
+            smotrim\.ru|
+            vgtrk\.com
+        )
+        (?:/iframe)?/
+        (?P<type>
+            channel|
+            (?:audio-)?live
+        )
+        (?:/u?id)?/(?P<id>[\da-f-]+)
+    )'''
+    _EMBED_REGEX = [fr'<iframe\b[^>]+\bsrc=["\'](?P<url>{_VALID_URL})']
+    _TESTS = [{
+        'url': 'https://smotrim.ru/channel/76',
+        'info_dict': {
+            'id': '1661',
+            'ext': 'mp4',
+            'title': str,
+            'channel_id': '76',
+            'description': 'Смотрим прямой эфир «Москва 24»',
+            'display_id': '76',
+            'live_status': 'is_live',
+            'thumbnail': r're:https?://cdn-st\d+\.smotrim\.ru/.+\.(?:jpg|png)',
+            'timestamp': int,
+            'upload_date': str,
+        },
+        'params': {'skip_download': 'Livestream'},
+    }, {
+        # Radio
+        'url': 'https://smotrim.ru/channel/81',
+        'info_dict': {
+            'id': '81',
+            'ext': 'mp3',
+            'title': str,
+            'channel_id': '81',
+            'live_status': 'is_live',
+            'thumbnail': r're:https?://cdn-st\d+\.smotrim\.ru/.+\.(?:jpg|png)',
+        },
+        'params': {'skip_download': 'Livestream'},
+    }, {
+        # Sometimes geo-restricted to Russia
+        'url': 'https://player.smotrim.ru/iframe/live/uid/381308c7-a066-4c4f-9656-83e2e792a7b4',
+        'info_dict': {
+            'id': '19201',
+            'ext': 'mp4',
+            'title': str,
+            'channel_id': '4',
+            'description': 'Смотрим прямой эфир «Россия К»',
+            'display_id': '381308c7-a066-4c4f-9656-83e2e792a7b4',
+            'live_status': 'is_live',
+            'thumbnail': r're:https?://cdn-st\d+\.smotrim\.ru/.+\.(?:jpg|png)',
+            'timestamp': int,
+            'upload_date': str,
+            'webpage_url': 'https://smotrim.ru/channel/4',
+        },
+        'params': {'skip_download': 'Livestream'},
+    }, {
+        'url': 'https://smotrim.ru/live/19201',
+        'only_matching': True,
+    }, {
+        'url': 'https://player.smotrim.ru/iframe/audio-live/id/81',
+        'only_matching': True,
+    }, {
+        'url': 'https://testplayer.vgtrk.com/iframe/live/id/19201',
+        'only_matching': True,
+    }]
+
+    def _real_extract(self, url):
+        typ, display_id = self._match_valid_url(url).group('type', 'id')
+
+        if typ == 'live' and re.fullmatch(r'[0-9]+', display_id):
+            url = self._request_webpage(url, display_id).url
+            typ = self._match_valid_url(url).group('type')
+
+        if typ == 'channel':
+            webpage = self._download_webpage(url, display_id)
+            src_url = traverse_obj(webpage, ((
+                ({find_element(cls='main-player__frame', html=True)}, {extract_attributes}, 'src'),
+                ({find_element(cls='audio-play-button', html=True)},
+                 {extract_attributes}, 'value', {urllib.parse.unquote}, {json.loads}, 'source'),
+            ), any, {self._proto_relative_url}, {url_or_none}, {require('src URL')}))
+            typ, video_id = self._match_valid_url(src_url).group('type', 'id')
+        else:
+            video_id = display_id
+
+        return {
+            'display_id': display_id,
+            **self._extract_from_smotrim_api(typ, video_id),
+        }
+
+
+class SmotrimPlaylistIE(SmotrimBaseIE):
+    IE_NAME = 'smotrim:playlist'
+    _PAGE_SIZE = 15
+    _VALID_URL = r'https?://smotrim\.ru/(?P<type>brand|podcast)/(?P<id>\d+)/?(?P<season>[\w-]+)?'
+    _TESTS = [{
+        # Video
+        'url': 'https://smotrim.ru/brand/64356',
+        'info_dict': {
+            'id': '64356',
+            'title': 'Большие и маленькие',
+        },
+        'playlist_mincount': 55,
+    }, {
+        # Video, season
+        'url': 'https://smotrim.ru/brand/65293/3-sezon',
+        'info_dict': {
+            'id': '65293',
+            'title': 'Спасская',
+            'season': '3 сезон',
+        },
+        'playlist_count': 16,
+    }, {
+        # Audio
+        'url': 'https://smotrim.ru/brand/68880',
+        'info_dict': {
+            'id': '68880',
+            'title': 'Веселый колобок',
+        },
+        'playlist_mincount': 156,
+    }, {
+        # Podcast
+        'url': 'https://smotrim.ru/podcast/8021',
+        'info_dict': {
+            'id': '8021',
+            'title': 'Сила звука',
+        },
+        'playlist_mincount': 27,
+    }]
+
+    def _fetch_page(self, endpoint, key, playlist_id, page):
+        page += 1
+        items = self._download_json(
+            f'{self._BASE_URL}/api/{endpoint}', playlist_id,
+            f'Downloading page {page}', query={
+                key: playlist_id,
+                'limit': self._PAGE_SIZE,
+                'page': page,
+            },
+        )
+
+        for link in traverse_obj(items, ('contents', -1, 'list', ..., 'link', {str})):
+            yield self.url_result(urljoin(self._BASE_URL, link))
+
+    def _real_extract(self, url):
+        playlist_type, playlist_id, season = self._match_valid_url(url).group('type', 'id', 'season')
+        key = 'rubricId' if playlist_type == 'podcast' else 'brandId'
+        webpage = self._download_webpage(url, playlist_id)
+        playlist_title = self._html_search_meta(['og:title', 'twitter:title'], webpage, default=None)
+
+        if season:
+            return self.playlist_from_matches(traverse_obj(webpage, (
+                {find_elements(tag='a', attr='href', value=r'/video/\d+', html=True, regex=True)},
+                ..., {extract_attributes}, 'href', {str},
+            )), playlist_id, playlist_title, season=traverse_obj(webpage, (
+                {find_element(cls='seasons__item seasons__item--selected')}, {clean_html},
+            )), ie=SmotrimIE, getter=urljoin(self._BASE_URL))
+
+        if traverse_obj(webpage, (
+            {find_element(cls='brand-main-item__videos')}, {clean_html}, filter,
+        )):
+            endpoint = 'videos'
+        else:
+            endpoint = 'audios'
+
+        return self.playlist_result(OnDemandPagedList(
+            functools.partial(self._fetch_page, endpoint, key, playlist_id), self._PAGE_SIZE), playlist_id, playlist_title)
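How the new SmotrimBaseIE builds its player-API path can be read off the diff above; a short sketch of just that string logic (URLs reproduced from the code, not re-verified against the site):

def api_url(typ, item_id):
    path = f'data{typ.replace("-", "")}/{"uid" if typ == "live" else "id"}'
    return f'https://player.smotrim.ru/iframe/{path}/{item_id}/sid/smotrim'

print(api_url('video', '1539617'))
# https://player.smotrim.ru/iframe/datavideo/id/1539617/sid/smotrim
print(api_url('audio-live', '81'))
# https://player.smotrim.ru/iframe/dataaudiolive/id/81/sid/smotrim
print(api_url('live', '381308c7-a066-4c4f-9656-83e2e792a7b4'))
# https://player.smotrim.ru/iframe/datalive/uid/381308c7-a066-4c4f-9656-83e2e792a7b4/sid/smotrim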
@@ -1,167 +0,0 @@
import functools
import json
import re

from .common import InfoExtractor
from ..utils import (
    OnDemandPagedList,
    clean_podcast_url,
    float_or_none,
    int_or_none,
    strip_or_none,
    traverse_obj,
    try_get,
    unified_strdate,
)


class SpotifyBaseIE(InfoExtractor):
    _WORKING = False
    _ACCESS_TOKEN = None
    _OPERATION_HASHES = {
        'Episode': '8276d4423d709ae9b68ec1b74cc047ba0f7479059a37820be730f125189ac2bf',
        'MinimalShow': '13ee079672fad3f858ea45a55eb109553b4fb0969ed793185b2e34cbb6ee7cc0',
        'ShowEpisodes': 'e0e5ce27bd7748d2c59b4d44ba245a8992a05be75d6fabc3b20753fc8857444d',
    }
    _VALID_URL_TEMPL = r'https?://open\.spotify\.com/(?:embed-podcast/|embed/|)%s/(?P<id>[^/?&#]+)'
    _EMBED_REGEX = [r'<iframe[^>]+src="(?P<url>https?://open\.spotify.com/embed/[^"]+)"']

    def _real_initialize(self):
        self._ACCESS_TOKEN = self._download_json(
            'https://open.spotify.com/get_access_token', None)['accessToken']

    def _call_api(self, operation, video_id, variables, **kwargs):
        return self._download_json(
            'https://api-partner.spotify.com/pathfinder/v1/query', video_id, query={
                'operationName': 'query' + operation,
                'variables': json.dumps(variables),
                'extensions': json.dumps({
                    'persistedQuery': {
                        'sha256Hash': self._OPERATION_HASHES[operation],
                    },
                }),
            }, headers={'authorization': 'Bearer ' + self._ACCESS_TOKEN},
            **kwargs)['data']

    def _extract_episode(self, episode, series):
        episode_id = episode['id']
        title = episode['name'].strip()

        formats = []
        audio_preview = episode.get('audioPreview') or {}
        audio_preview_url = audio_preview.get('url')
        if audio_preview_url:
            f = {
                'url': audio_preview_url.replace('://p.scdn.co/mp3-preview/', '://anon-podcast.scdn.co/'),
                'vcodec': 'none',
            }
            audio_preview_format = audio_preview.get('format')
            if audio_preview_format:
                f['format_id'] = audio_preview_format
                mobj = re.match(r'([0-9A-Z]{3})_(?:[A-Z]+_)?(\d+)', audio_preview_format)
                if mobj:
                    f.update({
                        'abr': int(mobj.group(2)),
                        'ext': mobj.group(1).lower(),
                    })
            formats.append(f)

        for item in (try_get(episode, lambda x: x['audio']['items']) or []):
            item_url = item.get('url')
            if not (item_url and item.get('externallyHosted')):
                continue
            formats.append({
                'url': clean_podcast_url(item_url),
                'vcodec': 'none',
            })

        thumbnails = []
        for source in (try_get(episode, lambda x: x['coverArt']['sources']) or []):
            source_url = source.get('url')
            if not source_url:
                continue
            thumbnails.append({
                'url': source_url,
                'width': int_or_none(source.get('width')),
                'height': int_or_none(source.get('height')),
            })

        return {
            'id': episode_id,
            'title': title,
            'formats': formats,
            'thumbnails': thumbnails,
            'description': strip_or_none(episode.get('description')),
            'duration': float_or_none(try_get(
                episode, lambda x: x['duration']['totalMilliseconds']), 1000),
            'release_date': unified_strdate(try_get(
                episode, lambda x: x['releaseDate']['isoString'])),
            'series': series,
        }


class SpotifyIE(SpotifyBaseIE):
    IE_NAME = 'spotify'
    IE_DESC = 'Spotify episodes'
    _VALID_URL = SpotifyBaseIE._VALID_URL_TEMPL % 'episode'
    _TESTS = [{
        'url': 'https://open.spotify.com/episode/4Z7GAJ50bgctf6uclHlWKo',
        'md5': '74010a1e3fa4d9e1ab3aa7ad14e42d3b',
        'info_dict': {
            'id': '4Z7GAJ50bgctf6uclHlWKo',
            'ext': 'mp3',
            'title': 'From the archive: Why time management is ruining our lives',
            'description': 'md5:b120d9c4ff4135b42aa9b6d9cde86935',
            'duration': 2083.605,
            'release_date': '20201217',
            'series': "The Guardian's Audio Long Reads",
        },
    }, {
        'url': 'https://open.spotify.com/embed/episode/4TvCsKKs2thXmarHigWvXE?si=7eatS8AbQb6RxqO2raIuWA',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        episode_id = self._match_id(url)
        episode = self._call_api('Episode', episode_id, {
            'uri': 'spotify:episode:' + episode_id,
        })['episode']
        return self._extract_episode(
            episode, try_get(episode, lambda x: x['podcast']['name']))


class SpotifyShowIE(SpotifyBaseIE):
    IE_NAME = 'spotify:show'
    IE_DESC = 'Spotify shows'
    _VALID_URL = SpotifyBaseIE._VALID_URL_TEMPL % 'show'
    _TEST = {
        'url': 'https://open.spotify.com/show/4PM9Ke6l66IRNpottHKV9M',
        'info_dict': {
            'id': '4PM9Ke6l66IRNpottHKV9M',
            'title': 'The Story from the Guardian',
            'description': 'The Story podcast is dedicated to our finest audio documentaries, investigations and long form stories',
        },
        'playlist_mincount': 36,
    }
    _PER_PAGE = 100

    def _fetch_page(self, show_id, page=0):
        return self._call_api('ShowEpisodes', show_id, {
            'limit': 100,
            'offset': page * self._PER_PAGE,
            'uri': f'spotify:show:{show_id}',
        }, note=f'Downloading page {page + 1} JSON metadata')['podcast']

    def _real_extract(self, url):
        show_id = self._match_id(url)
        first_page = self._fetch_page(show_id)

        def _entries(page):
            podcast = self._fetch_page(show_id, page) if page else first_page
            yield from map(
                functools.partial(self._extract_episode, series=podcast.get('name')),
                traverse_obj(podcast, ('episodes', 'items', ..., 'episode')))

        return self.playlist_result(
            OnDemandPagedList(_entries, self._PER_PAGE),
            show_id, first_page.get('name'), first_page.get('description'))
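The removed Spotify extractor above parses preview format labels such as 'MP4_128' into an extension and a bitrate. A stdlib-only sketch of that regex (the label set below is illustrative):

import re

for label in ('MP4_128', 'MP4_DUAL_128', 'OGG_VORBIS_96'):
    mobj = re.match(r'([0-9A-Z]{3})_(?:[A-Z]+_)?(\d+)', label)
    if mobj:
        # group(1) becomes the extension, group(2) the audio bitrate (abr)
        print(label, '->', mobj.group(1).lower(), int(mobj.group(2)))
# MP4_128 -> mp4 128
# MP4_DUAL_128 -> mp4 128
# OGG_VORBIS_96 -> ogg 96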
@@ -12,7 +12,7 @@ from ..utils.traversal import traverse_obj


 class SubstackIE(InfoExtractor):
-    _VALID_URL = r'https?://(?P<username>[\w-]+)\.substack\.com/p/(?P<id>[\w-]+)'
+    _VALID_URL = r'https?://[\w-]+\.substack\.com/p/(?P<id>[\w-]+)'
     _TESTS = [{
         'url': 'https://haleynahman.substack.com/p/i-made-a-vlog?s=r',
         'md5': 'f27e4fc6252001d48d479f45e65cdfd5',
@@ -116,7 +116,7 @@ class SubstackIE(InfoExtractor):
         return formats, subtitles

     def _real_extract(self, url):
-        display_id, username = self._match_valid_url(url).group('id', 'username')
+        display_id = self._match_id(url)
         webpage = self._download_webpage(url, display_id)

         webpage_info = self._parse_json(self._search_json(
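With the unused username group dropped, _match_id() (which returns the 'id' group of _VALID_URL) is all the Substack extractor needs. A quick stdlib check of the new pattern:

import re

_VALID_URL = r'https?://[\w-]+\.substack\.com/p/(?P<id>[\w-]+)'
m = re.match(_VALID_URL, 'https://haleynahman.substack.com/p/i-made-a-vlog?s=r')
print(m.group('id'))  # i-made-a-vlog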
@@ -46,7 +46,7 @@ class TelecincoBaseIE(InfoExtractor):
                 error_code = traverse_obj(
                     self._webpage_read_content(error.cause.response, caronte['cerbero'], video_id, fatal=False),
                     ({json.loads}, 'code', {int}))
-                if error_code in (4038, 40313):
+                if error_code in (4036, 4038, 40313):
                     self.raise_geo_restricted(countries=['ES'])
                 raise

@@ -140,7 +140,7 @@ class TelecincoIE(TelecincoBaseIE):

     def _real_extract(self, url):
         display_id = self._match_id(url)
-        webpage = self._download_webpage(url, display_id)
+        webpage = self._download_webpage(url, display_id, impersonate=True)
         article = self._search_json(
             r'window\.\$REACTBASE_STATE\.article(?:_multisite)?\s*=',
             webpage, 'article', display_id)['article']
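The impersonate=True change above makes the Telecinco page request use yt-dlp's browser impersonation. A hedged usage sketch from the embedding side (assumes yt-dlp with an impersonation backend such as curl_cffi installed; the target name and URL are illustrative):

from yt_dlp import YoutubeDL
from yt_dlp.networking.impersonate import ImpersonateTarget

# Roughly what `--impersonate chrome` does on the CLI (assumption; version-dependent)
with YoutubeDL({'impersonate': ImpersonateTarget.from_str('chrome')}) as ydl:
    ydl.download(['https://www.telecinco.es/...'])  # illustrative placeholder URL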
Some files were not shown because too many files have changed in this diff.