Mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2025-10-21 17:58:38 +00:00)
parent 264044286d
commit 4e6a693057
12 .github/workflows/build.yml vendored

@@ -558,35 +558,39 @@ jobs:
 cat >> _update_spec << EOF
 # This file is used for regulating self-update
 lock 2022.08.18.36 .+ Python 3\.6
-lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
+lock 2023.11.16 zip Python 3\.7
 lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
 lock 2024.10.22 py2exe .+
 lock 2024.10.22 zip Python 3\.8
 lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
 lock 2025.08.11 darwin_legacy_exe .+
 lock 2025.08.27 linux_armv7l_exe .+
+lock 2025.10.14 zip Python 3\.9
 lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
-lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
+lockV2 yt-dlp/yt-dlp 2023.11.16 zip Python 3\.7
 lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
 lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
 lockV2 yt-dlp/yt-dlp 2024.10.22 zip Python 3\.8
 lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
 lockV2 yt-dlp/yt-dlp 2025.08.11 darwin_legacy_exe .+
 lockV2 yt-dlp/yt-dlp 2025.08.27 linux_armv7l_exe .+
-lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
+lockV2 yt-dlp/yt-dlp 2025.10.14 zip Python 3\.9
+lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 zip Python 3\.7
 lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
 lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
 lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 zip Python 3\.8
 lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
 lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.12.233030 darwin_legacy_exe .+
 lockV2 yt-dlp/yt-dlp-nightly-builds 2025.08.30.232839 linux_armv7l_exe .+
-lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
+lockV2 yt-dlp/yt-dlp-nightly-builds 2025.10.14.232845 zip Python 3\.9
+lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 zip Python 3\.7
 lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
 lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
 lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 zip Python 3\.8
 lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
 lockV2 yt-dlp/yt-dlp-master-builds 2025.08.12.232447 darwin_legacy_exe .+
 lockV2 yt-dlp/yt-dlp-master-builds 2025.09.05.212910 linux_armv7l_exe .+
+lockV2 yt-dlp/yt-dlp-master-builds 2025.10.14.232330 zip Python 3\.9
 EOF
 
 - name: Sign checksum files
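Editor's note on the hunk above: each `lock` line in `_update_spec` pairs a maximum version with a regex matched against a label describing the client's variant and runtime, and `lockV2` scopes the same rule to a specific release repo; the new `zip Python 3\.9` entries pin Python 3.9 zip-variant clients to the last release that supports them. A minimal sketch of how such a line could be evaluated — the function, label format, and first-match semantics here are illustrative assumptions, not yt-dlp's actual updater code:

```python
import re

SPEC = r'''
lock 2023.11.16 zip Python 3\.7
lock 2025.10.14 zip Python 3\.9
'''

def max_allowed_version(spec, label):
    """First matching lock wins; returns the version cap, or None if unlocked."""
    for line in spec.strip().splitlines():
        action, version, pattern = line.split(' ', 2)
        if action == 'lock' and re.fullmatch(pattern, label):
            return version
    return None

print(max_allowed_version(SPEC, 'zip Python 3.9'))   # 2025.10.14 (new in this commit)
print(max_allowed_version(SPEC, 'zip Python 3.12'))  # None -> free to update
```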
8 .github/workflows/core.yml vendored

@@ -36,12 +36,10 @@ jobs:
 fail-fast: false
 matrix:
 os: [ubuntu-latest]
-# CPython 3.9 is in quick-test
-python-version: ['3.10', '3.11', '3.12', '3.13', '3.14-dev', pypy-3.11]
+# CPython 3.10 is in quick-test
+python-version: ['3.11', '3.12', '3.13', '3.14', pypy-3.11]
 include:
 # atleast one of each CPython/PyPy tests must be in windows
-- os: windows-latest
-python-version: '3.9'
 - os: windows-latest
 python-version: '3.10'
 - os: windows-latest
@@ -51,7 +49,7 @@ jobs:
 - os: windows-latest
 python-version: '3.13'
 - os: windows-latest
-python-version: '3.14-dev'
+python-version: '3.14'
 - os: windows-latest
 python-version: pypy-3.11
 steps:
6 .github/workflows/download.yml vendored

@@ -13,7 +13,7 @@ jobs:
 - name: Set up Python
 uses: actions/setup-python@v6
 with:
-python-version: 3.9
+python-version: '3.10'
 - name: Install test requirements
 run: python3 ./devscripts/install_deps.py --include dev
 - name: Run tests
@@ -28,11 +28,11 @@ jobs:
 fail-fast: true
 matrix:
 os: [ubuntu-latest]
-python-version: ['3.10', '3.11', '3.12', '3.13', '3.14-dev', pypy-3.11]
+python-version: ['3.11', '3.12', '3.13', '3.14', pypy-3.11]
 include:
 # atleast one of each CPython/PyPy tests must be in windows
 - os: windows-latest
-python-version: '3.9'
+python-version: '3.10'
 - os: windows-latest
 python-version: pypy-3.11
 steps:
6 .github/workflows/quick-test.yml vendored

@@ -10,10 +10,10 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@v4
-- name: Set up Python 3.9
+- name: Set up Python 3.10
 uses: actions/setup-python@v6
 with:
-python-version: '3.9'
+python-version: '3.10'
 - name: Install test requirements
 run: python3 ./devscripts/install_deps.py -o --include test
 - name: Run tests
@@ -29,7 +29,7 @@ jobs:
 - uses: actions/checkout@v4
 - uses: actions/setup-python@v6
 with:
-python-version: '3.9'
+python-version: '3.10'
 - name: Install dev dependencies
 run: python3 ./devscripts/install_deps.py -o --include static-analysis
 - name: Make lazy extractors
2 .github/workflows/signature-tests.yml vendored

@@ -25,7 +25,7 @@ jobs:
 fail-fast: false
 matrix:
 os: [ubuntu-latest, windows-latest]
-python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', '3.14-dev', pypy-3.11]
+python-version: ['3.10', '3.11', '3.12', '3.13', '3.14', pypy-3.11]
 steps:
 - uses: actions/checkout@v4
 - name: Set up Python ${{ matrix.python-version }}
CONTRIBUTING.md

@@ -284,7 +284,7 @@ ## Adding support for a new site
 
 You can use `hatch fmt` to automatically fix problems. Rules that the linter/formatter enforces should not be disabled with `# noqa` unless a maintainer requests it. The only exception allowed is for old/printf-style string formatting in GraphQL query templates (use `# noqa: UP031`).
 
-1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython >=3.9 and PyPy >=3.11. Backward compatibility is not required for even older versions of Python.
+1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython >=3.10 and PyPy >=3.11. Backward compatibility is not required for even older versions of Python.
 1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:
 
 ```shell
README.md

@@ -194,7 +194,7 @@ # To install nightly with pip:
 You can suppress this warning by adding `--no-update` to your command or configuration file.
 
 ## DEPENDENCIES
-Python versions 3.9+ (CPython) and 3.11+ (PyPy) are supported. Other versions and implementations may or may not work correctly.
+Python versions 3.10+ (CPython) and 3.11+ (PyPy) are supported. Other versions and implementations may or may not work correctly.
 
 <!-- Python 3.5+ uses VC++14 and it is already embedded in the binary created
 <!x-- https://www.microsoft.com/en-us/download/details.aspx?id=26999 --x>
@@ -273,7 +273,7 @@ ### Standalone PyInstaller Builds
 **Important**: Running `pyinstaller` directly **instead of** using `python -m bundle.pyinstaller` is **not** officially supported. This may or may not work correctly.
 
 ### Platform-independent Binary (UNIX)
-You will need the build tools `python` (3.9+), `zip`, `make` (GNU), `pandoc`\* and `pytest`\*.
+You will need the build tools `python` (3.10+), `zip`, `make` (GNU), `pandoc`\* and `pytest`\*.
 
 After installing these, simply run `make`.
 
@@ -2255,7 +2255,7 @@ ### Differences in default behavior
 
 Some of yt-dlp's default options are different from that of youtube-dl and youtube-dlc:
 
-* yt-dlp supports only [Python 3.9+](## "Windows 8"), and will remove support for more versions as they [become EOL](https://devguide.python.org/versions/#python-release-cycle); while [youtube-dl still supports Python 2.6+ and 3.2+](https://github.com/ytdl-org/youtube-dl/issues/30568#issue-1118238743)
+* yt-dlp supports only [Python 3.10+](## "Windows 8"), and will remove support for more versions as they [become EOL](https://devguide.python.org/versions/#python-release-cycle); while [youtube-dl still supports Python 2.6+ and 3.2+](https://github.com/ytdl-org/youtube-dl/issues/30568#issue-1118238743)
 * The options `--auto-number` (`-A`), `--title` (`-t`) and `--literal` (`-l`), no longer work. See [removed options](#Removed) for details
 * `avconv` is not supported as an alternative to `ffmpeg`
 * yt-dlp stores config files in slightly different locations to youtube-dl. See [CONFIGURATION](#configuration) for a list of correct locations
devscripts/make_changelog.py

@@ -373,7 +373,7 @@ def groups(self):
 issues = [issue.strip()[1:] for issue in issues.split(',')] if issues else []
 
 if prefix:
-groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')))
+groups, details, sub_details = zip(*map(self.details_from_prefix, prefix.split(',')), strict=True)
 group = next(iter(filter(None, groups)), None)
 details = ', '.join(unique(details))
 sub_details = list(itertools.chain.from_iterable(sub_details))
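The bulk of this commit is mechanical: with CPython 3.10 as the new floor, bare `zip(...)` calls gain an explicit `strict=` argument (PEP 618, new in 3.10). A minimal illustration of the difference, with made-up values:

```python
# zip(strict=True) requires Python 3.10+ (PEP 618); values are illustrative.
groups = ['core', 'extractor']
details = ['a', 'b', 'c']

print(list(zip(groups, details)))  # silently truncates to 2 pairs
try:
    list(zip(groups, details, strict=True))  # raises instead of truncating
except ValueError as e:
    print(e)  # zip() argument 2 is longer than argument 1
```

Where the iterables are guaranteed equal-length, `strict=True` turns a silent truncation bug into a loud `ValueError`; where truncation is intended, the commit writes `strict=False` or appends `# noqa: B905` to make the intent explicit.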
pyproject.toml

@@ -13,7 +13,7 @@ maintainers = [
 ]
 description = "A feature-rich command-line audio/video downloader"
 readme = "README.md"
-requires-python = ">=3.9"
+requires-python = ">=3.10"
 keywords = [
 "cli",
 "downloader",
@@ -30,7 +30,6 @@ classifiers = [
 "Environment :: Console",
 "Programming Language :: Python",
 "Programming Language :: Python :: 3 :: Only",
-"Programming Language :: Python :: 3.9",
 "Programming Language :: Python :: 3.10",
 "Programming Language :: Python :: 3.11",
 "Programming Language :: Python :: 3.12",
@@ -168,7 +167,6 @@ run-cov = "echo Code coverage not implemented && exit 1"
 
 [[tool.hatch.envs.hatch-test.matrix]]
 python = [
-"3.9",
 "3.10",
 "3.11",
 "3.12",
test/helper.py

@@ -176,7 +176,7 @@ def _iter_differences(got, expected, field):
 yield field, f'expected length of {len(expected)}, got {len(got)}'
 return
 
-for index, (got_val, expected_val) in enumerate(zip(got, expected)):
+for index, (got_val, expected_val) in enumerate(zip(got, expected, strict=True)):
 field_name = str(index) if field is None else f'{field}.{index}'
 yield from _iter_differences(got_val, expected_val, field_name)
 return
test/test_YoutubeDL.py

@@ -13,6 +13,7 @@
 
 import contextlib
 import copy
+import itertools
 import json
 
 from test.helper import FakeYDL, assertRegexpMatches, try_rm
@@ -414,7 +415,7 @@ def format_info(f_id):
 downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
 self.assertEqual(downloaded_ids, ['248+141'])
 
-for f1, f2 in zip(formats_order, formats_order[1:]):
+for f1, f2 in itertools.pairwise(formats_order):
 info_dict = _make_result([f1, f2], extractor='youtube')
 ydl = YDL({'format': 'best/bestvideo'})
 ydl.sort_formats(info_dict)
@@ -749,7 +750,7 @@ def test(tmpl, expected, *, info=None, **params):
 
 if not isinstance(expected, (list, tuple)):
 expected = (expected, expected)
-for (name, got), expect in zip((('outtmpl', out), ('filename', fname)), expected):
+for (name, got), expect in zip((('outtmpl', out), ('filename', fname)), expected, strict=True):
 if callable(expect):
 self.assertTrue(expect(got), f'Wrong {name} from {tmpl}')
 elif expect is not None:
@@ -1147,7 +1148,7 @@ def test_selection(params, expected_ids, evaluate_all=False):
 entries = func(evaluated)
 results = [(v['playlist_autonumber'] - 1, (int(v['id']), v['playlist_index']))
 for v in get_downloaded_info_dicts(params, entries)]
-self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids))), f'Entries of {name} for {params}')
+self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids, strict=True))), f'Entries of {name} for {params}')
 self.assertEqual(sorted(evaluated), expected_eval, f'Evaluation of {name} for {params}')
 
 test_selection({}, INDICES)
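`zip(lst, lst[1:])` is the classic sliding-pair idiom; `itertools.pairwise` (added in Python 3.10) is the named, copy-free equivalent that this commit switches to here and in the lynda extractor further down. A quick illustration with placeholder values:

```python
import itertools

formats_order = ['248', '137', '22']

# Identical pairs either way; pairwise avoids materializing the slice copy.
assert list(zip(formats_order, formats_order[1:])) == list(itertools.pairwise(formats_order))
print(list(itertools.pairwise(formats_order)))  # [('248', '137'), ('137', '22')]
```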
test/test_postprocessors.py

@@ -115,7 +115,7 @@ def _chapters(self, ends, titles):
 self.assertEqual(len(ends), len(titles))
 start = 0
 chapters = []
-for e, t in zip(ends, titles):
+for e, t in zip(ends, titles, strict=True):
 chapters.append(self._chapter(start, e, t))
 start = e
 return chapters
test/test_traversal.py

@@ -417,7 +417,7 @@ def test_traversal_unbranching(self):
 
 def test_traversal_morsel(self):
 morsel = http.cookies.Morsel()
-values = dict(zip(morsel, 'abcdefghijklmnop'))
+values = dict(zip(morsel, 'abcdefghijklmnop', strict=False))
 morsel.set('item_key', 'item_value', 'coded_value')
 morsel.update(values)
 values['key'] = 'item_key'
test/test_utils.py

@@ -1863,7 +1863,7 @@ def test_get_elements_text_and_html_by_attribute(self):
 
 self.assertEqual(
 list(get_elements_text_and_html_by_attribute('class', 'foo bar', html)),
-list(zip(['nice', 'also nice'], self.GET_ELEMENTS_BY_CLASS_RES)))
+list(zip(['nice', 'also nice'], self.GET_ELEMENTS_BY_CLASS_RES, strict=True)))
 self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'foo', html)), [])
 self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'no-such-foo', html)), [])
 
yt_dlp/YoutubeDL.py

@@ -2007,7 +2007,7 @@ def __process_playlist(self, ie_result, download):
 else:
 entries = resolved_entries = list(entries)
 n_entries = len(resolved_entries)
-ie_result['requested_entries'], ie_result['entries'] = tuple(zip(*resolved_entries)) or ([], [])
+ie_result['requested_entries'], ie_result['entries'] = tuple(zip(*resolved_entries, strict=True)) or ([], [])
 if not ie_result.get('playlist_count'):
 # Better to do this after potentially exhausting entries
 ie_result['playlist_count'] = all_entries.get_full_count()
@@ -2785,7 +2785,7 @@ def sanitize_numeric_fields(info):
 
 dummy_chapter = {'end_time': 0, 'start_time': info_dict.get('duration')}
 for idx, (prev, current, next_) in enumerate(zip(
-(dummy_chapter, *chapters), chapters, (*chapters[1:], dummy_chapter)), 1):
+(dummy_chapter, *chapters), chapters, (*chapters[1:], dummy_chapter), strict=False), 1):
 if current.get('start_time') is None:
 current['start_time'] = prev.get('end_time')
 if not current.get('end_time'):
@@ -3370,7 +3370,7 @@ def _write_link_file(link_type):
 def existing_video_file(*filepaths):
 ext = info_dict.get('ext')
 converted = lambda file: replace_extension(file, self.params.get('final_ext') or ext, ext)
-file = self.existing_file(itertools.chain(*zip(map(converted, filepaths), filepaths)),
+file = self.existing_file(itertools.chain(*zip(map(converted, filepaths), filepaths, strict=True)),
 default_overwrite=False)
 if file:
 info_dict['ext'] = os.path.splitext(file)[1][1:]
@@ -3956,7 +3956,7 @@ def render_thumbnails_table(self, info_dict):
 
 def render_subtitles_table(self, video_id, subtitles):
 def _row(lang, formats):
-exts, names = zip(*((f['ext'], f.get('name') or 'unknown') for f in reversed(formats)))
+exts, names = zip(*((f['ext'], f.get('name') or 'unknown') for f in reversed(formats)), strict=True)
 if len(set(names)) == 1:
 names = [] if names[0] == 'unknown' else names[:1]
 return [lang, ', '.join(names), ', '.join(exts)]
@@ -4112,7 +4112,6 @@ def cookiejar(self):
 self.params.get('cookiefile'), self.params.get('cookiesfrombrowser'), self)
 except CookieLoadError as error:
 cause = error.__context__
-# compat: <=py3.9: `traceback.format_exception` has a different signature
 self.report_error(str(cause), tb=''.join(traceback.format_exception(None, cause, cause.__traceback__)))
 raise
 
yt_dlp/__init__.py

@@ -1,8 +1,8 @@
 import sys
 
-if sys.version_info < (3, 9):
+if sys.version_info < (3, 10):
 raise ImportError(
-f'You are using an unsupported version of Python. Only Python versions 3.9 and above are supported by yt-dlp')  # noqa: F541
+f'You are using an unsupported version of Python. Only Python versions 3.10 and above are supported by yt-dlp')  # noqa: F541
 
 __license__ = 'The Unlicense'
 
@@ -974,13 +974,8 @@ def _real_main(argv=None):
 
 try:
 updater = Updater(ydl, opts.update_self)
-if opts.update_self and updater.update() and actual_use:
-if updater.cmd:
+if opts.update_self and updater.update() and actual_use and updater.cmd:
 return updater.restart()
-# This code is reachable only for zip variant in py < 3.10
-# It makes sense to exit here, but the old behavior is to continue
-ydl.report_warning('Restart yt-dlp to use the updated version')
-# return 100, 'ERROR: The program must exit for the update to complete'
 except Exception:
 traceback.print_exc()
 ydl._download_retcode = 100
yt_dlp/aes.py

@@ -447,7 +447,7 @@ def key_schedule_core(data, rcon_iteration):
 
 
 def xor(data1, data2):
-return [x ^ y for x, y in zip(data1, data2)]
+return [x ^ y for x, y in zip(data1, data2, strict=False)]
 
 
 def iter_mix_columns(data, matrix):
yt_dlp/compat/types.py (deleted)

@@ -1,13 +0,0 @@
-# flake8: noqa: F405
-from types import *  # noqa: F403
-
-from .compat_utils import passthrough_module
-
-passthrough_module(__name__, 'types')
-del passthrough_module
-
-try:
-# NB: pypy has builtin NoneType, so checking NameError won't work
-from types import NoneType  # >= 3.10
-except ImportError:
-NoneType = type(None)
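This shim existed solely because `types.NoneType` was only (re)introduced in Python 3.10; with 3.9 dropped, the stdlib import works on every supported interpreter, and the networking modules later in this commit switch their imports accordingly. A quick check:

```python
# On Python 3.10+ (and on PyPy, which always had it) this import succeeds,
# which is exactly why the compat shim above could be deleted.
from types import NoneType

assert NoneType is type(None)
print(isinstance(None, NoneType))  # True
```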
@@ -22,12 +22,8 @@
 
 def getproxies_registry_patched():
 proxies = getproxies_registry()
-if (
-sys.version_info >= (3, 10, 5) # https://docs.python.org/3.10/whatsnew/changelog.html#python-3-10-5-final
-or (3, 9, 13) <= sys.version_info < (3, 10) # https://docs.python.org/3.9/whatsnew/changelog.html#python-3-9-13-final
-):
-return proxies
 
+if sys.version_info < (3, 10, 5): # https://docs.python.org/3.10/whatsnew/changelog.html#python-3-10-5-final
 for scheme in ('https', 'ftp'):
 if scheme in proxies and proxies[scheme].startswith(f'{scheme}://'):
 proxies[scheme] = 'http' + proxies[scheme][len(scheme):]
yt_dlp/extractor/archiveorg.py

@@ -740,7 +740,7 @@ def _call_cdx_api(self, item_id, url, filters: list | None = None, collapse: lis
 note or 'Downloading CDX API JSON', query=query, fatal=fatal)
 if isinstance(res, list) and len(res) >= 2:
 # format response to make it easier to use
-return [dict(zip(res[0], v)) for v in res[1:]]
+return [dict(zip(res[0], v)) for v in res[1:]]  # noqa: B905
 elif not isinstance(res, list) or len(res) != 0:
 self.report_warning('Error while parsing CDX API response' + bug_reports_message())
 
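Not every `zip` gets `strict=`: where ragged input is possible and truncation is the point, the commit instead appends `# noqa: B905` to silence ruff's zip-without-explicit-strict rule. A sketch of why, with hypothetical header/row shapes (not real CDX data):

```python
# Hypothetical CDX-style response: first row is the header, rest are rows.
res = [
    ['urlkey', 'timestamp', 'original'],
    ['key1', '20200101', 'https://example.com/a'],
    ['key2', '20210101'],  # a short row simply yields fewer keys
]
parsed = [dict(zip(res[0], v)) for v in res[1:]]  # noqa: B905
print(parsed[1])  # {'urlkey': 'key2', 'timestamp': '20210101'}
```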
yt_dlp/extractor/common.py

@@ -1663,7 +1663,7 @@ def extract_chapter_information(e):
 'end_time': part.get('endOffset'),
 } for part in variadic(e.get('hasPart') or []) if part.get('@type') == 'Clip']
 for idx, (last_c, current_c, next_c) in enumerate(zip(
-[{'end_time': 0}, *chapters], chapters, chapters[1:])):
+[{'end_time': 0}, *chapters], chapters, chapters[1:], strict=False)):
 current_c['end_time'] = current_c['end_time'] or next_c['start_time']
 current_c['start_time'] = current_c['start_time'] or last_c['end_time']
 if None in current_c.values():
@@ -1848,7 +1848,7 @@ def _search_nuxt_data(self, webpage, video_id, context_name='__NUXT__', *, fatal
 return {}
 
 args = dict(zip(arg_keys.split(','), map(json.dumps, self._parse_json(
-f'[{arg_vals}]', video_id, transform_source=js_to_json, fatal=fatal) or ())))
+f'[{arg_vals}]', video_id, transform_source=js_to_json, fatal=fatal) or ()), strict=True))
 
 ret = self._parse_json(js, video_id, transform_source=functools.partial(js_to_json, vars=args), fatal=fatal)
 return traverse_obj(ret, traverse) or {}
@@ -1,5 +1,4 @@
 import json
-import socket
 
 from .common import InfoExtractor
 from ..utils import (
@@ -56,7 +55,7 @@ def canonical_url(h):
 try:
 self.to_screen(f'{video_id}: Checking {format_id} video format URL')
 self._downloader._opener.open(video_url, timeout=5).close()
-except socket.timeout:
+except TimeoutError:
 self.to_screen(
 f'{video_id}: {format_id} URL is invalid, skipping')
 continue
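Since Python 3.10, `socket.timeout` is just an alias of the builtin `TimeoutError`, so the handler above catches exactly the same exceptions while letting the `socket` import go; the same substitution appears in `yt_dlp/networking/_requests.py` below. A sanity check:

```python
import socket

# True on Python 3.10+: the two names refer to the same class,
# so `except TimeoutError:` also catches socket timeouts.
assert socket.timeout is TimeoutError
assert issubclass(TimeoutError, OSError)
print('socket.timeout is TimeoutError:', socket.timeout is TimeoutError)
```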
@@ -56,7 +56,7 @@ def _real_extract(self, url):
 fmt, subs = self._extract_m3u8_formats_and_subtitles(src['url'], video_id, 'ts')
 for f in fmt:
 f.update(dict(zip(('height', 'width'),
-self._BITRATE_MAP.get(f.get('tbr'), ()))))
+self._BITRATE_MAP.get(f.get('tbr'), ()), strict=False)))
 formats.extend(fmt)
 subtitles = self._merge_subtitles(subtitles, subs)
 
yt_dlp/extractor/kaltura.py

@@ -437,7 +437,7 @@ def _real_extract(self, url):
 params = urllib.parse.parse_qs(query)
 if path:
 splitted_path = path.split('/')
-params.update(dict(zip(splitted_path[::2], [[v] for v in splitted_path[1::2]])))
+params.update(dict(zip(splitted_path[::2], [[v] for v in splitted_path[1::2]])))  # noqa: B905
 if 'wid' in params:
 partner_id = remove_start(params['wid'][0], '_')
 elif 'p' in params:
yt_dlp/extractor/lynda.py

@@ -1,3 +1,4 @@
+import itertools
 import re
 import urllib.parse
 
@@ -216,7 +217,7 @@ def _real_extract(self, url):
 def _fix_subtitles(self, subs):
 srt = ''
 seq_counter = 0
-for seq_current, seq_next in zip(subs, subs[1:]):
+for seq_current, seq_next in itertools.pairwise(subs):
 m_current = re.match(self._TIMECODE_REGEX, seq_current['Timecode'])
 if m_current is None:
 continue
@@ -92,7 +92,7 @@ def _real_extract(self, url):
 contains_pattern=r'\[(?s:.+)\]', transform_source=js_to_json)
 
 formats = []
-for video_hash, (suffix, quality, format_note) in zip(video_hashes, [
+for video_hash, (suffix, quality, format_note) in zip(video_hashes, [  # noqa: B905
 ('', 1, 'normálna kvalita'),
 ('_lq', 0, 'nízka kvalita'),
 ('_hd', 2, 'HD-720p'),
@@ -503,7 +503,7 @@ def _real_extract(self, url):
 'start_time': s,
 'end_time': e,
 'title': t,
-} for s, e, t in zip(start_time, end_time, chapter_titles)]
+} for s, e, t in zip(start_time, end_time, chapter_titles, strict=True)]
 
 return {
 'id': video_id,
yt_dlp/extractor/pbs.py

@@ -181,7 +181,7 @@ class PBSIE(InfoExtractor):
 )
 
 IE_NAME = 'pbs'
-IE_DESC = 'Public Broadcasting Service (PBS) and member stations: {}'.format(', '.join(list(zip(*_STATIONS))[1]))
+IE_DESC = 'Public Broadcasting Service (PBS) and member stations: {}'.format(', '.join(list(zip(*_STATIONS, strict=True))[1]))
 
 _VALID_URL = r'''(?x)https?://
 (?:
@@ -193,7 +193,7 @@ class PBSIE(InfoExtractor):
 (?:[^/?#]+/){{1,5}}(?P<presumptive_id>[^/?#]+?)(?:\.html)?/?(?:$|[?#])
 )
 )
-'''.format('|'.join(next(zip(*_STATIONS))))
+'''.format('|'.join(next(zip(*_STATIONS, strict=True))))
 
 _GEO_COUNTRIES = ['US']
 
yt_dlp/extractor/polskieradio.py

@@ -405,7 +405,7 @@ def _entries(self, url, page, category_id):
 tab_content = self._download_json(
 'https://www.polskieradio.pl/CMS/TemplateBoxesManagement/TemplateBoxTabContent.aspx/GetTabContent',
 category_id, f'Downloading page {page_num}', headers={'content-type': 'application/json'},
-data=json.dumps(dict(zip((
+data=json.dumps(dict(zip((  # noqa: B905
 'boxInstanceId', 'tabId', 'categoryType', 'sectionId', 'categoryId', 'pagerMode',
 'subjectIds', 'tagIndexId', 'queryString', 'name', 'openArticlesInParentTemplate',
 'idSectionFromUrl', 'maxDocumentAge', 'showCategoryForArticle', 'pageNumber',
@@ -155,7 +155,7 @@ def _real_extract(self, url):
 # Sorted by "confidence", higher confidence = earlier in list
 confidences = traverse_obj(metadata, ('tags', ..., 'confidence', ({int}, {float})))
 if confidences:
-tags = [tag for _, tag in sorted(zip(confidences, tags), reverse=True)]
+tags = [tag for _, tag in sorted(zip(confidences, tags), reverse=True)]  # noqa: B905
 
 formats = traverse_obj(video_info, ('variants', ..., {
 'format_id': ('name', {str}),
yt_dlp/extractor/tiktok.py

@@ -81,7 +81,7 @@ def _get_next_app_info(self):
 }
 self._APP_INFO_POOL = [
 {**defaults, **dict(
-(k, v) for k, v in zip(self._APP_INFO_DEFAULTS, app_info.split('/')) if v
+(k, v) for k, v in zip(self._APP_INFO_DEFAULTS, app_info.split('/'), strict=False) if v
 )} for app_info in self._KNOWN_APP_INFO
 ]
 
yt_dlp/jsinterp.py

@@ -186,7 +186,7 @@ def js_number_to_string(val: float, radix: int = 10):
 _COMP_OPERATORS = {'===', '!==', '==', '!=', '<=', '>=', '<', '>'}
 
 _NAME_RE = r'[a-zA-Z_$][\w$]*'
-_MATCHING_PARENS = dict(zip(*zip('()', '{}', '[]')))
+_MATCHING_PARENS = dict(zip(*zip('()', '{}', '[]', strict=True), strict=True))
 _QUOTES = '\'"/'
 _NESTED_BRACKETS = r'[^[\]]+(?:\[[^[\]]+(?:\[[^\]]+\])?\])?'
 
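The doubled `zip` above is a transpose trick worth unpacking: the inner call splits the two-character strings column-wise into all openers and all closers, and the outer `zip(*...)` regroups them into matching pairs. For illustration:

```python
# Inner zip: ('(', '{', '[') and (')', '}', ']') — a column-wise transpose.
# Outer zip(*...): pairs each opening bracket with its closing counterpart.
_MATCHING_PARENS = dict(zip(*zip('()', '{}', '[]', strict=True), strict=True))
print(_MATCHING_PARENS)  # {'(': ')', '{': '}', '[': ']'}
```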
yt_dlp/networking/_requests.py

@@ -4,7 +4,6 @@
 import http.client
 import logging
 import re
-import socket
 import warnings
 
 from ..dependencies import brotli, requests, urllib3
@@ -125,7 +124,7 @@ def read(self, amt: int | None = None):
 # Work around issue with `.read(amt)` then `.read()`
 # See: https://github.com/urllib3/urllib3/issues/3636
 if amt is None:
-# Python 3.9 preallocates the whole read buffer, read in chunks
+# compat: py3.9: Python 3.9 preallocates the whole read buffer, read in chunks
 read_chunk = functools.partial(self.fp.read, 1 << 20, decode_content=True)
 return b''.join(iter(read_chunk, b''))
 # Interact with urllib3 response directly.
@@ -378,7 +377,7 @@ def _new_conn(self):
 source_address=self.source_address,
 _create_socket_func=functools.partial(
 create_socks_proxy_socket, (self.host, self.port), self._proxy_args))
-except (socket.timeout, TimeoutError) as e:
+except TimeoutError as e:
 raise urllib3.exceptions.ConnectTimeoutError(
 self, f'Connection to {self.host} timed out. (connect timeout={self.timeout})') from e
 except SocksProxyError as e:
yt_dlp/networking/common.py

@@ -12,6 +12,7 @@
 from collections.abc import Iterable, Mapping
 from email.message import Message
 from http import HTTPStatus
+from types import NoneType
 
 from ._helper import make_ssl_context, wrap_request_errors
 from .exceptions import (
@@ -20,7 +21,6 @@
 TransportError,
 UnsupportedRequest,
 )
-from ..compat.types import NoneType
 from ..cookies import YoutubeDLCookieJar
 from ..utils import (
 bug_reports_message,
yt_dlp/networking/impersonate.py

@@ -3,11 +3,11 @@
 import re
 from abc import ABC
 from dataclasses import dataclass
+from types import NoneType
 from typing import Any
 
 from .common import RequestHandler, register_preference, Request
 from .exceptions import UnsupportedRequest
-from ..compat.types import NoneType
 from ..utils import classproperty, join_nonempty
 from ..utils.networking import std_headers, HTTPHeaderDict
 
yt_dlp/plugins.py

@@ -11,7 +11,6 @@
 import pkgutil
 import sys
 import traceback
-import zipimport
 from pathlib import Path
 from zipfile import ZipFile
 
@@ -202,12 +201,6 @@ def load_plugins(plugin_spec: PluginSpec):
 if any(x.startswith('_') for x in module_name.split('.')):
 continue
 try:
-if sys.version_info < (3, 10) and isinstance(finder, zipimport.zipimporter):
-# zipimporter.load_module() is deprecated in 3.10 and removed in 3.12
-# The exec_module branch below is the replacement for >= 3.10
-# See: https://docs.python.org/3/library/zipimport.html#zipimport.zipimporter.exec_module
-module = finder.load_module(module_name)
-else:
 spec = finder.find_spec(module_name)
 module = importlib.util.module_from_spec(spec)
 sys.modules[module_name] = module
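On 3.10+, `zipimport.zipimporter` implements `find_spec()`, so the deprecated `load_module()` fallback (removed from CPython in 3.12) is dead code once 3.9 support ends. A minimal sketch of the surviving spec-based path — the function name and the omitted error handling are illustrative, not yt-dlp's exact code:

```python
import importlib.util
import sys

def _load_via_spec(finder, module_name):
    # Works for any finder on Python 3.10+, including zipimport.zipimporter
    spec = finder.find_spec(module_name)
    module = importlib.util.module_from_spec(spec)
    sys.modules[module_name] = module  # register before executing, as above
    spec.loader.exec_module(module)
    return module
```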
yt_dlp/postprocessor/ffmpeg.py

@@ -418,7 +418,7 @@ def _concat_spec(cls, in_files, concat_opts=None):
 if concat_opts is None:
 concat_opts = [{}] * len(in_files)
 yield 'ffconcat version 1.0\n'
-for file, opts in zip(in_files, concat_opts):
+for file, opts in zip(in_files, concat_opts, strict=True):
 yield f'file {cls._quote_for_ffmpeg(cls._ffmpeg_filename_argument(file))}\n'
 # Iterate explicitly to yield the following directives in order, ignoring the rest.
 for directive in 'inpoint', 'outpoint', 'duration':
@@ -639,7 +639,7 @@ def run(self, info):
 # postprocessor a second time
 '-map', '-0:s',
 ]
-for i, (lang, name) in enumerate(zip(sub_langs, sub_names)):
+for i, (lang, name) in enumerate(zip(sub_langs, sub_names, strict=True)):
 opts.extend(['-map', f'{i + 1}:0'])
 lang_code = ISO639Utils.short2long(lang) or lang
 opts.extend([f'-metadata:s:s:{i}', f'language={lang_code}'])
yt_dlp/update.py

@@ -154,7 +154,7 @@ def _get_binary_name():
 
 
 def _get_system_deprecation():
-MIN_SUPPORTED, MIN_RECOMMENDED = (3, 9), (3, 10)
+MIN_SUPPORTED, MIN_RECOMMENDED = (3, 10), (3, 10)
 
 if sys.version_info > MIN_RECOMMENDED:
 return None
@@ -559,11 +559,9 @@ def filename(self):
 @functools.cached_property
 def cmd(self):
 """The command-line to run the executable, if known"""
-argv = None
-# There is no sys.orig_argv in py < 3.10. Also, it can be [] when frozen
-if getattr(sys, 'orig_argv', None):
 argv = sys.orig_argv
-elif getattr(sys, 'frozen', False):
+# sys.orig_argv can be [] when frozen
+if not argv and getattr(sys, 'frozen', False):
 argv = sys.argv
 # linux_static exe's argv[0] will be /tmp/staticx-NNNN/yt-dlp_linux if we don't fixup here
 if argv and os.getenv('STATICX_PROG_PATH'):
@@ -572,7 +570,7 @@ def cmd(self):
 
 def restart(self):
 """Restart the executable"""
-assert self.cmd, 'Must be frozen or Py >= 3.10'
+assert self.cmd, 'Unable to determine argv'
 self.ydl.write_debug(f'Restarting: {shell_quote(self.cmd)}')
 _, _, returncode = Popen.run(self.cmd)
 return returncode
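`sys.orig_argv` was added in Python 3.10, so `cmd` no longer needs the `getattr` probe; only the frozen-binary fallback (where it can be empty) survives. Roughly what it records, for illustration:

```python
import sys

# Unlike sys.argv, sys.orig_argv (3.10+) preserves the full command line that
# launched the interpreter, e.g. `python -m yt_dlp URL` gives
# ['python', '-m', 'yt_dlp', 'URL'] — exactly what restart() must re-exec.
argv = sys.orig_argv
if not argv and getattr(sys, 'frozen', False):  # PyInstaller-style binaries
    argv = sys.argv
print(argv)
```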
yt_dlp/utils/_legacy.py

@@ -1,6 +1,4 @@
 """No longer used and new code should not use. Exists only for API compat."""
-import asyncio
-import atexit
 import platform
 import struct
 import sys
@@ -34,77 +32,6 @@
 has_websockets = bool(websockets)
 
 
-class WebSocketsWrapper:
-"""Wraps websockets module to use in non-async scopes"""
-pool = None
-
-def __init__(self, url, headers=None, connect=True, **ws_kwargs):
-self.loop = asyncio.new_event_loop()
-# XXX: "loop" is deprecated
-self.conn = websockets.connect(
-url, extra_headers=headers, ping_interval=None,
-close_timeout=float('inf'), loop=self.loop, ping_timeout=float('inf'), **ws_kwargs)
-if connect:
-self.__enter__()
-atexit.register(self.__exit__, None, None, None)
-
-def __enter__(self):
-if not self.pool:
-self.pool = self.run_with_loop(self.conn.__aenter__(), self.loop)
-return self
-
-def send(self, *args):
-self.run_with_loop(self.pool.send(*args), self.loop)
-
-def recv(self, *args):
-return self.run_with_loop(self.pool.recv(*args), self.loop)
-
-def __exit__(self, type, value, traceback):
-try:
-return self.run_with_loop(self.conn.__aexit__(type, value, traceback), self.loop)
-finally:
-self.loop.close()
-self._cancel_all_tasks(self.loop)
-
-# taken from https://github.com/python/cpython/blob/3.9/Lib/asyncio/runners.py with modifications
-# for contributors: If there's any new library using asyncio needs to be run in non-async, move these function out of this class
-@staticmethod
-def run_with_loop(main, loop):
-if not asyncio.iscoroutine(main):
-raise ValueError(f'a coroutine was expected, got {main!r}')
-
-try:
-return loop.run_until_complete(main)
-finally:
-loop.run_until_complete(loop.shutdown_asyncgens())
-if hasattr(loop, 'shutdown_default_executor'):
-loop.run_until_complete(loop.shutdown_default_executor())
-
-@staticmethod
-def _cancel_all_tasks(loop):
-to_cancel = asyncio.all_tasks(loop)
-
-if not to_cancel:
-return
-
-for task in to_cancel:
-task.cancel()
-
-# XXX: "loop" is removed in Python 3.10+
-loop.run_until_complete(
-asyncio.gather(*to_cancel, loop=loop, return_exceptions=True))
-
-for task in to_cancel:
-if task.cancelled():
-continue
-if task.exception() is not None:
-loop.call_exception_handler({
-'message': 'unhandled exception during asyncio.run() shutdown',
-'exception': task.exception(),
-'task': task,
-})
-
-
 def load_plugins(name, suffix, namespace):
 from ..plugins import load_plugins
 ret = load_plugins(name, suffix)
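The removed wrapper dates from when yt-dlp drove the async `websockets` API from synchronous code and had to manage its own event loop, copying the task-cancellation dance from CPython 3.9's `asyncio.runners`; it also passed the `loop=` argument that Python 3.10 removed, so it could not survive the version bump in any case. Modern equivalents simply lean on `asyncio.run`, which performs the same cleanup:

```python
import asyncio

# asyncio.run() creates a fresh loop, runs the coroutine, then cancels
# leftover tasks and shuts down async generators — the ~70 lines removed
# above were a hand-rolled version of roughly this.
async def demo():
    await asyncio.sleep(0)
    return 'done'

print(asyncio.run(demo()))  # 'done'
```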
yt_dlp/utils/_utils.py

@@ -95,7 +95,7 @@ def IDENTITY(x):
 # needed for sanitizing filenames in restricted mode
 ACCENT_CHARS = dict(zip('ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ',
 itertools.chain('AAAAAA', ['AE'], 'CEEEEIIIIDNOOOOOOO', ['OE'], 'UUUUUY', ['TH', 'ss'],
-'aaaaaa', ['ae'], 'ceeeeiiiionooooooo', ['oe'], 'uuuuuy', ['th'], 'y')))
+'aaaaaa', ['ae'], 'ceeeeiiiionooooooo', ['oe'], 'uuuuuy', ['th'], 'y'), strict=True))
 
 DATE_FORMATS = (
 '%d %B %Y',
@@ -2402,7 +2402,6 @@ class PlaylistEntries:
 
 def __init__(self, ydl, info_dict):
 self.ydl = ydl
-
 # _entries must be assigned now since infodict can change during iteration
 entries = info_dict.get('entries')
 if entries is None:
@@ -2415,7 +2414,7 @@ def __init__(self, ydl, info_dict):
 if self.is_incomplete:
 assert self.is_exhausted
 self._entries = [self.MissingEntry] * max(requested_entries or [0])
-for i, entry in zip(requested_entries, entries):
+for i, entry in zip(requested_entries, entries):  # noqa: B905
 self._entries[i - 1] = entry
 elif isinstance(entries, (list, PagedList, LazyList)):
 self._entries = entries
@@ -3184,7 +3183,7 @@ def width(string):
 return len(remove_terminal_sequences(string).replace('\t', ''))
 
 def get_max_lens(table):
-return [max(width(str(v)) for v in col) for col in zip(*table)]
+return [max(width(str(v)) for v in col) for col in zip(*table, strict=True)]
 
 def filter_using_list(row, filter_array):
 return [col for take, col in itertools.zip_longest(filter_array, row, fillvalue=True) if take]
@@ -3540,7 +3539,7 @@ def parse_node(node):
 continue
 default_style.update(style)
 
-for para, index in zip(paras, itertools.count(1)):
+for para, index in zip(paras, itertools.count(1), strict=False):
 begin_time = parse_dfxp_time_expr(para.attrib.get('begin'))
 end_time = parse_dfxp_time_expr(para.attrib.get('end'))
 dur = parse_dfxp_time_expr(para.attrib.get('dur'))
@@ -4854,7 +4853,7 @@ def scale_thumbnails_to_max_format_width(formats, thumbnails, url_width_re):
 return [
 merge_dicts(
 {'url': re.sub(url_width_re, str(max_dimensions[0]), thumbnail['url'])},
-dict(zip(_keys, max_dimensions)), thumbnail)
+dict(zip(_keys, max_dimensions, strict=True)), thumbnail)
 for thumbnail in thumbnails
 ]
 
@@ -110,7 +110,7 @@ def parse_iter(parsed: typing.Any, /, *, revivers: dict[str, collections.abc.Cal
 
 elif value[0] == 'Map':
 result = []
-for key, new_source in zip(*(iter(value[1:]),) * 2):
+for key, new_source in zip(*(iter(value[1:]),) * 2, strict=True):
 pair = [None, None]
 stack.append((pair, 0, key))
 stack.append((pair, 1, new_source))
@@ -129,7 +129,7 @@ def parse_iter(parsed: typing.Any, /, *, revivers: dict[str, collections.abc.Cal
 
 elif value[0] == 'null':
 result = {}
-for key, new_source in zip(*(iter(value[1:]),) * 2):
+for key, new_source in zip(*(iter(value[1:]),) * 2, strict=True):
 stack.append((result, key, new_source))
 
 elif value[0] in _ARRAY_TYPE_LOOKUP:
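`zip(*(iter(x),) * 2)` is the standard flat-list-to-pairs idiom: both zip arguments share one iterator, so each pair consumes two consecutive elements. With `strict=True`, an odd-length list now raises instead of silently dropping the dangling key. For illustration:

```python
flat = ['k1', 1, 'k2', 2]
it = iter(flat)
# Both arguments advance the same iterator -> consecutive (key, value) chunks
print(list(zip(it, it, strict=True)))  # [('k1', 1), ('k2', 2)]
# With an odd-length input, strict=True raises ValueError instead of
# silently discarding the trailing key.
```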
yt_dlp/webvtt.py

@@ -103,7 +103,7 @@ def _parse_ts(ts):
 into an MPEG PES timestamp: a tick counter at 90 kHz resolution.
 """
 return 90 * sum(
-int(part or 0) * mult for part, mult in zip(ts.groups(), (3600_000, 60_000, 1000, 1)))
+int(part or 0) * mult for part, mult in zip(ts.groups(), (3600_000, 60_000, 1000, 1), strict=True))
 
 
 def _format_ts(ts):
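The multipliers convert the (hours, minutes, seconds, milliseconds) regex groups to milliseconds before scaling to 90 kHz ticks; a quick arithmetic check with illustrative values:

```python
# 1 h 2 min 3 s 4 ms, converted the same way as _parse_ts above
h, m, s, ms = 1, 2, 3, 4
milliseconds = h * 3600_000 + m * 60_000 + s * 1000 + ms * 1  # 3_723_004
print(90 * milliseconds)  # 335070360 ticks at 90 kHz
```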