[cleanup] Add more ruff rules (#10149)
Authored by: seproDev
Reviewed-by: bashonly <88596187+bashonly@users.noreply.github.com>
Reviewed-by: Simon Sawicki <contact@grub4k.xyz>
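The diff below touches several networking modules but applies a handful of recurring cleanup patterns; the exact ruff rule list lives in the PR's config change, which is not shown here. The visible patterns: trailing commas on multi-line calls and literals, spelling out implicit Optional as `| None`, removing stale `# noqa` pragmas (and adding one deliberate suppression on `close()`), single-quoted multiline strings, f-strings over `%`-formatting, and not shadowing builtins. A minimal before/after sketch of two of these patterns (illustrative names only):

# before
def read(amt: int = None):
    return 'read %s' % amt

# after
def read(amt: int | None = None):  # implicit Optional spelled out
    return f'read {amt}'           # f-string instead of %-formatting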
@@ -30,7 +30,7 @@ except Exception as e:
     warnings.warn(f'Failed to import "websockets" request handler: {e}' + bug_reports_message())

 try:
-    from . import _curlcffi  # noqa: F401
+    from . import _curlcffi
 except ImportError:
     pass
 except Exception as e:

@@ -207,7 +207,7 @@ class CurlCFFIRH(ImpersonateRequestHandler, InstanceStoreMixin):
                 impersonate=self._SUPPORTED_IMPERSONATE_TARGET_MAP.get(
                     self._get_request_target(request)),
                 interface=self.source_address,
-                stream=True
+                stream=True,
             )
         except curl_cffi.requests.errors.RequestsError as e:
             if e.code == CurlECode.PEER_FAILED_VERIFICATION:

@@ -235,7 +235,7 @@ def create_socks_proxy_socket(dest_addr, proxy_args, proxy_ip_addr, timeout, sou
     connect_proxy_args = proxy_args.copy()
     connect_proxy_args.update({'addr': sa[0], 'port': sa[1]})
     sock.setproxy(**connect_proxy_args)
-    if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:  # noqa: E721
+    if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
         sock.settimeout(timeout)
     if source_address:
         sock.bind(source_address)

@@ -251,7 +251,7 @@ def create_connection(
     timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
     source_address=None,
     *,
-    _create_socket_func=_socket_connect
+    _create_socket_func=_socket_connect,
 ):
     # Work around socket.create_connection() which tries all addresses from getaddrinfo() including IPv6.
     # This filters the addresses based on the given source_address.
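The two context comments above describe the address-filtering idea without showing the body. As a rough illustration, a minimal sketch of that filtering, not yt-dlp's actual helper (the function name and exact signature here are assumptions):

import socket

def _filtered_addresses(address, source_address):
    # getaddrinfo() may return both IPv4 and IPv6 entries; once a source
    # address of one family is bound, only same-family destinations can work.
    host, port = address
    infos = socket.getaddrinfo(host, port, type=socket.SOCK_STREAM)
    if source_address is None:
        return infos
    wanted_family = socket.AF_INET6 if ':' in source_address[0] else socket.AF_INET
    return [info for info in infos if info[0] == wanted_family]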
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import contextlib
 import functools
 import http.client

@@ -58,13 +60,13 @@ from .exceptions import (
 from ..socks import ProxyError as SocksProxyError

 SUPPORTED_ENCODINGS = [
-    'gzip', 'deflate'
+    'gzip', 'deflate',
 ]

 if brotli is not None:
     SUPPORTED_ENCODINGS.append('br')

-"""
+'''
 Override urllib3's behavior to not convert lower-case percent-encoded characters
 to upper-case during url normalization process.

@@ -79,7 +81,7 @@ is best to avoid it in requests too for compatability reasons.

 1: https://tools.ietf.org/html/rfc3986#section-2.1
 2: https://github.com/streamlink/streamlink/pull/4003
-"""
+'''


 class Urllib3PercentREOverride:

@@ -96,7 +98,7 @@ class Urllib3PercentREOverride:

 # urllib3 >= 1.25.8 uses subn:
 # https://github.com/urllib3/urllib3/commit/a2697e7c6b275f05879b60f593c5854a816489f0
-import urllib3.util.url  # noqa: E305
+import urllib3.util.url

 if hasattr(urllib3.util.url, 'PERCENT_RE'):
     urllib3.util.url.PERCENT_RE = Urllib3PercentREOverride(urllib3.util.url.PERCENT_RE)
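The body of Urllib3PercentREOverride is outside this hunk, but the docstring above says the point is to intercept the regex's subn() so normalization cannot upper-case %xx sequences. A minimal sketch of that idea; the delegation details are an assumption, not the verbatim class:

import re

class PercentREOverride:
    def __init__(self, pattern: re.Pattern):
        self._pattern = pattern

    def __getattr__(self, name):
        # delegate everything else (match, fullmatch, ...) to the real pattern
        return getattr(self._pattern, name)

    def subn(self, repl, string, *args, **kwargs):
        # report how many sequences *would* have been replaced, but hand the
        # string back untouched so lower-case %xx escapes survive normalization
        return string, self._pattern.subn(repl, string, *args, **kwargs)[1]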
@@ -105,7 +107,7 @@ elif hasattr(urllib3.util.url, '_PERCENT_RE'):  # urllib3 >= 2.0.0
 else:
     warnings.warn('Failed to patch PERCENT_RE in urllib3 (does the attribute exist?)' + bug_reports_message())

-"""
+'''
 Workaround for issue in urllib.util.ssl_.py: ssl_wrap_context does not pass
 server_hostname to SSLContext.wrap_socket if server_hostname is an IP,
 however this is an issue because we set check_hostname to True in our SSLContext.

@@ -114,7 +116,7 @@ Monkey-patching IS_SECURETRANSPORT forces ssl_wrap_context to pass server_hostna

 This has been fixed in urllib3 2.0+.
 See: https://github.com/urllib3/urllib3/issues/517
-"""
+'''

 if urllib3_version < (2, 0, 0):
     with contextlib.suppress(Exception):
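The body of the `with` block is cut off by the hunk boundary. Going only by the docstring above, the patch amounts to flipping the IS_SECURETRANSPORT flag; a hedged reconstruction of the idea, not the verbatim code:

import contextlib
import urllib3.util.ssl_

with contextlib.suppress(Exception):
    # forces ssl_wrap_socket() in urllib3 < 2.0 to always pass server_hostname
    urllib3.util.ssl_.IS_SECURETRANSPORT = True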
@@ -135,7 +137,7 @@ class RequestsResponseAdapter(Response):

         self._requests_response = res

-    def read(self, amt: int = None):
+    def read(self, amt: int | None = None):
         try:
             # Interact with urllib3 response directly.
             return self.fp.read(amt, decode_content=True)

@@ -329,7 +331,7 @@ class RequestsRH(RequestHandler, InstanceStoreMixin):
                 timeout=self._calculate_timeout(request),
                 proxies=self._get_proxies(request),
                 allow_redirects=True,
-                stream=True
+                stream=True,
             )

         except requests.exceptions.TooManyRedirects as e:

@@ -411,7 +413,7 @@ class SocksProxyManager(urllib3.PoolManager):
         super().__init__(num_pools, headers, **connection_pool_kw)
         self.pool_classes_by_scheme = {
             'http': SocksHTTPConnectionPool,
-            'https': SocksHTTPSConnectionPool
+            'https': SocksHTTPSConnectionPool,
         }


@@ -246,8 +246,8 @@ class ProxyHandler(urllib.request.BaseHandler):
     def __init__(self, proxies=None):
         self.proxies = proxies
         # Set default handlers
-        for type in ('http', 'https', 'ftp'):
-            setattr(self, '%s_open' % type, lambda r, meth=self.proxy_open: meth(r))
+        for scheme in ('http', 'https', 'ftp'):
+            setattr(self, f'{scheme}_open', lambda r, meth=self.proxy_open: meth(r))

     def proxy_open(self, req):
         proxy = select_proxy(req.get_full_url(), self.proxies)
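Aside from the `type` → `scheme` rename (avoiding shadowing the builtin) and the switch to an f-string, note the `meth=self.proxy_open` default argument in both versions: defaults are evaluated once at definition time, so each generated handler is bound immediately rather than late-binding inside the loop. A standalone sketch of the same pattern, with an invented class for illustration:

class Demo:
    def __init__(self):
        for scheme in ('http', 'https', 'ftp'):
            # bind proxy_open now, via the default argument, not at call time
            setattr(self, f'{scheme}_open', lambda r, meth=self.proxy_open: meth(r))

    def proxy_open(self, req):
        return f'proxy_open({req})'

print(Demo().http_open('example'))  # -> proxy_open(example)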
@@ -385,12 +385,12 @@ class UrllibRH(RequestHandler, InstanceStoreMixin):
             url=request.url,
             data=request.data,
             headers=dict(headers),
-            method=request.method
+            method=request.method,
         )

         opener = self._get_instance(
             proxies=self._get_proxies(request),
-            cookiejar=self._get_cookiejar(request)
+            cookiejar=self._get_cookiejar(request),
         )
         try:
             res = opener.open(urllib_req, timeout=self._calculate_timeout(request))

@@ -137,7 +137,7 @@ class WebsocketsRH(WebSocketRequestHandler):
         wsuri = parse_uri(request.url)
         create_conn_kwargs = {
             'source_address': (self.source_address, 0) if self.source_address else None,
-            'timeout': timeout
+            'timeout': timeout,
         }
         proxy = select_proxy(request.url, self._get_proxies(request))
         try:

@@ -147,12 +147,12 @@ class WebsocketsRH(WebSocketRequestHandler):
                 address=(socks_proxy_options['addr'], socks_proxy_options['port']),
                 _create_socket_func=functools.partial(
                     create_socks_proxy_socket, (wsuri.host, wsuri.port), socks_proxy_options),
-                **create_conn_kwargs
+                **create_conn_kwargs,
             )
         else:
             sock = create_connection(
                 address=(wsuri.host, wsuri.port),
-                **create_conn_kwargs
+                **create_conn_kwargs,
             )
         conn = websockets.sync.client.connect(
             sock=sock,

@@ -83,8 +83,8 @@ class RequestDirector:
             rh: sum(pref(rh, request) for pref in self.preferences)
             for rh in self.handlers.values()
         }
-        self._print_verbose('Handler preferences for this request: %s' % ', '.join(
-            f'{rh.RH_NAME}={pref}' for rh, pref in preferences.items()))
+        self._print_verbose('Handler preferences for this request: {}'.format(', '.join(
+            f'{rh.RH_NAME}={pref}' for rh, pref in preferences.items())))
         return sorted(self.handlers.values(), key=preferences.get, reverse=True)

     def _print_verbose(self, msg):
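The context above shows how RequestDirector picks a handler: every preference callable scores every handler, the scores are summed, and handlers are tried best-first. A toy rendition of that scoring; the handler names and preference values here are invented for illustration:

handlers = {'urllib': 'UrllibRH', 'requests': 'RequestsRH'}
preferences = [
    lambda rh, request: 100 if rh == 'RequestsRH' else 0,
    lambda rh, request: -50 if request.startswith('wss:') else 0,
]

request = 'https://example.com'
scores = {rh: sum(pref(rh, request) for pref in preferences) for rh in handlers.values()}
print(sorted(handlers.values(), key=scores.get, reverse=True))  # ['RequestsRH', 'UrllibRH']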
@@ -224,11 +224,11 @@ class RequestHandler(abc.ABC):
         headers: HTTPHeaderDict = None,
         cookiejar: YoutubeDLCookieJar = None,
         timeout: float | int | None = None,
-        proxies: dict = None,
-        source_address: str = None,
+        proxies: dict | None = None,
+        source_address: str | None = None,
         verbose: bool = False,
         prefer_system_certs: bool = False,
-        client_cert: dict[str, str | None] = None,
+        client_cert: dict[str, str | None] | None = None,
         verify: bool = True,
         legacy_ssl_support: bool = False,
         **_,

@@ -341,7 +341,7 @@ class RequestHandler(abc.ABC):
         """Handle a request from start to finish. Redefine in subclasses."""
         pass

-    def close(self):
+    def close(self):  # noqa: B027
         pass

     @classproperty

@@ -378,11 +378,11 @@ class Request:
         self,
         url: str,
         data: RequestData = None,
-        headers: typing.Mapping = None,
-        proxies: dict = None,
-        query: dict = None,
-        method: str = None,
-        extensions: dict = None
+        headers: typing.Mapping | None = None,
+        proxies: dict | None = None,
+        query: dict | None = None,
+        method: str | None = None,
+        extensions: dict | None = None,
     ):

         self._headers = HTTPHeaderDict()

@@ -508,8 +508,8 @@ class Response(io.IOBase):
         url: str,
         headers: Mapping[str, str],
         status: int = 200,
-        reason: str = None,
-        extensions: dict = None
+        reason: str | None = None,
+        extensions: dict | None = None,
     ):

         self.fp = fp

@@ -527,7 +527,7 @@ class Response(io.IOBase):
     def readable(self):
         return self.fp.readable()

-    def read(self, amt: int = None) -> bytes:
+    def read(self, amt: int | None = None) -> bytes:
         # Expected errors raised here should be of type RequestError or subclasses.
         # Subclasses should redefine this method with more precise error handling.
         try:

@@ -13,7 +13,7 @@ class RequestError(YoutubeDLError):
         self,
         msg: str | None = None,
         cause: Exception | str | None = None,
-        handler: RequestHandler = None
+        handler: RequestHandler = None,
     ):
         self.handler = handler
         self.cause = cause

@@ -112,8 +112,8 @@ class ImpersonateRequestHandler(RequestHandler, ABC):
         return supported_target

     @classproperty
-    def supported_targets(self) -> tuple[ImpersonateTarget, ...]:
-        return tuple(self._SUPPORTED_IMPERSONATE_TARGET_MAP.keys())
+    def supported_targets(cls) -> tuple[ImpersonateTarget, ...]:
+        return tuple(cls._SUPPORTED_IMPERSONATE_TARGET_MAP.keys())

     def is_supported_target(self, target: ImpersonateTarget):
         assert isinstance(target, ImpersonateTarget)
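The `self` → `cls` rename reflects what a classproperty getter actually receives: the class, not an instance. yt-dlp's own `classproperty` helper is not shown in this diff; the stand-in decorator below is an assumption for illustration of that behaviour:

class classproperty:
    def __init__(self, func):
        self.func = func

    def __get__(self, _instance, owner):
        # the getter is invoked with the owning class, so `cls` is the honest name
        return self.func(owner)

class Handler:
    _SUPPORTED_IMPERSONATE_TARGET_MAP = {'chrome-110': object()}

    @classproperty
    def supported_targets(cls):
        return tuple(cls._SUPPORTED_IMPERSONATE_TARGET_MAP.keys())

print(Handler.supported_targets)  # ('chrome-110',)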
@@ -127,7 +127,7 @@ class ImpersonateRequestHandler(RequestHandler, ABC):
         headers = self._merge_headers(request.headers)
         if self._get_request_target(request) is not None:
             # remove all headers present in std_headers
-            # todo: change this to not depend on std_headers
+            # TODO: change this to not depend on std_headers
             for k, v in std_headers.items():
                 if headers.get(k) == v:
                     headers.pop(k)