mirror of https://github.com/yt-dlp/yt-dlp.git synced 2025-08-16 09:28:28 +00:00
bashonly 2025-08-12 20:25:36 -03:00 committed by GitHub
commit 13b6a2d94e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 34 additions and 20 deletions

yt_dlp/YoutubeDL.py

@@ -613,7 +613,7 @@ class YoutubeDL:
         'width', 'height', 'aspect_ratio', 'resolution', 'dynamic_range', 'tbr', 'abr', 'acodec', 'asr', 'audio_channels',
         'vbr', 'fps', 'vcodec', 'container', 'filesize', 'filesize_approx', 'rows', 'columns', 'hls_media_playlist_data',
         'player_url', 'protocol', 'fragment_base_url', 'fragments', 'is_from_start', 'is_dash_periods', 'request_data',
-        'preference', 'language', 'language_preference', 'quality', 'source_preference', 'cookies',
+        'preference', 'language', 'language_preference', 'quality', 'source_preference', 'cookies', 'additional_cookies_urls',
         'http_headers', 'stretched_ratio', 'no_resume', 'has_drm', 'extra_param_to_segment_url', 'extra_param_to_key_url',
         'hls_aes', 'downloader_options', 'impersonate', 'page_url', 'app', 'play_path', 'tc_url', 'flash_version',
         'rtmp_live', 'rtmp_conn', 'rtmp_protocol', 'rtmp_real_time',
@@ -2628,6 +2628,8 @@ def _calc_headers(self, info_dict, load_cookies=False):
         # See: https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-v8mc-9377-rwjj
         res.pop('Cookie', None)
         cookies = self.cookiejar.get_cookies_for_url(info_dict['url'])
+        for additional_url in info_dict.get('additional_cookies_urls') or []:
+            cookies.extend(self.cookiejar.get_cookies_for_url(additional_url))
         if cookies:
             encoder = LenientSimpleCookie()
             values = []
@@ -2917,6 +2919,10 @@ def is_wellformed(f):
             if (('manifest-filesize-approx' in self.params['compat_opts'] or not fmt.get('manifest_url'))
                     and not fmt.get('filesize') and not fmt.get('filesize_approx')):
                 fmt['filesize_approx'] = filesize_from_tbr(fmt.get('tbr'), info_dict.get('duration'))
+            if hls_aes_key_url := traverse_obj(fmt, ('hls_aes', 'uri')):
+                additional_urls = fmt.get('additional_cookies_urls') or []
+                if hls_aes_key_url not in additional_urls:
+                    fmt['additional_cookies_urls'] = [*additional_urls, hls_aes_key_url]
             fmt['http_headers'] = self._calc_headers(collections.ChainMap(fmt, info_dict), load_cookies=True)
 
             # Safeguard against old/insecure infojson when using --load-info-json
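For context, the effect of the YoutubeDL.py changes above can be sketched outside yt-dlp using only the standard library: cookies are looked up separately for the manifest URL and for each additional URL (here, the HLS AES key host), then merged into a single Cookie header. All domains, cookie names, and values below are placeholders, not part of this commit.

# Standalone illustration; not yt-dlp code
from http.cookiejar import Cookie, CookieJar
from urllib.request import Request


def make_cookie(name, value, domain):
    # Minimal http.cookiejar.Cookie suitable for a demo jar
    return Cookie(
        version=0, name=name, value=value, port=None, port_specified=False,
        domain=domain, domain_specified=True, domain_initial_dot=domain.startswith('.'),
        path='/', path_specified=True, secure=False, expires=None, discard=True,
        comment=None, comment_url=None, rest={}, rfc2109=False)


def cookies_for_url(jar, url):
    # Ask the jar which cookies it would send for this URL
    request = Request(url)
    jar.add_cookie_header(request)
    header = request.get_header('Cookie') or ''
    return [part for part in header.split('; ') if part]


jar = CookieJar()
jar.set_cookie(make_cookie('session', 'abc123', '.media.example.com'))
jar.set_cookie(make_cookie('key_token', 'xyz789', '.keys.example.net'))

# Cookies for the manifest URL plus any 'additional_cookies_urls' are combined
values = cookies_for_url(jar, 'https://media.example.com/live/index.m3u8')
values += cookies_for_url(jar, 'https://keys.example.net/hls/aes')
print('Cookie: ' + '; '.join(values))  # -> Cookie: session=abc123; key_token=xyz789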

yt_dlp/downloader/external.py

@@ -556,23 +556,25 @@ def _call_downloader(self, tmpfilename, info_dict):
         selected_formats = info_dict.get('requested_formats') or [info_dict]
         for i, fmt in enumerate(selected_formats):
-            is_http = re.match(r'https?://', fmt['url'])
-            cookies = self.ydl.cookiejar.get_cookies_for_url(fmt['url']) if is_http else []
-            if cookies:
-                args.extend(['-cookies', ''.join(
-                    f'{cookie.name}={cookie.value}; path={cookie.path}; domain={cookie.domain};\r\n'
-                    for cookie in cookies)])
-            if fmt.get('http_headers') and is_http:
-                # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
-                # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
-                args.extend(['-headers', ''.join(f'{key}: {val}\r\n' for key, val in fmt['http_headers'].items())])
+            url = fmt['url']
+            if re.match(r'https?://', url):
+                cookies = self.ydl.cookiejar.get_cookies_for_url(url)
+                for additional_url in fmt.get('additional_cookies_urls') or []:
+                    cookies.extend(self.ydl.cookiejar.get_cookies_for_url(additional_url))
+                if cookies:
+                    args.extend(['-cookies', ''.join(
+                        f'{cookie.name}={cookie.value}; path={cookie.path}; domain={cookie.domain};\r\n'
+                        for cookie in cookies)])
+                if fmt.get('http_headers'):
+                    # Trailing \r\n after each HTTP header is important to prevent warning from ffmpeg/avconv:
+                    # [http @ 00000000003d2fa0] No trailing CRLF found in HTTP header.
+                    args.extend(['-headers', ''.join(f'{key}: {val}\r\n' for key, val in fmt['http_headers'].items())])
 
             if start_time:
                 args += ['-ss', str(start_time)]
             if end_time:
                 args += ['-t', str(end_time - start_time)]
 
-            url = fmt['url']
             if self.params.get('enable_file_urls') and url.startswith('file:'):
                 # The default protocol_whitelist is 'file,crypto,data' when reading local m3u8 URLs,
                 # so only local segments can be read unless we also include 'http,https,tcp,tls'
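For reference, the '-cookies' value assembled above concatenates one "name=value; path=...; domain=...;" entry per cookie, each terminated by CRLF, which is the Set-Cookie-style format the code passes to ffmpeg. A small sketch with made-up cookie data:

# Made-up cookie data for illustration only
cookies = [
    ('session', 'abc123', '/', '.media.example.com'),
    ('key_token', 'xyz789', '/', '.keys.example.net'),
]
cookies_arg = ''.join(
    f'{name}={value}; path={path}; domain={domain};\r\n'
    for name, value, path, domain in cookies)
# Roughly how it is used: ffmpeg -cookies "<cookies_arg>" -headers ... -i <url> ...
print(repr(cookies_arg))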

yt_dlp/extractor/common.py

@@ -273,6 +273,8 @@ class InfoExtractor:
                     * max_quality  (NiconicoLiveFD only) Max stream quality string
                     * is_dash_periods  Whether the format is a result of merging
                                        multiple DASH periods.
+                    * additional_cookies_urls  A list of additional URLs for which cookies are needed,
+                                       e.g. if a livestream HLS AES key URL domain differs from the m3u8 URL
                     RTMP formats can also have the additional fields: page_url,
                     app, play_path, tc_url, flash_version, rtmp_live, rtmp_conn,
                     rtmp_protocol, rtmp_real_time
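To make the documented field concrete, here is a hypothetical format dict as an extractor might return it; the URLs are placeholders, not taken from this commit.

# Hypothetical example of a format using the new field; all URLs are placeholders
hls_format = {
    'format_id': 'hls-1080p',
    'url': 'https://media.example.com/live/index.m3u8',
    'ext': 'mp4',
    'protocol': 'm3u8_native',
    # Cookies scoped to this extra URL will also be sent, e.g. for a livestream
    # HLS AES-128 key served from a different domain than the m3u8
    'additional_cookies_urls': ['https://keys.example.net/'],
}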

yt_dlp/extractor/weverse.py

@@ -99,6 +99,10 @@ def _token_is_expired(self, key):
             return is_expired
         return not self._access_token_is_valid()
 
+    def _set_weverse_auth_cookies(self):
+        self._set_cookie('.weverse.io', self._ACCESS_TOKEN_KEY, self._oauth_tokens[self._ACCESS_TOKEN_KEY])
+        self._set_cookie('.weverse.io', self._REFRESH_TOKEN_KEY, self._oauth_tokens[self._REFRESH_TOKEN_KEY])
+
     def _refresh_access_token(self):
         if not self._oauth_tokens.get(self._REFRESH_TOKEN_KEY):
             self._report_login_error('no_refresh_token')
@@ -120,10 +124,9 @@ def _refresh_access_token(self):
         except ExtractorError as e:
             if isinstance(e.cause, HTTPError) and e.cause.status == 401:
                 self._oauth_tokens.clear()
-                if self._oauth_cache_key == 'cookies':
-                    self.cookiejar.clear(domain='.weverse.io', path='/', name=self._ACCESS_TOKEN_KEY)
-                    self.cookiejar.clear(domain='.weverse.io', path='/', name=self._REFRESH_TOKEN_KEY)
-                else:
+                self.cookiejar.clear(domain='.weverse.io', path='/', name=self._ACCESS_TOKEN_KEY)
+                self.cookiejar.clear(domain='.weverse.io', path='/', name=self._REFRESH_TOKEN_KEY)
+                if self._oauth_cache_key != 'cookies':
                     self.cache.store(self._NETRC_MACHINE, self._oauth_cache_key, self._oauth_tokens)
                 self._report_login_error('expired_refresh_token')
             raise
@@ -133,10 +136,8 @@ def _refresh_access_token(self):
             self._REFRESH_TOKEN_KEY: ('refreshToken', {str}, {require('refresh token')}),
         }))
 
-        if self._oauth_cache_key == 'cookies':
-            self._set_cookie('.weverse.io', self._ACCESS_TOKEN_KEY, self._oauth_tokens[self._ACCESS_TOKEN_KEY])
-            self._set_cookie('.weverse.io', self._REFRESH_TOKEN_KEY, self._oauth_tokens[self._REFRESH_TOKEN_KEY])
-        else:
+        self._set_weverse_auth_cookies()
+        if self._oauth_cache_key != 'cookies':
             self.cache.store(self._NETRC_MACHINE, self._oauth_cache_key, self._oauth_tokens)
 
     def _get_authorization_header(self):
@@ -169,6 +170,7 @@ def _perform_login(self, username, password):
             self._oauth_tokens.update(self.cache.load(self._NETRC_MACHINE, self._oauth_cache_key, default={}))
         if self._is_logged_in and self._access_token_is_valid():
+            self._set_weverse_auth_cookies()
             return
 
         rt_key = self._REFRESH_TOKEN_KEY
@@ -471,6 +473,8 @@ def _real_extract(self, url):
             'channel': channel,
             'channel_url': f'https://weverse.io/{channel}',
             'formats': formats,
+            # Needed for livestream HLS AES keys
+            'additional_cookies_urls': ['https://video-api.weverse.io/'],
             'availability': availability,
             'live_status': live_status,
             **self._parse_post_meta(post),