Mirror of https://github.com/yt-dlp/yt-dlp.git, synced 2025-11-01 23:25:14 +00:00

[misc] Add hatch, ruff, pre-commit and improve dev docs (#7409)

Authored by: bashonly, seproDev, Grub4K

Co-authored-by: bashonly <88596187+bashonly@users.noreply.github.com>
Co-authored-by: sepro <4618135+seproDev@users.noreply.github.com>
Simon Sawicki authored on 2024-05-26 21:27:21 +02:00, committed by GitHub
parent a2e9031605
commit e897bd8292
264 changed files with 1224 additions and 1014 deletions
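Note on reading the hunks below: the page capture dropped the +/- diff markers, so an import line that was merely moved appears twice in a hunk, once at its old position and once at its new one. Nearly all of the changes are mechanical re-sorting of import blocks under the newly added ruff/isort configuration, plus added trailing commas and the collapse of short multi-line imports onto one line. As a rough illustration only (the names come from the first hunk below; the absolute import path and the description of the ordering rule are assumptions for this sketch, not text from the commit), the net effect looks like this:

# Illustrative sketch only; not code from this commit.
# Both blocks import the same names from yt_dlp.utils; only the ordering differs.

# Before: names listed in a loose, roughly case-insensitive order
from yt_dlp.utils import (
    dict_get,
    ExtractorError,
    js_to_json,
    int_or_none,
)

# After: uppercase-initial names such as ExtractorError sort ahead of the
# lowercase function names (the ordering the re-sorted hunks below settle on)
from yt_dlp.utils import (
    ExtractorError,
    dict_get,
    int_or_none,
    js_to_json,
)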

File diff suppressed because it is too large.

View File

@@ -6,10 +6,10 @@ import time
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
dict_get,
ExtractorError,
js_to_json,
dict_get,
int_or_none,
js_to_json,
parse_iso8601,
str_or_none,
traverse_obj,

View File

@@ -12,20 +12,21 @@ import urllib.parse
import urllib.request
import urllib.response
import uuid
from ..utils.networking import clean_proxies
from .common import InfoExtractor
from ..aes import aes_ecb_decrypt
from ..utils import (
ExtractorError,
OnDemandPagedList,
bytes_to_intlist,
decode_base_n,
int_or_none,
intlist_to_bytes,
OnDemandPagedList,
time_seconds,
traverse_obj,
update_url_query,
)
from ..utils.networking import clean_proxies
def add_opener(ydl, handler): # FIXME: Create proper API in .networking

View File

@@ -3,10 +3,10 @@ from ..utils import (
float_or_none,
format_field,
int_or_none,
str_or_none,
traverse_obj,
parse_codecs,
parse_qs,
str_or_none,
traverse_obj,
)

View File

@@ -10,18 +10,18 @@ from ..aes import aes_cbc_decrypt_bytes, unpad_pkcs7
from ..compat import compat_b64decode
from ..networking.exceptions import HTTPError
from ..utils import (
ExtractorError,
ass_subtitles_timecode,
bytes_to_intlist,
bytes_to_long,
ExtractorError,
float_or_none,
int_or_none,
intlist_to_bytes,
long_to_bytes,
parse_iso8601,
pkcs1pad,
strip_or_none,
str_or_none,
strip_or_none,
try_get,
unified_strdate,
urlencode_postdata,

View File

@@ -4,11 +4,11 @@ import re
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
ISO639Utils,
OnDemandPagedList,
float_or_none,
int_or_none,
ISO639Utils,
join_nonempty,
OnDemandPagedList,
parse_duration,
str_or_none,
str_to_int,

View File

@@ -5,7 +5,7 @@ from ..utils import (
int_or_none,
mimetype2ext,
parse_iso8601,
traverse_obj
traverse_obj,
)

View File

@@ -12,7 +12,6 @@ from ..utils import (
)
from ..utils.traversal import traverse_obj
_FIELDS = '''
_id
clipImageSource

View File

@@ -1,9 +1,9 @@
from .common import InfoExtractor
from ..utils import (
parse_iso8601,
int_or_none,
parse_duration,
parse_filesize,
int_or_none,
parse_iso8601,
)

View File

@@ -1,17 +1,13 @@
import re
from .common import InfoExtractor
from ..compat import (
compat_urlparse,
)
from ..compat import compat_urlparse
from ..utils import (
ExtractorError,
clean_html,
int_or_none,
urlencode_postdata,
urljoin,
int_or_none,
clean_html,
ExtractorError
)

View File

@@ -1,6 +1,6 @@
from .common import InfoExtractor
from .youtube import YoutubeIE
from .vimeo import VimeoIE
from .youtube import YoutubeIE
from ..utils import (
int_or_none,
parse_iso8601,

View File

@@ -1,7 +1,7 @@
from .common import InfoExtractor
from ..utils import (
determine_ext,
ExtractorError,
determine_ext,
int_or_none,
mimetype2ext,
parse_iso8601,

View File

@@ -5,7 +5,7 @@ from ..utils import (
int_or_none,
str_or_none,
traverse_obj,
unified_timestamp
unified_timestamp,
)

View File

@@ -1,7 +1,7 @@
import re
from .common import InfoExtractor
from ..utils import url_or_none, merge_dicts
from ..utils import merge_dicts, url_or_none
class AngelIE(InfoExtractor):

View File

@@ -1,8 +1,5 @@
from .common import InfoExtractor
from ..utils import (
str_to_int,
ExtractorError
)
from ..utils import ExtractorError, str_to_int
class AppleConnectIE(InfoExtractor):

View File

@@ -1,5 +1,5 @@
import re
import json
import re
from .common import InfoExtractor
from ..compat import compat_urlparse

View File

@@ -4,8 +4,8 @@ from ..compat import (
compat_urllib_parse_urlparse,
)
from ..utils import (
format_field,
float_or_none,
format_field,
int_or_none,
parse_iso8601,
remove_start,

View File

@@ -2,10 +2,10 @@ import datetime as dt
from .common import InfoExtractor
from ..utils import (
ExtractorError,
float_or_none,
jwt_encode_hs256,
try_get,
ExtractorError,
)

View File

@@ -2,8 +2,8 @@ import base64
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse_urlencode,
compat_str,
compat_urllib_parse_urlencode,
)
from ..utils import (
format_field,

View File

@@ -2,12 +2,12 @@ import math
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse_urlparse,
compat_parse_qs,
compat_urllib_parse_urlparse,
)
from ..utils import (
format_field,
InAdvancePagedList,
format_field,
traverse_obj,
unified_timestamp,
)

View File

@@ -2,11 +2,11 @@ import json
from .common import InfoExtractor
from ..utils import (
try_get,
int_or_none,
url_or_none,
float_or_none,
int_or_none,
try_get,
unified_timestamp,
url_or_none,
)

View File

@@ -1,5 +1,4 @@
from .common import InfoExtractor
from ..utils import (
int_or_none,
str_or_none,

View File

@@ -1,5 +1,5 @@
from .common import InfoExtractor
from .amp import AMPIE
from .common import InfoExtractor
from ..utils import (
ExtractorError,
int_or_none,

View File

@@ -1,3 +1,4 @@
from .common import InfoExtractor
from ..utils import (
mimetype2ext,
parse_duration,
@@ -5,7 +6,6 @@ from ..utils import (
str_or_none,
traverse_obj,
)
from .common import InfoExtractor
class BloggerIE(InfoExtractor):

View File

@@ -1,7 +1,6 @@
import re
from .common import InfoExtractor
from ..utils import (
extract_attributes,
)

View File

@@ -1,9 +1,5 @@
from .common import InfoExtractor
from ..utils import (
js_to_json,
traverse_obj,
unified_timestamp
)
from ..utils import js_to_json, traverse_obj, unified_timestamp
class BoxCastVideoIE(InfoExtractor):

View File

@@ -6,7 +6,7 @@ from ..utils import (
classproperty,
int_or_none,
traverse_obj,
urljoin
urljoin,
)

View File

@@ -12,10 +12,11 @@ from ..compat import (
)
from ..networking.exceptions import HTTPError
from ..utils import (
ExtractorError,
UnsupportedError,
clean_html,
dict_get,
extract_attributes,
ExtractorError,
find_xpath_attr,
fix_xml_ampersands,
float_or_none,
@@ -29,7 +30,6 @@ from ..utils import (
try_get,
unescapeHTML,
unsmuggle_url,
UnsupportedError,
update_url_query,
url_or_none,
)

View File

@@ -5,14 +5,14 @@ from .youtube import YoutubeIE
from ..utils import (
ExtractorError,
extract_attributes,
find_xpath_attr,
get_element_html_by_id,
int_or_none,
find_xpath_attr,
smuggle_url,
xpath_element,
xpath_text,
update_url_query,
url_or_none,
xpath_element,
xpath_text,
)

View File

@@ -1,4 +1,5 @@
import json
from .common import InfoExtractor
from ..networking.exceptions import HTTPError
from ..utils import (

View File

@@ -1,11 +1,11 @@
import re
from .common import InfoExtractor
from ..utils import (
parse_iso8601,
qualities,
)
import re
class ClippitIE(InfoExtractor):

View File

@@ -1,5 +1,6 @@
import base64
import collections
import functools
import getpass
import hashlib
import http.client
@@ -21,7 +22,6 @@ import urllib.parse
import urllib.request
import xml.etree.ElementTree
from ..compat import functools # isort: split
from ..compat import (
compat_etree_fromstring,
compat_expanduser,

View File

@@ -1,7 +1,7 @@
from .theplatform import ThePlatformFeedIE
from ..utils import (
dict_get,
ExtractorError,
dict_get,
float_or_none,
int_or_none,
)

View File

@@ -6,6 +6,7 @@ import time
from .common import InfoExtractor
from ..networking.exceptions import HTTPError
from ..utils import (
ExtractorError,
determine_ext,
float_or_none,
int_or_none,
@@ -13,7 +14,6 @@ from ..utils import (
parse_age_limit,
parse_duration,
url_or_none,
ExtractorError
)

View File

@@ -1,10 +1,12 @@
import re
from .common import InfoExtractor
from .senategov import SenateISVPIE
from .ustream import UstreamIE
from ..compat import compat_HTMLParseError
from ..utils import (
determine_ext,
ExtractorError,
determine_ext,
extract_attributes,
find_xpath_attr,
get_element_by_attribute,
@@ -19,8 +21,6 @@ from ..utils import (
str_to_int,
unescapeHTML,
)
from .senategov import SenateISVPIE
from .ustream import UstreamIE
class CSpanIE(InfoExtractor):

View File

@@ -1,6 +1,6 @@
from .common import InfoExtractor
from ..utils import unified_timestamp
from .youtube import YoutubeIE
from ..utils import unified_timestamp
class CtsNewsIE(InfoExtractor):

View File

@@ -1,8 +1,8 @@
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
int_or_none,
determine_protocol,
int_or_none,
try_get,
unescapeHTML,
)

View File

@@ -1,8 +1,8 @@
import re
from .common import InfoExtractor
from ..utils import ExtractorError, clean_html, int_or_none, try_get, unified_strdate
from ..compat import compat_str
from ..utils import ExtractorError, clean_html, int_or_none, try_get, unified_strdate
class DamtomoBaseIE(InfoExtractor):

View File

@@ -1,11 +1,11 @@
import re
import os.path
import re
from .common import InfoExtractor
from ..compat import compat_urlparse
from ..utils import (
url_basename,
remove_start,
url_basename,
)

View File

@@ -1,5 +1,4 @@
from .common import InfoExtractor
from ..utils import (
ExtractorError,
parse_resolution,

View File

@@ -2,9 +2,9 @@ import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
determine_ext,
extract_attributes,
ExtractorError,
int_or_none,
parse_age_limit,
remove_end,

View File

@@ -2,10 +2,10 @@ import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
unified_strdate,
determine_ext,
int_or_none,
join_nonempty,
unified_strdate,
update_url_query,
)

View File

@@ -1,5 +1,5 @@
import time
import hashlib
import time
import urllib
import uuid

View File

@@ -4,8 +4,8 @@ import uuid
from .common import InfoExtractor
from ..networking.exceptions import HTTPError
from ..utils import (
determine_ext,
ExtractorError,
determine_ext,
float_or_none,
int_or_none,
remove_start,

View File

@@ -2,8 +2,8 @@ import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
NO_DEFAULT,
int_or_none,
parse_duration,
str_to_int,
)

View File

@@ -5,9 +5,9 @@ import urllib.parse
from .common import InfoExtractor
from ..compat import compat_urlparse
from ..utils import (
ExtractorError,
clean_html,
extract_attributes,
ExtractorError,
get_elements_by_class,
int_or_none,
js_to_json,

View File

@@ -2,15 +2,15 @@ import re
from .common import InfoExtractor
from ..utils import (
determine_ext,
ExtractorError,
determine_ext,
int_or_none,
join_nonempty,
js_to_json,
mimetype2ext,
parse_iso8601,
try_get,
unescapeHTML,
parse_iso8601,
)

View File

@@ -1,10 +1,10 @@
from .common import InfoExtractor
from ..compat import compat_urlparse
from ..utils import (
int_or_none,
unified_strdate,
url_or_none,
)
from ..compat import compat_urlparse
class DWIE(InfoExtractor):

View File

@@ -4,15 +4,15 @@ import re
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
ExtractorError,
clean_html,
determine_ext,
ExtractorError,
dict_get,
int_or_none,
merge_dicts,
parse_qs,
parse_age_limit,
parse_iso8601,
parse_qs,
str_or_none,
try_get,
url_or_none,

View File

@@ -8,7 +8,7 @@ from ..utils import (
qualities,
traverse_obj,
unified_strdate,
xpath_text
xpath_text,
)

View File

@@ -1,8 +1,7 @@
from .common import InfoExtractor
from ..utils import (
parse_duration,
js_to_json,
parse_duration,
)

View File

@@ -1,8 +1,8 @@
from .common import InfoExtractor
from ..utils import (
xpath_text,
parse_duration,
ExtractorError,
parse_duration,
xpath_text,
)

View File

@@ -1,12 +1,6 @@
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
parse_iso8601,
ExtractorError,
try_get,
mimetype2ext
)
from ..utils import ExtractorError, mimetype2ext, parse_iso8601, try_get
class FancodeVodIE(InfoExtractor):

View File

@@ -3,9 +3,9 @@ import re
from .common import InfoExtractor
from ..compat import compat_etree_fromstring
from ..utils import (
int_or_none,
xpath_element,
xpath_text,
int_or_none,
)

View File

@@ -1,7 +1,7 @@
from .common import InfoExtractor
from ..utils import (
int_or_none,
float_or_none,
int_or_none,
)

View File

@@ -1,5 +1,4 @@
from .common import InfoExtractor
from ..utils import (
int_or_none,
traverse_obj,

View File

@@ -2,10 +2,10 @@ from .common import InfoExtractor
from ..compat import compat_str
from ..networking.exceptions import HTTPError
from ..utils import (
ExtractorError,
int_or_none,
qualities,
strip_or_none,
int_or_none,
ExtractorError,
)

View File

@@ -7,7 +7,7 @@ from ..utils import (
parse_codecs,
parse_duration,
str_to_int,
unified_timestamp
unified_timestamp,
)

View File

@@ -10,7 +10,7 @@ from ..utils import (
int_or_none,
str_or_none,
traverse_obj,
try_get
try_get,
)

View File

@@ -1,4 +1,5 @@
import re
from .common import InfoExtractor
from ..utils import (
float_or_none,

View File

@@ -4,7 +4,7 @@ import types
import urllib.parse
import xml.etree.ElementTree
from .common import InfoExtractor # isort: split
from .common import InfoExtractor
from .commonprotocols import RtmpIE
from .youtube import YoutubeIE
from ..compat import compat_etree_fromstring

View File

@@ -1,7 +1,7 @@
from .common import InfoExtractor
from ..utils import (
bool_or_none,
ExtractorError,
bool_or_none,
dict_get,
float_or_none,
int_or_none,

View File

@@ -1,5 +1,4 @@
from .common import InfoExtractor
from ..utils import (
ExtractorError,
urlencode_postdata,

View File

@@ -3,9 +3,9 @@ import urllib.parse
from .common import InfoExtractor
from ..utils import (
ExtractorError,
determine_ext,
extract_attributes,
ExtractorError,
int_or_none,
parse_qs,
smuggle_url,

View File

@@ -3,16 +3,16 @@ import re
from .adobepass import AdobePassIE
from ..compat import compat_str
from ..utils import (
int_or_none,
determine_ext,
parse_age_limit,
remove_start,
remove_end,
try_get,
urlencode_postdata,
ExtractorError,
unified_timestamp,
determine_ext,
int_or_none,
parse_age_limit,
remove_end,
remove_start,
traverse_obj,
try_get,
unified_timestamp,
urlencode_postdata,
)

View File

@@ -4,7 +4,7 @@ from ..utils import (
determine_ext,
str_or_none,
unified_timestamp,
url_or_none
url_or_none,
)
from ..utils.traversal import traverse_obj

View File

@@ -1,10 +1,7 @@
import hashlib
from .common import InfoExtractor
from ..utils import (
ExtractorError,
try_get
)
from ..utils import ExtractorError, try_get
class GofileIE(InfoExtractor):

View File

@@ -1,11 +1,8 @@
import json
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
try_get,
url_or_none
)
import json
from ..utils import try_get, url_or_none
class GoToStageIE(InfoExtractor):

View File

@@ -2,11 +2,11 @@ import re
from .common import InfoExtractor
from ..utils import (
xpath_text,
xpath_element,
int_or_none,
parse_duration,
urljoin,
xpath_element,
xpath_text,
)

View File

@@ -1,7 +1,7 @@
from .common import InfoExtractor
from ..utils import (
determine_ext,
KNOWN_EXTENSIONS,
determine_ext,
str_to_int,
)

View File

@@ -1,8 +1,8 @@
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
clean_html,
ExtractorError,
clean_html,
int_or_none,
merge_dicts,
parse_count,

View File

@@ -4,8 +4,8 @@ from .common import InfoExtractor
from ..networking import Request
from ..networking.exceptions import HTTPError
from ..utils import (
clean_html,
ExtractorError,
clean_html,
int_or_none,
parse_age_limit,
try_get,

View File

@@ -2,8 +2,8 @@ import hashlib
import random
import re
from ..compat import compat_urlparse, compat_b64decode
from .common import InfoExtractor
from ..compat import compat_b64decode, compat_urlparse
from ..utils import (
ExtractorError,
int_or_none,
@@ -13,8 +13,6 @@ from ..utils import (
update_url_query,
)
from .common import InfoExtractor
class HuyaLiveIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.|m\.)?huya\.com/(?P<id>[^/#?&]+)(?:\D|$)'

View File

@@ -1,6 +1,6 @@
from .common import InfoExtractor
from ..utils import ExtractorError, str_or_none, traverse_obj, unified_strdate
from ..compat import compat_str
from ..utils import ExtractorError, str_or_none, traverse_obj, unified_strdate
class IchinanaLiveIE(InfoExtractor):

View File

@@ -1,3 +1,4 @@
from .bokecc import BokeCCBaseIE
from ..compat import (
compat_b64decode,
compat_urllib_parse_unquote,
@@ -6,10 +7,9 @@ from ..compat import (
from ..utils import (
ExtractorError,
determine_ext,
update_url_query,
traverse_obj,
update_url_query,
)
from .bokecc import BokeCCBaseIE
class InfoQIE(BokeCCBaseIE):

View File

@@ -3,12 +3,12 @@ import time
from .common import InfoExtractor
from ..utils import (
ExtractorError,
determine_ext,
js_to_json,
urlencode_postdata,
ExtractorError,
parse_qs,
traverse_obj
traverse_obj,
urlencode_postdata,
)

View File

@@ -4,20 +4,16 @@ import re
import time
from .common import InfoExtractor
from ..compat import (
compat_str,
compat_urllib_parse_urlencode,
compat_urllib_parse_unquote
)
from .openload import PhantomJSwrapper
from ..compat import compat_str, compat_urllib_parse_unquote, compat_urllib_parse_urlencode
from ..utils import (
ExtractorError,
clean_html,
decode_packed_codes,
ExtractorError,
float_or_none,
format_field,
get_element_by_id,
get_element_by_attribute,
get_element_by_id,
int_or_none,
js_to_json,
ohdave_rsa_encrypt,

View File

@@ -1,12 +1,11 @@
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
str_or_none,
traverse_obj,
urljoin
urljoin,
)

View File

@@ -1,23 +1,22 @@
import json
from .common import InfoExtractor
from .brightcove import BrightcoveNewIE
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
JSON_LD_RE,
ExtractorError,
base_url,
clean_html,
determine_ext,
extract_attributes,
ExtractorError,
get_element_by_class,
JSON_LD_RE,
merge_dicts,
parse_duration,
smuggle_url,
try_get,
url_or_none,
url_basename,
url_or_none,
urljoin,
)

View File

@@ -1,9 +1,9 @@
import functools
import urllib.parse
import urllib.error
import hashlib
import json
import time
import urllib.error
import urllib.parse
from .common import InfoExtractor
from ..utils import (

View File

@@ -1,8 +1,8 @@
import hashlib
import random
from ..compat import compat_str
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
clean_html,
int_or_none,

View File

@@ -1,5 +1,6 @@
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
clean_html,
@@ -9,9 +10,8 @@ from ..utils import (
smuggle_url,
traverse_obj,
try_call,
unsmuggle_url
unsmuggle_url,
)
from .common import InfoExtractor
def _parse_japanese_date(text):

View File

@@ -1,8 +1,5 @@
from .common import InfoExtractor
from ..utils import (
ExtractorError,
unified_strdate
)
from ..utils import ExtractorError, unified_strdate
class JoveIE(InfoExtractor):

View File

@@ -1,6 +1,6 @@
import base64
import re
import json
import re
from .common import InfoExtractor
from ..utils import (

View File

@@ -3,8 +3,8 @@ from ..networking.exceptions import HTTPError
from ..utils import (
ExtractorError,
int_or_none,
strip_or_none,
str_or_none,
strip_or_none,
traverse_obj,
unified_timestamp,
)

View File

@@ -4,18 +4,18 @@ import re
from .common import InfoExtractor
from ..compat import (
compat_urlparse,
compat_parse_qs,
compat_urlparse,
)
from ..utils import (
clean_html,
ExtractorError,
clean_html,
format_field,
int_or_none,
unsmuggle_url,
remove_start,
smuggle_url,
traverse_obj,
remove_start
unsmuggle_url,
)

View File

@@ -1,7 +1,7 @@
import time
import hashlib
import random
import string
import hashlib
import time
import urllib.parse
from .common import InfoExtractor

View File

@@ -3,10 +3,10 @@ import re
from .common import InfoExtractor
from ..compat import compat_urlparse
from ..utils import (
get_element_by_id,
clean_html,
ExtractorError,
InAdvancePagedList,
clean_html,
get_element_by_id,
remove_start,
)

View File

@@ -1,5 +1,5 @@
from .common import InfoExtractor
from .arkena import ArkenaIE
from .common import InfoExtractor
class LcpPlayIE(ArkenaIE): # XXX: Do not subclass from concrete IE

View File

@@ -4,8 +4,8 @@ from .common import InfoExtractor
from ..utils import (
determine_ext,
determine_protocol,
parse_duration,
int_or_none,
parse_duration,
)

View File

@@ -2,9 +2,9 @@ import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
clean_html,
determine_ext,
ExtractorError,
float_or_none,
int_or_none,
str_or_none,

View File

@@ -11,9 +11,9 @@ from ..compat import (
compat_urllib_parse_urlencode,
)
from ..utils import (
ExtractorError,
determine_ext,
encode_data_uri,
ExtractorError,
int_or_none,
orderedSet,
parse_iso8601,

View File

@@ -1,7 +1,6 @@
import re
from .common import InfoExtractor
from ..utils import (
determine_ext,
float_or_none,

View File

@@ -6,8 +6,8 @@ from ..compat import (
compat_urlparse,
)
from ..utils import (
determine_ext,
ExtractorError,
determine_ext,
int_or_none,
parse_iso8601,
remove_end,

View File

@@ -3,13 +3,13 @@ import re
from .common import InfoExtractor
from ..networking.exceptions import HTTPError
from ..utils import (
ExtractorError,
determine_ext,
float_or_none,
int_or_none,
smuggle_url,
try_get,
unsmuggle_url,
ExtractorError,
)

View File

@@ -7,8 +7,8 @@ from ..utils import (
extract_attributes,
float_or_none,
int_or_none,
srt_subtitles_timecode,
mimetype2ext,
srt_subtitles_timecode,
traverse_obj,
try_get,
url_or_none,

View File

@@ -1,14 +1,13 @@
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
js_to_json,
parse_duration,
traverse_obj,
try_get,
urljoin
urljoin,
)

View File

@@ -1,10 +1,5 @@
from .common import InfoExtractor
from ..utils import (
clean_html,
int_or_none,
traverse_obj
)
from ..utils import clean_html, int_or_none, traverse_obj
_API_URL = 'https://dak1vd5vmi7x6.cloudfront.net/api/v1/publicrole/{}/{}?id={}'

View File

@@ -4,8 +4,8 @@ from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
ExtractorError,
format_field,
float_or_none,
format_field,
int_or_none,
str_or_none,
traverse_obj,

View File

@@ -1,14 +1,11 @@
from .common import InfoExtractor
from ..compat import compat_str, compat_urllib_parse_unquote
from ..utils import (
ExtractorError,
traverse_obj,
unified_strdate,
url_or_none,
)
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse_unquote,
compat_str
)
class MediaKlikkIE(InfoExtractor):

Some files were not shown because too many files have changed in this diff Show More