mirror of
https://github.com/yt-dlp/yt-dlp.git
synced 2026-01-11 17:31:31 +00:00
Compare commits: 2022.05.18 ... 2023.01.06 (841 commits)
| Author | SHA1 | Date |
|---|---|---|
| (commit table not fully captured in this mirror: only the SHA1 column of the 841 commits between 2022.05.18 and 2023.01.06 survived extraction; the author, date and commit-message columns are empty) |
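If the full commit list is needed, it can be regenerated from a local clone of the repository. The following is a minimal sketch, assuming a clone of https://github.com/yt-dlp/yt-dlp.git with both release tags fetched and the script run from inside that clone; it is not part of the original page.

```python
# Minimal sketch: rebuild the commit list this mirror failed to capture.
# Assumes: a local clone of https://github.com/yt-dlp/yt-dlp.git with the
# 2022.05.18 and 2023.01.06 release tags, run from inside the clone.
import subprocess

result = subprocess.run(
    ["git", "log", "--oneline", "2022.05.18..2023.01.06"],
    capture_output=True, text=True, check=True,
)
commits = result.stdout.splitlines()

print(len(commits), "commits")  # expected to match the 841 commits of this compare
for line in commits[:5]:        # newest entries first
    print(line)
```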
.github/ISSUE_TEMPLATE/1_broken_site.yml (61 lines changed, vendored)
@@ -2,6 +2,13 @@ name: Broken site
  description: Report broken or misfunctioning site
  labels: [triage, site-bug]
  body:
+ - type: checkboxes
+ attributes:
+ label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+ description: Fill all fields even if you think it is irrelevant for the issue
+ options:
+ - label: I understand that I will be **blocked** if I remove or skip any mandatory\* field
+ required: true
  - type: checkboxes
  id: checklist
  attributes:
@@ -11,13 +18,13 @@ body:
  options:
  - label: I'm reporting a broken site
  required: true
- - label: I've verified that I'm running yt-dlp version **2022.05.18** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+ - label: I've verified that I'm running yt-dlp version **2023.01.06** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
  required: true
  - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
  required: true
- - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+ - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
  required: true
- - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+ - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
  required: true
  - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
  required: true
@@ -26,37 +33,45 @@ body:
  id: region
  attributes:
  label: Region
- description: "Enter the region the site is accessible from"
- placeholder: "India"
+ description: Enter the country/region that the site is accessible from
+ placeholder: India
  - type: textarea
  id: description
  attributes:
- label: Description
- description: |
- Provide an explanation of your issue in an arbitrary form.
- Provide any additional information, any suggested solutions, and as much context and examples as possible
- placeholder: WRITE DESCRIPTION HERE
+ label: Provide a description that is worded well enough to be understood
+ description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+ placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
  validations:
  required: true
+ - type: checkboxes
+ id: verbose
+ attributes:
+ label: Provide verbose output that clearly demonstrates the problem
+ options:
+ - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+ required: true
+ - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
+ required: true
  - type: textarea
  id: log
  attributes:
- label: Verbose log
+ label: Complete Verbose Output
  description: |
- Provide the complete verbose output of yt-dlp **that clearly demonstrates the problem**.
- Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
- It should look similar to this:
+ It should start like this:
  placeholder: |
- [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
- [debug] Portable config file: yt-dlp.conf
- [debug] Portable config: ['-i']
- [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
- [debug] yt-dlp version 2022.05.18 (exe)
- [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
- [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
- [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
+ [debug] Command-line config: ['-vU', 'test:youtube']
+ [debug] Portable config "yt-dlp.conf": ['-i']
+ [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+ [debug] yt-dlp version 2023.01.06 [9d339c4] (win32_exe)
+ [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+ [debug] Checking exe version: ffmpeg -bsfs
+ [debug] Checking exe version: ffprobe -bsfs
+ [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
+ [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
  [debug] Proxy map: {}
- yt-dlp is up to date (2022.05.18)
+ [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+ Latest version: 2023.01.06, Current version: 2023.01.06
+ yt-dlp is up to date (2023.01.06)
  <more lines>
  render: shell
  validations:
(file header not captured in the mirror; this diff is for the "Site support request" template, presumably .github/ISSUE_TEMPLATE/2_site_support_request.yml)

@@ -2,6 +2,13 @@ name: Site support request
  description: Request support for a new site
  labels: [triage, site-request]
  body:
+ - type: checkboxes
+ attributes:
+ label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+ description: Fill all fields even if you think it is irrelevant for the issue
+ options:
+ - label: I understand that I will be **blocked** if I remove or skip any mandatory\* field
+ required: true
  - type: checkboxes
  id: checklist
  attributes:
@@ -11,13 +18,13 @@ body:
  options:
  - label: I'm reporting a new site support request
  required: true
- - label: I've verified that I'm running yt-dlp version **2022.05.18** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+ - label: I've verified that I'm running yt-dlp version **2023.01.06** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
  required: true
  - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
  required: true
- - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
+ - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
  required: true
- - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+ - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
  required: true
  - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
  required: true
@@ -26,8 +33,8 @@ body:
  id: region
  attributes:
  label: Region
- description: "Enter the region the site is accessible from"
- placeholder: "India"
+ description: Enter the country/region that the site is accessible from
+ placeholder: India
  - type: textarea
  id: example-urls
  attributes:
@@ -43,31 +50,40 @@ body:
  - type: textarea
  id: description
  attributes:
- label: Description
- description: |
- Provide any additional information
- placeholder: WRITE DESCRIPTION HERE
+ label: Provide a description that is worded well enough to be understood
+ description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+ placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
  validations:
  required: true
+ - type: checkboxes
+ id: verbose
+ attributes:
+ label: Provide verbose output that clearly demonstrates the problem
+ options:
+ - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+ required: true
+ - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
+ required: true
  - type: textarea
  id: log
  attributes:
- label: Verbose log
+ label: Complete Verbose Output
  description: |
- Provide the complete verbose output **using one of the example URLs provided above**.
- Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
- It should look similar to this:
+ It should start like this:
  placeholder: |
- [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
- [debug] Portable config file: yt-dlp.conf
- [debug] Portable config: ['-i']
- [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
- [debug] yt-dlp version 2022.05.18 (exe)
- [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
- [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
- [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
+ [debug] Command-line config: ['-vU', 'test:youtube']
+ [debug] Portable config "yt-dlp.conf": ['-i']
+ [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+ [debug] yt-dlp version 2023.01.06 [9d339c4] (win32_exe)
+ [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+ [debug] Checking exe version: ffmpeg -bsfs
+ [debug] Checking exe version: ffprobe -bsfs
+ [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
+ [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
  [debug] Proxy map: {}
- yt-dlp is up to date (2022.05.18)
+ [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+ Latest version: 2023.01.06, Current version: 2023.01.06
+ yt-dlp is up to date (2023.01.06)
  <more lines>
  render: shell
  validations:
(file header not captured in the mirror; this diff is for the "Site feature request" template, presumably .github/ISSUE_TEMPLATE/3_site_feature_request.yml)

@@ -2,6 +2,13 @@ name: Site feature request
  description: Request a new functionality for a supported site
  labels: [triage, site-enhancement]
  body:
+ - type: checkboxes
+ attributes:
+ label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+ description: Fill all fields even if you think it is irrelevant for the issue
+ options:
+ - label: I understand that I will be **blocked** if I remove or skip any mandatory\* field
+ required: true
  - type: checkboxes
  id: checklist
  attributes:
@@ -9,13 +16,13 @@ body:
  description: |
  Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
  options:
- - label: I'm reporting a site feature request
+ - label: I'm requesting a site-specific feature
  required: true
- - label: I've verified that I'm running yt-dlp version **2022.05.18** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+ - label: I've verified that I'm running yt-dlp version **2023.01.06** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
  required: true
  - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
  required: true
- - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+ - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
  required: true
  - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
  required: true
@@ -24,8 +31,8 @@ body:
  id: region
  attributes:
  label: Region
- description: "Enter the region the site is accessible from"
- placeholder: "India"
+ description: Enter the country/region that the site is accessible from
+ placeholder: India
  - type: textarea
  id: example-urls
  attributes:
@@ -39,33 +46,40 @@ body:
  - type: textarea
  id: description
  attributes:
- label: Description
- description: |
- Provide an explanation of your site feature request in an arbitrary form.
- Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
- Provide any additional information, any suggested solutions, and as much context and examples as possible
- placeholder: WRITE DESCRIPTION HERE
+ label: Provide a description that is worded well enough to be understood
+ description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+ placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
  validations:
  required: true
+ - type: checkboxes
+ id: verbose
+ attributes:
+ label: Provide verbose output that clearly demonstrates the problem
+ options:
+ - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+ required: true
+ - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
+ required: true
  - type: textarea
  id: log
  attributes:
- label: Verbose log
+ label: Complete Verbose Output
  description: |
- Provide the complete verbose output of yt-dlp that demonstrates the need for the enhancement.
- Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
- It should look similar to this:
+ It should start like this:
  placeholder: |
- [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
- [debug] Portable config file: yt-dlp.conf
- [debug] Portable config: ['-i']
- [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
- [debug] yt-dlp version 2022.05.18 (exe)
- [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
- [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
- [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
+ [debug] Command-line config: ['-vU', 'test:youtube']
+ [debug] Portable config "yt-dlp.conf": ['-i']
+ [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+ [debug] yt-dlp version 2023.01.06 [9d339c4] (win32_exe)
+ [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+ [debug] Checking exe version: ffmpeg -bsfs
+ [debug] Checking exe version: ffprobe -bsfs
+ [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
+ [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
  [debug] Proxy map: {}
- yt-dlp is up to date (2022.05.18)
+ [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+ Latest version: 2023.01.06, Current version: 2023.01.06
+ yt-dlp is up to date (2023.01.06)
  <more lines>
  render: shell
  validations:
.github/ISSUE_TEMPLATE/4_bug_report.yml (58 lines changed, vendored)
@@ -2,6 +2,13 @@ name: Bug report
  description: Report a bug unrelated to any particular site or extractor
  labels: [triage, bug]
  body:
+ - type: checkboxes
+ attributes:
+ label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+ description: Fill all fields even if you think it is irrelevant for the issue
+ options:
+ - label: I understand that I will be **blocked** if I remove or skip any mandatory\* field
+ required: true
  - type: checkboxes
  id: checklist
  attributes:
@@ -11,46 +18,53 @@ body:
  options:
  - label: I'm reporting a bug unrelated to a specific site
  required: true
- - label: I've verified that I'm running yt-dlp version **2022.05.18** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+ - label: I've verified that I'm running yt-dlp version **2023.01.06** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
  required: true
  - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
  required: true
- - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+ - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
  required: true
- - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+ - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
  required: true
  - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
  required: true
  - type: textarea
  id: description
  attributes:
- label: Description
- description: |
- Provide an explanation of your issue in an arbitrary form.
- Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
- Provide any additional information, any suggested solutions, and as much context and examples as possible
- placeholder: WRITE DESCRIPTION HERE
+ label: Provide a description that is worded well enough to be understood
+ description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+ placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
  validations:
  required: true
+ - type: checkboxes
+ id: verbose
+ attributes:
+ label: Provide verbose output that clearly demonstrates the problem
+ options:
+ - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+ required: true
+ - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
+ required: true
  - type: textarea
  id: log
  attributes:
- label: Verbose log
+ label: Complete Verbose Output
  description: |
- Provide the complete verbose output of yt-dlp **that clearly demonstrates the problem**.
- Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
- It should look similar to this:
+ It should start like this:
  placeholder: |
- [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
- [debug] Portable config file: yt-dlp.conf
- [debug] Portable config: ['-i']
- [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
- [debug] yt-dlp version 2022.05.18 (exe)
- [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
- [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
- [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
+ [debug] Command-line config: ['-vU', 'test:youtube']
+ [debug] Portable config "yt-dlp.conf": ['-i']
+ [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+ [debug] yt-dlp version 2023.01.06 [9d339c4] (win32_exe)
+ [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+ [debug] Checking exe version: ffmpeg -bsfs
+ [debug] Checking exe version: ffprobe -bsfs
+ [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
+ [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
  [debug] Proxy map: {}
- yt-dlp is up to date (2022.05.18)
+ [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+ Latest version: 2023.01.06, Current version: 2023.01.06
+ yt-dlp is up to date (2023.01.06)
  <more lines>
  render: shell
  validations:
.github/ISSUE_TEMPLATE/5_feature_request.yml (56 lines changed, vendored)
@@ -2,6 +2,13 @@ name: Feature request
  description: Request a new functionality unrelated to any particular site or extractor
  labels: [triage, enhancement]
  body:
+ - type: checkboxes
+ attributes:
+ label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+ description: Fill all fields even if you think it is irrelevant for the issue
+ options:
+ - label: I understand that I will be **blocked** if I remove or skip any mandatory\* field
+ required: true
  - type: checkboxes
  id: checklist
  attributes:
@@ -9,45 +16,50 @@ body:
  description: |
  Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
  options:
- - label: I'm reporting a feature request
+ - label: I'm requesting a feature unrelated to a specific site
  required: true
  - label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
  required: true
- - label: I've verified that I'm running yt-dlp version **2022.05.18** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+ - label: I've verified that I'm running yt-dlp version **2023.01.06** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
  required: true
- - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+ - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
  required: true
  - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
  required: true
  - type: textarea
  id: description
  attributes:
- label: Description
- description: |
- Provide an explanation of your site feature request in an arbitrary form.
- Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
- Provide any additional information, any suggested solutions, and as much context and examples as possible
- placeholder: WRITE DESCRIPTION HERE
+ label: Provide a description that is worded well enough to be understood
+ description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+ placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
  validations:
  required: true
+ - type: checkboxes
+ id: verbose
+ attributes:
+ label: Provide verbose output that clearly demonstrates the problem
+ options:
+ - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+ - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
  - type: textarea
  id: log
  attributes:
- label: Verbose log
+ label: Complete Verbose Output
  description: |
- If your feature request involves an existing yt-dlp command, provide the complete verbose output of that command.
- Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
- It should look similar to this:
+ It should start like this:
  placeholder: |
- [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
- [debug] Portable config file: yt-dlp.conf
- [debug] Portable config: ['-i']
- [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
- [debug] yt-dlp version 2021.12.01 (exe)
- [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
- [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
- [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
+ [debug] Command-line config: ['-vU', 'test:youtube']
+ [debug] Portable config "yt-dlp.conf": ['-i']
+ [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+ [debug] yt-dlp version 2023.01.06 [9d339c4] (win32_exe)
+ [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+ [debug] Checking exe version: ffmpeg -bsfs
+ [debug] Checking exe version: ffprobe -bsfs
+ [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
+ [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
  [debug] Proxy map: {}
- yt-dlp is up to date (2021.12.01)
+ [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+ Latest version: 2023.01.06, Current version: 2023.01.06
+ yt-dlp is up to date (2023.01.06)
  <more lines>
  render: shell
.github/ISSUE_TEMPLATE/6_question.yml (66 lines changed, vendored)
@@ -2,6 +2,19 @@ name: Ask question
  description: Ask yt-dlp related question
  labels: [question]
  body:
+ - type: checkboxes
+ attributes:
+ label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
+ description: Fill all fields even if you think it is irrelevant for the issue
+ options:
+ - label: I understand that I will be **blocked** if I remove or skip any mandatory\* field
+ required: true
+ - type: markdown
+ attributes:
+ value: |
+ ### Make sure you are **only** asking a question and not reporting a bug or requesting a feature.
+ If your question contains "isn't working" or "can you add", this is most likely the wrong template.
+ If you are in doubt whether this is the right template, **USE ANOTHER TEMPLATE**!
  - type: checkboxes
  id: checklist
  attributes:
@@ -9,45 +22,50 @@ body:
  description: |
  Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
  options:
- - label: I'm asking a question and **not** reporting a bug/feature request
+ - label: I'm asking a question and **not** reporting a bug or requesting a feature
  required: true
  - label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
  required: true
- - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+ - label: I've verified that I'm running yt-dlp version **2023.01.06** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
  required: true
- - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions including closed ones
+ - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
  required: true
+ - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+ required: true
  - type: textarea
  id: question
  attributes:
- label: Question
- description: |
- Ask your question in an arbitrary form.
- Please make sure it's worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
- Provide any additional information and as much context and examples as possible.
- If your question contains "isn't working" or "can you add", this is most likely the wrong template.
- If you are in doubt if this is the right template, use another template!
- placeholder: WRITE QUESTION HERE
+ label: Please make sure the question is worded well enough to be understood
+ description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+ placeholder: Provide any additional information and as much context and examples as possible
  validations:
  required: true
+ - type: checkboxes
+ id: verbose
+ attributes:
+ label: Provide verbose output that clearly demonstrates the problem
+ options:
+ - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
+ - label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
  - type: textarea
  id: log
  attributes:
- label: Verbose log
+ label: Complete Verbose Output
  description: |
- If your question involves a yt-dlp command, provide the complete verbose output of that command.
- Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
- It should look similar to this:
+ It should start like this:
  placeholder: |
- [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
- [debug] Portable config file: yt-dlp.conf
- [debug] Portable config: ['-i']
- [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
- [debug] yt-dlp version 2021.12.01 (exe)
- [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
- [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
- [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
+ [debug] Command-line config: ['-vU', 'test:youtube']
+ [debug] Portable config "yt-dlp.conf": ['-i']
+ [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+ [debug] yt-dlp version 2023.01.06 [9d339c4] (win32_exe)
+ [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+ [debug] Checking exe version: ffmpeg -bsfs
+ [debug] Checking exe version: ffprobe -bsfs
+ [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
+ [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
  [debug] Proxy map: {}
- yt-dlp is up to date (2021.12.01)
+ [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+ Latest version: 2023.01.06, Current version: 2023.01.06
+ yt-dlp is up to date (2023.01.06)
  <more lines>
  render: shell
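The remaining diffs cover the template sources under .github/ISSUE_TEMPLATE_tmpl/, where `%(version)s`, `%(no_skip)s` and `%(verbose)s` are printf-style placeholders for the release version and the shared checkbox blocks that appear expanded in the concrete templates above; a sketch of how such placeholders can be filled follows the first of these diffs.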
.github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml (41 lines changed, vendored)
@@ -2,6 +2,7 @@ name: Broken site
  description: Report broken or misfunctioning site
  labels: [triage, site-bug]
  body:
+ %(no_skip)s
  - type: checkboxes
  id: checklist
  attributes:
@@ -15,9 +16,9 @@ body:
  required: true
  - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
  required: true
- - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
+ - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
  required: true
- - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+ - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
  required: true
  - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
  required: true
@@ -26,38 +27,14 @@ body:
  id: region
  attributes:
  label: Region
- description: "Enter the region the site is accessible from"
- placeholder: "India"
+ description: Enter the country/region that the site is accessible from
+ placeholder: India
  - type: textarea
  id: description
  attributes:
- label: Description
- description: |
- Provide an explanation of your issue in an arbitrary form.
- Provide any additional information, any suggested solutions, and as much context and examples as possible
- placeholder: WRITE DESCRIPTION HERE
- validations:
- required: true
- - type: textarea
- id: log
- attributes:
- label: Verbose log
- description: |
- Provide the complete verbose output of yt-dlp **that clearly demonstrates the problem**.
- Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
- It should look similar to this:
- placeholder: |
- [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
- [debug] Portable config file: yt-dlp.conf
- [debug] Portable config: ['-i']
- [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
- [debug] yt-dlp version %(version)s (exe)
- [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
- [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
- [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
- [debug] Proxy map: {}
- yt-dlp is up to date (%(version)s)
- <more lines>
- render: shell
+ label: Provide a description that is worded well enough to be understood
+ description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+ placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
  validations:
  required: true
+ %(verbose)s
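For context, a minimal sketch of how the `%(...)s` placeholders in these template sources can be expanded into the concrete issue forms shown earlier. The field values, paths and snippet bodies below are illustrative assumptions, not the project's actual generator script; in the real repository a devscript performs this generation step.

```python
# Minimal sketch (illustrative, not the project's actual generator):
# expand the %(...)s placeholders of an ISSUE_TEMPLATE_tmpl file into a
# concrete issue form using ordinary Python %-formatting with named fields.
fields = {
    "version": "2023.01.06",
    # Stand-ins for the shared blocks; the real blocks are the YAML snippets
    # visible as added lines in the ISSUE_TEMPLATE diffs above.
    "no_skip": "  - type: checkboxes\n    attributes:\n      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE\n      ...",
    "verbose": "  - type: checkboxes\n    id: verbose\n    attributes:\n      label: Provide verbose output that clearly demonstrates the problem\n      ...",
}

with open(".github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml", encoding="utf-8") as f:
    template = f.read()

with open(".github/ISSUE_TEMPLATE/1_broken_site.yml", "w", encoding="utf-8") as f:
    f.write(template % fields)  # named %-substitution fills version and shared blocks
```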
(file header not captured in the mirror; this diff is for the "Site support request" template source, presumably .github/ISSUE_TEMPLATE_tmpl/2_site_support_request.yml)

@@ -2,6 +2,7 @@ name: Site support request
  description: Request support for a new site
  labels: [triage, site-request]
  body:
+ %(no_skip)s
  - type: checkboxes
  id: checklist
  attributes:
@@ -15,9 +16,9 @@ body:
  required: true
  - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
  required: true
- - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
+ - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
  required: true
- - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
+ - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
  required: true
  - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
  required: true
@@ -26,8 +27,8 @@ body:
  id: region
  attributes:
  label: Region
- description: "Enter the region the site is accessible from"
- placeholder: "India"
+ description: Enter the country/region that the site is accessible from
+ placeholder: India
  - type: textarea
  id: example-urls
  attributes:
@@ -43,32 +44,9 @@ body:
  - type: textarea
  id: description
  attributes:
- label: Description
- description: |
- Provide any additional information
- placeholder: WRITE DESCRIPTION HERE
- validations:
- required: true
- - type: textarea
- id: log
- attributes:
- label: Verbose log
- description: |
- Provide the complete verbose output **using one of the example URLs provided above**.
- Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
- It should look similar to this:
- placeholder: |
- [debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
- [debug] Portable config file: yt-dlp.conf
- [debug] Portable config: ['-i']
- [debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
- [debug] yt-dlp version %(version)s (exe)
- [debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
- [debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
- [debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
- [debug] Proxy map: {}
- yt-dlp is up to date (%(version)s)
- <more lines>
- render: shell
+ label: Provide a description that is worded well enough to be understood
+ description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
+ placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
  validations:
  required: true
+ %(verbose)s
@@ -2,6 +2,7 @@ name: Site feature request
description: Request a new functionality for a supported site
labels: [triage, site-enhancement]
body:
%(no_skip)s
- type: checkboxes
id: checklist
attributes:
@@ -9,13 +10,13 @@ body:
description: |
Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
options:
- label: I'm reporting a site feature request
- label: I'm requesting a site-specific feature
required: true
- label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
required: true
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
required: true
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
required: true
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
required: true
@@ -24,8 +25,8 @@ body:
id: region
attributes:
label: Region
description: "Enter the region the site is accessible from"
placeholder: "India"
description: Enter the country/region that the site is accessible from
placeholder: India
- type: textarea
id: example-urls
attributes:
@@ -39,34 +40,9 @@ body:
- type: textarea
id: description
attributes:
label: Description
description: |
Provide an explanation of your site feature request in an arbitrary form.
Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
Provide any additional information, any suggested solutions, and as much context and examples as possible
placeholder: WRITE DESCRIPTION HERE
validations:
required: true
- type: textarea
id: log
attributes:
label: Verbose log
description: |
Provide the complete verbose output of yt-dlp that demonstrates the need for the enhancement.
Add the `-vU` flag to your command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
It should look similar to this:
placeholder: |
[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
[debug] Portable config file: yt-dlp.conf
[debug] Portable config: ['-i']
[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
[debug] yt-dlp version %(version)s (exe)
[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
[debug] Proxy map: {}
yt-dlp is up to date (%(version)s)
<more lines>
render: shell
label: Provide a description that is worded well enough to be understood
description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
validations:
required: true
%(verbose)s

38  .github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml  vendored

@@ -2,6 +2,7 @@ name: Bug report
description: Report a bug unrelated to any particular site or extractor
labels: [triage, bug]
body:
%(no_skip)s
- type: checkboxes
id: checklist
attributes:
@@ -15,43 +16,18 @@ body:
required: true
- label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
required: true
- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/ytdl-org/youtube-dl#video-url-contains-an-ampersand-and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
- label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
required: true
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
required: true
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
required: true
- type: textarea
id: description
attributes:
label: Description
description: |
Provide an explanation of your issue in an arbitrary form.
Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
Provide any additional information, any suggested solutions, and as much context and examples as possible
placeholder: WRITE DESCRIPTION HERE
validations:
required: true
- type: textarea
id: log
attributes:
label: Verbose log
description: |
Provide the complete verbose output of yt-dlp **that clearly demonstrates the problem**.
Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
It should look similar to this:
placeholder: |
[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
[debug] Portable config file: yt-dlp.conf
[debug] Portable config: ['-i']
[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
[debug] yt-dlp version %(version)s (exe)
[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
[debug] Proxy map: {}
yt-dlp is up to date (%(version)s)
<more lines>
render: shell
label: Provide a description that is worded well enough to be understood
description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
validations:
required: true
%(verbose)s

@@ -2,6 +2,7 @@ name: Feature request
description: Request a new functionality unrelated to any particular site or extractor
labels: [triage, enhancement]
body:
%(no_skip)s
- type: checkboxes
id: checklist
attributes:
@@ -9,45 +10,22 @@ body:
description: |
Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
options:
- label: I'm reporting a feature request
- label: I'm requesting a feature unrelated to a specific site
required: true
- label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
required: true
- label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
required: true
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues including closed ones. DO NOT post duplicates
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
required: true
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
required: true
- type: textarea
id: description
attributes:
label: Description
description: |
Provide an explanation of your site feature request in an arbitrary form.
Please make sure the description is worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
Provide any additional information, any suggested solutions, and as much context and examples as possible
placeholder: WRITE DESCRIPTION HERE
label: Provide a description that is worded well enough to be understood
description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
placeholder: Provide any additional information, any suggested solutions, and as much context and examples as possible
validations:
required: true
- type: textarea
id: log
attributes:
label: Verbose log
description: |
If your feature request involves an existing yt-dlp command, provide the complete verbose output of that command.
Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
It should look similar to this:
placeholder: |
[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
[debug] Portable config file: yt-dlp.conf
[debug] Portable config: ['-i']
[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
[debug] yt-dlp version 2021.12.01 (exe)
[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
[debug] Proxy map: {}
yt-dlp is up to date (2021.12.01)
<more lines>
render: shell
%(verbose_optional)s

48  .github/ISSUE_TEMPLATE_tmpl/6_question.yml  vendored

@@ -2,6 +2,13 @@ name: Ask question
description: Ask yt-dlp related question
labels: [question]
body:
%(no_skip)s
- type: markdown
attributes:
value: |
### Make sure you are **only** asking a question and not reporting a bug or requesting a feature.
If your question contains "isn't working" or "can you add", this is most likely the wrong template.
If you are in doubt whether this is the right template, **USE ANOTHER TEMPLATE**!
- type: checkboxes
id: checklist
attributes:
@@ -9,45 +16,22 @@ body:
description: |
Carefully read and work through this check list in order to prevent the most common mistakes and misuse of yt-dlp:
options:
- label: I'm asking a question and **not** reporting a bug/feature request
- label: I'm asking a question and **not** reporting a bug or requesting a feature
required: true
- label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
required: true
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
- label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
required: true
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions including closed ones
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
required: true
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
required: true
- type: textarea
id: question
attributes:
label: Question
description: |
Ask your question in an arbitrary form.
Please make sure it's worded well enough to be understood, see [is-the-description-of-the-issue-itself-sufficient](https://github.com/ytdl-org/youtube-dl#is-the-description-of-the-issue-itself-sufficient).
Provide any additional information and as much context and examples as possible.
If your question contains "isn't working" or "can you add", this is most likely the wrong template.
If you are in doubt if this is the right template, use another template!
placeholder: WRITE QUESTION HERE
label: Please make sure the question is worded well enough to be understood
description: See [is-the-description-of-the-issue-itself-sufficient](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-description-of-the-issue-itself-sufficient)
placeholder: Provide any additional information and as much context and examples as possible
validations:
required: true
- type: textarea
id: log
attributes:
label: Verbose log
description: |
If your question involves a yt-dlp command, provide the complete verbose output of that command.
Add the `-vU` flag to **your** command line you run yt-dlp with (`yt-dlp -vU <your command line>`), copy the WHOLE output and insert it below.
It should look similar to this:
placeholder: |
[debug] Command-line config: ['-vU', 'http://www.youtube.com/watch?v=BaW_jenozKc']
[debug] Portable config file: yt-dlp.conf
[debug] Portable config: ['-i']
[debug] Encodings: locale cp1252, fs utf-8, stdout utf-8, stderr utf-8, pref cp1252
[debug] yt-dlp version 2021.12.01 (exe)
[debug] Python version 3.8.8 (CPython 64bit) - Windows-10-10.0.19041-SP0
[debug] exe versions: ffmpeg 3.0.1, ffprobe 3.0.1
[debug] Optional libraries: Cryptodome, keyring, mutagen, sqlite, websockets
[debug] Proxy map: {}
yt-dlp is up to date (2021.12.01)
<more lines>
render: shell
%(verbose_optional)s

26  .github/PULL_REQUEST_TEMPLATE.md  vendored

@@ -1,5 +1,23 @@
**IMPORTANT**: PRs without the template will be CLOSED

### Description of your *pull request* and other information

<!--
# Please follow the guide below

Explanation of your *pull request* in arbitrary form goes here. Please **make sure the description explains the purpose and effect** of your *pull request* and is worded well enough to be understood. Provide as much **context and examples** as possible

-->

ADD DESCRIPTION HERE

Fixes #


<details open><summary>Template</summary> <!-- OPEN is intentional -->

<!--

# PLEASE FOLLOW THE GUIDE BELOW

- You will be asked some questions, please read them **carefully** and answer honestly
- Put an `x` into all the boxes `[ ]` relevant to your *pull request* (like [x])
@@ -22,8 +40,4 @@
- [ ] Core bug fix/improvement
- [ ] New feature (It is strongly [recommended to open an issue first](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#adding-new-feature-or-making-overarching-changes))

---

### Description of your *pull request* and other information

Explanation of your *pull request* in arbitrary form goes here. Please **make sure the description explains the purpose and effect** of your *pull request* and is worded well enough to be understood. Provide as much **context and examples** as possible.
</details>

598  .github/workflows/build.yml  vendored

@@ -1,98 +1,117 @@
name: Build
on: workflow_dispatch
permissions:
contents: read

jobs:
build_unix:
prepare:
permissions:
contents: write # for push_release
runs-on: ubuntu-latest
outputs:
version_suffix: ${{ steps.version_suffix.outputs.version_suffix }}
ytdlp_version: ${{ steps.bump_version.outputs.ytdlp_version }}
upload_url: ${{ steps.create_release.outputs.upload_url }}
sha256_bin: ${{ steps.sha256_bin.outputs.sha256_bin }}
sha512_bin: ${{ steps.sha512_bin.outputs.sha512_bin }}
sha256_tar: ${{ steps.sha256_tar.outputs.sha256_tar }}
sha512_tar: ${{ steps.sha512_tar.outputs.sha512_tar }}

head_sha: ${{ steps.push_release.outputs.head_sha }}
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v2
- uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: Install packages
run: sudo apt-get -y install zip pandoc man
python-version: '3.10'

- name: Set version suffix
id: version_suffix
env:
PUSH_VERSION_COMMIT: ${{ secrets.PUSH_VERSION_COMMIT }}
if: "env.PUSH_VERSION_COMMIT == ''"
run: echo ::set-output name=version_suffix::$(date -u +"%H%M%S")
run: echo "version_suffix=$(date -u +"%H%M%S")" >> "$GITHUB_OUTPUT"
- name: Bump version
id: bump_version
run: |
python devscripts/update-version.py ${{ steps.version_suffix.outputs.version_suffix }}
make issuetemplates

- name: Push to release
id: push_release
run: |
git config --global user.name github-actions
git config --global user.email github-actions@example.com
git add -u
git commit -m "[version] update" -m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all"
git commit -m "[version] update" -m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all :ci run dl"
git push origin --force ${{ github.event.ref }}:release
echo ::set-output name=head_sha::$(git rev-parse HEAD)
echo "head_sha=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT"
- name: Update master
id: push_master
env:
PUSH_VERSION_COMMIT: ${{ secrets.PUSH_VERSION_COMMIT }}
if: "env.PUSH_VERSION_COMMIT != ''"
run: git push origin ${{ github.event.ref }}
- name: Get Changelog
id: get_changelog
run: |
changelog=$(cat Changelog.md | grep -oPz '(?s)(?<=### ${{ steps.bump_version.outputs.ytdlp_version }}\n{2}).+?(?=\n{2,3}###)') || true
echo "changelog<<EOF" >> $GITHUB_ENV
echo "$changelog" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV

- name: Build lazy extractors
id: lazy_extractors
run: python devscripts/make_lazy_extractors.py
- name: Run Make
run: make all tar
- name: Get SHA2-256SUMS for yt-dlp
id: sha256_bin
run: echo "::set-output name=sha256_bin::$(sha256sum yt-dlp | awk '{print $1}')"
- name: Get SHA2-256SUMS for yt-dlp.tar.gz
id: sha256_tar
run: echo "::set-output name=sha256_tar::$(sha256sum yt-dlp.tar.gz | awk '{print $1}')"
- name: Get SHA2-512SUMS for yt-dlp
id: sha512_bin
run: echo "::set-output name=sha512_bin::$(sha512sum yt-dlp | awk '{print $1}')"
- name: Get SHA2-512SUMS for yt-dlp.tar.gz
id: sha512_tar
run: echo "::set-output name=sha512_tar::$(sha512sum yt-dlp.tar.gz | awk '{print $1}')"

- name: Install dependencies for pypi
env:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
if: "env.PYPI_TOKEN != ''"
build_unix:
needs: prepare
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.10'
- uses: conda-incubator/setup-miniconda@v2
with:
miniforge-variant: Mambaforge
use-mamba: true
channels: conda-forge
auto-update-conda: true
activate-environment: ''
auto-activate-base: false
- name: Install Requirements
run: |
python -m pip install --upgrade pip
pip install setuptools wheel twine
- name: Build and publish on pypi
sudo apt-get -y install zip pandoc man sed
python -m pip install -U pip setuptools wheel twine
python -m pip install -U Pyinstaller -r requirements.txt
reqs=$(mktemp)
echo -e 'python=3.10.*\npyinstaller' >$reqs
sed 's/^brotli.*/brotli-python/' <requirements.txt >>$reqs
mamba create -n build --file $reqs

- name: Prepare
run: |
python devscripts/update-version.py ${{ needs.prepare.outputs.version_suffix }}
python devscripts/make_lazy_extractors.py
- name: Build Unix platform-independent binary
run: |
make all tar
- name: Build Unix standalone binary
shell: bash -l {0}
run: |
unset LD_LIBRARY_PATH # Harmful; set by setup-python
conda activate build
python pyinst.py --onedir
(cd ./dist/yt-dlp_linux && zip -r ../yt-dlp_linux.zip .)
python pyinst.py

- name: Upload artifacts
uses: actions/upload-artifact@v3
with:
path: |
yt-dlp
yt-dlp.tar.gz
dist/yt-dlp_linux
dist/yt-dlp_linux.zip

- name: Build and publish on PyPi
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
if: "env.TWINE_PASSWORD != ''"
run: |
rm -rf dist/*
python devscripts/set-variant.py pip -M "You installed yt-dlp with pip or using the wheel from PyPi; Use that to update"
python setup.py sdist bdist_wheel
twine upload dist/*

- name: Install SSH private key
- name: Install SSH private key for Homebrew
env:
BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
if: "env.BREW_TOKEN != ''"
@@ -105,309 +124,270 @@ jobs:
if: "env.BREW_TOKEN != ''"
run: |
git clone git@github.com:yt-dlp/homebrew-taps taps/
python3 devscripts/update-formulae.py taps/Formula/yt-dlp.rb "${{ steps.bump_version.outputs.ytdlp_version }}"
python devscripts/update-formulae.py taps/Formula/yt-dlp.rb "${{ needs.prepare.outputs.ytdlp_version }}"
git -C taps/ config user.name github-actions
git -C taps/ config user.email github-actions@example.com
git -C taps/ commit -am 'yt-dlp: ${{ steps.bump_version.outputs.ytdlp_version }}'
git -C taps/ commit -am 'yt-dlp: ${{ needs.prepare.outputs.ytdlp_version }}'
git -C taps/ push

- name: Create Release
id: create_release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ steps.bump_version.outputs.ytdlp_version }}
release_name: yt-dlp ${{ steps.bump_version.outputs.ytdlp_version }}
commitish: ${{ steps.push_release.outputs.head_sha }}
body: |
#### [A description of the various files]((https://github.com/yt-dlp/yt-dlp#release-files)) are in the README

---
build_linux_arm:
permissions:
packages: write # for Creating cache
runs-on: ubuntu-latest
needs: prepare
strategy:
matrix:
architecture:
- armv7
- aarch64

### Changelog:
${{ env.changelog }}
draft: false
prerelease: false
- name: Upload yt-dlp Unix binary
id: upload-release-asset
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v3
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./yt-dlp
asset_name: yt-dlp
asset_content_type: application/octet-stream
- name: Upload Source tar
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
path: ./repo
- name: Virtualized Install, Prepare & Build
uses: yt-dlp/run-on-arch-action@v2
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./yt-dlp.tar.gz
asset_name: yt-dlp.tar.gz
asset_content_type: application/gzip
githubToken: ${{ github.token }} # To cache image
arch: ${{ matrix.architecture }}
distro: ubuntu18.04 # Standalone executable should be built on minimum supported OS
dockerRunArgs: --volume "${PWD}/repo:/repo"
install: | # Installing Python 3.10 from the Deadsnakes repo raises errors
apt update
apt -y install zlib1g-dev python3.8 python3.8-dev python3.8-distutils python3-pip
python3.8 -m pip install -U pip setuptools wheel
# Cannot access requirements.txt from the repo directory at this stage
python3.8 -m pip install -U Pyinstaller mutagen pycryptodomex websockets brotli certifi

run: |
cd repo
python3.8 -m pip install -U Pyinstaller -r requirements.txt # Cached version may be out of date
python3.8 devscripts/update-version.py ${{ needs.prepare.outputs.version_suffix }}
python3.8 devscripts/make_lazy_extractors.py
python3.8 pyinst.py

- name: Upload artifacts
uses: actions/upload-artifact@v3
with:
path: | # run-on-arch-action designates armv7l as armv7
repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}


build_macos:
runs-on: macos-11
needs: build_unix
outputs:
sha256_macos: ${{ steps.sha256_macos.outputs.sha256_macos }}
sha512_macos: ${{ steps.sha512_macos.outputs.sha512_macos }}
sha256_macos_zip: ${{ steps.sha256_macos_zip.outputs.sha256_macos_zip }}
sha512_macos_zip: ${{ steps.sha512_macos_zip.outputs.sha512_macos_zip }}
needs: prepare

steps:
- uses: actions/checkout@v2
# In order to create a universal2 application, the version of python3 in /usr/bin has to be used
- uses: actions/checkout@v3
# NB: In order to create a universal2 application, the version of python3 in /usr/bin has to be used
- name: Install Requirements
run: |
brew install coreutils
/usr/bin/python3 -m pip install -U --user pip Pyinstaller==4.10 -r requirements.txt
- name: Bump version
id: bump_version
run: /usr/bin/python3 devscripts/update-version.py
- name: Build lazy extractors
id: lazy_extractors
run: /usr/bin/python3 devscripts/make_lazy_extractors.py
- name: Run PyInstaller Script
run: /usr/bin/python3 pyinst.py --target-architecture universal2 --onefile
- name: Upload yt-dlp MacOS binary
id: upload-release-macos
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./dist/yt-dlp_macos
asset_name: yt-dlp_macos
asset_content_type: application/octet-stream
- name: Get SHA2-256SUMS for yt-dlp_macos
id: sha256_macos
run: echo "::set-output name=sha256_macos::$(sha256sum dist/yt-dlp_macos | awk '{print $1}')"
- name: Get SHA2-512SUMS for yt-dlp_macos
id: sha512_macos
run: echo "::set-output name=sha512_macos::$(sha512sum dist/yt-dlp_macos | awk '{print $1}')"
/usr/bin/python3 -m pip install -U --user pip Pyinstaller -r requirements.txt

- name: Run PyInstaller Script with --onedir
- name: Prepare
run: |
/usr/bin/python3 pyinst.py --target-architecture universal2 --onedir
zip ./dist/yt-dlp_macos.zip ./dist/yt-dlp_macos
- name: Upload yt-dlp MacOS onedir
id: upload-release-macos-zip
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
/usr/bin/python3 devscripts/update-version.py ${{ needs.prepare.outputs.version_suffix }}
/usr/bin/python3 devscripts/make_lazy_extractors.py
- name: Build
run: |
/usr/bin/python3 pyinst.py --target-architecture universal2 --onedir
(cd ./dist/yt-dlp_macos && zip -r ../yt-dlp_macos.zip .)
/usr/bin/python3 pyinst.py --target-architecture universal2

- name: Upload artifacts
uses: actions/upload-artifact@v3
with:
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./dist/yt-dlp_macos.zip
asset_name: yt-dlp_macos.zip
asset_content_type: application/zip
- name: Get SHA2-256SUMS for yt-dlp_macos.zip
id: sha256_macos_zip
run: echo "::set-output name=sha256_macos_zip::$(sha256sum dist/yt-dlp_macos.zip | awk '{print $1}')"
- name: Get SHA2-512SUMS for yt-dlp_macos.zip
id: sha512_macos_zip
run: echo "::set-output name=sha512_macos_zip::$(sha512sum dist/yt-dlp_macos.zip | awk '{print $1}')"
path: |
dist/yt-dlp_macos
dist/yt-dlp_macos.zip


build_macos_legacy:
runs-on: macos-latest
needs: prepare

steps:
- uses: actions/checkout@v3
- name: Install Python
# We need the official Python, because the GA ones only support newer macOS versions
env:
PYTHON_VERSION: 3.10.5
MACOSX_DEPLOYMENT_TARGET: 10.9 # Used up by the Python build tools
run: |
# Hack to get the latest patch version. Uncomment if needed
#brew install python@3.10
#export PYTHON_VERSION=$( $(brew --prefix)/opt/python@3.10/bin/python3 --version | cut -d ' ' -f 2 )
curl https://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}-macos11.pkg -o "python.pkg"
sudo installer -pkg python.pkg -target /
python3 --version
- name: Install Requirements
run: |
brew install coreutils
python3 -m pip install -U --user pip Pyinstaller -r requirements.txt

- name: Prepare
run: |
python3 devscripts/update-version.py ${{ needs.prepare.outputs.version_suffix }}
python3 devscripts/make_lazy_extractors.py
- name: Build
run: |
python3 pyinst.py
mv dist/yt-dlp_macos dist/yt-dlp_macos_legacy

- name: Upload artifacts
uses: actions/upload-artifact@v3
with:
path: |
dist/yt-dlp_macos_legacy


build_windows:
runs-on: windows-latest
needs: build_unix
outputs:
sha256_win: ${{ steps.sha256_win.outputs.sha256_win }}
sha512_win: ${{ steps.sha512_win.outputs.sha512_win }}
sha256_py2exe: ${{ steps.sha256_py2exe.outputs.sha256_py2exe }}
sha512_py2exe: ${{ steps.sha512_py2exe.outputs.sha512_py2exe }}
sha256_win_zip: ${{ steps.sha256_win_zip.outputs.sha256_win_zip }}
sha512_win_zip: ${{ steps.sha512_win_zip.outputs.sha512_win_zip }}
needs: prepare

steps:
- uses: actions/checkout@v2
# 3.8 is used for Win7 support
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with: # 3.8 is used for Win7 support
python-version: '3.8'
- name: Install Requirements
# Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
run: |
python -m pip install --upgrade pip setuptools wheel py2exe
pip install "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-4.10-py3-none-any.whl" -r requirements.txt
- name: Bump version
id: bump_version
env:
version_suffix: ${{ needs.build_unix.outputs.version_suffix }}
run: python devscripts/update-version.py ${{ env.version_suffix }}
- name: Build lazy extractors
id: lazy_extractors
run: python devscripts/make_lazy_extractors.py
- name: Run PyInstaller Script
run: python pyinst.py
- name: Upload yt-dlp.exe Windows binary
id: upload-release-windows
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./dist/yt-dlp.exe
asset_name: yt-dlp.exe
asset_content_type: application/vnd.microsoft.portable-executable
- name: Get SHA2-256SUMS for yt-dlp.exe
id: sha256_win
run: echo "::set-output name=sha256_win::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA256).Hash.ToLower())"
- name: Get SHA2-512SUMS for yt-dlp.exe
id: sha512_win
run: echo "::set-output name=sha512_win::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA512).Hash.ToLower())"
run: | # Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
python -m pip install -U pip setuptools wheel py2exe
pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-5.3-py3-none-any.whl" -r requirements.txt

- name: Run PyInstaller Script with --onedir
- name: Prepare
run: |
python pyinst.py --onedir
Compress-Archive -LiteralPath ./dist/yt-dlp -DestinationPath ./dist/yt-dlp_win.zip
- name: Upload yt-dlp Windows onedir
id: upload-release-windows-zip
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./dist/yt-dlp_win.zip
asset_name: yt-dlp_win.zip
asset_content_type: application/zip
- name: Get SHA2-256SUMS for yt-dlp_win.zip
id: sha256_win_zip
run: echo "::set-output name=sha256_win_zip::$((Get-FileHash dist\yt-dlp_win.zip -Algorithm SHA256).Hash.ToLower())"
- name: Get SHA2-512SUMS for yt-dlp_win.zip
id: sha512_win_zip
run: echo "::set-output name=sha512_win_zip::$((Get-FileHash dist\yt-dlp_win.zip -Algorithm SHA512).Hash.ToLower())"
python devscripts/update-version.py ${{ needs.prepare.outputs.version_suffix }}
python devscripts/make_lazy_extractors.py
- name: Build
run: |
python setup.py py2exe
Move-Item ./dist/yt-dlp.exe ./dist/yt-dlp_min.exe
python pyinst.py
python pyinst.py --onedir
Compress-Archive -Path ./dist/yt-dlp/* -DestinationPath ./dist/yt-dlp_win.zip

- name: Run py2exe Script
run: python setup.py py2exe
- name: Upload yt-dlp_min.exe Windows binary
id: upload-release-windows-py2exe
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload artifacts
uses: actions/upload-artifact@v3
with:
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./dist/yt-dlp.exe
asset_name: yt-dlp_min.exe
asset_content_type: application/vnd.microsoft.portable-executable
- name: Get SHA2-256SUMS for yt-dlp_min.exe
id: sha256_py2exe
run: echo "::set-output name=sha256_py2exe::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA256).Hash.ToLower())"
- name: Get SHA2-512SUMS for yt-dlp_min.exe
id: sha512_py2exe
run: echo "::set-output name=sha512_py2exe::$((Get-FileHash dist\yt-dlp.exe -Algorithm SHA512).Hash.ToLower())"
path: |
dist/yt-dlp.exe
dist/yt-dlp_min.exe
dist/yt-dlp_win.zip


build_windows32:
runs-on: windows-latest
needs: build_unix

outputs:
sha256_win32: ${{ steps.sha256_win32.outputs.sha256_win32 }}
sha512_win32: ${{ steps.sha512_win32.outputs.sha512_win32 }}
needs: prepare

steps:
- uses: actions/checkout@v2
# 3.7 is used for Vista support. See https://github.com/yt-dlp/yt-dlp/issues/390
- name: Set up Python 3.7 32-Bit
uses: actions/setup-python@v2
with:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with: # 3.7 is used for Vista support. See https://github.com/yt-dlp/yt-dlp/issues/390
python-version: '3.7'
architecture: 'x86'
- name: Install Requirements
run: |
python -m pip install --upgrade pip setuptools wheel
pip install "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-4.10-py3-none-any.whl" -r requirements.txt
- name: Bump version
id: bump_version
env:
version_suffix: ${{ needs.build_unix.outputs.version_suffix }}
run: python devscripts/update-version.py ${{ env.version_suffix }}
- name: Build lazy extractors
id: lazy_extractors
run: python devscripts/make_lazy_extractors.py
- name: Run PyInstaller Script for 32 Bit
run: python pyinst.py
- name: Upload Executable yt-dlp_x86.exe
id: upload-release-windows32
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./dist/yt-dlp_x86.exe
asset_name: yt-dlp_x86.exe
asset_content_type: application/vnd.microsoft.portable-executable
- name: Get SHA2-256SUMS for yt-dlp_x86.exe
id: sha256_win32
run: echo "::set-output name=sha256_win32::$((Get-FileHash dist\yt-dlp_x86.exe -Algorithm SHA256).Hash.ToLower())"
- name: Get SHA2-512SUMS for yt-dlp_x86.exe
id: sha512_win32
run: echo "::set-output name=sha512_win32::$((Get-FileHash dist\yt-dlp_x86.exe -Algorithm SHA512).Hash.ToLower())"
python -m pip install -U pip setuptools wheel
pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-5.3-py3-none-any.whl" -r requirements.txt

finish:
- name: Prepare
run: |
python devscripts/update-version.py ${{ needs.prepare.outputs.version_suffix }}
python devscripts/make_lazy_extractors.py
- name: Build
run: |
python pyinst.py

- name: Upload artifacts
uses: actions/upload-artifact@v3
with:
path: |
dist/yt-dlp_x86.exe


publish_release:
permissions:
contents: write # for action-gh-release
runs-on: ubuntu-latest
needs: [build_unix, build_windows, build_windows32, build_macos]
needs: [prepare, build_unix, build_linux_arm, build_windows, build_windows32, build_macos, build_macos_legacy]

steps:
- name: Make SHA2-256SUMS file
env:
SHA256_BIN: ${{ needs.build_unix.outputs.sha256_bin }}
SHA256_TAR: ${{ needs.build_unix.outputs.sha256_tar }}
SHA256_WIN: ${{ needs.build_windows.outputs.sha256_win }}
SHA256_PY2EXE: ${{ needs.build_windows.outputs.sha256_py2exe }}
SHA256_WIN_ZIP: ${{ needs.build_windows.outputs.sha256_win_zip }}
SHA256_WIN32: ${{ needs.build_windows32.outputs.sha256_win32 }}
SHA256_MACOS: ${{ needs.build_macos.outputs.sha256_macos }}
SHA256_MACOS_ZIP: ${{ needs.build_macos.outputs.sha256_macos_zip }}
- uses: actions/checkout@v3
- uses: actions/download-artifact@v3

- name: Get Changelog
run: |
echo "${{ env.SHA256_BIN }} yt-dlp" >> SHA2-256SUMS
echo "${{ env.SHA256_TAR }} yt-dlp.tar.gz" >> SHA2-256SUMS
echo "${{ env.SHA256_WIN }} yt-dlp.exe" >> SHA2-256SUMS
echo "${{ env.SHA256_PY2EXE }} yt-dlp_min.exe" >> SHA2-256SUMS
echo "${{ env.SHA256_WIN32 }} yt-dlp_x86.exe" >> SHA2-256SUMS
echo "${{ env.SHA256_WIN_ZIP }} yt-dlp_win.zip" >> SHA2-256SUMS
echo "${{ env.SHA256_MACOS }} yt-dlp_macos" >> SHA2-256SUMS
echo "${{ env.SHA256_MACOS_ZIP }} yt-dlp_macos.zip" >> SHA2-256SUMS
- name: Upload 256SUMS file
id: upload-sums
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./SHA2-256SUMS
asset_name: SHA2-256SUMS
asset_content_type: text/plain
- name: Make SHA2-512SUMS file
env:
SHA512_BIN: ${{ needs.build_unix.outputs.sha512_bin }}
SHA512_TAR: ${{ needs.build_unix.outputs.sha512_tar }}
SHA512_WIN: ${{ needs.build_windows.outputs.sha512_win }}
SHA512_PY2EXE: ${{ needs.build_windows.outputs.sha512_py2exe }}
SHA512_WIN_ZIP: ${{ needs.build_windows.outputs.sha512_win_zip }}
SHA512_WIN32: ${{ needs.build_windows32.outputs.sha512_win32 }}
SHA512_MACOS: ${{ needs.build_macos.outputs.sha512_macos }}
SHA512_MACOS_ZIP: ${{ needs.build_macos.outputs.sha512_macos_zip }}
changelog=$(grep -oPz '(?s)(?<=### ${{ needs.prepare.outputs.ytdlp_version }}\n{2}).+?(?=\n{2,3}###)' Changelog.md) || true
echo "changelog<<EOF" >> $GITHUB_ENV
echo "$changelog" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
- name: Make Update spec
run: |
echo "${{ env.SHA512_BIN }} yt-dlp" >> SHA2-512SUMS
echo "${{ env.SHA512_TAR }} yt-dlp.tar.gz" >> SHA2-512SUMS
echo "${{ env.SHA512_WIN }} yt-dlp.exe" >> SHA2-512SUMS
echo "${{ env.SHA512_WIN_ZIP }} yt-dlp_win.zip" >> SHA2-512SUMS
echo "${{ env.SHA512_PY2EXE }} yt-dlp_min.exe" >> SHA2-512SUMS
echo "${{ env.SHA512_WIN32 }} yt-dlp_x86.exe" >> SHA2-512SUMS
echo "${{ env.SHA512_MACOS }} yt-dlp_macos" >> SHA2-512SUMS
echo "${{ env.SHA512_MACOS_ZIP }} yt-dlp_macos.zip" >> SHA2-512SUMS
- name: Upload 512SUMS file
id: upload-512sums
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
echo "# This file is used for regulating self-update" >> _update_spec
echo "lock 2022.07.18 .+ Python 3.6" >> _update_spec
- name: Make SHA2-SUMS files
run: |
sha256sum artifact/yt-dlp | awk '{print $1 " yt-dlp"}' >> SHA2-256SUMS
sha256sum artifact/yt-dlp.tar.gz | awk '{print $1 " yt-dlp.tar.gz"}' >> SHA2-256SUMS
sha256sum artifact/yt-dlp.exe | awk '{print $1 " yt-dlp.exe"}' >> SHA2-256SUMS
sha256sum artifact/yt-dlp_win.zip | awk '{print $1 " yt-dlp_win.zip"}' >> SHA2-256SUMS
sha256sum artifact/yt-dlp_min.exe | awk '{print $1 " yt-dlp_min.exe"}' >> SHA2-256SUMS
sha256sum artifact/yt-dlp_x86.exe | awk '{print $1 " yt-dlp_x86.exe"}' >> SHA2-256SUMS
sha256sum artifact/yt-dlp_macos | awk '{print $1 " yt-dlp_macos"}' >> SHA2-256SUMS
sha256sum artifact/yt-dlp_macos.zip | awk '{print $1 " yt-dlp_macos.zip"}' >> SHA2-256SUMS
sha256sum artifact/yt-dlp_macos_legacy | awk '{print $1 " yt-dlp_macos_legacy"}' >> SHA2-256SUMS
sha256sum artifact/yt-dlp_linux_armv7l | awk '{print $1 " yt-dlp_linux_armv7l"}' >> SHA2-256SUMS
sha256sum artifact/yt-dlp_linux_aarch64 | awk '{print $1 " yt-dlp_linux_aarch64"}' >> SHA2-256SUMS
sha256sum artifact/dist/yt-dlp_linux | awk '{print $1 " yt-dlp_linux"}' >> SHA2-256SUMS
sha256sum artifact/dist/yt-dlp_linux.zip | awk '{print $1 " yt-dlp_linux.zip"}' >> SHA2-256SUMS
sha512sum artifact/yt-dlp | awk '{print $1 " yt-dlp"}' >> SHA2-512SUMS
sha512sum artifact/yt-dlp.tar.gz | awk '{print $1 " yt-dlp.tar.gz"}' >> SHA2-512SUMS
sha512sum artifact/yt-dlp.exe | awk '{print $1 " yt-dlp.exe"}' >> SHA2-512SUMS
sha512sum artifact/yt-dlp_win.zip | awk '{print $1 " yt-dlp_win.zip"}' >> SHA2-512SUMS
sha512sum artifact/yt-dlp_min.exe | awk '{print $1 " yt-dlp_min.exe"}' >> SHA2-512SUMS
sha512sum artifact/yt-dlp_x86.exe | awk '{print $1 " yt-dlp_x86.exe"}' >> SHA2-512SUMS
sha512sum artifact/yt-dlp_macos | awk '{print $1 " yt-dlp_macos"}' >> SHA2-512SUMS
sha512sum artifact/yt-dlp_macos.zip | awk '{print $1 " yt-dlp_macos.zip"}' >> SHA2-512SUMS
sha512sum artifact/yt-dlp_macos_legacy | awk '{print $1 " yt-dlp_macos_legacy"}' >> SHA2-512SUMS
sha512sum artifact/yt-dlp_linux_armv7l | awk '{print $1 " yt-dlp_linux_armv7l"}' >> SHA2-512SUMS
sha512sum artifact/yt-dlp_linux_aarch64 | awk '{print $1 " yt-dlp_linux_aarch64"}' >> SHA2-512SUMS
sha512sum artifact/dist/yt-dlp_linux | awk '{print $1 " yt-dlp_linux"}' >> SHA2-512SUMS
sha512sum artifact/dist/yt-dlp_linux.zip | awk '{print $1 " yt-dlp_linux.zip"}' >> SHA2-512SUMS

- name: Publish Release
uses: yt-dlp/action-gh-release@v1
with:
upload_url: ${{ needs.build_unix.outputs.upload_url }}
asset_path: ./SHA2-512SUMS
asset_name: SHA2-512SUMS
asset_content_type: text/plain
tag_name: ${{ needs.prepare.outputs.ytdlp_version }}
name: yt-dlp ${{ needs.prepare.outputs.ytdlp_version }}
target_commitish: ${{ needs.prepare.outputs.head_sha }}
body: |
#### [A description of the various files]((https://github.com/yt-dlp/yt-dlp#release-files)) are in the README

---
<details open><summary><h3>Changelog</summary>
<p>

${{ env.changelog }}

</p>
</details>
files: |
SHA2-256SUMS
SHA2-512SUMS
artifact/yt-dlp
artifact/yt-dlp.tar.gz
artifact/yt-dlp.exe
artifact/yt-dlp_win.zip
artifact/yt-dlp_min.exe
artifact/yt-dlp_x86.exe
artifact/yt-dlp_macos
artifact/yt-dlp_macos.zip
artifact/yt-dlp_macos_legacy
artifact/yt-dlp_linux_armv7l
artifact/yt-dlp_linux_aarch64
artifact/dist/yt-dlp_linux
artifact/dist/yt-dlp_linux.zip
_update_spec

23  .github/workflows/core.yml  vendored

@@ -1,5 +1,8 @@
name: Core Tests
on: [push, pull_request]
permissions:
contents: read

jobs:
tests:
name: Core Tests
@@ -9,23 +12,27 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest]
# CPython 3.9 is in quick-test
python-version: ['3.6', '3.7', '3.10', 3.11-dev, pypy-3.6, pypy-3.7, pypy-3.8, pypy-3.9]
# CPython 3.11 is in quick-test
python-version: ['3.8', '3.9', '3.10', pypy-3.7, pypy-3.8]
run-tests-ext: [sh]
include:
# atleast one of the tests must be in windows
# atleast one of each CPython/PyPy tests must be in windows
- os: windows-latest
python-version: 3.8
python-version: '3.7'
run-tests-ext: bat
- os: windows-latest
python-version: pypy-3.9
run-tests-ext: bat
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Install pytest
run: pip install pytest
- name: Run tests
continue-on-error: False
run: ./devscripts/run_tests.${{ matrix.run-tests-ext }} core
# Linter is in quick-test
run: |
python3 -m yt_dlp -v || true # Print debug head
./devscripts/run_tests.${{ matrix.run-tests-ext }} core

35  .github/workflows/download.yml  vendored

@@ -1,24 +1,47 @@
name: Download Tests
on: [push, pull_request]
permissions:
contents: read

jobs:
tests:
name: Download Tests
quick:
name: Quick Download Tests
if: "contains(github.event.head_commit.message, 'ci run dl')"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.9
- name: Install test requirements
run: pip install pytest
- name: Run tests
continue-on-error: true
run: ./devscripts/run_tests.sh download

full:
name: Full Download Tests
if: "contains(github.event.head_commit.message, 'ci run dl all')"
runs-on: ${{ matrix.os }}
strategy:
fail-fast: true
matrix:
os: [ubuntu-latest]
python-version: ['3.6', '3.7', '3.9', '3.10', 3.11-dev, pypy-3.6, pypy-3.7, pypy-3.8, pypy-3.9]
python-version: ['3.7', '3.10', 3.11-dev, pypy-3.7, pypy-3.8]
run-tests-ext: [sh]
include:
# atleast one of each CPython/PyPy tests must be in windows
- os: windows-latest
python-version: 3.8
python-version: '3.8'
run-tests-ext: bat
- os: windows-latest
python-version: pypy-3.9
run-tests-ext: bat
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Install pytest

22  .github/workflows/quick-test.yml  vendored

@@ -1,30 +1,32 @@
name: Quick Test
on: [push, pull_request]
permissions:
contents: read

jobs:
tests:
name: Core Test
if: "!contains(github.event.head_commit.message, 'ci skip all')"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
- uses: actions/checkout@v3
- name: Set up Python 3.11
uses: actions/setup-python@v4
with:
python-version: 3.9
python-version: '3.11'
- name: Install test requirements
run: pip install pytest pycryptodomex
- name: Run tests
run: ./devscripts/run_tests.sh core
run: |
python3 -m yt_dlp -v || true
./devscripts/run_tests.sh core
flake8:
name: Linter
if: "!contains(github.event.head_commit.message, 'ci skip all')"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.9
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
- name: Install flake8
run: pip install flake8
- name: Make lazy extractors

15  .gitignore  vendored

@@ -27,8 +27,10 @@ cookies
*.ass
*.avi
*.desktop
*.f4v
*.flac
*.flv
*.gif
*.jpeg
*.jpg
*.m4a
@@ -38,6 +40,8 @@ cookies
*.mov
*.mp3
*.mp4
*.mpga
*.oga
*.ogg
*.opus
*.png
@@ -45,6 +49,7 @@ cookies
*.srt
*.swf
*.swp
*.tt
*.ttml
*.url
*.vtt
@@ -67,6 +72,7 @@ dist/
zip/
tmp/
venv/
.venv/
completions/

# Misc
@@ -83,6 +89,7 @@ updates_key.pem
.tox
*.class
*.isorted
*.stackdump

# Generated
AUTHORS
@@ -114,9 +121,5 @@ yt-dlp.zip
*/extractor/lazy_extractors.py

# Plugins
ytdlp_plugins/extractor/*
!ytdlp_plugins/extractor/__init__.py
!ytdlp_plugins/extractor/sample.py
ytdlp_plugins/postprocessor/*
!ytdlp_plugins/postprocessor/__init__.py
!ytdlp_plugins/postprocessor/sample.py
ytdlp_plugins/
yt-dlp-plugins

140  CONTRIBUTING.md

@@ -161,7 +161,7 @@ The same applies for changes to the documentation, code style, or overarching ch

## Adding support for a new site

If you want to add support for a new site, first of all **make sure** this site is **not dedicated to [copyright infringement](https://www.github.com/ytdl-org/youtube-dl#can-you-add-support-for-this-anime-video-site-or-site-which-shows-current-movies-for-free)**. yt-dlp does **not support** such sites thus pull requests adding support for them **will be rejected**.
If you want to add support for a new site, first of all **make sure** this site is **not dedicated to [copyright infringement](#is-the-website-primarily-used-for-piracy)**. yt-dlp does **not support** such sites thus pull requests adding support for them **will be rejected**.

After you have ensured this site is distributing its content legally, you can follow this quick list (assuming your service is called `yourextractor`):

@@ -195,7 +195,7 @@ After you have ensured this site is distributing its content legally, you can fo
# * A value
# * MD5 checksum; start the string with md5:
# * A regular expression; start the string with re:
# * Any Python type (for example int or float)
# * Any Python type, e.g. int or float
}
}]

@@ -214,7 +214,7 @@ After you have ensured this site is distributing its content legally, you can fo
# TODO more properties (see yt_dlp/extractor/common.py)
}
```
1. Add an import in [`yt_dlp/extractor/extractors.py`](yt_dlp/extractor/extractors.py).
1. Add an import in [`yt_dlp/extractor/_extractors.py`](yt_dlp/extractor/_extractors.py). Note that the class name must end with `IE`.
1. Run `python test/test_download.py TestDownload.test_YourExtractor` (note that `YourExtractor` doesn't end with `IE`). This *should fail* at first, but you can continually re-run it until you're done. If you decide to add more than one test, the tests will then be named `TestDownload.test_YourExtractor`, `TestDownload.test_YourExtractor_1`, `TestDownload.test_YourExtractor_2`, etc. Note that tests with `only_matching` key in test's dict are not counted in. You can also run all the tests in one go with `TestDownload.test_YourExtractor_all`
1. Make sure you have atleast one test for your extractor. Even if all videos covered by the extractor are expected to be inaccessible for automated testing, tests should still be added with a `skip` parameter indicating why the particular test is disabled from running.
1. Have a look at [`yt_dlp/extractor/common.py`](yt_dlp/extractor/common.py) for possible helper methods and a [detailed description of what your extractor should and may return](yt_dlp/extractor/common.py#L91-L426). Add tests and code for as many as you want.
@@ -222,10 +222,10 @@ After you have ensured this site is distributing its content legally, you can fo

    $ flake8 yt_dlp/extractor/yourextractor.py

1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython and PyPy for Python 3.6 and above. Backward compatibility is not required for even older versions of Python.
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython and PyPy for Python 3.7 and above. Backward compatibility is not required for even older versions of Python.
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:

    $ git add yt_dlp/extractor/extractors.py
    $ git add yt_dlp/extractor/_extractors.py
    $ git add yt_dlp/extractor/yourextractor.py
    $ git commit -m '[yourextractor] Add extractor'
    $ git push origin yourextractor
@@ -261,7 +261,7 @@ The aforementioned metafields are the critical data that the extraction does not
|
||||
|
||||
For pornographic sites, appropriate `age_limit` must also be returned.
|
||||
|
||||
The extractor is allowed to return the info dict without url or formats in some special cases if it allows the user to extract usefull information with `--ignore-no-formats-error` - Eg: when the video is a live stream that has not started yet.
|
||||
The extractor is allowed to return the info dict without url or formats in some special cases if it allows the user to extract usefull information with `--ignore-no-formats-error` - e.g. when the video is a live stream that has not started yet.
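
For illustration, a minimal sketch of that live-stream case, assuming a hypothetical `video_info` dict; `raise_no_formats` is the helper extractors commonly use here, and it respects `--ignore-no-formats-error`:

```python
if not formats and video_info.get('status') == 'upcoming':  # hypothetical field names
    self.raise_no_formats('This live stream has not started yet', expected=True)
```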

[Any field](yt_dlp/extractor/common.py#219-L426) apart from the aforementioned ones are considered **optional**. That means that extraction should be **tolerant** to situations when sources for these fields can potentially be unavailable (even if they are always available at the moment) and **future-proof** in order not to break the extraction of general purpose mandatory fields.

@@ -300,14 +300,10 @@ description = meta['summary']  # incorrect

The latter will break extraction process with `KeyError` if `summary` disappears from `meta` at some later time but with the former approach extraction will just go ahead with `description` set to `None` which is perfectly fine (remember `None` is equivalent to the absence of data).

If the data is nested, do not use `.get` chains, but instead make use of the utility functions `try_get` or `traverse_obj`
If the data is nested, do not use `.get` chains, but instead make use of `traverse_obj`.

Considering the above `meta` again, assume you want to extract `["user"]["name"]` and put it in the resulting info dict as `uploader`

```python
uploader = try_get(meta, lambda x: x['user']['name'])  # correct
```
or
```python
uploader = traverse_obj(meta, ('user', 'name'))  # correct
```
@@ -321,6 +317,10 @@ or
```python
uploader = meta.get('user', {}).get('name')  # incorrect
```
or
```python
uploader = try_get(meta, lambda x: x['user']['name'])  # old utility
```

Similarly, you should pass `fatal=False` when extracting optional data from a webpage with `_search_regex`, `_html_search_regex` or similar methods, for instance:
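
The concrete example this sentence refers to falls outside the hunk shown here; a minimal sketch of such a call (the regex is made up) might look like:

```python
description = self._search_regex(
    r'<meta[^>]+name="description"[^>]+content="([^"]+)"',  # hypothetical pattern
    webpage, 'description', fatal=False)
```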

@@ -346,26 +346,42 @@ On failure this code will silently continue the extraction with `description` se

Another thing to remember is not to try to iterate over `None`

Say you extracted a list of thumbnails into `thumbnail_data` using `try_get` and now want to iterate over them
Say you extracted a list of thumbnails into `thumbnail_data` and want to iterate over them

```python
thumbnail_data = try_get(...)
thumbnail_data = data.get('thumbnails') or []
thumbnails = [{
    'url': item['url']
} for item in thumbnail_data or []]  # correct
    'url': item['url'],
    'height': item.get('h'),
} for item in thumbnail_data if item.get('url')]  # correct
```

and not like:

```python
thumbnail_data = try_get(...)
thumbnail_data = data.get('thumbnails')
thumbnails = [{
    'url': item['url']
    'url': item['url'],
    'height': item.get('h'),
} for item in thumbnail_data]  # incorrect
```

In the later case, `thumbnail_data` will be `None` if the field was not found and this will cause the loop `for item in thumbnail_data` to raise a fatal error. Using `for item in thumbnail_data or []` avoids this error and results in setting an empty list in `thumbnails` instead.
In this case, `thumbnail_data` will be `None` if the field was not found and this will cause the loop `for item in thumbnail_data` to raise a fatal error. Using `or []` avoids this error and results in setting an empty list in `thumbnails` instead.

Alternately, this can be further simplified by using `traverse_obj`

```python
thumbnails = [{
    'url': item['url'],
    'height': item.get('h'),
} for item in traverse_obj(data, ('thumbnails', lambda _, v: v['url']))]
```

or, even better,

```python
thumbnails = traverse_obj(data, ('thumbnails', ..., {'url': 'url', 'height': 'h'}))
```

### Provide fallbacks

@@ -431,7 +447,7 @@ title = self._search_regex(  # correct
    r'<span[^>]+class="title"[^>]*>([^<]+)', webpage, 'title')
```

Or even better:
which tolerates potential changes in the `style` attribute's value. Or even better:

```python
title = self._search_regex(  # correct
@@ -439,7 +455,7 @@ title = self._search_regex(  # correct
    webpage, 'title', group='title')
```

Note how you tolerate potential changes in the `style` attribute's value or switch from using double quotes to single for `class` attribute:
which also handles both single quotes in addition to double quotes.

The code definitely should not look like:

@@ -457,7 +473,42 @@ title = self._search_regex(  # incorrect
    webpage, 'title', group='title')
```

Here the presence or absence of other attributes including `style` is irrelevent for the data we need, and so the regex must not depend on it
Here the presence or absence of other attributes including `style` is irrelevant for the data we need, and so the regex must not depend on it


#### Keep the regular expressions as simple as possible, but no simpler

Since many extractors deal with unstructured data provided by websites, we will often need to use very complex regular expressions. You should try to use the *simplest* regex that can accomplish what you want. In other words, each part of the regex must have a reason for existing. If you can take out a symbol and the functionality does not change, the symbol should not be there.

##### Example

Correct:

```python
_VALID_URL = r'https?://(?:www\.)?website\.com/(?:[^/]+/){3,4}(?P<display_id>[^/]+)_(?P<id>\d+)'
```

Incorrect:

```python
_VALID_URL = r'https?:\/\/(?:www\.)?website\.com\/[^\/]+/[^\/]+/[^\/]+(?:\/[^\/]+)?\/(?P<display_id>[^\/]+)_(?P<id>\d+)'
```

#### Do not misuse `.` and use the correct quantifiers (`+*?`)

Avoid creating regexes that over-match because of wrong use of quantifiers. Also try to avoid non-greedy matching (`?`) where possible since it can easily result in [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)

Correct:

```python
title = self._search_regex(r'<span\b[^>]+class="title"[^>]*>([^<]+)', webpage, 'title')
```

Incorrect:

```python
title = self._search_regex(r'<span\b.*class="title".*>(.+?)<', webpage, 'title')
```


### Long lines policy

@@ -466,7 +517,7 @@ There is a soft limit to keep lines of code under 100 characters long. This mean

For example, you should **never** split long string literals like URLs or some other often copied entities over multiple lines to fit this limit:
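
The file's own examples sit outside this hunk; as a rough sketch of the rule (the URL is made up), the literal stays on one line even though it exceeds the limit:

```python
# Correct: keep the string literal intact, even if the line runs past 100 characters
manifest_url = 'https://cdn.example-videohost.com/v1/streams/1234567890/master.m3u8?token=abcdef0123456789abcdef'
```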

Conversely, don't unecessarily split small lines further. As a rule of thumb, if removing the line split keeps the code under 80 characters, it should be a single line.
Conversely, don't unnecessarily split small lines further. As a rule of thumb, if removing the line split keeps the code under 80 characters, it should be a single line.

##### Examples

@@ -521,19 +572,22 @@ formats = self._extract_m3u8_formats(m3u8_url,

### Quotes

Always use single quotes for strings (even if the string has `'`) and double quotes for docstrings. Use `'''` only for multi-line strings. An exception can be made if a string has multiple single quotes in it and escaping makes it significantly harder to read. For f-strings, you can use double quotes on the inside. But avoid f-strings that have too many quotes inside.
Always use single quotes for strings (even if the string has `'`) and double quotes for docstrings. Use `'''` only for multi-line strings. An exception can be made if a string has multiple single quotes in it and escaping makes it *significantly* harder to read. For f-strings, you can use double quotes on the inside. But avoid f-strings that have too many quotes inside.
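
As a brief, made-up illustration of the convention described above (the helper name is hypothetical):

```python
def _format_label(self, title):  # hypothetical helper, only to show the quoting style
    """Docstrings use double quotes."""
    prefix = 'single quotes for ordinary strings'
    return f'{prefix}: "{title}"'  # double quotes inside an f-string are fine
```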

### Inline values

Extracting variables is acceptable for reducing code duplication and improving readability of complex expressions. However, you should avoid extracting variables used only once and moving them to opposite parts of the extractor file, which makes reading the linear flow difficult.

#### Example
#### Examples

Correct:

```python
title = self._html_search_regex(r'<h1>([^<]+)</h1>', webpage, 'title')
return {
    'title': self._html_search_regex(r'<h1>([^<]+)</h1>', webpage, 'title'),
    # ...some lines of code...
}
```

Incorrect:
@@ -542,6 +596,11 @@ Incorrect:
TITLE_RE = r'<h1>([^<]+)</h1>'
# ...some lines of code...
title = self._html_search_regex(TITLE_RE, webpage, 'title')
# ...some lines of code...
return {
    'title': title,
    # ...some lines of code...
}
```


@@ -573,33 +632,32 @@ Methods supporting list of patterns are: `_search_regex`, `_html_search_regex`,

### Trailing parentheses

Always move trailing parentheses used for grouping/functions after the last argument. On the other hand, literal list/tuple/dict/set should be closed in a new line. Generators and list/dict comprehensions may use either style
Always move trailing parentheses used for grouping/functions after the last argument. On the other hand, multi-line literal list/tuple/dict/set should be closed in a new line. Generators and list/dict comprehensions may use either style

#### Examples

Correct:

```python
url = try_get(
    info,
    lambda x: x['ResultSet']['Result'][0]['VideoUrlSet']['VideoUrl'],
    list)
url = traverse_obj(info, (
    'context', 'dispatcher', 'stores', 'VideoTitlePageStore', 'data', 'video', 0, 'VideoUrlSet', 'VideoUrl'), list)
```
Correct:

```python
url = try_get(info,
    lambda x: x['ResultSet']['Result'][0]['VideoUrlSet']['VideoUrl'],
    list)
url = traverse_obj(
    info,
    ('context', 'dispatcher', 'stores', 'VideoTitlePageStore', 'data', 'video', 0, 'VideoUrlSet', 'VideoUrl'),
    list)
```

Incorrect:

```python
url = try_get(
url = traverse_obj(
    info,
    lambda x: x['ResultSet']['Result'][0]['VideoUrlSet']['VideoUrl'],
    list,
    ('context', 'dispatcher', 'stores', 'VideoTitlePageStore', 'data', 'video', 0, 'VideoUrlSet', 'VideoUrl'),
    list
)
```

@@ -648,21 +706,17 @@ Use `unified_strdate` for uniform `upload_date` or any `YYYYMMDD` meta field ext

Explore [`yt_dlp/utils.py`](yt_dlp/utils.py) for more useful convenience functions.
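
For instance, a tiny sketch of `unified_strdate` (imported from `..utils` inside an extractor; the input string is made up):

```python
upload_date = unified_strdate('2022/12/27')  # normalized to '20221227'
```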

#### More examples
#### Examples

##### Safely extract optional description from parsed JSON
```python
description = traverse_obj(response, ('result', 'video', 'summary'), expected_type=str)
```

##### Safely extract more optional metadata
```python
thumbnails = traverse_obj(response, ('result', 'thumbnails', ..., 'url'), expected_type=url_or_none)
video = traverse_obj(response, ('result', 'video', 0), default={}, expected_type=dict)
description = video.get('summary')
duration = float_or_none(video.get('durationMs'), scale=1000)
view_count = int_or_none(video.get('views'))
```


# My pull request is labeled pending-fixes

The `pending-fixes` label is added when there are changes requested to a PR. When the necessary changes are made, the label should be removed. However, despite our best efforts, it may sometimes happen that the maintainer did not see the changes or forgot to remove the label. If your PR is still marked as `pending-fixes` a few days after all requested changes have been made, feel free to ping the maintainer who labeled your issue and ask them to re-review and remove the label.
CONTRIBUTORS: 133 changes
@@ -3,6 +3,7 @@ shirt-dev (collaborator)
coletdjnz/colethedj (collaborator)
Ashish0804 (collaborator)
nao20010128nao/Lesmiscore (collaborator)
bashonly (collaborator)
h-h-h-h
pauldubois98
nixxo
@@ -248,3 +249,135 @@ rand-net
vertan
Wikidepia
Yipten
moench-tegeder
christoph-heinrich
HobbyistDev
LunarFang416
sbor23
aurelg
adamanldo
gamer191
vkorablin
Burve
mnn
ZhymabekRoman
mozbugbox
aejdl
ping
sqrtNOT
bubbleguuum
darkxex
miseran
StefanLobbenmeier
crazymoose77756
nomevi
Brett824
pingiun
dosy4ev
EhtishamSabir
Ferdi265
FirefoxMetzger
ftk
lamby
llamasblade
lockmatrix
misaelaguayo
odo2063
pritam20ps05
scy
sheerluck
AxiosDeminence
DjesonPV
eren-kemer
freezboltz
Galiley
haobinliang
Mehavoid
winterbird-code
yashkc2025
aldoridhoni
jacobtruman
masta79
palewire
cgrigis
DavidH-2022
dfaker
jackyyf
ohaiibuzzle
SamantazFox
shreyasminocha
tejasa97
xenov
satan1st
0xGodspeed
5736d79
587021c
basrieter
Bobscorn
CNugteren
columndeeply
DoubleCouponDay
Fabi019
GautamMKGarg
Grub4K
itachi-19
jeroenj
josanabr
LiviaMedeiros
nikita-moor
snapdgn
SuperSonicHub1
tannertechnology
Timendum
tobi1805
TokyoBlackHole
ajayyy
Alienmaster
bsun0000
changren-wcr
ClosedPort22
CrankDatSouljaBoy
cruel-efficiency
endotronic
Generator
gibson042
How-Bout-No
invertico
jahway603
jwoglom
lksj
megapro17
mlampe
MrOctopus
nosoop
puc9
sashashura
schnusch
SG5
the-marenga
tkgmomosheep
vitkhab
glensc
synthpop123
tntmod54321
milkknife
Bnyro
CapacitorSet
stelcodes
skbeh
muddi900
digitall
chengzhicn
mexus
JChris246
redraskal
Spicadox
barsnick
docbender
KurtBestor
Chrissi2812
FrederikNS
gschizas
JC-Chung
mzhou
OndrejBakan
Changelog.md: 800 changes
@@ -11,6 +11,796 @@
-->


### 2023.01.06

* Fix config locations by [Grub4k](https://github.com/Grub4k), [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
* [downloader/aria2c] Disable native progress
* [utils] `mimetype2ext`: `weba` is not standard
* [utils] `windows_enable_vt_mode`: Better error handling
* [build] Add minimal `pyproject.toml`
* [update] Fix updater file removal on windows by [Grub4K](https://github.com/Grub4K)
* [cleanup] Misc fixes and cleanup
* [extractor/aitube] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/drtv] Add series extractors by [FrederikNS](https://github.com/FrederikNS)
* [extractor/volejtv] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/xanimu] Add extractor by [JChris246](https://github.com/JChris246)
* [extractor/youtube] Retry manifest refresh for live-from-start by [mzhou](https://github.com/mzhou)
* [extractor/biliintl] Add `/media` to `VALID_URL` by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/biliIntl] Add fallback to `video_data` by [HobbyistDev](https://github.com/HobbyistDev)
* [extractor/crunchyroll:show] Add `language` to entries by [Chrissi2812](https://github.com/Chrissi2812)
* [extractor/joj] Fix extractor by [OndrejBakan](https://github.com/OndrejBakan), [pukkandan](https://github.com/pukkandan)
* [extractor/nbc] Update graphql query by [jacobtruman](https://github.com/jacobtruman)
* [extractor/reddit] Add subreddit as `channel_id` by [gschizas](https://github.com/gschizas)
* [extractor/tiktok] Add `TikTokLive` extractor by [JC-Chung](https://github.com/JC-Chung)

### 2023.01.02

* **Improve plugin architecture** by [Grub4K](https://github.com/Grub4K), [coletdjnz](https://github.com/coletdjnz), [flashdagger](https://github.com/flashdagger), [pukkandan](https://github.com/pukkandan)
    * Plugins can be loaded in any distribution of yt-dlp (binary, pip, source, etc.) and can be distributed and installed as packages. See [the readme](https://github.com/yt-dlp/yt-dlp/tree/05997b6e98e638d97d409c65bb5eb86da68f3b64#plugins) for more information
* Add `--compat-options 2021,2022`
    * This allows devs to change defaults and make other potentially breaking changes more easily. If you need everything to work exactly as-is, put `--compat-options 2022` in your config to guard against future compat changes.
* [downloader/aria2c] Native progress for aria2c via RPC by [Lesmiscore](https://github.com/Lesmiscore), [pukkandan](https://github.com/pukkandan)
|
||||
* Merge youtube-dl: Upto [commit/195f22f](https://github.com/ytdl-org/youtube-dl/commit/195f22f6) by [Grub4k](https://github.com/Grub4k), [pukkandan](https://github.com/pukkandan)
|
||||
* Add pre-processor stage `video`
|
||||
* Let `--parse/replace-in-metadata` run at any post-processing stage
|
||||
* Add `--enable-file-urls` by [coletdjnz](https://github.com/coletdjnz)
|
||||
* Add new field `aspect_ratio`
|
||||
* Add `ac4` to known codecs
|
||||
* Add `weba` to known extensions
|
||||
* [FFmpegVideoConvertor] Add `gif` to `--recode-video`
|
||||
* Add message when there are no subtitles/thumbnails
|
||||
* Deprioritize HEVC-over-FLV formats by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* Make early reject of `--match-filter` stricter
|
||||
* Fix `--cookies-from-browser` CLI parsing
|
||||
* Fix `original_url` in playlists
|
||||
* Fix bug in writing playlist info-json
|
||||
* Fix bugs in `PlaylistEntries`
|
||||
* [downloader/ffmpeg] Fix headers for video+audio formats by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly)
|
||||
* [extractor] Add a way to distinguish IEs that return only videos
|
||||
* [extractor] Implement universal format sorting and deprecate `_sort_formats`
|
||||
* [extractor] Let `_extract_format` functions obey `--ignore-no-formats`
|
||||
* [extractor/generic] Add `fragment_query` extractor arg for DASH and HLS by [bashonly](https://github.com/bashonly), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/generic] Decode unicode-escaped embed URLs by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/generic] Don't report redirect to https
|
||||
* [extractor/generic] Fix JSON LD manifest extraction by [bashonly](https://github.com/bashonly), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/generic] Use `Accept-Encoding: identity` for initial request by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [FormatSort] Add `mov` to `vext`
|
||||
* [jsinterp] Escape regex that looks like nested set
|
||||
* [webvtt] Handle premature EOF by [flashdagger](https://github.com/flashdagger)
|
||||
* [utils] `classproperty`: Add cache support
|
||||
* [utils] `get_exe_version`: Detect broken executables by [dirkf](https://github.com/dirkf), [pukkandan](https://github.com/pukkandan)
|
||||
* [utils] `js_to_json`: Fix bug in [f55523c](https://github.com/yt-dlp/yt-dlp/commit/f55523c) by [ChillingPepper](https://github.com/ChillingPepper), [pukkandan](https://github.com/pukkandan)
|
||||
* [utils] Make `ExtractorError` mutable
|
||||
* [utils] Move `FileDownloader.parse_bytes` into utils
|
||||
* [utils] Move format sorting code into `utils`
|
||||
* [utils] `windows_enable_vt_mode`: Proper implementation by [Grub4K](https://github.com/Grub4K)
|
||||
* [update] Workaround [#5632](https://github.com/yt-dlp/yt-dlp/issues/5632)
|
||||
* [docs] Improvements
|
||||
* [cleanup] Misc fixes and cleanup
|
||||
* [cleanup] Use `random.choices` by [freezboltz](https://github.com/freezboltz)
|
||||
* [extractor/airtv] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/amazonminitv] Add extractors by [GautamMKGarg](https://github.com/GautamMKGarg), [nyuszika7h](https://github.com/nyuszika7h)
|
||||
* [extractor/beatbump] Add extractors by [Bobscorn](https://github.com/Bobscorn), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/europarl] Add EuroParlWebstream extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/kanal2] Add extractor by [bashonly](https://github.com/bashonly), [glensc](https://github.com/glensc), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/kankanews] Add extractor by [synthpop123](https://github.com/synthpop123)
|
||||
* [extractor/kick] Add extractor by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/mediastream] Add extractor by [HobbyistDev](https://github.com/HobbyistDev), [elyse0](https://github.com/elyse0)
|
||||
* [extractor/noice] Add NoicePodcast extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/oneplace] Add OnePlacePodcast extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/rumble] Add RumbleIE extractor by [flashdagger](https://github.com/flashdagger)
|
||||
* [extractor/screencastify] Add extractor by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/trtcocuk] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/Veoh] Add user extractor by [tntmod54321](https://github.com/tntmod54321)
|
||||
* [extractor/videoken] Add extractors by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/webcamerapl] Add extractor by [milkknife](https://github.com/milkknife)
|
||||
* [extractor/amazon] Add `AmazonReviews` extractor by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/netverse] Add `NetverseSearch` extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/vimeo] Add `VimeoProIE` by [bashonly](https://github.com/bashonly), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/xiami] Remove extractors by [synthpop123](https://github.com/synthpop123)
|
||||
* [extractor/youtube] Add `piped.video` by [Bnyro](https://github.com/Bnyro)
|
||||
* [extractor/youtube] Consider language in format de-duplication
|
||||
* [extractor/youtube] Extract DRC formats
|
||||
* [extractor/youtube] Fix `ytuser:`
|
||||
* [extractor/youtube] Fix bug in handling of music URLs
|
||||
* [extractor/youtube] Subtitles cannot be translated to `und`
|
||||
* [extractor/youtube:tab] Extract metadata from channel items by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/ARD] Add vtt subtitles by [CapacitorSet](https://github.com/CapacitorSet)
|
||||
* [extractor/ArteTV] Extract chapters by [bashonly](https://github.com/bashonly), [iw0nderhow](https://github.com/iw0nderhow)
|
||||
* [extractor/bandcamp] Add `album_artist` by [stelcodes](https://github.com/stelcodes)
|
||||
* [extractor/bilibili] Fix `--no-playlist` for anthology
|
||||
* [extractor/bilibili] Improve `_VALID_URL` by [skbeh](https://github.com/skbeh)
|
||||
* [extractor/biliintl:series] Make partial download of series faster
|
||||
* [extractor/BiliLive] Fix extractor
|
||||
* [extractor/brightcove] Add `BrightcoveNewBaseIE` and fix embed extraction
|
||||
* [extractor/cda] Support premium and misc improvements by [selfisekai](https://github.com/selfisekai)
|
||||
* [extractor/ciscowebex] Support password-protected videos by [damianoamatruda](https://github.com/damianoamatruda)
|
||||
* [extractor/curiositystream] Fix auth by [mnn](https://github.com/mnn)
|
||||
* [extractor/embedly] Handle vimeo embeds
|
||||
* [extractor/fifa] Fix Preplay extraction by [dirkf](https://github.com/dirkf)
|
||||
* [extractor/foxsports] Fix extractor by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/gronkh] Fix `_VALID_URL` by [muddi900](https://github.com/muddi900)
|
||||
* [extractor/hotstar] Improve format metadata
|
||||
* [extractor/iqiyi] Fix `Iq` JS regex by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/la7] Improve extractor by [nixxo](https://github.com/nixxo)
|
||||
* [extractor/mediaset] Better embed detection and error messages by [nixxo](https://github.com/nixxo)
|
||||
* [extractor/mixch] Support `--wait-for-video`
|
||||
* [extractor/naver] Improve `_VALID_URL` for `NaverNowIE` by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/naver] Treat fan subtitles as separate language
|
||||
* [extractor/netverse] Extract comments by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/nosnl] Add support for /video by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/odnoklassniki] Extract subtitles by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/pinterest] Fix extractor by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/plutotv] Fix videos with non-zero start by [digitall](https://github.com/digitall)
|
||||
* [extractor/polskieradio] Adapt to next.js redesigns by [selfisekai](https://github.com/selfisekai)
|
||||
* [extractor/reddit] Add vcodec to fallback format by [chengzhicn](https://github.com/chengzhicn)
|
||||
* [extractor/reddit] Extract crossposted media by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/reddit] Extract video embeds in text posts by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/rutube] Support private videos by [mexus](https://github.com/mexus)
|
||||
* [extractor/sibnet] Separate from VKIE
|
||||
* [extractor/slideslive] Fix extractor by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly)
|
||||
* [extractor/slideslive] Support embeds and slides by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/soundcloud] Support user permalink by [nosoop](https://github.com/nosoop)
|
||||
* [extractor/spankbang] Fix extractor by [JChris246](https://github.com/JChris246)
|
||||
* [extractor/stv] Detect DRM
|
||||
* [extractor/swearnet] Fix description bug
|
||||
* [extractor/tencent] Fix geo-restricted video by [elyse0](https://github.com/elyse0)
|
||||
* [extractor/tiktok] Fix subs, `DouyinIE`, improve `_VALID_URL` by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/tiktok] Update `_VALID_URL`, add `api_hostname` arg by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/tiktok] Update API hostname by [redraskal](https://github.com/redraskal)
|
||||
* [extractor/twitcasting] Fix videos with password by [Spicadox](https://github.com/Spicadox), [bashonly](https://github.com/bashonly)
|
||||
* [extractor/twitter] Heed `--no-playlist` for multi-video tweets by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly)
|
||||
* [extractor/twitter] Refresh guest token when expired by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly)
|
||||
* [extractor/twitter:spaces] Add `Referer` to m3u8 by [nixxo](https://github.com/nixxo)
|
||||
* [extractor/udemy] Fix lectures that have no URL and detect DRM
|
||||
* [extractor/unsupported] Add more URLs
|
||||
* [extractor/urplay] Support for audio-only formats by [barsnick](https://github.com/barsnick)
|
||||
* [extractor/wistia] Improve extension detection by [Grub4k](https://github.com/Grub4k), [bashonly](https://github.com/bashonly), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/yle_areena] Support restricted videos by [docbender](https://github.com/docbender)
|
||||
* [extractor/youku] Fix extractor by [KurtBestor](https://github.com/KurtBestor)
|
||||
* [extractor/youporn] Fix metadata by [marieell](https://github.com/marieell)
|
||||
* [extractor/redgifs] Fix bug in [8c188d5](https://github.com/yt-dlp/yt-dlp/commit/8c188d5d09177ed213a05c900d3523867c5897fd)
|
||||
|
||||
|
||||
### 2022.11.11
|
||||
|
||||
* Merge youtube-dl: Upto [commit/de39d12](https://github.com/ytdl-org/youtube-dl/commit/de39d128)
|
||||
* Backport SSL configuration from Python 3.10 by [coletdjnz](https://github.com/coletdjnz)
|
||||
* Do more processing in `--flat-playlist`
|
||||
* Fix `--list` options not implying `-s` in some cases by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly)
|
||||
* Fix end time of clips by [cruel-efficiency](https://github.com/cruel-efficiency)
|
||||
* Fix for `formats=None`
|
||||
* Write API params in debug head
|
||||
* [outtmpl] Ensure ASCII in json and add option for Unicode
|
||||
* [SponsorBlock] Add `type` field, obey `--retry-sleep extractor`, relax duration check for large segments
|
||||
* [SponsorBlock] **Support `chapter` category** by [ajayyy](https://github.com/ajayyy), [pukkandan](https://github.com/pukkandan)
|
||||
* [ThumbnailsConvertor] Fix filename escaping by [dirkf](https://github.com/dirkf), [pukkandan](https://github.com/pukkandan)
|
||||
* [ModifyChapters] Handle the entire video being marked for removal
|
||||
* [embedthumbnail] Fix thumbnail name in mp3 by [How-Bout-No](https://github.com/How-Bout-No)
|
||||
* [downloader/fragment] HLS download can continue without first fragment
|
||||
* [cookies] Improve `LenientSimpleCookie` by [Grub4K](https://github.com/Grub4K)
|
||||
* [jsinterp] Improve separating regex
|
||||
* [extractor/common] Fix `fatal=False` for `_search_nuxt_data`
|
||||
* [extractor/common] Improve `_generic_title`
|
||||
* [extractor/common] Fix `json_ld` type checks by [Grub4K](https://github.com/Grub4K)
|
||||
* [extractor/generic] Separate embed extraction into own function
|
||||
* [extractor/generic:quoted-html] Add extractor by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/unsupported] Raise error on known DRM-only sites by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [utils] `js_to_json`: Improve escape handling by [Grub4K](https://github.com/Grub4K)
|
||||
* [utils] `strftime_or_none`: Workaround Python bug on Windows
|
||||
* [utils] `traverse_obj`: Always return list when branching, allow `re.Match` objects by [Grub4K](https://github.com/Grub4K)
|
||||
* [build, test] Harden workflows' security by [sashashura](https://github.com/sashashura)
|
||||
* [build] `py2exe`: Migrate to freeze API by [SG5](https://github.com/SG5), [pukkandan](https://github.com/pukkandan)
|
||||
* [build] Create `armv7l` and `aarch64` releases by [MrOctopus](https://github.com/MrOctopus), [pukkandan](https://github.com/pukkandan)
|
||||
* [build] Make linux binary truly standalone using `conda` by [mlampe](https://github.com/mlampe)
|
||||
* [build] Replace `set-output` with `GITHUB_OUTPUT` by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [update] Use error code `100` for update errors
|
||||
* [compat] Fix `shutils.move` in restricted ACL mode on BSD by [ClosedPort22](https://github.com/ClosedPort22), [pukkandan](https://github.com/pukkandan)
|
||||
* [docs, devscripts] Document `pyinst`'s argument passthrough by [jahway603](https://github.com/jahway603)
|
||||
* [test] Allow `extract_flat` in download tests by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
|
||||
* [cleanup] Misc fixes and cleanup by [pukkandan](https://github.com/pukkandan), [Alienmaster](https://github.com/Alienmaster)
|
||||
* [extractor/aeon] Add extractor by [DoubleCouponDay](https://github.com/DoubleCouponDay)
|
||||
* [extractor/agora] Add extractors by [selfisekai](https://github.com/selfisekai)
|
||||
* [extractor/camsoda] Add extractor by [zulaport](https://github.com/zulaport)
|
||||
* [extractor/cinetecamilano] Add extractor by [timendum](https://github.com/timendum)
|
||||
* [extractor/deuxm] Add extractors by [CrankDatSouljaBoy](https://github.com/CrankDatSouljaBoy)
|
||||
* [extractor/genius] Add extractors by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/japandiet] Add extractors by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/listennotes] Add extractor by [lksj](https://github.com/lksj), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/nos.nl] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/oftv] Add extractors by [DoubleCouponDay](https://github.com/DoubleCouponDay)
|
||||
* [extractor/podbayfm] Add extractor by [schnusch](https://github.com/schnusch)
|
||||
* [extractor/qingting] Add extractor by [bashonly](https://github.com/bashonly), [changren-wcr](https://github.com/changren-wcr)
|
||||
* [extractor/screen9] Add extractor by [tpikonen](https://github.com/tpikonen)
|
||||
* [extractor/swearnet] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/YleAreena] Add extractor by [pukkandan](https://github.com/pukkandan), [vitkhab](https://github.com/vitkhab)
|
||||
* [extractor/zeenews] Add extractor by [m4tu4g](https://github.com/m4tu4g), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/youtube:tab] **Update tab handling for redesign** by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
|
||||
* Channel URLs download all uploads of the channel as multiple playlists, separated by tab
|
||||
* [extractor/youtube] Differentiate between no comments and disabled comments by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/youtube] Extract `concurrent_view_count` for livestreams by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/youtube] Fix `duration` for premieres by [nosoop](https://github.com/nosoop)
|
||||
* [extractor/youtube] Fix `live_status` by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/youtube] Ignore incomplete data error for comment replies by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/youtube] Improve chapter parsing from description
|
||||
* [extractor/youtube] Mark videos as fully watched by [bsun0000](https://github.com/bsun0000)
|
||||
* [extractor/youtube] Update piped instances by [Generator](https://github.com/Generator)
|
||||
* [extractor/youtube] Update playlist metadata extraction for new layout by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/youtube:tab] Fix video metadata from tabs by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/youtube:tab] Let `approximate_date` return timestamp
|
||||
* [extractor/americastestkitchen] Fix extractor by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/bbc] Support onion domains by [DoubleCouponDay](https://github.com/DoubleCouponDay)
|
||||
* [extractor/bilibili] Add chapters and misc cleanup by [lockmatrix](https://github.com/lockmatrix), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/bilibili] Fix BilibiliIE and Bangumi extractors by [lockmatrix](https://github.com/lockmatrix), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/bitchute] Better error for geo-restricted videos by [flashdagger](https://github.com/flashdagger)
|
||||
* [extractor/bitchute] Improve `BitChuteChannelIE` by [flashdagger](https://github.com/flashdagger), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/bitchute] Simplify extractor by [flashdagger](https://github.com/flashdagger), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/cda] Support login through API by [selfisekai](https://github.com/selfisekai)
|
||||
* [extractor/crunchyroll] Beta is now the only layout by [tejing1](https://github.com/tejing1)
|
||||
* [extractor/detik] Avoid unnecessary extraction
|
||||
* [extractor/doodstream] Remove extractor
|
||||
* [extractor/dplay] Add MotorTrendOnDemand extractor by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/epoch] Support videos without data-trailer by [gibson042](https://github.com/gibson042), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/fox] Extract thumbnail by [vitkhab](https://github.com/vitkhab)
|
||||
* [extractor/foxnews] Add `FoxNewsVideo` extractor
|
||||
* [extractor/hotstar] Add season support by [m4tu4g](https://github.com/m4tu4g)
|
||||
* [extractor/hotstar] Refactor v1 API calls
|
||||
* [extractor/iprima] Make json+ld non-fatal by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/iq] Increase phantomjs timeout
|
||||
* [extractor/kaltura] Support playlists by [jwoglom](https://github.com/jwoglom), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/lbry] Authenticate with cookies by [flashdagger](https://github.com/flashdagger)
|
||||
* [extractor/livestreamfails] Support posts by [invertico](https://github.com/invertico)
|
||||
* [extractor/mlb] Add `MLBArticle` extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/mxplayer] Improve extractor by [m4tu4g](https://github.com/m4tu4g)
|
||||
* [extractor/niconico] Always use HTTPS for requests
|
||||
* [extractor/nzherald] Support new video embed by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/odnoklassniki] Support boosty.to embeds by [Lesmiscore](https://github.com/Lesmiscore), [megapro17](https://github.com/megapro17), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/paramountplus] Update API token by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/reddit] Add fallback format by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/redgifs] Fix extractors by [bashonly](https://github.com/bashonly), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/redgifs] Refresh auth token for 401 by [endotronic](https://github.com/endotronic), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/rumble] Add HLS formats and extract more metadata by [flashdagger](https://github.com/flashdagger)
|
||||
* [extractor/sbs] Improve `_VALID_URL` by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/skyit] Fix extractors by [nixxo](https://github.com/nixxo)
|
||||
* [extractor/stripchat] Fix hostname for HLS stream by [zulaport](https://github.com/zulaport)
|
||||
* [extractor/stripchat] Improve error message by [freezboltz](https://github.com/freezboltz)
|
||||
* [extractor/telegram] Add playlist support and more metadata by [bashonly](https://github.com/bashonly), [bsun0000](https://github.com/bsun0000)
|
||||
* [extractor/Tnaflix] Fix for HTTP 500 by [SG5](https://github.com/SG5), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/tubitv] Better DRM detection by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/tvp] Update extractors by [selfisekai](https://github.com/selfisekai)
|
||||
* [extractor/twitcasting] Fix `data-movie-playlist` extraction by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/twitter] Add onion site to `_VALID_URL` by [DoubleCouponDay](https://github.com/DoubleCouponDay)
|
||||
* [extractor/twitter] Add Spaces extractor and GraphQL API by [Grub4K](https://github.com/Grub4K), [bashonly](https://github.com/bashonly), [nixxo](https://github.com/nixxo), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/twitter] Support multi-video posts by [Grub4K](https://github.com/Grub4K)
|
||||
* [extractor/uktvplay] Fix `_VALID_URL`
|
||||
* [extractor/viu] Support subtitles of on-screen text by [tkgmomosheep](https://github.com/tkgmomosheep)
|
||||
* [extractor/VK] Fix playlist URLs by [the-marenga](https://github.com/the-marenga)
|
||||
* [extractor/vlive] Extract `release_timestamp`
|
||||
* [extractor/voot] Improve `_VALID_URL` by [freezboltz](https://github.com/freezboltz)
|
||||
* [extractor/wordpress:mb.miniAudioPlayer] Add embed extractor by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/YoutubeWebArchive] Improve metadata extraction by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/zee5] Improve `_VALID_URL` by [m4tu4g](https://github.com/m4tu4g)
|
||||
* [extractor/zenyandex] Fix extractors by [lksj](https://github.com/lksj), [puc9](https://github.com/puc9), [pukkandan](https://github.com/pukkandan)
|
||||
|
||||
|
||||
### 2022.10.04
|
||||
|
||||
* Allow a `set` to be passed as `download_archive` by [pukkandan](https://github.com/pukkandan), [bashonly](https://github.com/bashonly)
|
||||
* Allow open ranges for time ranges by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* Allow plugin extractors to replace the built-in ones
|
||||
* Don't download entire video when no matching `--download-sections`
|
||||
* Fix `--config-location -`
|
||||
* Improve [5736d79](https://github.com/yt-dlp/yt-dlp/pull/5044/commits/5736d79172c47ff84740d5720467370a560febad)
|
||||
* Fix for when playlists don't have `webpage_url`
|
||||
* Support environment variables in `--ffmpeg-location`
|
||||
* Workaround `libc_ver` not being available on Windows Store version of Python
|
||||
* [outtmpl] Curly braces to filter keys by [pukkandan](https://github.com/pukkandan)
|
||||
* [outtmpl] Make `%s` work in strfformat for all systems
|
||||
* [jsinterp] Workaround operator associativity issue
|
||||
* [cookies] Let `_get_mac_keyring_password` fail gracefully
|
||||
* [cookies] Parse cookies leniently by [Grub4K](https://github.com/Grub4K)
|
||||
* [phantomjs] Fix bug in [587021c](https://github.com/yt-dlp/yt-dlp/commit/587021cd9f717181b44e881941aca3f8d753758b) by [elyse0](https://github.com/elyse0)
|
||||
* [downloader/aria2c] Fix filename containing leading whitespace by [std-move](https://github.com/std-move)
|
||||
* [downloader/ism] Support ec-3 codec by [nixxo](https://github.com/nixxo)
|
||||
* [extractor] Fix `fatal=False` in `RetryManager`
|
||||
* [extractor] Improve json-ld extraction
|
||||
* [extractor] Make `_search_json` able to parse lists
|
||||
* [extractor] Escape `%` in `representation_id` of m3u8
|
||||
* [extractor/generic] Pass through referer from json-ld
|
||||
* [utils] `base_url`: URL paths can contain `&` by [elyse0](https://github.com/elyse0)
|
||||
* [utils] `js_to_json`: Improve
|
||||
* [utils] `Popen.run`: Fix default return in binary mode
|
||||
* [utils] `traverse_obj`: Rewrite, document and add tests by [Grub4K](https://github.com/Grub4K)
|
||||
* [devscripts] `make_lazy_extractors`: Fix for Docker by [josanabr](https://github.com/josanabr)
|
||||
* [docs] Misc Improvements
|
||||
* [cleanup] Misc fixes and cleanup by [pukkandan](https://github.com/pukkandan), [gamer191](https://github.com/gamer191)
|
||||
* [extractor/24tv.ua] Add extractors by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/BerufeTV] Add extractor by [Fabi019](https://github.com/Fabi019)
|
||||
* [extractor/booyah] Add extractor by [HobbyistDev](https://github.com/HobbyistDev), [elyse0](https://github.com/elyse0)
|
||||
* [extractor/bundesliga] Add extractor by [Fabi019](https://github.com/Fabi019)
|
||||
* [extractor/GoPlay] Add extractor by [CNugteren](https://github.com/CNugteren), [basrieter](https://github.com/basrieter), [jeroenj](https://github.com/jeroenj)
|
||||
* [extractor/iltalehti] Add extractor by [tpikonen](https://github.com/tpikonen)
|
||||
* [extractor/IsraelNationalNews] Add extractor by [Bobscorn](https://github.com/Bobscorn)
|
||||
* [extractor/mediaworksnzvod] Add extractor by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/MicrosoftEmbed] Add extractor by [DoubleCouponDay](https://github.com/DoubleCouponDay)
|
||||
* [extractor/nbc] Add NBCStations extractor by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/onenewsnz] Add extractor by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/prankcast] Add extractor by [HobbyistDev](https://github.com/HobbyistDev), [columndeeply](https://github.com/columndeeply)
|
||||
* [extractor/Smotrim] Add extractor by [Lesmiscore](https://github.com/Lesmiscore), [nikita-moor](https://github.com/nikita-moor)
|
||||
* [extractor/tencent] Add Iflix extractor by [elyse0](https://github.com/elyse0)
|
||||
* [extractor/unscripted] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/adobepass] Add MSO AlticeOne (Optimum TV) by [CplPwnies](https://github.com/CplPwnies)
|
||||
* [extractor/youtube] **Download `post_live` videos from start** by [Lesmiscore](https://github.com/Lesmiscore), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/youtube] Add support for Shorts audio pivot feed by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/youtube] Detect `lazy-load-for-videos` embeds
|
||||
* [extractor/youtube] Do not warn on duplicate chapters
|
||||
* [extractor/youtube] Fix video like count extraction by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/youtube] Support changing extraction language by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/youtube:tab] Improve continuation items extraction
|
||||
* [extractor/youtube:tab] Support `reporthistory` page
|
||||
* [extractor/amazonstore] Fix JSON extraction by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/amazonstore] Retry to avoid captcha page by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/animeondemand] Remove extractor by [TokyoBlackHole](https://github.com/TokyoBlackHole)
|
||||
* [extractor/anvato] Fix extractor and refactor by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/artetv] Remove duplicate stream urls by [Grub4K](https://github.com/Grub4K)
|
||||
* [extractor/audioboom] Support direct URLs and refactor by [pukkandan](https://github.com/pukkandan), [tpikonen](https://github.com/tpikonen)
|
||||
* [extractor/bandcamp] Extract `uploader_url`
|
||||
* [extractor/bilibili] Add space.bilibili extractors by [lockmatrix](https://github.com/lockmatrix)
|
||||
* [extractor/BilibiliSpace] Fix extractor and better error message by [lockmatrix](https://github.com/lockmatrix)
|
||||
* [extractor/BiliIntl] Support uppercase lang in `_VALID_URL` by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/BiliIntlSeries] Fix `_VALID_URL`
|
||||
* [extractor/bongacams] Update `_VALID_URL` by [0xGodspeed](https://github.com/0xGodspeed)
|
||||
* [extractor/crunchyroll:beta] Improve handling of hardsubs by [Grub4K](https://github.com/Grub4K)
|
||||
* [extractor/detik] Generalize extractors by [HobbyistDev](https://github.com/HobbyistDev), [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/dplay:italy] Add default authentication by [Timendum](https://github.com/Timendum)
|
||||
* [extractor/heise] Fix extractor by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/holodex] Fix `_VALID_URL` by [LiviaMedeiros](https://github.com/LiviaMedeiros)
|
||||
* [extractor/hrfensehen] Fix extractor by [snapdgn](https://github.com/snapdgn)
|
||||
* [extractor/hungama] Add subtitle by [GautamMKGarg](https://github.com/GautamMKGarg), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/instagram] Extract more metadata by [pritam20ps05](https://github.com/pritam20ps05)
|
||||
* [extractor/JWPlatform] Fix extractor by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/malltv] Fix video_id extraction by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/MLBTV] Detect live streams
|
||||
* [extractor/motorsport] Support native embeds
|
||||
* [extractor/Mxplayer] Fix extractor by [itachi-19](https://github.com/itachi-19)
|
||||
* [extractor/nebula] Add nebula.tv by [tannertechnology](https://github.com/tannertechnology)
|
||||
* [extractor/nfl] Fix extractor by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/ondemandkorea] Update `jw_config` regex by [julien-hadleyjack](https://github.com/julien-hadleyjack)
|
||||
* [extractor/paramountplus] Better DRM detection by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/patreon] Sort formats
|
||||
* [extractor/rcs] Fix embed extraction by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/redgifs] Fix extractor by [jhwgh1968](https://github.com/jhwgh1968)
|
||||
* [extractor/rutube] Fix `_EMBED_REGEX` by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/RUTV] Fix warnings for livestreams by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/soundcloud:search] More metadata in `--flat-playlist` by [SuperSonicHub1](https://github.com/SuperSonicHub1)
|
||||
* [extractor/telegraaf] Use mobile GraphQL API endpoint by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/tennistv] Fix timestamp by [zenerdi0de](https://github.com/zenerdi0de)
|
||||
* [extractor/tiktok] Fix TikTokIE by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/triller] Fix auth token by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/trovo] Fix extractors by [Mehavoid](https://github.com/Mehavoid)
|
||||
* [extractor/tv2] Support new url format by [tobi1805](https://github.com/tobi1805)
|
||||
* [extractor/web.archive:youtube] Fix `_YT_INITIAL_PLAYER_RESPONSE_RE`
|
||||
* [extractor/wistia] Add support for channels by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/wistia] Match IDs in embed URLs by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/wordpress:playlist] Add generic embed extractor by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/yandexvideopreview] Update `_VALID_URL` by [Grub4K](https://github.com/Grub4K)
|
||||
* [extractor/zee5] Fix `_VALID_URL` by [m4tu4g](https://github.com/m4tu4g)
|
||||
* [extractor/zee5] Generate device ids by [freezboltz](https://github.com/freezboltz)
|
||||
|
||||
|
||||
### 2022.09.01
|
||||
|
||||
* Add option `--use-extractors`
|
||||
* Merge youtube-dl: Upto [commit/ed5c44e](https://github.com/ytdl-org/youtube-dl/commit/ed5c44e7)
|
||||
* Add yt-dlp version to infojson
|
||||
* Fix `--break-per-url --max-downloads`
|
||||
* Fix bug in `--alias`
|
||||
* [cookies] Support firefox container in `--cookies-from-browser` by [bashonly](https://github.com/bashonly), [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
|
||||
* [downloader/external] Smarter detection of executable
|
||||
* [extractor/generic] Don't return JW player without formats
|
||||
* [FormatSort] Fix `aext` for `--prefer-free-formats`
|
||||
* [jsinterp] Various improvements by [pukkandan](https://github.com/pukkandan), [dirkf](https://github.com/dirkf), [elyse0](https://github.com/elyse0)
|
||||
* [cache] Mechanism to invalidate old cache
|
||||
* [utils] Add `deprecation_warning`
|
||||
* [utils] Add `orderedSet_from_options`
|
||||
* [utils] `Popen`: Restore `LD_LIBRARY_PATH` when using PyInstaller by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [build] `make tar` should not follow `DESTDIR` by [satan1st](https://github.com/satan1st)
|
||||
* [build] Update pyinstaller by [shirt-dev](https://github.com/shirt-dev)
|
||||
* [test] Fix `test_youtube_signature`
|
||||
* [cleanup] Misc fixes and cleanup by [DavidH-2022](https://github.com/DavidH-2022), [MrRawes](https://github.com/MrRawes), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/epoch] Add extractor by [tejasa97](https://github.com/tejasa97)
|
||||
* [extractor/eurosport] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/IslamChannel] Add extractors by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/newspicks] Add extractor by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/triller] Add extractor by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/VQQ] Add extractors by [elyse0](https://github.com/elyse0)
|
||||
* [extractor/youtube] Improvements to nsig extraction
|
||||
* [extractor/youtube] Fix bug in format sorting
|
||||
* [extractor/youtube] Update iOS Innertube clients by [SamantazFox](https://github.com/SamantazFox)
|
||||
* [extractor/youtube] Use device-specific user agent by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/youtube] Add `--compat-option no-youtube-prefer-utc-upload-date` by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/arte] Bug fix by [cgrigis](https://github.com/cgrigis)
|
||||
* [extractor/bilibili] Extract `flac` with premium account by [jackyyf](https://github.com/jackyyf)
|
||||
* [extractor/BiliBiliSearch] Don't sort by date
|
||||
* [extractor/BiliBiliSearch] Fix infinite loop
|
||||
* [extractor/bitchute] Mark errors as expected
|
||||
* [extractor/crunchyroll:beta] Use anonymous access by [tejing1](https://github.com/tejing1)
|
||||
* [extractor/huya] Fix stream extraction by [ohaiibuzzle](https://github.com/ohaiibuzzle)
|
||||
* [extractor/medaltv] Fix extraction by [xenova](https://github.com/xenova)
|
||||
* [extractor/mediaset] Fix embed extraction
|
||||
* [extractor/mixcloud] All formats are audio-only
|
||||
* [extractor/rtbf] Fix jwt extraction by [elyse0](https://github.com/elyse0)
|
||||
* [extractor/screencastomatic] Support `--video-password` by [shreyasminocha](https://github.com/shreyasminocha)
|
||||
* [extractor/stripchat] Don't modify input URL by [dfaker](https://github.com/dfaker)
|
||||
* [extractor/uktv] Improve `_VALID_URL` by [dirkf](https://github.com/dirkf)
|
||||
* [extractor/vimeo:user] Fix `_VALID_URL`
### 2022.08.19
* Fix bug in `--download-archive`
|
||||
* [jsinterp] **Fix for new youtube players** and related improvements by [dirkf](https://github.com/dirkf), [pukkandan](https://github.com/pukkandan)
|
||||
* [phantomjs] Add function to execute JS without a DOM by [MinePlayersPE](https://github.com/MinePlayersPE), [pukkandan](https://github.com/pukkandan)
|
||||
* [build] Exclude devscripts from installs by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [cleanup] Misc fixes and cleanup
|
||||
* [extractor/youtube] **Add fallback to phantomjs** for nsig
|
||||
* [extractor/youtube] Fix error reporting of "Incomplete data"
|
||||
* [extractor/youtube] Improve format sorting for iOS formats
|
||||
* [extractor/youtube] Improve signature caching
|
||||
* [extractor/instagram] Fix extraction by [bashonly](https://github.com/bashonly), [pritam20ps05](https://github.com/pritam20ps05)
|
||||
* [extractor/rai] Minor fix by [nixxo](https://github.com/nixxo)
|
||||
* [extractor/rtbf] Fix stream extractor by [elyse0](https://github.com/elyse0)
|
||||
* [extractor/SovietsCloset] Fix extractor by [ChillingPepper](https://github.com/ChillingPepper)
|
||||
* [extractor/zattoo] Fix Zattoo resellers by [goggle](https://github.com/goggle)
### 2022.08.14
* Merge youtube-dl: Upto [commit/d231b56](https://github.com/ytdl-org/youtube-dl/commit/d231b56)
|
||||
* [jsinterp] Handle **new youtube signature functions**
|
||||
* [jsinterp] Truncate error messages
|
||||
* [extractor] Fix format sorting of `channels`
|
||||
* [ffmpeg] Disable avconv unless `--prefer-avconv`
|
||||
* [ffmpeg] Smarter detection of ffprobe filename
|
||||
* [embedthumbnail] Detect `libatomicparsley.so`
|
||||
* [ThumbnailsConvertor] Fix conversion after `fixup_webp`
|
||||
* [utils] Fix `get_compatible_ext`
|
||||
* [build] Fix changelog
|
||||
* [update] Set executable bit-mask by [pukkandan](https://github.com/pukkandan), [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [devscripts] Fix import
|
||||
* [docs] Consistent use of `e.g.` by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [cleanup] Misc fixes and cleanup
|
||||
* [extractor/moview] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/parler] Add extractor by [palewire](https://github.com/palewire)
|
||||
* [extractor/patreon] Ignore erroneous media attachments by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/truth] Add extractor by [palewire](https://github.com/palewire)
|
||||
* [extractor/aenetworks] Add formats parameter by [jacobtruman](https://github.com/jacobtruman)
|
||||
* [extractor/crunchyroll] Improve `_VALID_URL`s
|
||||
* [extractor/doodstream] Add `wf` domain by [aldoridhoni](https://github.com/aldoridhoni)
|
||||
* [extractor/facebook] Add reel support by [bashonly](https://github.com/bashonly)
|
||||
* [extractor/MLB] New extractor by [ischmidt20](https://github.com/ischmidt20)
|
||||
* [extractor/rai] Misc fixes by [nixxo](https://github.com/nixxo)
|
||||
* [extractor/toggo] Improve `_VALID_URL` by [masta79](https://github.com/masta79)
|
||||
* [extractor/tubitv] Extract additional formats by [shirt-dev](https://github.com/shirt-dev)
|
||||
* [extractor/zattoo] Potential fix for resellers
### 2022.08.08
* **Remove Python 3.6 support**
|
||||
* Determine merge container better by [pukkandan](https://github.com/pukkandan), [selfisekai](https://github.com/selfisekai)
|
||||
* Framework for embed detection by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
|
||||
* Merge youtube-dl: Upto [commit/adb5294](https://github.com/ytdl-org/youtube-dl/commit/adb5294)
|
||||
* `--compat-option no-live-chat` should disable danmaku
|
||||
* Fix misleading DRM message
|
||||
* Import ctypes only when necessary
|
||||
* Minor bugfixes
|
||||
* Reject entire playlists faster with `--match-filter`
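As a general usage sketch of `--match-filters` (not specific to this change), a filter like the following skips entries that do not satisfy the condition; the playlist URL is a placeholder:

```sh
# Only download playlist entries longer than 10 minutes
yt-dlp --match-filters "duration>600" "https://www.youtube.com/playlist?list=EXAMPLE"
```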
|
||||
* Remove filtered entries from `-J`
|
||||
* Standardize retry mechanism
|
||||
* Validate `--merge-output-format`
|
||||
* [downloader] Add average speed to final progress line
|
||||
* [extractor] Add field `audio_channels`
|
||||
* [extractor] Support multiple archive ids for one video
|
||||
* [ffmpeg] Set `ffmpeg_location` in a contextvar
|
||||
* [FFmpegThumbnailsConvertor] Fix conversion from GIF
|
||||
* [MetadataParser] Don't set `None` when the field didn't match
|
||||
* [outtmpl] Smarter replacing of unsupported characters
|
||||
* [outtmpl] Treat empty values as None in filenames
|
||||
* [utils] sanitize_open: Allow any IO stream as stdout
|
||||
* [build, devscripts] Add devscript to set a build variant
|
||||
* [build] Improve build process by [shirt-dev](https://github.com/shirt-dev)
|
||||
* [build] Update pyinstaller
|
||||
* [devscripts] Create `utils` and refactor
|
||||
* [docs] Clarify `best*`
|
||||
* [docs] Fix bug report issue template
|
||||
* [docs] Fix capitalization in references by [christoph-heinrich](https://github.com/christoph-heinrich)
|
||||
* [cleanup, mhtml] Use imghdr
|
||||
* [cleanup, utils] Consolidate known media extensions
|
||||
* [cleanup] Misc fixes and cleanup
|
||||
* [extractor/angel] Add extractor by [AxiosDeminence](https://github.com/AxiosDeminence)
|
||||
* [extractor/dplay] Add MotorTrend extractor by [Sipherdrakon](https://github.com/Sipherdrakon)
|
||||
* [extractor/harpodeon] Add extractor by [eren-kemer](https://github.com/eren-kemer)
|
||||
* [extractor/holodex] Add extractor by [pukkandan](https://github.com/pukkandan), [sqrtNOT](https://github.com/sqrtNOT)
|
||||
* [extractor/kompas] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/rai] Add raisudtirol extractor by [nixxo](https://github.com/nixxo)
|
||||
* [extractor/tempo] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/youtube] **Fixes for third party client detection** by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/youtube] Add `live_status=post_live` by [lazypete365](https://github.com/lazypete365)
|
||||
* [extractor/youtube] Extract more format info
|
||||
* [extractor/youtube] Parse translated subtitles only when requested
|
||||
* [extractor/youtube, extractor/twitch] Allow waiting for channels to become live
|
||||
* [extractor/youtube, webvtt] Extract auto-subs from livestream VODs by [fstirlitz](https://github.com/fstirlitz), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/AbemaTVTitle] Implement paging by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/archiveorg] Improve handling of formats by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/arte] Fix title extraction
|
||||
* [extractor/arte] **Move to v2 API** by [fstirlitz](https://github.com/fstirlitz), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/bbc] Fix news articles by [ajj8](https://github.com/ajj8)
|
||||
* [extractor/camtasia] Separate into own extractor by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/cloudflarestream] Fix video_id padding by [haobinliang](https://github.com/haobinliang)
|
||||
* [extractor/crunchyroll] Fix conversion of thumbnail from GIF
|
||||
* [extractor/crunchyroll] Handle missing metadata correctly by [Burve](https://github.com/Burve), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/crunchyroll:beta] Extract timestamp and fix tests by [tejing1](https://github.com/tejing1)
|
||||
* [extractor/crunchyroll:beta] Use streams API by [tejing1](https://github.com/tejing1)
|
||||
* [extractor/doodstream] Support more domains by [Galiley](https://github.com/Galiley)
|
||||
* [extractor/ESPN] Extract duration by [ischmidt20](https://github.com/ischmidt20)
|
||||
* [extractor/FIFA] Change API endpoint by [Bricio](https://github.com/Bricio), [yashkc2025](https://github.com/yashkc2025)
|
||||
* [extractor/globo:article] Remove false positives by [Bricio](https://github.com/Bricio)
|
||||
* [extractor/Go] Extract timestamp by [ischmidt20](https://github.com/ischmidt20)
|
||||
* [extractor/hidive] Fix cookie login when netrc is also given by [winterbird-code](https://github.com/winterbird-code)
|
||||
* [extractor/html5] Separate into own extractor by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/ina] Improve extractor by [elyse0](https://github.com/elyse0)
|
||||
* [extractor/NaverNow] Change endpoint by [ping](https://github.com/ping)
|
||||
* [extractor/ninegag] Extract uploader by [DjesonPV](https://github.com/DjesonPV)
|
||||
* [extractor/NovaPlay] Fix extractor by [Bojidarist](https://github.com/Bojidarist)
|
||||
* [extractor/orf:radio] Rewrite extractors
|
||||
* [extractor/patreon] Fix and improve extractors by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/rai] Fix RaiNews extraction by [nixxo](https://github.com/nixxo)
|
||||
* [extractor/redbee] Unify and update extractors by [elyse0](https://github.com/elyse0)
|
||||
* [extractor/stripchat] Fix _VALID_URL by [freezboltz](https://github.com/freezboltz)
|
||||
* [extractor/tubi] Exclude playlists from playlist entries by [sqrtNOT](https://github.com/sqrtNOT)
|
||||
* [extractor/tviplayer] Improve `_VALID_URL` by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/twitch] Extract chapters for single chapter VODs by [mpeter50](https://github.com/mpeter50)
|
||||
* [extractor/vgtv] Support tv.vg.no by [sqrtNOT](https://github.com/sqrtNOT)
|
||||
* [extractor/vidio] Support embed link by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/vk] Fix extractor by [Mehavoid](https://github.com/Mehavoid)
|
||||
* [extractor/WASDTV:record] Fix `_VALID_URL`
|
||||
* [extractor/xfileshare] Add Referer by [Galiley](https://github.com/Galiley)
|
||||
* [extractor/YahooJapanNews] Fix extractor by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/yandexmusic] Extract higher quality format
|
||||
* [extractor/zee5] Update Device ID by [m4tu4g](https://github.com/m4tu4g)
### 2022.07.18
* Allow users to specify encoding in each config file by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* Discard infodict from memory if no longer needed
|
||||
* Do not allow extractors to return `None`
|
||||
* Do not load system certificates when `certifi` is used
|
||||
* Fix rounding of integers in format table
|
||||
* Improve chapter sanitization
|
||||
* Skip some fixup if remux/recode is needed by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* Support `--no-progress` for `--wait-for-video`
|
||||
* Fix bug in [612f2be](https://github.com/yt-dlp/yt-dlp/commit/612f2be5d3924540158dfbe5f25d841f04cff8c6)
|
||||
* [outtmpl] Add alternate form `h` for HTML escaping
|
||||
* [aes] Add multiple padding modes in CBC by [elyse0](https://github.com/elyse0)
|
||||
* [extractor/common] Passthrough `errnote=False` to parsers
|
||||
* [extractor/generic] Remove HEAD request
|
||||
* [http] Ensure the file handle is always closed
|
||||
* [ModifyChapters] Modify duration in infodict
|
||||
* [options] Fix aliases to `--config-location`
|
||||
* [utils] Fix `get_domain`
|
||||
* [build] Consistent order for lazy extractors by [lamby](https://github.com/lamby)
|
||||
* [build] Fix architecture suffix of executables by [odo2063](https://github.com/odo2063)
|
||||
* [build] Improve `setup.py`
|
||||
* [update] Do not check `_update_spec` when up to date
|
||||
* [update] Prepare to remove Python 3.6 support
|
||||
* [compat] Let PyInstaller detect _legacy module
|
||||
* [devscripts/update-formulae] Do not change dependency section
|
||||
* [test] Split download tests so they can be more easily run in CI
|
||||
* [docs] Improve docstring of `download_ranges` by [FirefoxMetzger](https://github.com/FirefoxMetzger)
|
||||
* [docs] Improve issue templates
|
||||
* [build] Fix bug in [6d916fe](https://github.com/yt-dlp/yt-dlp/commit/6d916fe709a38e8c4c69b73843acf170b5165931)
|
||||
* [cleanup, utils] Refactor parse_codecs
|
||||
* [cleanup] Misc fixes and cleanup
|
||||
* [extractor/acfun] Add extractors by [lockmatrix](https://github.com/lockmatrix)
|
||||
* [extractor/Audiodraft] Add extractors by [Ashish0804](https://github.com/Ashish0804), [fstirlitz](https://github.com/fstirlitz)
|
||||
* [extractor/cellebrite] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/detik] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/hytale] Add extractor by [llamasblade](https://github.com/llamasblade), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/liputan6] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/mocha] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/rtl.lu] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/rtvsl] Add extractor by [iw0nderhow](https://github.com/iw0nderhow), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/StarTrek] Add extractor by [scy](https://github.com/scy)
|
||||
* [extractor/syvdk] Add extractor by [misaelaguayo](https://github.com/misaelaguayo)
|
||||
* [extractor/theholetv] Add extractor by [dosy4ev](https://github.com/dosy4ev)
|
||||
* [extractor/TubeTuGraz] Add extractor by [Ferdi265](https://github.com/Ferdi265), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/tviplayer] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/wetv] Add extractors by [elyse0](https://github.com/elyse0)
|
||||
* [extractor/wikimedia] Add extractor by [EhtishamSabir](https://github.com/EhtishamSabir), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/youtube] Fix duration check for post-live manifestless mode
|
||||
* [extractor/youtube] More metadata for storyboards by [ftk](https://github.com/ftk)
|
||||
* [extractor/bigo] Fix extractor by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/BiliIntl] Fix subtitle extraction by [MinePlayersPE](https://github.com/MinePlayersPE)
|
||||
* [extractor/crunchyroll] Improve `_VALID_URL`
|
||||
* [extractor/fifa] Fix extractor by [ischmidt20](https://github.com/ischmidt20)
|
||||
* [extractor/instagram] Fix post/story extractors by [pritam20ps05](https://github.com/pritam20ps05), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/iq] Set language correctly for Korean subtitles
|
||||
* [extractor/MangoTV] Fix subtitle languages
|
||||
* [extractor/Netverse] Improve playlist extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/philharmoniedeparis] Fix extractor by [sqrtNOT](https://github.com/sqrtNOT)
|
||||
* [extractor/Trovo] Fix extractor by [u-spec-png](https://github.com/u-spec-png)
|
||||
* [extractor/twitch] Support storyboards for VODs by [ftk](https://github.com/ftk)
|
||||
* [extractor/WatchESPN] Improve `_VALID_URL` by [IONECarter](https://github.com/IONECarter), [dirkf](https://github.com/dirkf)
|
||||
* [extractor/WSJArticle] Fix video id extraction by [sqrtNOT](https://github.com/sqrtNOT)
|
||||
* [extractor/Ximalaya] Fix extractors by [lockmatrix](https://github.com/lockmatrix)
|
||||
* [cleanup, extractor/youtube] Fix tests by [sheerluck](https://github.com/sheerluck)
### 2022.06.29
* Fix `--downloader native`
|
||||
* Fix `section_end` of clips
|
||||
* Fix playlist error handling
|
||||
* Sanitize `chapters`
|
||||
* [extractor] Fix `_create_request` when headers is None
|
||||
* [extractor] Fix empty `BaseURL` in MPD
|
||||
* [ffmpeg] Write full output to debug on error
|
||||
* [hls] Warn user when trying to download live HLS
|
||||
* [options] Fix `parse_known_args` for `--`
|
||||
* [utils] Fix inconsistent default handling between HTTP and HTTPS requests by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [build] Draft release until complete
|
||||
* [build] Fix release tag commit
|
||||
* [build] Standalone x64 builds for MacOS 10.9 by [StefanLobbenmeier](https://github.com/StefanLobbenmeier)
|
||||
* [update] Ability to set a maximum version for specific variants
|
||||
* [compat] Fix `compat.WINDOWS_VT_MODE`
|
||||
* [compat] Remove deprecated functions from core code
|
||||
* [compat] Remove more functions
|
||||
* [cleanup, extractor] Reduce direct use of `_downloader`
|
||||
* [cleanup] Consistent style for file heads
|
||||
* [cleanup] Fix some typos by [crazymoose77756](https://github.com/crazymoose77756)
|
||||
* [cleanup] Misc fixes and cleanup
|
||||
* [extractor/Scrolller] Add extractor by [LunarFang416](https://github.com/LunarFang416)
|
||||
* [extractor/ViMP] Add playlist extractor by [FestplattenSchnitzel](https://github.com/FestplattenSchnitzel)
|
||||
* [extractor/fuyin] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/livestreamfails] Add extractor by [nomevi](https://github.com/nomevi)
|
||||
* [extractor/premiershiprugby] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/steam] Add broadcast extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/youtube] Mark videos as fully watched by [Brett824](https://github.com/Brett824)
|
||||
* [extractor/CWTV] Extract thumbnail by [ischmidt20](https://github.com/ischmidt20)
|
||||
* [extractor/ViMP] Add thumbnail and support more sites by [FestplattenSchnitzel](https://github.com/FestplattenSchnitzel)
|
||||
* [extractor/dropout] Support cookies and login only as needed by [pingiun](https://github.com/pingiun), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/ertflix] Improve `_VALID_URL`
|
||||
* [extractor/lbry] Use HEAD request for redirect URL by [flashdagger](https://github.com/flashdagger)
|
||||
* [extractor/mediaset] Improve `_VALID_URL`
|
||||
* [extractor/npr] Implement [e50c350](https://github.com/yt-dlp/yt-dlp/commit/e50c3500b43d80e4492569c4b4523c4379c6fbb2) differently
|
||||
* [extractor/tennistv] Rewrite extractor by [pukkandan](https://github.com/pukkandan), [zenerdi0de](https://github.com/zenerdi0de)
### 2022.06.22.1
* [build] Fix updating homebrew formula
### 2022.06.22
* [**Deprecate support for Python 3.6**](https://github.com/yt-dlp/yt-dlp/issues/3764#issuecomment-1154051119)
|
||||
* **Add option `--download-sections` to download video partially**
|
||||
* Chapter regex and time ranges are accepted, e.g. `--download-sections *1:10-2:20`
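Building on the syntax above, a sketch combining a time range and a chapter regex (ffmpeg is required; the URL is a placeholder):

```sh
# Download 1:10-2:20 of the video plus any chapter whose title matches "intro"
yt-dlp --download-sections "*1:10-2:20" --download-sections "intro" "https://example.com/video"
```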
|
||||
* Add option `--alias`
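A sketch of defining and then using an alias in the same invocation (the alias name `--mp4`, the format string and the URL are made up for illustration; unless the alias starts with a dash it is prefixed with `--`):

```sh
# Define --mp4 as shorthand for a format selection, then use it
yt-dlp --alias mp4 "--format bv*[ext=mp4]+ba[ext=m4a]/b[ext=mp4]" --mp4 "https://example.com/video"
```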
|
||||
* Add option `--lazy-playlist` to process entries as they are received
|
||||
* Add option `--retry-sleep`
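A sketch assuming the `[TYPE:]EXPR` syntax documented in the README (the URL is a placeholder):

```sh
# Exponential back-off (1s, 2s, 4s, ... capped at 20s) between fragment retries,
# and a constant 5 seconds between HTTP retries
yt-dlp --retry-sleep fragment:exp=1:20 --retry-sleep http:5 "https://example.com/video"
```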
|
||||
* Add slicing notation to `--playlist-items`
|
||||
* Adds support for negative indices and step
|
||||
* Add `-I` as alias for `--playlist-items`
|
||||
* Makes `--playlist-start`, `--playlist-end`, `--playlist-reverse`, `--no-playlist-reverse` redundant
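A sketch of the slicing notation (the playlist URL is a placeholder):

```sh
# Items 1-10, every 2nd item from 20 onwards, and the last 3 items of the playlist
yt-dlp -I "1:10,20::2,-3:" "https://www.youtube.com/playlist?list=EXAMPLE"
```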
|
||||
* `--config-location -` to provide options interactively
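A non-interactive variant of the same idea, feeding options on stdin via a here-document (assuming `-` makes yt-dlp read the config from stdin; the URL is a placeholder):

```sh
yt-dlp --config-location - "https://example.com/video" <<'EOF'
--format bestvideo*+bestaudio/best
--embed-metadata
EOF
```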
|
||||
* [build] Add Linux standalone builds
|
||||
* [update] Self-restart after update
|
||||
* Merge youtube-dl: Upto [commit/8a158a9](https://github.com/ytdl-org/youtube-dl/commit/8a158a9)
|
||||
* Add `--no-update`
|
||||
* Allow extractors to specify section_start/end for clips
|
||||
* Do not print progress to `stderr` with `-q`
|
||||
* Ensure pre-processor errors do not block video download
|
||||
* Fix `--simulate --max-downloads`
|
||||
* Improve error handling of bad config files
|
||||
* Return an error code if update fails
|
||||
* Fix bug in [3a408f9](https://github.com/yt-dlp/yt-dlp/commit/3a408f9d199127ca2626359e21a866a09ab236b3)
|
||||
* [ExtractAudio] Allow conditional conversion
|
||||
* [ModifyChapters] Fix repeated removal of small segments
|
||||
* [ThumbnailsConvertor] Allow conditional conversion
|
||||
* [cookies] Detect profiles for cygwin/BSD by [moench-tegeder](https://github.com/moench-tegeder)
|
||||
* [dash] Show fragment count with `--live-from-start` by [flashdagger](https://github.com/flashdagger)
|
||||
* [extractor] Add `_search_json` by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor] Add `default` parameter to `_search_json` by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor] Add dev option `--load-pages`
|
||||
* [extractor] Handle `json_ld` with multiple `@type`s
|
||||
* [extractor] Import `_ALL_CLASSES` lazily
|
||||
* [extractor] Recognize `src` attribute from HTML5 media elements by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/generic] Revert e6ae51c123897927eb3c9899923d8ffd31c7f85d
|
||||
* [f4m] Bugfix
|
||||
* [ffmpeg] Check version lazily
|
||||
* [jsinterp] Some optimizations and refactoring by [dirkf](https://github.com/dirkf), [pukkandan](https://github.com/pukkandan)
|
||||
* [utils] Improve performance using `functools.cache`
|
||||
* [utils] Send HTTP/1.1 ALPN extension by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [utils] `ExtractorError`: Fix `exc_info`
|
||||
* [utils] `ISO3166Utils`: Add `EU` and `AP`
|
||||
* [utils] `Popen`: Refactor to use contextmanager
|
||||
* [utils] `locked_file`: Fix for PyPy on Windows
|
||||
* [update] Expose more functionality to API
|
||||
* [update] Use `.git` folder to distinguish `source`/`unknown`
|
||||
* [compat] Add `functools.cached_property`
|
||||
* [test] Fix `FakeYDL` signatures by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [docs] Improvements
|
||||
* [cleanup, ExtractAudio] Refactor
|
||||
* [cleanup, downloader] Refactor `report_progress`
|
||||
* [cleanup, extractor] Refactor `_download_...` methods
|
||||
* [cleanup, extractor] Rename `extractors.py` to `_extractors.py`
|
||||
* [cleanup, utils] Don't use kwargs for `format_field`
|
||||
* [cleanup, build] Refactor
|
||||
* [cleanup, docs] Re-indent "Usage and Options" section
|
||||
* [cleanup] Deprecate `YoutubeDL.parse_outtmpl`
|
||||
* [cleanup] Misc fixes and cleanup by [Lesmiscore](https://github.com/Lesmiscore), [MrRawes](https://github.com/MrRawes), [christoph-heinrich](https://github.com/christoph-heinrich), [flashdagger](https://github.com/flashdagger), [gamer191](https://github.com/gamer191), [kwconder](https://github.com/kwconder), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/DailyWire] Add extractors by [HobbyistDev](https://github.com/HobbyistDev), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/fourzerostudio] Add extractors by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/GoogleDrive] Add folder extractor by [evansp](https://github.com/evansp), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/MirrorCoUK] Add extractor by [LunarFang416](https://github.com/LunarFang416), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/atscaleconfevent] Add extractor by [Ashish0804](https://github.com/Ashish0804)
|
||||
* [extractor/freetv] Add extractor by [elyse0](https://github.com/elyse0)
|
||||
* [extractor/ixigua] Add Extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/kicker.de] Add extractor by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/netverse] Add extractors by [HobbyistDev](https://github.com/HobbyistDev), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/playsuisse] Add extractor by [pukkandan](https://github.com/pukkandan), [sbor23](https://github.com/sbor23)
|
||||
* [extractor/substack] Add extractor by [elyse0](https://github.com/elyse0)
|
||||
* [extractor/youtube] **Support downloading clips**
|
||||
* [extractor/youtube] Add `innertube_host` and `innertube_key` extractor args by [coletdjnz](https://github.com/coletdjnz)
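A hedged sketch of passing these extractor args (the host value and URL shown are only illustrative):

```sh
# Point the YouTube extractor at a specific Innertube API host
yt-dlp --extractor-args "youtube:innertube_host=youtubei.googleapis.com" "https://www.youtube.com/watch?v=EXAMPLE"
```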
|
||||
* [extractor/youtube] Add warning for PostLiveDvr
|
||||
* [extractor/youtube] Bring back `_extract_chapters_from_description`
|
||||
* [extractor/youtube] Extract `comment_count` from webpage
|
||||
* [extractor/youtube] Fix `:ytnotifications` extractor by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/youtube] Fix initial player response extraction by [coletdjnz](https://github.com/coletdjnz), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/youtube] Fix live chat for videos with content warning by [coletdjnz](https://github.com/coletdjnz)
|
||||
* [extractor/youtube] Make signature extraction non-fatal
|
||||
* [extractor/youtube:tab] Detect `videoRenderer` in `_post_thread_continuation_entries`
|
||||
* [extractor/BiliIntl] Fix metadata extraction
|
||||
* [extractor/BiliIntl] Fix subtitle extraction by [HobbyistDev](https://github.com/HobbyistDev)
|
||||
* [extractor/FranceCulture] Fix extractor by [aurelg](https://github.com/aurelg), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/PokemonSoundLibrary] Remove extractor by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/StreamCZ] Fix extractor by [adamanldo](https://github.com/adamanldo), [dirkf](https://github.com/dirkf)
|
||||
* [extractor/WatchESPN] Support free videos and BAM_DTC by [ischmidt20](https://github.com/ischmidt20)
|
||||
* [extractor/animelab] Remove extractor by [gamer191](https://github.com/gamer191)
|
||||
* [extractor/bloomberg] Change playback endpoint by [m4tu4g](https://github.com/m4tu4g)
|
||||
* [extractor/ccc] Extract view_count by [vkorablin](https://github.com/vkorablin)
|
||||
* [extractor/crunchyroll:beta] Fix extractor after API change by [Burve](https://github.com/Burve), [tejing1](https://github.com/tejing1)
|
||||
* [extractor/curiositystream] Get `auth_token` from cookie by [mnn](https://github.com/mnn)
|
||||
* [extractor/digitalconcerthall] Fix extractor by [ZhymabekRoman](https://github.com/ZhymabekRoman)
|
||||
* [extractor/dropbox] Extract the correct `mountComponent`
|
||||
* [extractor/dropout] Login is not mandatory
|
||||
* [extractor/duboku] Fix for hostname change by [mozbugbox](https://github.com/mozbugbox)
|
||||
* [extractor/espn] Add `WatchESPN` extractor by [ischmidt20](https://github.com/ischmidt20), [pukkandan](https://github.com/pukkandan)
|
||||
* [extractor/expressen] Fix extractor by [aejdl](https://github.com/aejdl)
|
||||
* [extractor/foxnews] Update embed extraction by [elyse0](https://github.com/elyse0)
|
||||
* [extractor/ina] Fix extractor by [elyse0](https://github.com/elyse0)
|
||||
* [extractor/iwara:user] Make paging better by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/jwplatform] Look for `data-video-jw-id`
|
||||
* [extractor/lbry] Update livestream API by [flashdagger](https://github.com/flashdagger)
|
||||
* [extractor/mediaset] Improve `_VALID_URL`
|
||||
* [extractor/naver] Add `navernow` extractor by [ping](https://github.com/ping)
|
||||
* [extractor/niconico:series] Fix extractor by [sqrtNOT](https://github.com/sqrtNOT)
|
||||
* [extractor/npr] Use stream url from json-ld by [r5d](https://github.com/r5d)
|
||||
* [extractor/pornhub] Extract `uploader_id` field by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/radiofrance] Add more radios by [bubbleguuum](https://github.com/bubbleguuum)
|
||||
* [extractor/rumble] Detect JS embed
|
||||
* [extractor/rumble] Extract subtitles by [fstirlitz](https://github.com/fstirlitz)
|
||||
* [extractor/southpark] Add `southpark.lat` extractor by [darkxex](https://github.com/darkxex)
|
||||
* [extractor/spotify:show] Fix extractor
|
||||
* [extractor/tiktok] Detect embeds
|
||||
* [extractor/tiktok] Extract `SIGI_STATE` by [dirkf](https://github.com/dirkf), [pukkandan](https://github.com/pukkandan), [sulyi](https://github.com/sulyi)
|
||||
* [extractor/tver] Fix extractor by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/vevo] Fix extractor by [Lesmiscore](https://github.com/Lesmiscore)
|
||||
* [extractor/yahoo:gyao] Fix extractor
|
||||
* [extractor/zattoo] Fix live streams by [miseran](https://github.com/miseran)
|
||||
* [extractor/zdf] Improve format sorting by [elyse0](https://github.com/elyse0)
### 2022.05.18
* Add support for SSL client certificate authentication by [coletdjnz](https://github.com/coletdjnz), [dirkf](https://github.com/dirkf)
|
||||
@@ -419,7 +1209,7 @@
|
||||
* [downloader/ffmpeg] Handle unknown formats better
|
||||
* [outtmpl] Handle `-o ""` better
|
||||
* [outtmpl] Handle hard-coded file extension better
|
||||
* [extractor] Add convinience function `_yes_playlist`
|
||||
* [extractor] Add convenience function `_yes_playlist`
|
||||
* [extractor] Allow non-fatal `title` extraction
|
||||
* [extractor] Extract video inside `Article` json_ld
|
||||
* [generic] Allow further processing of json_ld URL
|
||||
@@ -1058,7 +1848,7 @@
|
||||
|
||||
* Add new option `--netrc-location`
|
||||
* [outtmpl] Allow alternate fields using `,`
|
||||
* [outtmpl] Add format type `B` to treat the value as bytes (eg: to limit the filename to a certain number of bytes)
|
||||
* [outtmpl] Add format type `B` to treat the value as bytes, e.g. to limit the filename to a certain number of bytes
|
||||
* Separate the options `--ignore-errors` and `--no-abort-on-error`
|
||||
* Basic framework for simultaneous download of multiple formats by [nao20010128nao](https://github.com/nao20010128nao)
|
||||
* [17live] Add 17.live extractor by [nao20010128nao](https://github.com/nao20010128nao)
|
||||
@@ -1156,7 +1946,7 @@
|
||||
* [build] Automate more of the release process by [animelover1984](https://github.com/animelover1984), [pukkandan](https://github.com/pukkandan)
|
||||
* [build] Fix sha256 by [nihil-admirari](https://github.com/nihil-admirari)
|
||||
* [build] Bring back brew taps by [nao20010128nao](https://github.com/nao20010128nao)
|
||||
* [build] Provide `--onedir` zip for windows by [pukkandan](https://github.com/pukkandan)
|
||||
* [build] Provide `--onedir` zip for windows
|
||||
* [cleanup,docs] Add deprecation warning in docs for some counter intuitive behaviour
|
||||
* [cleanup] Fix line endings for `nebula.py` by [glenn-slayden](https://github.com/glenn-slayden)
|
||||
* [cleanup] Improve `make clean-test` by [sulyi](https://github.com/sulyi)
|
||||
@@ -1448,7 +2238,7 @@
|
||||
|
||||
* Merge youtube-dl: Upto [commit/a803582](https://github.com/ytdl-org/youtube-dl/commit/a8035827177d6b59aca03bd717acb6a9bdd75ada)
|
||||
* Add `--extractor-args` to pass some extractor-specific arguments. See [readme](https://github.com/yt-dlp/yt-dlp#extractor-arguments)
|
||||
* Add extractor option `skip` for `youtube`. Eg: `--extractor-args youtube:skip=hls,dash`
|
||||
* Add extractor option `skip` for `youtube`, e.g. `--extractor-args youtube:skip=hls,dash`
|
||||
* Deprecates `--youtube-skip-dash-manifest`, `--youtube-skip-hls-manifest`, `--youtube-include-dash-manifest`, `--youtube-include-hls-manifest`
|
||||
* Allow `--list...` options to work with `--print`, `--quiet` and other `--list...` options
|
||||
* [youtube] Use `player` API for additional video extraction requests by [coletdjnz](https://github.com/coletdjnz)
|
||||
@@ -1553,7 +2343,7 @@
|
||||
* [utils] Generalize `traverse_dict` to `traverse_obj`
|
||||
* [downloader/ffmpeg] Hide FFmpeg banner unless in verbose mode by [fstirlitz](https://github.com/fstirlitz)
|
||||
* [build] Release `yt-dlp.tar.gz`
|
||||
* [build,update] Add GNU-style SHA512 and prepare updater for simlar SHA256 by [nihil-admirari](https://github.com/nihil-admirari)
|
||||
* [build,update] Add GNU-style SHA512 and prepare updater for similar SHA256 by [nihil-admirari](https://github.com/nihil-admirari)
|
||||
* [pyinst] Show Python version in exe metadata by [nihil-admirari](https://github.com/nihil-admirari)
|
||||
* [docs] Improve documentation of dependencies
|
||||
* [cleanup] Mark unused files
|
||||
|
||||
@@ -28,12 +28,12 @@ You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [autho
|
||||
[](https://github.com/sponsors/coletdjnz)
|
||||
|
||||
* YouTube improvements including: age-gate bypass, private playlists, multiple-clients (to avoid throttling) and a lot of under-the-hood improvements
|
||||
* Added support for downloading YoutubeWebArchive videos
|
||||
* Added support for new websites MainStreaming, PRX, nzherald, etc
|
||||
* Added support for new websites YoutubeWebArchive, MainStreaming, PRX, nzherald, Mediaklikk, StarTV etc
|
||||
* Improved/fixed support for Patreon, panopto, gfycat, itv, pbs, SouthParkDE etc
|
||||
|
||||
|
||||
|
||||
## [Ashish0804](https://github.com/Ashish0804)
|
||||
## [Ashish0804](https://github.com/Ashish0804) <sub><sup>[Inactive]</sup></sub>
|
||||
|
||||
[](https://ko-fi.com/ashish0804)
|
||||
|
||||
@@ -42,10 +42,18 @@ You can also find lists of all [contributors of yt-dlp](CONTRIBUTORS) and [autho
|
||||
* Improved/fixed support for HiDive, HotStar, Hungama, LBRY, LinkedInLearning, Mxplayer, SonyLiv, TV2, Vimeo, VLive etc
|
||||
|
||||
|
||||
## [Lesmiscore](https://github.com/Lesmiscore) (nao20010128nao)
|
||||
## [Lesmiscore](https://github.com/Lesmiscore) <sub><sup>(nao20010128nao)</sup></sub>
|
||||
|
||||
**Bitcoin**: bc1qfd02r007cutfdjwjmyy9w23rjvtls6ncve7r3s
|
||||
**Monacoin**: mona1q3tf7dzvshrhfe3md379xtvt2n22duhglv5dskr
|
||||
|
||||
* Download live from start to end for YouTube
|
||||
* Added support for new websites mildom, PixivSketch, skeb, radiko, voicy, mirrativ, openrec, whowatch, damtomo, 17.live, mixch etc
|
||||
* Added support for new websites AbemaTV, mildom, PixivSketch, skeb, radiko, voicy, mirrativ, openrec, whowatch, damtomo, 17.live, mixch etc
|
||||
* Improved/fixed support for fc2, YahooJapanNews, tver, iwara etc
|
||||
|
||||
|
||||
## [bashonly](https://github.com/bashonly)
|
||||
|
||||
* `--cookies-from-browser` support for Firefox containers
|
||||
* Added support for new websites Genius, Kick, NBCStations, Triller, VideoKen etc
|
||||
* Improved/fixed support for Anvato, Brightcove, Instagram, ParamountPlus, Reddit, SlidesLive, TikTok, Twitter, Vimeo etc
|
||||
|
||||
Makefile (31 lines changed)
@@ -9,15 +9,16 @@ tar: yt-dlp.tar.gz
|
||||
# Keep this list in sync with MANIFEST.in
|
||||
# intended use: when building a source distribution,
|
||||
# make pypi-files && python setup.py sdist
|
||||
pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites completions yt-dlp.1 devscripts/* test/*
|
||||
pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
|
||||
completions yt-dlp.1 requirements.txt setup.cfg devscripts/* test/*
|
||||
|
||||
.PHONY: all clean install test tar pypi-files completions ot offlinetest codetest supportedsites
|
||||
|
||||
clean-test:
|
||||
rm -rf test/testdata/sigs/player-*.js tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
|
||||
*.frag.aria2 *.frag.urls *.info.json *.live_chat.json *.meta *.part* *.tmp *.temp *.unknown_video *.ytdl \
|
||||
*.3gp *.ape *.ass *.avi *.desktop *.flac *.flv *.jpeg *.jpg *.m4a *.m4v *.mhtml *.mkv *.mov *.mp3 \
|
||||
*.mp4 *.ogg *.opus *.png *.sbv *.srt *.swf *.swp *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp
|
||||
*.3gp *.ape *.ass *.avi *.desktop *.f4v *.flac *.flv *.gif *.jpeg *.jpg *.m4a *.m4v *.mhtml *.mkv *.mov *.mp3 \
|
||||
*.mp4 *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.swf *.swp *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp
|
||||
clean-dist:
|
||||
rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
|
||||
yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS .mailmap
|
||||
@@ -32,7 +33,6 @@ completion-zsh: completions/zsh/_yt-dlp
|
||||
lazy-extractors: yt_dlp/extractor/lazy_extractors.py
|
||||
|
||||
PREFIX ?= /usr/local
|
||||
DESTDIR ?= .
|
||||
BINDIR ?= $(PREFIX)/bin
|
||||
MANDIR ?= $(PREFIX)/man
|
||||
SHAREDIR ?= $(PREFIX)/share
|
||||
@@ -42,7 +42,7 @@ PYTHON ?= /usr/bin/env python3
|
||||
SYSCONFDIR = $(shell if [ $(PREFIX) = /usr -o $(PREFIX) = /usr/local ]; then echo /etc; else echo $(PREFIX)/etc; fi)
|
||||
|
||||
# set markdown input format to "markdown-smart" for pandoc version 2 and to "markdown" for pandoc prior to version 2
|
||||
MARKDOWN = $(shell if [ "$(pandoc -v | head -n1 | cut -d" " -f2 | head -c1)" = "2" ]; then echo markdown-smart; else echo markdown; fi)
|
||||
MARKDOWN = $(shell if [ `pandoc -v | head -n1 | cut -d" " -f2 | head -c1` = "2" ]; then echo markdown-smart; else echo markdown; fi)
|
||||
|
||||
install: lazy-extractors yt-dlp yt-dlp.1 completions
|
||||
mkdir -p $(DESTDIR)$(BINDIR)
|
||||
@@ -74,27 +74,26 @@ offlinetest: codetest
|
||||
$(PYTHON) -m pytest -k "not download"
|
||||
|
||||
# XXX: This is hard to maintain
|
||||
CODE_FOLDERS = yt_dlp yt_dlp/downloader yt_dlp/extractor yt_dlp/postprocessor yt_dlp/compat \
|
||||
yt_dlp/extractor/anvato_token_generator
|
||||
CODE_FOLDERS = yt_dlp yt_dlp/downloader yt_dlp/extractor yt_dlp/postprocessor yt_dlp/compat
|
||||
yt-dlp: yt_dlp/*.py yt_dlp/*/*.py
|
||||
mkdir -p zip
|
||||
for d in $(CODE_FOLDERS) ; do \
|
||||
mkdir -p zip/$$d ;\
|
||||
cp -pPR $$d/*.py zip/$$d/ ;\
|
||||
done
|
||||
touch -t 200001010101 zip/yt_dlp/*.py zip/yt_dlp/*/*.py zip/yt_dlp/*/*/*.py
|
||||
touch -t 200001010101 zip/yt_dlp/*.py zip/yt_dlp/*/*.py
|
||||
mv zip/yt_dlp/__main__.py zip/
|
||||
cd zip ; zip -q ../yt-dlp yt_dlp/*.py yt_dlp/*/*.py yt_dlp/*/*/*.py __main__.py
|
||||
cd zip ; zip -q ../yt-dlp yt_dlp/*.py yt_dlp/*/*.py __main__.py
|
||||
rm -rf zip
|
||||
echo '#!$(PYTHON)' > yt-dlp
|
||||
cat yt-dlp.zip >> yt-dlp
|
||||
rm yt-dlp.zip
|
||||
chmod a+x yt-dlp
|
||||
|
||||
README.md: yt_dlp/*.py yt_dlp/*/*.py
|
||||
COLUMNS=80 $(PYTHON) yt_dlp/__main__.py --help | $(PYTHON) devscripts/make_readme.py
|
||||
README.md: yt_dlp/*.py yt_dlp/*/*.py devscripts/make_readme.py
|
||||
COLUMNS=80 $(PYTHON) yt_dlp/__main__.py --ignore-config --help | $(PYTHON) devscripts/make_readme.py
|
||||
|
||||
CONTRIBUTING.md: README.md
|
||||
CONTRIBUTING.md: README.md devscripts/make_contributing.py
|
||||
$(PYTHON) devscripts/make_contributing.py README.md CONTRIBUTING.md
|
||||
|
||||
issuetemplates: devscripts/make_issue_template.py .github/ISSUE_TEMPLATE_tmpl/1_broken_site.yml .github/ISSUE_TEMPLATE_tmpl/2_site_support_request.yml .github/ISSUE_TEMPLATE_tmpl/3_site_feature_request.yml .github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml .github/ISSUE_TEMPLATE_tmpl/5_feature_request.yml yt_dlp/version.py
|
||||
@@ -111,7 +110,7 @@ supportedsites:
|
||||
README.txt: README.md
|
||||
pandoc -f $(MARKDOWN) -t plain README.md -o README.txt
|
||||
|
||||
yt-dlp.1: README.md
|
||||
yt-dlp.1: README.md devscripts/prepare_manpage.py
|
||||
$(PYTHON) devscripts/prepare_manpage.py yt-dlp.1.temp.md
|
||||
pandoc -s -f $(MARKDOWN) -t man yt-dlp.1.temp.md -o yt-dlp.1
|
||||
rm -f yt-dlp.1.temp.md
|
||||
@@ -128,12 +127,12 @@ completions/fish/yt-dlp.fish: yt_dlp/*.py yt_dlp/*/*.py devscripts/fish-completi
|
||||
mkdir -p completions/fish
|
||||
$(PYTHON) devscripts/fish-completion.py
|
||||
|
||||
_EXTRACTOR_FILES = $(shell find yt_dlp/extractor -iname '*.py' -and -not -iname 'lazy_extractors.py')
|
||||
_EXTRACTOR_FILES = $(shell find yt_dlp/extractor -name '*.py' -and -not -name 'lazy_extractors.py')
|
||||
yt_dlp/extractor/lazy_extractors.py: devscripts/make_lazy_extractors.py devscripts/lazy_load_template.py $(_EXTRACTOR_FILES)
|
||||
$(PYTHON) devscripts/make_lazy_extractors.py $@
|
||||
|
||||
yt-dlp.tar.gz: all
|
||||
@tar -czf $(DESTDIR)/yt-dlp.tar.gz --transform "s|^|yt-dlp/|" --owner 0 --group 0 \
|
||||
@tar -czf yt-dlp.tar.gz --transform "s|^|yt-dlp/|" --owner 0 --group 0 \
|
||||
--exclude '*.DS_Store' \
|
||||
--exclude '*.kate-swp' \
|
||||
--exclude '*.pyc' \
|
||||
@@ -147,7 +146,7 @@ yt-dlp.tar.gz: all
|
||||
CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
|
||||
Makefile MANIFEST.in yt-dlp.1 README.txt completions \
|
||||
setup.py setup.cfg yt-dlp yt_dlp requirements.txt \
|
||||
devscripts test tox.ini pytest.ini
|
||||
devscripts test
|
||||
|
||||
AUTHORS: .mailmap
|
||||
git shortlog -s -n | cut -f2 | sort > AUTHORS
|
||||
|
||||
devscripts/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
|
||||
# Empty file needed to make devscripts.utils properly importable from outside
|
||||
@@ -1,9 +1,12 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import yt_dlp
|
||||
|
||||
BASH_COMPLETION_FILE = "completions/bash/yt-dlp"
|
||||
|
||||
@@ -13,9 +13,11 @@ import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from test.helper import gettestcases
|
||||
|
||||
from yt_dlp.utils import compat_urllib_parse_urlparse, compat_urllib_request
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
|
||||
from test.helper import gettestcases
|
||||
|
||||
if len(sys.argv) > 1:
|
||||
METHOD = 'LIST'
|
||||
@@ -26,7 +28,7 @@ else:
|
||||
for test in gettestcases():
|
||||
if METHOD == 'EURISTIC':
|
||||
try:
|
||||
webpage = compat_urllib_request.urlopen(test['url'], timeout=10).read()
|
||||
webpage = urllib.request.urlopen(test['url'], timeout=10).read()
|
||||
except Exception:
|
||||
print('\nFail: {}'.format(test['name']))
|
||||
continue
|
||||
@@ -36,7 +38,7 @@ for test in gettestcases():
|
||||
RESULT = 'porn' in webpage.lower()
|
||||
|
||||
elif METHOD == 'LIST':
|
||||
domain = compat_urllib_parse_urlparse(test['url']).netloc
|
||||
domain = urllib.parse.urlparse(test['url']).netloc
|
||||
if not domain:
|
||||
print('\nFail: {}'.format(test['name']))
|
||||
continue
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
#!/usr/bin/env python3
|
||||
import optparse
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import optparse
|
||||
|
||||
import yt_dlp
|
||||
from yt_dlp.utils import shell_quote
|
||||
|
||||
|
||||
@@ -1,11 +1,15 @@
|
||||
#!/usr/bin/env python3
|
||||
import codecs
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import codecs
|
||||
import subprocess
|
||||
|
||||
from yt_dlp.aes import aes_encrypt, key_expansion
|
||||
from yt_dlp.utils import intlist_to_bytes
|
||||
|
||||
|
||||
@@ -9,14 +9,19 @@ from ..utils import (
|
||||
write_string,
|
||||
)
|
||||
|
||||
# These bloat the lazy_extractors, so allow them to passthrough silently
|
||||
ALLOWED_CLASSMETHODS = {'extract_from_webpage', 'get_testcases', 'get_webpage_testcases'}
|
||||
_WARNED = False
|
||||
|
||||
|
||||
class LazyLoadMetaClass(type):
|
||||
def __getattr__(cls, name):
|
||||
# "_TESTS" bloat the lazy_extractors
|
||||
if '_real_class' not in cls.__dict__ and name != 'get_testcases':
|
||||
write_string(
|
||||
'WARNING: Falling back to normal extractor since lazy extractor '
|
||||
f'{cls.__name__} does not have attribute {name}{bug_reports_message()}\n')
|
||||
global _WARNED
|
||||
if ('_real_class' not in cls.__dict__
|
||||
and name not in ALLOWED_CLASSMETHODS and not _WARNED):
|
||||
_WARNED = True
|
||||
write_string('WARNING: Falling back to normal extractor since lazy extractor '
|
||||
f'{cls.__name__} does not have attribute {name}{bug_reports_message()}\n')
|
||||
return getattr(cls.real_class, name)
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import optparse
|
||||
import re
|
||||
|
||||
|
||||
@@ -1,29 +1,75 @@
|
||||
#!/usr/bin/env python3
|
||||
import io
|
||||
import optparse
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
def read(fname):
|
||||
with open(fname, encoding='utf-8') as f:
|
||||
return f.read()
|
||||
import re
|
||||
|
||||
from devscripts.utils import (
|
||||
get_filename_args,
|
||||
read_file,
|
||||
read_version,
|
||||
write_file,
|
||||
)
|
||||
|
||||
# Get the version from yt_dlp/version.py without importing the package
|
||||
def read_version(fname):
|
||||
exec(compile(read(fname), fname, 'exec'))
|
||||
return locals()['__version__']
|
||||
VERBOSE_TMPL = '''
|
||||
- type: checkboxes
|
||||
id: verbose
|
||||
attributes:
|
||||
label: Provide verbose output that clearly demonstrates the problem
|
||||
options:
|
||||
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
|
||||
required: true
|
||||
- label: Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
|
||||
required: true
|
||||
- type: textarea
|
||||
id: log
|
||||
attributes:
|
||||
label: Complete Verbose Output
|
||||
description: |
|
||||
It should start like this:
|
||||
placeholder: |
|
||||
[debug] Command-line config: ['-vU', 'test:youtube']
|
||||
[debug] Portable config "yt-dlp.conf": ['-i']
|
||||
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
||||
[debug] yt-dlp version %(version)s [9d339c4] (win32_exe)
|
||||
[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
|
||||
[debug] Checking exe version: ffmpeg -bsfs
|
||||
[debug] Checking exe version: ffprobe -bsfs
|
||||
[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
|
||||
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
|
||||
[debug] Proxy map: {}
|
||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
|
||||
Latest version: %(version)s, Current version: %(version)s
|
||||
yt-dlp is up to date (%(version)s)
|
||||
<more lines>
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
'''.strip()
|
||||
|
||||
NO_SKIP = '''
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
|
||||
description: Fill all fields even if you think it is irrelevant for the issue
|
||||
options:
|
||||
- label: I understand that I will be **blocked** if I remove or skip any mandatory\\* field
|
||||
required: true
|
||||
'''.strip()
|
||||
|
||||
|
||||
def main():
|
||||
parser = optparse.OptionParser(usage='%prog INFILE OUTFILE')
|
||||
options, args = parser.parse_args()
|
||||
if len(args) != 2:
|
||||
parser.error('Expected an input and an output filename')
|
||||
fields = {'version': read_version(), 'no_skip': NO_SKIP}
|
||||
fields['verbose'] = VERBOSE_TMPL % fields
|
||||
fields['verbose_optional'] = re.sub(r'(\n\s+validations:)?\n\s+required: true', '', fields['verbose'])
|
||||
|
||||
infile, outfile = args
|
||||
with open(outfile, 'w', encoding='utf-8') as outf:
|
||||
outf.write(
|
||||
read(infile) % {'version': read_version('yt_dlp/version.py')})
|
||||
infile, outfile = get_filename_args(has_infile=True)
|
||||
write_file(outfile, read_file(infile) % fields)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@@ -1,39 +1,51 @@
|
||||
#!/usr/bin/env python3
|
||||
import optparse
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from inspect import getsource
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
from inspect import getsource
|
||||
|
||||
from devscripts.utils import get_filename_args, read_file, write_file
|
||||
|
||||
NO_ATTR = object()
|
||||
STATIC_CLASS_PROPERTIES = ['IE_NAME', 'IE_DESC', 'SEARCH_KEY', '_WORKING', '_NETRC_MACHINE', 'age_limit']
|
||||
STATIC_CLASS_PROPERTIES = [
|
||||
'IE_NAME', '_ENABLED', '_VALID_URL', # Used for URL matching
|
||||
'_WORKING', 'IE_DESC', '_NETRC_MACHINE', 'SEARCH_KEY', # Used for --extractor-descriptions
|
||||
'age_limit', # Used for --age-limit (evaluated)
|
||||
'_RETURN_TYPE', # Accessed in CLI only with instance (evaluated)
|
||||
]
|
||||
CLASS_METHODS = [
|
||||
'ie_key', 'working', 'description', 'suitable', '_match_valid_url', '_match_id', 'get_temp_id', 'is_suitable'
|
||||
'ie_key', 'suitable', '_match_valid_url', # Used for URL matching
|
||||
'working', 'get_temp_id', '_match_id', # Accessed just before instance creation
|
||||
'description', # Used for --extractor-descriptions
|
||||
'is_suitable', # Used for --age-limit
|
||||
'supports_login', 'is_single_video', # Accessed in CLI only with instance
|
||||
]
|
||||
IE_TEMPLATE = '''
|
||||
class {name}({bases}):
|
||||
_module = {module!r}
|
||||
'''
|
||||
with open('devscripts/lazy_load_template.py', encoding='utf-8') as f:
|
||||
MODULE_TEMPLATE = f.read()
|
||||
MODULE_TEMPLATE = read_file('devscripts/lazy_load_template.py')
|
||||
|
||||
|
||||
def main():
|
||||
parser = optparse.OptionParser(usage='%prog [OUTFILE.py]')
|
||||
args = parser.parse_args()[1] or ['yt_dlp/extractor/lazy_extractors.py']
|
||||
if len(args) != 1:
|
||||
parser.error('Expected only an output filename')
|
||||
|
||||
lazy_extractors_filename = args[0]
|
||||
lazy_extractors_filename = get_filename_args(default_outfile='yt_dlp/extractor/lazy_extractors.py')
|
||||
if os.path.exists(lazy_extractors_filename):
|
||||
os.remove(lazy_extractors_filename)
|
||||
|
||||
_ALL_CLASSES = get_all_ies() # Must be before import
|
||||
|
||||
import yt_dlp.plugins
|
||||
from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor
|
||||
|
||||
# Filter out plugins
|
||||
_ALL_CLASSES = [cls for cls in _ALL_CLASSES if not cls.__module__.startswith(f'{yt_dlp.plugins.PACKAGE_NAME}.')]
|
||||
|
||||
DummyInfoExtractor = type('InfoExtractor', (InfoExtractor,), {'IE_NAME': NO_ATTR})
|
||||
module_src = '\n'.join((
|
||||
MODULE_TEMPLATE,
|
||||
@@ -43,20 +55,20 @@ def main():
|
||||
*build_ies(_ALL_CLASSES, (InfoExtractor, SearchInfoExtractor), DummyInfoExtractor),
|
||||
))
|
||||
|
||||
with open(lazy_extractors_filename, 'wt', encoding='utf-8') as f:
|
||||
f.write(f'{module_src}\n')
|
||||
write_file(lazy_extractors_filename, f'{module_src}\n')
|
||||
|
||||
|
||||
def get_all_ies():
|
||||
PLUGINS_DIRNAME = 'ytdlp_plugins'
|
||||
BLOCKED_DIRNAME = f'{PLUGINS_DIRNAME}_blocked'
|
||||
if os.path.exists(PLUGINS_DIRNAME):
|
||||
os.rename(PLUGINS_DIRNAME, BLOCKED_DIRNAME)
|
||||
# os.rename cannot be used, e.g. in Docker. See https://github.com/yt-dlp/yt-dlp/pull/4958
|
||||
shutil.move(PLUGINS_DIRNAME, BLOCKED_DIRNAME)
|
||||
try:
|
||||
from yt_dlp.extractor import _ALL_CLASSES
|
||||
from yt_dlp.extractor.extractors import _ALL_CLASSES
|
||||
finally:
|
||||
if os.path.exists(BLOCKED_DIRNAME):
|
||||
os.rename(BLOCKED_DIRNAME, PLUGINS_DIRNAME)
|
||||
shutil.move(BLOCKED_DIRNAME, PLUGINS_DIRNAME)
|
||||
return _ALL_CLASSES
|
||||
|
||||
|
||||
@@ -91,7 +103,7 @@ def sort_ies(ies, ignored_bases):
|
||||
for c in classes[:]:
|
||||
bases = set(c.__bases__) - {object, *ignored_bases}
|
||||
restart = False
|
||||
for b in bases:
|
||||
for b in sorted(bases, key=lambda x: x.__name__):
|
||||
if b not in classes and b not in returned_classes:
|
||||
assert b.__name__ != 'GenericIE', 'Cannot inherit from GenericIE'
|
||||
classes.insert(0, b)
|
||||
@@ -113,11 +125,6 @@ def build_lazy_ie(ie, name, attr_base):
|
||||
}.get(base.__name__, base.__name__) for base in ie.__bases__)
|
||||
|
||||
s = IE_TEMPLATE.format(name=name, module=ie.__module__, bases=bases)
|
||||
valid_url = getattr(ie, '_VALID_URL', None)
|
||||
if not valid_url and hasattr(ie, '_make_valid_url'):
|
||||
valid_url = ie._make_valid_url()
|
||||
if valid_url:
|
||||
s += f' _VALID_URL = {valid_url!r}\n'
|
||||
return s + '\n'.join(extra_ie_code(ie, attr_base))
|
||||
|
||||
|
||||
|
||||
@@ -1,30 +1,83 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# yt-dlp --help | make_readme.py
|
||||
# This must be run in a console of correct width
|
||||
import re
|
||||
"""
|
||||
yt-dlp --help | make_readme.py
|
||||
This must be run in a console of correct width
|
||||
"""
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import functools
|
||||
import re
|
||||
|
||||
from devscripts.utils import read_file, write_file
|
||||
|
||||
README_FILE = 'README.md'
|
||||
|
||||
OPTIONS_START = 'General Options:'
|
||||
OPTIONS_END = 'CONFIGURATION'
|
||||
EPILOG_START = 'See full documentation'
|
||||
ALLOWED_OVERSHOOT = 2
|
||||
|
||||
DISABLE_PATCH = object()
|
||||
|
||||
|
||||
helptext = sys.stdin.read()
|
||||
if isinstance(helptext, bytes):
|
||||
helptext = helptext.decode()
|
||||
def take_section(text, start=None, end=None, *, shift=0):
|
||||
return text[
|
||||
text.index(start) + shift if start else None:
|
||||
text.index(end) + shift if end else None
|
||||
]
|
||||
|
||||
start, end = helptext.index(f'\n {OPTIONS_START}'), helptext.index(f'\n{EPILOG_START}')
|
||||
options = re.sub(r'(?m)^ (\w.+)$', r'## \1', helptext[start + 1: end + 1])
|
||||
|
||||
with open(README_FILE, encoding='utf-8') as f:
|
||||
readme = f.read()
|
||||
def apply_patch(text, patch):
|
||||
return text if patch[0] is DISABLE_PATCH else re.sub(*patch, text)
|
||||
|
||||
header = readme[:readme.index(f'## {OPTIONS_START}')]
|
||||
footer = readme[readme.index(f'# {OPTIONS_END}'):]
|
||||
|
||||
with open(README_FILE, 'w', encoding='utf-8') as f:
|
||||
for part in (header, options, footer):
|
||||
f.write(part)
|
||||
options = take_section(sys.stdin.read(), f'\n {OPTIONS_START}', f'\n{EPILOG_START}', shift=1)
|
||||
|
||||
max_width = max(map(len, options.split('\n')))
|
||||
switch_col_width = len(re.search(r'(?m)^\s{5,}', options).group())
|
||||
delim = f'\n{" " * switch_col_width}'
|
||||
|
||||
PATCHES = (
|
||||
( # Standardize update message
|
||||
r'(?m)^( -U, --update\s+).+(\n \s.+)*$',
|
||||
r'\1Update this program to the latest version',
|
||||
),
|
||||
( # Headings
|
||||
r'(?m)^ (\w.+\n)( (?=\w))?',
|
||||
r'## \1'
|
||||
),
|
||||
( # Do not split URLs
|
||||
rf'({delim[:-1]})? (?P<label>\[\S+\] )?(?P<url>https?({delim})?:({delim})?/({delim})?/(({delim})?\S+)+)\s',
|
||||
lambda mobj: ''.join((delim, mobj.group('label') or '', re.sub(r'\s+', '', mobj.group('url')), '\n'))
|
||||
),
|
||||
( # Do not split "words"
|
||||
rf'(?m)({delim}\S+)+$',
|
||||
lambda mobj: ''.join((delim, mobj.group(0).replace(delim, '')))
|
||||
),
|
||||
( # Allow overshooting last line
|
||||
rf'(?m)^(?P<prev>.+)${delim}(?P<current>.+)$(?!{delim})',
|
||||
lambda mobj: (mobj.group().replace(delim, ' ')
|
||||
if len(mobj.group()) - len(delim) + 1 <= max_width + ALLOWED_OVERSHOOT
|
||||
else mobj.group())
|
||||
),
|
||||
( # Avoid newline when a space is available b/w switch and description
|
||||
DISABLE_PATCH, # This creates issues with prepare_manpage
|
||||
r'(?m)^(\s{4}-.{%d})(%s)' % (switch_col_width - 6, delim),
|
||||
r'\1 '
|
||||
),
|
||||
)
|
||||
|
||||
readme = read_file(README_FILE)
|
||||
|
||||
write_file(README_FILE, ''.join((
|
||||
take_section(readme, end=f'## {OPTIONS_START}'),
|
||||
functools.reduce(apply_patch, PATCHES, options),
|
||||
take_section(readme, f'# {OPTIONS_END}'),
|
||||
)))
|
||||
|
||||
@@ -1,23 +1,19 @@
|
||||
#!/usr/bin/env python3
|
||||
import optparse
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
from devscripts.utils import get_filename_args, write_file
|
||||
from yt_dlp.extractor import list_extractor_classes
|
||||
|
||||
|
||||
def main():
|
||||
parser = optparse.OptionParser(usage='%prog OUTFILE.md')
|
||||
_, args = parser.parse_args()
|
||||
if len(args) != 1:
|
||||
parser.error('Expected an output filename')
|
||||
|
||||
out = '\n'.join(ie.description() for ie in list_extractor_classes() if ie.IE_DESC is not False)
|
||||
|
||||
with open(args[0], 'w', encoding='utf-8') as outf:
|
||||
outf.write(f'# Supported sites\n{out}\n')
|
||||
write_file(get_filename_args(), f'# Supported sites\n{out}\n')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@@ -1,8 +1,22 @@
|
||||
#!/usr/bin/env python3
|
||||
import optparse
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import os.path
|
||||
import re
|
||||
|
||||
from devscripts.utils import (
|
||||
compose_functions,
|
||||
get_filename_args,
|
||||
read_file,
|
||||
write_file,
|
||||
)
|
||||
|
||||
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
README_FILE = os.path.join(ROOT_DIR, 'README.md')
|
||||
|
||||
@@ -21,25 +35,6 @@ yt\-dlp \- A youtube-dl fork with additional features and patches
|
||||
'''
|
||||
|
||||
|
||||
def main():
|
||||
parser = optparse.OptionParser(usage='%prog OUTFILE.md')
|
||||
options, args = parser.parse_args()
|
||||
if len(args) != 1:
|
||||
parser.error('Expected an output filename')
|
||||
|
||||
outfile, = args
|
||||
|
||||
with open(README_FILE, encoding='utf-8') as f:
|
||||
readme = f.read()
|
||||
|
||||
readme = filter_excluded_sections(readme)
|
||||
readme = move_sections(readme)
|
||||
readme = filter_options(readme)
|
||||
|
||||
with open(outfile, 'w', encoding='utf-8') as outf:
|
||||
outf.write(PREFIX + readme)
|
||||
|
||||
|
||||
def filter_excluded_sections(readme):
|
||||
EXCLUDED_SECTION_BEGIN_STRING = re.escape('<!-- MANPAGE: BEGIN EXCLUDED SECTION -->')
|
||||
EXCLUDED_SECTION_END_STRING = re.escape('<!-- MANPAGE: END EXCLUDED SECTION -->')
|
||||
@@ -91,5 +86,12 @@ def filter_options(readme):
|
||||
return readme.replace(section, options, 1)
|
||||
|
||||
|
||||
TRANSFORM = compose_functions(filter_excluded_sections, move_sections, filter_options)
|
||||
|
||||
|
||||
def main():
|
||||
write_file(get_filename_args(), PREFIX + TRANSFORM(read_file(README_FILE)))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
#!/bin/sh
|
||||
#!/usr/bin/env sh
|
||||
|
||||
if [ -z $1 ]; then
|
||||
if [ -z "$1" ]; then
|
||||
test_set='test'
|
||||
elif [ $1 = 'core' ]; then
|
||||
elif [ "$1" = 'core' ]; then
|
||||
test_set="-m not download"
|
||||
elif [ $1 = 'download' ]; then
|
||||
elif [ "$1" = 'download' ]; then
|
||||
test_set="-m download"
|
||||
else
|
||||
echo 'Invalid test type "'$1'". Use "core" | "download"'
|
||||
echo 'Invalid test type "'"$1"'". Use "core" | "download"'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
36 devscripts/set-variant.py (new file)
@@ -0,0 +1,36 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import argparse
import functools
import re

from devscripts.utils import compose_functions, read_file, write_file

VERSION_FILE = 'yt_dlp/version.py'


def parse_options():
    parser = argparse.ArgumentParser(description='Set the build variant of the package')
    parser.add_argument('variant', help='Name of the variant')
    parser.add_argument('-M', '--update-message', default=None, help='Message to show in -U')
    return parser.parse_args()


def property_setter(name, value):
    return functools.partial(re.sub, rf'(?m)^{name}\s*=\s*.+$', f'{name} = {value!r}')


opts = parse_options()
transform = compose_functions(
    property_setter('VARIANT', opts.variant),
    property_setter('UPDATE_HINT', opts.update_message)
)

write_file(VERSION_FILE, transform(read_file(VERSION_FILE)))
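Not part of the diff: a rough standalone sketch of what set-variant.py does, with the two helpers inlined and a sample string standing in for yt_dlp/version.py. The variant name and update message are hypothetical values.

import functools
import re

def property_setter(name, value):
    # Returns a callable that rewrites "NAME = ..." lines to the given value
    return functools.partial(re.sub, rf'(?m)^{name}\s*=\s*.+$', f'{name} = {value!r}')

def compose_functions(*functions):
    return lambda x: functools.reduce(lambda y, f: f(y), functions, x)

sample = "VARIANT = None\nUPDATE_HINT = None\n"
transform = compose_functions(
    property_setter('VARIANT', 'pip'),
    property_setter('UPDATE_HINT', 'Run "pip install -U yt-dlp" to update'),
)
print(transform(sample))
# VARIANT = 'pip'
# UPDATE_HINT = 'Run "pip install -U yt-dlp" to update'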
@@ -1,21 +1,28 @@
|
||||
#!/usr/bin/env python3
|
||||
import json
|
||||
|
||||
"""
|
||||
Usage: python3 ./devscripts/update-formulae.py <path-to-formulae-rb> <version>
|
||||
version can be either 0-aligned (yt-dlp version) or normalized (PyPi version)
|
||||
"""
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from yt_dlp.compat import compat_urllib_request
|
||||
|
||||
# usage: python3 ./devscripts/update-formulae.py <path-to-formulae-rb> <version>
|
||||
# version can be either 0-aligned (yt-dlp version) or normalized (PyPl version)
|
||||
import json
|
||||
import re
|
||||
import urllib.request
|
||||
|
||||
from devscripts.utils import read_file, write_file
|
||||
|
||||
filename, version = sys.argv[1:]
|
||||
|
||||
normalized_version = '.'.join(str(int(x)) for x in version.split('.'))
|
||||
|
||||
pypi_release = json.loads(compat_urllib_request.urlopen(
|
||||
pypi_release = json.loads(urllib.request.urlopen(
|
||||
'https://pypi.org/pypi/yt-dlp/%s/json' % normalized_version
|
||||
).read().decode())
|
||||
|
||||
@@ -24,11 +31,9 @@ tarball_file = next(x for x in pypi_release['urls'] if x['filename'].endswith('.
|
||||
sha256sum = tarball_file['digests']['sha256']
|
||||
url = tarball_file['url']
|
||||
|
||||
with open(filename) as r:
|
||||
formulae_text = r.read()
|
||||
formulae_text = read_file(filename)
|
||||
|
||||
formulae_text = re.sub(r'sha256 "[0-9a-f]*?"', 'sha256 "%s"' % sha256sum, formulae_text)
|
||||
formulae_text = re.sub(r'url "[^"]*?"', 'url "%s"' % url, formulae_text)
|
||||
formulae_text = re.sub(r'sha256 "[0-9a-f]*?"', 'sha256 "%s"' % sha256sum, formulae_text, count=1)
|
||||
formulae_text = re.sub(r'url "[^"]*?"', 'url "%s"' % url, formulae_text, count=1)
|
||||
|
||||
with open(filename, 'w') as w:
|
||||
w.write(formulae_text)
|
||||
write_file(filename, formulae_text)
|
||||
|
||||
@@ -1,30 +1,41 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import contextlib
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
with open('yt_dlp/version.py') as f:
|
||||
exec(compile(f.read(), 'yt_dlp/version.py', 'exec'))
|
||||
old_version = locals()['__version__']
|
||||
from devscripts.utils import read_version, write_file
|
||||
|
||||
old_version_list = old_version.split('.')
|
||||
|
||||
old_ver = '.'.join(old_version_list[:3])
|
||||
old_rev = old_version_list[3] if len(old_version_list) > 3 else ''
|
||||
def get_new_version(revision):
|
||||
version = datetime.utcnow().strftime('%Y.%m.%d')
|
||||
|
||||
ver = datetime.utcnow().strftime("%Y.%m.%d")
|
||||
if revision:
|
||||
assert revision.isdigit(), 'Revision must be a number'
|
||||
else:
|
||||
old_version = read_version().split('.')
|
||||
if version.split('.') == old_version[:3]:
|
||||
revision = str(int((old_version + [0])[3]) + 1)
|
||||
|
||||
rev = (sys.argv[1:] or [''])[0] # Use first argument, if present as revision number
|
||||
if not rev:
|
||||
rev = str(int(old_rev or 0) + 1) if old_ver == ver else ''
|
||||
return f'{version}.{revision}' if revision else version
|
||||
|
||||
VERSION = '.'.join((ver, rev)) if rev else ver
|
||||
|
||||
try:
|
||||
sp = subprocess.Popen(['git', 'rev-parse', '--short', 'HEAD'], stdout=subprocess.PIPE)
|
||||
GIT_HEAD = sp.communicate()[0].decode().strip() or None
|
||||
except Exception:
|
||||
GIT_HEAD = None
|
||||
def get_git_head():
|
||||
with contextlib.suppress(Exception):
|
||||
sp = subprocess.Popen(['git', 'rev-parse', '--short', 'HEAD'], stdout=subprocess.PIPE)
|
||||
return sp.communicate()[0].decode().strip() or None
|
||||
|
||||
|
||||
VERSION = get_new_version((sys.argv + [''])[1])
|
||||
GIT_HEAD = get_git_head()
|
||||
|
||||
VERSION_FILE = f'''\
|
||||
# Autogenerated by devscripts/update-version.py
|
||||
@@ -32,10 +43,14 @@ VERSION_FILE = f'''\
|
||||
__version__ = {VERSION!r}
|
||||
|
||||
RELEASE_GIT_HEAD = {GIT_HEAD!r}
|
||||
|
||||
VARIANT = None
|
||||
|
||||
UPDATE_HINT = None
|
||||
'''
|
||||
|
||||
with open('yt_dlp/version.py', 'wt') as f:
|
||||
f.write(VERSION_FILE)
|
||||
|
||||
print('::set-output name=ytdlp_version::' + VERSION)
|
||||
write_file('yt_dlp/version.py', VERSION_FILE)
|
||||
github_output = os.getenv('GITHUB_OUTPUT')
|
||||
if github_output:
|
||||
write_file(github_output, f'ytdlp_version={VERSION}\n', 'a')
|
||||
print(f'\nVersion = {VERSION}, Git HEAD = {GIT_HEAD}')
|
||||
|
||||
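Not part of the diff: the version scheme above is date-based, with a numeric revision appended when more than one release happens on the same day. A minimal standalone sketch of the same logic, with the previous version passed in as a plain string instead of being read via read_version():

from datetime import datetime

def get_new_version(revision, old_version='2023.01.06'):
    version = datetime.utcnow().strftime('%Y.%m.%d')
    if not revision:
        old = old_version.split('.')
        if version.split('.') == old[:3]:
            # Same day as the previous release: bump (or start) the revision
            revision = str(int((old + [0])[3]) + 1)
    return f'{version}.{revision}' if revision else version

print(get_new_version(''))  # today's date, or '2023.01.06.1' if run on 2023-01-06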
35 devscripts/utils.py (new file)
@@ -0,0 +1,35 @@
import argparse
import functools


def read_file(fname):
    with open(fname, encoding='utf-8') as f:
        return f.read()


def write_file(fname, content, mode='w'):
    with open(fname, mode, encoding='utf-8') as f:
        return f.write(content)


# Get the version without importing the package
def read_version(fname='yt_dlp/version.py'):
    exec(compile(read_file(fname), fname, 'exec'))
    return locals()['__version__']


def get_filename_args(has_infile=False, default_outfile=None):
    parser = argparse.ArgumentParser()
    if has_infile:
        parser.add_argument('infile', help='Input file')
    kwargs = {'nargs': '?', 'default': default_outfile} if default_outfile else {}
    parser.add_argument('outfile', **kwargs, help='Output file')

    opts = parser.parse_args()
    if has_infile:
        return opts.infile, opts.outfile
    return opts.outfile


def compose_functions(*functions):
    return lambda x: functools.reduce(lambda y, f: f(y), functions, x)
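Not part of the diff: read_version() avoids importing yt_dlp (and therefore its optional dependencies) by exec-ing version.py as plain source. A minimal standalone sketch of the same idea, with an inline stand-in for yt_dlp/version.py:

# The string below stands in for the contents of yt_dlp/version.py
version_py = "__version__ = '2023.01.06'\nRELEASE_GIT_HEAD = None\n"
namespace = {}
exec(compile(version_py, 'version.py', 'exec'), namespace)
print(namespace['__version__'])  # 2023.01.06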
@@ -1,9 +1,12 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import yt_dlp

ZSH_COMPLETION_FILE = "completions/zsh/_yt-dlp"
82 pyinst.py
@@ -1,43 +1,31 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
import platform
|
||||
|
||||
from PyInstaller.__main__ import run as run_pyinstaller
|
||||
|
||||
OS_NAME = platform.system()
|
||||
if OS_NAME == 'Windows':
|
||||
from PyInstaller.utils.win32.versioninfo import (
|
||||
FixedFileInfo,
|
||||
SetVersion,
|
||||
StringFileInfo,
|
||||
StringStruct,
|
||||
StringTable,
|
||||
VarFileInfo,
|
||||
VarStruct,
|
||||
VSVersionInfo,
|
||||
)
|
||||
elif OS_NAME == 'Darwin':
|
||||
pass
|
||||
else:
|
||||
raise Exception(f'{OS_NAME} is not supported')
|
||||
from devscripts.utils import read_version
|
||||
|
||||
ARCH = platform.architecture()[0][:2]
|
||||
OS_NAME, MACHINE, ARCH = sys.platform, platform.machine().lower(), platform.architecture()[0][:2]
|
||||
if MACHINE in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
|
||||
MACHINE = 'x86' if ARCH == '32' else ''
|
||||
|
||||
|
||||
def main():
|
||||
opts = parse_options()
|
||||
version = read_version('yt_dlp/version.py')
|
||||
opts, version = parse_options(), read_version()
|
||||
|
||||
onedir = '--onedir' in opts or '-D' in opts
|
||||
if not onedir and '-F' not in opts and '--onefile' not in opts:
|
||||
opts.append('--onefile')
|
||||
|
||||
name = 'yt-dlp%s' % ('_macos' if OS_NAME == 'Darwin' else '_x86' if ARCH == '32' else '')
|
||||
final_file = ''.join((
|
||||
'dist/', f'{name}/' if onedir else '', name, '.exe' if OS_NAME == 'Windows' else ''))
|
||||
|
||||
print(f'Building yt-dlp v{version} {ARCH}bit for {OS_NAME} with options {opts}')
|
||||
name, final_file = exe(onedir)
|
||||
print(f'Building yt-dlp v{version} for {OS_NAME} {platform.machine()} with options {opts}')
|
||||
print('Remember to update the version using "devscripts/update-version.py"')
|
||||
if not os.path.isfile('yt_dlp/extractor/lazy_extractors.py'):
|
||||
print('WARNING: Building without lazy_extractors. Run '
|
||||
@@ -49,9 +37,6 @@ def main():
|
||||
'--icon=devscripts/logo.ico',
|
||||
'--upx-exclude=vcruntime140.dll',
|
||||
'--noconfirm',
|
||||
# NB: Modules that are only imported dynamically must be added here.
|
||||
# --collect-submodules may not work correctly if user has a yt-dlp installed via PIP
|
||||
'--hidden-import=yt_dlp.compat._legacy',
|
||||
*dependency_options(),
|
||||
*opts,
|
||||
'yt_dlp/__main__.py',
|
||||
@@ -63,7 +48,7 @@ def main():
|
||||
|
||||
|
||||
def parse_options():
|
||||
# Compatability with older arguments
|
||||
# Compatibility with older arguments
|
||||
opts = sys.argv[1:]
|
||||
if opts[0:1] in (['32'], ['64']):
|
||||
if ARCH != opts[0]:
|
||||
@@ -72,11 +57,19 @@ def parse_options():
|
||||
return opts
|
||||
|
||||
|
||||
# Get the version from yt_dlp/version.py without importing the package
|
||||
def read_version(fname):
|
||||
with open(fname, encoding='utf-8') as f:
|
||||
exec(compile(f.read(), fname, 'exec'))
|
||||
return locals()['__version__']
|
||||
def exe(onedir):
|
||||
"""@returns (name, path)"""
|
||||
name = '_'.join(filter(None, (
|
||||
'yt-dlp',
|
||||
{'win32': '', 'darwin': 'macos'}.get(OS_NAME, OS_NAME),
|
||||
MACHINE,
|
||||
)))
|
||||
return name, ''.join(filter(None, (
|
||||
'dist/',
|
||||
onedir and f'{name}/',
|
||||
name,
|
||||
OS_NAME == 'win32' and '.exe'
|
||||
)))
|
||||
|
||||
|
||||
def version_to_list(version):
|
||||
@@ -87,7 +80,7 @@ def version_to_list(version):
|
||||
def dependency_options():
|
||||
# Due to the current implementation, these are auto-detected, but explicitly add them just in case
|
||||
dependencies = [pycryptodome_module(), 'mutagen', 'brotli', 'certifi', 'websockets']
|
||||
excluded_modules = ['test', 'ytdlp_plugins', 'youtube_dl', 'youtube_dlc']
|
||||
excluded_modules = ('youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts')
|
||||
|
||||
yield from (f'--hidden-import={module}' for module in dependencies)
|
||||
yield '--collect-submodules=websockets'
|
||||
@@ -109,13 +102,24 @@ def pycryptodome_module():
|
||||
|
||||
|
||||
def set_version_info(exe, version):
|
||||
if OS_NAME == 'Windows':
|
||||
if OS_NAME == 'win32':
|
||||
windows_set_version(exe, version)
|
||||
|
||||
|
||||
def windows_set_version(exe, version):
|
||||
from PyInstaller.utils.win32.versioninfo import (
|
||||
FixedFileInfo,
|
||||
SetVersion,
|
||||
StringFileInfo,
|
||||
StringStruct,
|
||||
StringTable,
|
||||
VarFileInfo,
|
||||
VarStruct,
|
||||
VSVersionInfo,
|
||||
)
|
||||
|
||||
version_list = version_to_list(version)
|
||||
suffix = '_x86' if ARCH == '32' else ''
|
||||
suffix = MACHINE and f'_{MACHINE}'
|
||||
SetVersion(exe, VSVersionInfo(
|
||||
ffi=FixedFileInfo(
|
||||
filevers=version_list,
|
||||
@@ -129,9 +133,9 @@ def windows_set_version(exe, version):
|
||||
),
|
||||
kids=[
|
||||
StringFileInfo([StringTable('040904B0', [
|
||||
StringStruct('Comments', 'yt-dlp%s Command Line Interface.' % suffix),
|
||||
StringStruct('Comments', 'yt-dlp%s Command Line Interface' % suffix),
|
||||
StringStruct('CompanyName', 'https://github.com/yt-dlp'),
|
||||
StringStruct('FileDescription', 'yt-dlp%s' % (' (32 Bit)' if ARCH == '32' else '')),
|
||||
StringStruct('FileDescription', 'yt-dlp%s' % (MACHINE and f' ({MACHINE})')),
|
||||
StringStruct('FileVersion', version),
|
||||
StringStruct('InternalName', f'yt-dlp{suffix}'),
|
||||
StringStruct('LegalCopyright', 'pukkandan.ytdlp@gmail.com | UNLICENSE'),
|
||||
|
||||
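Not part of the diff: the new exe() helper derives the artifact name from sys.platform and the normalised machine string. A quick sketch of the naming it produces, with the platform values hard-coded for illustration:

def artifact_name(os_name, machine, arch):
    # Mirrors the MACHINE normalisation shown above
    if machine in ('x86', 'x86_64', 'amd64', 'i386', 'i686'):
        machine = 'x86' if arch == '32' else ''
    return '_'.join(filter(None, (
        'yt-dlp', {'win32': '', 'darwin': 'macos'}.get(os_name, os_name), machine)))

print(artifact_name('win32', 'amd64', '64'))    # yt-dlp
print(artifact_name('win32', 'x86', '32'))      # yt-dlp_x86
print(artifact_name('darwin', 'x86_64', '64'))  # yt-dlp_macos
print(artifact_name('linux', 'aarch64', '64'))  # yt-dlp_linux_aarch64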
5 pyproject.toml (new file)
@@ -0,0 +1,5 @@
[build-system]
build-backend = 'setuptools.build_meta'
# https://github.com/yt-dlp/yt-dlp/issues/5941
# https://github.com/pypa/distutils/issues/17
requires = ['setuptools > 50']
@@ -1,4 +0,0 @@
[pytest]
addopts = -ra -v --strict-markers
markers =
    download
@@ -3,4 +3,4 @@ pycryptodomex
websockets
brotli; platform_python_implementation=='CPython'
brotlicffi; platform_python_implementation!='CPython'
certifi
certifi
47 setup.cfg
@@ -1,6 +1,49 @@
|
||||
[wheel]
|
||||
universal = True
|
||||
universal = true
|
||||
|
||||
|
||||
[flake8]
|
||||
exclude = devscripts/lazy_load_template.py,devscripts/make_issue_template.py,setup.py,build,.git,venv
|
||||
exclude = build,venv,.tox,.git,.pytest_cache
|
||||
ignore = E402,E501,E731,E741,W503
|
||||
max_line_length = 120
|
||||
per_file_ignores =
|
||||
devscripts/lazy_load_template.py: F401
|
||||
|
||||
|
||||
[autoflake]
|
||||
ignore-init-module-imports = true
|
||||
ignore-pass-after-docstring = true
|
||||
remove-all-unused-imports = true
|
||||
remove-duplicate-keys = true
|
||||
remove-unused-variables = true
|
||||
|
||||
|
||||
[tool:pytest]
|
||||
addopts = -ra -v --strict-markers
|
||||
markers =
|
||||
download
|
||||
|
||||
|
||||
[tox:tox]
|
||||
skipsdist = true
|
||||
envlist = py{36,37,38,39,310,311},pypy{36,37,38,39}
|
||||
skip_missing_interpreters = true
|
||||
|
||||
[testenv] # tox
|
||||
deps =
|
||||
pytest
|
||||
commands = pytest {posargs:"-m not download"}
|
||||
passenv = HOME # For test_compat_expanduser
|
||||
setenv =
|
||||
# PYTHONWARNINGS = error # Catches PIP's warnings too
|
||||
|
||||
|
||||
[isort]
|
||||
py_version = 37
|
||||
multi_line_output = VERTICAL_HANGING_INDENT
|
||||
line_length = 80
|
||||
reverse_relative = true
|
||||
ensure_newline_before_comments = true
|
||||
include_trailing_comma = true
|
||||
known_first_party =
|
||||
test
|
||||
|
||||
181 setup.py
@@ -1,6 +1,12 @@
|
||||
#!/usr/bin/env python3
|
||||
import os.path
|
||||
|
||||
# Allow execution from anywhere
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
import subprocess
|
||||
import warnings
|
||||
|
||||
try:
|
||||
@@ -9,63 +15,63 @@ try:
|
||||
except ImportError:
|
||||
from distutils.core import Command, setup
|
||||
setuptools_available = False
|
||||
from distutils.spawn import spawn
|
||||
|
||||
from devscripts.utils import read_file, read_version
|
||||
|
||||
def read(fname):
|
||||
with open(fname, encoding='utf-8') as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
# Get the version from yt_dlp/version.py without importing the package
|
||||
def read_version(fname):
|
||||
exec(compile(read(fname), fname, 'exec'))
|
||||
return locals()['__version__']
|
||||
|
||||
|
||||
VERSION = read_version('yt_dlp/version.py')
|
||||
VERSION = read_version()
|
||||
|
||||
DESCRIPTION = 'A youtube-dl fork with additional features and patches'
|
||||
|
||||
LONG_DESCRIPTION = '\n\n'.join((
|
||||
'Official repository: <https://github.com/yt-dlp/yt-dlp>',
|
||||
'**PS**: Some links in this document will not work since this is a copy of the README.md from Github',
|
||||
read('README.md')))
|
||||
read_file('README.md')))
|
||||
|
||||
REQUIREMENTS = read('requirements.txt').splitlines()
|
||||
REQUIREMENTS = read_file('requirements.txt').splitlines()
|
||||
|
||||
|
||||
if sys.argv[1:2] == ['py2exe']:
|
||||
import py2exe
|
||||
def packages():
|
||||
if setuptools_available:
|
||||
return find_packages(exclude=('youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts'))
|
||||
|
||||
return [
|
||||
'yt_dlp', 'yt_dlp.extractor', 'yt_dlp.downloader', 'yt_dlp.postprocessor', 'yt_dlp.compat',
|
||||
]
|
||||
|
||||
|
||||
def py2exe_params():
|
||||
warnings.warn(
|
||||
'py2exe builds do not support pycryptodomex and needs VC++14 to run. '
|
||||
'The recommended way is to use "pyinst.py" to build using pyinstaller')
|
||||
params = {
|
||||
'It is recommended to run "pyinst.py" to build using pyinstaller instead')
|
||||
|
||||
return {
|
||||
'console': [{
|
||||
'script': './yt_dlp/__main__.py',
|
||||
'dest_base': 'yt-dlp',
|
||||
'icon_resources': [(1, 'devscripts/logo.ico')],
|
||||
}],
|
||||
'version_info': {
|
||||
'version': VERSION,
|
||||
'description': DESCRIPTION,
|
||||
'comments': LONG_DESCRIPTION.split('\n')[0],
|
||||
'product_name': 'yt-dlp',
|
||||
'product_version': VERSION,
|
||||
}],
|
||||
'options': {
|
||||
'py2exe': {
|
||||
'bundle_files': 0,
|
||||
'compressed': 1,
|
||||
'optimize': 2,
|
||||
'dist_dir': './dist',
|
||||
'excludes': ['Crypto', 'Cryptodome'], # py2exe cannot import Crypto
|
||||
'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
|
||||
# Modules that are only imported dynamically must be added here
|
||||
'includes': ['yt_dlp.compat._legacy'],
|
||||
}
|
||||
},
|
||||
'zipfile': None
|
||||
'options': {
|
||||
'bundle_files': 0,
|
||||
'compressed': 1,
|
||||
'optimize': 2,
|
||||
'dist_dir': './dist',
|
||||
'excludes': ['Crypto', 'Cryptodome'], # py2exe cannot import Crypto
|
||||
'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
|
||||
# Modules that are only imported dynamically must be added here
|
||||
'includes': ['yt_dlp.compat._legacy'],
|
||||
},
|
||||
'zipfile': None,
|
||||
}
|
||||
|
||||
else:
|
||||
|
||||
def build_params():
|
||||
files_spec = [
|
||||
('share/bash-completion/completions', ['completions/bash/yt-dlp']),
|
||||
('share/zsh/site-functions', ['completions/zsh/_yt-dlp']),
|
||||
@@ -73,25 +79,23 @@ else:
|
||||
('share/doc/yt_dlp', ['README.txt']),
|
||||
('share/man/man1', ['yt-dlp.1'])
|
||||
]
|
||||
root = os.path.dirname(os.path.abspath(__file__))
|
||||
data_files = []
|
||||
for dirname, files in files_spec:
|
||||
resfiles = []
|
||||
for fn in files:
|
||||
if not os.path.exists(fn):
|
||||
warnings.warn('Skipping file %s since it is not present. Try running `make pypi-files` first' % fn)
|
||||
warnings.warn(f'Skipping file {fn} since it is not present. Try running " make pypi-files " first')
|
||||
else:
|
||||
resfiles.append(fn)
|
||||
data_files.append((dirname, resfiles))
|
||||
|
||||
params = {
|
||||
'data_files': data_files,
|
||||
}
|
||||
params = {'data_files': data_files}
|
||||
|
||||
if setuptools_available:
|
||||
params['entry_points'] = {'console_scripts': ['yt-dlp = yt_dlp:main']}
|
||||
else:
|
||||
params['scripts'] = ['yt-dlp']
|
||||
return params
|
||||
|
||||
|
||||
class build_lazy_extractors(Command):
|
||||
@@ -105,49 +109,64 @@ class build_lazy_extractors(Command):
|
||||
pass
|
||||
|
||||
def run(self):
|
||||
spawn([sys.executable, 'devscripts/make_lazy_extractors.py', 'yt_dlp/extractor/lazy_extractors.py'],
|
||||
dry_run=self.dry_run)
|
||||
if self.dry_run:
|
||||
print('Skipping build of lazy extractors in dry run mode')
|
||||
return
|
||||
subprocess.run([sys.executable, 'devscripts/make_lazy_extractors.py'])
|
||||
|
||||
|
||||
if setuptools_available:
|
||||
packages = find_packages(exclude=('youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins'))
|
||||
else:
|
||||
packages = ['yt_dlp', 'yt_dlp.downloader', 'yt_dlp.extractor', 'yt_dlp.postprocessor']
|
||||
def main():
|
||||
if sys.argv[1:2] == ['py2exe']:
|
||||
params = py2exe_params()
|
||||
try:
|
||||
from py2exe import freeze
|
||||
except ImportError:
|
||||
import py2exe # noqa: F401
|
||||
warnings.warn('You are using an outdated version of py2exe. Support for this version will be removed in the future')
|
||||
params['console'][0].update(params.pop('version_info'))
|
||||
params['options'] = {'py2exe': params.pop('options')}
|
||||
else:
|
||||
return freeze(**params)
|
||||
else:
|
||||
params = build_params()
|
||||
|
||||
setup(
|
||||
name='yt-dlp',
|
||||
version=VERSION,
|
||||
maintainer='pukkandan',
|
||||
maintainer_email='pukkandan.ytdlp@gmail.com',
|
||||
description=DESCRIPTION,
|
||||
long_description=LONG_DESCRIPTION,
|
||||
long_description_content_type='text/markdown',
|
||||
url='https://github.com/yt-dlp/yt-dlp',
|
||||
packages=packages(),
|
||||
install_requires=REQUIREMENTS,
|
||||
python_requires='>=3.7',
|
||||
project_urls={
|
||||
'Documentation': 'https://github.com/yt-dlp/yt-dlp#readme',
|
||||
'Source': 'https://github.com/yt-dlp/yt-dlp',
|
||||
'Tracker': 'https://github.com/yt-dlp/yt-dlp/issues',
|
||||
'Funding': 'https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators',
|
||||
},
|
||||
classifiers=[
|
||||
'Topic :: Multimedia :: Video',
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Environment :: Console',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: Implementation',
|
||||
'Programming Language :: Python :: Implementation :: CPython',
|
||||
'Programming Language :: Python :: Implementation :: PyPy',
|
||||
'License :: Public Domain',
|
||||
'Operating System :: OS Independent',
|
||||
],
|
||||
cmdclass={'build_lazy_extractors': build_lazy_extractors},
|
||||
**params
|
||||
)
|
||||
|
||||
|
||||
setup(
|
||||
name='yt-dlp',
|
||||
version=VERSION,
|
||||
maintainer='pukkandan',
|
||||
maintainer_email='pukkandan.ytdlp@gmail.com',
|
||||
description=DESCRIPTION,
|
||||
long_description=LONG_DESCRIPTION,
|
||||
long_description_content_type='text/markdown',
|
||||
url='https://github.com/yt-dlp/yt-dlp',
|
||||
packages=packages,
|
||||
install_requires=REQUIREMENTS,
|
||||
project_urls={
|
||||
'Documentation': 'https://github.com/yt-dlp/yt-dlp#readme',
|
||||
'Source': 'https://github.com/yt-dlp/yt-dlp',
|
||||
'Tracker': 'https://github.com/yt-dlp/yt-dlp/issues',
|
||||
'Funding': 'https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators',
|
||||
},
|
||||
classifiers=[
|
||||
'Topic :: Multimedia :: Video',
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Environment :: Console',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Programming Language :: Python :: Implementation',
|
||||
'Programming Language :: Python :: Implementation :: CPython',
|
||||
'Programming Language :: Python :: Implementation :: PyPy',
|
||||
'License :: Public Domain',
|
||||
'Operating System :: OS Independent',
|
||||
],
|
||||
python_requires='>=3.6',
|
||||
|
||||
cmdclass={'build_lazy_extractors': build_lazy_extractors},
|
||||
**params
|
||||
)
|
||||
main()
|
||||
|
||||
File diff suppressed because it is too large
@@ -9,7 +9,7 @@ import types
|
||||
|
||||
import yt_dlp.extractor
|
||||
from yt_dlp import YoutubeDL
|
||||
from yt_dlp.compat import compat_os_name, compat_str
|
||||
from yt_dlp.compat import compat_os_name
|
||||
from yt_dlp.utils import preferredencoding, write_string
|
||||
|
||||
if 'pytest' in sys.modules:
|
||||
@@ -44,7 +44,7 @@ def try_rm(filename):
|
||||
raise
|
||||
|
||||
|
||||
def report_warning(message):
|
||||
def report_warning(message, *args, **kwargs):
|
||||
'''
|
||||
Print the message to stderr, it will be prefixed with 'WARNING:'
|
||||
If stderr is a tty file the 'WARNING:' will be colored
|
||||
@@ -67,10 +67,10 @@ class FakeYDL(YoutubeDL):
|
||||
super().__init__(params, auto_init=False)
|
||||
self.result = []
|
||||
|
||||
def to_screen(self, s, skip_eol=None):
|
||||
def to_screen(self, s, *args, **kwargs):
|
||||
print(s)
|
||||
|
||||
def trouble(self, s, tb=None):
|
||||
def trouble(self, s, *args, **kwargs):
|
||||
raise Exception(s)
|
||||
|
||||
def download(self, x):
|
||||
@@ -80,10 +80,10 @@ class FakeYDL(YoutubeDL):
|
||||
# Silence an expected warning matching a regex
|
||||
old_report_warning = self.report_warning
|
||||
|
||||
def report_warning(self, message):
|
||||
def report_warning(self, message, *args, **kwargs):
|
||||
if re.match(regex, message):
|
||||
return
|
||||
old_report_warning(message)
|
||||
old_report_warning(message, *args, **kwargs)
|
||||
self.report_warning = types.MethodType(report_warning, self)
|
||||
|
||||
|
||||
@@ -92,33 +92,40 @@ def gettestcases(include_onlymatching=False):
|
||||
yield from ie.get_testcases(include_onlymatching)
|
||||
|
||||
|
||||
def getwebpagetestcases():
|
||||
for ie in yt_dlp.extractor.gen_extractors():
|
||||
for tc in ie.get_webpage_testcases():
|
||||
tc.setdefault('add_ie', []).append('Generic')
|
||||
yield tc
|
||||
|
||||
|
||||
md5 = lambda s: hashlib.md5(s.encode()).hexdigest()
|
||||
|
||||
|
||||
def expect_value(self, got, expected, field):
|
||||
if isinstance(expected, compat_str) and expected.startswith('re:'):
|
||||
if isinstance(expected, str) and expected.startswith('re:'):
|
||||
match_str = expected[len('re:'):]
|
||||
match_rex = re.compile(match_str)
|
||||
|
||||
self.assertTrue(
|
||||
isinstance(got, compat_str),
|
||||
f'Expected a {compat_str.__name__} object, but got {type(got).__name__} for field {field}')
|
||||
isinstance(got, str),
|
||||
f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
|
||||
self.assertTrue(
|
||||
match_rex.match(got),
|
||||
f'field {field} (value: {got!r}) should match {match_str!r}')
|
||||
elif isinstance(expected, compat_str) and expected.startswith('startswith:'):
|
||||
elif isinstance(expected, str) and expected.startswith('startswith:'):
|
||||
start_str = expected[len('startswith:'):]
|
||||
self.assertTrue(
|
||||
isinstance(got, compat_str),
|
||||
f'Expected a {compat_str.__name__} object, but got {type(got).__name__} for field {field}')
|
||||
isinstance(got, str),
|
||||
f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
|
||||
self.assertTrue(
|
||||
got.startswith(start_str),
|
||||
f'field {field} (value: {got!r}) should start with {start_str!r}')
|
||||
elif isinstance(expected, compat_str) and expected.startswith('contains:'):
|
||||
elif isinstance(expected, str) and expected.startswith('contains:'):
|
||||
contains_str = expected[len('contains:'):]
|
||||
self.assertTrue(
|
||||
isinstance(got, compat_str),
|
||||
f'Expected a {compat_str.__name__} object, but got {type(got).__name__} for field {field}')
|
||||
isinstance(got, str),
|
||||
f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
|
||||
self.assertTrue(
|
||||
contains_str in got,
|
||||
f'field {field} (value: {got!r}) should contain {contains_str!r}')
|
||||
@@ -142,12 +149,12 @@ def expect_value(self, got, expected, field):
|
||||
index, field, type_expected, type_got))
|
||||
expect_value(self, item_got, item_expected, field)
|
||||
else:
|
||||
if isinstance(expected, compat_str) and expected.startswith('md5:'):
|
||||
if isinstance(expected, str) and expected.startswith('md5:'):
|
||||
self.assertTrue(
|
||||
isinstance(got, compat_str),
|
||||
isinstance(got, str),
|
||||
f'Expected field {field} to be a unicode object, but got value {got!r} of type {type(got)!r}')
|
||||
got = 'md5:' + md5(got)
|
||||
elif isinstance(expected, compat_str) and re.match(r'^(?:min|max)?count:\d+', expected):
|
||||
elif isinstance(expected, str) and re.match(r'^(?:min|max)?count:\d+', expected):
|
||||
self.assertTrue(
|
||||
isinstance(got, (list, dict)),
|
||||
f'Expected field {field} to be a list or a dict, but it is of type {type(got).__name__}')
|
||||
@@ -215,6 +222,10 @@ def sanitize_got_info_dict(got_dict):
|
||||
if test_info_dict.get('display_id') == test_info_dict.get('id'):
|
||||
test_info_dict.pop('display_id')
|
||||
|
||||
# Check url for flat entries
|
||||
if got_dict.get('_type', 'video') != 'video' and got_dict.get('url'):
|
||||
test_info_dict['url'] = got_dict['url']
|
||||
|
||||
return test_info_dict
|
||||
|
||||
|
||||
@@ -228,33 +239,31 @@ def expect_info_dict(self, got_dict, expected_dict):
|
||||
for key in mandatory_fields:
|
||||
self.assertTrue(got_dict.get(key), 'Missing mandatory field %s' % key)
|
||||
# Check for mandatory fields that are automatically set by YoutubeDL
|
||||
for key in ['webpage_url', 'extractor', 'extractor_key']:
|
||||
self.assertTrue(got_dict.get(key), 'Missing field: %s' % key)
|
||||
if got_dict.get('_type', 'video') == 'video':
|
||||
for key in ['webpage_url', 'extractor', 'extractor_key']:
|
||||
self.assertTrue(got_dict.get(key), 'Missing field: %s' % key)
|
||||
|
||||
test_info_dict = sanitize_got_info_dict(got_dict)
|
||||
|
||||
missing_keys = set(test_info_dict.keys()) - set(expected_dict.keys())
|
||||
if missing_keys:
|
||||
def _repr(v):
|
||||
if isinstance(v, compat_str):
|
||||
if isinstance(v, str):
|
||||
return "'%s'" % v.replace('\\', '\\\\').replace("'", "\\'").replace('\n', '\\n')
|
||||
elif isinstance(v, type):
|
||||
return v.__name__
|
||||
else:
|
||||
return repr(v)
|
||||
info_dict_str = ''
|
||||
if len(missing_keys) != len(expected_dict):
|
||||
info_dict_str += ''.join(
|
||||
f' {_repr(k)}: {_repr(v)},\n'
|
||||
for k, v in test_info_dict.items() if k not in missing_keys)
|
||||
|
||||
if info_dict_str:
|
||||
info_dict_str += '\n'
|
||||
info_dict_str = ''.join(
|
||||
f' {_repr(k)}: {_repr(v)},\n'
|
||||
for k, v in test_info_dict.items() if k not in missing_keys)
|
||||
if info_dict_str:
|
||||
info_dict_str += '\n'
|
||||
info_dict_str += ''.join(
|
||||
f' {_repr(k)}: {_repr(test_info_dict[k])},\n'
|
||||
for k in missing_keys)
|
||||
write_string(
|
||||
'\n\'info_dict\': {\n' + info_dict_str + '},\n', out=sys.stderr)
|
||||
info_dict_str = '\n\'info_dict\': {\n' + info_dict_str + '},\n'
|
||||
write_string(info_dict_str.replace('\n', '\n '), out=sys.stderr)
|
||||
self.assertFalse(
|
||||
missing_keys,
|
||||
'Missing keys in test definition: %s' % (
|
||||
@@ -301,9 +310,9 @@ def assertEqual(self, got, expected, msg=None):
|
||||
def expect_warnings(ydl, warnings_re):
|
||||
real_warning = ydl.report_warning
|
||||
|
||||
def _report_warning(w):
|
||||
def _report_warning(w, *args, **kwargs):
|
||||
if not any(re.search(w_re, w) for w_re in warnings_re):
|
||||
real_warning(w)
|
||||
real_warning(w, *args, **kwargs)
|
||||
|
||||
ydl.report_warning = _report_warning
|
||||
|
||||
|
||||
@@ -44,5 +44,6 @@
|
||||
"writesubtitles": false,
|
||||
"allsubtitles": false,
|
||||
"listsubtitles": false,
|
||||
"fixup": "never"
|
||||
"fixup": "never",
|
||||
"allow_playlist_files": false
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
@@ -6,10 +7,12 @@ import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
import threading
|
||||
from test.helper import FakeYDL, expect_dict, expect_value, http_server_port
|
||||
|
||||
from yt_dlp.compat import compat_etree_fromstring, compat_http_server
|
||||
import http.server
|
||||
import threading
|
||||
|
||||
from test.helper import FakeYDL, expect_dict, expect_value, http_server_port
|
||||
from yt_dlp.compat import compat_etree_fromstring
|
||||
from yt_dlp.extractor import YoutubeIE, get_info_extractor
|
||||
from yt_dlp.extractor.common import InfoExtractor
|
||||
from yt_dlp.utils import (
|
||||
@@ -23,7 +26,7 @@ TEAPOT_RESPONSE_STATUS = 418
|
||||
TEAPOT_RESPONSE_BODY = "<h1>418 I'm a teapot</h1>"
|
||||
|
||||
|
||||
class InfoExtractorTestRequestHandler(compat_http_server.BaseHTTPRequestHandler):
|
||||
class InfoExtractorTestRequestHandler(http.server.BaseHTTPRequestHandler):
|
||||
def log_message(self, format, *args):
|
||||
pass
|
||||
|
||||
@@ -38,7 +41,9 @@ class InfoExtractorTestRequestHandler(compat_http_server.BaseHTTPRequestHandler)
|
||||
|
||||
|
||||
class DummyIE(InfoExtractor):
|
||||
pass
|
||||
def _sort_formats(self, formats, field_preference=[]):
|
||||
self._downloader.sort_formats(
|
||||
{'formats': formats, '_format_sort_fields': field_preference})
|
||||
|
||||
|
||||
class TestInfoExtractor(unittest.TestCase):
|
||||
@@ -502,6 +507,24 @@ class TestInfoExtractor(unittest.TestCase):
|
||||
}],
|
||||
})
|
||||
|
||||
# from https://0000.studio/
|
||||
# with type attribute but without extension in URL
|
||||
expect_dict(
|
||||
self,
|
||||
self.ie._parse_html5_media_entries(
|
||||
'https://0000.studio',
|
||||
r'''
|
||||
<video src="https://d1ggyt9m8pwf3g.cloudfront.net/protected/ap-northeast-1:1864af40-28d5-492b-b739-b32314b1a527/archive/clip/838db6a7-8973-4cd6-840d-8517e4093c92"
|
||||
controls="controls" type="video/mp4" preload="metadata" autoplay="autoplay" playsinline class="object-contain">
|
||||
</video>
|
||||
''', None)[0],
|
||||
{
|
||||
'formats': [{
|
||||
'url': 'https://d1ggyt9m8pwf3g.cloudfront.net/protected/ap-northeast-1:1864af40-28d5-492b-b739-b32314b1a527/archive/clip/838db6a7-8973-4cd6-840d-8517e4093c92',
|
||||
'ext': 'mp4',
|
||||
}],
|
||||
})
|
||||
|
||||
def test_extract_jwplayer_data_realworld(self):
|
||||
# from http://www.suffolk.edu/sjc/
|
||||
expect_dict(
|
||||
@@ -1546,6 +1569,292 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
||||
]
|
||||
},
|
||||
),
|
||||
(
|
||||
'ec-3_test',
|
||||
'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
[{
|
||||
'format_id': 'audio_deu_1-224',
|
||||
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'ext': 'isma',
|
||||
'tbr': 224,
|
||||
'asr': 48000,
|
||||
'vcodec': 'none',
|
||||
'acodec': 'EC-3',
|
||||
'protocol': 'ism',
|
||||
'_download_params':
|
||||
{
|
||||
'stream_type': 'audio',
|
||||
'duration': 370000000,
|
||||
'timescale': 10000000,
|
||||
'width': 0,
|
||||
'height': 0,
|
||||
'fourcc': 'EC-3',
|
||||
'language': 'deu',
|
||||
'codec_private_data': '00063F000000AF87FBA7022DFB42A4D405CD93843BDD0700200F00',
|
||||
'sampling_rate': 48000,
|
||||
'channels': 6,
|
||||
'bits_per_sample': 16,
|
||||
'nal_unit_length_field': 4
|
||||
},
|
||||
'audio_ext': 'isma',
|
||||
'video_ext': 'none',
|
||||
'abr': 224,
|
||||
}, {
|
||||
'format_id': 'audio_deu-127',
|
||||
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'ext': 'isma',
|
||||
'tbr': 127,
|
||||
'asr': 48000,
|
||||
'vcodec': 'none',
|
||||
'acodec': 'AACL',
|
||||
'protocol': 'ism',
|
||||
'_download_params':
|
||||
{
|
||||
'stream_type': 'audio',
|
||||
'duration': 370000000,
|
||||
'timescale': 10000000,
|
||||
'width': 0,
|
||||
'height': 0,
|
||||
'fourcc': 'AACL',
|
||||
'language': 'deu',
|
||||
'codec_private_data': '1190',
|
||||
'sampling_rate': 48000,
|
||||
'channels': 2,
|
||||
'bits_per_sample': 16,
|
||||
'nal_unit_length_field': 4
|
||||
},
|
||||
'audio_ext': 'isma',
|
||||
'video_ext': 'none',
|
||||
'abr': 127,
|
||||
}, {
|
||||
'format_id': 'video_deu-23',
|
||||
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'ext': 'ismv',
|
||||
'width': 384,
|
||||
'height': 216,
|
||||
'tbr': 23,
|
||||
'vcodec': 'AVC1',
|
||||
'acodec': 'none',
|
||||
'protocol': 'ism',
|
||||
'_download_params':
|
||||
{
|
||||
'stream_type': 'video',
|
||||
'duration': 370000000,
|
||||
'timescale': 10000000,
|
||||
'width': 384,
|
||||
'height': 216,
|
||||
'fourcc': 'AVC1',
|
||||
'language': 'deu',
|
||||
'codec_private_data': '000000016742C00CDB06077E5C05A808080A00000300020000030009C0C02EE0177CC6300F142AE00000000168CA8DC8',
|
||||
'channels': 2,
|
||||
'bits_per_sample': 16,
|
||||
'nal_unit_length_field': 4
|
||||
},
|
||||
'video_ext': 'ismv',
|
||||
'audio_ext': 'none',
|
||||
'vbr': 23,
|
||||
}, {
|
||||
'format_id': 'video_deu-403',
|
||||
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'ext': 'ismv',
|
||||
'width': 400,
|
||||
'height': 224,
|
||||
'tbr': 403,
|
||||
'vcodec': 'AVC1',
|
||||
'acodec': 'none',
|
||||
'protocol': 'ism',
|
||||
'_download_params':
|
||||
{
|
||||
'stream_type': 'video',
|
||||
'duration': 370000000,
|
||||
'timescale': 10000000,
|
||||
'width': 400,
|
||||
'height': 224,
|
||||
'fourcc': 'AVC1',
|
||||
'language': 'deu',
|
||||
'codec_private_data': '00000001674D4014E98323B602D4040405000003000100000300320F1429380000000168EAECF2',
|
||||
'channels': 2,
|
||||
'bits_per_sample': 16,
|
||||
'nal_unit_length_field': 4
|
||||
},
|
||||
'video_ext': 'ismv',
|
||||
'audio_ext': 'none',
|
||||
'vbr': 403,
|
||||
}, {
|
||||
'format_id': 'video_deu-680',
|
||||
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'ext': 'ismv',
|
||||
'width': 640,
|
||||
'height': 360,
|
||||
'tbr': 680,
|
||||
'vcodec': 'AVC1',
|
||||
'acodec': 'none',
|
||||
'protocol': 'ism',
|
||||
'_download_params':
|
||||
{
|
||||
'stream_type': 'video',
|
||||
'duration': 370000000,
|
||||
'timescale': 10000000,
|
||||
'width': 640,
|
||||
'height': 360,
|
||||
'fourcc': 'AVC1',
|
||||
'language': 'deu',
|
||||
'codec_private_data': '00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2',
|
||||
'channels': 2,
|
||||
'bits_per_sample': 16,
|
||||
'nal_unit_length_field': 4
|
||||
},
|
||||
'video_ext': 'ismv',
|
||||
'audio_ext': 'none',
|
||||
'vbr': 680,
|
||||
}, {
|
||||
'format_id': 'video_deu-1253',
|
||||
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'ext': 'ismv',
|
||||
'width': 640,
|
||||
'height': 360,
|
||||
'tbr': 1253,
|
||||
'vcodec': 'AVC1',
|
||||
'acodec': 'none',
|
||||
'protocol': 'ism',
|
||||
'_download_params':
|
||||
{
|
||||
'stream_type': 'video',
|
||||
'duration': 370000000,
|
||||
'timescale': 10000000,
|
||||
'width': 640,
|
||||
'height': 360,
|
||||
'fourcc': 'AVC1',
|
||||
'language': 'deu',
|
||||
'codec_private_data': '00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2',
|
||||
'channels': 2,
|
||||
'bits_per_sample': 16,
|
||||
'nal_unit_length_field': 4
|
||||
},
|
||||
'video_ext': 'ismv',
|
||||
'audio_ext': 'none',
|
||||
'vbr': 1253,
|
||||
}, {
|
||||
'format_id': 'video_deu-2121',
|
||||
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'ext': 'ismv',
|
||||
'width': 768,
|
||||
'height': 432,
|
||||
'tbr': 2121,
|
||||
'vcodec': 'AVC1',
|
||||
'acodec': 'none',
|
||||
'protocol': 'ism',
|
||||
'_download_params':
|
||||
{
|
||||
'stream_type': 'video',
|
||||
'duration': 370000000,
|
||||
'timescale': 10000000,
|
||||
'width': 768,
|
||||
'height': 432,
|
||||
'fourcc': 'AVC1',
|
||||
'language': 'deu',
|
||||
'codec_private_data': '00000001674D401EECA0601BD80B50101014000003000400000300C83C58B6580000000168E93B3C80',
|
||||
'channels': 2,
|
||||
'bits_per_sample': 16,
|
||||
'nal_unit_length_field': 4
|
||||
},
|
||||
'video_ext': 'ismv',
|
||||
'audio_ext': 'none',
|
||||
'vbr': 2121,
|
||||
}, {
|
||||
'format_id': 'video_deu-3275',
|
||||
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'ext': 'ismv',
|
||||
'width': 1280,
|
||||
'height': 720,
|
||||
'tbr': 3275,
|
||||
'vcodec': 'AVC1',
|
||||
'acodec': 'none',
|
||||
'protocol': 'ism',
|
||||
'_download_params':
|
||||
{
|
||||
'stream_type': 'video',
|
||||
'duration': 370000000,
|
||||
'timescale': 10000000,
|
||||
'width': 1280,
|
||||
'height': 720,
|
||||
'fourcc': 'AVC1',
|
||||
'language': 'deu',
|
||||
'codec_private_data': '00000001674D4020ECA02802DD80B501010140000003004000000C83C60C65800000000168E93B3C80',
|
||||
'channels': 2,
|
||||
'bits_per_sample': 16,
|
||||
'nal_unit_length_field': 4
|
||||
},
|
||||
'video_ext': 'ismv',
|
||||
'audio_ext': 'none',
|
||||
'vbr': 3275,
|
||||
}, {
|
||||
'format_id': 'video_deu-5300',
|
||||
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'ext': 'ismv',
|
||||
'width': 1920,
|
||||
'height': 1080,
|
||||
'tbr': 5300,
|
||||
'vcodec': 'AVC1',
|
||||
'acodec': 'none',
|
||||
'protocol': 'ism',
|
||||
'_download_params':
|
||||
{
|
||||
'stream_type': 'video',
|
||||
'duration': 370000000,
|
||||
'timescale': 10000000,
|
||||
'width': 1920,
|
||||
'height': 1080,
|
||||
'fourcc': 'AVC1',
|
||||
'language': 'deu',
|
||||
'codec_private_data': '00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80',
|
||||
'channels': 2,
|
||||
'bits_per_sample': 16,
|
||||
'nal_unit_length_field': 4
|
||||
},
|
||||
'video_ext': 'ismv',
|
||||
'audio_ext': 'none',
|
||||
'vbr': 5300,
|
||||
}, {
|
||||
'format_id': 'video_deu-8079',
|
||||
'url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'manifest_url': 'https://smstr01.dmm.t-online.de/smooth24/smoothstream_m1/streaming/sony/9221438342941275747/636887760842957027/25_km_h-Trailer-9221571562372022953_deu_20_1300k_HD_H_264_ISMV.ism/Manifest',
|
||||
'ext': 'ismv',
|
||||
'width': 1920,
|
||||
'height': 1080,
|
||||
'tbr': 8079,
|
||||
'vcodec': 'AVC1',
|
||||
'acodec': 'none',
|
||||
'protocol': 'ism',
|
||||
'_download_params':
|
||||
{
|
||||
'stream_type': 'video',
|
||||
'duration': 370000000,
|
||||
'timescale': 10000000,
|
||||
'width': 1920,
|
||||
'height': 1080,
|
||||
'fourcc': 'AVC1',
|
||||
'language': 'deu',
|
||||
'codec_private_data': '00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80',
|
||||
'channels': 2,
|
||||
'bits_per_sample': 16,
|
||||
'nal_unit_length_field': 4
|
||||
},
|
||||
'video_ext': 'ismv',
|
||||
'audio_ext': 'none',
|
||||
'vbr': 8079,
|
||||
}],
|
||||
{},
|
||||
),
|
||||
]
|
||||
|
||||
for ism_file, ism_url, expected_formats, expected_subtitles in _TEST_CASES:
|
||||
@@ -1637,7 +1946,7 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
||||
# or the underlying `_download_webpage_handle` returning no content
|
||||
# when a response matches `expected_status`.
|
||||
|
||||
httpd = compat_http_server.HTTPServer(
|
||||
httpd = http.server.HTTPServer(
|
||||
('127.0.0.1', 0), InfoExtractorTestRequestHandler)
|
||||
port = http_server_port(httpd)
|
||||
server_thread = threading.Thread(target=httpd.serve_forever)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
@@ -6,23 +7,21 @@ import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import copy
|
||||
import json
|
||||
from test.helper import FakeYDL, assertRegexpMatches
|
||||
import urllib.error
|
||||
|
||||
from test.helper import FakeYDL, assertRegexpMatches
|
||||
from yt_dlp import YoutubeDL
|
||||
from yt_dlp.compat import (
|
||||
compat_os_name,
|
||||
compat_setenv,
|
||||
compat_str,
|
||||
compat_urllib_error,
|
||||
)
|
||||
from yt_dlp.compat import compat_os_name
|
||||
from yt_dlp.extractor import YoutubeIE
|
||||
from yt_dlp.extractor.common import InfoExtractor
|
||||
from yt_dlp.postprocessor.common import PostProcessor
|
||||
from yt_dlp.utils import (
|
||||
ExtractorError,
|
||||
LazyList,
|
||||
OnDemandPagedList,
|
||||
int_or_none,
|
||||
match_filter_func,
|
||||
)
|
||||
@@ -39,7 +38,7 @@ class YDL(FakeYDL):
|
||||
def process_info(self, info_dict):
|
||||
self.downloaded_info_dicts.append(info_dict.copy())
|
||||
|
||||
def to_screen(self, msg):
|
||||
def to_screen(self, msg, *args, **kwargs):
|
||||
self.msgs.append(msg)
|
||||
|
||||
def dl(self, *args, **kwargs):
|
||||
@@ -69,8 +68,7 @@ class TestFormatSelection(unittest.TestCase):
|
||||
{'ext': 'mp4', 'height': 460, 'url': TEST_URL},
|
||||
]
|
||||
info_dict = _make_result(formats)
|
||||
yie = YoutubeIE(ydl)
|
||||
yie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['ext'], 'webm')
|
||||
@@ -83,8 +81,7 @@ class TestFormatSelection(unittest.TestCase):
|
||||
{'ext': 'mp4', 'height': 1080, 'url': TEST_URL},
|
||||
]
|
||||
info_dict['formats'] = formats
|
||||
yie = YoutubeIE(ydl)
|
||||
yie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['ext'], 'mp4')
|
||||
@@ -98,8 +95,7 @@ class TestFormatSelection(unittest.TestCase):
|
||||
{'ext': 'flv', 'height': 720, 'url': TEST_URL},
|
||||
]
|
||||
info_dict['formats'] = formats
|
||||
yie = YoutubeIE(ydl)
|
||||
yie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['ext'], 'mp4')
|
||||
@@ -111,15 +107,14 @@ class TestFormatSelection(unittest.TestCase):
|
||||
{'ext': 'webm', 'height': 720, 'url': TEST_URL},
|
||||
]
|
||||
info_dict['formats'] = formats
|
||||
yie = YoutubeIE(ydl)
|
||||
yie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['ext'], 'webm')
|
||||
|
||||
def test_format_selection(self):
|
||||
formats = [
|
||||
{'format_id': '35', 'ext': 'mp4', 'preference': 1, 'url': TEST_URL},
|
||||
{'format_id': '35', 'ext': 'mp4', 'preference': 0, 'url': TEST_URL},
|
||||
{'format_id': 'example-with-dashes', 'ext': 'webm', 'preference': 1, 'url': TEST_URL},
|
||||
{'format_id': '45', 'ext': 'webm', 'preference': 2, 'url': TEST_URL},
|
||||
{'format_id': '47', 'ext': 'webm', 'preference': 3, 'url': TEST_URL},
|
||||
@@ -187,22 +182,19 @@ class TestFormatSelection(unittest.TestCase):
|
||||
|
||||
info_dict = _make_result(formats)
|
||||
ydl = YDL({'format': 'best'})
|
||||
ie = YoutubeIE(ydl)
|
||||
ie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(copy.deepcopy(info_dict))
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], 'aac-64')
|
||||
|
||||
ydl = YDL({'format': 'mp3'})
|
||||
ie = YoutubeIE(ydl)
|
||||
ie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(copy.deepcopy(info_dict))
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], 'mp3-64')
|
||||
|
||||
ydl = YDL({'prefer_free_formats': True})
|
||||
ie = YoutubeIE(ydl)
|
||||
ie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(copy.deepcopy(info_dict))
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], 'ogg-64')
|
||||
@@ -347,8 +339,7 @@ class TestFormatSelection(unittest.TestCase):
|
||||
|
||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
||||
ydl = YDL({'format': 'bestvideo+bestaudio'})
|
||||
yie = YoutubeIE(ydl)
|
||||
yie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], '248+172')
|
||||
@@ -356,40 +347,35 @@ class TestFormatSelection(unittest.TestCase):
|
||||
|
||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
||||
ydl = YDL({'format': 'bestvideo[height>=999999]+bestaudio/best'})
|
||||
yie = YoutubeIE(ydl)
|
||||
yie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], '38')
|
||||
|
||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
||||
ydl = YDL({'format': 'bestvideo/best,bestaudio'})
|
||||
yie = YoutubeIE(ydl)
|
||||
yie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||
self.assertEqual(downloaded_ids, ['137', '141'])
|
||||
|
||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
||||
ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])+bestaudio'})
|
||||
yie = YoutubeIE(ydl)
|
||||
yie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||
self.assertEqual(downloaded_ids, ['137+141', '248+141'])
|
||||
|
||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
||||
ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])[height<=720]+bestaudio'})
|
||||
yie = YoutubeIE(ydl)
|
||||
yie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||
self.assertEqual(downloaded_ids, ['136+141', '247+141'])
|
||||
|
||||
info_dict = _make_result(list(formats_order), extractor='youtube')
|
||||
ydl = YDL({'format': '(bestvideo[ext=none]/bestvideo[ext=webm])+bestaudio'})
|
||||
yie = YoutubeIE(ydl)
|
||||
yie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
|
||||
self.assertEqual(downloaded_ids, ['248+141'])
|
||||
@@ -397,16 +383,14 @@ class TestFormatSelection(unittest.TestCase):
|
||||
for f1, f2 in zip(formats_order, formats_order[1:]):
|
||||
info_dict = _make_result([f1, f2], extractor='youtube')
|
||||
ydl = YDL({'format': 'best/bestvideo'})
|
||||
yie = YoutubeIE(ydl)
|
||||
yie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], f1['format_id'])
|
||||
|
||||
info_dict = _make_result([f2, f1], extractor='youtube')
|
||||
ydl = YDL({'format': 'best/bestvideo'})
|
||||
yie = YoutubeIE(ydl)
|
||||
yie._sort_formats(info_dict['formats'])
|
||||
ydl.sort_formats(info_dict)
|
||||
ydl.process_ie_result(info_dict)
|
||||
downloaded = ydl.downloaded_info_dicts[0]
|
||||
self.assertEqual(downloaded['format_id'], f1['format_id'])
|
||||
@@ -481,7 +465,7 @@ class TestFormatSelection(unittest.TestCase):
|
||||
for f in formats:
|
||||
f['url'] = 'http://_/'
|
||||
f['ext'] = 'unknown'
|
||||
info_dict = _make_result(formats)
|
||||
info_dict = _make_result(formats, _format_sort_fields=('id', ))
|
||||
|
||||
ydl = YDL({'format': 'best[filesize<3000]'})
|
||||
ydl.process_ie_result(info_dict)
|
||||
@@ -663,13 +647,17 @@ class TestYoutubeDL(unittest.TestCase):
|
||||
'playlist_autonumber': 2,
|
||||
'__last_playlist_index': 100,
|
||||
'n_entries': 10,
|
||||
'formats': [{'id': 'id 1'}, {'id': 'id 2'}, {'id': 'id 3'}]
|
||||
'formats': [
|
||||
{'id': 'id 1', 'height': 1080, 'width': 1920},
|
||||
{'id': 'id 2', 'height': 720},
|
||||
{'id': 'id 3'}
|
||||
]
|
||||
}
|
||||
|
||||
def test_prepare_outtmpl_and_filename(self):
|
||||
def test(tmpl, expected, *, info=None, **params):
|
||||
params['outtmpl'] = tmpl
|
||||
ydl = YoutubeDL(params)
|
||||
ydl = FakeYDL(params)
|
||||
ydl._num_downloads = 1
|
||||
self.assertEqual(ydl.validate_outtmpl(tmpl), None)
|
||||
|
||||
@@ -723,13 +711,14 @@ class TestYoutubeDL(unittest.TestCase):
|
||||
test('%(id)s', '-abcd', info={'id': '-abcd'})
|
||||
test('%(id)s', '.abcd', info={'id': '.abcd'})
|
||||
test('%(id)s', 'ab__cd', info={'id': 'ab__cd'})
|
||||
test('%(id)s', ('ab:cd', 'ab -cd'), info={'id': 'ab:cd'})
|
||||
test('%(id)s', ('ab:cd', 'ab：cd'), info={'id': 'ab:cd'})
|
||||
test('%(id.0)s', '-', info={'id': '--'})
|
||||
|
||||
# Invalid templates
|
||||
self.assertTrue(isinstance(YoutubeDL.validate_outtmpl('%(title)'), ValueError))
|
||||
test('%(invalid@tmpl|def)s', 'none', outtmpl_na_placeholder='none')
|
||||
test('%(..)s', 'NA')
|
||||
test('%(formats.{id)s', 'NA')
|
||||
|
||||
# Entire info_dict
|
||||
def expect_same_infodict(out):
|
||||
@@ -771,7 +760,7 @@ class TestYoutubeDL(unittest.TestCase):
|
||||
test('a%(width|)d', 'a', outtmpl_na_placeholder='none')
|
||||
|
||||
FORMATS = self.outtmpl_info['formats']
|
||||
sanitize = lambda x: x.replace(':', ' -').replace('"', "'").replace('\n', ' ')
|
||||
sanitize = lambda x: x.replace(':', '：').replace('"', '＂').replace('\n', ' ')
|
||||
|
||||
# Custom type casting
|
||||
test('%(formats.:.id)l', 'id 1, id 2, id 3')
|
||||
@@ -789,13 +778,13 @@ class TestYoutubeDL(unittest.TestCase):
|
||||
test('%(filesize)#D', '1Ki')
|
||||
test('%(height)5.2D', ' 1.08k')
|
||||
test('%(title4)#S', 'foo_bar_test')
|
||||
test('%(title4).10S', ('foo \'bar\' ', 'foo \'bar\'' + ('#' if compat_os_name == 'nt' else ' ')))
|
||||
test('%(title4).10S', ('foo ＂bar＂ ', 'foo ＂bar＂' + ('#' if compat_os_name == 'nt' else ' ')))
|
||||
if compat_os_name == 'nt':
|
||||
test('%(title4)q', ('"foo \\"bar\\" test"', "'foo _'bar_' test'"))
|
||||
test('%(formats.:.id)#q', ('"id 1" "id 2" "id 3"', "'id 1' 'id 2' 'id 3'"))
|
||||
test('%(formats.0.id)#q', ('"id 1"', "'id 1'"))
|
||||
test('%(title4)q', ('"foo \\"bar\\" test"', "＂foo ⧹＂bar⧹＂ test＂"))
|
||||
test('%(formats.:.id)#q', ('"id 1" "id 2" "id 3"', '＂id 1＂ ＂id 2＂ ＂id 3＂'))
|
||||
test('%(formats.0.id)#q', ('"id 1"', '＂id 1＂'))
|
||||
else:
|
||||
test('%(title4)q', ('\'foo "bar" test\'', "'foo 'bar' test'"))
|
||||
test('%(title4)q', ('\'foo "bar" test\'', '\'foo "bar" test\''))
|
||||
test('%(formats.:.id)#q', "'id 1' 'id 2' 'id 3'")
|
||||
test('%(formats.0.id)#q', "'id 1'")
|
||||
|
||||
@@ -814,6 +803,12 @@ class TestYoutubeDL(unittest.TestCase):
|
||||
test('%(formats.:2:-1)r', repr(FORMATS[:2:-1]))
|
||||
test('%(formats.0.id.-1+id)f', '1235.000000')
|
||||
test('%(formats.0.id.-1+formats.1.id.-1)d', '3')
|
||||
out = json.dumps([{'id': f['id'], 'height.:2': str(f['height'])[:2]}
|
||||
if 'height' in f else {'id': f['id']}
|
||||
for f in FORMATS])
|
||||
test('%(formats.:.{id,height.:2})j', (out, sanitize(out)))
|
||||
test('%(formats.:.{id,height}.id)l', ', '.join(f['id'] for f in FORMATS))
|
||||
test('%(.{id,title})j', ('{"id": "1234"}', '{＂id＂： ＂1234＂}'))
|
||||
|
||||
# Alternates
|
||||
test('%(title,id)s', '1234')
|
||||
@@ -840,21 +835,21 @@ class TestYoutubeDL(unittest.TestCase):
|
||||
# test('%(foo|)s', ('', '_')) # fixme
|
||||
|
||||
# Environment variable expansion for prepare_filename
|
||||
compat_setenv('__yt_dlp_var', 'expanded')
|
||||
os.environ['__yt_dlp_var'] = 'expanded'
|
||||
envvar = '%__yt_dlp_var%' if compat_os_name == 'nt' else '$__yt_dlp_var'
|
||||
test(envvar, (envvar, 'expanded'))
|
||||
if compat_os_name == 'nt':
|
||||
test('%s%', ('%s%', '%s%'))
|
||||
compat_setenv('s', 'expanded')
|
||||
os.environ['s'] = 'expanded'
|
||||
test('%s%', ('%s%', 'expanded')) # %s% should be expanded before escaping %s
|
||||
compat_setenv('(test)s', 'expanded')
|
||||
os.environ['(test)s'] = 'expanded'
|
||||
test('%(test)s%', ('NA%', 'expanded')) # Environment should take priority over template
|
||||
|
||||
# Path expansion and escaping
|
||||
test('Hello %(title1)s', 'Hello $PATH')
|
||||
test('Hello %(title2)s', 'Hello %PATH%')
|
||||
test('%(title3)s', ('foo/bar\\test', 'foo_bar_test'))
|
||||
test('folder/%(title3)s', ('folder/foo/bar\\test', 'folder%sfoo_bar_test' % os.path.sep))
|
||||
test('%(title3)s', ('foo/bar\\test', 'foo⧸bar⧹test'))
|
||||
test('folder/%(title3)s', ('folder/foo/bar\\test', 'folder%sfoo⧸bar⧹test' % os.path.sep))
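The block above pins down output-template behaviour: truncation and sanitization (`S`), shell quoting (`q`), JSON and list casts (`j`, `l`), human-readable numbers (`D`), environment-variable expansion, and the new look-alike characters (`⧸`, `⧹`, `：`, `＂`) used when sanitizing paths. A short sketch of applying a template to an info dict outside the tests; the field values are made up for illustration:

```python
from yt_dlp import YoutubeDL

ydl = YoutubeDL({'outtmpl': '%(uploader)s - %(title).50s [%(id)s].%(ext)s'})
info = {'id': 'xyz', 'title': 'An example: "quoted" title', 'uploader': 'someone', 'ext': 'mp4'}

# prepare_filename evaluates the configured template and sanitizes the result
print(ydl.prepare_filename(info))
```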
|
||||
def test_format_note(self):
|
||||
ydl = YoutubeDL()
|
||||
@@ -989,41 +984,80 @@ class TestYoutubeDL(unittest.TestCase):
|
||||
self.assertEqual(res, [])
|
||||
|
||||
def test_playlist_items_selection(self):
|
||||
entries = [{
|
||||
'id': compat_str(i),
|
||||
'title': compat_str(i),
|
||||
'url': TEST_URL,
|
||||
} for i in range(1, 5)]
|
||||
playlist = {
|
||||
'_type': 'playlist',
|
||||
'id': 'test',
|
||||
'entries': entries,
|
||||
'extractor': 'test:playlist',
|
||||
'extractor_key': 'test:playlist',
|
||||
'webpage_url': 'http://example.com',
|
||||
}
|
||||
INDICES, PAGE_SIZE = list(range(1, 11)), 3
|
||||
|
||||
def get_downloaded_info_dicts(params):
|
||||
def entry(i, evaluated):
|
||||
evaluated.append(i)
|
||||
return {
|
||||
'id': str(i),
|
||||
'title': str(i),
|
||||
'url': TEST_URL,
|
||||
}
|
||||
|
||||
def pagedlist_entries(evaluated):
|
||||
def page_func(n):
|
||||
start = PAGE_SIZE * n
|
||||
for i in INDICES[start: start + PAGE_SIZE]:
|
||||
yield entry(i, evaluated)
|
||||
return OnDemandPagedList(page_func, PAGE_SIZE)
|
||||
|
||||
def page_num(i):
|
||||
return (i + PAGE_SIZE - 1) // PAGE_SIZE
|
||||
|
||||
def generator_entries(evaluated):
|
||||
for i in INDICES:
|
||||
yield entry(i, evaluated)
|
||||
|
||||
def list_entries(evaluated):
|
||||
return list(generator_entries(evaluated))
|
||||
|
||||
def lazylist_entries(evaluated):
|
||||
return LazyList(generator_entries(evaluated))
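The helpers above expose the same ten playlist entries as a plain list, a generator, a `LazyList` and an `OnDemandPagedList`, so item selection can be verified against lazily evaluated sources (the `evaluated` list records which entries were actually touched). A minimal sketch of `OnDemandPagedList` on its own, with a toy page function:

```python
from yt_dlp.utils import OnDemandPagedList

PAGE_SIZE = 3

def fetch_page(page_number):
    # Stand-in for fetching one page of a remote playlist
    start = page_number * PAGE_SIZE
    yield from range(start, start + PAGE_SIZE)

pages = OnDemandPagedList(fetch_page, PAGE_SIZE)
print(pages.getslice(2, 5))  # fetches only the pages that cover indices 2..4
```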
|
||||
def get_downloaded_info_dicts(params, entries):
|
||||
ydl = YDL(params)
|
||||
# make a deep copy because the dictionary and nested entries
|
||||
# can be modified
|
||||
ydl.process_ie_result(copy.deepcopy(playlist))
|
||||
ydl.process_ie_result({
|
||||
'_type': 'playlist',
|
||||
'id': 'test',
|
||||
'extractor': 'test:playlist',
|
||||
'extractor_key': 'test:playlist',
|
||||
'webpage_url': 'http://example.com',
|
||||
'entries': entries,
|
||||
})
|
||||
return ydl.downloaded_info_dicts
|
||||
|
||||
def test_selection(params, expected_ids):
|
||||
results = [
|
||||
(v['playlist_autonumber'] - 1, (int(v['id']), v['playlist_index']))
|
||||
for v in get_downloaded_info_dicts(params)]
|
||||
self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids))))
|
||||
def test_selection(params, expected_ids, evaluate_all=False):
|
||||
expected_ids = list(expected_ids)
|
||||
if evaluate_all:
|
||||
generator_eval = pagedlist_eval = INDICES
|
||||
elif not expected_ids:
|
||||
generator_eval = pagedlist_eval = []
|
||||
else:
|
||||
generator_eval = INDICES[0: max(expected_ids)]
|
||||
pagedlist_eval = INDICES[PAGE_SIZE * page_num(min(expected_ids)) - PAGE_SIZE:
|
||||
PAGE_SIZE * page_num(max(expected_ids))]
|
||||
|
||||
test_selection({}, [1, 2, 3, 4])
|
||||
test_selection({'playlistend': 10}, [1, 2, 3, 4])
|
||||
test_selection({'playlistend': 2}, [1, 2])
|
||||
test_selection({'playliststart': 10}, [])
|
||||
test_selection({'playliststart': 2}, [2, 3, 4])
|
||||
test_selection({'playlist_items': '2-4'}, [2, 3, 4])
|
||||
for name, func, expected_eval in (
|
||||
('list', list_entries, INDICES),
|
||||
('Generator', generator_entries, generator_eval),
|
||||
# ('LazyList', lazylist_entries, generator_eval), # Generator and LazyList follow the exact same code path
|
||||
('PagedList', pagedlist_entries, pagedlist_eval),
|
||||
):
|
||||
evaluated = []
|
||||
entries = func(evaluated)
|
||||
results = [(v['playlist_autonumber'] - 1, (int(v['id']), v['playlist_index']))
|
||||
for v in get_downloaded_info_dicts(params, entries)]
|
||||
self.assertEqual(results, list(enumerate(zip(expected_ids, expected_ids))), f'Entries of {name} for {params}')
|
||||
self.assertEqual(sorted(evaluated), expected_eval, f'Evaluation of {name} for {params}')
|
||||
|
||||
test_selection({}, INDICES)
|
||||
test_selection({'playlistend': 20}, INDICES, True)
|
||||
test_selection({'playlistend': 2}, INDICES[:2])
|
||||
test_selection({'playliststart': 11}, [], True)
|
||||
test_selection({'playliststart': 2}, INDICES[1:])
|
||||
test_selection({'playlist_items': '2-4'}, INDICES[1:4])
|
||||
test_selection({'playlist_items': '2,4'}, [2, 4])
|
||||
test_selection({'playlist_items': '10'}, [])
|
||||
test_selection({'playlist_items': '20'}, [], True)
|
||||
test_selection({'playlist_items': '0'}, [])
|
||||
|
||||
# Tests for https://github.com/ytdl-org/youtube-dl/issues/10591
|
||||
@@ -1032,15 +1066,37 @@ class TestYoutubeDL(unittest.TestCase):
|
||||
|
||||
# Tests for https://github.com/yt-dlp/yt-dlp/issues/720
|
||||
# https://github.com/yt-dlp/yt-dlp/issues/302
|
||||
test_selection({'playlistreverse': True}, [4, 3, 2, 1])
|
||||
test_selection({'playliststart': 2, 'playlistreverse': True}, [4, 3, 2])
|
||||
test_selection({'playlistreverse': True}, INDICES[::-1])
|
||||
test_selection({'playliststart': 2, 'playlistreverse': True}, INDICES[:0:-1])
|
||||
test_selection({'playlist_items': '2,4', 'playlistreverse': True}, [4, 2])
|
||||
test_selection({'playlist_items': '4,2'}, [4, 2])
|
||||
|
||||
# Tests for --playlist-items start:end:step
|
||||
test_selection({'playlist_items': ':'}, INDICES, True)
|
||||
test_selection({'playlist_items': '::1'}, INDICES, True)
|
||||
test_selection({'playlist_items': '::-1'}, INDICES[::-1], True)
|
||||
test_selection({'playlist_items': ':6'}, INDICES[:6])
|
||||
test_selection({'playlist_items': ':-6'}, INDICES[:-5], True)
|
||||
test_selection({'playlist_items': '-1:6:-2'}, INDICES[:4:-2], True)
|
||||
test_selection({'playlist_items': '9:-6:-2'}, INDICES[8:3:-2], True)
|
||||
|
||||
test_selection({'playlist_items': '1:inf:2'}, INDICES[::2], True)
|
||||
test_selection({'playlist_items': '-2:inf'}, INDICES[-2:], True)
|
||||
test_selection({'playlist_items': ':inf:-1'}, [], True)
|
||||
test_selection({'playlist_items': '0-2:2'}, [2])
|
||||
test_selection({'playlist_items': '1-:2'}, INDICES[::2], True)
|
||||
test_selection({'playlist_items': '0--2:2'}, INDICES[1:-1:2], True)
|
||||
|
||||
test_selection({'playlist_items': '10::3'}, [10], True)
|
||||
test_selection({'playlist_items': '-1::3'}, [10], True)
|
||||
test_selection({'playlist_items': '11::3'}, [], True)
|
||||
test_selection({'playlist_items': '-15::2'}, INDICES[1::2], True)
|
||||
test_selection({'playlist_items': '-15::15'}, [], True)
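The assertions above cover the extended `--playlist-items` syntax: comma-separated values, `start:end:step` ranges, negative indices counted from the end, and `inf` for an open end, together with how much of a lazy playlist each selection forces to be evaluated. The same ranges can be passed through the API (a sketch; the URL is a placeholder):

```python
import yt_dlp

# Items 1, 3, 5, ... plus the last two entries of the playlist
opts = {'playlist_items': '1::2,-2:inf'}

with yt_dlp.YoutubeDL(opts) as ydl:
    ydl.download(['https://example.com/some-playlist'])  # placeholder URL
```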
|
||||
def test_urlopen_no_file_protocol(self):
|
||||
# see https://github.com/ytdl-org/youtube-dl/issues/8227
|
||||
ydl = YDL()
|
||||
self.assertRaises(compat_urllib_error.URLError, ydl.urlopen, 'file:///etc/passwd')
|
||||
self.assertRaises(urllib.error.URLError, ydl.urlopen, 'file:///etc/passwd')
|
||||
|
||||
def test_do_not_override_ie_key_in_url_transparent(self):
|
||||
ydl = YDL()
|
||||
@@ -1126,7 +1182,7 @@ class TestYoutubeDL(unittest.TestCase):
|
||||
|
||||
def _entries(self):
|
||||
for n in range(3):
|
||||
video_id = compat_str(n)
|
||||
video_id = str(n)
|
||||
yield {
|
||||
'_type': 'url_transparent',
|
||||
'ie_key': VideoIE.ie_key(),
|
||||
|
||||
@@ -1,12 +1,16 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import re
|
||||
import tempfile
|
||||
|
||||
from yt_dlp.utils import YoutubeDLCookieJar
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
@@ -6,10 +7,10 @@ import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import base64
|
||||
|
||||
from yt_dlp.aes import (
|
||||
BLOCK_SIZE_BYTES,
|
||||
aes_cbc_decrypt,
|
||||
aes_cbc_decrypt_bytes,
|
||||
aes_cbc_encrypt,
|
||||
@@ -22,6 +23,8 @@ from yt_dlp.aes import (
|
||||
aes_encrypt,
|
||||
aes_gcm_decrypt_and_verify,
|
||||
aes_gcm_decrypt_and_verify_bytes,
|
||||
key_expansion,
|
||||
pad_block,
|
||||
)
|
||||
from yt_dlp.dependencies import Cryptodome_AES
|
||||
from yt_dlp.utils import bytes_to_intlist, intlist_to_bytes
|
||||
@@ -99,8 +102,7 @@ class TestAES(unittest.TestCase):
|
||||
|
||||
def test_ecb_encrypt(self):
|
||||
data = bytes_to_intlist(self.secret_msg)
|
||||
data += [0x08] * (BLOCK_SIZE_BYTES - len(data) % BLOCK_SIZE_BYTES)
|
||||
encrypted = intlist_to_bytes(aes_ecb_encrypt(data, self.key, self.iv))
|
||||
encrypted = intlist_to_bytes(aes_ecb_encrypt(data, self.key))
|
||||
self.assertEqual(
|
||||
encrypted,
|
||||
b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
|
||||
@@ -110,6 +112,41 @@ class TestAES(unittest.TestCase):
|
||||
decrypted = intlist_to_bytes(aes_ecb_decrypt(data, self.key, self.iv))
|
||||
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
|
||||
|
||||
def test_key_expansion(self):
|
||||
key = '4f6bdaa39e2f8cb07f5e722d9edef314'
|
||||
|
||||
self.assertEqual(key_expansion(bytes_to_intlist(bytearray.fromhex(key))), [
|
||||
0x4F, 0x6B, 0xDA, 0xA3, 0x9E, 0x2F, 0x8C, 0xB0, 0x7F, 0x5E, 0x72, 0x2D, 0x9E, 0xDE, 0xF3, 0x14,
|
||||
0x53, 0x66, 0x20, 0xA8, 0xCD, 0x49, 0xAC, 0x18, 0xB2, 0x17, 0xDE, 0x35, 0x2C, 0xC9, 0x2D, 0x21,
|
||||
0x8C, 0xBE, 0xDD, 0xD9, 0x41, 0xF7, 0x71, 0xC1, 0xF3, 0xE0, 0xAF, 0xF4, 0xDF, 0x29, 0x82, 0xD5,
|
||||
0x2D, 0xAD, 0xDE, 0x47, 0x6C, 0x5A, 0xAF, 0x86, 0x9F, 0xBA, 0x00, 0x72, 0x40, 0x93, 0x82, 0xA7,
|
||||
0xF9, 0xBE, 0x82, 0x4E, 0x95, 0xE4, 0x2D, 0xC8, 0x0A, 0x5E, 0x2D, 0xBA, 0x4A, 0xCD, 0xAF, 0x1D,
|
||||
0x54, 0xC7, 0x26, 0x98, 0xC1, 0x23, 0x0B, 0x50, 0xCB, 0x7D, 0x26, 0xEA, 0x81, 0xB0, 0x89, 0xF7,
|
||||
0x93, 0x60, 0x4E, 0x94, 0x52, 0x43, 0x45, 0xC4, 0x99, 0x3E, 0x63, 0x2E, 0x18, 0x8E, 0xEA, 0xD9,
|
||||
0xCA, 0xE7, 0x7B, 0x39, 0x98, 0xA4, 0x3E, 0xFD, 0x01, 0x9A, 0x5D, 0xD3, 0x19, 0x14, 0xB7, 0x0A,
|
||||
0xB0, 0x4E, 0x1C, 0xED, 0x28, 0xEA, 0x22, 0x10, 0x29, 0x70, 0x7F, 0xC3, 0x30, 0x64, 0xC8, 0xC9,
|
||||
0xE8, 0xA6, 0xC1, 0xE9, 0xC0, 0x4C, 0xE3, 0xF9, 0xE9, 0x3C, 0x9C, 0x3A, 0xD9, 0x58, 0x54, 0xF3,
|
||||
0xB4, 0x86, 0xCC, 0xDC, 0x74, 0xCA, 0x2F, 0x25, 0x9D, 0xF6, 0xB3, 0x1F, 0x44, 0xAE, 0xE7, 0xEC])
|
||||
|
||||
def test_pad_block(self):
|
||||
block = [0x21, 0xA0, 0x43, 0xFF]
|
||||
|
||||
self.assertEqual(pad_block(block, 'pkcs7'),
|
||||
block + [0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C])
|
||||
|
||||
self.assertEqual(pad_block(block, 'iso7816'),
|
||||
block + [0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
|
||||
|
||||
self.assertEqual(pad_block(block, 'whitespace'),
|
||||
block + [0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20])
|
||||
|
||||
self.assertEqual(pad_block(block, 'zero'),
|
||||
block + [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
|
||||
|
||||
block = list(range(16))
|
||||
for mode in ('pkcs7', 'iso7816', 'whitespace', 'zero'):
|
||||
self.assertEqual(pad_block(block, mode), block, mode)
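The new `test_key_expansion` and `test_pad_block` cases pin down yt-dlp's pure-Python AES helpers, and the `aes_ecb_encrypt` call drops the unused IV argument. A small sketch of those helpers, using arbitrary example values:

```python
from yt_dlp.aes import aes_ecb_encrypt, pad_block
from yt_dlp.utils import bytes_to_intlist, intlist_to_bytes

key = bytes_to_intlist(b'0123456789abcdef')                     # 128-bit key as an int list
data = pad_block(bytes_to_intlist(b'secret message'), 'pkcs7')  # pad up to the 16-byte block

# aes_ecb_encrypt works on padded integer lists and now takes only data and key
print(intlist_to_bytes(aes_ecb_encrypt(data, key)).hex())
```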
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
@@ -6,8 +7,8 @@ import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from test.helper import is_download_test, try_rm
|
||||
|
||||
from test.helper import is_download_test, try_rm
|
||||
from yt_dlp import YoutubeDL
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import collections
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
@@ -8,8 +8,9 @@ import unittest
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
from test.helper import gettestcases
|
||||
import collections
|
||||
|
||||
from test.helper import gettestcases
|
||||
from yt_dlp.extractor import FacebookIE, YoutubeIE, gen_extractors
|
||||
|
||||
|
||||
|
||||
@@ -1,15 +1,16 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
from test.helper import FakeYDL
|
||||
import shutil
|
||||
|
||||
from test.helper import FakeYDL
|
||||
from yt_dlp.cache import Cache
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
@@ -7,16 +8,14 @@ import unittest
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import struct
|
||||
import urllib.parse
|
||||
|
||||
from yt_dlp import compat
|
||||
from yt_dlp.compat import (
|
||||
compat_etree_fromstring,
|
||||
compat_expanduser,
|
||||
compat_getenv,
|
||||
compat_setenv,
|
||||
compat_str,
|
||||
compat_struct_unpack,
|
||||
compat_urllib_parse_unquote,
|
||||
compat_urllib_parse_unquote_plus,
|
||||
compat_urllib_parse_urlencode,
|
||||
)
|
||||
|
||||
@@ -26,28 +25,20 @@ class TestCompat(unittest.TestCase):
|
||||
with self.assertWarns(DeprecationWarning):
|
||||
compat.compat_basestring
|
||||
|
||||
compat.asyncio.events # Must not raise error
|
||||
with self.assertWarns(DeprecationWarning):
|
||||
compat.WINDOWS_VT_MODE
|
||||
|
||||
def test_compat_getenv(self):
|
||||
test_str = 'тест'
|
||||
compat_setenv('yt_dlp_COMPAT_GETENV', test_str)
|
||||
self.assertEqual(compat_getenv('yt_dlp_COMPAT_GETENV'), test_str)
|
||||
|
||||
def test_compat_setenv(self):
|
||||
test_var = 'yt_dlp_COMPAT_SETENV'
|
||||
test_str = 'тест'
|
||||
compat_setenv(test_var, test_str)
|
||||
compat_getenv(test_var)
|
||||
self.assertEqual(compat_getenv(test_var), test_str)
|
||||
# TODO: Test submodule
|
||||
# compat.asyncio.events # Must not raise error
|
||||
|
||||
def test_compat_expanduser(self):
|
||||
old_home = os.environ.get('HOME')
|
||||
test_str = R'C:\Documents and Settings\тест\Application Data'
|
||||
try:
|
||||
compat_setenv('HOME', test_str)
|
||||
os.environ['HOME'] = test_str
|
||||
self.assertEqual(compat_expanduser('~'), test_str)
|
||||
finally:
|
||||
compat_setenv('HOME', old_home or '')
|
||||
os.environ['HOME'] = old_home or ''
|
||||
|
||||
def test_compat_urllib_parse_unquote(self):
|
||||
self.assertEqual(compat_urllib_parse_unquote('abc%20def'), 'abc def')
|
||||
@@ -69,8 +60,8 @@ class TestCompat(unittest.TestCase):
|
||||
'''(^◣_◢^)っ︻デ═一 ⇀ ⇀ ⇀ ⇀ ⇀ ↶%I%Break%Things%''')
|
||||
|
||||
def test_compat_urllib_parse_unquote_plus(self):
|
||||
self.assertEqual(compat_urllib_parse_unquote_plus('abc%20def'), 'abc def')
|
||||
self.assertEqual(compat_urllib_parse_unquote_plus('%7e/abc+def'), '~/abc def')
|
||||
self.assertEqual(urllib.parse.unquote_plus('abc%20def'), 'abc def')
|
||||
self.assertEqual(urllib.parse.unquote_plus('%7e/abc+def'), '~/abc def')
|
||||
|
||||
def test_compat_urllib_parse_urlencode(self):
|
||||
self.assertEqual(compat_urllib_parse_urlencode({'abc': 'def'}), 'abc=def')
|
||||
@@ -91,11 +82,11 @@ class TestCompat(unittest.TestCase):
|
||||
</root>
|
||||
'''
|
||||
doc = compat_etree_fromstring(xml.encode())
|
||||
self.assertTrue(isinstance(doc.attrib['foo'], compat_str))
|
||||
self.assertTrue(isinstance(doc.attrib['spam'], compat_str))
|
||||
self.assertTrue(isinstance(doc.find('normal').text, compat_str))
|
||||
self.assertTrue(isinstance(doc.find('chinese').text, compat_str))
|
||||
self.assertTrue(isinstance(doc.find('foo/bar').text, compat_str))
|
||||
self.assertTrue(isinstance(doc.attrib['foo'], str))
|
||||
self.assertTrue(isinstance(doc.attrib['spam'], str))
|
||||
self.assertTrue(isinstance(doc.find('normal').text, str))
|
||||
self.assertTrue(isinstance(doc.find('chinese').text, str))
|
||||
self.assertTrue(isinstance(doc.find('foo/bar').text, str))
|
||||
|
||||
def test_compat_etree_fromstring_doctype(self):
|
||||
xml = '''<?xml version="1.0"?>
|
||||
@@ -104,7 +95,7 @@ class TestCompat(unittest.TestCase):
|
||||
compat_etree_fromstring(xml)
|
||||
|
||||
def test_struct_unpack(self):
|
||||
self.assertEqual(compat_struct_unpack('!B', b'\x00'), (0,))
|
||||
self.assertEqual(struct.unpack('!B', b'\x00'), (0,))
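Most of this hunk swaps yt-dlp's `compat_*` shims for their Python 3 standard-library counterparts, keeping only the still-needed helpers such as `compat_etree_fromstring` and `compat_expanduser`. The mapping used throughout, as plain Python:

```python
import struct
import urllib.parse

# compat_str            -> str
# compat_struct_unpack  -> struct.unpack
# compat_urllib_parse_* -> urllib.parse.*
assert urllib.parse.unquote_plus('%7e/abc+def') == '~/abc def'
assert struct.unpack('!B', b'\x00') == (0,)
```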
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
227
test/test_config.py
Normal file
@@ -0,0 +1,227 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
import unittest.mock
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
import contextlib
|
||||
import itertools
|
||||
from pathlib import Path
|
||||
|
||||
from yt_dlp.compat import compat_expanduser
|
||||
from yt_dlp.options import create_parser, parseOpts
|
||||
from yt_dlp.utils import Config, get_executable_path
|
||||
|
||||
ENVIRON_DEFAULTS = {
|
||||
'HOME': None,
|
||||
'XDG_CONFIG_HOME': '/_xdg_config_home/',
|
||||
'USERPROFILE': 'C:/Users/testing/',
|
||||
'APPDATA': 'C:/Users/testing/AppData/Roaming/',
|
||||
'HOMEDRIVE': 'C:/',
|
||||
'HOMEPATH': 'Users/testing/',
|
||||
}
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def set_environ(**kwargs):
|
||||
saved_environ = os.environ.copy()
|
||||
|
||||
for name, value in {**ENVIRON_DEFAULTS, **kwargs}.items():
|
||||
if value is None:
|
||||
os.environ.pop(name, None)
|
||||
else:
|
||||
os.environ[name] = value
|
||||
|
||||
yield
|
||||
|
||||
os.environ.clear()
|
||||
os.environ.update(saved_environ)
|
||||
|
||||
|
||||
def _generate_expected_groups():
|
||||
xdg_config_home = os.getenv('XDG_CONFIG_HOME') or compat_expanduser('~/.config')
|
||||
appdata_dir = os.getenv('appdata')
|
||||
home_dir = compat_expanduser('~')
|
||||
return {
|
||||
'Portable': [
|
||||
Path(get_executable_path(), 'yt-dlp.conf'),
|
||||
],
|
||||
'Home': [
|
||||
Path('yt-dlp.conf'),
|
||||
],
|
||||
'User': [
|
||||
Path(xdg_config_home, 'yt-dlp.conf'),
|
||||
Path(xdg_config_home, 'yt-dlp', 'config'),
|
||||
Path(xdg_config_home, 'yt-dlp', 'config.txt'),
|
||||
*((
|
||||
Path(appdata_dir, 'yt-dlp.conf'),
|
||||
Path(appdata_dir, 'yt-dlp', 'config'),
|
||||
Path(appdata_dir, 'yt-dlp', 'config.txt'),
|
||||
) if appdata_dir else ()),
|
||||
Path(home_dir, 'yt-dlp.conf'),
|
||||
Path(home_dir, 'yt-dlp.conf.txt'),
|
||||
Path(home_dir, '.yt-dlp', 'config'),
|
||||
Path(home_dir, '.yt-dlp', 'config.txt'),
|
||||
],
|
||||
'System': [
|
||||
Path('/etc/yt-dlp.conf'),
|
||||
Path('/etc/yt-dlp/config'),
|
||||
Path('/etc/yt-dlp/config.txt'),
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
class TestConfig(unittest.TestCase):
|
||||
maxDiff = None
|
||||
|
||||
@set_environ()
|
||||
def test_config__ENVIRON_DEFAULTS_sanity(self):
|
||||
expected = make_expected()
|
||||
self.assertCountEqual(
|
||||
set(expected), expected,
|
||||
'ENVIRON_DEFAULTS produces non unique names')
|
||||
|
||||
def test_config_all_environ_values(self):
|
||||
for name, value in ENVIRON_DEFAULTS.items():
|
||||
for new_value in (None, '', '.', value or '/some/dir'):
|
||||
with set_environ(**{name: new_value}):
|
||||
self._simple_grouping_test()
|
||||
|
||||
def test_config_default_expected_locations(self):
|
||||
files, _ = self._simple_config_test()
|
||||
self.assertEqual(
|
||||
files, make_expected(),
|
||||
'Not all expected locations have been checked')
|
||||
|
||||
def test_config_default_grouping(self):
|
||||
self._simple_grouping_test()
|
||||
|
||||
def _simple_grouping_test(self):
|
||||
expected_groups = make_expected_groups()
|
||||
for name, group in expected_groups.items():
|
||||
for index, existing_path in enumerate(group):
|
||||
result, opts = self._simple_config_test(existing_path)
|
||||
expected = expected_from_expected_groups(expected_groups, existing_path)
|
||||
self.assertEqual(
|
||||
result, expected,
|
||||
f'The checked locations do not match the expected ({name}, {index})')
|
||||
self.assertEqual(
|
||||
opts.outtmpl['default'], '1',
|
||||
f'The used result value was incorrect ({name}, {index})')
|
||||
|
||||
def _simple_config_test(self, *stop_paths):
|
||||
encountered = 0
|
||||
paths = []
|
||||
|
||||
def read_file(filename, default=[]):
|
||||
nonlocal encountered
|
||||
path = Path(filename)
|
||||
paths.append(path)
|
||||
if path in stop_paths:
|
||||
encountered += 1
|
||||
return ['-o', f'{encountered}']
|
||||
|
||||
with ConfigMock(read_file):
|
||||
_, opts, _ = parseOpts([], False)
|
||||
|
||||
return paths, opts
|
||||
|
||||
@set_environ()
|
||||
def test_config_early_exit_commandline(self):
|
||||
self._early_exit_test(0, '--ignore-config')
|
||||
|
||||
@set_environ()
|
||||
def test_config_early_exit_files(self):
|
||||
for index, _ in enumerate(make_expected(), 1):
|
||||
self._early_exit_test(index)
|
||||
|
||||
def _early_exit_test(self, allowed_reads, *args):
|
||||
reads = 0
|
||||
|
||||
def read_file(filename, default=[]):
|
||||
nonlocal reads
|
||||
reads += 1
|
||||
|
||||
if reads > allowed_reads:
|
||||
self.fail('The remaining config was not ignored')
|
||||
elif reads == allowed_reads:
|
||||
return ['--ignore-config']
|
||||
|
||||
with ConfigMock(read_file):
|
||||
parseOpts(args, False)
|
||||
|
||||
@set_environ()
|
||||
def test_config_override_commandline(self):
|
||||
self._override_test(0, '-o', 'pass')
|
||||
|
||||
@set_environ()
|
||||
def test_config_override_files(self):
|
||||
for index, _ in enumerate(make_expected(), 1):
|
||||
self._override_test(index)
|
||||
|
||||
def _override_test(self, start_index, *args):
|
||||
index = 0
|
||||
|
||||
def read_file(filename, default=[]):
|
||||
nonlocal index
|
||||
index += 1
|
||||
|
||||
if index > start_index:
|
||||
return ['-o', 'fail']
|
||||
elif index == start_index:
|
||||
return ['-o', 'pass']
|
||||
|
||||
with ConfigMock(read_file):
|
||||
_, opts, _ = parseOpts(args, False)
|
||||
|
||||
self.assertEqual(
|
||||
opts.outtmpl['default'], 'pass',
|
||||
'The earlier group did not override the later ones')
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def ConfigMock(read_file=None):
|
||||
with unittest.mock.patch('yt_dlp.options.Config') as mock:
|
||||
mock.return_value = Config(create_parser())
|
||||
if read_file is not None:
|
||||
mock.read_file = read_file
|
||||
|
||||
yield mock
|
||||
|
||||
|
||||
def make_expected(*filepaths):
|
||||
return expected_from_expected_groups(_generate_expected_groups(), *filepaths)
|
||||
|
||||
|
||||
def make_expected_groups(*filepaths):
|
||||
return _filter_expected_groups(_generate_expected_groups(), filepaths)
|
||||
|
||||
|
||||
def expected_from_expected_groups(expected_groups, *filepaths):
|
||||
return list(itertools.chain.from_iterable(
|
||||
_filter_expected_groups(expected_groups, filepaths).values()))
|
||||
|
||||
|
||||
def _filter_expected_groups(expected, filepaths):
|
||||
if not filepaths:
|
||||
return expected
|
||||
|
||||
result = {}
|
||||
for group, paths in expected.items():
|
||||
new_paths = []
|
||||
for path in paths:
|
||||
new_paths.append(path)
|
||||
if path in filepaths:
|
||||
break
|
||||
|
||||
result[group] = new_paths
|
||||
|
||||
return result
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
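The new test/test_config.py locks in where yt-dlp looks for configuration files (portable, home, user and system locations, in that order), that `--ignore-config` stops any further files from being read, and that earlier groups override later ones. A rough sketch of inspecting the parsed options from Python; this is not part of the test suite:

```python
from yt_dlp.options import parseOpts

# Mirrors the tests: overrideArguments=[] with ignore_config_files=False makes
# parseOpts read the real config locations and return
# (parser, parsed_options, remaining_arguments)
parser, opts, args = parseOpts([], ignore_config_files=False)
print(opts.outtmpl)
```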
@@ -3,6 +3,7 @@ from datetime import datetime, timezone
|
||||
|
||||
from yt_dlp import cookies
|
||||
from yt_dlp.cookies import (
|
||||
LenientSimpleCookie,
|
||||
LinuxChromeCookieDecryptor,
|
||||
MacChromeCookieDecryptor,
|
||||
WindowsChromeCookieDecryptor,
|
||||
@@ -14,16 +15,16 @@ from yt_dlp.cookies import (
|
||||
|
||||
|
||||
class Logger:
|
||||
def debug(self, message):
|
||||
def debug(self, message, *args, **kwargs):
|
||||
print(f'[verbose] {message}')
|
||||
|
||||
def info(self, message):
|
||||
def info(self, message, *args, **kwargs):
|
||||
print(message)
|
||||
|
||||
def warning(self, message, only_once=False):
|
||||
def warning(self, message, *args, **kwargs):
|
||||
self.error(message)
|
||||
|
||||
def error(self, message):
|
||||
def error(self, message, *args, **kwargs):
|
||||
raise Exception(message)
|
||||
|
||||
|
||||
@@ -137,3 +138,163 @@ class TestCookies(unittest.TestCase):
|
||||
def test_pbkdf2_sha1(self):
|
||||
key = pbkdf2_sha1(b'peanuts', b' ' * 16, 1, 16)
|
||||
self.assertEqual(key, b'g\xe1\x8e\x0fQ\x1c\x9b\xf3\xc9`!\xaa\x90\xd9\xd34')
|
||||
|
||||
|
||||
class TestLenientSimpleCookie(unittest.TestCase):
|
||||
def _run_tests(self, *cases):
|
||||
for message, raw_cookie, expected in cases:
|
||||
cookie = LenientSimpleCookie(raw_cookie)
|
||||
|
||||
with self.subTest(message, expected=expected):
|
||||
self.assertEqual(cookie.keys(), expected.keys(), message)
|
||||
|
||||
for key, expected_value in expected.items():
|
||||
morsel = cookie[key]
|
||||
if isinstance(expected_value, tuple):
|
||||
expected_value, expected_attributes = expected_value
|
||||
else:
|
||||
expected_attributes = {}
|
||||
|
||||
attributes = {
|
||||
key: value
|
||||
for key, value in dict(morsel).items()
|
||||
if value != ""
|
||||
}
|
||||
self.assertEqual(attributes, expected_attributes, message)
|
||||
|
||||
self.assertEqual(morsel.value, expected_value, message)
|
||||
|
||||
def test_parsing(self):
|
||||
self._run_tests(
|
||||
# Copied from https://github.com/python/cpython/blob/v3.10.7/Lib/test/test_http_cookies.py
|
||||
(
|
||||
"Test basic cookie",
|
||||
"chips=ahoy; vienna=finger",
|
||||
{"chips": "ahoy", "vienna": "finger"},
|
||||
),
|
||||
(
|
||||
"Test quoted cookie",
|
||||
'keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"',
|
||||
{"keebler": 'E=mc2; L="Loves"; fudge=\012;'},
|
||||
),
|
||||
(
|
||||
"Allow '=' in an unquoted value",
|
||||
"keebler=E=mc2",
|
||||
{"keebler": "E=mc2"},
|
||||
),
|
||||
(
|
||||
"Allow cookies with ':' in their name",
|
||||
"key:term=value:term",
|
||||
{"key:term": "value:term"},
|
||||
),
|
||||
(
|
||||
"Allow '[' and ']' in cookie values",
|
||||
"a=b; c=[; d=r; f=h",
|
||||
{"a": "b", "c": "[", "d": "r", "f": "h"},
|
||||
),
|
||||
(
|
||||
"Test basic cookie attributes",
|
||||
'Customer="WILE_E_COYOTE"; Version=1; Path=/acme',
|
||||
{"Customer": ("WILE_E_COYOTE", {"version": "1", "path": "/acme"})},
|
||||
),
|
||||
(
|
||||
"Test flag only cookie attributes",
|
||||
'Customer="WILE_E_COYOTE"; HttpOnly; Secure',
|
||||
{"Customer": ("WILE_E_COYOTE", {"httponly": True, "secure": True})},
|
||||
),
|
||||
(
|
||||
"Test flag only attribute with values",
|
||||
"eggs=scrambled; httponly=foo; secure=bar; Path=/bacon",
|
||||
{"eggs": ("scrambled", {"httponly": "foo", "secure": "bar", "path": "/bacon"})},
|
||||
),
|
||||
(
|
||||
"Test special case for 'expires' attribute, 4 digit year",
|
||||
'Customer="W"; expires=Wed, 01 Jan 2010 00:00:00 GMT',
|
||||
{"Customer": ("W", {"expires": "Wed, 01 Jan 2010 00:00:00 GMT"})},
|
||||
),
|
||||
(
|
||||
"Test special case for 'expires' attribute, 2 digit year",
|
||||
'Customer="W"; expires=Wed, 01 Jan 98 00:00:00 GMT',
|
||||
{"Customer": ("W", {"expires": "Wed, 01 Jan 98 00:00:00 GMT"})},
|
||||
),
|
||||
(
|
||||
"Test extra spaces in keys and values",
|
||||
"eggs = scrambled ; secure ; path = bar ; foo=foo ",
|
||||
{"eggs": ("scrambled", {"secure": True, "path": "bar"}), "foo": "foo"},
|
||||
),
|
||||
(
|
||||
"Test quoted attributes",
|
||||
'Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"',
|
||||
{"Customer": ("WILE_E_COYOTE", {"version": "1", "path": "/acme"})}
|
||||
),
|
||||
# Our own tests that CPython passes
|
||||
(
|
||||
"Allow ';' in quoted value",
|
||||
'chips="a;hoy"; vienna=finger',
|
||||
{"chips": "a;hoy", "vienna": "finger"},
|
||||
),
|
||||
(
|
||||
"Keep only the last set value",
|
||||
"a=c; a=b",
|
||||
{"a": "b"},
|
||||
),
|
||||
)
|
||||
|
||||
def test_lenient_parsing(self):
|
||||
self._run_tests(
|
||||
(
|
||||
"Ignore and try to skip invalid cookies",
|
||||
'chips={"ahoy;": 1}; vienna="finger;"',
|
||||
{"vienna": "finger;"},
|
||||
),
|
||||
(
|
||||
"Ignore cookies without a name",
|
||||
"a=b; unnamed; c=d",
|
||||
{"a": "b", "c": "d"},
|
||||
),
|
||||
(
|
||||
"Ignore '\"' cookie without name",
|
||||
'a=b; "; c=d',
|
||||
{"a": "b", "c": "d"},
|
||||
),
|
||||
(
|
||||
"Skip all space separated values",
|
||||
"x a=b c=d x; e=f",
|
||||
{"a": "b", "c": "d", "e": "f"},
|
||||
),
|
||||
(
|
||||
"Skip all space separated values",
|
||||
'x a=b; data={"complex": "json", "with": "key=value"}; x c=d x',
|
||||
{"a": "b", "c": "d"},
|
||||
),
|
||||
(
|
||||
"Expect quote mending",
|
||||
'a=b; invalid="; c=d',
|
||||
{"a": "b", "c": "d"},
|
||||
),
|
||||
(
|
||||
"Reset morsel after invalid to not capture attributes",
|
||||
"a=b; invalid; Version=1; c=d",
|
||||
{"a": "b", "c": "d"},
|
||||
),
|
||||
(
|
||||
"Reset morsel after invalid to not capture attributes",
|
||||
"a=b; $invalid; $Version=1; c=d",
|
||||
{"a": "b", "c": "d"},
|
||||
),
|
||||
(
|
||||
"Continue after non-flag attribute without value",
|
||||
"a=b; path; Version=1; c=d",
|
||||
{"a": "b", "c": "d"},
|
||||
),
|
||||
(
|
||||
"Allow cookie attributes with `$` prefix",
|
||||
'Customer="WILE_E_COYOTE"; $Version=1; $Secure; $Path=/acme',
|
||||
{"Customer": ("WILE_E_COYOTE", {"version": "1", "secure": True, "path": "/acme"})},
|
||||
),
|
||||
(
|
||||
"Invalid Morsel keys should not result in an error",
|
||||
"Key=Value; [Invalid]=Value; Another=Value",
|
||||
{"Key": "Value", "Another": "Value"},
|
||||
),
|
||||
)
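`LenientSimpleCookie` is a drop-in replacement for the standard library's `SimpleCookie` that tries to skip malformed entries instead of giving up on the whole header, which matters when parsing cookie headers copied from real browsers. A small usage sketch mirroring the cases above:

```python
from yt_dlp.cookies import LenientSimpleCookie

# The broken middle entry is skipped; 'a' and 'c' survive
cookie = LenientSimpleCookie('a=b; invalid="; c=d')
print({name: morsel.value for name, morsel in cookie.items()})  # {'a': 'b', 'c': 'd'}
```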
|
||||
@@ -1,37 +1,40 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import collections
|
||||
import hashlib
|
||||
import http.client
|
||||
import json
|
||||
import socket
|
||||
import urllib.error
|
||||
|
||||
from test.helper import (
|
||||
assertGreaterEqual,
|
||||
expect_info_dict,
|
||||
expect_warnings,
|
||||
get_params,
|
||||
gettestcases,
|
||||
getwebpagetestcases,
|
||||
is_download_test,
|
||||
report_warning,
|
||||
try_rm,
|
||||
)
|
||||
|
||||
import yt_dlp.YoutubeDL
|
||||
from yt_dlp.compat import (
|
||||
compat_http_client,
|
||||
compat_HTTPError,
|
||||
compat_urllib_error,
|
||||
)
|
||||
import yt_dlp.YoutubeDL # isort: split
|
||||
from yt_dlp.extractor import get_info_extractor
|
||||
from yt_dlp.utils import (
|
||||
DownloadError,
|
||||
ExtractorError,
|
||||
UnavailableVideoError,
|
||||
format_bytes,
|
||||
join_nonempty,
|
||||
)
|
||||
|
||||
RETRIES = 3
|
||||
@@ -43,7 +46,7 @@ class YoutubeDL(yt_dlp.YoutubeDL):
|
||||
self.processed_info_dicts = []
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def report_warning(self, message):
|
||||
def report_warning(self, message, *args, **kwargs):
|
||||
# Don't accept warnings during tests
|
||||
raise ExtractorError(message)
|
||||
|
||||
@@ -57,7 +60,9 @@ def _file_md5(fn):
|
||||
return hashlib.md5(f.read()).hexdigest()
|
||||
|
||||
|
||||
defs = gettestcases()
|
||||
normal_test_cases = gettestcases()
|
||||
webpage_test_cases = getwebpagetestcases()
|
||||
tests_counter = collections.defaultdict(collections.Counter)
|
||||
|
||||
|
||||
@is_download_test
|
||||
@@ -72,24 +77,13 @@ class TestDownload(unittest.TestCase):
|
||||
|
||||
def __str__(self):
|
||||
"""Identify each test with the `add_ie` attribute, if available."""
|
||||
cls, add_ie = type(self), getattr(self, self._testMethodName).add_ie
|
||||
return f'{self._testMethodName} ({cls.__module__}.{cls.__name__}){f" [{add_ie}]" if add_ie else ""}:'
|
||||
|
||||
def strclass(cls):
|
||||
"""From 2.7's unittest; 2.6 had _strclass so we can't import it."""
|
||||
return f'{cls.__module__}.{cls.__name__}'
|
||||
|
||||
add_ie = getattr(self, self._testMethodName).add_ie
|
||||
return '%s (%s)%s:' % (self._testMethodName,
|
||||
strclass(self.__class__),
|
||||
' [%s]' % add_ie if add_ie else '')
|
||||
|
||||
def setUp(self):
|
||||
self.defs = defs
|
||||
|
||||
# Dynamically generate tests
|
||||
|
||||
|
||||
def generator(test_case, tname):
|
||||
|
||||
def test_template(self):
|
||||
if self.COMPLETED_TESTS.get(tname):
|
||||
return
|
||||
@@ -102,33 +96,34 @@ def generator(test_case, tname):
|
||||
|
||||
def print_skipping(reason):
|
||||
print('Skipping %s: %s' % (test_case['name'], reason))
|
||||
self.skipTest(reason)
|
||||
|
||||
if not ie.working():
|
||||
print_skipping('IE marked as not _WORKING')
|
||||
return
|
||||
|
||||
for tc in test_cases:
|
||||
info_dict = tc.get('info_dict', {})
|
||||
params = tc.get('params', {})
|
||||
if not info_dict.get('id'):
|
||||
raise Exception('Test definition incorrect. \'id\' key is not present')
|
||||
elif not info_dict.get('ext'):
|
||||
raise Exception(f'Test {tname} definition incorrect - "id" key is not present')
|
||||
elif not info_dict.get('ext') and info_dict.get('_type', 'video') == 'video':
|
||||
if params.get('skip_download') and params.get('ignore_no_formats_error'):
|
||||
continue
|
||||
raise Exception('Test definition incorrect. The output file cannot be known. \'ext\' key is not present')
|
||||
raise Exception(f'Test {tname} definition incorrect - "ext" key must be present to define the output file')
|
||||
|
||||
if 'skip' in test_case:
|
||||
print_skipping(test_case['skip'])
|
||||
return
|
||||
|
||||
for other_ie in other_ies:
|
||||
if not other_ie.working():
|
||||
print_skipping('test depends on %sIE, marked as not WORKING' % other_ie.ie_key())
|
||||
return
|
||||
|
||||
params = get_params(test_case.get('params', {}))
|
||||
params['outtmpl'] = tname + '_' + params['outtmpl']
|
||||
if is_playlist and 'playlist' not in test_case:
|
||||
params.setdefault('extract_flat', 'in_playlist')
|
||||
params.setdefault('playlistend', test_case.get('playlist_mincount'))
|
||||
params.setdefault('playlistend', test_case.get(
|
||||
'playlist_mincount', test_case.get('playlist_count', -2) + 1))
|
||||
params.setdefault('skip_download', True)
|
||||
|
||||
ydl = YoutubeDL(params, auto_init=False)
|
||||
@@ -167,7 +162,9 @@ def generator(test_case, tname):
|
||||
force_generic_extractor=params.get('force_generic_extractor', False))
|
||||
except (DownloadError, ExtractorError) as err:
|
||||
# Check if the exception is not a network related one
|
||||
if not err.exc_info[0] in (compat_urllib_error.URLError, socket.timeout, UnavailableVideoError, compat_http_client.BadStatusLine) or (err.exc_info[0] == compat_HTTPError and err.exc_info[1].code == 503):
|
||||
if (err.exc_info[0] not in (urllib.error.URLError, socket.timeout, UnavailableVideoError, http.client.BadStatusLine)
|
||||
or (err.exc_info[0] == urllib.error.HTTPError and err.exc_info[1].code == 503)):
|
||||
err.msg = f'{getattr(err, "msg", err)} ({tname})'
|
||||
raise
|
||||
|
||||
if try_num == RETRIES:
|
||||
@@ -216,6 +213,8 @@ def generator(test_case, tname):
|
||||
tc_res_dict = res_dict['entries'][tc_num]
|
||||
# First, check test cases' data against extracted data alone
|
||||
expect_info_dict(self, tc_res_dict, tc.get('info_dict', {}))
|
||||
if tc_res_dict.get('_type', 'video') != 'video':
|
||||
continue
|
||||
# Now, check downloaded file consistency
|
||||
tc_filename = get_tc_filename(tc)
|
||||
if not test_case.get('params', {}).get('skip_download', False):
|
||||
@@ -255,35 +254,43 @@ def generator(test_case, tname):
|
||||
|
||||
|
||||
# And add them to TestDownload
|
||||
tests_counter = {}
|
||||
for test_case in defs:
|
||||
name = test_case['name']
|
||||
i = tests_counter.get(name, 0)
|
||||
tests_counter[name] = i + 1
|
||||
tname = f'test_{name}_{i}' if i else f'test_{name}'
|
||||
test_method = generator(test_case, tname)
|
||||
test_method.__name__ = str(tname)
|
||||
ie_list = test_case.get('add_ie')
|
||||
test_method.add_ie = ie_list and ','.join(ie_list)
|
||||
setattr(TestDownload, test_method.__name__, test_method)
|
||||
del test_method
|
||||
def inject_tests(test_cases, label=''):
|
||||
for test_case in test_cases:
|
||||
name = test_case['name']
|
||||
tname = join_nonempty('test', name, label, tests_counter[name][label], delim='_')
|
||||
tests_counter[name][label] += 1
|
||||
|
||||
test_method = generator(test_case, tname)
|
||||
test_method.__name__ = tname
|
||||
test_method.add_ie = ','.join(test_case.get('add_ie', []))
|
||||
setattr(TestDownload, test_method.__name__, test_method)
|
||||
|
||||
|
||||
def batch_generator(name, num_tests):
|
||||
inject_tests(normal_test_cases)
|
||||
|
||||
# TODO: disable redirection to the IE to ensure we are actually testing the webpage extraction
|
||||
inject_tests(webpage_test_cases, 'webpage')
|
||||
|
||||
|
||||
def batch_generator(name):
|
||||
def test_template(self):
|
||||
for i in range(num_tests):
|
||||
getattr(self, f'test_{name}_{i}' if i else f'test_{name}')()
|
||||
for label, num_tests in tests_counter[name].items():
|
||||
for i in range(num_tests):
|
||||
test_name = join_nonempty('test', name, label, i, delim='_')
|
||||
try:
|
||||
getattr(self, test_name)()
|
||||
except unittest.SkipTest:
|
||||
print(f'Skipped {test_name}')
|
||||
|
||||
return test_template
|
||||
|
||||
|
||||
for name, num_tests in tests_counter.items():
|
||||
test_method = batch_generator(name, num_tests)
|
||||
for name in tests_counter:
|
||||
test_method = batch_generator(name)
|
||||
test_method.__name__ = f'test_{name}_all'
|
||||
test_method.add_ie = ''
|
||||
setattr(TestDownload, test_method.__name__, test_method)
|
||||
del test_method
|
||||
del test_method
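Test injection is now shared between the normal extractor test cases and the new webpage test cases, with `join_nonempty` building names like `test_<IE>_webpage_<n>` while silently dropping empty parts. For reference, a quick sketch of that helper:

```python
from yt_dlp.utils import join_nonempty

# Falsy parts (empty label, counter 0) are dropped before joining
print(join_nonempty('test', 'Youtube', '', 0, delim='_'))         # test_Youtube
print(join_nonempty('test', 'Youtube', 'webpage', 1, delim='_'))  # test_Youtube_webpage_1
```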
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@@ -1,17 +1,19 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
import threading
|
||||
from test.helper import http_server_port, try_rm
|
||||
|
||||
import http.server
|
||||
import re
|
||||
import threading
|
||||
|
||||
from test.helper import http_server_port, try_rm
|
||||
from yt_dlp import YoutubeDL
|
||||
from yt_dlp.compat import compat_http_server
|
||||
from yt_dlp.downloader.http import HttpFD
|
||||
from yt_dlp.utils import encodeFilename
|
||||
|
||||
@@ -21,7 +23,7 @@ TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
TEST_SIZE = 10 * 1024
|
||||
|
||||
|
||||
class HTTPTestRequestHandler(compat_http_server.BaseHTTPRequestHandler):
|
||||
class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
|
||||
def log_message(self, format, *args):
|
||||
pass
|
||||
|
||||
@@ -78,7 +80,7 @@ class FakeLogger:
|
||||
|
||||
class TestHttpFD(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.httpd = compat_http_server.HTTPServer(
|
||||
self.httpd = http.server.HTTPServer(
|
||||
('127.0.0.1', 0), HTTPTestRequestHandler)
|
||||
self.port = http_server_port(self.httpd)
|
||||
self.server_thread = threading.Thread(target=self.httpd.serve_forever)
|
||||
@@ -93,8 +95,8 @@ class TestHttpFD(unittest.TestCase):
|
||||
try_rm(encodeFilename(filename))
|
||||
self.assertTrue(downloader.real_download(filename, {
|
||||
'url': 'http://127.0.0.1:%d/%s' % (self.port, ep),
|
||||
}))
|
||||
self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE)
|
||||
}), ep)
|
||||
self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE, ep)
|
||||
try_rm(encodeFilename(filename))
|
||||
|
||||
def download_all(self, params):
|
||||
|
||||
@@ -1,47 +1,56 @@
|
||||
#!/usr/bin/env python3
|
||||
import contextlib
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from yt_dlp.utils import encodeArgument
|
||||
|
||||
import contextlib
|
||||
import subprocess
|
||||
|
||||
from yt_dlp.utils import Popen
|
||||
|
||||
rootDir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
|
||||
try:
|
||||
_DEV_NULL = subprocess.DEVNULL
|
||||
except AttributeError:
|
||||
_DEV_NULL = open(os.devnull, 'wb')
|
||||
LAZY_EXTRACTORS = 'yt_dlp/extractor/lazy_extractors.py'
|
||||
|
||||
|
||||
class TestExecution(unittest.TestCase):
|
||||
def test_import(self):
|
||||
subprocess.check_call([sys.executable, '-c', 'import yt_dlp'], cwd=rootDir)
|
||||
|
||||
def test_module_exec(self):
|
||||
subprocess.check_call([sys.executable, '-m', 'yt_dlp', '--ignore-config', '--version'], cwd=rootDir, stdout=_DEV_NULL)
|
||||
def run_yt_dlp(self, exe=(sys.executable, 'yt_dlp/__main__.py'), opts=('--version', )):
|
||||
stdout, stderr, returncode = Popen.run(
|
||||
[*exe, '--ignore-config', *opts], cwd=rootDir, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
print(stderr, file=sys.stderr)
|
||||
self.assertEqual(returncode, 0)
|
||||
return stdout.strip(), stderr.strip()
|
||||
|
||||
def test_main_exec(self):
|
||||
subprocess.check_call([sys.executable, 'yt_dlp/__main__.py', '--ignore-config', '--version'], cwd=rootDir, stdout=_DEV_NULL)
|
||||
self.run_yt_dlp()
|
||||
|
||||
def test_import(self):
|
||||
self.run_yt_dlp(exe=(sys.executable, '-c', 'import yt_dlp'))
|
||||
|
||||
def test_module_exec(self):
|
||||
self.run_yt_dlp(exe=(sys.executable, '-m', 'yt_dlp'))
|
||||
|
||||
def test_cmdline_umlauts(self):
|
||||
p = subprocess.Popen(
|
||||
[sys.executable, 'yt_dlp/__main__.py', '--ignore-config', encodeArgument('ä'), '--version'],
|
||||
cwd=rootDir, stdout=_DEV_NULL, stderr=subprocess.PIPE)
|
||||
_, stderr = p.communicate()
|
||||
_, stderr = self.run_yt_dlp(opts=('ä', '--version'))
|
||||
self.assertFalse(stderr)
|
||||
|
||||
def test_lazy_extractors(self):
|
||||
try:
|
||||
subprocess.check_call([sys.executable, 'devscripts/make_lazy_extractors.py', 'yt_dlp/extractor/lazy_extractors.py'], cwd=rootDir, stdout=_DEV_NULL)
|
||||
subprocess.check_call([sys.executable, 'test/test_all_urls.py'], cwd=rootDir, stdout=_DEV_NULL)
|
||||
subprocess.check_call([sys.executable, 'devscripts/make_lazy_extractors.py', LAZY_EXTRACTORS],
|
||||
cwd=rootDir, stdout=subprocess.DEVNULL)
|
||||
self.assertTrue(os.path.exists(LAZY_EXTRACTORS))
|
||||
|
||||
_, stderr = self.run_yt_dlp(opts=('-s', 'test:'))
|
||||
self.assertFalse(stderr)
|
||||
|
||||
subprocess.check_call([sys.executable, 'test/test_all_urls.py'], cwd=rootDir, stdout=subprocess.DEVNULL)
|
||||
finally:
|
||||
with contextlib.suppress(OSError):
|
||||
os.remove('yt_dlp/extractor/lazy_extractors.py')
|
||||
os.remove(LAZY_EXTRACTORS)
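The execution tests now go through `yt_dlp.utils.Popen.run`, which wraps `subprocess.Popen`, waits for the process and hands back its output along with the return code. A minimal sketch of the helper as it is used above:

```python
import subprocess
import sys

from yt_dlp.utils import Popen

stdout, stderr, returncode = Popen.run(
    [sys.executable, '-m', 'yt_dlp', '--ignore-config', '--version'],
    text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print(returncode, stdout.strip())
```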
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
@@ -6,17 +7,19 @@ import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import http.server
|
||||
import ssl
|
||||
import threading
|
||||
from test.helper import http_server_port
|
||||
import urllib.request
|
||||
|
||||
from test.helper import http_server_port
|
||||
from yt_dlp import YoutubeDL
|
||||
from yt_dlp.compat import compat_http_server, compat_urllib_request
|
||||
|
||||
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
class HTTPTestRequestHandler(compat_http_server.BaseHTTPRequestHandler):
|
||||
class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
|
||||
def log_message(self, format, *args):
|
||||
pass
|
||||
|
||||
@@ -53,7 +56,7 @@ class FakeLogger:
|
||||
|
||||
class TestHTTP(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.httpd = compat_http_server.HTTPServer(
|
||||
self.httpd = http.server.HTTPServer(
|
||||
('127.0.0.1', 0), HTTPTestRequestHandler)
|
||||
self.port = http_server_port(self.httpd)
|
||||
self.server_thread = threading.Thread(target=self.httpd.serve_forever)
|
||||
@@ -64,7 +67,7 @@ class TestHTTP(unittest.TestCase):
|
||||
class TestHTTPS(unittest.TestCase):
|
||||
def setUp(self):
|
||||
certfn = os.path.join(TEST_DIR, 'testcert.pem')
|
||||
self.httpd = compat_http_server.HTTPServer(
|
||||
self.httpd = http.server.HTTPServer(
|
||||
('127.0.0.1', 0), HTTPTestRequestHandler)
|
||||
sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||
sslctx.load_cert_chain(certfn, None)
|
||||
@@ -82,7 +85,7 @@ class TestHTTPS(unittest.TestCase):
|
||||
|
||||
ydl = YoutubeDL({'logger': FakeLogger(), 'nocheckcertificate': True})
|
||||
r = ydl.extract_info('https://127.0.0.1:%d/video.html' % self.port)
|
||||
self.assertEqual(r['entries'][0]['url'], 'https://127.0.0.1:%d/vid.mp4' % self.port)
|
||||
self.assertEqual(r['url'], 'https://127.0.0.1:%d/vid.mp4' % self.port)
|
||||
|
||||
|
||||
class TestClientCert(unittest.TestCase):
|
||||
@@ -90,7 +93,7 @@ class TestClientCert(unittest.TestCase):
|
||||
certfn = os.path.join(TEST_DIR, 'testcert.pem')
|
||||
self.certdir = os.path.join(TEST_DIR, 'testdata', 'certificate')
|
||||
cacertfn = os.path.join(self.certdir, 'ca.crt')
|
||||
self.httpd = compat_http_server.HTTPServer(('127.0.0.1', 0), HTTPTestRequestHandler)
|
||||
self.httpd = http.server.HTTPServer(('127.0.0.1', 0), HTTPTestRequestHandler)
|
||||
sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||
sslctx.verify_mode = ssl.CERT_REQUIRED
|
||||
sslctx.load_verify_locations(cafile=cacertfn)
|
||||
@@ -110,7 +113,7 @@ class TestClientCert(unittest.TestCase):
|
||||
**params,
|
||||
})
|
||||
r = ydl.extract_info('https://127.0.0.1:%d/video.html' % self.port)
|
||||
self.assertEqual(r['entries'][0]['url'], 'https://127.0.0.1:%d/vid.mp4' % self.port)
|
||||
self.assertEqual(r['url'], 'https://127.0.0.1:%d/vid.mp4' % self.port)
|
||||
|
||||
def test_certificate_combined_nopass(self):
|
||||
self._run_test(client_certificate=os.path.join(self.certdir, 'clientwithkey.crt'))
|
||||
@@ -130,7 +133,7 @@ class TestClientCert(unittest.TestCase):
|
||||
|
||||
|
||||
def _build_proxy_handler(name):
|
||||
class HTTPTestRequestHandler(compat_http_server.BaseHTTPRequestHandler):
|
||||
class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
|
||||
proxy_name = name
|
||||
|
||||
def log_message(self, format, *args):
|
||||
@@ -146,14 +149,14 @@ def _build_proxy_handler(name):
|
||||
|
||||
class TestProxy(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.proxy = compat_http_server.HTTPServer(
|
||||
self.proxy = http.server.HTTPServer(
|
||||
('127.0.0.1', 0), _build_proxy_handler('normal'))
|
||||
self.port = http_server_port(self.proxy)
|
||||
self.proxy_thread = threading.Thread(target=self.proxy.serve_forever)
|
||||
self.proxy_thread.daemon = True
|
||||
self.proxy_thread.start()
|
||||
|
||||
self.geo_proxy = compat_http_server.HTTPServer(
|
||||
self.geo_proxy = http.server.HTTPServer(
|
||||
('127.0.0.1', 0), _build_proxy_handler('geo'))
|
||||
self.geo_port = http_server_port(self.geo_proxy)
|
||||
self.geo_proxy_thread = threading.Thread(target=self.geo_proxy.serve_forever)
|
||||
@@ -170,7 +173,7 @@ class TestProxy(unittest.TestCase):
|
||||
response = ydl.urlopen(url).read().decode()
|
||||
self.assertEqual(response, f'normal: {url}')
|
||||
|
||||
req = compat_urllib_request.Request(url)
|
||||
req = urllib.request.Request(url)
|
||||
req.add_header('Ytdl-request-proxy', geo_proxy)
|
||||
response = ydl.urlopen(req).read().decode()
|
||||
self.assertEqual(response, f'geo: {url}')
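Besides a global `proxy` option, yt-dlp lets a single request pick its own proxy through the `Ytdl-request-proxy` header, which is what the geo-proxy assertion above exercises. A rough sketch outside the test harness; both proxy addresses are placeholders:

```python
import urllib.request

from yt_dlp import YoutubeDL

ydl = YoutubeDL({'proxy': 'http://127.0.0.1:3128'})  # default proxy (placeholder)

req = urllib.request.Request('http://example.com/')
req.add_header('Ytdl-request-proxy', 'http://127.0.0.1:3129')  # used for this request only
print(ydl.urlopen(req).read().decode())
```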
|
||||
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
@@ -6,8 +7,8 @@ import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from test.helper import FakeYDL, is_download_test
|
||||
|
||||
from test.helper import FakeYDL, is_download_test
|
||||
from yt_dlp.extractor import IqiyiIE
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
@@ -6,7 +7,10 @@ import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from yt_dlp.jsinterp import JSInterpreter
|
||||
import math
|
||||
import re
|
||||
|
||||
from yt_dlp.jsinterp import JS_Undefined, JSInterpreter
|
||||
|
||||
|
||||
class TestJSInterpreter(unittest.TestCase):
|
||||
@@ -17,6 +21,9 @@ class TestJSInterpreter(unittest.TestCase):
|
||||
jsi = JSInterpreter('function x3(){return 42;}')
|
||||
self.assertEqual(jsi.call_function('x3'), 42)
|
||||
|
||||
jsi = JSInterpreter('function x3(){42}')
|
||||
self.assertEqual(jsi.call_function('x3'), None)
|
||||
|
||||
jsi = JSInterpreter('var x5 = function(){return 42;}')
|
||||
self.assertEqual(jsi.call_function('x5'), 42)
|
||||
|
||||
@@ -43,14 +50,32 @@ class TestJSInterpreter(unittest.TestCase):
|
||||
jsi = JSInterpreter('function f(){return 1 << 5;}')
|
||||
self.assertEqual(jsi.call_function('f'), 32)
|
||||
|
||||
jsi = JSInterpreter('function f(){return 2 ** 5}')
|
||||
self.assertEqual(jsi.call_function('f'), 32)
|
||||
|
||||
jsi = JSInterpreter('function f(){return 19 & 21;}')
|
||||
self.assertEqual(jsi.call_function('f'), 17)
|
||||
|
||||
jsi = JSInterpreter('function f(){return 11 >> 2;}')
|
||||
self.assertEqual(jsi.call_function('f'), 2)
|
||||
|
||||
jsi = JSInterpreter('function f(){return []? 2+3: 4;}')
|
||||
self.assertEqual(jsi.call_function('f'), 5)
|
||||
|
||||
jsi = JSInterpreter('function f(){return 1 == 2}')
|
||||
self.assertEqual(jsi.call_function('f'), False)
|
||||
|
||||
jsi = JSInterpreter('function f(){return 0 && 1 || 2;}')
|
||||
self.assertEqual(jsi.call_function('f'), 2)
|
||||
|
||||
jsi = JSInterpreter('function f(){return 0 ?? 42;}')
|
||||
self.assertEqual(jsi.call_function('f'), 0)
|
||||
|
||||
jsi = JSInterpreter('function f(){return "life, the universe and everything" < 42;}')
|
||||
self.assertFalse(jsi.call_function('f'))
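`JSInterpreter` is the small JavaScript evaluator yt-dlp uses for player code (for example YouTube signature functions); the added cases extend its operator coverage with `**`, `??`, ternaries and string comparison. A minimal usage sketch:

```python
from yt_dlp.jsinterp import JSInterpreter

# Functions are parsed from source and can then be called with arguments
jsi = JSInterpreter('function add(a, b) { return a + b; }')
print(jsi.call_function('add', 19, 23))  # 42

# Mirrors one of the new cases: '??' only falls through for null/undefined
jsi = JSInterpreter('function f() { return 0 ?? 42; }')
print(jsi.call_function('f'))  # 0
```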
|
||||
def test_array_access(self):
|
||||
jsi = JSInterpreter('function f(){var x = [1,2,3]; x[0] = 4; x[0] = 5; x[2] = 7; return x;}')
|
||||
jsi = JSInterpreter('function f(){var x = [1,2,3]; x[0] = 4; x[0] = 5; x[2.0] = 7; return x;}')
|
||||
self.assertEqual(jsi.call_function('f'), [5, 2, 7])
|
||||
|
||||
def test_parens(self):
|
||||
@@ -60,6 +85,10 @@ class TestJSInterpreter(unittest.TestCase):
|
||||
jsi = JSInterpreter('function f(){return (1 + 2) * 3;}')
|
||||
self.assertEqual(jsi.call_function('f'), 9)
|
||||
|
||||
def test_quotes(self):
|
||||
jsi = JSInterpreter(R'function f(){return "a\"\\("}')
|
||||
self.assertEqual(jsi.call_function('f'), R'a"\(')
|
||||
|
||||
def test_assignments(self):
|
||||
jsi = JSInterpreter('function f(){var x = 20; x = 30 + 1; return x;}')
|
||||
self.assertEqual(jsi.call_function('f'), 31)
|
||||
@@ -102,17 +131,33 @@ class TestJSInterpreter(unittest.TestCase):
|
||||
}''')
|
||||
self.assertEqual(jsi.call_function('x'), [20, 20, 30, 40, 50])
|
||||
|
||||
def test_builtins(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return NaN }
|
||||
''')
|
||||
self.assertTrue(math.isnan(jsi.call_function('x')))
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return new Date('Wednesday 31 December 1969 18:01:26 MDT') - 0; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), 86000)
|
||||
jsi = JSInterpreter('''
|
||||
function x(dt) { return new Date(dt) - 0; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x', 'Wednesday 31 December 1969 18:01:26 MDT'), 86000)
|
||||
|
||||
def test_call(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return 2; }
|
||||
function y(a) { return x() + a; }
|
||||
function y(a) { return x() + (a?a:0); }
|
||||
function z() { return y(3); }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('z'), 5)
|
||||
self.assertEqual(jsi.call_function('y'), 2)
|
||||
|
||||
def test_for_loop(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() { a=0; for (i=0; i-10; i++) {a++} a }
|
||||
function x() { a=0; for (i=0; i-10; i++) {a++} return a }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), 10)
|
||||
|
||||
@@ -151,21 +196,53 @@ class TestJSInterpreter(unittest.TestCase):
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), 10)
|
||||
|
||||
def test_catch(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() { try{throw 10} catch(e){return 5} }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), 5)
|
||||
|
||||
def test_finally(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() { try{throw 10} finally {return 42} }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), 42)
|
||||
jsi = JSInterpreter('''
|
||||
function x() { try{throw 10} catch(e){return 5} finally {return 42} }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), 42)
|
||||
|
||||
def test_nested_try(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() {try {
|
||||
try{throw 10} finally {throw 42}
|
||||
} catch(e){return 5} }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), 5)
|
||||
|
||||
def test_for_loop_continue(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() { a=0; for (i=0; i-10; i++) { continue; a++ } a }
|
||||
function x() { a=0; for (i=0; i-10; i++) { continue; a++ } return a }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), 0)
|
||||
|
||||
def test_for_loop_break(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() { a=0; for (i=0; i-10; i++) { break; a++ } a }
|
||||
function x() { a=0; for (i=0; i-10; i++) { break; a++ } return a }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), 0)
|
||||
|
||||
def test_for_loop_try(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() {
|
||||
for (i=0; i-10; i++) { try { if (i == 5) throw i} catch {return 10} finally {break} };
|
||||
return 42 }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), 42)
|
||||
|
||||
def test_literal_list(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() { [1, 2, "asdf", [5, 6, 7]][3] }
|
||||
function x() { return [1, 2, "asdf", [5, 6, 7]][3] }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), [5, 6, 7])
|
||||
|
||||
@@ -175,6 +252,167 @@ class TestJSInterpreter(unittest.TestCase):
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), 7)
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { a=5; return (a -= 1, a+=3, a); }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), 7)
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return (l=[0,1,2,3], function(a, b){return a+b})((l[1], l[2]), l[3]) }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), 5)
|
||||
|
||||
def test_void(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return void 42; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), None)
|
||||
|
||||
def test_return_function(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return [1, function(){return 1}][1] }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x')([]), 1)
|
||||
|
||||
def test_null(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return null; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), None)
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return [null > 0, null < 0, null == 0, null === 0]; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), [False, False, False, False])
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return [null >= 0, null <= 0]; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), [True, True])
|
||||
|
||||
def test_undefined(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return undefined === undefined; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), True)
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return undefined; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), JS_Undefined)
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { let v; return v; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), JS_Undefined)
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return [undefined === undefined, undefined == undefined, undefined < undefined, undefined > undefined]; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), [True, True, False, False])
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return [undefined === 0, undefined == 0, undefined < 0, undefined > 0]; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), [False, False, False, False])
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return [undefined >= 0, undefined <= 0]; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), [False, False])
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return [undefined > null, undefined < null, undefined == null, undefined === null]; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), [False, False, True, False])
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return [undefined === null, undefined == null, undefined < null, undefined > null]; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), [False, True, False, False])
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { let v; return [42+v, v+42, v**42, 42**v, 0**v]; }
|
||||
''')
|
||||
for y in jsi.call_function('x'):
|
||||
self.assertTrue(math.isnan(y))
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { let v; return v**0; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), 1)
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { let v; return [v>42, v<=42, v&&42, 42&&v]; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), [False, False, JS_Undefined, JS_Undefined])
|
||||
|
||||
jsi = JSInterpreter('function x(){return undefined ?? 42; }')
|
||||
self.assertEqual(jsi.call_function('x'), 42)
|
||||
|
||||
def test_object(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() { return {}; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), {})
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { let a = {m1: 42, m2: 0 }; return [a["m1"], a.m2]; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), [42, 0])
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { let a; return a?.qq; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), JS_Undefined)
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { let a = {m1: 42, m2: 0 }; return a?.qq; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), JS_Undefined)
|
||||
|
||||
def test_regex(self):
|
||||
jsi = JSInterpreter('''
|
||||
function x() { let a=/,,[/,913,/](,)}/; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x'), None)
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { let a=/,,[/,913,/](,)}/; return a; }
|
||||
''')
|
||||
self.assertIsInstance(jsi.call_function('x'), re.Pattern)
|
||||
|
||||
jsi = JSInterpreter('''
|
||||
function x() { let a=/,,[/,913,/](,)}/i; return a; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x').flags & re.I, re.I)
|
||||
|
||||
jsi = JSInterpreter(R'''
|
||||
function x() { let a=/,][}",],()}(\[)/; return a; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x').pattern, r',][}",],()}(\[)')
|
||||
|
||||
jsi = JSInterpreter(R'''
|
||||
function x() { let a=[/[)\\]/]; return a[0]; }
|
||||
''')
|
||||
self.assertEqual(jsi.call_function('x').pattern, r'[)\\]')
|
||||
|
||||
def test_char_code_at(self):
|
||||
jsi = JSInterpreter('function x(i){return "test".charCodeAt(i)}')
|
||||
self.assertEqual(jsi.call_function('x', 0), 116)
|
||||
self.assertEqual(jsi.call_function('x', 1), 101)
|
||||
self.assertEqual(jsi.call_function('x', 2), 115)
|
||||
self.assertEqual(jsi.call_function('x', 3), 116)
|
||||
self.assertEqual(jsi.call_function('x', 4), None)
|
||||
self.assertEqual(jsi.call_function('x', 'not_a_number'), 116)
|
||||
|
||||
def test_bitwise_operators_overflow(self):
|
||||
jsi = JSInterpreter('function x(){return -524999584 << 5}')
|
||||
self.assertEqual(jsi.call_function('x'), 379882496)
|
||||
|
||||
jsi = JSInterpreter('function x(){return 1236566549 << 5}')
|
||||
self.assertEqual(jsi.call_function('x'), 915423904)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
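
The tests above drive the interpreter through its two-step public surface: build a JSInterpreter from a JavaScript source string, then invoke a named function with call_function. A minimal sketch of that pattern (the sample function is illustrative, not taken from the suite):

    from yt_dlp.jsinterp import JSInterpreter

    # Hypothetical snippet mirroring the test pattern above:
    # define a JS function, then call it by name with a Python argument
    jsi = JSInterpreter('function double(n){return n * 2;}')
    assert jsi.call_function('double', 21) == 42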
@@ -1,3 +1,6 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys
import unittest

@@ -1,11 +1,15 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import subprocess
import sys
import unittest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import subprocess

from test.helper import is_download_test, try_rm

root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

test/test_plugins.py (new file, 73 lines)
@@ -0,0 +1,73 @@
import importlib
import os
import shutil
import sys
import unittest
from pathlib import Path

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
TEST_DATA_DIR = Path(os.path.dirname(os.path.abspath(__file__)), 'testdata')
sys.path.append(str(TEST_DATA_DIR))
importlib.invalidate_caches()

from yt_dlp.plugins import PACKAGE_NAME, directories, load_plugins


class TestPlugins(unittest.TestCase):

    TEST_PLUGIN_DIR = TEST_DATA_DIR / PACKAGE_NAME

    def test_directories_containing_plugins(self):
        self.assertIn(self.TEST_PLUGIN_DIR, map(Path, directories()))

    def test_extractor_classes(self):
        for module_name in tuple(sys.modules):
            if module_name.startswith(f'{PACKAGE_NAME}.extractor'):
                del sys.modules[module_name]
        plugins_ie = load_plugins('extractor', 'IE')

        self.assertIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
        self.assertIn('NormalPluginIE', plugins_ie.keys())

        # don't load modules with underscore prefix
        self.assertFalse(
            f'{PACKAGE_NAME}.extractor._ignore' in sys.modules.keys(),
            'loaded module beginning with underscore')
        self.assertNotIn('IgnorePluginIE', plugins_ie.keys())

        # Don't load extractors with underscore prefix
        self.assertNotIn('_IgnoreUnderscorePluginIE', plugins_ie.keys())

        # Don't load extractors not specified in __all__ (if supplied)
        self.assertNotIn('IgnoreNotInAllPluginIE', plugins_ie.keys())
        self.assertIn('InAllPluginIE', plugins_ie.keys())

    def test_postprocessor_classes(self):
        plugins_pp = load_plugins('postprocessor', 'PP')
        self.assertIn('NormalPluginPP', plugins_pp.keys())

    def test_importing_zipped_module(self):
        zip_path = TEST_DATA_DIR / 'zipped_plugins.zip'
        shutil.make_archive(str(zip_path)[:-4], 'zip', str(zip_path)[:-4])
        sys.path.append(str(zip_path))  # add zip to search paths
        importlib.invalidate_caches()  # reset the import caches

        try:
            for plugin_type in ('extractor', 'postprocessor'):
                package = importlib.import_module(f'{PACKAGE_NAME}.{plugin_type}')
                self.assertIn(zip_path / PACKAGE_NAME / plugin_type, map(Path, package.__path__))

            plugins_ie = load_plugins('extractor', 'IE')
            self.assertIn('ZippedPluginIE', plugins_ie.keys())

            plugins_pp = load_plugins('postprocessor', 'PP')
            self.assertIn('ZippedPluginPP', plugins_pp.keys())

        finally:
            sys.path.remove(str(zip_path))
            os.remove(zip_path)
            importlib.invalidate_caches()  # reset the import caches


if __name__ == '__main__':
    unittest.main()
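
The new test file above implies the plugin contract: packages named after PACKAGE_NAME are discovered on sys.path (including inside zip archives), their extractor/postprocessor sub-packages are imported, and classes are collected by the requested suffix, skipping names with a leading underscore and honouring __all__ when present. A sketch of a plugin module matching the layout the tests assume (file path and class bodies are assumptions; only the naming rules come from the tests):

    # <any sys.path entry>/<PACKAGE_NAME>/extractor/normal.py  -- hypothetical layout
    from yt_dlp.extractor.common import InfoExtractor

    class NormalPluginIE(InfoExtractor):             # collected: matches the requested 'IE' suffix
        pass

    class _IgnoreUnderscorePluginIE(InfoExtractor):  # skipped: leading underscore
        pass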
@@ -1,13 +1,15 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from test.helper import get_params, is_download_test, try_rm
|
||||
|
||||
import yt_dlp.YoutubeDL
|
||||
from test.helper import get_params, is_download_test, try_rm
|
||||
import yt_dlp.YoutubeDL # isort: split
|
||||
from yt_dlp.utils import DownloadError
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
@@ -6,6 +7,7 @@ import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
from yt_dlp import YoutubeDL
|
||||
from yt_dlp.compat import compat_shlex_quote
|
||||
from yt_dlp.postprocessor import (
|
||||
@@ -14,6 +16,7 @@ from yt_dlp.postprocessor import (
|
||||
MetadataFromFieldPP,
|
||||
MetadataParserPP,
|
||||
ModifyChaptersPP,
|
||||
SponsorBlockPP,
|
||||
)
|
||||
|
||||
|
||||
@@ -74,11 +77,15 @@ class TestModifyChaptersPP(unittest.TestCase):
self._pp = ModifyChaptersPP(YoutubeDL())

@staticmethod
def _sponsor_chapter(start, end, cat, remove=False):
c = {'start_time': start, 'end_time': end, '_categories': [(cat, start, end)]}
if remove:
c['remove'] = True
return c
def _sponsor_chapter(start, end, cat, remove=False, title=None):
if title is None:
title = SponsorBlockPP.CATEGORIES[cat]
return {
'start_time': start,
'end_time': end,
'_categories': [(cat, start, end, title)],
**({'remove': True} if remove else {}),
}

@staticmethod
def _chapter(start, end, title=None, remove=False):
@@ -128,6 +135,19 @@ class TestModifyChaptersPP(unittest.TestCase):
|
||||
'c', '[SponsorBlock]: Filler Tangent', 'c'])
|
||||
self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
|
||||
|
||||
def test_remove_marked_arrange_sponsors_SponsorBlockChapters(self):
|
||||
chapters = self._chapters([70], ['c']) + [
|
||||
self._sponsor_chapter(10, 20, 'chapter', title='sb c1'),
|
||||
self._sponsor_chapter(15, 16, 'chapter', title='sb c2'),
|
||||
self._sponsor_chapter(30, 40, 'preview'),
|
||||
self._sponsor_chapter(50, 60, 'filler')]
|
||||
expected = self._chapters(
|
||||
[10, 15, 16, 20, 30, 40, 50, 60, 70],
|
||||
['c', '[SponsorBlock]: sb c1', '[SponsorBlock]: sb c1, sb c2', '[SponsorBlock]: sb c1',
|
||||
'c', '[SponsorBlock]: Preview/Recap',
|
||||
'c', '[SponsorBlock]: Filler Tangent', 'c'])
|
||||
self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
|
||||
|
||||
def test_remove_marked_arrange_sponsors_UniqueNamesForOverlappingSponsors(self):
|
||||
chapters = self._chapters([120], ['c']) + [
|
||||
self._sponsor_chapter(10, 45, 'sponsor'), self._sponsor_chapter(20, 40, 'selfpromo'),
|
||||
@@ -171,7 +191,7 @@ class TestModifyChaptersPP(unittest.TestCase):
|
||||
self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)
|
||||
|
||||
def test_remove_marked_arrange_sponsors_ChapterWithCutHidingSponsor(self):
|
||||
cuts = [self._sponsor_chapter(20, 50, 'selpromo', remove=True)]
|
||||
cuts = [self._sponsor_chapter(20, 50, 'selfpromo', remove=True)]
|
||||
chapters = self._chapters([60], ['c']) + [
|
||||
self._sponsor_chapter(10, 20, 'intro'),
|
||||
self._sponsor_chapter(30, 40, 'sponsor'),
|
||||
@@ -197,7 +217,7 @@ class TestModifyChaptersPP(unittest.TestCase):
|
||||
self._sponsor_chapter(10, 20, 'sponsor'),
|
||||
self._sponsor_chapter(20, 30, 'interaction', remove=True),
|
||||
self._chapter(30, 40, remove=True),
|
||||
self._sponsor_chapter(40, 50, 'selpromo', remove=True),
|
||||
self._sponsor_chapter(40, 50, 'selfpromo', remove=True),
|
||||
self._sponsor_chapter(50, 60, 'interaction')]
|
||||
expected = self._chapters([10, 20, 30, 40],
|
||||
['c', '[SponsorBlock]: Sponsor',
|
||||
@@ -280,7 +300,7 @@ class TestModifyChaptersPP(unittest.TestCase):
|
||||
chapters = self._chapters([70], ['c']) + [
|
||||
self._sponsor_chapter(10, 30, 'sponsor'),
|
||||
self._sponsor_chapter(20, 50, 'interaction'),
|
||||
self._sponsor_chapter(30, 50, 'selpromo', remove=True),
|
||||
self._sponsor_chapter(30, 50, 'selfpromo', remove=True),
|
||||
self._sponsor_chapter(40, 60, 'sponsor'),
|
||||
self._sponsor_chapter(50, 60, 'interaction')]
|
||||
expected = self._chapters(
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
@@ -6,11 +7,12 @@ import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import random
|
||||
import subprocess
|
||||
from test.helper import FakeYDL, get_params, is_download_test
|
||||
import urllib.request
|
||||
|
||||
from yt_dlp.compat import compat_str, compat_urllib_request
|
||||
from test.helper import FakeYDL, get_params, is_download_test
|
||||
|
||||
|
||||
@is_download_test
|
||||
@@ -51,7 +53,7 @@ class TestMultipleSocks(unittest.TestCase):
|
||||
if params is None:
|
||||
return
|
||||
ydl = FakeYDL()
|
||||
req = compat_urllib_request.Request('http://yt-dl.org/ip')
|
||||
req = urllib.request.Request('http://yt-dl.org/ip')
|
||||
req.add_header('Ytdl-request-proxy', params['secondary_proxy'])
|
||||
self.assertEqual(
|
||||
ydl.urlopen(req).read().decode(),
|
||||
@@ -62,7 +64,7 @@ class TestMultipleSocks(unittest.TestCase):
|
||||
if params is None:
|
||||
return
|
||||
ydl = FakeYDL()
|
||||
req = compat_urllib_request.Request('https://yt-dl.org/ip')
|
||||
req = urllib.request.Request('https://yt-dl.org/ip')
|
||||
req.add_header('Ytdl-request-proxy', params['secondary_proxy'])
|
||||
self.assertEqual(
|
||||
ydl.urlopen(req).read().decode(),
|
||||
@@ -99,13 +101,13 @@ class TestSocks(unittest.TestCase):
|
||||
return ydl.urlopen('http://yt-dl.org/ip').read().decode()
|
||||
|
||||
def test_socks4(self):
|
||||
self.assertTrue(isinstance(self._get_ip('socks4'), compat_str))
|
||||
self.assertTrue(isinstance(self._get_ip('socks4'), str))
|
||||
|
||||
def test_socks4a(self):
|
||||
self.assertTrue(isinstance(self._get_ip('socks4a'), compat_str))
|
||||
self.assertTrue(isinstance(self._get_ip('socks4a'), str))
|
||||
|
||||
def test_socks5(self):
|
||||
self.assertTrue(isinstance(self._get_ip('socks5'), compat_str))
|
||||
self.assertTrue(isinstance(self._get_ip('socks5'), str))
|
||||
|
||||
|
||||
if __name__ == '__main__':
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
@@ -6,8 +7,8 @@ import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from test.helper import FakeYDL, is_download_test, md5
|
||||
|
||||
from test.helper import FakeYDL, is_download_test, md5
|
||||
from yt_dlp.extractor import (
|
||||
NPOIE,
|
||||
NRKTVIE,
|
||||
@@ -38,6 +39,9 @@ class BaseTestSubtitles(unittest.TestCase):
|
||||
self.DL = FakeYDL()
|
||||
self.ie = self.IE()
|
||||
self.DL.add_info_extractor(self.ie)
|
||||
if not self.IE.working():
|
||||
print('Skipping: %s marked as not _WORKING' % self.IE.ie_key())
|
||||
self.skipTest('IE marked as not _WORKING')
|
||||
|
||||
def getInfoDict(self):
|
||||
info_dict = self.DL.extract_info(self.url, download=False)
|
||||
@@ -57,6 +61,21 @@ class BaseTestSubtitles(unittest.TestCase):
|
||||
|
||||
@is_download_test
|
||||
class TestYoutubeSubtitles(BaseTestSubtitles):
|
||||
# Available subtitles for QRS8MkLhQmM:
|
||||
# Language formats
|
||||
# ru vtt, ttml, srv3, srv2, srv1, json3
|
||||
# fr vtt, ttml, srv3, srv2, srv1, json3
|
||||
# en vtt, ttml, srv3, srv2, srv1, json3
|
||||
# nl vtt, ttml, srv3, srv2, srv1, json3
|
||||
# de vtt, ttml, srv3, srv2, srv1, json3
|
||||
# ko vtt, ttml, srv3, srv2, srv1, json3
|
||||
# it vtt, ttml, srv3, srv2, srv1, json3
|
||||
# zh-Hant vtt, ttml, srv3, srv2, srv1, json3
|
||||
# hi vtt, ttml, srv3, srv2, srv1, json3
|
||||
# pt-BR vtt, ttml, srv3, srv2, srv1, json3
|
||||
# es-MX vtt, ttml, srv3, srv2, srv1, json3
|
||||
# ja vtt, ttml, srv3, srv2, srv1, json3
|
||||
# pl vtt, ttml, srv3, srv2, srv1, json3
|
||||
url = 'QRS8MkLhQmM'
|
||||
IE = YoutubeIE
|
||||
|
||||
@@ -65,47 +84,60 @@ class TestYoutubeSubtitles(BaseTestSubtitles):
|
||||
self.DL.params['allsubtitles'] = True
|
||||
subtitles = self.getSubtitles()
|
||||
self.assertEqual(len(subtitles.keys()), 13)
|
||||
self.assertEqual(md5(subtitles['en']), '688dd1ce0981683867e7fe6fde2a224b')
|
||||
self.assertEqual(md5(subtitles['it']), '31324d30b8430b309f7f5979a504a769')
|
||||
self.assertEqual(md5(subtitles['en']), 'ae1bd34126571a77aabd4d276b28044d')
|
||||
self.assertEqual(md5(subtitles['it']), '0e0b667ba68411d88fd1c5f4f4eab2f9')
|
||||
for lang in ['fr', 'de']:
|
||||
self.assertTrue(subtitles.get(lang) is not None, 'Subtitles for \'%s\' not extracted' % lang)
|
||||
|
||||
def test_youtube_subtitles_ttml_format(self):
|
||||
def _test_subtitles_format(self, fmt, md5_hash, lang='en'):
|
||||
self.DL.params['writesubtitles'] = True
|
||||
self.DL.params['subtitlesformat'] = 'ttml'
|
||||
self.DL.params['subtitlesformat'] = fmt
|
||||
subtitles = self.getSubtitles()
|
||||
self.assertEqual(md5(subtitles['en']), 'c97ddf1217390906fa9fbd34901f3da2')
|
||||
self.assertEqual(md5(subtitles[lang]), md5_hash)
|
||||
|
||||
def test_youtube_subtitles_ttml_format(self):
|
||||
self._test_subtitles_format('ttml', 'c97ddf1217390906fa9fbd34901f3da2')
|
||||
|
||||
def test_youtube_subtitles_vtt_format(self):
|
||||
self.DL.params['writesubtitles'] = True
|
||||
self.DL.params['subtitlesformat'] = 'vtt'
|
||||
self._test_subtitles_format('vtt', 'ae1bd34126571a77aabd4d276b28044d')
|
||||
|
||||
def test_youtube_subtitles_json3_format(self):
|
||||
self._test_subtitles_format('json3', '688dd1ce0981683867e7fe6fde2a224b')
|
||||
|
||||
def _test_automatic_captions(self, url, lang):
|
||||
self.url = url
|
||||
self.DL.params['writeautomaticsub'] = True
|
||||
self.DL.params['subtitleslangs'] = [lang]
|
||||
subtitles = self.getSubtitles()
|
||||
self.assertEqual(md5(subtitles['en']), 'ae1bd34126571a77aabd4d276b28044d')
|
||||
self.assertTrue(subtitles[lang] is not None)
|
||||
|
||||
def test_youtube_automatic_captions(self):
|
||||
self.url = '8YoUxe5ncPo'
|
||||
self.DL.params['writeautomaticsub'] = True
|
||||
self.DL.params['subtitleslangs'] = ['it']
|
||||
subtitles = self.getSubtitles()
|
||||
self.assertTrue(subtitles['it'] is not None)
|
||||
|
||||
def test_youtube_no_automatic_captions(self):
|
||||
self.url = 'QRS8MkLhQmM'
|
||||
self.DL.params['writeautomaticsub'] = True
|
||||
subtitles = self.getSubtitles()
|
||||
self.assertTrue(not subtitles)
|
||||
# Available automatic captions for 8YoUxe5ncPo:
|
||||
# Language formats (all in vtt, ttml, srv3, srv2, srv1, json3)
|
||||
# gu, zh-Hans, zh-Hant, gd, ga, gl, lb, la, lo, tt, tr,
|
||||
# lv, lt, tk, th, tg, te, fil, haw, yi, ceb, yo, de, da,
|
||||
# el, eo, en, eu, et, es, ru, rw, ro, bn, be, bg, uk, jv,
|
||||
# bs, ja, or, xh, co, ca, cy, cs, ps, pt, pa, vi, pl, hy,
|
||||
# hr, ht, hu, hmn, hi, ha, mg, uz, ml, mn, mi, mk, ur,
|
||||
# mt, ms, mr, ug, ta, my, af, sw, is, am,
|
||||
# *it*, iw, sv, ar,
|
||||
# su, zu, az, id, ig, nl, no, ne, ny, fr, ku, fy, fa, fi,
|
||||
# ka, kk, sr, sq, ko, kn, km, st, sk, si, so, sn, sm, sl,
|
||||
# ky, sd
|
||||
# ...
|
||||
self._test_automatic_captions('8YoUxe5ncPo', 'it')
|
||||
|
||||
@unittest.skip('Video unavailable')
|
||||
def test_youtube_translated_subtitles(self):
|
||||
# This video has a subtitles track, which can be translated
|
||||
self.url = 'i0ZabxXmH4Y'
|
||||
self.DL.params['writeautomaticsub'] = True
|
||||
self.DL.params['subtitleslangs'] = ['it']
|
||||
subtitles = self.getSubtitles()
|
||||
self.assertTrue(subtitles['it'] is not None)
|
||||
# This video has a subtitles track, which can be translated (#4555)
|
||||
self._test_automatic_captions('Ky9eprVWzlI', 'it')
|
||||
|
||||
def test_youtube_nosubtitles(self):
|
||||
self.DL.expect_warning('video doesn\'t have subtitles')
|
||||
self.url = 'n5BB19UTcdA'
|
||||
# Available automatic captions for 8YoUxe5ncPo:
|
||||
# ...
|
||||
# 8YoUxe5ncPo has no subtitles
|
||||
self.url = '8YoUxe5ncPo'
|
||||
self.DL.params['writesubtitles'] = True
|
||||
self.DL.params['allsubtitles'] = True
|
||||
subtitles = self.getSubtitles()
|
||||
@@ -137,6 +169,7 @@ class TestDailymotionSubtitles(BaseTestSubtitles):
|
||||
|
||||
|
||||
@is_download_test
|
||||
@unittest.skip('IE broken')
|
||||
class TestTedSubtitles(BaseTestSubtitles):
|
||||
url = 'http://www.ted.com/talks/dan_dennett_on_our_consciousness.html'
|
||||
IE = TedTalkIE
|
||||
@@ -162,12 +195,12 @@ class TestVimeoSubtitles(BaseTestSubtitles):
|
||||
self.DL.params['allsubtitles'] = True
|
||||
subtitles = self.getSubtitles()
|
||||
self.assertEqual(set(subtitles.keys()), {'de', 'en', 'es', 'fr'})
|
||||
self.assertEqual(md5(subtitles['en']), '8062383cf4dec168fc40a088aa6d5888')
|
||||
self.assertEqual(md5(subtitles['fr']), 'b6191146a6c5d3a452244d853fde6dc8')
|
||||
self.assertEqual(md5(subtitles['en']), '386cbc9320b94e25cb364b97935e5dd1')
|
||||
self.assertEqual(md5(subtitles['fr']), 'c9b69eef35bc6641c0d4da8a04f9dfac')
|
||||
|
||||
def test_nosubtitles(self):
|
||||
self.DL.expect_warning('video doesn\'t have subtitles')
|
||||
self.url = 'http://vimeo.com/56015672'
|
||||
self.url = 'http://vimeo.com/68093876'
|
||||
self.DL.params['writesubtitles'] = True
|
||||
self.DL.params['allsubtitles'] = True
|
||||
subtitles = self.getSubtitles()
|
||||
@@ -175,6 +208,7 @@ class TestVimeoSubtitles(BaseTestSubtitles):
|
||||
|
||||
|
||||
@is_download_test
|
||||
@unittest.skip('IE broken')
|
||||
class TestWallaSubtitles(BaseTestSubtitles):
|
||||
url = 'http://vod.walla.co.il/movie/2705958/the-yes-men'
|
||||
IE = WallaIE
|
||||
@@ -197,6 +231,7 @@ class TestWallaSubtitles(BaseTestSubtitles):
|
||||
|
||||
|
||||
@is_download_test
|
||||
@unittest.skip('IE broken')
|
||||
class TestCeskaTelevizeSubtitles(BaseTestSubtitles):
|
||||
url = 'http://www.ceskatelevize.cz/ivysilani/10600540290-u6-uzasny-svet-techniky'
|
||||
IE = CeskaTelevizeIE
|
||||
@@ -219,6 +254,7 @@ class TestCeskaTelevizeSubtitles(BaseTestSubtitles):
|
||||
|
||||
|
||||
@is_download_test
|
||||
@unittest.skip('IE broken')
|
||||
class TestLyndaSubtitles(BaseTestSubtitles):
|
||||
url = 'http://www.lynda.com/Bootstrap-tutorials/Using-exercise-files/110885/114408-4.html'
|
||||
IE = LyndaIE
|
||||
@@ -232,6 +268,7 @@ class TestLyndaSubtitles(BaseTestSubtitles):
|
||||
|
||||
|
||||
@is_download_test
|
||||
@unittest.skip('IE broken')
|
||||
class TestNPOSubtitles(BaseTestSubtitles):
|
||||
url = 'http://www.npo.nl/nos-journaal/28-08-2014/POW_00722860'
|
||||
IE = NPOIE
|
||||
@@ -245,6 +282,7 @@ class TestNPOSubtitles(BaseTestSubtitles):
|
||||
|
||||
|
||||
@is_download_test
|
||||
@unittest.skip('IE broken')
|
||||
class TestMTVSubtitles(BaseTestSubtitles):
|
||||
url = 'http://www.cc.com/video-clips/p63lk0/adam-devine-s-house-party-chasing-white-swans'
|
||||
IE = ComedyCentralIE
|
||||
@@ -269,8 +307,8 @@ class TestNRKSubtitles(BaseTestSubtitles):
|
||||
self.DL.params['writesubtitles'] = True
|
||||
self.DL.params['allsubtitles'] = True
|
||||
subtitles = self.getSubtitles()
|
||||
self.assertEqual(set(subtitles.keys()), {'no'})
|
||||
self.assertEqual(md5(subtitles['no']), '544fa917d3197fcbee64634559221cc2')
|
||||
self.assertEqual(set(subtitles.keys()), {'nb-ttv'})
|
||||
self.assertEqual(md5(subtitles['nb-ttv']), '67e06ff02d0deaf975e68f6cb8f6a149')
|
||||
|
||||
|
||||
@is_download_test
|
||||
@@ -295,6 +333,7 @@ class TestRaiPlaySubtitles(BaseTestSubtitles):
|
||||
|
||||
|
||||
@is_download_test
|
||||
@unittest.skip('IE broken - DRM only')
|
||||
class TestVikiSubtitles(BaseTestSubtitles):
|
||||
url = 'http://www.viki.com/videos/1060846v-punch-episode-18'
|
||||
IE = VikiIE
|
||||
@@ -323,6 +362,7 @@ class TestThePlatformSubtitles(BaseTestSubtitles):
|
||||
|
||||
|
||||
@is_download_test
|
||||
@unittest.skip('IE broken')
|
||||
class TestThePlatformFeedSubtitles(BaseTestSubtitles):
|
||||
url = 'http://feed.theplatform.com/f/7wvmTC/msnbc_video-p-test?form=json&pretty=true&range=-40&byGuid=n_hardball_5biden_140207'
|
||||
IE = ThePlatformFeedIE
|
||||
@@ -360,7 +400,7 @@ class TestDemocracynowSubtitles(BaseTestSubtitles):
|
||||
self.DL.params['allsubtitles'] = True
|
||||
subtitles = self.getSubtitles()
|
||||
self.assertEqual(set(subtitles.keys()), {'en'})
|
||||
self.assertEqual(md5(subtitles['en']), 'acaca989e24a9e45a6719c9b3d60815c')
|
||||
self.assertEqual(md5(subtitles['en']), 'a3cc4c0b5eadd74d9974f1c1f5101045')
|
||||
|
||||
def test_subtitles_in_page(self):
|
||||
self.url = 'http://www.democracynow.org/2015/7/3/this_flag_comes_down_today_bree'
|
||||
@@ -368,7 +408,7 @@ class TestDemocracynowSubtitles(BaseTestSubtitles):
|
||||
self.DL.params['allsubtitles'] = True
|
||||
subtitles = self.getSubtitles()
|
||||
self.assertEqual(set(subtitles.keys()), {'en'})
|
||||
self.assertEqual(md5(subtitles['en']), 'acaca989e24a9e45a6719c9b3d60815c')
|
||||
self.assertEqual(md5(subtitles['en']), 'a3cc4c0b5eadd74d9974f1c1f5101045')
|
||||
|
||||
|
||||
@is_download_test
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
|
||||
@@ -1,26 +1,24 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import contextlib
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
# Various small unit tests
|
||||
import contextlib
|
||||
import io
|
||||
import itertools
|
||||
import json
|
||||
import xml.etree.ElementTree
|
||||
|
||||
from yt_dlp.compat import (
|
||||
compat_chr,
|
||||
compat_etree_fromstring,
|
||||
compat_getenv,
|
||||
compat_HTMLParseError,
|
||||
compat_os_name,
|
||||
compat_setenv,
|
||||
)
|
||||
from yt_dlp.utils import (
|
||||
Config,
|
||||
@@ -42,6 +40,7 @@ from yt_dlp.utils import (
|
||||
datetime_from_str,
|
||||
detect_exe_version,
|
||||
determine_ext,
|
||||
determine_file_encoding,
|
||||
dfxp2srt,
|
||||
dict_get,
|
||||
encode_base_n,
|
||||
@@ -55,6 +54,7 @@ from yt_dlp.utils import (
|
||||
fix_xml_ampersands,
|
||||
float_or_none,
|
||||
format_bytes,
|
||||
get_compatible_ext,
|
||||
get_element_by_attribute,
|
||||
get_element_by_class,
|
||||
get_element_html_by_attribute,
|
||||
@@ -110,6 +110,7 @@ from yt_dlp.utils import (
|
||||
strip_or_none,
|
||||
subtitles_filename,
|
||||
timeconvert,
|
||||
traverse_obj,
|
||||
unescapeHTML,
|
||||
unified_strdate,
|
||||
unified_timestamp,
|
||||
@@ -141,13 +142,13 @@ class TestUtil(unittest.TestCase):
|
||||
|
||||
self.assertEqual(sanitize_filename('123'), '123')
|
||||
|
||||
self.assertEqual('abc_de', sanitize_filename('abc/de'))
|
||||
self.assertEqual('abc⧸de', sanitize_filename('abc/de'))
|
||||
self.assertFalse('/' in sanitize_filename('abc/de///'))
|
||||
|
||||
self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de'))
|
||||
self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|'))
|
||||
self.assertEqual('yes no', sanitize_filename('yes? no'))
|
||||
self.assertEqual('this - that', sanitize_filename('this: that'))
|
||||
self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de', is_id=False))
|
||||
self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|', is_id=False))
|
||||
self.assertEqual('yes no', sanitize_filename('yes? no', is_id=False))
|
||||
self.assertEqual('this - that', sanitize_filename('this: that', is_id=False))
|
||||
|
||||
self.assertEqual(sanitize_filename('AT&T'), 'AT&T')
|
||||
aumlaut = 'ä'
|
||||
@@ -266,20 +267,20 @@ class TestUtil(unittest.TestCase):
|
||||
def env(var):
|
||||
return f'%{var}%' if sys.platform == 'win32' else f'${var}'
|
||||
|
||||
compat_setenv('yt_dlp_EXPATH_PATH', 'expanded')
|
||||
os.environ['yt_dlp_EXPATH_PATH'] = 'expanded'
|
||||
self.assertEqual(expand_path(env('yt_dlp_EXPATH_PATH')), 'expanded')
|
||||
|
||||
old_home = os.environ.get('HOME')
|
||||
test_str = R'C:\Documents and Settings\тест\Application Data'
|
||||
try:
|
||||
compat_setenv('HOME', test_str)
|
||||
self.assertEqual(expand_path(env('HOME')), compat_getenv('HOME'))
|
||||
self.assertEqual(expand_path('~'), compat_getenv('HOME'))
|
||||
os.environ['HOME'] = test_str
|
||||
self.assertEqual(expand_path(env('HOME')), os.getenv('HOME'))
|
||||
self.assertEqual(expand_path('~'), os.getenv('HOME'))
|
||||
self.assertEqual(
|
||||
expand_path('~/%s' % env('yt_dlp_EXPATH_PATH')),
|
||||
'%s/expanded' % compat_getenv('HOME'))
|
||||
'%s/expanded' % os.getenv('HOME'))
|
||||
finally:
|
||||
compat_setenv('HOME', old_home or '')
|
||||
os.environ['HOME'] = old_home or ''
|
||||
|
||||
def test_prepend_extension(self):
|
||||
self.assertEqual(prepend_extension('abc.ext', 'temp'), 'abc.temp.ext')
|
||||
@@ -370,6 +371,7 @@ class TestUtil(unittest.TestCase):
|
||||
self.assertEqual(unified_strdate('2012/10/11 01:56:38 +0000'), '20121011')
|
||||
self.assertEqual(unified_strdate('1968 12 10'), '19681210')
|
||||
self.assertEqual(unified_strdate('1968-12-10'), '19681210')
|
||||
self.assertEqual(unified_strdate('31-07-2022 20:00'), '20220731')
|
||||
self.assertEqual(unified_strdate('28/01/2014 21:00:00 +0100'), '20140128')
|
||||
self.assertEqual(
|
||||
unified_strdate('11/26/2014 11:30:00 AM PST', day_first=False),
|
||||
@@ -413,6 +415,10 @@ class TestUtil(unittest.TestCase):
|
||||
self.assertEqual(unified_timestamp('December 15, 2017 at 7:49 am'), 1513324140)
|
||||
self.assertEqual(unified_timestamp('2018-03-14T08:32:43.1493874+00:00'), 1521016363)
|
||||
|
||||
self.assertEqual(unified_timestamp('December 31 1969 20:00:01 EDT'), 1)
|
||||
self.assertEqual(unified_timestamp('Wednesday 31 December 1969 18:01:26 MDT'), 86)
|
||||
self.assertEqual(unified_timestamp('12/31/1969 20:01:18 EDT', False), 78)
|
||||
|
||||
def test_determine_ext(self):
|
||||
self.assertEqual(determine_ext('http://example.com/foo/bar.mp4/?download'), 'mp4')
|
||||
self.assertEqual(determine_ext('http://example.com/foo/bar/?download', None), None)
|
||||
@@ -562,6 +568,7 @@ class TestUtil(unittest.TestCase):
|
||||
self.assertEqual(base_url('http://foo.de/bar/'), 'http://foo.de/bar/')
|
||||
self.assertEqual(base_url('http://foo.de/bar/baz'), 'http://foo.de/bar/')
|
||||
self.assertEqual(base_url('http://foo.de/bar/baz?x=z/x/c'), 'http://foo.de/bar/')
|
||||
self.assertEqual(base_url('http://foo.de/bar/baz&x=z&w=y/x/c'), 'http://foo.de/bar/baz&x=z&w=y/x/')
|
||||
|
||||
def test_urljoin(self):
|
||||
self.assertEqual(urljoin('http://foo.de/', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
|
||||
@@ -898,7 +905,7 @@ class TestUtil(unittest.TestCase):
|
||||
'dynamic_range': 'HDR10',
|
||||
})
|
||||
self.assertEqual(parse_codecs('av01.0.12M.10.0.110.09.16.09.0'), {
|
||||
'vcodec': 'av01.0.12M.10',
|
||||
'vcodec': 'av01.0.12M.10.0.110.09.16.09.0',
|
||||
'acodec': 'none',
|
||||
'dynamic_range': 'HDR10',
|
||||
})
|
||||
@@ -947,6 +954,85 @@ class TestUtil(unittest.TestCase):
|
||||
)
|
||||
self.assertEqual(escape_url('http://vimeo.com/56015672#at=0'), 'http://vimeo.com/56015672#at=0')
|
||||
|
||||
def test_js_to_json_vars_strings(self):
|
||||
self.assertDictEqual(
|
||||
json.loads(js_to_json(
|
||||
'''{
|
||||
'null': a,
|
||||
'nullStr': b,
|
||||
'true': c,
|
||||
'trueStr': d,
|
||||
'false': e,
|
||||
'falseStr': f,
|
||||
'unresolvedVar': g,
|
||||
}''',
|
||||
{
|
||||
'a': 'null',
|
||||
'b': '"null"',
|
||||
'c': 'true',
|
||||
'd': '"true"',
|
||||
'e': 'false',
|
||||
'f': '"false"',
|
||||
'g': 'var',
|
||||
}
|
||||
)),
|
||||
{
|
||||
'null': None,
|
||||
'nullStr': 'null',
|
||||
'true': True,
|
||||
'trueStr': 'true',
|
||||
'false': False,
|
||||
'falseStr': 'false',
|
||||
'unresolvedVar': 'var'
|
||||
}
|
||||
)
|
||||
|
||||
self.assertDictEqual(
|
||||
json.loads(js_to_json(
|
||||
'''{
|
||||
'int': a,
|
||||
'intStr': b,
|
||||
'float': c,
|
||||
'floatStr': d,
|
||||
}''',
|
||||
{
|
||||
'a': '123',
|
||||
'b': '"123"',
|
||||
'c': '1.23',
|
||||
'd': '"1.23"',
|
||||
}
|
||||
)),
|
||||
{
|
||||
'int': 123,
|
||||
'intStr': '123',
|
||||
'float': 1.23,
|
||||
'floatStr': '1.23',
|
||||
}
|
||||
)
|
||||
|
||||
self.assertDictEqual(
|
||||
json.loads(js_to_json(
|
||||
'''{
|
||||
'object': a,
|
||||
'objectStr': b,
|
||||
'array': c,
|
||||
'arrayStr': d,
|
||||
}''',
|
||||
{
|
||||
'a': '{}',
|
||||
'b': '"{}"',
|
||||
'c': '[]',
|
||||
'd': '"[]"',
|
||||
}
|
||||
)),
|
||||
{
|
||||
'object': {},
|
||||
'objectStr': '{}',
|
||||
'array': [],
|
||||
'arrayStr': '[]',
|
||||
}
|
||||
)
|
||||
|
||||
def test_js_to_json_realworld(self):
|
||||
inp = '''{
|
||||
'clip':{'provider':'pseudo'}
|
||||
@@ -1093,6 +1179,12 @@ class TestUtil(unittest.TestCase):
|
||||
on = js_to_json('[1,//{},\n2]')
|
||||
self.assertEqual(json.loads(on), [1, 2])
|
||||
|
||||
on = js_to_json(R'"\^\$\#"')
|
||||
self.assertEqual(json.loads(on), R'^$#', msg='Unnecessary escapes should be stripped')
|
||||
|
||||
on = js_to_json('\'"\\""\'')
|
||||
self.assertEqual(json.loads(on), '"""', msg='Unnecessary quote escape should be escaped')
|
||||
|
||||
def test_js_to_json_malformed(self):
|
||||
self.assertEqual(js_to_json('42a1'), '42"a1"')
|
||||
self.assertEqual(js_to_json('42a-1'), '42"a"-1')
|
||||
@@ -1128,7 +1220,7 @@ class TestUtil(unittest.TestCase):
|
||||
self.assertEqual(extract_attributes('<e x="décomposé">'), {'x': 'décompose\u0301'})
|
||||
# "Narrow" Python builds don't support unicode code points outside BMP.
|
||||
try:
|
||||
compat_chr(0x10000)
|
||||
chr(0x10000)
|
||||
supports_outside_bmp = True
|
||||
except ValueError:
|
||||
supports_outside_bmp = False
|
||||
@@ -1672,6 +1764,9 @@ Line 1
|
||||
self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'foo', html)), [])
|
||||
self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'no-such-foo', html)), [])
|
||||
|
||||
self.assertEqual(list(get_elements_text_and_html_by_attribute(
|
||||
'class', 'foo', '<a class="foo">nice</a><span class="foo">nice</span>', tag='a')), [('nice', '<a class="foo">nice</a>')])
|
||||
|
||||
GET_ELEMENT_BY_TAG_TEST_STRING = '''
|
||||
random text lorem ipsum</p>
|
||||
<div>
|
||||
@@ -1825,6 +1920,276 @@ Line 1
|
||||
with contextlib.suppress(OSError):
|
||||
os.remove(FILE)
|
||||
|
||||
def test_determine_file_encoding(self):
|
||||
self.assertEqual(determine_file_encoding(b''), (None, 0))
|
||||
self.assertEqual(determine_file_encoding(b'--verbose -x --audio-format mkv\n'), (None, 0))
|
||||
|
||||
self.assertEqual(determine_file_encoding(b'\xef\xbb\xbf'), ('utf-8', 3))
|
||||
self.assertEqual(determine_file_encoding(b'\x00\x00\xfe\xff'), ('utf-32-be', 4))
|
||||
self.assertEqual(determine_file_encoding(b'\xff\xfe'), ('utf-16-le', 2))
|
||||
|
||||
self.assertEqual(determine_file_encoding(b'\xff\xfe# coding: utf-8\n--verbose'), ('utf-16-le', 2))
|
||||
|
||||
self.assertEqual(determine_file_encoding(b'# coding: utf-8\n--verbose'), ('utf-8', 0))
|
||||
self.assertEqual(determine_file_encoding(b'# coding: someencodinghere-12345\n--verbose'), ('someencodinghere-12345', 0))
|
||||
|
||||
self.assertEqual(determine_file_encoding(b'#coding:utf-8\n--verbose'), ('utf-8', 0))
|
||||
self.assertEqual(determine_file_encoding(b'# coding: utf-8 \r\n--verbose'), ('utf-8', 0))
|
||||
|
||||
self.assertEqual(determine_file_encoding('# coding: utf-32-be'.encode('utf-32-be')), ('utf-32-be', 0))
|
||||
self.assertEqual(determine_file_encoding('# coding: utf-16-le'.encode('utf-16-le')), ('utf-16-le', 0))
|
||||
|
||||
def test_get_compatible_ext(self):
|
||||
self.assertEqual(get_compatible_ext(
|
||||
vcodecs=[None], acodecs=[None, None], vexts=['mp4'], aexts=['m4a', 'm4a']), 'mkv')
|
||||
self.assertEqual(get_compatible_ext(
|
||||
vcodecs=[None], acodecs=[None], vexts=['flv'], aexts=['flv']), 'flv')
|
||||
|
||||
self.assertEqual(get_compatible_ext(
|
||||
vcodecs=[None], acodecs=[None], vexts=['mp4'], aexts=['m4a']), 'mp4')
|
||||
self.assertEqual(get_compatible_ext(
|
||||
vcodecs=[None], acodecs=[None], vexts=['mp4'], aexts=['webm']), 'mkv')
|
||||
self.assertEqual(get_compatible_ext(
|
||||
vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['m4a']), 'mkv')
|
||||
self.assertEqual(get_compatible_ext(
|
||||
vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['webm']), 'webm')
|
||||
self.assertEqual(get_compatible_ext(
|
||||
vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['weba']), 'webm')
|
||||
|
||||
self.assertEqual(get_compatible_ext(
|
||||
vcodecs=['h264'], acodecs=['mp4a'], vexts=['mov'], aexts=['m4a']), 'mp4')
|
||||
self.assertEqual(get_compatible_ext(
|
||||
vcodecs=['av01.0.12M.08'], acodecs=['opus'], vexts=['mp4'], aexts=['webm']), 'webm')
|
||||
|
||||
self.assertEqual(get_compatible_ext(
|
||||
vcodecs=['vp9'], acodecs=['opus'], vexts=['webm'], aexts=['webm'], preferences=['flv', 'mp4']), 'mp4')
|
||||
self.assertEqual(get_compatible_ext(
|
||||
vcodecs=['av1'], acodecs=['mp4a'], vexts=['webm'], aexts=['m4a'], preferences=('webm', 'mkv')), 'mkv')
|
||||
|
||||
def test_traverse_obj(self):
|
||||
_TEST_DATA = {
|
||||
100: 100,
|
||||
1.2: 1.2,
|
||||
'str': 'str',
|
||||
'None': None,
|
||||
'...': ...,
|
||||
'urls': [
|
||||
{'index': 0, 'url': 'https://www.example.com/0'},
|
||||
{'index': 1, 'url': 'https://www.example.com/1'},
|
||||
],
|
||||
'data': (
|
||||
{'index': 2},
|
||||
{'index': 3},
|
||||
),
|
||||
'dict': {},
|
||||
}
|
||||
|
||||
# Test base functionality
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ('str',)), 'str',
|
||||
msg='allow tuple path')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ['str']), 'str',
|
||||
msg='allow list path')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, (value for value in ("str",))), 'str',
|
||||
msg='allow iterable path')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, 'str'), 'str',
|
||||
msg='single items should be treated as a path')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, None), _TEST_DATA)
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, 100), 100)
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, 1.2), 1.2)
|
||||
|
||||
# Test Ellipsis behavior
|
||||
self.assertCountEqual(traverse_obj(_TEST_DATA, ...),
|
||||
(item for item in _TEST_DATA.values() if item is not None),
|
||||
msg='`...` should give all values except `None`')
|
||||
self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', 0, ...)), _TEST_DATA['urls'][0].values(),
|
||||
msg='`...` selection for dicts should select all values')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, (..., ..., 'url')),
|
||||
['https://www.example.com/0', 'https://www.example.com/1'],
|
||||
msg='nested `...` queries should work')
|
||||
self.assertCountEqual(traverse_obj(_TEST_DATA, (..., ..., 'index')), range(4),
|
||||
msg='`...` query result should be flattened')
|
||||
|
||||
# Test function as key
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, lambda x, y: x == 'urls' and isinstance(y, list)),
|
||||
[_TEST_DATA['urls']],
|
||||
msg='function as query key should perform a filter based on (key, value)')
|
||||
self.assertCountEqual(traverse_obj(_TEST_DATA, lambda _, x: isinstance(x[0], str)), {'str'},
|
||||
msg='exceptions in the query function should be catched')
|
||||
|
||||
# Test alternative paths
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'str'), 'str',
|
||||
msg='multiple `paths` should be treated as alternative paths')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, 'str', 100), 'str',
|
||||
msg='alternatives should exit early')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'fail'), None,
|
||||
msg='alternatives should return `default` if exhausted')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, (..., 'fail'), 100), 100,
|
||||
msg='alternatives should track their own branching return')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ('dict', ...), ('data', ...)), list(_TEST_DATA['data']),
|
||||
msg='alternatives on empty objects should search further')
|
||||
|
||||
# Test branch and path nesting
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ('urls', (3, 0), 'url')), ['https://www.example.com/0'],
|
||||
msg='tuple as key should be treated as branches')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ('urls', [3, 0], 'url')), ['https://www.example.com/0'],
|
||||
msg='list as key should be treated as branches')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ((1, 'fail'), (0, 'url')))), ['https://www.example.com/0'],
|
||||
msg='double nesting in path should be treated as paths')
|
||||
self.assertEqual(traverse_obj(['0', [1, 2]], [(0, 1), 0]), [1],
|
||||
msg='do not fail early on branching')
|
||||
self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', ((1, ('fail', 'url')), (0, 'url')))),
|
||||
['https://www.example.com/0', 'https://www.example.com/1'],
|
||||
msg='tripple nesting in path should be treated as branches')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ('fail', (..., 'url')))),
|
||||
['https://www.example.com/0', 'https://www.example.com/1'],
|
||||
msg='ellipsis as branch path start gets flattened')
|
||||
|
||||
# Test dictionary as key
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}), {0: 100, 1: 1.2},
|
||||
msg='dict key should result in a dict with the same keys')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', 0, 'url')}),
|
||||
{0: 'https://www.example.com/0'},
|
||||
msg='dict key should allow paths')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', (3, 0), 'url')}),
|
||||
{0: ['https://www.example.com/0']},
|
||||
msg='tuple in dict path should be treated as branches')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, 'fail'), (0, 'url')))}),
|
||||
{0: ['https://www.example.com/0']},
|
||||
msg='double nesting in dict path should be treated as paths')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, ('fail', 'url')), (0, 'url')))}),
|
||||
{0: ['https://www.example.com/1', 'https://www.example.com/0']},
|
||||
msg='tripple nesting in dict path should be treated as branches')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}), {},
|
||||
msg='remove `None` values when dict key')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}, default=...), {0: ...},
|
||||
msg='do not remove `None` values if `default`')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}), {0: {}},
|
||||
msg='do not remove empty values when dict key')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}, default=...), {0: {}},
|
||||
msg='do not remove empty values when dict key and a default')
|
||||
self.assertEqual(traverse_obj(_TEST_DATA, {0: ('dict', ...)}), {0: []},
|
||||
msg='if branch in dict key not successful, return `[]`')
|
||||
|
||||
# Testing default parameter behavior
|
||||
_DEFAULT_DATA = {'None': None, 'int': 0, 'list': []}
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail'), None,
|
||||
msg='default value should be `None`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', 'fail', default=...), ...,
|
||||
msg='chained fails should result in default')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', 'int'), 0,
|
||||
msg='should not short cirquit on `None`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', default=1), 1,
|
||||
msg='invalid dict key should result in `default`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', default=1), 1,
|
||||
msg='`None` is a deliberate sentinel and should become `default`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', 10)), None,
|
||||
msg='`IndexError` should result in `default`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=1), 1,
|
||||
msg='if branched but not successful return `default` if defined, not `[]`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=None), None,
|
||||
msg='if branched but not successful return `default` even if `default` is `None`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail')), [],
|
||||
msg='if branched but not successful return `[]`, not `default`')
|
||||
self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', ...)), [],
|
||||
msg='if branched but object is empty return `[]`, not `default`')
|
||||
|
||||
# Testing expected_type behavior
|
||||
_EXPECTED_TYPE_DATA = {'str': 'str', 'int': 0}
|
||||
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=str), 'str',
|
||||
msg='accept matching `expected_type` type')
|
||||
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=int), None,
|
||||
msg='reject non matching `expected_type` type')
|
||||
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'int', expected_type=lambda x: str(x)), '0',
|
||||
msg='transform type using type function')
|
||||
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str',
|
||||
expected_type=lambda _: 1 / 0), None,
|
||||
msg='wrap expected_type fuction in try_call')
|
||||
self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, ..., expected_type=str), ['str'],
|
||||
msg='eliminate items that expected_type fails on')
|
||||
|
||||
# Test get_all behavior
|
||||
_GET_ALL_DATA = {'key': [0, 1, 2]}
|
||||
self.assertEqual(traverse_obj(_GET_ALL_DATA, ('key', ...), get_all=False), 0,
|
||||
msg='if not `get_all`, return only first matching value')
|
||||
self.assertEqual(traverse_obj(_GET_ALL_DATA, ..., get_all=False), [0, 1, 2],
|
||||
msg='do not overflatten if not `get_all`')
|
||||
|
||||
# Test casesense behavior
|
||||
_CASESENSE_DATA = {
|
||||
'KeY': 'value0',
|
||||
0: {
|
||||
'KeY': 'value1',
|
||||
0: {'KeY': 'value2'},
|
||||
},
|
||||
}
|
||||
self.assertEqual(traverse_obj(_CASESENSE_DATA, 'key'), None,
|
||||
msg='dict keys should be case sensitive unless `casesense`')
|
||||
self.assertEqual(traverse_obj(_CASESENSE_DATA, 'keY',
|
||||
casesense=False), 'value0',
|
||||
msg='allow non matching key case if `casesense`')
|
||||
self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ('keY',)),
|
||||
casesense=False), ['value1'],
|
||||
msg='allow non matching key case in branch if `casesense`')
|
||||
self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ((0, 'keY'),)),
|
||||
casesense=False), ['value2'],
|
||||
msg='allow non matching key case in branch path if `casesense`')
|
||||
|
||||
# Test traverse_string behavior
|
||||
_TRAVERSE_STRING_DATA = {'str': 'str', 1.2: 1.2}
|
||||
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0)), None,
|
||||
msg='do not traverse into string if not `traverse_string`')
|
||||
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0),
|
||||
traverse_string=True), 's',
|
||||
msg='traverse into string if `traverse_string`')
|
||||
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, (1.2, 1),
|
||||
traverse_string=True), '.',
|
||||
msg='traverse into converted data if `traverse_string`')
|
||||
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', ...),
|
||||
traverse_string=True), list('str'),
|
||||
msg='`...` branching into string should result in list')
|
||||
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', (0, 2)),
|
||||
traverse_string=True), ['s', 'r'],
|
||||
msg='branching into string should result in list')
|
||||
self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', lambda _, x: x),
|
||||
traverse_string=True), list('str'),
|
||||
msg='function branching into string should result in list')
|
||||
|
||||
# Test is_user_input behavior
|
||||
_IS_USER_INPUT_DATA = {'range8': list(range(8))}
|
||||
self.assertEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', '3'),
|
||||
is_user_input=True), 3,
|
||||
msg='allow for string indexing if `is_user_input`')
|
||||
self.assertCountEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', '3:'),
|
||||
is_user_input=True), tuple(range(8))[3:],
|
||||
msg='allow for string slice if `is_user_input`')
|
||||
self.assertCountEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', ':4:2'),
|
||||
is_user_input=True), tuple(range(8))[:4:2],
|
||||
msg='allow step in string slice if `is_user_input`')
|
||||
self.assertCountEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', ':'),
|
||||
is_user_input=True), range(8),
|
||||
msg='`:` should be treated as `...` if `is_user_input`')
|
||||
with self.assertRaises(TypeError, msg='too many params should result in error'):
|
||||
traverse_obj(_IS_USER_INPUT_DATA, ('range8', ':::'), is_user_input=True)
|
||||
|
||||
# Test re.Match as input obj
|
||||
mobj = re.fullmatch(r'0(12)(?P<group>3)(4)?', '0123')
|
||||
self.assertEqual(traverse_obj(mobj, ...), [x for x in mobj.groups() if x is not None],
|
||||
msg='`...` on a `re.Match` should give its `groups()`')
|
||||
self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 2)), ['0123', '3'],
|
||||
msg='function on a `re.Match` should give groupno, value starting at 0')
|
||||
self.assertEqual(traverse_obj(mobj, 'group'), '3',
|
||||
msg='str key on a `re.Match` should give group with that name')
|
||||
self.assertEqual(traverse_obj(mobj, 2), '3',
|
||||
msg='int key on a `re.Match` should give group with that name')
|
||||
self.assertEqual(traverse_obj(mobj, 'gRoUp', casesense=False), '3',
|
||||
msg='str key on a `re.Match` should respect casesense')
|
||||
self.assertEqual(traverse_obj(mobj, 'fail'), None,
|
||||
msg='failing str key on a `re.Match` should return `default`')
|
||||
self.assertEqual(traverse_obj(mobj, 'gRoUpS', casesense=False), None,
|
||||
msg='failing str key on a `re.Match` should return `default`')
|
||||
self.assertEqual(traverse_obj(mobj, 8), None,
|
||||
msg='failing int key on a `re.Match` should return `default`')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
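
The traverse_obj tests above double as documentation for the helper's path syntax. A compact usage sketch (the sample dict is made up; the call forms are the ones exercised in the tests):

    from yt_dlp.utils import traverse_obj

    info = {'urls': [{'index': 0, 'url': 'https://www.example.com/0'},
                     {'index': 1, 'url': 'https://www.example.com/1'}]}

    traverse_obj(info, ('urls', 0, 'url'))                  # 'https://www.example.com/0'
    traverse_obj(info, ('urls', ..., 'url'))                # branch with `...`: both URLs
    traverse_obj(info, ('urls', 5, 'url'), default='n/a')   # failed paths fall back to `default`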
@@ -1,11 +1,15 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import subprocess
import sys
import unittest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import subprocess

rootDir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
@@ -6,11 +7,12 @@ import unittest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import xml.etree.ElementTree
|
||||
from test.helper import get_params, is_download_test, try_rm
|
||||
|
||||
import yt_dlp.extractor
|
||||
import yt_dlp.YoutubeDL
|
||||
from test.helper import get_params, is_download_test, try_rm
|
||||
|
||||
|
||||
class YoutubeDL(yt_dlp.YoutubeDL):
|
||||
|
||||
@@ -1,4 +1,5 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys
@@ -6,9 +7,10 @@ import unittest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from test.helper import FakeYDL, is_download_test

from test.helper import FakeYDL, is_download_test
from yt_dlp.extractor import YoutubeIE, YoutubeTabIE
from yt_dlp.utils import ExtractorError


@is_download_test
@@ -52,6 +54,18 @@ class TestYoutubeLists(unittest.TestCase):
        self.assertEqual(video['duration'], 10)
        self.assertEqual(video['uploader'], 'Philipp Hagemeister')

    def test_youtube_channel_no_uploads(self):
        dl = FakeYDL()
        dl.params['extract_flat'] = True
        ie = YoutubeTabIE(dl)
        # no uploads
        with self.assertRaisesRegex(ExtractorError, r'no uploads'):
            ie.extract('https://www.youtube.com/channel/UC2yXPzFejc422buOIzn_0CA')

        # no uploads and no UCID given
        with self.assertRaisesRegex(ExtractorError, r'no uploads'):
            ie.extract('https://www.youtube.com/news')


if __name__ == '__main__':
    unittest.main()
@@ -1,4 +1,5 @@
#!/usr/bin/env python3

# Allow direct execution
import os
import sys
@@ -1,18 +1,19 @@
#!/usr/bin/env python3

# Allow direct execution
import contextlib
import os
import sys
import unittest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import contextlib
import re
import string
import urllib.request
from test.helper import FakeYDL, is_download_test

from yt_dlp.compat import compat_str
from test.helper import FakeYDL, is_download_test
from yt_dlp.extractor import YoutubeIE
from yt_dlp.jsinterp import JSInterpreter
@@ -93,6 +94,46 @@ _NSIG_TESTS = [
        'https://www.youtube.com/s/player/5dd88d1d/player-plasma-ias-phone-en_US.vflset/base.js',
        'kSxKFLeqzv_ZyHSAt', 'n8gS8oRlHOxPFA',
    ),
    (
        'https://www.youtube.com/s/player/324f67b9/player_ias.vflset/en_US/base.js',
        'xdftNy7dh9QGnhW', '22qLGxrmX8F1rA',
    ),
    (
        'https://www.youtube.com/s/player/4c3f79c5/player_ias.vflset/en_US/base.js',
        'TDCstCG66tEAO5pR9o', 'dbxNtZ14c-yWyw',
    ),
    (
        'https://www.youtube.com/s/player/c81bbb4a/player_ias.vflset/en_US/base.js',
        'gre3EcLurNY2vqp94', 'Z9DfGxWP115WTg',
    ),
    (
        'https://www.youtube.com/s/player/1f7d5369/player_ias.vflset/en_US/base.js',
        'batNX7sYqIJdkJ', 'IhOkL_zxbkOZBw',
    ),
    (
        'https://www.youtube.com/s/player/009f1d77/player_ias.vflset/en_US/base.js',
        '5dwFHw8aFWQUQtffRq', 'audescmLUzI3jw',
    ),
    (
        'https://www.youtube.com/s/player/dc0c6770/player_ias.vflset/en_US/base.js',
        '5EHDMgYLV6HPGk_Mu-kk', 'n9lUJLHbxUI0GQ',
    ),
    (
        'https://www.youtube.com/s/player/113ca41c/player_ias.vflset/en_US/base.js',
        'cgYl-tlYkhjT7A', 'hI7BBr2zUgcmMg',
    ),
    (
        'https://www.youtube.com/s/player/c57c113c/player_ias.vflset/en_US/base.js',
        'M92UUMHa8PdvPd3wyM', '3hPqLJsiNZx7yA',
    ),
    (
        'https://www.youtube.com/s/player/5a3b6271/player_ias.vflset/en_US/base.js',
        'B2j7f_UPT4rfje85Lu_e', 'm5DmNymaGQ5RdQ',
    ),
    (
        'https://www.youtube.com/s/player/7a062b77/player_ias.vflset/en_US/base.js',
        'NRcE3y3mVtm_cV-W', 'VbsCYUATvqlt5w',
    ),
]
@@ -100,6 +141,7 @@ _NSIG_TESTS = [
class TestPlayerInfo(unittest.TestCase):
    def test_youtube_extract_player_info(self):
        PLAYER_URLS = (
            ('https://www.youtube.com/s/player/4c3f79c5/player_ias.vflset/en_US/base.js', '4c3f79c5'),
            ('https://www.youtube.com/s/player/64dddad9/player_ias.vflset/en_US/base.js', '64dddad9'),
            ('https://www.youtube.com/s/player/64dddad9/player_ias.vflset/fr_FR/base.js', '64dddad9'),
            ('https://www.youtube.com/s/player/64dddad9/player-plasma-ias-phone-en_US.vflset/base.js', '64dddad9'),
@@ -157,7 +199,7 @@ def t_factory(name, sig_func, url_pattern):
    def signature(jscode, sig_input):
        func = YoutubeIE(FakeYDL())._parse_sig_js(jscode)
        src_sig = (
            compat_str(string.printable[:sig_input])
            str(string.printable[:sig_input])
            if isinstance(sig_input, int) else sig_input)
        return func(src_sig)
test/testdata/ism/ec-3_test.Manifest (vendored new file, 1 line)
@@ -0,0 +1 @@
<?xml version="1.0" encoding="utf-8"?><!--Transformed by VSMT using XSL stylesheet for rule Identity--><!-- Created with Unified Streaming Platform (version=1.10.12-18737) --><SmoothStreamingMedia MajorVersion="2" MinorVersion="0" TimeScale="10000000" Duration="370000000"><StreamIndex Type="audio" QualityLevels="1" TimeScale="10000000" Language="deu" Name="audio_deu" Chunks="19" Url="QualityLevels({bitrate})/Fragments(audio_deu={start time})?noStreamProfile=1"><QualityLevel Index="0" Bitrate="127802" CodecPrivateData="1190" SamplingRate="48000" Channels="2" BitsPerSample="16" PacketSize="4" AudioTag="255" FourCC="AACL" /><c t="0" d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="20053333" /><c d="19840000" /><c d="20053333" /><c d="20053334" /><c d="7253333" /></StreamIndex><StreamIndex Type="audio" QualityLevels="1" TimeScale="10000000" Language="deu" Name="audio_deu_1" Chunks="19" Url="QualityLevels({bitrate})/Fragments(audio_deu_1={start time})?noStreamProfile=1"><QualityLevel Index="0" Bitrate="224000" CodecPrivateData="00063F000000AF87FBA7022DFB42A4D405CD93843BDD0700200F00" FourCCData="0700200F00" SamplingRate="48000" Channels="6" BitsPerSample="16" PacketSize="896" AudioTag="65534" FourCC="EC-3" /><c t="0" d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="20160000" /><c d="19840000" /><c d="8320000" /></StreamIndex><StreamIndex Type="video" QualityLevels="8" TimeScale="10000000" Language="deu" Name="video_deu" Chunks="19" Url="QualityLevels({bitrate})/Fragments(video_deu={start time})?noStreamProfile=1" MaxWidth="1920" MaxHeight="1080" DisplayWidth="1920" DisplayHeight="1080"><QualityLevel Index="0" Bitrate="23909" CodecPrivateData="000000016742C00CDB06077E5C05A808080A00000300020000030009C0C02EE0177CC6300F142AE00000000168CA8DC8" MaxWidth="384" MaxHeight="216" FourCC="AVC1" /><QualityLevel Index="1" Bitrate="403188" CodecPrivateData="00000001674D4014E98323B602D4040405000003000100000300320F1429380000000168EAECF2" MaxWidth="400" MaxHeight="224" FourCC="AVC1" /><QualityLevel Index="2" Bitrate="680365" CodecPrivateData="00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2" MaxWidth="640" MaxHeight="360" FourCC="AVC1" /><QualityLevel Index="3" Bitrate="1253465" CodecPrivateData="00000001674D401EE981405FF2E02D4040405000000300100000030320F162D3800000000168EAECF2" MaxWidth="640" MaxHeight="360" FourCC="AVC1" /><QualityLevel Index="4" Bitrate="2121558" CodecPrivateData="00000001674D401EECA0601BD80B50101014000003000400000300C83C58B6580000000168E93B3C80" MaxWidth="768" MaxHeight="432" FourCC="AVC1" /><QualityLevel Index="5" Bitrate="3275545" CodecPrivateData="00000001674D4020ECA02802DD80B501010140000003004000000C83C60C65800000000168E93B3C80" MaxWidth="1280" MaxHeight="720" FourCC="AVC1" /><QualityLevel Index="6" Bitrate="5300196" CodecPrivateData="00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80" MaxWidth="1920" MaxHeight="1080" FourCC="AVC1" /><QualityLevel Index="7" Bitrate="8079312" CodecPrivateData="00000001674D4028ECA03C0113F2E02D4040405000000300100000030320F18319600000000168E93B3C80" 
MaxWidth="1920" MaxHeight="1080" FourCC="AVC1" /><c t="0" d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="20000000" /><c d="10000000" /></StreamIndex></SmoothStreamingMedia>
test/testdata/yt_dlp_plugins/extractor/_ignore.py (vendored new file, 5 lines)
@@ -0,0 +1,5 @@
from yt_dlp.extractor.common import InfoExtractor


class IgnorePluginIE(InfoExtractor):
    pass
test/testdata/yt_dlp_plugins/extractor/ignore.py (vendored new file, 12 lines)
@@ -0,0 +1,12 @@
from yt_dlp.extractor.common import InfoExtractor


class IgnoreNotInAllPluginIE(InfoExtractor):
    pass


class InAllPluginIE(InfoExtractor):
    pass


__all__ = ['InAllPluginIE']
test/testdata/yt_dlp_plugins/extractor/normal.py (vendored new file, 9 lines)
@@ -0,0 +1,9 @@
from yt_dlp.extractor.common import InfoExtractor


class NormalPluginIE(InfoExtractor):
    pass


class _IgnoreUnderscorePluginIE(InfoExtractor):
    pass
test/testdata/yt_dlp_plugins/postprocessor/normal.py (vendored new file, 5 lines)
@@ -0,0 +1,5 @@
from yt_dlp.postprocessor.common import PostProcessor


class NormalPluginPP(PostProcessor):
    pass
test/testdata/zipped_plugins/yt_dlp_plugins/extractor/zipped.py (vendored new file, 5 lines)
@@ -0,0 +1,5 @@
from yt_dlp.extractor.common import InfoExtractor


class ZippedPluginIE(InfoExtractor):
    pass
test/testdata/zipped_plugins/yt_dlp_plugins/postprocessor/zipped.py (vendored new file, 5 lines)
@@ -0,0 +1,5 @@
from yt_dlp.postprocessor.common import PostProcessor


class ZippedPluginPP(PostProcessor):
    pass
tox.ini (deleted, 16 lines)
@@ -1,16 +0,0 @@
[tox]
envlist = py26,py27,py33,py34,py35

# Needed?
[testenv]
deps =
    nose
    coverage
# We need a valid $HOME for test_compat_expanduser
passenv = HOME
defaultargs = test --exclude test_download.py --exclude test_age_restriction.py
    --exclude test_subtitles.py --exclude test_write_annotations.py
    --exclude test_youtube_lists.py --exclude test_iqiyi_sdk_interpreter.py
    --exclude test_socks.py
commands = nosetests --verbose {posargs:{[testenv]defaultargs}} # --with-coverage --cover-package=yt_dlp --cover-html
# test.test_download:TestDownload.test_NowVideo
@@ -1,2 +1,2 @@
#!/bin/sh
#!/usr/bin/env sh
exec "${PYTHON:-python3}" -bb -Werror -Xdev "$(dirname "$(realpath "$0")")/yt_dlp/__main__.py" "$@"
yt_dlp/YoutubeDL.py (1243 lines changed; diff suppressed because it is too large)
@@ -1,22 +1,29 @@
|
||||
#!/usr/bin/env python3
|
||||
f'You are using an unsupported version of Python. Only Python versions 3.6 and above are supported by yt-dlp' # noqa: F541
|
||||
try:
|
||||
import contextvars # noqa: F401
|
||||
except Exception:
|
||||
raise Exception(
|
||||
f'You are using an unsupported version of Python. Only Python versions 3.7 and above are supported by yt-dlp') # noqa: F541
|
||||
|
||||
__license__ = 'Public Domain'
|
||||
|
||||
import collections
|
||||
import getpass
|
||||
import itertools
|
||||
import optparse
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from .compat import compat_getpass, compat_os_name, compat_shlex_quote
|
||||
from .compat import compat_shlex_quote
|
||||
from .cookies import SUPPORTED_BROWSERS, SUPPORTED_KEYRINGS
|
||||
from .downloader import FileDownloader
|
||||
from .extractor import GenericIE, list_extractor_classes
|
||||
from .downloader.external import get_external_downloader
|
||||
from .extractor import list_extractor_classes
|
||||
from .extractor.adobepass import MSO_INFO
|
||||
from .extractor.common import InfoExtractor
|
||||
from .options import parseOpts
|
||||
from .postprocessor import (
|
||||
FFmpegExtractAudioPP,
|
||||
FFmpegMergerPP,
|
||||
FFmpegPostProcessor,
|
||||
FFmpegSubtitlesConvertorPP,
|
||||
FFmpegThumbnailsConvertorPP,
|
||||
FFmpegVideoConvertorPP,
|
||||
@@ -24,49 +31,59 @@ from .postprocessor import (
|
||||
MetadataFromFieldPP,
|
||||
MetadataParserPP,
|
||||
)
|
||||
from .update import run_update
|
||||
from .update import Updater
|
||||
from .utils import (
|
||||
NO_DEFAULT,
|
||||
POSTPROCESS_WHEN,
|
||||
DateRange,
|
||||
DownloadCancelled,
|
||||
DownloadError,
|
||||
FormatSorter,
|
||||
GeoUtils,
|
||||
PlaylistEntries,
|
||||
SameFileError,
|
||||
decodeOption,
|
||||
download_range_func,
|
||||
expand_path,
|
||||
float_or_none,
|
||||
format_field,
|
||||
int_or_none,
|
||||
match_filter_func,
|
||||
parse_bytes,
|
||||
parse_duration,
|
||||
preferredencoding,
|
||||
read_batch_urls,
|
||||
read_stdin,
|
||||
render_table,
|
||||
setproctitle,
|
||||
std_headers,
|
||||
traverse_obj,
|
||||
variadic,
|
||||
write_string,
|
||||
)
|
||||
from .YoutubeDL import YoutubeDL
|
||||
|
||||
_IN_CLI = False
|
||||
|
||||
|
||||
def _exit(status=0, *args):
|
||||
for msg in args:
|
||||
sys.stderr.write(msg)
|
||||
raise SystemExit(status)
|
||||
|
||||
|
||||
def get_urls(urls, batchfile, verbose):
|
||||
# Batch file verification
|
||||
batch_urls = []
|
||||
if batchfile is not None:
|
||||
try:
|
||||
if batchfile == '-':
|
||||
write_string('Reading URLs from stdin - EOF (%s) to end:\n' % (
|
||||
'Ctrl+Z' if compat_os_name == 'nt' else 'Ctrl+D'))
|
||||
batchfd = sys.stdin
|
||||
else:
|
||||
batchfd = open(
|
||||
expand_path(batchfile), encoding='utf-8', errors='ignore')
|
||||
batch_urls = read_batch_urls(batchfd)
|
||||
batch_urls = read_batch_urls(
|
||||
read_stdin('URLs') if batchfile == '-'
|
||||
else open(expand_path(batchfile), encoding='utf-8', errors='ignore'))
|
||||
if verbose:
|
||||
write_string('[debug] Batch file urls: ' + repr(batch_urls) + '\n')
|
||||
except OSError:
|
||||
sys.exit('ERROR: batch file %s could not be read' % batchfile)
|
||||
_exit(f'ERROR: batch file {batchfile} could not be read')
|
||||
_enc = preferredencoding()
|
||||
return [
|
||||
url.strip().decode(_enc, 'ignore') if isinstance(url, bytes) else url.strip()
|
||||
@@ -76,6 +93,9 @@ def get_urls(urls, batchfile, verbose):
|
||||
def print_extractor_information(opts, urls):
|
||||
out = ''
|
||||
if opts.list_extractors:
|
||||
# Importing GenericIE is currently slow since it imports YoutubeIE
|
||||
from .extractor.generic import GenericIE
|
||||
|
||||
urls = dict.fromkeys(urls, False)
|
||||
for ie in list_extractor_classes(opts.age_limit):
|
||||
out += ie.IE_NAME + (' (CURRENTLY BROKEN)' if not ie.working() else '') + '\n'
|
||||
@@ -131,7 +151,7 @@ def set_compat_opts(opts):
|
||||
else:
|
||||
opts.embed_infojson = False
|
||||
if 'format-sort' in opts.compat_opts:
|
||||
opts.format_sort.extend(InfoExtractor.FormatSort.ytdl_default)
|
||||
opts.format_sort.extend(FormatSorter.ytdl_default)
|
||||
_video_multistreams_set = set_default_compat('multistreams', 'allow_multiple_video_streams', False, remove_compat=False)
|
||||
_audio_multistreams_set = set_default_compat('multistreams', 'allow_multiple_audio_streams', False, remove_compat=False)
|
||||
if _video_multistreams_set is False and _audio_multistreams_set is False:
|
||||
@@ -206,18 +226,16 @@ def validate_options(opts):
|
||||
|
||||
# Format sort
|
||||
for f in opts.format_sort:
|
||||
validate_regex('format sorting', f, InfoExtractor.FormatSort.regex)
|
||||
validate_regex('format sorting', f, FormatSorter.regex)
|
||||
|
||||
# Postprocessor formats
|
||||
validate_in('audio format', opts.audioformat, ['best'] + list(FFmpegExtractAudioPP.SUPPORTED_EXTS))
|
||||
validate_regex('merge output format', opts.merge_output_format,
|
||||
r'({0})(/({0}))*'.format('|'.join(map(re.escape, FFmpegMergerPP.SUPPORTED_EXTS))))
|
||||
validate_regex('audio format', opts.audioformat, FFmpegExtractAudioPP.FORMAT_RE)
|
||||
validate_in('subtitle format', opts.convertsubtitles, FFmpegSubtitlesConvertorPP.SUPPORTED_EXTS)
|
||||
validate_in('thumbnail format', opts.convertthumbnails, FFmpegThumbnailsConvertorPP.SUPPORTED_EXTS)
|
||||
if opts.recodevideo is not None:
|
||||
opts.recodevideo = opts.recodevideo.replace(' ', '')
|
||||
validate_regex('video recode format', opts.recodevideo, FFmpegVideoConvertorPP.FORMAT_RE)
|
||||
if opts.remuxvideo is not None:
|
||||
opts.remuxvideo = opts.remuxvideo.replace(' ', '')
|
||||
validate_regex('video remux format', opts.remuxvideo, FFmpegVideoRemuxerPP.FORMAT_RE)
|
||||
validate_regex('thumbnail format', opts.convertthumbnails, FFmpegThumbnailsConvertorPP.FORMAT_RE)
|
||||
validate_regex('recode video format', opts.recodevideo, FFmpegVideoConvertorPP.FORMAT_RE)
|
||||
validate_regex('remux video format', opts.remuxvideo, FFmpegVideoRemuxerPP.FORMAT_RE)
|
||||
if opts.audioquality:
|
||||
opts.audioquality = opts.audioquality.strip('k').strip('K')
|
||||
# int_or_none prevents inf, nan
|
||||
@@ -239,20 +257,42 @@ def validate_options(opts):
|
||||
opts.extractor_retries = parse_retries('extractor', opts.extractor_retries)
|
||||
opts.file_access_retries = parse_retries('file access', opts.file_access_retries)
|
||||
|
||||
# Retry sleep function
|
||||
def parse_sleep_func(expr):
|
||||
NUMBER_RE = r'\d+(?:\.\d+)?'
|
||||
op, start, limit, step, *_ = tuple(re.fullmatch(
|
||||
rf'(?:(linear|exp)=)?({NUMBER_RE})(?::({NUMBER_RE})?)?(?::({NUMBER_RE}))?',
|
||||
expr.strip()).groups()) + (None, None)
|
||||
|
||||
if op == 'exp':
|
||||
return lambda n: min(float(start) * (float(step or 2) ** n), float(limit or 'inf'))
|
||||
else:
|
||||
default_step = start if op or limit else 0
|
||||
return lambda n: min(float(start) + float(step or default_step) * n, float(limit or 'inf'))
|
||||
|
||||
for key, expr in opts.retry_sleep.items():
|
||||
if not expr:
|
||||
del opts.retry_sleep[key]
|
||||
continue
|
||||
try:
|
||||
opts.retry_sleep[key] = parse_sleep_func(expr)
|
||||
except AttributeError:
|
||||
raise ValueError(f'invalid {key} retry sleep expression {expr!r}')
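A rough sketch of what the --retry-sleep expressions parsed above evaluate to, assuming the parse_sleep_func defined here (example expressions only; the accepted format is [linear|exp=]START[:LIMIT][:STEP]):

    linear = parse_sleep_func('linear=1::2')   # 1, 3, 5, ... seconds for retry n = 0, 1, 2, ...
    exp = parse_sleep_func('exp=1:120')        # 1, 2, 4, ... seconds, capped at 120
    print([linear(n) for n in range(4)])       # [1.0, 3.0, 5.0, 7.0]
    print([exp(n) for n in range(4)])          # [1.0, 2.0, 4.0, 8.0]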
|
||||
|
||||
# Bytes
|
||||
def parse_bytes(name, value):
|
||||
def validate_bytes(name, value):
|
||||
if value is None:
|
||||
return None
|
||||
numeric_limit = FileDownloader.parse_bytes(value)
|
||||
numeric_limit = parse_bytes(value)
|
||||
validate(numeric_limit is not None, 'rate limit', value)
|
||||
return numeric_limit
|
||||
|
||||
opts.ratelimit = parse_bytes('rate limit', opts.ratelimit)
|
||||
opts.throttledratelimit = parse_bytes('throttled rate limit', opts.throttledratelimit)
|
||||
opts.min_filesize = parse_bytes('min filesize', opts.min_filesize)
|
||||
opts.max_filesize = parse_bytes('max filesize', opts.max_filesize)
|
||||
opts.buffersize = parse_bytes('buffer size', opts.buffersize)
|
||||
opts.http_chunk_size = parse_bytes('http chunk size', opts.http_chunk_size)
|
||||
opts.ratelimit = validate_bytes('rate limit', opts.ratelimit)
|
||||
opts.throttledratelimit = validate_bytes('throttled rate limit', opts.throttledratelimit)
|
||||
opts.min_filesize = validate_bytes('min filesize', opts.min_filesize)
|
||||
opts.max_filesize = validate_bytes('max filesize', opts.max_filesize)
|
||||
opts.buffersize = validate_bytes('buffer size', opts.buffersize)
|
||||
opts.http_chunk_size = validate_bytes('http chunk size', opts.http_chunk_size)
|
||||
|
||||
# Output templates
|
||||
def validate_outtmpl(tmpl, msg):
|
||||
@@ -283,27 +323,39 @@ def validate_options(opts):
|
||||
'Cannot download a video and extract audio into the same file! '
|
||||
f'Use "{outtmpl_default}.%(ext)s" instead of "{outtmpl_default}" as the output template')
|
||||
|
||||
# Remove chapters
|
||||
remove_chapters_patterns, opts.remove_ranges = [], []
|
||||
for regex in opts.remove_chapters or []:
|
||||
if regex.startswith('*'):
|
||||
dur = list(map(parse_duration, regex[1:].split('-')))
|
||||
if len(dur) == 2 and all(t is not None for t in dur):
|
||||
opts.remove_ranges.append(tuple(dur))
|
||||
def parse_chapters(name, value):
|
||||
chapters, ranges = [], []
|
||||
parse_timestamp = lambda x: float('inf') if x in ('inf', 'infinite') else parse_duration(x)
|
||||
for regex in value or []:
|
||||
if regex.startswith('*'):
|
||||
for range_ in map(str.strip, regex[1:].split(',')):
|
||||
mobj = range_ != '-' and re.fullmatch(r'([^-]+)?\s*-\s*([^-]+)?', range_)
|
||||
dur = mobj and (parse_timestamp(mobj.group(1) or '0'), parse_timestamp(mobj.group(2) or 'inf'))
|
||||
if None in (dur or [None]):
|
||||
raise ValueError(f'invalid {name} time range "{regex}". Must be of the form "*start-end"')
|
||||
ranges.append(dur)
|
||||
continue
|
||||
raise ValueError(f'invalid --remove-chapters time range "{regex}". Must be of the form *start-end')
|
||||
try:
|
||||
remove_chapters_patterns.append(re.compile(regex))
|
||||
except re.error as err:
|
||||
raise ValueError(f'invalid --remove-chapters regex "{regex}" - {err}')
|
||||
opts.remove_chapters = remove_chapters_patterns
|
||||
try:
|
||||
chapters.append(re.compile(regex))
|
||||
except re.error as err:
|
||||
raise ValueError(f'invalid {name} regex "{regex}" - {err}')
|
||||
return chapters, ranges
|
||||
|
||||
opts.remove_chapters, opts.remove_ranges = parse_chapters('--remove-chapters', opts.remove_chapters)
|
||||
opts.download_ranges = download_range_func(*parse_chapters('--download-sections', opts.download_ranges))
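As a hedged illustration of the chapter/section syntax handled above (made-up values, run through the parse_chapters defined here):

    patterns, ranges = parse_chapters('--remove-chapters', ['intro', '*0-10.5, 30-inf'])
    # patterns -> [re.compile('intro')]                       plain values are chapter-title regexes
    # ranges   -> roughly [(0, 10.5), (30.0, float('inf'))]   a leading '*' marks timestamp ranges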
|
||||
|
||||
# Cookies from browser
|
||||
if opts.cookiesfrombrowser:
|
||||
mobj = re.match(r'(?P<name>[^+:]+)(\s*\+\s*(?P<keyring>[^:]+))?(\s*:(?P<profile>.+))?', opts.cookiesfrombrowser)
|
||||
container = None
|
||||
mobj = re.fullmatch(r'''(?x)
|
||||
(?P<name>[^+:]+)
|
||||
(?:\s*\+\s*(?P<keyring>[^:]+))?
|
||||
(?:\s*:\s*(?!:)(?P<profile>.+?))?
|
||||
(?:\s*::\s*(?P<container>.+))?
|
||||
''', opts.cookiesfrombrowser)
|
||||
if mobj is None:
|
||||
raise ValueError(f'invalid cookies from browser arguments: {opts.cookiesfrombrowser}')
|
||||
browser_name, keyring, profile = mobj.group('name', 'keyring', 'profile')
|
||||
browser_name, keyring, profile, container = mobj.group('name', 'keyring', 'profile', 'container')
|
||||
browser_name = browser_name.lower()
|
||||
if browser_name not in SUPPORTED_BROWSERS:
|
||||
raise ValueError(f'unsupported browser specified for cookies: "{browser_name}". '
|
||||
@@ -313,7 +365,7 @@ def validate_options(opts):
|
||||
if keyring not in SUPPORTED_KEYRINGS:
|
||||
raise ValueError(f'unsupported keyring specified for cookies: "{keyring}". '
|
||||
f'Supported keyrings are: {", ".join(sorted(SUPPORTED_KEYRINGS))}')
|
||||
opts.cookiesfrombrowser = (browser_name, profile, keyring)
|
||||
opts.cookiesfrombrowser = (browser_name, profile, keyring, container)
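The extended --cookies-from-browser value matched above is, informally, BROWSER[+KEYRING][:PROFILE][::CONTAINER]. A small runnable sketch with made-up inputs showing how the groups split:

    import re

    pattern = r'''(?x)
        (?P<name>[^+:]+)
        (?:\s*\+\s*(?P<keyring>[^:]+))?
        (?:\s*:\s*(?!:)(?P<profile>.+?))?
        (?:\s*::\s*(?P<container>.+))?
    '''
    for spec in ('firefox', 'brave+gnomekeyring', 'chrome:Profile 1', 'firefox:default-release::Work'):
        print(re.fullmatch(pattern, spec).group('name', 'keyring', 'profile', 'container'))
    # ('firefox', None, None, None)
    # ('brave', 'gnomekeyring', None, None)
    # ('chrome', None, 'Profile 1', None)
    # ('firefox', None, 'default-release', 'Work')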
|
||||
|
||||
# MetadataParser
|
||||
def metadataparser_actions(f):
|
||||
@@ -334,12 +386,20 @@ def validate_options(opts):
|
||||
raise ValueError(f'{cmd} is invalid; {err}')
|
||||
yield action
|
||||
|
||||
parse_metadata = opts.parse_metadata or []
|
||||
if opts.metafromtitle is not None:
|
||||
parse_metadata.append('title:%s' % opts.metafromtitle)
|
||||
opts.parse_metadata = list(itertools.chain(*map(metadataparser_actions, parse_metadata)))
|
||||
opts.parse_metadata.setdefault('pre_process', []).append('title:%s' % opts.metafromtitle)
|
||||
opts.parse_metadata = {
|
||||
k: list(itertools.chain(*map(metadataparser_actions, v)))
|
||||
for k, v in opts.parse_metadata.items()
|
||||
}
|
||||
|
||||
# Other options
|
||||
if opts.playlist_items is not None:
|
||||
try:
|
||||
tuple(PlaylistEntries.parse_playlist_items(opts.playlist_items))
|
||||
except Exception as err:
|
||||
raise ValueError(f'Invalid playlist-items {opts.playlist_items!r}: {err}')
|
||||
|
||||
geo_bypass_code = opts.geo_bypass_ip_block or opts.geo_bypass_country
|
||||
if geo_bypass_code is not None:
|
||||
try:
|
||||
@@ -352,6 +412,9 @@ def validate_options(opts):
|
||||
if opts.download_archive is not None:
|
||||
opts.download_archive = expand_path(opts.download_archive)
|
||||
|
||||
if opts.ffmpeg_location is not None:
|
||||
opts.ffmpeg_location = expand_path(opts.ffmpeg_location)
|
||||
|
||||
if opts.user_agent is not None:
|
||||
opts.headers.setdefault('User-Agent', opts.user_agent)
|
||||
if opts.referer is not None:
|
||||
@@ -360,6 +423,17 @@ def validate_options(opts):
|
||||
if opts.no_sponsorblock:
|
||||
opts.sponsorblock_mark = opts.sponsorblock_remove = set()
|
||||
|
||||
default_downloader = None
|
||||
for proto, path in opts.external_downloader.items():
|
||||
if path == 'native':
|
||||
continue
|
||||
ed = get_external_downloader(path)
|
||||
if ed is None:
|
||||
raise ValueError(
|
||||
f'No such {format_field(proto, None, "%s ", ignore="default")}external downloader "{path}"')
|
||||
elif ed and proto == 'default':
|
||||
default_downloader = ed.get_basename()
|
||||
|
||||
warnings, deprecation_warnings = [], []
|
||||
|
||||
# Common mistake: -f best
|
||||
@@ -370,13 +444,18 @@ def validate_options(opts):
|
||||
'If you know what you are doing and want only the best pre-merged format, use "-f b" instead to suppress this warning')))
|
||||
|
||||
# --(postprocessor/downloader)-args without name
|
||||
def report_args_compat(name, value, key1, key2=None):
|
||||
def report_args_compat(name, value, key1, key2=None, where=None):
|
||||
if key1 in value and key2 not in value:
|
||||
warnings.append(f'{name} arguments given without specifying name. The arguments will be given to all {name}s')
|
||||
warnings.append(f'{name.title()} arguments given without specifying name. '
|
||||
f'The arguments will be given to {where or f"all {name}s"}')
|
||||
return True
|
||||
return False
|
||||
|
||||
report_args_compat('external downloader', opts.external_downloader_args, 'default')
|
||||
if report_args_compat('external downloader', opts.external_downloader_args,
|
||||
'default', where=default_downloader) and default_downloader:
|
||||
# Compat with youtube-dl's behavior. See https://github.com/ytdl-org/youtube-dl/commit/49c5293014bc11ec8c009856cd63cffa6296c1e1
|
||||
opts.external_downloader_args.setdefault(default_downloader, opts.external_downloader_args.pop('default'))
|
||||
|
||||
if report_args_compat('post-processor', opts.postprocessor_args, 'default-compat', 'default'):
|
||||
opts.postprocessor_args['default'] = opts.postprocessor_args.pop('default-compat')
|
||||
opts.postprocessor_args.setdefault('sponskrub', [])
|
||||
@@ -395,6 +474,9 @@ def validate_options(opts):
|
||||
setattr(opts, opt1, default)
|
||||
|
||||
# Conflicting options
|
||||
report_conflict('--playlist-reverse', 'playlist_reverse', '--playlist-random', 'playlist_random')
|
||||
report_conflict('--playlist-reverse', 'playlist_reverse', '--lazy-playlist', 'lazy_playlist')
|
||||
report_conflict('--playlist-random', 'playlist_random', '--lazy-playlist', 'lazy_playlist')
|
||||
report_conflict('--dateafter', 'dateafter', '--date', 'date', default=None)
|
||||
report_conflict('--datebefore', 'datebefore', '--date', 'date', default=None)
|
||||
report_conflict('--exec-before-download', 'exec_before_dl_cmd',
|
||||
@@ -408,7 +490,7 @@ def validate_options(opts):
|
||||
val1=opts.sponskrub and opts.sponskrub_cut)
|
||||
|
||||
# Conflicts with --allow-unplayable-formats
|
||||
report_conflict('--add-metadata', 'addmetadata')
|
||||
report_conflict('--embed-metadata', 'addmetadata')
|
||||
report_conflict('--embed-chapters', 'addchapters')
|
||||
report_conflict('--embed-info-json', 'embed_infojson')
|
||||
report_conflict('--embed-subs', 'embedsubtitles')
|
||||
@@ -456,7 +538,7 @@ def validate_options(opts):
|
||||
# Do not unnecessarily download audio
|
||||
opts.format = 'bestaudio/best'
|
||||
|
||||
if opts.getcomments and opts.writeinfojson is None:
|
||||
if opts.getcomments and opts.writeinfojson is None and not opts.embed_infojson:
|
||||
# If JSON is not printed anywhere, but comments are requested, save it to file
|
||||
if not opts.dumpjson or opts.print_json or opts.dump_single_json:
|
||||
opts.writeinfojson = True
|
||||
@@ -471,9 +553,9 @@ def validate_options(opts):
|
||||
|
||||
# Ask for passwords
|
||||
if opts.username is not None and opts.password is None:
|
||||
opts.password = compat_getpass('Type account password and press [Return]: ')
|
||||
opts.password = getpass.getpass('Type account password and press [Return]: ')
|
||||
if opts.ap_username is not None and opts.ap_password is None:
|
||||
opts.ap_password = compat_getpass('Type TV provider account password and press [Return]: ')
|
||||
opts.ap_password = getpass.getpass('Type TV provider account password and press [Return]: ')
|
||||
|
||||
return warnings, deprecation_warnings
|
||||
|
||||
@@ -481,11 +563,11 @@ def validate_options(opts):
|
||||
def get_postprocessors(opts):
|
||||
yield from opts.add_postprocessors
|
||||
|
||||
if opts.parse_metadata:
|
||||
for when, actions in opts.parse_metadata.items():
|
||||
yield {
|
||||
'key': 'MetadataParser',
|
||||
'actions': opts.parse_metadata,
|
||||
'when': 'pre_process'
|
||||
'actions': actions,
|
||||
'when': when
|
||||
}
|
||||
sponsorblock_query = opts.sponsorblock_mark | opts.sponsorblock_remove
|
||||
if sponsorblock_query:
|
||||
@@ -605,8 +687,11 @@ def get_postprocessors(opts):
|
||||
}
|
||||
|
||||
|
||||
ParsedOptions = collections.namedtuple('ParsedOptions', ('parser', 'options', 'urls', 'ydl_opts'))
|
||||
|
||||
|
||||
def parse_options(argv=None):
|
||||
""" @returns (parser, opts, urls, ydl_opts) """
|
||||
"""@returns ParsedOptions(parser, opts, urls, ydl_opts)"""
|
||||
parser, opts, urls = parseOpts(argv)
|
||||
urls = get_urls(urls, opts.batchfile, opts.verbose)
|
||||
|
||||
@@ -618,19 +703,34 @@ def parse_options(argv=None):
|
||||
|
||||
postprocessors = list(get_postprocessors(opts))
|
||||
|
||||
print_only = bool(opts.forceprint) and all(k not in opts.forceprint for k in POSTPROCESS_WHEN[2:])
|
||||
print_only = bool(opts.forceprint) and all(k not in opts.forceprint for k in POSTPROCESS_WHEN[3:])
|
||||
any_getting = any(getattr(opts, k) for k in (
|
||||
'dumpjson', 'dump_single_json', 'getdescription', 'getduration', 'getfilename',
|
||||
'getformat', 'getid', 'getthumbnail', 'gettitle', 'geturl'
|
||||
))
|
||||
|
||||
playlist_pps = [pp for pp in postprocessors if pp.get('when') == 'playlist']
|
||||
write_playlist_infojson = (opts.writeinfojson and not opts.clean_infojson
|
||||
and opts.allow_playlist_files and opts.outtmpl.get('pl_infojson') != '')
|
||||
if not any((
|
||||
opts.extract_flat,
|
||||
opts.dump_single_json,
|
||||
opts.forceprint.get('playlist'),
|
||||
opts.print_to_file.get('playlist'),
|
||||
write_playlist_infojson,
|
||||
)):
|
||||
if not playlist_pps:
|
||||
opts.extract_flat = 'discard'
|
||||
elif playlist_pps == [{'key': 'FFmpegConcat', 'only_multi_video': True, 'when': 'playlist'}]:
|
||||
opts.extract_flat = 'discard_in_playlist'
|
||||
|
||||
final_ext = (
|
||||
opts.recodevideo if opts.recodevideo in FFmpegVideoConvertorPP.SUPPORTED_EXTS
|
||||
else opts.remuxvideo if opts.remuxvideo in FFmpegVideoRemuxerPP.SUPPORTED_EXTS
|
||||
else opts.audioformat if (opts.extractaudio and opts.audioformat != 'best')
|
||||
else opts.audioformat if (opts.extractaudio and opts.audioformat in FFmpegExtractAudioPP.SUPPORTED_EXTS)
|
||||
else None)
|
||||
|
||||
return parser, opts, urls, {
|
||||
return ParsedOptions(parser, opts, urls, {
|
||||
'usenetrc': opts.usenetrc,
|
||||
'netrc_location': opts.netrc_location,
|
||||
'username': opts.username,
|
||||
@@ -679,6 +779,7 @@ def parse_options(argv=None):
|
||||
'windowsfilenames': opts.windowsfilenames,
|
||||
'ignoreerrors': opts.ignoreerrors,
|
||||
'force_generic_extractor': opts.force_generic_extractor,
|
||||
'allowed_extractors': opts.allowed_extractors or ['default'],
|
||||
'ratelimit': opts.ratelimit,
|
||||
'throttledratelimit': opts.throttledratelimit,
|
||||
'overwrites': opts.overwrites,
|
||||
@@ -686,6 +787,7 @@ def parse_options(argv=None):
|
||||
'file_access_retries': opts.file_access_retries,
|
||||
'fragment_retries': opts.fragment_retries,
|
||||
'extractor_retries': opts.extractor_retries,
|
||||
'retry_sleep_functions': opts.retry_sleep,
|
||||
'skip_unavailable_fragments': opts.skip_unavailable_fragments,
|
||||
'keep_fragments': opts.keep_fragments,
|
||||
'concurrent_fragment_downloads': opts.concurrent_fragment_downloads,
|
||||
@@ -700,6 +802,7 @@ def parse_options(argv=None):
|
||||
'playlistend': opts.playlistend,
|
||||
'playlistreverse': opts.playlist_reverse,
|
||||
'playlistrandom': opts.playlist_random,
|
||||
'lazy_playlist': opts.lazy_playlist,
|
||||
'noplaylist': opts.noplaylist,
|
||||
'logtostderr': opts.outtmpl.get('default') == '-',
|
||||
'consoletitle': opts.consoletitle,
|
||||
@@ -731,6 +834,7 @@ def parse_options(argv=None):
|
||||
'verbose': opts.verbose,
|
||||
'dump_intermediate_pages': opts.dump_intermediate_pages,
|
||||
'write_pages': opts.write_pages,
|
||||
'load_pages': opts.load_pages,
|
||||
'test': opts.test,
|
||||
'keepvideo': opts.keepvideo,
|
||||
'min_filesize': opts.min_filesize,
|
||||
@@ -751,6 +855,7 @@ def parse_options(argv=None):
|
||||
'legacyserverconnect': opts.legacy_server_connect,
|
||||
'nocheckcertificate': opts.no_check_certificate,
|
||||
'prefer_insecure': opts.prefer_insecure,
|
||||
'enable_file_urls': opts.enable_file_urls,
|
||||
'http_headers': opts.headers,
|
||||
'proxy': opts.proxy,
|
||||
'socket_timeout': opts.socket_timeout,
|
||||
@@ -779,6 +884,8 @@ def parse_options(argv=None):
|
||||
'max_sleep_interval': opts.max_sleep_interval,
|
||||
'sleep_interval_subtitles': opts.sleep_interval_subtitles,
|
||||
'external_downloader': opts.external_downloader,
|
||||
'download_ranges': opts.download_ranges,
|
||||
'force_keyframes_at_cuts': opts.force_keyframes_at_cuts,
|
||||
'list_thumbnails': opts.list_thumbnails,
|
||||
'playlist_items': opts.playlist_items,
|
||||
'xattr_set_filesize': opts.xattr_set_filesize,
|
||||
@@ -798,7 +905,7 @@ def parse_options(argv=None):
|
||||
'_warnings': warnings,
|
||||
'_deprecation_warnings': deprecation_warnings,
|
||||
'compat_opts': opts.compat_opts,
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
def _real_main(argv=None):
|
||||
@@ -810,62 +917,70 @@ def _real_main(argv=None):
|
||||
if opts.dump_user_agent:
|
||||
ua = traverse_obj(opts.headers, 'User-Agent', casesense=False, default=std_headers['User-Agent'])
|
||||
write_string(f'{ua}\n', out=sys.stdout)
|
||||
sys.exit(0)
|
||||
return
|
||||
|
||||
if print_extractor_information(opts, all_urls):
|
||||
sys.exit(0)
|
||||
return
|
||||
|
||||
# We may need ffmpeg_location without having access to the YoutubeDL instance
|
||||
# See https://github.com/yt-dlp/yt-dlp/issues/2191
|
||||
if opts.ffmpeg_location:
|
||||
FFmpegPostProcessor._ffmpeg_location.set(opts.ffmpeg_location)
|
||||
|
||||
with YoutubeDL(ydl_opts) as ydl:
|
||||
pre_process = opts.update_self or opts.rm_cachedir
|
||||
actual_use = all_urls or opts.load_info_filename
|
||||
|
||||
# Remove cache dir
|
||||
if opts.rm_cachedir:
|
||||
ydl.cache.remove()
|
||||
|
||||
# Update version
|
||||
if opts.update_self:
|
||||
# If updater returns True, exit. Required for windows
|
||||
if run_update(ydl):
|
||||
if actual_use:
|
||||
sys.exit('ERROR: The program must exit for the update to complete')
|
||||
sys.exit()
|
||||
updater = Updater(ydl)
|
||||
if opts.update_self and updater.update() and actual_use:
|
||||
if updater.cmd:
|
||||
return updater.restart()
|
||||
# This code is reachable only for zip variant in py < 3.10
|
||||
# It makes sense to exit here, but the old behavior is to continue
|
||||
ydl.report_warning('Restart yt-dlp to use the updated version')
|
||||
# return 100, 'ERROR: The program must exit for the update to complete'
|
||||
|
||||
# Maybe do nothing
|
||||
if not actual_use:
|
||||
if opts.update_self or opts.rm_cachedir:
|
||||
sys.exit()
|
||||
if pre_process:
|
||||
return ydl._download_retcode
|
||||
|
||||
ydl.warn_if_short_id(sys.argv[1:] if argv is None else argv)
|
||||
parser.error(
|
||||
'You must provide at least one URL.\n'
|
||||
'Type yt-dlp --help to see a list of all options.')
|
||||
|
||||
parser.destroy()
|
||||
try:
|
||||
if opts.load_info_filename is not None:
|
||||
retcode = ydl.download_with_info_file(expand_path(opts.load_info_filename))
|
||||
return ydl.download_with_info_file(expand_path(opts.load_info_filename))
|
||||
else:
|
||||
retcode = ydl.download(all_urls)
|
||||
return ydl.download(all_urls)
|
||||
except DownloadCancelled:
|
||||
ydl.to_screen('Aborting remaining downloads')
|
||||
retcode = 101
|
||||
|
||||
sys.exit(retcode)
|
||||
return 101
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
global _IN_CLI
|
||||
_IN_CLI = True
|
||||
try:
|
||||
_real_main(argv)
|
||||
_exit(*variadic(_real_main(argv)))
|
||||
except DownloadError:
|
||||
sys.exit(1)
|
||||
_exit(1)
|
||||
except SameFileError as e:
|
||||
sys.exit(f'ERROR: {e}')
|
||||
_exit(f'ERROR: {e}')
|
||||
except KeyboardInterrupt:
|
||||
sys.exit('\nERROR: Interrupted by user')
|
||||
_exit('\nERROR: Interrupted by user')
|
||||
except BrokenPipeError as e:
|
||||
# https://docs.python.org/3/library/signal.html#note-on-sigpipe
|
||||
devnull = os.open(os.devnull, os.O_WRONLY)
|
||||
os.dup2(devnull, sys.stdout.fileno())
|
||||
sys.exit(f'\nERROR: {e}')
|
||||
_exit(f'\nERROR: {e}')
|
||||
except optparse.OptParseError as e:
|
||||
_exit(2, f'\n{e}')
|
||||
|
||||
|
||||
from .extractor import gen_extractors, list_extractors
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Execute with
|
||||
# $ python -m yt_dlp
|
||||
|
||||
import sys
|
||||
|
||||
if __package__ is None and not hasattr(sys, 'frozen'):
|
||||
if __package__ is None and not getattr(sys, 'frozen', False):
|
||||
# direct call of __main__.py
|
||||
import os.path
|
||||
path = os.path.realpath(os.path.abspath(__file__))
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import base64
|
||||
from math import ceil
|
||||
|
||||
from .compat import compat_b64decode, compat_ord
|
||||
from .compat import compat_ord
|
||||
from .dependencies import Cryptodome_AES
|
||||
from .utils import bytes_to_intlist, intlist_to_bytes
|
||||
|
||||
@@ -23,16 +24,59 @@ else:
|
||||
return intlist_to_bytes(aes_gcm_decrypt_and_verify(*map(bytes_to_intlist, (data, key, tag, nonce))))
|
||||
|
||||
|
||||
def unpad_pkcs7(data):
|
||||
return data[:-compat_ord(data[-1])]
|
||||
def aes_cbc_encrypt_bytes(data, key, iv, **kwargs):
|
||||
return intlist_to_bytes(aes_cbc_encrypt(*map(bytes_to_intlist, (data, key, iv)), **kwargs))
|
||||
|
||||
|
||||
BLOCK_SIZE_BYTES = 16
|
||||
|
||||
|
||||
def unpad_pkcs7(data):
|
||||
return data[:-compat_ord(data[-1])]
|
||||
|
||||
|
||||
def pkcs7_padding(data):
|
||||
"""
|
||||
PKCS#7 padding
|
||||
|
||||
@param {int[]} data cleartext
|
||||
@returns {int[]} padding data
|
||||
"""
|
||||
|
||||
remaining_length = BLOCK_SIZE_BYTES - len(data) % BLOCK_SIZE_BYTES
|
||||
return data + [remaining_length] * remaining_length
|
||||
|
||||
|
||||
def pad_block(block, padding_mode):
|
||||
"""
|
||||
Pad a block with the given padding mode
|
||||
@param {int[]} block block to pad
|
||||
@param padding_mode padding mode
|
||||
"""
|
||||
padding_size = BLOCK_SIZE_BYTES - len(block)
|
||||
|
||||
PADDING_BYTE = {
|
||||
'pkcs7': padding_size,
|
||||
'iso7816': 0x0,
|
||||
'whitespace': 0x20,
|
||||
'zero': 0x0,
|
||||
}
|
||||
|
||||
if padding_size < 0:
|
||||
raise ValueError('Block size exceeded')
|
||||
elif padding_mode not in PADDING_BYTE:
|
||||
raise NotImplementedError(f'Padding mode {padding_mode} is not implemented')
|
||||
|
||||
if padding_mode == 'iso7816' and padding_size:
|
||||
block = block + [0x80] # NB: += mutates list
|
||||
padding_size -= 1
|
||||
|
||||
return block + [PADDING_BYTE[padding_mode]] * padding_size
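A minimal sketch of the padding helpers above, using a 13-byte block (BLOCK_SIZE_BYTES is 16; values are illustrative):

    block = list(b'hello, world!')                 # 13 bytes as an int list
    print(pkcs7_padding(block)[-3:])               # [3, 3, 3]   - PKCS#7 pads with the pad length
    print(pad_block(block, 'iso7816')[-3:])        # [128, 0, 0] - 0x80 marker, then zero bytes
    print(len(pad_block(block, 'whitespace')))     # 16          - always filled up to the block size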
|
||||
|
||||
|
||||
def aes_ecb_encrypt(data, key, iv=None):
|
||||
"""
|
||||
Encrypt with aes in ECB mode
|
||||
Encrypt with aes in ECB mode. Using PKCS#7 padding
|
||||
|
||||
@param {int[]} data cleartext
|
||||
@param {int[]} key 16/24/32-Byte cipher key
|
||||
@@ -45,8 +89,7 @@ def aes_ecb_encrypt(data, key, iv=None):
|
||||
encrypted_data = []
|
||||
for i in range(block_count):
|
||||
block = data[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES]
|
||||
encrypted_data += aes_encrypt(block, expanded_key)
|
||||
encrypted_data = encrypted_data[:len(data)]
|
||||
encrypted_data += aes_encrypt(pkcs7_padding(block), expanded_key)
|
||||
|
||||
return encrypted_data
|
||||
|
||||
@@ -136,13 +179,14 @@ def aes_cbc_decrypt(data, key, iv):
|
||||
return decrypted_data
|
||||
|
||||
|
||||
def aes_cbc_encrypt(data, key, iv):
|
||||
def aes_cbc_encrypt(data, key, iv, *, padding_mode='pkcs7'):
|
||||
"""
|
||||
Encrypt with aes in CBC mode. Using PKCS#7 padding
|
||||
Encrypt with aes in CBC mode
|
||||
|
||||
@param {int[]} data cleartext
|
||||
@param {int[]} key 16/24/32-Byte cipher key
|
||||
@param {int[]} iv 16-Byte IV
|
||||
@param padding_mode Padding mode to use
|
||||
@returns {int[]} encrypted data
|
||||
"""
|
||||
expanded_key = key_expansion(key)
|
||||
@@ -152,8 +196,8 @@ def aes_cbc_encrypt(data, key, iv):
|
||||
previous_cipher_block = iv
|
||||
for i in range(block_count):
|
||||
block = data[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES]
|
||||
remaining_length = BLOCK_SIZE_BYTES - len(block)
|
||||
block += [remaining_length] * remaining_length
|
||||
block = pad_block(block, padding_mode)
|
||||
|
||||
mixed_block = xor(block, previous_cipher_block)
|
||||
|
||||
encrypted_block = aes_encrypt(mixed_block, expanded_key)
|
||||
@@ -264,7 +308,7 @@ def aes_decrypt_text(data, password, key_size_bytes):
|
||||
"""
|
||||
NONCE_LENGTH_BYTES = 8
|
||||
|
||||
data = bytes_to_intlist(compat_b64decode(data))
|
||||
data = bytes_to_intlist(base64.b64decode(data))
|
||||
password = bytes_to_intlist(password.encode())
|
||||
|
||||
key = password[:key_size_bytes] + [0] * (key_size_bytes - len(password))
|
||||
@@ -501,13 +545,23 @@ def ghash(subkey, data):
|
||||
|
||||
|
||||
__all__ = [
|
||||
'aes_ctr_decrypt',
|
||||
'aes_cbc_decrypt',
|
||||
'aes_cbc_decrypt_bytes',
|
||||
'aes_ctr_decrypt',
|
||||
'aes_decrypt_text',
|
||||
'aes_encrypt',
|
||||
'aes_decrypt',
|
||||
'aes_ecb_decrypt',
|
||||
'aes_gcm_decrypt_and_verify',
|
||||
'aes_gcm_decrypt_and_verify_bytes',
|
||||
|
||||
'aes_cbc_encrypt',
|
||||
'aes_cbc_encrypt_bytes',
|
||||
'aes_ctr_encrypt',
|
||||
'aes_ecb_encrypt',
|
||||
'aes_encrypt',
|
||||
|
||||
'key_expansion',
|
||||
'pad_block',
|
||||
'pkcs7_padding',
|
||||
'unpad_pkcs7',
|
||||
]
|
||||
|
||||
@@ -5,9 +5,10 @@ import os
|
||||
import re
|
||||
import shutil
|
||||
import traceback
|
||||
import urllib.parse
|
||||
|
||||
from .compat import compat_getenv
|
||||
from .utils import expand_path, write_json_file
|
||||
from .utils import expand_path, traverse_obj, version_tuple, write_json_file
|
||||
from .version import __version__
|
||||
|
||||
|
||||
class Cache:
|
||||
@@ -17,16 +18,14 @@ class Cache:
|
||||
def _get_root_dir(self):
|
||||
res = self._ydl.params.get('cachedir')
|
||||
if res is None:
|
||||
cache_root = compat_getenv('XDG_CACHE_HOME', '~/.cache')
|
||||
cache_root = os.getenv('XDG_CACHE_HOME', '~/.cache')
|
||||
res = os.path.join(cache_root, 'yt-dlp')
|
||||
return expand_path(res)
|
||||
|
||||
def _get_cache_fn(self, section, key, dtype):
|
||||
assert re.match(r'^[a-zA-Z0-9_.-]+$', section), \
|
||||
'invalid section %r' % section
|
||||
assert re.match(r'^[a-zA-Z0-9_.-]+$', key), 'invalid key %r' % key
|
||||
return os.path.join(
|
||||
self._get_root_dir(), section, f'{key}.{dtype}')
|
||||
assert re.match(r'^[\w.-]+$', section), f'invalid section {section!r}'
|
||||
key = urllib.parse.quote(key, safe='').replace('%', ',') # encode non-ascii characters
|
||||
return os.path.join(self._get_root_dir(), section, f'{key}.{dtype}')
|
||||
|
||||
@property
|
||||
def enabled(self):
|
||||
@@ -46,12 +45,20 @@ class Cache:
|
||||
if ose.errno != errno.EEXIST:
|
||||
raise
|
||||
self._ydl.write_debug(f'Saving {section}.{key} to cache')
|
||||
write_json_file(data, fn)
|
||||
write_json_file({'yt-dlp_version': __version__, 'data': data}, fn)
|
||||
except Exception:
|
||||
tb = traceback.format_exc()
|
||||
self._ydl.report_warning(f'Writing cache to {fn!r} failed: {tb}')
|
||||
|
||||
def load(self, section, key, dtype='json', default=None):
|
||||
def _validate(self, data, min_ver):
|
||||
version = traverse_obj(data, 'yt-dlp_version')
|
||||
if not version: # Backward compatibility
|
||||
data, version = {'data': data}, '2022.08.19'
|
||||
if not min_ver or version_tuple(version) >= version_tuple(min_ver):
|
||||
return data['data']
|
||||
self._ydl.write_debug(f'Discarding old cache from version {version} (needs {min_ver})')
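A minimal sketch (simplified assumptions, illustrative version strings) of the envelope that save() writes and _validate() above unwraps:

    def unwrap(stored, min_ver=None):
        version_tuple = lambda v: tuple(map(int, v.split('.')))  # simplified stand-in for utils.version_tuple
        if not isinstance(stored, dict) or 'yt-dlp_version' not in stored:
            stored = {'yt-dlp_version': '2022.08.19', 'data': stored}  # legacy, unversioned cache entry
        if min_ver and version_tuple(stored['yt-dlp_version']) < version_tuple(min_ver):
            return None  # too old; the real code logs this and falls back to `default`
        return stored['data']

    print(unwrap({'yt-dlp_version': '2023.01.06', 'data': [1, 2]}, min_ver='2022.10.04'))  # [1, 2]
    print(unwrap([1, 2]))  # [1, 2] -- a pre-envelope cache file still loads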
|
||||
|
||||
def load(self, section, key, dtype='json', default=None, *, min_ver=None):
|
||||
assert dtype in ('json',)
|
||||
|
||||
if not self.enabled:
|
||||
@@ -62,8 +69,8 @@ class Cache:
|
||||
try:
|
||||
with open(cache_fn, encoding='utf-8') as cachef:
|
||||
self._ydl.write_debug(f'Loading {section}.{key} from cache')
|
||||
return json.load(cachef)
|
||||
except ValueError:
|
||||
return self._validate(json.load(cachef), min_ver)
|
||||
except (ValueError, KeyError):
|
||||
try:
|
||||
file_size = os.path.getsize(cache_fn)
|
||||
except OSError as oe:
|
||||
|
||||
@@ -1,25 +1,20 @@
|
||||
import contextlib
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import warnings
|
||||
import xml.etree.ElementTree as etree
|
||||
|
||||
from . import re
|
||||
from ._deprecated import * # noqa: F401, F403
|
||||
from .compat_utils import passthrough_module
|
||||
|
||||
|
||||
# XXX: Implement this the same way as other DeprecationWarnings without circular import
|
||||
passthrough_module(__name__, '._legacy', callback=lambda attr: warnings.warn(
|
||||
DeprecationWarning(f'{__name__}.{attr} is deprecated'), stacklevel=2))
|
||||
del passthrough_module
|
||||
DeprecationWarning(f'{__name__}.{attr} is deprecated'), stacklevel=3))
|
||||
|
||||
|
||||
# HTMLParseError has been deprecated in Python 3.3 and removed in
|
||||
# Python 3.5. Introducing dummy exception for Python >3.5 for compatible
|
||||
# and uniform cross-version exception handling
|
||||
class compat_HTMLParseError(Exception):
|
||||
class compat_HTMLParseError(ValueError):
|
||||
pass
|
||||
|
||||
|
||||
@@ -37,6 +32,7 @@ compat_os_name = os._name if os.name == 'java' else os.name
|
||||
|
||||
if compat_os_name == 'nt':
|
||||
def compat_shlex_quote(s):
|
||||
import re
|
||||
return s if re.match(r'^[-_\w./]+$', s) else '"%s"' % s.replace('"', '\\"')
|
||||
else:
|
||||
from shlex import quote as compat_shlex_quote # noqa: F401
|
||||
@@ -52,7 +48,7 @@ if compat_os_name == 'nt' and sys.version_info < (3, 8):
|
||||
def compat_realpath(path):
|
||||
while os.path.islink(path):
|
||||
path = os.path.abspath(os.readlink(path))
|
||||
return path
|
||||
return os.path.realpath(path)
|
||||
else:
|
||||
compat_realpath = os.path.realpath
|
||||
|
||||
@@ -76,15 +72,7 @@ else:
|
||||
compat_expanduser = os.path.expanduser
|
||||
|
||||
|
||||
WINDOWS_VT_MODE = False if compat_os_name == 'nt' else None
|
||||
|
||||
|
||||
def windows_enable_vt_mode(): # TODO: Do this the proper way https://bugs.python.org/issue30075
|
||||
if compat_os_name != 'nt':
|
||||
return
|
||||
global WINDOWS_VT_MODE
|
||||
startupinfo = subprocess.STARTUPINFO()
|
||||
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
|
||||
with contextlib.suppress(Exception):
|
||||
subprocess.Popen('', shell=True, startupinfo=startupinfo).wait()
|
||||
WINDOWS_VT_MODE = True
|
||||
# NB: Add modules that are imported dynamically here so that PyInstaller can find them
|
||||
# See https://github.com/pyinstaller/pyinstaller-hooks-contrib/issues/438
|
||||
if False:
|
||||
from . import _legacy # noqa: F401
|
||||
|
||||
@@ -1,52 +1,16 @@
|
||||
"""Deprecated - New code should avoid these"""
|
||||
|
||||
import base64
|
||||
import getpass
|
||||
import html
|
||||
import html.parser
|
||||
import http
|
||||
import http.client
|
||||
import http.cookiejar
|
||||
import http.cookies
|
||||
import http.server
|
||||
import itertools
|
||||
import os
|
||||
import shutil
|
||||
import struct
|
||||
import tokenize
|
||||
import urllib
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
|
||||
compat_str = str
|
||||
|
||||
compat_b64decode = base64.b64decode
|
||||
compat_chr = chr
|
||||
compat_cookiejar = http.cookiejar
|
||||
compat_cookiejar_Cookie = http.cookiejar.Cookie
|
||||
compat_cookies_SimpleCookie = http.cookies.SimpleCookie
|
||||
compat_get_terminal_size = shutil.get_terminal_size
|
||||
compat_getenv = os.getenv
|
||||
compat_getpass = getpass.getpass
|
||||
compat_html_entities = html.entities
|
||||
compat_html_entities_html5 = html.entities.html5
|
||||
compat_HTMLParser = html.parser.HTMLParser
|
||||
compat_http_client = http.client
|
||||
compat_http_server = http.server
|
||||
|
||||
compat_HTTPError = urllib.error.HTTPError
|
||||
compat_itertools_count = itertools.count
|
||||
compat_urlparse = urllib.parse
|
||||
compat_parse_qs = urllib.parse.parse_qs
|
||||
compat_str = str
|
||||
compat_struct_pack = struct.pack
|
||||
compat_struct_unpack = struct.unpack
|
||||
compat_tokenize_tokenize = tokenize.tokenize
|
||||
compat_urllib_error = urllib.error
|
||||
compat_urllib_parse_unquote = urllib.parse.unquote
|
||||
compat_urllib_parse_unquote_plus = urllib.parse.unquote_plus
|
||||
compat_urllib_parse_urlencode = urllib.parse.urlencode
|
||||
compat_urllib_parse_urlparse = urllib.parse.urlparse
|
||||
compat_urllib_request = urllib.request
|
||||
compat_urlparse = compat_urllib_parse = urllib.parse
|
||||
|
||||
|
||||
def compat_setenv(key, value, env=os.environ):
|
||||
env[key] = value
|
||||
|
||||
|
||||
__all__ = [x for x in globals() if x.startswith('compat_')]
|
||||
|
||||
@@ -2,25 +2,40 @@
|
||||
|
||||
import collections
|
||||
import ctypes
|
||||
import http
|
||||
import getpass
|
||||
import html.entities
|
||||
import html.parser
|
||||
import http.client
|
||||
import http.cookiejar
|
||||
import http.cookies
|
||||
import http.server
|
||||
import itertools
|
||||
import os
|
||||
import shlex
|
||||
import shutil
|
||||
import socket
|
||||
import struct
|
||||
import urllib
|
||||
import tokenize
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
import xml.etree.ElementTree as etree
|
||||
from subprocess import DEVNULL
|
||||
|
||||
from .asyncio import run as compat_asyncio_run # noqa: F401
|
||||
from .re import Pattern as compat_Pattern # noqa: F401
|
||||
from .re import match as compat_Match # noqa: F401
|
||||
# isort: split
|
||||
import asyncio # noqa: F401
|
||||
import re # noqa: F401
|
||||
from asyncio import run as compat_asyncio_run # noqa: F401
|
||||
from re import Pattern as compat_Pattern # noqa: F401
|
||||
from re import match as compat_Match # noqa: F401
|
||||
|
||||
from .compat_utils import passthrough_module
|
||||
from ..dependencies import Cryptodome_AES as compat_pycrypto_AES # noqa: F401
|
||||
from ..dependencies import brotli as compat_brotli # noqa: F401
|
||||
from ..dependencies import websockets as compat_websockets # noqa: F401
|
||||
|
||||
passthrough_module(__name__, '...utils', ('WINDOWS_VT_MODE', 'windows_enable_vt_mode'))
|
||||
|
||||
|
||||
# compat_ctypes_WINFUNCTYPE = ctypes.WINFUNCTYPE
|
||||
# will not work since ctypes.WINFUNCTYPE does not exist in UNIX machines
|
||||
@@ -28,14 +43,32 @@ def compat_ctypes_WINFUNCTYPE(*args, **kwargs):
|
||||
return ctypes.WINFUNCTYPE(*args, **kwargs)
|
||||
|
||||
|
||||
def compat_setenv(key, value, env=os.environ):
|
||||
env[key] = value
|
||||
|
||||
|
||||
compat_basestring = str
|
||||
compat_casefold = str.casefold
|
||||
compat_chr = chr
|
||||
compat_collections_abc = collections.abc
|
||||
compat_cookiejar = http.cookiejar
|
||||
compat_cookiejar_Cookie = http.cookiejar.Cookie
|
||||
compat_cookies = http.cookies
|
||||
compat_cookies_SimpleCookie = http.cookies.SimpleCookie
|
||||
compat_etree_Element = etree.Element
|
||||
compat_etree_register_namespace = etree.register_namespace
|
||||
compat_filter = filter
|
||||
compat_get_terminal_size = shutil.get_terminal_size
|
||||
compat_getenv = os.getenv
|
||||
compat_getpass = getpass.getpass
|
||||
compat_html_entities = html.entities
|
||||
compat_html_entities_html5 = html.entities.html5
|
||||
compat_HTMLParser = html.parser.HTMLParser
|
||||
compat_http_client = http.client
|
||||
compat_http_server = http.server
|
||||
compat_input = input
|
||||
compat_integer_types = (int, )
|
||||
compat_itertools_count = itertools.count
|
||||
compat_kwargs = lambda kwargs: kwargs
|
||||
compat_map = map
|
||||
compat_numeric_types = (int, float, complex)
|
||||
@@ -43,11 +76,18 @@ compat_print = print
|
||||
compat_shlex_split = shlex.split
|
||||
compat_socket_create_connection = socket.create_connection
|
||||
compat_Struct = struct.Struct
|
||||
compat_struct_pack = struct.pack
|
||||
compat_struct_unpack = struct.unpack
|
||||
compat_subprocess_get_DEVNULL = lambda: DEVNULL
|
||||
compat_tokenize_tokenize = tokenize.tokenize
|
||||
compat_urllib_error = urllib.error
|
||||
compat_urllib_parse = urllib.parse
|
||||
compat_urllib_parse_quote = urllib.parse.quote
|
||||
compat_urllib_parse_quote_plus = urllib.parse.quote_plus
|
||||
compat_urllib_parse_unquote_plus = urllib.parse.unquote_plus
|
||||
compat_urllib_parse_unquote_to_bytes = urllib.parse.unquote_to_bytes
|
||||
compat_urllib_parse_urlunparse = urllib.parse.urlunparse
|
||||
compat_urllib_request = urllib.request
|
||||
compat_urllib_request_DataHandler = urllib.request.DataHandler
|
||||
compat_urllib_response = urllib.response
|
||||
compat_urlretrieve = urllib.request.urlretrieve
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
# flake8: noqa: F405
|
||||
from asyncio import * # noqa: F403
|
||||
|
||||
from .compat_utils import passthrough_module
|
||||
|
||||
passthrough_module(__name__, 'asyncio')
|
||||
del passthrough_module
|
||||
|
||||
try:
|
||||
run # >= 3.7
|
||||
except NameError:
|
||||
def run(coro):
|
||||
try:
|
||||
loop = get_event_loop()
|
||||
except RuntimeError:
|
||||
loop = new_event_loop()
|
||||
set_event_loop(loop)
|
||||
loop.run_until_complete(coro)
|
||||
|
||||
try:
|
||||
all_tasks # >= 3.7
|
||||
except NameError:
|
||||
all_tasks = Task.all_tasks
|
||||
@@ -4,7 +4,6 @@ import importlib
|
||||
import sys
|
||||
import types
|
||||
|
||||
|
||||
_NO_ATTRIBUTE = object()
|
||||
|
||||
_Package = collections.namedtuple('Package', ('name', 'version'))
|
||||
@@ -31,9 +30,9 @@ def _is_package(module):
|
||||
return True
|
||||
|
||||
|
||||
def passthrough_module(parent, child, *, callback=lambda _: None):
|
||||
def passthrough_module(parent, child, allowed_attributes=None, *, callback=lambda _: None):
|
||||
parent_module = importlib.import_module(parent)
|
||||
child_module = importlib.import_module(child, parent)
|
||||
child_module = None # Import child module only as needed
|
||||
|
||||
class PassthroughModule(types.ModuleType):
|
||||
def __getattr__(self, attr):
|
||||
@@ -41,19 +40,30 @@ def passthrough_module(parent, child, *, callback=lambda _: None):
|
||||
with contextlib.suppress(ImportError):
|
||||
return importlib.import_module(f'.{attr}', parent)
|
||||
|
||||
ret = _NO_ATTRIBUTE
|
||||
ret = self.__from_child(attr)
|
||||
if ret is _NO_ATTRIBUTE:
|
||||
raise AttributeError(f'module {parent} has no attribute {attr}')
|
||||
callback(attr)
|
||||
return ret
|
||||
|
||||
def __from_child(self, attr):
|
||||
if allowed_attributes is None:
|
||||
if attr.startswith('__') and attr.endswith('__'):
|
||||
return _NO_ATTRIBUTE
|
||||
elif attr not in allowed_attributes:
|
||||
return _NO_ATTRIBUTE
|
||||
|
||||
nonlocal child_module
|
||||
child_module = child_module or importlib.import_module(child, parent)
|
||||
|
||||
with contextlib.suppress(AttributeError):
|
||||
ret = getattr(child_module, attr)
|
||||
return getattr(child_module, attr)
|
||||
|
||||
if _is_package(child_module):
|
||||
with contextlib.suppress(ImportError):
|
||||
ret = importlib.import_module(f'.{attr}', child)
|
||||
return importlib.import_module(f'.{attr}', child)
|
||||
|
||||
if ret is _NO_ATTRIBUTE:
|
||||
raise AttributeError(f'module {parent} has no attribute {attr}')
|
||||
|
||||
callback(attr)
|
||||
return ret
|
||||
return _NO_ATTRIBUTE
|
||||
|
||||
# Python 3.6 does not have module level __getattr__
|
||||
# https://peps.python.org/pep-0562/
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.